diff --git a/jobs/nr-garbage-collector/Jenkinsfiles/build.groovy b/jobs/nr-garbage-collector/Jenkinsfiles/build.groovy deleted file mode 100644 index 38918ae27..000000000 --- a/jobs/nr-garbage-collector/Jenkinsfiles/build.groovy +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env groovy -// Copyright © 2018 Province of British Columbia -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -//JENKINS DEPLOY ENVIRONMENT VARIABLES: -// - JENKINS_JAVA_OVERRIDES -Dhudson.model.DirectoryBrowserSupport.CSP= -Duser.timezone=America/Vancouver -// -> user.timezone : set the local timezone so logfiles report correct time -// -> hudson.model.DirectoryBrowserSupport.CSP : removes restrictions on CSS file load, thus html pages of test reports are displayed pretty -// See: https://docs.openshift.com/container-platform/3.9/using_images/other_images/jenkins.html for a complete list of JENKINS env vars -// define constants -def COMPONENT_NAME = 'nr-garbage-collector' -def TAG_NAME = 'dev' -def NAMESPACE = 'servicebc-ne' - -// define groovy functions -import groovy.json.JsonOutput - -// Get an image's hash tag -String getImageTagHash(String imageName, String tag = "") { - - if(!tag?.trim()) { - tag = "latest" - } - - def istag = openshift.raw("get istag ${imageName}:${tag} -o template --template='{{.image.dockerImageReference}}'") - return istag.out.tokenize('@')[1].trim() -} - -// pipeline -// define job properties - keep 3 builds only -properties([ - [$class: 'BuildDiscarderProperty', strategy: [$class: 'LogRotator', artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '3' - ] - ] -]) - -node { - stage("Build ${COMPONENT_NAME}") { - script { - openshift.withCluster() { - openshift.withProject() { - echo "Building ${COMPONENT_NAME} ..." - def base_build = openshift.selector("bc", "${COMPONENT_NAME}-base") - base_build.startBuild("--wait=true").logs("-f") - def build = openshift.selector("bc", "${COMPONENT_NAME}") - build.startBuild("--wait=true").logs("-f") - } - } - } - } - stage("Tag ${COMPONENT_NAME} to ${TAG_NAME}") { - script { - openshift.withCluster() { - openshift.withProject() { - - echo "Tagging ${COMPONENT_NAME} to ${TAG_NAME} ..." - - // Don't tag with BUILD_ID so the pruner can do its job; it won't delete tagged images. - // Tag the images for deployment based on the image's hash - def IMAGE_HASH = getImageTagHash("${COMPONENT_NAME}") - echo "IMAGE_HASH: ${IMAGE_HASH}" - openshift.tag("${COMPONENT_NAME}@${IMAGE_HASH}", "${COMPONENT_NAME}:${TAG_NAME}") - } - } - } - } -} diff --git a/jobs/nr-garbage-collector/Jenkinsfiles/deploy-prod.groovy b/jobs/nr-garbage-collector/Jenkinsfiles/deploy-prod.groovy deleted file mode 100644 index ca4c08719..000000000 --- a/jobs/nr-garbage-collector/Jenkinsfiles/deploy-prod.groovy +++ /dev/null @@ -1,70 +0,0 @@ -#!/usr/bin/env groovy -// Copyright © 2018 Province of British Columbia -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -//JENKINS DEPLOY ENVIRONMENT VARIABLES: -// - JENKINS_JAVA_OVERRIDES -Dhudson.model.DirectoryBrowserSupport.CSP= -Duser.timezone=America/Vancouver -// -> user.timezone : set the local timezone so logfiles report correct time -// -> hudson.model.DirectoryBrowserSupport.CSP : removes restrictions on CSS file load, thus html pages of test reports are displayed pretty -// See: https://docs.openshift.com/container-platform/3.9/using_images/other_images/jenkins.html for a complete list of JENKINS env vars -// define constants -def NAMESPACE = 'servicebc-ne' -def COMPONENT_NAME = 'nr-garbage-collector' -def TAG_NAME = 'prod' -def SOURCE_TAG = 'test' - -// define groovy functions -import groovy.json.JsonOutput - -// Get an image's hash tag -String getImageTagHash(String imageName, String tag = "") { - - if(!tag?.trim()) { - tag = "latest" - } - - def istag = openshift.raw("get istag ${imageName}:${tag} -o template --template='{{.image.dockerImageReference}}'") - return istag.out.tokenize('@')[1].trim() -} - -// define job properties - keep 2 builds only -properties([ - [$class: 'BuildDiscarderProperty', strategy: [$class: 'LogRotator', artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '2' - ] - ] -]) - -stage("Tag ${COMPONENT_NAME}-${TAG_NAME}") { - script { - timeout(time: 1, unit: 'DAYS') { - input message: "Deploy to PROD?", id: "1234" - } - openshift.withCluster() { - openshift.withProject() { - - echo "Updating ${COMPONENT_NAME}-previous tag..." - def IMAGE_HASH = getImageTagHash("${COMPONENT_NAME}", "${TAG_NAME}") - echo "IMAGE_HASH: ${IMAGE_HASH}" - openshift.tag("${COMPONENT_NAME}@${IMAGE_HASH}", "${COMPONENT_NAME}:${TAG_NAME}-previous") - - echo "Tagging ${COMPONENT_NAME} to ${TAG_NAME} ..." - // Don't tag with BUILD_ID so the pruner can do its job; it won't delete tagged images. - // Tag the images for deployment based on the image's hash - IMAGE_HASH = getImageTagHash("${COMPONENT_NAME}", "${SOURCE_TAG}") - echo "IMAGE_HASH: ${IMAGE_HASH}" - openshift.tag("${COMPONENT_NAME}@${IMAGE_HASH}", "${COMPONENT_NAME}:${TAG_NAME}") - } - } - } -} diff --git a/jobs/nr-garbage-collector/Jenkinsfiles/deploy-test.groovy b/jobs/nr-garbage-collector/Jenkinsfiles/deploy-test.groovy deleted file mode 100644 index 2166cc7db..000000000 --- a/jobs/nr-garbage-collector/Jenkinsfiles/deploy-test.groovy +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env groovy -// Copyright © 2018 Province of British Columbia -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License.
-// -//JENKINS DEPLOY ENVIRONMENT VARIABLES: -// - JENKINS_JAVA_OVERRIDES -Dhudson.model.DirectoryBrowserSupport.CSP= -Duser.timezone=America/Vancouver -// -> user.timezone : set the local timezone so logfiles report correct time -// -> hudson.model.DirectoryBrowserSupport.CSP : removes restrictions on CSS file load, thus html pages of test reports are displayed pretty -// See: https://docs.openshift.com/container-platform/3.9/using_images/other_images/jenkins.html for a complete list of JENKINS env vars -// define constants -def NAMESPACE = 'servicebc-ne' -def COMPONENT_NAME = 'nr-garbage-collector' -def TAG_NAME = 'test' -def SOURCE_TAG = 'dev' - -// define groovy functions -import groovy.json.JsonOutput - -// Get an image's hash tag -String getImageTagHash(String imageName, String tag = "") { - - if(!tag?.trim()) { - tag = "latest" - } - - def istag = openshift.raw("get istag ${imageName}:${tag} -o template --template='{{.image.dockerImageReference}}'") - return istag.out.tokenize('@')[1].trim() -} - -// define job properties - keep 2 builds only -properties([ - [$class: 'BuildDiscarderProperty', strategy: [$class: 'LogRotator', artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '2' - ] - ] -]) - -stage("Tag ${COMPONENT_NAME}-${TAG_NAME}") { - script { - openshift.withCluster() { - openshift.withProject() { - - echo "Updating ${COMPONENT_NAME}-previous tag..." - def IMAGE_HASH = getImageTagHash("${COMPONENT_NAME}", "${TAG_NAME}") - echo "IMAGE_HASH: ${IMAGE_HASH}" - openshift.tag("${COMPONENT_NAME}@${IMAGE_HASH}", "${COMPONENT_NAME}:${TAG_NAME}-previous") - - echo "Tagging ${COMPONENT_NAME} to ${TAG_NAME} ..." - // Don't tag with BUILD_ID so the pruner can do its job; it won't delete tagged images. - // Tag the images for deployment based on the image's hash - IMAGE_HASH = getImageTagHash("${COMPONENT_NAME}", "${SOURCE_TAG}") - echo "IMAGE_HASH: ${IMAGE_HASH}" - openshift.tag("${COMPONENT_NAME}@${IMAGE_HASH}", "${COMPONENT_NAME}:${TAG_NAME}") - } - } - } -} diff --git a/jobs/nr-garbage-collector/Makefile b/jobs/nr-garbage-collector/Makefile deleted file mode 100644 index 8482ad820..000000000 --- a/jobs/nr-garbage-collector/Makefile +++ /dev/null @@ -1,44 +0,0 @@ -SHELL:=/bin/bash -mkfile_path := $(abspath $(lastword $(MAKEFILE_LIST))) -current_dir := $(notdir $(patsubst %/,%,$(dir $(mkfile_path)))) -current_abs_dir := $(patsubst %/,%,$(dir $(mkfile_path))) - -################################################################################# -# COMMANDS # -################################################################################# -clean: clean-build clean-pyc clean-test - rm -rf venv/ - -clean-build: - rm -fr build/ - rm -fr dist/ - rm -fr .eggs/ - find . -name '*.egg-info' -exec rm -fr {} + - find . -name '*.egg' -exec rm -fr {} + - -clean-pyc: - find . -name '*.pyc' -exec rm -f {} + - find . -name '*.pyo' -exec rm -f {} + - find . -name '*~' -exec rm -f {} + - find . -name '__pycache__' -exec rm -fr {} + - -clean-test: - find . -name '.pytest_cache' -exec rm -fr {} + - rm -fr .tox/ - rm -f .coverage - rm -fr htmlcov/ - - -setup: clean create-venv install-dev - -create-venv: - test -f venv/bin/activate || python3 -m venv $(current_abs_dir)/venv - -install-dev: - . 
venv/bin/activate ; \ - pip install --upgrade pip ;\ - pip install -Ur requirements/prod.txt ;\ - pip install -Ur requirements/dev.txt - -flake8: - flake8 nr_garbage_collector.py config.py \ No newline at end of file diff --git a/jobs/nr-garbage-collector/config.py b/jobs/nr-garbage-collector/config.py deleted file mode 100644 index 4c65ebc5a..000000000 --- a/jobs/nr-garbage-collector/config.py +++ /dev/null @@ -1,110 +0,0 @@ -"""Config for this service.""" -import os -import sys - -from dotenv import find_dotenv, load_dotenv - - -# this will load all the envars from a .env file -load_dotenv(find_dotenv()) - -CONFIGURATION = { - 'development': 'config.DevConfig', - 'testing': 'config.TestConfig', - 'production': 'config.ProdConfig', - 'default': 'config.ProdConfig' -} - - -def get_named_config(config_name: str = 'production'): - """Return the configuration object based on the name.""" - if config_name in ['production', 'staging', 'default']: - config = ProdConfig() - elif config_name == 'testing': - config = TestConfig() - elif config_name == 'development': - config = DevConfig() - else: - raise KeyError(f"Unknown configuration '{config_name}'") - return config - - -class Config(object): - """Base config init.""" - - PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - - MAX_ROW_LIMIT = os.getenv('MAX_ROWS', '100') - MIN_DELAY_SECONDS = os.getenv('MIN_DELAY_SECONDS', '600') - SECRET_KEY = 'a secret' - - SQLALCHEMY_TRACK_MODIFICATIONS = False - - STALE_THRESHOLD = os.getenv('STALE_THRESHOLD', 1800) - MAX_ROWS_LIMIT = os.getenv('MAX_ROWS_LIMIT', 100) - SOLR_BASE_URL = os.getenv('SOLR_BASE_URL', None) - - SQLALCHEMY_TRACK_MODIFICATIONS = False - - SQLALCHEMY_MAX_OVERFLOW = 10 - - # POSTGRESQL - DB_USER = os.getenv('DATABASE_USERNAME', '') - DB_PASSWORD = os.getenv('DATABASE_PASSWORD', '') - DB_NAME = os.getenv('DATABASE_NAME', '') - DB_HOST = os.getenv('DATABASE_HOST', '') - DB_PORT = os.getenv('DATABASE_PORT', '5432') - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=DB_USER, - password=DB_PASSWORD, - host=DB_HOST, - port=int(DB_PORT), - name=DB_NAME, - ) - - NRO_USER = os.getenv('NRO_USER', '') - NRO_PASSWORD = os.getenv('NRO_PASSWORD', '') - NRO_DB_NAME = os.getenv('NRO_DB_NAME', '') - NRO_HOST = os.getenv('NRO_HOST', '') - NRO_PORT = int(os.getenv('NRO_PORT', '1521')) - - -class DevConfig(Config): - """Dev environment config.""" - - DEBUG = True - TESTING = True - - -class TestConfig(Config): - """Test environment config.""" - - DEBUG = True - TESTING = True - - # POSTGRESQL - DB_USER = os.getenv('DATABASE_USERNAME', '') - DB_PASSWORD = os.getenv('DATABASE_PASSWORD', '') - DB_NAME = os.getenv('DATABASE_NAME_TEST', '') - DB_HOST = os.getenv('DATABASE_HOST', '') - DB_PORT = os.getenv('DATABASE_PORT', '5432') - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=DB_USER, - password=DB_PASSWORD, - host=DB_HOST, - port=int(DB_PORT), - name=DB_NAME, - ) - - -class ProdConfig(Config): - """Prod environment config.""" - - SECRET_KEY = os.getenv('SECRET_KEY', None) - - if not SECRET_KEY: - SECRET_KEY = os.urandom(24) - print('WARNING: SECRET_KEY being set as a one-shot', file=sys.stderr) - - TESTING = False - DEBUG = False diff --git a/jobs/nr-garbage-collector/logging.conf b/jobs/nr-garbage-collector/logging.conf deleted file mode 100644 index b2f3c4f57..000000000 --- a/jobs/nr-garbage-collector/logging.conf +++ /dev/null @@ -1,28 +0,0 @@ -[loggers] -keys=root,api - -[handlers] -keys=console - 
-[formatters] -keys=simple - -[logger_root] -level=DEBUG -handlers=console - -[logger_api] -level=DEBUG -handlers=console -qualname=nr_garbage_collector -propagate=0 - -[handler_console] -class=StreamHandler -level=DEBUG -formatter=simple -args=(sys.stdout,) - -[formatter_simple] -format=%(asctime)s - %(name)s - %(levelname)s in %(module)s:%(filename)s:%(lineno)d - %(funcName)s: %(message)s -datefmt= \ No newline at end of file diff --git a/jobs/nr-garbage-collector/nr_garbage_collector.py b/jobs/nr-garbage-collector/nr_garbage_collector.py deleted file mode 100644 index 7d871c5d4..000000000 --- a/jobs/nr-garbage-collector/nr_garbage_collector.py +++ /dev/null @@ -1,116 +0,0 @@ -"""Script used to regularly cancel test NRs.""" -from flask import Flask, current_app -from namex import db -from namex.models import Request, State -from namex.resources.name_requests.abstract_solr_resource import AbstractSolrResource -from namex.utils.logging import setup_logging -from sqlalchemy import and_, or_, text - -from config import get_named_config - - -setup_logging() # important to do this first - - -def create_app(environment='production'): - """Create instance of service.""" - app = Flask(__name__) - app.config.from_object(get_named_config(environment)) - db.init_app(app) - app.app_context().push() - current_app.logger.debug('created the Flask App and pushed the App Context') - - return app - - -def delete_from_solr(request, original_state: str, cancelled_nrs: list) -> list: - """Delete doc from solr core.""" - if request.names.all(): - try: - current_app.logger.debug(' -- deleting from solr') - deletion = AbstractSolrResource.delete_solr_doc('possible.conflicts', request.nrNum) - if deletion: - cancelled_nrs.append( - { - 'id': request.nrNum, - 'name': request.names[0].name, - 'source': 'NR', - 'start_date': request.submittedDate.strftime('%Y-%m-%dT%H:%M:00Z') - } - ) - current_app.logger.debug(' -- deleted from solr') - else: - raise Exception(f'Failed to delete {request.nrNum} from solr possible.conflicts core') - except Exception as err: - current_app.logger.error(err) - current_app.logger.debug(f'setting {request.nrNum} back to original state...') - request.stateCd = original_state - - return cancelled_nrs - - -def run_nr_garbage_collection(): - """Search for stale test NRs and cancel them.""" - app = create_app() - - delay = current_app.config.get('STALE_THRESHOLD') - max_rows = current_app.config.get('MAX_ROWS_LIMIT') - cancelled_nrs = [] - - try: - reqs = db.session.query(Request). \ - filter(or_(Request.stateCd.in_((State.COND_RESERVE, State.RESERVED)), - and_(Request.stateCd == State.DRAFT, or_(Request._source == 'NAMEREQUEST', - Request.nrNum.contains('NR L'))))). \ - filter(Request.lastUpdate <= text(f"(now() at time zone 'utc') - INTERVAL '{delay} SECONDS'")). \ - order_by(Request.lastUpdate.asc()). \ - limit(max_rows). 
\ - with_for_update().all() - - row_count = 0 - for r in reqs: - ignore_nr = False - if r.payments: - # only cancel this NR if there is a payment_status_code=CREATED and payment_completion_date - for payment in r.payments: - if payment.payment_status_code != 'CREATED' or not payment.payment_completion_date: - # skip this NR - ignore_nr = True - else: - # if there are any payments that fit this criteria, cancel it - ignore_nr = False - break - if not ignore_nr: - current_app.logger.debug(f'Cancelling {r.nrNum}...') - original_state = r.stateCd - r.stateCd = State.CANCELLED - current_app.logger.debug(' -- cancelled in postgres') - - # all cases are deleted from solr and cancelled in postgres - cancelled_nrs = delete_from_solr(r, original_state, cancelled_nrs) - db.session.add(r) - row_count += 1 - - db.session.commit() - current_app.logger.debug(f'Successfully cancelled {row_count} NRs.') - app.do_teardown_appcontext() - - except Exception as err: - current_app.logger.error(err) - current_app.logger.debug(f'adding {len(cancelled_nrs)} back into possible conflicts...') - try: - addition = AbstractSolrResource.add_solr_doc('possible.conflicts', cancelled_nrs) - if addition: - current_app.logger.debug(f'successfully added {len(cancelled_nrs)} back into possible.conflicts.') - else: - raise Exception('Failed to add to solr possible.conflicts core') - except Exception as err: - current_app.logger.error(err) - current_app.logger.error(f'Failed to add {len(cancelled_nrs)} nrs back into possible conflicts core.') - current_app.logger.debug('rolling back db changes...') - db.session.rollback() - current_app.logger.debug('successfully rolled back db.') - - -if __name__ == '__main__': - run_nr_garbage_collection() diff --git a/jobs/nr-garbage-collector/openshift/cronjob.param b/jobs/nr-garbage-collector/openshift/cronjob.param deleted file mode 100644 index 5ef695269..000000000 --- a/jobs/nr-garbage-collector/openshift/cronjob.param +++ /dev/null @@ -1,13 +0,0 @@ -#========================================================= -# Template parameters for: -# Project: legal-updater -# Component: cronjob -# Action: build -# Template File: templates/cronjob.json -# Hint: oc process -n servicebc-ne-dev -f templates/cronjob.json --param-file=cronjob.param | oc create -n servicebc-ne-dev -f - -# Hint: oc process -n servicebc-ne-dev -f templates/cronjob.json --param-file=cronjob.param | oc replace -n servicebc-ne-dev -f - -#========================================================= -NAME="nr-garbage-collector" -NAMESPACE="servicebc-ne" -ENV="dev" -SCHEDULE="*/5 * * * *" diff --git a/jobs/nr-garbage-collector/openshift/job.param b/jobs/nr-garbage-collector/openshift/job.param deleted file mode 100644 index b6cb12634..000000000 --- a/jobs/nr-garbage-collector/openshift/job.param +++ /dev/null @@ -1,12 +0,0 @@ -#========================================================= -# Template parameters for: -# Project: legal-updater -# Component: job -# Action: build -# Template File: templates/job.json -# Hint: oc process -n servicebc-ne-dev -f templates/job.json --param-file=job.param | oc create -n servicebc-ne-dev -f - -# Hint: oc process -n servicebc-ne-dev -f templates/job.json --param-file=job.param | oc replace -n servicebc-ne-dev -f - -#========================================================= -NAME="nr-garbage-collector" -NAMESPACE="servicebc-ne" -ENV="dev" diff --git a/jobs/nr-garbage-collector/openshift/pipeline.param b/jobs/nr-garbage-collector/openshift/pipeline.param deleted file mode 100644 index 
15350a2e2..000000000 --- a/jobs/nr-garbage-collector/openshift/pipeline.param +++ /dev/null @@ -1,16 +0,0 @@ -#========================================================= -# Template parameters for: -# Project: legal-updater -# Component: pipeline -# Action: build -# Template File: templates/pipeline.json -# Hint: oc process -n servicebc-ne-tools -f templates/pipeline.json --param-file=pipeline.param | oc create -n servicebc-ne-tools -f - -# Hint: oc process -n servicebc-ne-tools -f templates/pipeline.json --param-file=pipeline.param | oc replace -n servicebc-ne-tools -f - -#========================================================= -NAME="nr-garbage-collector" -PIPELINE_PURPOSE="build-dev" -GIT_REPO_URL="https://github.com/bcgov/namex.git" -GIT_REF="master" -SOURCE_CONTEXT_DIR="jobs/nr-garbage-collector" -JENKINS_FILE="Jenkinsfiles/build.groovy" -WEBHOOK="unknown" diff --git a/jobs/nr-garbage-collector/openshift/templates/cronjob.json b/jobs/nr-garbage-collector/openshift/templates/cronjob.json deleted file mode 100644 index 307b14daa..000000000 --- a/jobs/nr-garbage-collector/openshift/templates/cronjob.json +++ /dev/null @@ -1,209 +0,0 @@ -{ - "kind": "Template", - "apiVersion": "v1", - "metadata": { - "name": "cronjob-template" - }, - "objects": [ - { - "apiVersion": "batch/v1beta1", - "kind": "CronJob", - "metadata": { - "creationTimestamp": null, - "name": "${NAME}", - "selfLink": "/apis/batch/v1beta1/namespaces/${NAMESPACE}-${ENV}/cronjobs/${NAME}" - }, - "spec": { - "concurrencyPolicy": "Forbid", - "failedJobsHistoryLimit": 1, - "jobTemplate": { - "metadata": { - "creationTimestamp": null - }, - "spec": { - "template": { - "metadata": { - "creationTimestamp": null - }, - "spec": { - "containers": [ - { - "args": [ - "/bin/sh", - "-c", - "cd /opt/app-root/src; ./run.sh" - ], - "env": [ - { - "name": "DATABASE_USERNAME", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_USERNAME", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_PASSWORD", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_PASSWORD", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_NAME", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_NAME", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_HOST", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_HOST", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_PORT", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_PORT", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "NRO_USER", - "valueFrom": { - "secretKeyRef": { - "key": "username", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_PASSWORD", - "valueFrom": { - "secretKeyRef": { - "key": "password", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_DB_NAME", - "valueFrom": { - "secretKeyRef": { - "key": "db_name", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_HOST", - "valueFrom": { - "secretKeyRef": { - "key": "host", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_PORT", - "valueFrom": { - "secretKeyRef": { - "key": "port", - "name": "namex-nro-services" - } - } - }, - { - "name": "SOLR_BASE_URL", - "valueFrom": { - "configMapKeyRef": { - "key": "SOLR_BASE_URL", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "STATE_THRESHOLD", - "valueFrom": { - "configMapKeyRef": { - "key": "STATE_THRESHOLD", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "MAX_ROWS_LIMIT", - "valueFrom": { - 
"configMapKeyRef": { - "key": "MAX_ROWS_LIMIT", - "name": "${NAME}-${ENV}-config" - } - } - } - ], - "image": "docker-registry.default.svc:5000/servicebc-ne-tools/${NAME}:${ENV}", - "imagePullPolicy": "Always", - "name": "${NAME}", - "resources": {}, - "terminationMessagePath": "/{ENV}/termination-log", - "terminationMessagePolicy": "File" - } - ], - "dnsPolicy": "ClusterFirst", - "restartPolicy": "Never", - "schedulerName": "default-scheduler", - "securityContext": {}, - "terminationGracePeriodSeconds": 30 - } - } - } - }, - "schedule": "${SCHEDULE}", - "successfulJobsHistoryLimit": 3, - "suspend": false - }, - "status": {} - } - ], - "parameters": [ - { - "name": "NAME", - "displayName": "Name", - "description": "Name of the cronjob.", - "required": true, - "value": "update-colin-filings" - }, - { - "name": "NAMESPACE", - "displayName": "Namespace", - "description": "Namespace of the cronjob.", - "required": true, - "value": "servicebc-ne" - }, - { - "name": "ENV", - "displayName": "Environment", - "description": "Environment the cronjob is being created/updated in.", - "required": true, - "value": "dev" - }, - { - "name": "SCHEDULE", - "displayName": "Schedule", - "description": "Value that determines how often the cronjob runs.", - "required": true, - "value": "*/5 * * * *" - } - ] -} diff --git a/jobs/nr-garbage-collector/openshift/templates/job.json b/jobs/nr-garbage-collector/openshift/templates/job.json deleted file mode 100644 index 088e02f01..000000000 --- a/jobs/nr-garbage-collector/openshift/templates/job.json +++ /dev/null @@ -1,190 +0,0 @@ -{ - "kind": "Template", - "apiVersion": "v1", - "metadata": { - "name": "job-template" - }, - "objects": [ - { - "apiVersion": "batch/v1", - "kind": "Job", - "metadata": { - "creationTimestamp": null, - "name": "${NAME}", - "selfLink": "/apis/batch/v1/namespaces/${NAMESPACE}-${ENV}/jobs/${NAME}" - }, - "spec": { - "template": { - "metadata": { - "creationTimestamp": null - }, - "spec": { - "containers": [ - { - "args": [ - "/bin/sh", - "-c", - "cd /opt/app-root/src; ./run.sh" - ], - "env": [ - { - "name": "DATABASE_USERNAME", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_USERNAME", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_PASSWORD", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_PASSWORD", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_NAME", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_NAME", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_HOST", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_HOST", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "DATABASE_PORT", - "valueFrom": { - "configMapKeyRef": { - "key": "DATABASE_PORT", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "NRO_USER", - "valueFrom": { - "secretKeyRef": { - "key": "username", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_PASSWORD", - "valueFrom": { - "secretKeyRef": { - "key": "password", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_DB_NAME", - "valueFrom": { - "secretKeyRef": { - "key": "db_name", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_HOST", - "valueFrom": { - "secretKeyRef": { - "key": "host", - "name": "namex-nro-services" - } - } - }, - { - "name": "NRO_PORT", - "valueFrom": { - "secretKeyRef": { - "key": "port", - "name": "namex-nro-services" - } - } - }, - { - "name": "SOLR_BASE_URL", - "valueFrom": { - "configMapKeyRef": { - "key": "SOLR_BASE_URL", - 
"name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "STATE_THRESHOLD", - "valueFrom": { - "configMapKeyRef": { - "key": "STATE_THRESHOLD", - "name": "${NAME}-${ENV}-config" - } - } - }, - { - "name": "MAX_ROWS_LIMIT", - "valueFrom": { - "configMapKeyRef": { - "key": "MAX_ROWS_LIMIT", - "name": "${NAME}-${ENV}-config" - } - } - } - ], - "image": "docker-registry.default.svc:5000/gl2uos-tools/${NAME}:${ENV}", - "imagePullPolicy": "Always", - "name": "${NAME}", - "resources": {}, - "terminationMessagePath": "/${ENV}/termination-log", - "terminationMessagePolicy": "File" - } - ], - "dnsPolicy": "ClusterFirst", - "restartPolicy": "Never", - "schedulerName": "default-scheduler", - "securityContext": {}, - "terminationGracePeriodSeconds": 30 - } - } - }, - "status": {} - } - ], - "parameters": [ - { - "name": "NAME", - "displayName": "Name", - "description": "Name of the job.", - "required": true, - "value": "update-legal-filings" - }, - { - "name": "NAMESPACE", - "displayName": "Namespace", - "description": "Namespace of the job.", - "required": true, - "value": "gl2uos" - }, - { - "name": "ENV", - "displayName": "Environment", - "description": "Environment the cronjob is being created/updated in.", - "required": true, - "value": "dev" - } - ] -} diff --git a/jobs/nr-garbage-collector/openshift/templates/pipeline.json b/jobs/nr-garbage-collector/openshift/templates/pipeline.json deleted file mode 100644 index 244faa7cc..000000000 --- a/jobs/nr-garbage-collector/openshift/templates/pipeline.json +++ /dev/null @@ -1,106 +0,0 @@ -{ - "kind": "Template", - "apiVersion": "v1", - "metadata": { - "name": "pipeline-template" - }, - "objects": [ - { - "kind": "BuildConfig", - "apiVersion": "v1", - "metadata": { - "name": "${NAME}-${PIPELINE_PURPOSE}-pipeline", - "creationTimestamp": null, - "labels": { - "app": "${NAME}-${PIPELINE_PURPOSE}-pipeline", - "name": "${NAME}-${PIPELINE_PURPOSE}-pipeline", - "template": "${NAME}-${PIPELINE_PURPOSE}-pipeline" - } - }, - "spec": { - "triggers": [ - { - "type": "GitHub", - "github": { - "secretReference": { - "name": "${WEBHOOK}" - } - } - } - ], - "runPolicy": "Serial", - "source": { - "type": "Git", - "git": { - "uri": "${GIT_REPO_URL}", - "ref": "${GIT_REF}" - }, - "contextDir": "${SOURCE_CONTEXT_DIR}" - }, - "strategy": { - "type": "JenkinsPipeline", - "jenkinsPipelineStrategy": { - "jenkinsfilePath": "${JENKINS_FILE}" - } - }, - "output": {}, - "resources": {}, - "postCommit": {}, - "nodeSelector": {}, - "successfulBuildsHistoryLimit": 5, - "failedBuildsHistoryLimit": 5 - } - } - ], - "parameters": [ - { - "name": "NAME", - "displayName": "Name", - "description": "The name assigned to all of the resources defined in this template.", - "required": true, - "value": "business-create-ui" - }, - { - "name": "PIPELINE_PURPOSE", - "displayName": "Pipeline purpose", - "description": "The activity that this pipeline will manage. eg. 
build, test, promote, etc.", - "required": true, - "value": "build" - }, - { - "name": "GIT_REPO_URL", - "displayName": "Git Repo URL", - "description": "The URL to your GIT repo.", - "required": true, - "value": "https://github.com/bcgov/bcrs-business-create-ui" - }, - { - "name": "GIT_REF", - "displayName": "Git Reference", - "description": "The git reference or branch.", - "required": true, - "value": "master" - }, - { - "name": "WEBHOOK", - "displayName": "Secret name for the webhook.", - "description": "The name of the Secret that holds the webhook.", - "required": true, - "value": "unknown" - }, - { - "name": "SOURCE_CONTEXT_DIR", - "displayName": "Source Context Directory", - "description": "The source context directory.", - "required": false, - "value": "/src" - }, - { - "name": "JENKINS_FILE", - "displayName": "The Jenkinsfile this pipeline should use.", - "description": "The Jenkinsfile this pipeline should use.", - "required": false, - "value": "Jenkinsfile" - } - ] -} \ No newline at end of file diff --git a/jobs/nr-garbage-collector/requirements.txt b/jobs/nr-garbage-collector/requirements.txt deleted file mode 100644 index 316fd0c10..000000000 --- a/jobs/nr-garbage-collector/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -# Just installs the production requirements. -# some setups need a requirements.txt file at the root level --r requirements/prod.txt diff --git a/jobs/nr-garbage-collector/requirements/dev.txt b/jobs/nr-garbage-collector/requirements/dev.txt deleted file mode 100644 index 4ec8f5d95..000000000 --- a/jobs/nr-garbage-collector/requirements/dev.txt +++ /dev/null @@ -1,11 +0,0 @@ -# Everything the developer needs in addition to the production requirements --r prod.txt - -# Lint and code style -flake8 -flake8-blind-except -flake8-debugger -flake8-docstrings -flake8-isort -flake8-quotes -pep8-naming diff --git a/jobs/nr-garbage-collector/requirements/prod.txt b/jobs/nr-garbage-collector/requirements/prod.txt deleted file mode 100644 index 212a99bba..000000000 --- a/jobs/nr-garbage-collector/requirements/prod.txt +++ /dev/null @@ -1,29 +0,0 @@ -git+https://github.com/bcgov/namex.git#egg=namex&subdirectory=api -git+https://github.com/bcgov/namex-synonyms-api-py-client.git#egg=swagger_client -git+https://github.com/bcgov/namex-payment-api-py-client.git@dev#egg=openapi_client - -cx_Oracle -psycopg2-binary -python-dotenv==0.8.2 - -Flask==1.1.2 -Flask-Migrate -Flask-Script -Flask-Moment -Flask-SQLAlchemy==2.4.1 -Flask-RESTplus==0.13.0 -Flask-Marshmallow==0.11.0 -flask-jwt-oidc>=0.1.5 -jsonpickle -pandas -marshmallow==2.19.2 -marshmallow-sqlalchemy==0.19.0 -pronouncing -requests -toolz -nltk==3.4.5 -lxml -inflect -werkzeug==0.16.1 -pysolr -dataclasses \ No newline at end of file diff --git a/jobs/nr-garbage-collector/run.sh b/jobs/nr-garbage-collector/run.sh deleted file mode 100755 index 135057e7d..000000000 --- a/jobs/nr-garbage-collector/run.sh +++ /dev/null @@ -1,8 +0,0 @@ -export LIBRARY_PATH=/opt/rh/httpd24/root/usr/lib64 -export X_SCLS=rh-python35 httpd24 -export LD_LIBRARY_PATH=/opt/rh/rh-python35/root/usr/lib64::/opt/rh/httpd24/root/usr/lib64 -export PATH=/opt/app-root/bin:/opt/rh/rh-python35/root/usr/bin::/opt/rh/httpd24/root/usr/bin:/opt/rh/httpd24/root/usr/sbin:/opt/app-root/src/.local/bin/:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - -cd /opt/app-root/src -echo 'run nr_garbage_collector' -/opt/app-root/bin/python nr_garbage_collector.py \ No newline at end of file diff --git 
a/jobs/nr-garbage-collector/setup.cfg b/jobs/nr-garbage-collector/setup.cfg deleted file mode 100644 index bfb8f2e48..000000000 --- a/jobs/nr-garbage-collector/setup.cfg +++ /dev/null @@ -1,20 +0,0 @@ -[flake8] -exclude = .git,*migrations* -max-line-length = 120 -docstring-min-length=10 -per-file-ignores = - */__init__.py:F401 - -[isort] -line_length = 120 -indent = 4 -multi_line_output = 4 -lines_after_imports = 2 - -[pylint] -ignore=migrations,test -max_line_length=120 -notes=FIXME,XXX,TODO -ignored-modules=flask_sqlalchemy,sqlalchemy,SQLAlchemy,alembic,scoped_session -ignored-classes=scoped_session -min-similarity-lines=100 diff --git a/jobs/solr-synonym-updater/config.py b/jobs/solr-synonym-updater/config.py deleted file mode 100644 index 89d3ba04f..000000000 --- a/jobs/solr-synonym-updater/config.py +++ /dev/null @@ -1,30 +0,0 @@ -import os -from dotenv import load_dotenv, find_dotenv - -#this will load all the envars from a .env file located in the project root (api) -load_dotenv(find_dotenv()) - - -class Config(object): - PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - - MAX_ROW_LIMIT = os.getenv('MAX_ROWS', '100') - MIN_DELAY_SECONDS = os.getenv('MIN_DELAY_SECONDS', '600') - - SECRET_KEY = 'a secret' - - SQLALCHEMY_TRACK_MODIFICATIONS = False - - # POSTGRESQL-SOLR - DB_USER = os.getenv('DATABASE_USERNAME', '') - DB_PASSWORD = os.getenv('DATABASE_PASSWORD','') - DB_NAME = os.getenv('DATABASE_NAME','') - DB_HOST = os.getenv('DATABASE_HOST','') - DB_PORT = os.getenv('DATABASE_PORT','5432') - SQLALCHEMY_DATABASE_URI = 'postgresql://{user}:{password}@{host}:{port}/{name}'.format( - user=DB_USER, - password=DB_PASSWORD, - host=DB_HOST, - port=int(DB_PORT), - name=DB_NAME, - ) diff --git a/jobs/solr-synonym-updater/multi_word_syns.txt b/jobs/solr-synonym-updater/multi_word_syns.txt deleted file mode 100644 index 19eff56cb..000000000 --- a/jobs/solr-synonym-updater/multi_word_syns.txt +++ /dev/null @@ -1,97 +0,0 @@ - -point of sale=>pointofsale -tree care=>treecare -tree prune=>treeprune -tree service=>treeservice -tree surgeon=>treesurgeon -tree top=>treetop -mixed media=>mixedmedia -bio chemcial=>biochemcial -bio chemical=>biochemical -bio hazard=>biohazard -night stand=>nightstand -food service=>foodservice -bed and breakfast=>bedandbreakfast -motor court=>motorcourt -time share=>timeshare -aroma therapy=>aromatherapy -essential oils=>essentialoils -smart homes=>smarthomes -back hoe=>backhoe -motor cycle=>motorcycle -vending machine=>vendingmachine -prince george=>princegeorge -post trauma=>posttrauma -queen charlotte=>queencharlotte -port coquitlam=>portcoquitlam -fire department=>firedepartment -fire fighter=>firefighter -fire fighters=>firefighters -fire protection=>fireprotection -fire rescue=>firerescue -fire services=>fireservices -crypto currency=>cryptocurrency -drive way=>driveway -road work=>roadwork -bubble tea=>bubbletea -chai tea=>chaitea -cheese tea=>cheesetea -energy drink=>energydrink -hot chocolate=>hotchocolate -public house=>publichouse -social house=>socialhouse -tap house=>taphouse -fifth wheel=>fifthwheel -1st nation=>1stnation -1st people=>1stpeople -first nation=>firstnation -first people=>firstpeople -british columbia=>britishcolumbia -point of sales=>pointofsales -med tech=>medtech -smart homess=>smarthomess -virtual reality=>virtualreality -fire departments=>firedepartments -fire fighterss=>firefighterss -fire protections=>fireprotections -fire rescues=>firerescues -fire servicess=>fireservicess -real estate=>realestate -real estates=>realestates 
-tree cares=>treecares -tree prunes=>treeprunes -tree services=>treeservices -tree surgeons=>treesurgeons -tree tops=>treetops -crypto currencys=>cryptocurrencys -mixed medias=>mixedmedias -bio chemcials=>biochemcials -bio chemicals=>biochemicals -bio hazards=>biohazards -drive ways=>driveways -road works=>roadworks -night stands=>nightstands -bed and breakfasts=>bedandbreakfasts -motor courts=>motorcourts -time shares=>timeshares -back hoes=>backhoes -food services=>foodservices -motor cycles=>motorcycles -public houses=>publichouses -social houses=>socialhouses -tap houses=>taphouses -fifth wheels=>fifthwheels -aroma therapys=>aromatherapys -essential oilss=>essentialoilss -vending machines=>vendingmachines -post traumas=>posttraumas -bubble teas=>bubbleteas -chai teas=>chaiteas -cheese teas=>cheeseteas -energy drinks=>energydrinks -hot chocolates=>hotchocolates -queen charlottes=>queencharlottes -1st nations=>1stnations -1st peoples=>1stpeoples -first nations=>firstnations -first peoples=>firstpeoples \ No newline at end of file diff --git a/jobs/solr-synonym-updater/openshift/scripts/Readme.md b/jobs/solr-synonym-updater/openshift/scripts/Readme.md deleted file mode 100755 index a7f835866..000000000 --- a/jobs/solr-synonym-updater/openshift/scripts/Readme.md +++ /dev/null @@ -1,4 +0,0 @@ -### Helper Scripts - - -* exportTemplate.sh - Export deploy, build, routes, services as templates from an existing project. diff --git a/jobs/solr-synonym-updater/openshift/scripts/exportTemplate.sh b/jobs/solr-synonym-updater/openshift/scripts/exportTemplate.sh deleted file mode 100755 index fad770727..000000000 --- a/jobs/solr-synonym-updater/openshift/scripts/exportTemplate.sh +++ /dev/null @@ -1,78 +0,0 @@ -#!/bin/bash -SCRIPT_DIR=$(dirname $0) - -# ===================================================================== -# Author: Wade Barnes -# ===================================================================== - -# =================================================================================================== -# Functions -# --------------------------------------------------------------------------------------------------- -usage (){ - echo "========================================================================================" - echo "Export an OpenShift resource as a template." - echo - echo "----------------------------------------------------------------------------------------" - echo "Usage:" - echo - echo "${0} <resource_list> <resource_name> <template_name> [output_format] [output_path]" - echo - echo "Where:" - echo " - <resource_list> csv list of resources to export." - echo " - <resource_name> The name of the resource to export." - echo " - <template_name> The name to assign to the template." - echo " - [output_format] Optional: Output file format; json (default) or yaml." - echo " - [output_path] Optional: Output path." - echo - echo "Examples:" - echo "${0} bc solr solr-template" - echo "========================================================================================" - exit 1 -} - -exitOnError (){ - rtnCd=$? - if [ ${rtnCd} -ne 0 ]; then - echo "An error has occurred! Please check the previous output message(s) for details."
- exit ${rtnCd} - fi -} -# =================================================================================================== - -# =================================================================================================== -# Setup -# --------------------------------------------------------------------------------------------------- -if [ -z "${1}" ]; then - usage -elif [ -z "${2}" ]; then - usage -elif [ -z "${3}" ]; then - usage -else - RESOURCE_LIST=$1 - RESOURCE_NAME=$2 - TEMPLATE_NAME=$3 -fi - -if [ ! -z "${4}" ]; then - OUTPUT_FORMAT=$4 -fi - -if [ ! -z "${5}" ]; then - OUTPUT_PATH=$5 -fi - -if [ ! -z "${6}" ]; then - usage -fi - -if [ -z "$OUTPUT_FORMAT" ]; then - OUTPUT_FORMAT=json -fi - -if [ -z "$OUTPUT_PATH" ]; then - OUTPUT_PATH="${SCRIPT_DIR}/${TEMPLATE_NAME}.${OUTPUT_FORMAT}" -fi -# =================================================================================================== - -oc export ${RESOURCE_LIST} ${RESOURCE_NAME} --as-template=${TEMPLATE_NAME} -o ${OUTPUT_FORMAT} > ${OUTPUT_PATH} diff --git a/jobs/solr-synonym-updater/openshift/scripts/jenkins-pvc.yaml b/jobs/solr-synonym-updater/openshift/scripts/jenkins-pvc.yaml deleted file mode 100755 index 35ce74215..000000000 --- a/jobs/solr-synonym-updater/openshift/scripts/jenkins-pvc.yaml +++ /dev/null @@ -1,12 +0,0 @@ -apiVersion: v1 -kind: PersistentVolumeClaim -metadata: - name: jenkins -spec: - accessModes: - - ReadWriteOnce - resources: - requests: - storage: 1Gi - volumename: "jenkins-data" -status: {} diff --git a/jobs/solr-synonym-updater/openshift/templates/cron-solr-synonym-updater.yml b/jobs/solr-synonym-updater/openshift/templates/cron-solr-synonym-updater.yml deleted file mode 100644 index 4faa1d258..000000000 --- a/jobs/solr-synonym-updater/openshift/templates/cron-solr-synonym-updater.yml +++ /dev/null @@ -1,59 +0,0 @@ ---- -kind: "Template" -apiVersion: "v1" -metadata: - name: "solr-synonym-updater" - annotations: - description: "Scheduled Task to concatenate multi-word synonyms into single words in the postgres-solr database" - tags: "cronjob" -objects: -- kind: "CronJob" - apiVersion: "batch/v1beta1" - metadata: - name: "solr-synonym-updater" - spec: - schedule: "* 23 * * *" - suspend: false - jobTemplate: - spec: - template: - spec: - containers: - - name: "solr-synonym-updater" - image: "docker-registry.default.svc:5000/servicebc-ne-tools/solr-synonym-updater:${ENV_TAG}" - imagePullPolicy: Always - args: - - /bin/sh - - -c - - cd /opt/app-root/src; ./run.sh - env: - - name: DATABASE_USERNAME - valueFrom: - secretKeyRef: - key: database-user - name: postgresql-solr - - name: DATABASE_PASSWORD - valueFrom: - secretKeyRef: - key: database-password - name: postgresql-solr - - name: DATABASE_NAME - valueFrom: - secretKeyRef: - key: database-name - name: postgresql-solr - - name: DATABASE_HOST - value: 'postgresql-solr' - - name: DATABASE_PORT - value: '5432' - restartPolicy: "Never" - concurrencyPolicy: "Forbid" -parameters: [ - { - "name": "ENV_TAG", - "displayName": "ENV_TAG", - "description": "the tag for the environment that the job image runs from.", - "required": true, - "value": "dev" - }, -] \ No newline at end of file diff --git a/jobs/solr-synonym-updater/openshift/templates/solr-synonym-updater-bc.json b/jobs/solr-synonym-updater/openshift/templates/solr-synonym-updater-bc.json deleted file mode 100644 index 77dd5397d..000000000 --- a/jobs/solr-synonym-updater/openshift/templates/solr-synonym-updater-bc.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "kind": "Template", - "apiVersion": "v1", 
- "metadata": { - "name": "solr-synonym-updater", - "creationTimestamp": null - }, - "objects": [ - { - "kind": "BuildConfig", - "apiVersion": "v1", - "metadata": { - "name": "solr-synonym-updater", - "creationTimestamp": null, - "labels": { - "app": "solr-synonym-updater" - } - }, - "spec": { - "triggers": [ - { - "type": "ConfigChange" - } - ], - "runPolicy": "Serial", - "source": { - "type": "Git", - "git": { - "uri": "https://github.com/bcgov/namex", - "ref": "master" - }, - "contextDir": "jobs/solr-synonym-updater" - }, - "strategy": { - "type": "Source", - "sourceStrategy": { - "from": { - "kind": "ImageStreamTag", - "namespace": "openshift", - "name": "python:3.6" - } - } - }, - "output": { - "to": { - "kind": "ImageStreamTag", - "name": "inprogress-update:latest" - } - }, - "resources": {}, - "postCommit": {}, - "nodeSelector": null - }, - "status": { - "lastVersion": 0 - } - } - ] -} diff --git a/jobs/solr-synonym-updater/protected_syns.txt b/jobs/solr-synonym-updater/protected_syns.txt deleted file mode 100644 index 136c343fa..000000000 --- a/jobs/solr-synonym-updater/protected_syns.txt +++ /dev/null @@ -1 +0,0 @@ -pointofsale,treecare,treeprune,treeservice,treesurgeon,treetop,mixedmedia,biochemcial,biochemical,biohazard,nightstand,foodservice,bedandbreakfast,motorcourt,timeshare,aromatherapy,essentialoils,smarthomes,backhoe,motorcycle,vendingmachine,princegeorge,posttrauma,queencharlotte,portcoquitlam,firedepartment,firefighter,firefighters,fireprotection,firerescue,fireservices,cryptocurrency,driveway,roadwork,bubbletea,chaitea,cheesetea,energydrink,hotchocolate,publichouse,socialhouse,taphouse,fifthwheel,1stnation,1stpeople,firstnation,firstpeoplebritishcolumbia,pointofsales,medtech,smarthomess,virtualreality,firedepartments,firefighterss,fireprotections,firerescues,fireservicess,realestate,realestates,treecares,treeprunes,treeservices,treesurgeons,treetops,cryptocurrencys,mixedmedias,biochemcials,biochemicals,biohazards,driveways,roadworks,nightstands,bedandbreakfasts,motorcourts,timeshares,backhoes,foodservices,motorcycles,publichouses,socialhouses,taphouses,fifthwheels,aromatherapys,essentialoilss,vendingmachines,posttraumas,bubbleteas,chaiteas,cheeseteas,energydrinks,hotchocolates,queencharlottes,1stnations,1stpeoples,firstnations,firstpeoples, \ No newline at end of file diff --git a/jobs/solr-synonym-updater/requirements.txt b/jobs/solr-synonym-updater/requirements.txt deleted file mode 100644 index 316fd0c10..000000000 --- a/jobs/solr-synonym-updater/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -# Just installs the production requirements. 
-# some setups need a requirements.txt file at the root level --r requirements/prod.txt diff --git a/jobs/solr-synonym-updater/requirements/dev.txt b/jobs/solr-synonym-updater/requirements/dev.txt deleted file mode 100644 index d9e8dcbff..000000000 --- a/jobs/solr-synonym-updater/requirements/dev.txt +++ /dev/null @@ -1,14 +0,0 @@ -# Everything the developer needs in addition to the production requirements --r prod.txt - -# Testing -pytest - -# Lint and code style -flake8 -flake8-blind-except -flake8-debugger -flake8-docstrings -flake8-isort -flake8-quotes -pep8-naming diff --git a/jobs/solr-synonym-updater/requirements/prod.txt b/jobs/solr-synonym-updater/requirements/prod.txt deleted file mode 100644 index ddda3e638..000000000 --- a/jobs/solr-synonym-updater/requirements/prod.txt +++ /dev/null @@ -1,8 +0,0 @@ -psycopg2-binary -python-dotenv - -Flask -Flask-SQLAlchemy - - -git+https://github.com/bcgov/namex.git#egg=namex&subdirectory=api diff --git a/jobs/solr-synonym-updater/run.sh b/jobs/solr-synonym-updater/run.sh deleted file mode 100755 index 6f2b4d240..000000000 --- a/jobs/solr-synonym-updater/run.sh +++ /dev/null @@ -1,8 +0,0 @@ -export LIBRARY_PATH=/opt/rh/httpd24/root/usr/lib64 -export X_SCLS=rh-python35 httpd24 -export LD_LIBRARY_PATH=/opt/rh/rh-python35/root/usr/lib64::/opt/rh/httpd24/root/usr/lib64 -export PATH=/opt/app-root/bin:/opt/rh/rh-python35/root/usr/bin::/opt/rh/httpd24/root/usr/bin:/opt/rh/httpd24/root/usr/sbin:/opt/app-root/src/.local/bin/:/opt/app-root/src/bin:/opt/app-root/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin - -cd /opt/app-root/src -echo 'run solr-synonym-updater' -/opt/app-root/bin/python solr-synonym-updater.py \ No newline at end of file diff --git a/jobs/solr-synonym-updater/solr-synonym-updater.py b/jobs/solr-synonym-updater/solr-synonym-updater.py deleted file mode 100644 index d33d2af6b..000000000 --- a/jobs/solr-synonym-updater/solr-synonym-updater.py +++ /dev/null @@ -1,89 +0,0 @@ -import sys - -from sqlalchemy import text -from datetime import datetime, timedelta - -from flask import Flask, g, current_app - -from namex import db -from namex.models import Request, State, User, Event -from namex.services import EventRecorder -from namex.utils.logging import setup_logging - -from config import Config - -setup_logging() ## important to do this first - - -def create_app(config=Config): - app = Flask(__name__) - app.config.from_object(config) - db.init_app(app) - app.app_context().push() - current_app.logger.debug('created the Flask App and pushed the App Context') - - return app - - -app = create_app(Config) -start_time = datetime.utcnow() -row_count = 0 - -try: - sql = "select id,synonyms_text " \ - "from synonym " \ - "where synonyms_text~'\w\s\w'" - - reqs = db.session.execute(sql) - - multi_word_syns = [] - for r in reqs: - current_app.logger.debug('processing id: {}'.format(r.id)) - - # create a list of all the synonyms for this row - synonyms = [word.strip() for word in r.synonyms_text.split(',')] - new_syn_text = '' - update_row = False - for synonym in synonyms: - - if ' ' in synonym: - updated_synonym = synonym.replace(' ','') - multi_word_syns.append((synonym, updated_synonym)) - update_row = True - new_syn_text += updated_synonym + ',' - else: - new_syn_text += synonym + ',' - - if update_row: - update_sql = "update synonym " \ - "set synonyms_text = \'{text}\' " \ - "where id={id}".format(text=new_syn_text[:-1], id=r.id) - db.session.execute(update_sql) - db.session.commit() - row_count += 1 - - # add new multi word 
synonyms to multi_word_syns.txt and protected-multi.txt - if len(multi_word_syns) > 0: - old_multi_word_syns = open('solr-synonym-updater/multi_word_syns.txt').read() - old_protected_syns = open('solr-synonym-updater/protected_syns.txt').read() - - for syn_tuple in multi_word_syns: - multi_syn = syn_tuple[0] - squished_multi_syn = syn_tuple[1] - - if multi_syn in old_multi_word_syns: - pass - else: - open('solr-synonym-updater/multi_word_syns.txt', 'a+').write('\n' + multi_syn + '=>' + squished_multi_syn) - open('solr-synonym-updater/protected_syns.txt', 'a+').write(squished_multi_syn + ',') - - -except Exception as err: - db.session.rollback() - print('Failed to update multi-synonyms: ', err, err.with_traceback(None), file=sys.stderr) - exit(1) - -app.do_teardown_appcontext() -end_time = datetime.utcnow() -print("job - columns updated: {0} completed in:{1}".format(row_count, end_time-start_time)) -exit(0)