diff --git a/README.md b/README.md index ea381fd53..d7b5f2c8f 100644 --- a/README.md +++ b/README.md @@ -54,7 +54,7 @@ We invite developers to join us in our mission to bring AI and data integration | **BizMerlinHR** | ATS | 🎯 | | | | | | | | **Beetween** | ATS | 🎯 | | | | | | | | [**Bullhorn**](./src/hrflow_connectors/connectors/bullhorn/README.md) | ATS | :white_check_mark: | *26/01/2022* | *30/10/2023* | :white_check_mark: | :white_check_mark: | :white_check_mark: | :x: | :x: | -| [**Breezy HR**](./src/hrflow_connectors/connectors/breezyhr/README.md) | ATS | :white_check_mark: | *19/01/2022* | *04/09/2023* | :white_check_mark: | :white_check_mark: | :x: | :x: | :x: | +| [**Breezy HR**](./src/hrflow_connectors/connectors/breezyhr/README.md) | ATS | :white_check_mark: | *19/01/2022* | *01/07/2024* | :white_check_mark: | :white_check_mark: | :x: | :x: | :x: | | **CATS** | ATS | 🎯 | | | | | | | | **Cegid (Meta4)** | ATS | 🎯 | | | | | | | | [**Ceridian**](./src/hrflow_connectors/connectors/ceridian/README.md) | HCM | :white_check_mark: | *19/01/2022* | *04/09/2023* | :x: | :white_check_mark: | :x: | :x: | :x: | @@ -95,7 +95,7 @@ We invite developers to join us in our mission to bring AI and data integration | **RecruitBox** | ATS | 🎯 | | | | **RecruiterFlow** | ATS | 🎯 | | | | | | | | **Recruitive** | ATS | 🎯 | | | | | | | -| [**SAPSuccessfactors**](./src/hrflow_connectors/connectors/sapsuccessfactors/README.md) | ATS | :white_check_mark: | *19/01/2022* | *30/10/2023* | :x: | :white_check_mark: | :white_check_mark: | :x: | :x: | +| [**SAPSuccessfactors**](./src/hrflow_connectors/connectors/sapsuccessfactors/README.md) | ATS | :white_check_mark: | *19/01/2022* | *30/10/2023* | :white_check_mark: | :white_check_mark: | :white_check_mark: | :x: | :x: | | [**Salesforce**](./src/hrflow_connectors/connectors/salesforce/README.md) | CRM | :white_check_mark: | *03/08/2023* | *04/12/2023* | :white_check_mark: | :white_check_mark: | :white_check_mark: | :x: | :x: | | 
[**Smartrecruiters**](./src/hrflow_connectors/connectors/smartrecruiters/README.md) | ATS | :white_check_mark: | *21/03/2022* | *30/10/2023* | :x: | :white_check_mark: | :white_check_mark: | :x: | :x: | | [**Taleez**](./src/hrflow_connectors/connectors/taleez/README.md) | ATS | :white_check_mark: | *19/01/2022* | *04/09/2023* | :x: | :white_check_mark: | :white_check_mark: | :x: | :x: | diff --git a/manifest.json b/manifest.json index 2c5a67c53..284c41d56 100644 --- a/manifest.json +++ b/manifest.json @@ -11801,6 +11801,373 @@ "workflow_code_workflow_id_settings_key": "__workflow_id", "workflow_code_origin_settings_prefix": "origin_", "workflow_code_target_settings_prefix": "target_" + }, + { + "name": "pull_profile_list", + "action_type": "inbound", + "action_parameters": { + "title": "ReadProfilesActionParameters", + "type": "object", + "properties": { + "read_mode": { + "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", + "default": "sync", + "allOf": [ + { + "$ref": "#/definitions/ReadMode" + } + ] + }, + "logics": { + "title": "logics", + "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", + "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Uniont[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", + "type": "code_editor" + }, + "format": { + "title": "format", + "description": "Formatting function. 
You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", + "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", + "type": "code_editor" + } + }, + "additionalProperties": false, + "definitions": { + "ReadMode": { + "title": "ReadMode", + "description": "An enumeration.", + "enum": [ + "sync", + "incremental" + ] + } + } + }, + "data_type": "profile", + "trigger_type": "schedule", + "origin": "SAP Profiles", + "origin_parameters": { + "title": "ReadProfilesParameters", + "type": "object", + "properties": { + "api_server": { + "title": "Api Server", + "description": "Server to be accessed", + "field_type": "Other", + "type": "string" + }, + "api_key": { + "title": "Api Key", + "description": "API Key used to authenticate on the SAP API", + "field_type": "Auth", + "type": "string" + }, + "top": { + "title": "Top", + "description": "Show only the first N items value is capped at 100", + "default": 100, + "field_type": "Query Param", + "type": "integer" + }, + "skip": { + "title": "Skip", + "description": "Search items by search phrases", + "field_type": "Query Param", + "type": "integer" + }, + "filter": { + "title": "Filter", + "description": "Filter items by property values", + "field_type": "Query Param", + "type": "string" + }, + "search": { + "title": "Search", + "description": "Search items by search phrases", + "field_type": "Query Param", + "type": "string" + } + }, + "required": [ + "api_server", + "api_key" + ], + "additionalProperties": false + }, + "origin_data_schema": { + "title": "SapCandidateModel", + "type": "object", + "properties": { + "address": { + "title": "Address", + "type": "string" + }, + "cellPhone": { + "title": "Cellphone", + "type": "string" + }, + "city": { + "title": "City", + "type": "string" + }, + "contactEmail": { + "title": "Contactemail", + "type": "string" + }, + "country": { + "title": "Country", + "type": "string" + }, + 
"currentTitle": { + "title": "Currenttitle", + "type": "string" + }, + "firstName": { + "title": "Firstname", + "type": "string" + }, + "homePhone": { + "title": "Homephone", + "type": "string" + }, + "lastName": { + "title": "Lastname", + "type": "string" + }, + "middleName": { + "title": "Middlename", + "type": "string" + }, + "primaryEmail": { + "title": "Primaryemail", + "type": "string" + }, + "zip": { + "title": "Zip", + "type": "string" + }, + "education": { + "$ref": "#/definitions/Education" + }, + "outsideWorkExperience": { + "$ref": "#/definitions/OutsideWorkExperience" + } + }, + "required": [ + "primaryEmail" + ], + "definitions": { + "Result": { + "title": "Result", + "type": "object", + "properties": { + "endDate": { + "title": "Enddate", + "type": "string" + }, + "school": { + "title": "School", + "type": "string" + }, + "schoolAddress": { + "title": "Schooladdress", + "type": "string" + }, + "startDate": { + "title": "Startdate", + "type": "string" + } + }, + "required": [ + "school", + "schoolAddress" + ] + }, + "Education": { + "title": "Education", + "type": "object", + "properties": { + "results": { + "title": "Results", + "type": "array", + "items": { + "$ref": "#/definitions/Result" + } + } + }, + "required": [ + "results" + ] + }, + "ResultOutsideWorkExperience": { + "title": "ResultOutsideWorkExperience", + "type": "object", + "properties": { + "employer": { + "title": "Employer", + "type": "string" + }, + "employerAddress": { + "title": "Employeraddress", + "type": "string" + }, + "endDate": { + "title": "Enddate", + "type": "string" + }, + "startDate": { + "title": "Startdate", + "type": "string" + } + }, + "required": [ + "employerAddress" + ] + }, + "OutsideWorkExperience": { + "title": "OutsideWorkExperience", + "type": "object", + "properties": { + "results": { + "title": "Results", + "type": "array", + "items": { + "$ref": "#/definitions/ResultOutsideWorkExperience" + } + } + }, + "required": [ + "results" + ] + } + } + }, + 
"supports_incremental": false, + "target": "HrFlow.ai Profile Parsing", + "target_parameters": { + "title": "WriteProfileParsingParameters", + "type": "object", + "properties": { + "api_secret": { + "title": "Api Secret", + "description": "X-API-KEY used to access HrFlow.ai API", + "field_type": "Auth", + "type": "string" + }, + "api_user": { + "title": "Api User", + "description": "X-USER-EMAIL used to access HrFlow.ai API", + "field_type": "Auth", + "type": "string" + }, + "source_key": { + "title": "Source Key", + "description": "HrFlow.ai source key", + "field_type": "Other", + "type": "string" + }, + "only_insert": { + "title": "Only Insert", + "description": "When enabled the profile is written only if it doesn't exist in the source", + "default": false, + "field_type": "Other", + "type": "boolean" + } + }, + "required": [ + "api_secret", + "api_user", + "source_key" + ], + "additionalProperties": false + }, + "target_data_schema": { + "title": "HrFlowProfileParsing", + "type": "object", + "properties": { + "reference": { + "title": "Reference", + "description": "Custom identifier of the Profile.", + "type": "string" + }, + "created_at": { + "title": "Created At", + "description": "type: datetime ISO8601, Creation date of the Profile.", + "type": "string" + }, + "resume": { + "$ref": "#/definitions/ResumeToParse" + }, + "tags": { + "title": "Tags", + "description": "List of tags of the Profile.", + "type": "array", + "items": { + "$ref": "#/definitions/GeneralEntitySchema" + } + }, + "metadatas": { + "title": "Metadatas", + "description": "List of metadatas of the Profile.", + "type": "array", + "items": { + "$ref": "#/definitions/GeneralEntitySchema" + } + } + }, + "required": [ + "created_at", + "resume", + "tags", + "metadatas" + ], + "definitions": { + "ResumeToParse": { + "title": "ResumeToParse", + "type": "object", + "properties": { + "raw": { + "title": "Raw", + "type": "string", + "format": "binary" + }, + "content_type": { + "title": "Content Type", 
+ "type": "string" + } + }, + "required": [ + "raw", + "content_type" + ] + }, + "GeneralEntitySchema": { + "title": "GeneralEntitySchema", + "type": "object", + "properties": { + "name": { + "title": "Name", + "description": "Identification name of the Object", + "type": "string" + }, + "value": { + "title": "Value", + "description": "Value associated to the Object's name", + "type": "string" + } + }, + "required": [ + "name" + ] + } + } + }, + "workflow_code": "import typing as t\n\nfrom hrflow_connectors import SAPSuccessFactors\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return SAPSuccessFactors.pull_profile_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['api_server', 'api_key', 'top', 'skip', 'filter', 'search']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'only_insert']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return 
SAPSuccessFactors.pull_profile_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", + "workflow_code_format_placeholder": "# << format_placeholder >>", + "workflow_code_logics_placeholder": "# << logics_placeholder >>", + "workflow_code_workflow_id_settings_key": "__workflow_id", + "workflow_code_origin_settings_prefix": "origin_", + "workflow_code_target_settings_prefix": "target_" } ], "type": "ATS", diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/README.md b/src/hrflow_connectors/connectors/sapsuccessfactors/README.md index 83b849bcc..aa760db6e 100644 --- a/src/hrflow_connectors/connectors/sapsuccessfactors/README.md +++ b/src/hrflow_connectors/connectors/sapsuccessfactors/README.md @@ -30,6 +30,7 @@ In this section, we outline the data flow between different components of the co | ------- | ----------- | | [**Pull job list**](docs/pull_job_list.md) | Retrieves all jobs via the ***SAPSuccessFactors*** API and sends them to a ***Hrflow.ai Board***. | | [**Push profile**](docs/push_profile.md) | Writes a profile taken from a Hrflow.ai Source to SAPSuccessFactors via the SAP API | +| [**Pull profile list**](docs/pull_profile_list.md) | Retrieves all profiles via the ***SAPSuccessFactors*** API and sends them to a ***Hrflow.ai Source***. |

@@ -53,7 +54,10 @@ For more code details checkout connector code # πŸ”— Useful Links - πŸ“„Visit [SAP](https://sap.com/) to learn more. -- βš™οΈ API documentation : (https://api.sap.com/) +- βš™οΈ API documentations : + - [Job API](https://api.sap.com/api/RCMJobRequisition/overview) + - [Candidate API](https://api.sap.com/api/RCMCandidate/overview) + - πŸ’» [Connector code](https://github.com/Riminder/hrflow-connectors/tree/master/src/hrflow_connectors/connectors/sapsuccessfactors) on our Github. @@ -61,4 +65,5 @@ For more code details checkout connector code - πŸ’» HrFlow.ai: [Daniel ROSA](https://github.com/DanielRosa73) - Software Engineer - πŸ’» HrFlow.ai: [Limam VADHEL](https://github.com/limamvadhel) - Software Engineer - πŸ’» HrFlow.ai: [Corentin DUCHENE](https://github.com/CorentinDuchene) - Software Engineer +- πŸ’» HrFlow.ai : [Nedhir Ebnou](https://github.com/nedhirouebnou) - Software Engineer - 🀝 SAP: [SAP for the partnership and accessible documentation](https://sap.com) diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/connector.py b/src/hrflow_connectors/connectors/sapsuccessfactors/connector.py index 53f8ef656..c2ca7c5d2 100644 --- a/src/hrflow_connectors/connectors/sapsuccessfactors/connector.py +++ b/src/hrflow_connectors/connectors/sapsuccessfactors/connector.py @@ -6,6 +6,7 @@ from hrflow_connectors.connectors.hrflow.schemas import HrFlowProfile from hrflow_connectors.connectors.hrflow.warehouse import ( HrFlowJobWarehouse, + HrFlowProfileParsingWarehouse, HrFlowProfileWarehouse, ) from hrflow_connectors.connectors.sapsuccessfactors.warehouse import ( @@ -262,6 +263,36 @@ def format_profile(profile: HrFlowProfile) -> t.Dict: return sap_profile +def format_datetime(date_string): + match = re.search(r"\d+", date_string) + timestamp = int(match.group()) + + # Date is in millis since epoch and Python expects seconds since epoch + # Divide by 1000 + dt = datetime.datetime.utcfromtimestamp(timestamp / 
1000.0) + + iso_date = dt.isoformat() + return iso_date + + +def format_sap_candidate(candidate_data: t.Dict) -> t.Dict: + metadatas = [ + { + "name": "profile url", + "value": candidate_data["metadata"]["uri"], + } + ] + + return dict( + reference=candidate_data["candidateId"], + created_at=format_datetime(candidate_data["creationDateTime"]), + updated_at=format_datetime(candidate_data["lastModifiedDateTime"]), + resume=candidate_data["resume"], + tags=candidate_data["tags"], + metadatas=metadatas, + ) + + DESCRIPTION = ( "By understanding what employees need, how they work, and what motivates them, you" " can put people at the heart of your HR strategy." @@ -301,5 +332,19 @@ def format_profile(profile: HrFlowProfile) -> t.Dict: target=SAPProfileWarehouse, action_type=ActionType.outbound, ), + ConnectorAction( + name=ActionName.pull_profile_list, + trigger_type=WorkflowType.pull, + description=( + "Retrieves all profiles via the ***SAPSuccessFactors*** API and sends" + " them to a ***Hrflow.ai Board***." 
+ ), + parameters=BaseActionParameters.with_defaults( + "ReadProfilesActionParameters", format=format_sap_candidate + ), + origin=SAPProfileWarehouse, + target=HrFlowProfileParsingWarehouse, + action_type=ActionType.inbound, + ), ], ) diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/docs/pull_job_list.md b/src/hrflow_connectors/connectors/sapsuccessfactors/docs/pull_job_list.md index 2725c11d5..82ca94645 100644 --- a/src/hrflow_connectors/connectors/sapsuccessfactors/docs/pull_job_list.md +++ b/src/hrflow_connectors/connectors/sapsuccessfactors/docs/pull_job_list.md @@ -10,7 +10,7 @@ Retrieves all jobs via the ***SAPSuccessFactors*** API and sends them to a ***Hr | Field | Type | Default | Description | | ----- | ---- | ------- | ----------- | | `logics` | `typing.List[typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]]` | [] | List of logic functions | -| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_job`](../connector.py#L83) | Formatting function | +| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_job`](../connector.py#L84) | Formatting function | | `read_mode` | `str` | ReadMode.sync | If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read. | ## Source Parameters diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/docs/pull_profile_list.md b/src/hrflow_connectors/connectors/sapsuccessfactors/docs/pull_profile_list.md new file mode 100644 index 000000000..0dbfaede9 --- /dev/null +++ b/src/hrflow_connectors/connectors/sapsuccessfactors/docs/pull_profile_list.md @@ -0,0 +1,71 @@ +# Pull profile list +`SAP Profiles` :arrow_right: `HrFlow.ai Profile Parsing` + +Retrieves all profiles via the ***SAPSuccessFactors*** API and sends them to a ***Hrflow.ai Board***. 
+ + + +## Action Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `logics` | `typing.List[typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]]` | [] | List of logic functions | +| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_sap_candidate`](../connector.py#L278) | Formatting function | +| `read_mode` | `str` | ReadMode.sync | If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read. | + +## Source Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `api_server` :red_circle: | `str` | None | Server to be accessed | +| `api_key` :red_circle: | `str` | None | API Key used to authenticate on the SAP API | +| `top` | `int` | 100 | Show only the first N items value is capped at 100 | +| `skip` | `int` | None | Skip the first N items | +| `filter` | `str` | None | Filter items by property values | +| `search` | `str` | None | Search items by search phrases | + +## Destination Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `api_secret` :red_circle: | `str` | None | X-API-KEY used to access HrFlow.ai API | +| `api_user` :red_circle: | `str` | None | X-USER-EMAIL used to access HrFlow.ai API | +| `source_key` :red_circle: | `str` | None | HrFlow.ai source key | +| `only_insert` | `bool` | False | When enabled the profile is written only if it doesn't exist in the source | + +:red_circle: : *required* + +## Example + +```python +import logging +from hrflow_connectors import SAPSuccessFactors +from hrflow_connectors.core import ReadMode + + +logging.basicConfig(level=logging.INFO) + + +SAPSuccessFactors.pull_profile_list( + workflow_id="some_string_identifier", + action_parameters=dict( + logics=[], + 
format=lambda *args, **kwargs: None,  # Put your code logic here + read_mode=ReadMode.sync, + ), + origin_parameters=dict( + api_server="your_api_server", + api_key="your_api_key", + top=100, + skip=0, + filter="your_filter", + search="your_search", + ), + target_parameters=dict( + api_secret="your_api_secret", + api_user="your_api_user", + source_key="your_source_key", + only_insert=False, + ) +) +``` \ No newline at end of file diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/docs/push_profile.md b/src/hrflow_connectors/connectors/sapsuccessfactors/docs/push_profile.md index 006312ea0..05542d59b 100644 --- a/src/hrflow_connectors/connectors/sapsuccessfactors/docs/push_profile.md +++ b/src/hrflow_connectors/connectors/sapsuccessfactors/docs/push_profile.md @@ -10,7 +10,7 @@ Writes a profile taken from a Hrflow.ai Source to SAPSuccessFactors via the SAP | Field | Type | Default | Description | | ----- | ---- | ------- | ----------- | | `logics` | `typing.List[typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]]` | [] | List of logic functions | -| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_profile`](../connector.py#L223) | Formatting function | +| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_profile`](../connector.py#L224) | Formatting function | | `read_mode` | `str` | ReadMode.sync | If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read. 
| ## Source Parameters diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/test-config.yaml b/src/hrflow_connectors/connectors/sapsuccessfactors/test-config.yaml index 9e2d8faac..0a74feb99 100644 --- a/src/hrflow_connectors/connectors/sapsuccessfactors/test-config.yaml +++ b/src/hrflow_connectors/connectors/sapsuccessfactors/test-config.yaml @@ -60,4 +60,14 @@ actions: target_parameters: api_key: $__API_KEY api_server: $__API_SERVER + status: success + pull_profile_list: + - id: valid_parameters + origin_parameters: + api_secret: $__API_SECRET + api_user: $__API_USER + source_key: $__SOURCE_KEY + target_parameters: + api_key: $__API_KEY + api_server: $__API_SERVER status: success \ No newline at end of file diff --git a/src/hrflow_connectors/connectors/sapsuccessfactors/warehouse.py b/src/hrflow_connectors/connectors/sapsuccessfactors/warehouse.py index 8b6c3d02d..3a05305d3 100644 --- a/src/hrflow_connectors/connectors/sapsuccessfactors/warehouse.py +++ b/src/hrflow_connectors/connectors/sapsuccessfactors/warehouse.py @@ -1,3 +1,4 @@ +import base64 import typing as t from logging import LoggerAdapter @@ -164,78 +165,27 @@ def write( return failed_profiles -def read_parsing( - adapter: LoggerAdapter, - parameters: ReadJobsParameters, - read_mode: t.Optional[ReadMode] = None, - read_from: t.Optional[str] = None, -) -> t.Iterable[t.Dict]: - params = dict() - params["top"] = parameters.top - params["skip"] = parameters.skip - params["filter"] = parameters.filter - params["search"] = parameters.search - - url = f"{parameters.api_server}/odata/v2/Candidate" - headers = { - "api_key": parameters.api_key, - "Content-Type": "application/json", - "Accept": "application/pdf", # Request PDF format for the CV content - } - - # Set the parameters for the API call - params = { - "$select": "ID,Resume", # Only retrieve the ID and Resume fields - "$expand": "Resume($select=ID,Content)", # Also retrieve the ID - # and Content of the Resume entity - } - - # Make the GET 
request - response = requests.get(url, headers=headers, params=params) - - # Check if the request was successful - if response.status_code != 200: - adapter.error(f"Failed to retrieve candidate CVs. Response: {response.text}") - raise Exception(f"Failed to retrieve candidate CVs. Response: {response.text}") - - # Extract the candidate data from the response - candidates_data = response.json()["value"] - - # Extract the CV content for each candidate - cvs = [] - for candidate in candidates_data: - cv_content = candidate["Resume"]["Content"] - cvs.append(cv_content) - - return cvs - - def read_profiles( adapter: LoggerAdapter, - parameters: ReadJobsParameters, + parameters: ReadProfilesParameters, read_mode: t.Optional[ReadMode] = None, read_from: t.Optional[str] = None, ) -> t.Iterable[t.Dict]: - params = dict() - params["top"] = parameters.top - params["skip"] = parameters.skip - params["filter"] = parameters.filter - params["search"] = parameters.search - # Set the API endpoint URL endpoint_url = f"{parameters.api_server}/odata/v2/Candidate" # Set the headers with the API key headers = { - "api_key": parameters.api_key, - "Content-Type": "application/json", + "APIKey": parameters.api_key, + "Accept": "application/json", } # Set the parameters for the API call - params = { - "$select": "ID,FirstName,LastName", # Only retrieve the ID, - # FirstName, and LastName fields - } + params = dict() + params["$top"] = parameters.top + params["$skip"] = parameters.skip + params["$filter"] = parameters.filter + params["$search"] = parameters.search # Make the GET request response = requests.get(endpoint_url, headers=headers, params=params) @@ -246,7 +196,28 @@ def read_profiles( raise Exception(f"Failed to retrieve candidates. 
Response: {response.text}") # Extract the candidate data from the response - candidates_data = response.json()["value"] + candidates = response.json()["d"]["results"] + candidates_data = [] + for candidate in candidates: + # Retrieve the resume + resume = dict() + resume_url = candidate["resume"]["__deferred"]["uri"] + resume_response = requests.get(resume_url, headers=headers) + file_content = resume_response.json()["d"]["fileContent"] + resume["raw"] = base64.b64decode(file_content) + resume["content_type"] = resume_response.json()["d"]["mimeType"] + # Retrieve the tags + tags_url = candidate["tags"]["__deferred"]["uri"] + tags_response = requests.get(tags_url, headers=headers) + tags = tags_response.json()["d"]["results"] + candidates_data.append( + dict( + candidateId=candidate["candidateId"], + resume=resume, + tags=tags, + metadata=candidate["__metadata"], + ) + ) return candidates_data @@ -260,15 +231,9 @@ def read_profiles( function=write, endpoints=[], ), -) - -SAPProfileParsingWarehouse = Warehouse( - name="SAP Profiles Parsing", - data_schema=SapCandidateModel, - data_type=DataType.profile, read=WarehouseReadAction( parameters=ReadProfilesParameters, - function=read_parsing, + function=read_profiles, endpoints=[], ), )