Skip to content

Commit

Permalink
feat: consider info if exists rather than the parsed info (#221)
Browse files Browse the repository at this point in the history
* feat: consider info if exists rather than the parsed info

* fix: add case urls in info of profile json

* fix: after jamal review

* fix: replace cv url by cv binary (#220)

* fix:add some error handling for jobology connector

* fix:jobology flake8 connector

* fix:some type

* fix:regarding jamal review

* fix: replace cv url by cv binary

* docs: update docs

* fix: flake8 outputs

* fix: jobology catch profile

* docs: update docs

* fix: regarding jamal review

* fix: handle possible error binascii

* fix: flake8 and docs

* fix: some flake8 output

* fix: correct update date for Jobology connector (#222)

Co-authored-by: the-forest-tree <the-forest-tree@hrflow.ai>

* 4.6.1

Automatically generated by python-semantic-release

* fix: regarding jamal review location=value

---------

Co-authored-by: the-forest-tree <65894619+the-forest-tree@users.noreply.github.com>
Co-authored-by: the-forest-tree <the-forest-tree@hrflow.ai>
Co-authored-by: hrflow-semantic-release <hrflow-semantic-release>
  • Loading branch information
3 people committed Mar 15, 2024
1 parent 9688bff commit aeed305
Showing 1 changed file with 55 additions and 3 deletions.
58 changes: 55 additions & 3 deletions src/hrflow_connectors/connectors/hrflow/warehouse/profile.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,6 +174,30 @@ def write(
return failed


def merge_info(base: dict, info: dict) -> dict:
    """Merge user-supplied ``info`` into the parsed profile ``base`` in place.

    URLs found in ``info`` are appended to the parsed URL list, skipping
    duplicates. Every other truthy field of ``info`` overrides the parsed
    value, except ``location``, which is only taken when it is a dict with at
    least one truthy value (an all-empty location would erase parsed data).

    Args:
        base: parsed profile dict; its ``"info"`` sub-dict is mutated in place.
        info: user-supplied info dict; may be empty or ``None``.

    Returns:
        The same ``base`` dict, returned for convenience.
    """
    if not info:
        # Nothing to merge; keep the parsed info untouched.
        return base

    # ``or`` guards handle keys that are present but explicitly None, which
    # ``dict.get`` defaults do not cover and which previously crashed below.
    info_parsed = base.get("info") or {}
    existing_urls = info_parsed.get("urls") or []

    if isinstance(info.get("urls"), list):
        for new_url in info["urls"]:
            if new_url not in existing_urls:
                existing_urls.append(new_url)

    info_parsed["urls"] = existing_urls

    for key, value in info.items():
        if key == "location":
            # Only accept a location that actually carries some data.
            if isinstance(value, dict) and any(value.values()):
                info_parsed[key] = value
        elif key != "urls" and value:
            # Truthy user-supplied fields win over the parsed ones; falsy
            # values are ignored so they never erase parsed data.
            info_parsed[key] = value

    base["info"] = info_parsed
    return base


def write_parsing(
adapter: LoggerAdapter,
parameters: WriteProfileParsingParameters,
Expand All @@ -184,6 +208,7 @@ def write_parsing(
api_secret=parameters.api_secret, api_user=parameters.api_user
)
for profile in profiles:
profile_info = profile.pop("info", {})
if parameters.only_insert and hrflow_client.profile.indexing.get(
source_key=parameters.source_key, reference=profile["reference"]
).get("data"):
Expand All @@ -193,7 +218,7 @@ def write_parsing(
)
continue

response = hrflow_client.profile.parsing.add_file(
parsing_response = hrflow_client.profile.parsing.add_file(
source_key=parameters.source_key,
profile_file=profile["resume"]["raw"],
profile_content_type=profile["resume"]["content_type"],
Expand All @@ -202,13 +227,40 @@ def write_parsing(
metadatas=profile["metadatas"],
created_at=profile["created_at"],
)
if response["code"] != 202:
if parsing_response["code"] not in [202, 201]: # 202: Accepted, 201: Created
adapter.error(
"Failed to parse profile with reference={} response={}".format(
profile["reference"], response
profile["reference"], parsing_response
)
)
failed.append(profile)
continue
source_response = hrflow_client.source.get(
key=parameters.source_key
) # Get source to check if sync_parsing is enabled
if source_response["code"] != 200:
adapter.warning(
"Failed to get source with key={} response={}, won't be able to update"
" profile parsed with profile json".format(
parameters.source_key, source_response
)
)
elif source_response["data"]["sync_parsing"] is True:
current_profile = parsing_response["data"]["profile"]
profile_result = merge_info(current_profile, profile_info)
edit_response = hrflow_client.profile.indexing.edit(
source_key=parameters.source_key,
key=profile_result["key"],
profile_json=profile_result,
)
if edit_response["code"] != 200:
adapter.warning(
"Failed to update profile after parsing, reference={}"
" response={}".format(profile["reference"], edit_response)
)
failed.append(profile)
continue

return failed


Expand Down

0 comments on commit aeed305

Please sign in to comment.