diff --git a/README.md b/README.md index 997d98626..5b4cacfb4 100644 --- a/README.md +++ b/README.md @@ -53,7 +53,7 @@ We invite developers to join us in our mission to bring AI and data integration | **BambooHR** | ATS | 🎯 | | | | | | | | **BizMerlinHR** | ATS | 🎯 | | | | | | | | **Beetween** | ATS | 🎯 | | | | | | | -| [**Bullhorn**](./src/hrflow_connectors/connectors/bullhorn/README.md) | ATS | :white_check_mark: | *26/01/2022* | *30/10/2023* | :white_check_mark: | :white_check_mark: | :white_check_mark: | :x: | :x: | +| [**Bullhorn**](./src/hrflow_connectors/connectors/bullhorn/README.md) | ATS | :white_check_mark: | *26/01/2022* | *11/07/2024* | :white_check_mark: | :white_check_mark: | :white_check_mark: | :x: | :x: | | [**Breezy HR**](./src/hrflow_connectors/connectors/breezyhr/README.md) | ATS | :white_check_mark: | *19/01/2022* | *01/07/2024* | :white_check_mark: | :white_check_mark: | :x: | :x: | :x: | | **CATS** | ATS | 🎯 | | | | | | | | **Cegid (Meta4)** | ATS | 🎯 | | | | | | | diff --git a/manifest.json b/manifest.json index b7a4a8ac2..d034b8ff5 100644 --- a/manifest.json +++ b/manifest.json @@ -13619,7 +13619,7 @@ }, "password": { "title": "Password", - "description": "Passoword for Bullhorn login", + "description": "Password for Bullhorn login", "field_type": "Auth", "type": "string" }, @@ -13800,7 +13800,7 @@ }, "password": { "title": "Password", - "description": "Passoword for Bullhorn login", + "description": "Password for Bullhorn login", "field_type": "Auth", "type": "string" }, @@ -13809,13 +13809,34 @@ "description": "Username for Bullhorn login", "field_type": "Auth", "type": "string" + }, + "last_modified_date": { + "title": "Last Modified Date", + "description": "Last Modified Date in timestamp", + "field_type": "Auth", + "type": "string" + }, + "fields": { + "title": "Fields", + "description": "Fields to be retrieved from Bullhorn", + "field_type": "Auth", + "type": "string" + }, + "query": { + "title": "Query", + "description": "the query 
parameters", + "field_type": "Auth", + "type": "string" } }, "required": [ "client_id", "client_secret", "password", - "username" + "username", + "last_modified_date", + "fields", + "query" ], "additionalProperties": false }, @@ -14160,7 +14181,7 @@ } } }, - "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Bullhorn\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Bullhorn.pull_job_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['client_id', 'client_secret', 'password', 'username']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'board_key', 'sync', 'update_content', 'enrich_with_parsing']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return Bullhorn.pull_job_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", + 
"workflow_code": "import typing as t\n\nfrom hrflow_connectors import Bullhorn\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n\ndef workflow(\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Bullhorn.pull_job_list(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n\n origin_parameters = dict()\n for parameter in ['client_id', 'client_secret', 'password', 'username', 'last_modified_date', 'fields', 'query']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n\n target_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'board_key', 'sync', 'update_content', 'enrich_with_parsing']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n\n return Bullhorn.pull_job_list(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", "workflow_code_format_placeholder": "# << format_placeholder >>", "workflow_code_logics_placeholder": "# << logics_placeholder >>", "workflow_code_workflow_id_settings_key": "__workflow_id", @@ -14229,7 +14250,7 @@ }, "password": { 
"title": "Password", - "description": "Passoword for Bullhorn login", + "description": "Password for Bullhorn login", "field_type": "Auth", "type": "string" }, @@ -14529,7 +14550,7 @@ }, "password": { "title": "Password", - "description": "Passoword for Bullhorn login", + "description": "Password for Bullhorn login", "field_type": "Auth", "type": "string" }, @@ -15224,6 +15245,781 @@ "workflow_code_workflow_id_settings_key": "__workflow_id", "workflow_code_origin_settings_prefix": "origin_", "workflow_code_target_settings_prefix": "target_" + }, + { + "name": "push_application", + "action_type": "outbound", + "action_parameters": { + "title": "WriteProfileActionParameters", + "type": "object", + "properties": { + "read_mode": { + "description": "If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read.", + "default": "sync", + "allOf": [ + { + "$ref": "#/definitions/ReadMode" + } + ] + }, + "logics": { + "title": "logics", + "description": "List of logic functions. Each function should have the following signature typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]. The final list should be exposed in a variable named 'logics'.", + "template": "\nimport typing as t\n\ndef logic_1(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\ndef logic_2(item: t.Dict) -> t.Union[t.Dict, None]:\n return None\n\nlogics = [logic_1, logic_2]\n", + "type": "code_editor" + }, + "format": { + "title": "format", + "description": "Formatting function. 
You should expose a function named 'format' with following signature typing.Callable[[typing.Dict], typing.Dict]", + "template": "\nimport typing as t\n\ndef format(item: t.Dict) -> t.Dict:\n return item\n", + "type": "code_editor" + }, + "event_parser": { + "title": "event_parser", + "description": "Event parsing function for **CATCH** integrations. You should expose a function named 'event_parser' with following signature typing.Callable[[typing.Dict], typing.Dict]", + "template": "\nimport typing as t\n\ndef event_parser(event: t.Dict) -> t.Dict:\n parsed = dict()\n parsed[\"user_id\"] = event[\"email\"]\n parsed[\"thread_id\"] = event[\"subscription_id\"]\n return parsed\n", + "type": "code_editor" + } + }, + "additionalProperties": false, + "definitions": { + "ReadMode": { + "title": "ReadMode", + "description": "An enumeration.", + "enum": [ + "sync", + "incremental" + ] + } + } + }, + "data_type": "profile", + "trigger_type": "hook", + "origin": "HrFlow.ai Profiles", + "origin_parameters": { + "title": "ReadProfileParameters", + "type": "object", + "properties": { + "api_secret": { + "title": "Api Secret", + "description": "X-API-KEY used to access HrFlow.ai API", + "field_type": "Auth", + "type": "string" + }, + "api_user": { + "title": "Api User", + "description": "X-USER-EMAIL used to access HrFlow.ai API", + "field_type": "Auth", + "type": "string" + }, + "source_key": { + "title": "Source Key", + "description": "HrFlow.ai source key", + "field_type": "Query Param", + "type": "string" + }, + "profile_key": { + "title": "Profile Key", + "description": "HrFlow.ai profile key", + "field_type": "Query Param", + "type": "string" + } + }, + "required": [ + "api_secret", + "api_user", + "source_key", + "profile_key" + ], + "additionalProperties": false + }, + "origin_data_schema": { + "title": "HrFlowProfile", + "type": "object", + "properties": { + "key": { + "title": "Key", + "description": "Identification key of the Profile.", + "type": "string" + }, + 
"reference": { + "title": "Reference", + "description": "Custom identifier of the Profile.", + "type": "string" + }, + "info": { + "title": "Info", + "description": "Object containing the Profile's info.", + "allOf": [ + { + "$ref": "#/definitions/ProfileInfo" + } + ] + }, + "text_language": { + "title": "Text Language", + "description": "Code language of the Profile. type: string code ISO 639-1", + "type": "string" + }, + "text": { + "title": "Text", + "description": "Full text of the Profile.", + "type": "string" + }, + "archived_at": { + "title": "Archived At", + "description": "type: datetime ISO8601, Archive date of the Profile. The value is null for unarchived Profiles.", + "type": "string" + }, + "updated_at": { + "title": "Updated At", + "description": "type: datetime ISO8601, Last update date of the Profile.", + "type": "string" + }, + "created_at": { + "title": "Created At", + "description": "type: datetime ISO8601, Creation date of the Profile.", + "type": "string" + }, + "experiences_duration": { + "title": "Experiences Duration", + "description": "Total number of years of experience.", + "type": "number" + }, + "educations_duration": { + "title": "Educations Duration", + "description": "Total number of years of education.", + "type": "number" + }, + "experiences": { + "title": "Experiences", + "description": "List of experiences of the Profile.", + "type": "array", + "items": { + "$ref": "#/definitions/Experience" + } + }, + "educations": { + "title": "Educations", + "description": "List of educations of the Profile.", + "type": "array", + "items": { + "$ref": "#/definitions/Education" + } + }, + "attachments": { + "title": "Attachments", + "description": "List of documents attached to the Profile.", + "type": "array", + "items": {} + }, + "skills": { + "title": "Skills", + "description": "List of skills of the Profile.", + "type": "array", + "items": { + "$ref": "#/definitions/Skill" + } + }, + "languages": { + "title": "Languages", + "description": 
"List of spoken languages of the profile", + "type": "array", + "items": { + "$ref": "#/definitions/GeneralEntitySchema" + } + }, + "certifications": { + "title": "Certifications", + "description": "List of certifications of the Profile.", + "type": "array", + "items": { + "$ref": "#/definitions/GeneralEntitySchema" + } + }, + "courses": { + "title": "Courses", + "description": "List of courses of the Profile.", + "type": "array", + "items": { + "$ref": "#/definitions/GeneralEntitySchema" + } + }, + "tasks": { + "title": "Tasks", + "description": "List of tasks of the Profile.", + "type": "array", + "items": { + "$ref": "#/definitions/GeneralEntitySchema" + } + }, + "interests": { + "title": "Interests", + "description": "List of interests of the Profile.", + "type": "array", + "items": { + "$ref": "#/definitions/GeneralEntitySchema" + } + }, + "tags": { + "title": "Tags", + "description": "List of tags of the Profile.", + "type": "array", + "items": { + "$ref": "#/definitions/GeneralEntitySchema" + } + }, + "metadatas": { + "title": "Metadatas", + "description": "List of metadatas of the Profile.", + "type": "array", + "items": { + "$ref": "#/definitions/GeneralEntitySchema" + } + }, + "labels": { + "title": "Labels", + "description": "List of labels of the Profile.", + "type": "array", + "items": { + "$ref": "#/definitions/Label" + } + } + }, + "definitions": { + "Location": { + "title": "Location", + "type": "object", + "properties": { + "text": { + "title": "Text", + "description": "Location text address.", + "type": "string" + }, + "lat": { + "title": "Lat", + "description": "Geocentric latitude of the Location.", + "type": "number" + }, + "lng": { + "title": "Lng", + "description": "Geocentric longitude of the Location.", + "type": "number" + } + } + }, + "InfoUrl": { + "title": "InfoUrl", + "type": "object", + "properties": { + "type": { + "title": "Type", + "enum": [ + "from_resume", + "linkedin", + "twitter", + "facebook", + "github" + ], + "type": 
"string" + }, + "url": { + "title": "Url", + "type": "string" + } + }, + "required": [ + "type" + ] + }, + "ProfileInfo": { + "title": "ProfileInfo", + "type": "object", + "properties": { + "full_name": { + "title": "Full Name", + "type": "string" + }, + "first_name": { + "title": "First Name", + "type": "string" + }, + "last_name": { + "title": "Last Name", + "type": "string" + }, + "email": { + "title": "Email", + "type": "string" + }, + "phone": { + "title": "Phone", + "type": "string" + }, + "date_birth": { + "title": "Date Birth", + "description": "Profile date of birth", + "type": "string" + }, + "location": { + "title": "Location", + "description": "Profile location object", + "allOf": [ + { + "$ref": "#/definitions/Location" + } + ] + }, + "urls": { + "title": "Urls", + "description": "Profile social networks and URLs", + "type": "array", + "items": { + "$ref": "#/definitions/InfoUrl" + } + }, + "picture": { + "title": "Picture", + "description": "Profile picture url", + "type": "string" + }, + "gender": { + "title": "Gender", + "description": "Profile gender", + "type": "string" + }, + "summary": { + "title": "Summary", + "description": "Profile summary text", + "type": "string" + } + } + }, + "Skill": { + "title": "Skill", + "type": "object", + "properties": { + "name": { + "title": "Name", + "description": "Identification name of the skill", + "type": "string" + }, + "type": { + "title": "Type", + "description": "Type of the skill. 
hard or soft", + "enum": [ + "hard", + "soft" + ], + "type": "string" + }, + "value": { + "title": "Value", + "description": "Value associated to the skill", + "type": "string" + } + }, + "required": [ + "name", + "type" + ] + }, + "GeneralEntitySchema": { + "title": "GeneralEntitySchema", + "type": "object", + "properties": { + "name": { + "title": "Name", + "description": "Identification name of the Object", + "type": "string" + }, + "value": { + "title": "Value", + "description": "Value associated to the Object's name", + "type": "string" + } + }, + "required": [ + "name" + ] + }, + "Experience": { + "title": "Experience", + "type": "object", + "properties": { + "key": { + "title": "Key", + "description": "Identification key of the Experience.", + "type": "string" + }, + "company": { + "title": "Company", + "description": "Company name of the Experience.", + "type": "string" + }, + "logo": { + "title": "Logo", + "description": "Logo of the Company", + "type": "string" + }, + "title": { + "title": "Title", + "description": "Title of the Experience.", + "type": "string" + }, + "description": { + "title": "Description", + "description": "Description of the Experience.", + "type": "string" + }, + "location": { + "title": "Location", + "description": "Location object of the Experience.", + "allOf": [ + { + "$ref": "#/definitions/Location" + } + ] + }, + "date_start": { + "title": "Date Start", + "description": "Start date of the experience. type: ('datetime ISO 8601')", + "type": "string" + }, + "date_end": { + "title": "Date End", + "description": "End date of the experience. 
type: ('datetime ISO 8601')", + "type": "string" + }, + "skills": { + "title": "Skills", + "description": "List of skills of the Experience.", + "type": "array", + "items": { + "$ref": "#/definitions/Skill" + } + }, + "certifications": { + "title": "Certifications", + "type": "array", + "items": { + "$ref": "#/definitions/GeneralEntitySchema" + } + }, + "courses": { + "title": "Courses", + "type": "array", + "items": { + "$ref": "#/definitions/GeneralEntitySchema" + } + }, + "tasks": { + "title": "Tasks", + "type": "array", + "items": { + "$ref": "#/definitions/GeneralEntitySchema" + } + } + } + }, + "Education": { + "title": "Education", + "type": "object", + "properties": { + "key": { + "title": "Key", + "description": "Identification key of the Education.", + "type": "string" + }, + "school": { + "title": "School", + "description": "School name of the Education.", + "type": "string" + }, + "logo": { + "title": "Logo", + "description": "Logo of the School", + "type": "string" + }, + "title": { + "title": "Title", + "description": "Title of the Education.", + "type": "string" + }, + "description": { + "title": "Description", + "description": "Description of the Education.", + "type": "string" + }, + "location": { + "title": "Location", + "description": "Location object of the Education.", + "allOf": [ + { + "$ref": "#/definitions/Location" + } + ] + }, + "date_start": { + "title": "Date Start", + "description": "Start date of the Education. type: ('datetime ISO 8601')", + "type": "string" + }, + "date_end": { + "title": "Date End", + "description": "End date of the Education. 
type: ('datetime ISO 8601')", + "type": "string" + }, + "skills": { + "title": "Skills", + "description": "List of skills of the Education.", + "type": "array", + "items": { + "$ref": "#/definitions/Skill" + } + }, + "certifications": { + "title": "Certifications", + "type": "array", + "items": { + "$ref": "#/definitions/GeneralEntitySchema" + } + }, + "courses": { + "title": "Courses", + "type": "array", + "items": { + "$ref": "#/definitions/GeneralEntitySchema" + } + }, + "tasks": { + "title": "Tasks", + "type": "array", + "items": { + "$ref": "#/definitions/GeneralEntitySchema" + } + } + } + }, + "Label": { + "title": "Label", + "type": "object", + "properties": { + "board_key": { + "title": "Board Key", + "description": "Identification key of the Board containing the target Job.", + "type": "string" + }, + "job_key": { + "title": "Job Key", + "description": "Identification key of the Job.", + "type": "string" + }, + "job_reference": { + "title": "Job Reference", + "description": "Custom identifier of the Job.", + "type": "string" + }, + "stage": { + "title": "Stage", + "description": "Stage associated to the Profile following the action of a recruiter (yes, no, later).", + "enum": [ + "yes", + "no", + "later" + ], + "type": "string" + }, + "date_stage": { + "title": "Date Stage", + "description": "Date of the stage edit action. type: ('datetime ISO 8601')", + "type": "string" + }, + "rating": { + "title": "Rating", + "description": "Rating associated to the Profile following the action of a recruiter (from 1 to 5).", + "enum": [ + 1, + 2, + 3, + 4, + 5 + ], + "type": "integer" + }, + "date_rating": { + "title": "Date Rating", + "description": "Date of the rating action. 
type: ('datetime ISO 8601')", + "type": "string" + } + }, + "required": [ + "board_key", + "job_key", + "job_reference", + "stage" + ] + } + } + }, + "supports_incremental": false, + "target": "Bullhorn Applications", + "target_parameters": { + "title": "WriteApplicationsParameters", + "type": "object", + "properties": { + "client_id": { + "title": "Client Id", + "description": "Client identifier for Bullhorn", + "field_type": "Auth", + "type": "string" + }, + "client_secret": { + "title": "Client Secret", + "description": "Client secret identifier for Bullhorn", + "field_type": "Auth", + "type": "string" + }, + "password": { + "title": "Password", + "description": "Password for Bullhorn login", + "field_type": "Auth", + "type": "string" + }, + "username": { + "title": "Username", + "description": "Username for Bullhorn login", + "field_type": "Auth", + "type": "string" + }, + "job_id": { + "title": "Job Id", + "description": "id for the job in Bullhorn", + "field_type": "Auth", + "type": "string" + }, + "status_when_created": { + "title": "Status When Created", + "description": "The status of the application when created in Bullhorn", + "field_type": "Auth", + "type": "string" + }, + "source": { + "title": "Source", + "description": "The source of the application to be created in Bullhorn", + "field_type": "Auth", + "type": "string" + } + }, + "required": [ + "client_id", + "client_secret", + "password", + "username", + "job_id", + "status_when_created" + ], + "additionalProperties": false + }, + "target_data_schema": { + "title": "BullhornProfile", + "type": "object", + "properties": { + "id": { + "title": "Id", + "Description": "Unique identifier for this entity", + "type": "string" + }, + "address": { + "title": "Address", + "Description": "Candidate address", + "allOf": [ + { + "$ref": "#/definitions/BullhornAddress" + } + ] + }, + "certifications": { + "title": "Certifications", + "Description": "Candidate\u2019s certifications" + }, + "name": { + "title": 
"Name", + "Description": "Candidate\u2019s full name. If setting firstname or lastname, you must also set this field; it does not populate automatically", + "type": "string" + }, + "firstName": { + "title": "Firstname", + "Description": "Candidate\u2019s first name", + "type": "string" + }, + "lastName": { + "title": "Lastname", + "Description": "Candidate\u2019s last name", + "type": "string" + }, + "email": { + "title": "Email", + "Description": "Candidate\u2019s email address", + "type": "string" + }, + "mobile": { + "title": "Mobile", + "Description": "Candidate\u2019s mobile (cell) telephone number", + "type": "string" + }, + "dateOfBirth": { + "title": "Dateofbirth", + "Description": "Candidate\u2019s date of birth", + "type": "integer" + }, + "experience": { + "title": "Experience", + "Description": "Number of years of experience that the Candidate has", + "type": "integer" + }, + "skillSet": { + "title": "Skillset", + "Description": "Text description of Candidate\u2019s skills", + "type": "string" + } + }, + "definitions": { + "BullhornAddress": { + "title": "BullhornAddress", + "type": "object", + "properties": { + "address1": { + "title": "Address1", + "Description": "Address of the profile", + "type": "string" + }, + "city": { + "title": "City", + "Description": "City of the profile", + "type": "string" + }, + "state": { + "title": "State", + "Description": "Country code of the profile", + "type": "string" + }, + "zip": { + "title": "Zip", + "Description": "Postal code of the profile", + "type": "string" + } + } + } + } + }, + "workflow_code": "import typing as t\n\nfrom hrflow_connectors import Bullhorn\nfrom hrflow_connectors.core.connector import ActionInitError, Reason\n\nORIGIN_SETTINGS_PREFIX = \"origin_\"\nTARGET_SETTINGS_PREFIX = \"target_\"\n\n# << format_placeholder >>\n\n# << logics_placeholder >>\n\n# << event_parser_placeholder >>\n\n\ndef workflow(\n \n _request: t.Dict,\n \n settings: t.Dict\n ) -> None:\n actions_parameters = dict()\n try:\n 
format\n except NameError:\n pass\n else:\n actions_parameters[\"format\"] = format\n\n try:\n logics\n except NameError:\n pass\n else:\n actions_parameters[\"logics\"] = logics\n\n if \"__workflow_id\" not in settings:\n return Bullhorn.push_application(\n workflow_id=\"\",\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.workflow_id_not_found,\n data=dict(error=\"__workflow_id not found in settings\", settings_keys=list(settings.keys())),\n )\n )\n workflow_id = settings[\"__workflow_id\"]\n\n \n try:\n event_parser\n _event_parser = event_parser\n except NameError as e:\n action = Bullhorn.model.action_by_name(\"push_application\")\n # Without this trick event_parser is always only fetched from the local scope\n # meaning that try block always raises NameError even if the function is\n # defined in the placeholder\n _event_parser = action.parameters.__fields__[\"event_parser\"].default\n\n if _event_parser is not None:\n try:\n _request = _event_parser(_request)\n except Exception as e:\n return Bullhorn.push_application(\n workflow_id=workflow_id,\n action_parameters=dict(),\n origin_parameters=dict(),\n target_parameters=dict(),\n init_error=ActionInitError(\n reason=Reason.event_parsing_failure,\n data=dict(error=e, event=_request),\n )\n )\n \n\n origin_parameters = dict()\n for parameter in ['api_secret', 'api_user', 'source_key', 'profile_key']:\n if \"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter) in settings:\n origin_parameters[parameter] = settings[\"{}{}\".format(ORIGIN_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n origin_parameters[parameter] = _request[parameter]\n \n\n target_parameters = dict()\n for parameter in ['client_id', 'client_secret', 'password', 'username', 'job_id', 'status_when_created', 'source']:\n if \"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter) in settings:\n target_parameters[parameter] = 
settings[\"{}{}\".format(TARGET_SETTINGS_PREFIX, parameter)]\n \n if parameter in _request:\n target_parameters[parameter] = _request[parameter]\n \n\n return Bullhorn.push_application(\n workflow_id=workflow_id,\n action_parameters=actions_parameters,\n origin_parameters=origin_parameters,\n target_parameters=target_parameters,\n )", + "workflow_code_format_placeholder": "# << format_placeholder >>", + "workflow_code_logics_placeholder": "# << logics_placeholder >>", + "workflow_code_event_parser_placeholder": "# << event_parser_placeholder >>", + "workflow_code_workflow_id_settings_key": "__workflow_id", + "workflow_code_origin_settings_prefix": "origin_", + "workflow_code_target_settings_prefix": "target_" } ], "type": "ATS", diff --git a/src/hrflow_connectors/connectors/bullhorn/README.md b/src/hrflow_connectors/connectors/bullhorn/README.md index 5152b2aea..9e7fea19c 100644 --- a/src/hrflow_connectors/connectors/bullhorn/README.md +++ b/src/hrflow_connectors/connectors/bullhorn/README.md @@ -56,6 +56,7 @@ This new connector will enable: | [**Pull job list**](docs/pull_job_list.md) | Retrieves jobs from Bullhorn and writes them to Hrflow.ai Board | | [**Pull resume attachment list**](docs/pull_resume_attachment_list.md) | retrieves profiles attachments from Bullhorn and Parses them and sends them to Hrflow.ai source | | [**Pull profile list**](docs/pull_profile_list.md) | Retrieves profiles from Bullhorn and writes them to Hrflow.ai source | +| [**Push application**](docs/push_application.md) | Retrieves profiles from Hrflow.ai and writes their applications to the Bullhorn source |

| diff --git a/src/hrflow_connectors/connectors/bullhorn/connector.py b/src/hrflow_connectors/connectors/bullhorn/connector.py index aa918764e..e16153a31 100644 --- a/src/hrflow_connectors/connectors/bullhorn/connector.py +++ b/src/hrflow_connectors/connectors/bullhorn/connector.py @@ -6,6 +6,7 @@ from hrflow_connectors.connectors.bullhorn.schemas import BullhornProfile from hrflow_connectors.connectors.bullhorn.utils import date_format from hrflow_connectors.connectors.bullhorn.warehouse import ( + BullhornApplicationWarehouse, BullhornJobWarehouse, BullhornProfileParsingWarehouse, BullhornProfileWarehouse, @@ -43,6 +44,7 @@ def get_location(info: t.Dict) -> t.Dict: fields = {} location_dict = { "address1": location.get("text"), + "address2": None, "city": fields.get("city"), "state": fields.get("country"), "zip": fields.get("postcode"), @@ -126,7 +128,13 @@ def get_experience(experience_list: t.List[t.Dict]) -> t.List[t.Dict]: return experience_json -def get_attachments(attachment_list: t.List[t.Dict]) -> t.List[t.Dict]: +def get_attachments( + attachment_list: t.List[t.Dict], + file_type: str = "SAMPLE", + content_type: str = "text/plain", + type: str = "cover", + format: bool = False, +) -> t.List[t.Dict]: attachments_json = [] for hrflow_attachment in attachment_list: url = hrflow_attachment["public_url"] @@ -136,12 +144,15 @@ def get_attachments(attachment_list: t.List[t.Dict]) -> t.List[t.Dict]: attachment = { "externalID": "portfolio", "fileContent": b64.decode(), - "fileType": "SAMPLE", + "fileType": file_type, "name": hrflow_attachment["file_name"], - "contentType": "text/plain", "description": "Resume file for candidate.", - "type": "cover", + "type": type, } + if format: + attachment["format"] = "PDF" + else: + attachment["contentType"] = content_type attachments_json.append(attachment) return attachments_json @@ -371,6 +382,28 @@ def profile_format(data: BullhornProfile) -> t.Dict: return profile +def format_application(data: HrFlowProfile) -> t.Dict: 
+ info = data.get("info") or {} + attachments = ( + [data["attachments"][0]] if data.get("attachments") else [] + ) + profile = { + "firstName": info.get("first_name"), + "lastName": info.get("last_name"), + "name": info.get("full_name"), + "address": get_location(info), + "email": info.get("email"), + "mobile": info.get("phone"), + } + + attachment_list = get_attachments( + attachments, file_type="RESUME", type="RESUME", format=True + ) + + profile["attachment"] = attachment_list[0] if len(attachment_list) > 0 else {} + return profile + + DESCRIPTION = "Transform Your Business with Bullhorn Staffing and Recruitment Software" Bullhorn = Connector( @@ -432,5 +465,19 @@ def profile_format(data: BullhornProfile) -> t.Dict: target=HrFlowProfileWarehouse, action_type=ActionType.inbound, ), + ConnectorAction( + name=ActionName.push_application, + trigger_type=WorkflowType.catch, + description=( + "Retrieves profiles from Hrflow.ai and writes their applications to the" + " Bullhorn source" + ), + parameters=BaseActionParameters.with_defaults( + "WriteProfileActionParameters", format=format_application + ), + origin=HrFlowProfileWarehouse, + target=BullhornApplicationWarehouse, + action_type=ActionType.outbound, + ), ], ) diff --git a/src/hrflow_connectors/connectors/bullhorn/docs/pull_job_list.md b/src/hrflow_connectors/connectors/bullhorn/docs/pull_job_list.md index 7c31e7c3e..115ba20a3 100644 --- a/src/hrflow_connectors/connectors/bullhorn/docs/pull_job_list.md +++ b/src/hrflow_connectors/connectors/bullhorn/docs/pull_job_list.md @@ -10,7 +10,7 @@ Retrieves jobs from Bullhorn and writes them to Hrflow.ai Board | Field | Type | Default | Description | | ----- | ---- | ------- | ----------- | | `logics` | `typing.List[typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]]` | [] | List of logic functions | -| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_job`](../connector.py#L188) | Formatting function | +| `format` | 
`typing.Callable[[typing.Dict], typing.Dict]` | [`format_job`](../connector.py#L199) | Formatting function | | `read_mode` | `str` | ReadMode.sync | If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read. | ## Source Parameters @@ -19,8 +19,11 @@ Retrieves jobs from Bullhorn and writes them to Hrflow.ai Board | ----- | ---- | ------- | ----------- | | `client_id` :red_circle: | `str` | None | Client identifier for Bullhorn | | `client_secret` :red_circle: | `str` | None | Client secret identifier for Bullhorn | -| `password` :red_circle: | `str` | None | Passoword for Bullhorn login | +| `password` :red_circle: | `str` | None | Password for Bullhorn login | | `username` :red_circle: | `str` | None | Username for Bullhorn login | +| `last_modified_date` :red_circle: | `str` | None | Last Modified Date in timestamp | +| `fields` :red_circle: | `str` | None | Fields to be retrieved from Bullhorn | +| `query` :red_circle: | `str` | None | the query parameters | ## Destination Parameters @@ -58,6 +61,9 @@ Bullhorn.pull_job_list( client_secret="your_client_secret", password="your_password", username="your_username", + last_modified_date="your_last_modified_date", + fields="your_fields", + query="your_query", ), target_parameters=dict( api_secret="your_api_secret", diff --git a/src/hrflow_connectors/connectors/bullhorn/docs/pull_profile_list.md b/src/hrflow_connectors/connectors/bullhorn/docs/pull_profile_list.md index 6eef0615c..5a15e5dec 100644 --- a/src/hrflow_connectors/connectors/bullhorn/docs/pull_profile_list.md +++ b/src/hrflow_connectors/connectors/bullhorn/docs/pull_profile_list.md @@ -10,7 +10,7 @@ Retrieves profiles from Bullhorn and writes them to Hrflow.ai source | Field | Type | Default | Description | | ----- | ---- | ------- | ----------- | | `logics` | 
`typing.List[typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]]` | [] | List of logic functions | -| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`profile_format`](../connector.py#L276) | Formatting function | +| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`profile_format`](../connector.py#L287) | Formatting function | | `read_mode` | `str` | ReadMode.sync | If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read. | ## Source Parameters @@ -19,7 +19,7 @@ Retrieves profiles from Bullhorn and writes them to Hrflow.ai source | ----- | ---- | ------- | ----------- | | `client_id` :red_circle: | `str` | None | Client identifier for Bullhorn | | `client_secret` :red_circle: | `str` | None | Client secret identifier for Bullhorn | -| `password` :red_circle: | `str` | None | Passoword for Bullhorn login | +| `password` :red_circle: | `str` | None | Password for Bullhorn login | | `username` :red_circle: | `str` | None | Username for Bullhorn login | ## Destination Parameters diff --git a/src/hrflow_connectors/connectors/bullhorn/docs/pull_resume_attachment_list.md b/src/hrflow_connectors/connectors/bullhorn/docs/pull_resume_attachment_list.md index dcf823afc..c23149d1b 100644 --- a/src/hrflow_connectors/connectors/bullhorn/docs/pull_resume_attachment_list.md +++ b/src/hrflow_connectors/connectors/bullhorn/docs/pull_resume_attachment_list.md @@ -10,7 +10,7 @@ retrieves profiles attachments from Bullhorn and Parses them and sends them to H | Field | Type | Default | Description | | ----- | ---- | ------- | ----------- | | `logics` | `typing.List[typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]]` | [] | List of logic functions | -| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`profile_format_parsing`](../connector.py#L254) | 
Formatting function | +| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`profile_format_parsing`](../connector.py#L265) | Formatting function | | `read_mode` | `str` | ReadMode.sync | If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read. | ## Source Parameters @@ -19,7 +19,7 @@ retrieves profiles attachments from Bullhorn and Parses them and sends them to H | ----- | ---- | ------- | ----------- | | `client_id` :red_circle: | `str` | None | Client identifier for Bullhorn | | `client_secret` :red_circle: | `str` | None | Client secret identifier for Bullhorn | -| `password` :red_circle: | `str` | None | Passoword for Bullhorn login | +| `password` :red_circle: | `str` | None | Password for Bullhorn login | | `username` :red_circle: | `str` | None | Username for Bullhorn login | ## Destination Parameters diff --git a/src/hrflow_connectors/connectors/bullhorn/docs/push_application.md b/src/hrflow_connectors/connectors/bullhorn/docs/push_application.md new file mode 100644 index 000000000..4586a6320 --- /dev/null +++ b/src/hrflow_connectors/connectors/bullhorn/docs/push_application.md @@ -0,0 +1,73 @@ +# Push application +`HrFlow.ai Profiles` :arrow_right: `Bullhorn Applications` + +Retrieves profiles from Hrflow.ai and writes their applications to the Bullhorn source + + + +## Action Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `logics` | `typing.List[typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]]` | [] | List of logic functions | +| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_application`](../connector.py#L385) | Formatting function | +| `read_mode` | `str` | ReadMode.sync | If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. 
**The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read. | + +## Source Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `api_secret` :red_circle: | `str` | None | X-API-KEY used to access HrFlow.ai API | +| `api_user` :red_circle: | `str` | None | X-USER-EMAIL used to access HrFlow.ai API | +| `source_key` :red_circle: | `str` | None | HrFlow.ai source key | +| `profile_key` :red_circle: | `str` | None | HrFlow.ai profile key | + +## Destination Parameters + +| Field | Type | Default | Description | +| ----- | ---- | ------- | ----------- | +| `client_id` :red_circle: | `str` | None | Client identifier for Bullhorn | +| `client_secret` :red_circle: | `str` | None | Client secret identifier for Bullhorn | +| `password` :red_circle: | `str` | None | Password for Bullhorn login | +| `username` :red_circle: | `str` | None | Username for Bullhorn login | +| `job_id` :red_circle: | `str` | None | id for the job in Bullhorn | +| `status_when_created` :red_circle: | `str` | None | The status of the application when created in Bullhorn | +| `source` | `str` | None | The source of the application to be created in Bullhorn | + +:red_circle: : *required* + +## Example + +```python +import logging +from hrflow_connectors import Bullhorn +from hrflow_connectors.core import ReadMode + + +logging.basicConfig(level=logging.INFO) + + +Bullhorn.push_application( + workflow_id="some_string_identifier", + action_parameters=dict( + logics=[], + format=lambda *args, **kwargs: None # Put your code logic here, + read_mode=ReadMode.sync, + ), + origin_parameters=dict( + api_secret="your_api_secret", + api_user="your_api_user", + source_key="your_source_key", + profile_key="your_profile_key", + ), + target_parameters=dict( + client_id="your_client_id", + client_secret="your_client_secret", + password="your_password", + username="your_username", + 
job_id="your_job_id", + status_when_created="your_status_when_created", + source="your_source", + ) +) +``` \ No newline at end of file diff --git a/src/hrflow_connectors/connectors/bullhorn/docs/push_profile.md b/src/hrflow_connectors/connectors/bullhorn/docs/push_profile.md index e90169267..71822a86f 100644 --- a/src/hrflow_connectors/connectors/bullhorn/docs/push_profile.md +++ b/src/hrflow_connectors/connectors/bullhorn/docs/push_profile.md @@ -10,7 +10,7 @@ Writes a profile from Hrflow.ai Source to Bullhorn via the API | Field | Type | Default | Description | | ----- | ---- | ------- | ----------- | | `logics` | `typing.List[typing.Callable[[typing.Dict], typing.Optional[typing.Dict]]]` | [] | List of logic functions | -| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_profile`](../connector.py#L149) | Formatting function | +| `format` | `typing.Callable[[typing.Dict], typing.Dict]` | [`format_profile`](../connector.py#L160) | Formatting function | | `read_mode` | `str` | ReadMode.sync | If 'incremental' then `read_from` of the last run is given to Origin Warehouse during read. **The actual behavior depends on implementation of read**. In 'sync' mode `read_from` is neither fetched nor given to Origin Warehouse during read. 
| ## Source Parameters @@ -28,7 +28,7 @@ Writes a profile from Hrflow.ai Source to Bullhorn via the API | ----- | ---- | ------- | ----------- | | `client_id` :red_circle: | `str` | None | Client identifier for Bullhorn | | `client_secret` :red_circle: | `str` | None | Client secret identifier for Bullhorn | -| `password` :red_circle: | `str` | None | Passoword for Bullhorn login | +| `password` :red_circle: | `str` | None | Password for Bullhorn login | | `username` :red_circle: | `str` | None | Username for Bullhorn login | :red_circle: : *required* diff --git a/src/hrflow_connectors/connectors/bullhorn/utils/authentication.py b/src/hrflow_connectors/connectors/bullhorn/utils/authentication.py index a9eda43de..8cbab4c6b 100644 --- a/src/hrflow_connectors/connectors/bullhorn/utils/authentication.py +++ b/src/hrflow_connectors/connectors/bullhorn/utils/authentication.py @@ -1,29 +1,14 @@ -import json -import urllib.parse as urlparse -import urllib.request as urllib2 +from urllib.parse import parse_qs, urlparse import requests base_url = "https://auth.bullhornstaffing.com/oauth" -class AuthCodeRedirectHandler(urllib2.HTTPRedirectHandler): +def get_auth_code(username, password, client_id): """ - A bare bones redirect handler that pulls the auth code sent back - by OAuth off the query string of the redirect URI given in the - Location header. Does no checking for other errors or bad/missing - information. + Retrieve the authorization code by initiating the OAuth flow. 
""" - - def http_error_302(self, req, fp, code, msg, headers): - """handler for 302 responses that assumes a properly constructed - OAuth 302 response and pulls the auth code out of the header""" - qs = urlparse.urlparse(headers["Location"]).query - auth_code = urlparse.parse_qs(qs)["code"][0] - return auth_code - - -def build_auth_code_request(username, password, client_id): data = { "client_id": client_id, "response_type": "code", @@ -31,45 +16,95 @@ def build_auth_code_request(username, password, client_id): "password": password, "action": "Login", } + authorize_url = base_url + "/authorize" + response = requests.post(authorize_url, data=data, allow_redirects=True) + if response.ok: + redirect_url = response.url + parsed_url = urlparse(redirect_url) + auth_code = parse_qs(parsed_url.query)["code"][0] + return auth_code + raise Exception( + f"Authorization failed with status code {response.status_code}: {response.text}" + ) - encoded = urlparse.urlencode(data) - encoded = encoded.encode("utf-8") - req = urllib2.Request(url=base_url + "/authorize", data=encoded) - return req +def make_token_request(data): + """ + Make a request to obtain the OAuth access token. + """ + token_url = base_url + "/token" + response = requests.post(token_url, data=data) + if response.ok: + return response.json() + raise Exception( + f"Token request failed with status code {response.status_code}: {response.text}" + ) -def get_access_token(code, client_id, client_secret): + +def login_to_bullhorn(access_token): """ - Gets an OAuth access token given an OAuth authorization code + Log in to Bullhorn using the obtained access token. 
+ """ + login_url = "https://rest.bullhornstaffing.com/rest-services/login" + params = {"version": "2.0", "access_token": access_token["access_token"]} + response = requests.post(url=login_url, params=params) + + if response.ok: + return response.json() + + raise Exception( + f"Login to Bullhorn failed with status code {response.status_code}:" + f" {response.text}" + ) + + +def get_or_refresh_token( + grant_type, client_id, client_secret, ttl=None, code=None, refresh_token=None +): + """ + Gets or refreshes an OAuth access token based on the grant type. """ data = { - "grant_type": "authorization_code", + "grant_type": grant_type, "client_id": client_id, "client_secret": client_secret, - "code": code, } - encoded = urlparse.urlencode(data) - encoded = encoded.encode("utf-8") - - req = urllib2.Request(base_url + "/token", encoded) - return urllib2.urlopen(req).read() - + if grant_type == "authorization_code": + data["code"] = code + elif grant_type == "refresh_token": + data["refresh_token"] = refresh_token -def auth(username, password, client_id, client_secret): - req = build_auth_code_request(username, password, client_id) - opener = urllib2.build_opener(AuthCodeRedirectHandler) + # Add TTL if specified + if ttl: + data["ttl"] = ttl - auth_code = opener.open(req) - access_token = get_access_token(auth_code, client_id, client_secret) - access_token = access_token.decode("utf-8") - access_token = json.loads(access_token) + token_response = make_token_request(data) + # Login to Bullhorn and return the response + return login_to_bullhorn(token_response) - login_url = "https://rest.bullhornstaffing.com/rest-services/login" - params = {"version": "2.0", "access_token": access_token["access_token"]} - - response = requests.post(url=login_url, params=params) - response = response.json() - response["refresh_token"] = access_token["refresh_token"] - return response +def auth( + username, password, client_id, client_secret, refresh_token=None, auth_code=None +): + """ + 
Obtain the access token for authentication. + """ + if refresh_token: + access_token = get_or_refresh_token( + "refresh_token", + client_id, + client_secret, + ttl=604800, + refresh_token=refresh_token, + ) + elif auth_code: + access_token = get_or_refresh_token( + "authorization_code", client_id, client_secret, ttl=604800, code=auth_code + ) # 7 days in seconds + else: + auth_code = get_auth_code(username, password, client_id) + access_token = get_or_refresh_token( + "authorization_code", client_id, client_secret, ttl=604800, code=auth_code + ) + return access_token diff --git a/src/hrflow_connectors/connectors/bullhorn/warehouse.py b/src/hrflow_connectors/connectors/bullhorn/warehouse.py index 7fe3a1512..0ffe3570c 100644 --- a/src/hrflow_connectors/connectors/bullhorn/warehouse.py +++ b/src/hrflow_connectors/connectors/bullhorn/warehouse.py @@ -1,4 +1,7 @@ +import json +import time import typing as t +from datetime import datetime from logging import LoggerAdapter import requests @@ -17,28 +20,25 @@ from hrflow_connectors.core.warehouse import ReadMode -class WriteProfilesParameters(ParametersModel): +class BaseParameters(ParametersModel): client_id: str = Field( ..., description="Client identifier for Bullhorn", repr=False, field_type=FieldType.Auth, ) - client_secret: str = Field( ..., description="Client secret identifier for Bullhorn", repr=False, field_type=FieldType.Auth, ) - password: str = Field( ..., - description="Passoword for Bullhorn login", + description="Password for Bullhorn login", repr=False, field_type=FieldType.Auth, ) - username: str = Field( ..., description="Username for Bullhorn login", @@ -47,64 +47,58 @@ class WriteProfilesParameters(ParametersModel): ) -class ReadJobsParameters(ParametersModel): - client_id: str = Field( - ..., - description="Client identifier for Bullhorn", - repr=False, - field_type=FieldType.Auth, - ) +class WriteProfilesParameters(BaseParameters): + pass - client_secret: str = Field( + +class 
WriteApplicationsParameters(BaseParameters): + job_id: str = Field( ..., - description="Client secret identifier for Bullhorn", + description="id for the job in Bullhorn", repr=False, field_type=FieldType.Auth, ) - password: str = Field( + status_when_created: str = Field( ..., - description="Passoword for Bullhorn login", + description="The status of the application when created in Bullhorn", repr=False, field_type=FieldType.Auth, ) - username: str = Field( - ..., - description="Username for Bullhorn login", + source: str = Field( + None, + description="The source of the application to be created in Bullhorn", repr=False, field_type=FieldType.Auth, ) -class ReadProfileParameters(ParametersModel): - client_id: str = Field( +class ReadJobsParameters(BaseParameters): + last_modified_date: str = Field( ..., - description="Client identifier for Bullhorn", + description="Last Modified Date in timestamp", repr=False, field_type=FieldType.Auth, ) - client_secret: str = Field( + fields: str = Field( ..., - description="Client secret identifier for Bullhorn", + description="Fields to be retrieved from Bullhorn", repr=False, field_type=FieldType.Auth, ) - password: str = Field( + query: str = Field( ..., - description="Passoword for Bullhorn login", + description="the query parameters", repr=False, field_type=FieldType.Auth, ) - username: str = Field( - ..., - description="Username for Bullhorn login", - repr=False, - field_type=FieldType.Auth, - ) + +class ReadProfileParameters(BaseParameters): + pass def write( @@ -171,47 +165,296 @@ def write( return failed_profiles +def authenticate(parameters): + return auth( + parameters.username, + parameters.password, + parameters.client_id, + parameters.client_secret, + ) + + +def make_request(method, url, params, adapter, json=None): + response = method(url, params=params, data=json) + if response.status_code == 401: + adapter.info("Auth token expired, regenerating...") + auth_info = authenticate() + params["BhRestToken"] = 
auth_info["BhRestToken"] + response = method(url, params=params, data=json) + return response + + +def handle_response(response, adapter): + if not response.ok: + adapter.error( + f"Request failed, status_code={response.status_code}," + f" response={response.text}" + ) + return None + return response.json() + + +def search_entity(entity, rest_url, bh_rest_token, query, fields, adapter): + search_url = f"{rest_url}search/{entity}" + params = { + "BhRestToken": bh_rest_token, + "query": query, + "fields": fields, + "sort": "id", + } + response = make_request(requests.get, search_url, params, adapter) + return handle_response(response, adapter) + + +def create_or_update_entity(entity, rest_url, params, data, adapter, entity_id=None): + url = f"{rest_url}entity/{entity}" + method = requests.post if entity_id else requests.put + if entity_id: + url = f"{url}/{entity_id}" + response = make_request(method, url, params, adapter, json.dumps(data)) + return handle_response(response, adapter) + + +def check_entity_files(entity, rest_url, params, entity_id, adapter): + url = f"{rest_url}entityFiles/{entity}/{entity_id}" + response = requests.get(url, params=params) + return handle_response(response, adapter) + + +def write_application( + adapter: LoggerAdapter, + parameters: WriteApplicationsParameters, + profiles: t.Iterable[t.Dict], +) -> t.List[t.Dict]: + failed_profiles = [] + auth_info = authenticate(parameters) + rest_url = auth_info["restUrl"] + bh_rest_token = auth_info["BhRestToken"] + params = {"BhRestToken": bh_rest_token} + adapter.info(f"connexion info {params}, rest_url: {rest_url}") + + for profile in profiles: + attachment = profile.pop("attachment") + profile["source"] = parameters.source or profile.get("source") + profile["status"] = parameters.status_when_created or profile.get("status") + email = profile["email"] + adapter.info(f"checking if candidate with {email} already exists") + search_results = search_entity( + "Candidate", + rest_url, + bh_rest_token, 
+ f"(email:{email} OR email2:{email}) AND isDeleted:0", + ( + "id,isDeleted,dateAdded,status,source,email," + "firstName,lastName,name,mobile,address" + ), + adapter, + ) + + if not search_results: + failed_profiles.append(profile) + continue + adapter.info(f"search profile response {search_results}") + candidate_exists = search_results["count"] > 0 + candidate_data = search_results["data"][0] if candidate_exists else {} + candidate_id = candidate_data.get("id") if candidate_exists else None + + if candidate_exists: + profile.update( + { + "firstName": candidate_data.get("firstName") or profile.get( + "firstName" + ), + "lastName": candidate_data.get("lastName") or profile.get( + "lastName" + ), + "name": candidate_data.get("name") or profile.get("name"), + "address": candidate_data.get("address") or profile.get("address"), + "mobile": candidate_data.get("mobile") or profile.get("mobile"), + "status": candidate_data.get("status") or profile.get("status"), + } + ) + adapter.info("creating or updating the candidate") + candidate_response = create_or_update_entity( + "Candidate", rest_url, params, profile, adapter, candidate_id + ) + if not candidate_response: + failed_profiles.append(profile) + continue + adapter.info(f"candidate creation response {candidate_response}") + if not candidate_exists: + candidate_id = candidate_response.get("changedEntityId") + + attachment_exists = False + if candidate_exists and attachment: + entity_files = check_entity_files( + "Candidate", rest_url, params, candidate_id, adapter + ) + if entity_files: + attachments = entity_files.get("EntityFiles", []) + if attachments and attachment["name"] == attachments[0]["name"]: + attachment_exists = True + adapter.info(f"attachment for the candidate exists {attachment_exists}") + if not attachment_exists and attachment: + attachment_response = make_request( + requests.put, + f"{rest_url}file/Candidate/{candidate_id}", + params, + adapter, + json.dumps(attachment), + ) + if not 
handle_response(attachment_response, adapter): + failed_profiles.append(profile) + continue + attachment_response = handle_response(attachment_response, adapter) + adapter.info(f"attachment response {attachment_response}") + adapter.info( + "Verifying if candidate had already applied for the job" + f" {parameters.job_id}" + ) + job_submission_results = search_entity( + "JobSubmission", + rest_url, + bh_rest_token, + f"candidate.id:{candidate_id} AND jobOrder.id:{parameters.job_id}", + "id,status,dateAdded", + adapter, + ) + + if not job_submission_results: + failed_profiles.append(profile) + continue + adapter.info(f"search job_submission response {job_submission_results}") + job_submission_exists = job_submission_results["count"] > 0 + job_submission_id = ( + job_submission_results["data"][0]["id"] if job_submission_exists else None + ) + + job_submission_payload = { + "candidate": {"id": candidate_id}, + "jobOrder": {"id": parameters.job_id}, + "status": parameters.status_when_created, + "dateWebResponse": int(time.time() * 1000), + } + adapter.info("Creating or updating if candidate jobSubmission") + job_submission_response = create_or_update_entity( + "JobSubmission", + rest_url, + params, + job_submission_payload, + adapter, + job_submission_id, + ) + if not job_submission_response: + failed_profiles.append(profile) + adapter.info(f"creation of job_submission response {job_submission_response}") + return failed_profiles + + def read_jobs( adapter: LoggerAdapter, parameters: ReadJobsParameters, read_mode: t.Optional[ReadMode] = None, read_from: t.Optional[str] = None, ) -> t.Iterable[t.Dict]: + start = 0 + auth_retries = 0 authentication = auth( parameters.username, parameters.password, parameters.client_id, parameters.client_secret, ) - start = 0 - - while True: - jobs_url = ( - authentication["restUrl"] - + "search/JobOrder?query=(isOpen:true)&fields=*&BhRestToken=" - + authentication["BhRestToken"] - + "&start=" - + str(start) + if read_mode is 
ReadMode.sync: + if parameters.last_modified_date is None: + raise Exception("last_modified_date cannot be None in ReadMode.sync") + last_modified_date = parameters.last_modified_date + else: + if parameters.last_modified_date is not None: + adapter.warning( + "last_modified_date is ignored in ReadMode.incremental, using" + " read_from instead" + ) + if read_from: + try: + read_from = json.loads(read_from) + last_modified_date = read_from["last_modified_date"] + last_id = read_from["last_id"] + except json.JSONDecodeError as e: + raise Exception(f"Failed to JSON parse read_from={read_from} error={e}") + except KeyError as e: + raise Exception( + "Failed to find expected key in" + f" read_from={read_from} error={repr(e)}" + ) + else: + last_modified_date = parameters.last_modified_date + + last_modified_date_filter = transform_timestamp(last_modified_date) + if not last_modified_date_filter: + raise Exception( + "error while applying a transformation date on last modified date to" + " perform filtering" ) - response = requests.get(url=jobs_url) - if response.status_code // 100 != 2: - adapter.error( - "Failed to pull jobs from Bullhorn status_code={} response={}".format( - response.status_code, response.text - ) + while True: + try: + query = ( + f"{parameters.query} AND" + f" dateLastModified:[{last_modified_date_filter}%20TO%20*]" + ) + jobs_url = ( + authentication["restUrl"] + + f"search/JobOrder?query={query}&fields=" + + parameters.fields + + "&BhRestToken=" + + authentication["BhRestToken"] + + "&start=" + + str(start) ) - raise Exception("Failed to pull jobs from Bullhorn") - response = response.json() - - start = response["start"] + response["count"] - data = response["data"] - for job in data: - yield job + response = requests.get(url=jobs_url) - if start >= response["total"]: - break + response = response.json() + start = response["start"] + response["count"] + data = response["data"] + + for job in data: + if ( + job["dateLastModified"] == 
last_modified_date + and job["id"] <= last_id + ): + adapter.info("job with id <= last_id") + continue + yield job + + if start >= response["total"]: + break + + except requests.HTTPError as e: + if e.response.status_code == 401: + adapter.info( + "Received 401 error. Retrying authentication to continue fetching" + " jobs." + ) + if auth_retries > 2: + raise Exception( + f" retries the authentication {auth_retries}" + " will stop the execution" + ) + + authentication = auth( + parameters.username, + parameters.password, + parameters.client_id, + parameters.client_secret, + refresh_token=authentication["refresh_token"], + ) + auth_retries += 1 + continue + else: + adapter.error("Failed to fetch jobs from Bullhorn.") + raise e def read_profiles_parsing( @@ -378,6 +621,22 @@ def read_profiles( break +def transform_timestamp(timestamp: t.Optional[t.Union[float, int]]) -> t.Optional[str]: + if not timestamp: + return None + # Convert the Unix timestamp (in milliseconds) to a datetime object + dt = datetime.fromtimestamp(int(timestamp) / 1000) + # Format the datetime object to something like 20221215121030 + transformed_date = dt.strftime("%Y%m%d%H%M%S") + return transformed_date + + +def item_to_read_from_job(item: t.Dict) -> str: + return json.dumps( + dict(last_modified_date=item["dateLastModified"], last_id=item["id"]) + ) + + BullhornProfileWarehouse = Warehouse( name="Bullhorn Profiles", data_schema=BullhornProfile, @@ -390,6 +649,15 @@ def read_profiles( ), ) +BullhornApplicationWarehouse = Warehouse( + name="Bullhorn Applications", + data_schema=BullhornProfile, + data_type=DataType.profile, + write=WarehouseWriteAction( + parameters=WriteApplicationsParameters, function=write_application, endpoints=[] + ), +) + BullhornProfileParsingWarehouse = Warehouse( name="Bullhorn Profiles", data_schema=BullhornProfile, @@ -408,4 +676,6 @@ def read_profiles( function=read_jobs, endpoints=[], ), + supports_incremental=True, + item_to_read_from=item_to_read_from_job, ) diff 
--git a/src/hrflow_connectors/core/connector.py b/src/hrflow_connectors/core/connector.py index 6d41fa487..66cc9d9af 100644 --- a/src/hrflow_connectors/core/connector.py +++ b/src/hrflow_connectors/core/connector.py @@ -310,6 +310,7 @@ class ActionName(str, enum.Enum): push_score_list = "push_score_list" catch_profile = "catch_profile" catch_job = "catch_job" + push_application = "push_application" # TalentSoft actions applicant_new = "applicant_new" applicant_resume_update = "applicant_resume_update"