diff --git a/CLAUDE.md b/CLAUDE.md index 99895a3..5295f82 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -22,12 +22,13 @@ This is a Python library for WhereScape RED, a data warehouse automation tool. T - `WhereScapeLogHandler` buffers logs and outputs them with WhereScape-specific exit codes on flush - Exit codes: `1` (success), `-1` (warnings), `-2` (errors), `-3` (critical) - Logs to both console (for WhereScape) and rotating file handler (Saturday night rotation) -- Must be initialized via `initialise_wherescape_logging(wherescape_instance)` +- Automatically initialized when WhereScape instance is created (in `WhereScape.__init__()`) - Sets up unhandled exception logging **helper_functions.py** - Shared utilities: - `prepare_metadata_query()`: Generates SQL to create/update load table column metadata in WhereScape repository - `create_column_names()`: Slugifies display names to valid column names (max 59 chars) +- `create_legacy_column_names()`: Legacy version that appends numbers to all columns (preserved for backward compatibility with existing tables) - `flatten_json()`: Flattens nested JSON responses from APIs - `filter_dict()` and `fill_out_empty_keys()`: Clean and normalize API responses @@ -58,6 +59,13 @@ All connectors follow a consistent pattern with three components: - **hubspot**: Companies, contacts, deals, tickets, engagements (supports multiple environments) - **jira**: Projects and issues (full and incremental loads) +**Note:** The HubSpot connector has a unique structure that deviates from the standard three-file pattern: +- `collect_data.py` - Main entry point (replaces standard `{source}_load_data.py`) +- `process_data.py` - Processes and sends data to HubSpot (bi-directional sync) +- `ticket_updates.py` - Specialized operations (merge tickets, fix company associations) +- `utils.py` - Shared utilities for HubSpot operations +- Supports bidirectional sync (reading from WhereScape, writing back to HubSpot) + ### Validators **validators/fact_dimension_join.py** - Data quality validation: @@ -125,7 +133,7 @@ All environment variables start with `WSL_` prefix: ### Running Tests -There is no formal test suite. The `test.py` file in the root can be used for ad-hoc testing with a local environment setup. +There is no formal test suite. Individual connectors may have test files (e.g., `anythingllm_test.py`) for ad-hoc testing with a local environment setup. ### Code Formatting and Linting @@ -142,7 +150,7 @@ ruff format . 
``` Configuration details: -- Target: Python 3.12 +- Target: Python 3.14 - Line length: 119 characters - Enabled rules: pycodestyle (E/W), pyflakes (F), isort (I), pep8-naming (N), flake8-bugbear (B), flake8-comprehensions (C4), flake8-simplify (SIM), pyupgrade (UP) - See [pyproject.toml](pyproject.toml) for complete configuration diff --git a/pyproject.toml b/pyproject.toml index 1e15c52..164e608 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.ruff] -# Target Python 3.12 -target-version = "py312" +# Target Python 3.14 +target-version = "py314" # Set line length to match common Python conventions line-length = 119 diff --git a/requirements-dev.txt b/requirements-dev.txt index 223e82c..296e4a6 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,4 +2,4 @@ # Install with: pip install -r requirements-dev.txt # Code linting and formatting -ruff>=0.14.0,<0.15.0 \ No newline at end of file +ruff>=0.15.1,<0.16.0 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index cf307f1..b7d6c48 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ -hubspot-api-client==8.2.1 -notion-client==2.2.1 -numpy==1.26.4 -pandas==1.3.4 -pyodbc==5.1.0 +hubspot-api-client==12.0.0 +notion-client==3.0.0 +numpy==2.4.2 +pandas==3.0.0 +pyodbc==5.3.0 python-slugify==8.0.4 -requests==2.32.3 +requests==2.32.5 diff --git a/validators/fact_dimension_join.py b/validators/fact_dimension_join.py index 094270b..7a1c804 100644 --- a/validators/fact_dimension_join.py +++ b/validators/fact_dimension_join.py @@ -1,4 +1,5 @@ """Module with function to validate fact-dimension joins.""" + import csv import logging import os @@ -23,9 +24,7 @@ def check_fact_dimension_join(output_file_location=""): wherescape = WhereScape() start_time = datetime.now() - logging.info( - f"Start time: {start_time.strftime('%Y-%m-%d %H:%M:%S')} for check_fact_dimension_join" - ) + logging.info(f"Start time: {start_time.strftime('%Y-%m-%d %H:%M:%S')} for check_fact_dimension_join") date = datetime.now().strftime("%Y-%m-%d") diff --git a/wherescape/connectors/anythingllm/anythingllm_create_metadata.py b/wherescape/connectors/anythingllm/anythingllm_create_metadata.py index e13aa21..2abad46 100644 --- a/wherescape/connectors/anythingllm/anythingllm_create_metadata.py +++ b/wherescape/connectors/anythingllm/anythingllm_create_metadata.py @@ -46,9 +46,7 @@ def anythingllm_create_metadata(): """Create metadata for AnythingLLM chats load table.""" start_time = datetime.now() - logging.info( - f"Start time: {start_time.strftime('%Y-%m-%d %H:%M:%S')} for anythingllm_create_metadata" - ) + logging.info(f"Start time: {start_time.strftime('%Y-%m-%d %H:%M:%S')} for anythingllm_create_metadata") # Initialise WhereScape (logging is initialised through WhereScape object) wherescape = WhereScape() @@ -119,12 +117,8 @@ def anythingllm_create_metadata(): # Execute the SQL wherescape.push_to_meta(sql) - wherescape.main_message = ( - f"Created {len(columns) + 2} columns in metadata table for embed {embed_uuid}" - ) + wherescape.main_message = f"Created {len(columns) + 2} columns in metadata table for embed {embed_uuid}" # Final logging end_time = datetime.now() - logging.info( - f"Time elapsed: {(end_time - start_time).seconds} seconds for anythingllm_create_metadata" - ) + logging.info(f"Time elapsed: {(end_time - start_time).seconds} seconds for anythingllm_create_metadata") diff --git a/wherescape/connectors/anythingllm/anythingllm_load_data.py 
b/wherescape/connectors/anythingllm/anythingllm_load_data.py index 8948203..ef9bc63 100644 --- a/wherescape/connectors/anythingllm/anythingllm_load_data.py +++ b/wherescape/connectors/anythingllm/anythingllm_load_data.py @@ -20,9 +20,7 @@ def anythingllm_load_data_chats(): # First initialise WhereScape to setup logging logging.info("Connecting to WhereScape") wherescape_instance = WhereScape() - logging.info( - f"Start time: {start_time.strftime('%Y-%m-%d %H:%M:%S')} for anythingllm_load_data_chats" - ) + logging.info(f"Start time: {start_time.strftime('%Y-%m-%d %H:%M:%S')} for anythingllm_load_data_chats") # Get the relevant values from WhereScape api_key = os.getenv("WSL_SRCCFG_APIKEY") @@ -91,15 +89,11 @@ def anythingllm_load_data_chats(): logging.info(f"Successfully inserted {len(rows)} rows in to the load table.") # Add success message - wherescape_instance.main_message = ( - f"Successfully inserted {len(rows)} rows in to the load table." - ) + wherescape_instance.main_message = f"Successfully inserted {len(rows)} rows in to the load table." else: logging.info("No object changes received from AnythingLLM") # Final logging end_time = datetime.now() - logging.info( - f"Time elapsed: {(end_time - start_time).seconds} seconds for anythingllm_load_data_chats" - ) + logging.info(f"Time elapsed: {(end_time - start_time).seconds} seconds for anythingllm_load_data_chats") diff --git a/wherescape/connectors/anythingllm/anythingllm_test.py b/wherescape/connectors/anythingllm/anythingllm_test.py index 8ea78c3..6ebd5a9 100644 --- a/wherescape/connectors/anythingllm/anythingllm_test.py +++ b/wherescape/connectors/anythingllm/anythingllm_test.py @@ -142,27 +142,25 @@ def test_flatten_chat(): "embed_id": 456, "usersId": 789, "createdAt": "2024-01-15T10:30:00Z", - "response": json.dumps({ - "text": "The weather is sunny today.", - "type": "text", - "attachments": ["file1.pdf", "file2.png"], - "sources": [ - {"title": "Weather Report", "url": "https://example.com/weather"}, - {"title": "Climate Data", "url": "https://example.com/climate"} - ], - "metrics": { - "completion_tokens": 15, - "prompt_tokens": 8, - "total_tokens": 23, - "outputTps": 12.5, - "duration": 1200 + "response": json.dumps( + { + "text": "The weather is sunny today.", + "type": "text", + "attachments": ["file1.pdf", "file2.png"], + "sources": [ + {"title": "Weather Report", "url": "https://example.com/weather"}, + {"title": "Climate Data", "url": "https://example.com/climate"}, + ], + "metrics": { + "completion_tokens": 15, + "prompt_tokens": 8, + "total_tokens": 23, + "outputTps": 12.5, + "duration": 1200, + }, } - }), - "connection_information": json.dumps({ - "host": "example.com", - "ip": "192.168.1.100", - "username": "testuser" - }) + ), + "connection_information": json.dumps({"host": "example.com", "ip": "192.168.1.100", "username": "testuser"}), } try: @@ -172,11 +170,26 @@ def test_flatten_chat(): # Verify all expected fields are present expected_fields = [ - "id", "prompt", "session_id", "include", "embed_id", "user_id", "created_at", - "response_text", "response_type", "response_attachments", "response_sources", - "response_sources_count", "metrics_completion_tokens", "metrics_prompt_tokens", - "metrics_total_tokens", "metrics_output_tps", "metrics_duration", - "connection_host", "connection_ip", "connection_username" + "id", + "prompt", + "session_id", + "include", + "embed_id", + "user_id", + "created_at", + "response_text", + "response_type", + "response_attachments", + "response_sources", + "response_sources_count", + 
"metrics_completion_tokens", + "metrics_prompt_tokens", + "metrics_total_tokens", + "metrics_output_tps", + "metrics_duration", + "connection_host", + "connection_ip", + "connection_username", ] logging.info(f"Fields in result: {len(result)}") diff --git a/wherescape/connectors/anythingllm/anythingllm_wrapper.py b/wherescape/connectors/anythingllm/anythingllm_wrapper.py index 3408f05..4dd5467 100644 --- a/wherescape/connectors/anythingllm/anythingllm_wrapper.py +++ b/wherescape/connectors/anythingllm/anythingllm_wrapper.py @@ -90,7 +90,7 @@ def _flatten_chat(chat): "metrics_output_tps": response_obj.get("metrics", {}).get("outputTps"), "metrics_duration": response_obj.get("metrics", {}).get("duration"), } - except (json.JSONDecodeError, TypeError): + except json.JSONDecodeError, TypeError: # If response is not valid JSON, store as-is response_data = { "response_text": chat.get("response", ""), @@ -114,7 +114,7 @@ def _flatten_chat(chat): "connection_ip": conn_obj.get("ip"), "connection_username": conn_obj.get("username"), } - except (json.JSONDecodeError, TypeError): + except json.JSONDecodeError, TypeError: connection_data = { "connection_host": None, "connection_ip": None, diff --git a/wherescape/connectors/gitlab/__init__.py b/wherescape/connectors/gitlab/__init__.py index a346bc4..b4f1aa4 100644 --- a/wherescape/connectors/gitlab/__init__.py +++ b/wherescape/connectors/gitlab/__init__.py @@ -1,4 +1,5 @@ """ Module that takes care of the connection to Gitlab. """ + from .gitlab_wrapper import Gitlab # noqa: E402 diff --git a/wherescape/connectors/gitlab/gitlab_wrapper.py b/wherescape/connectors/gitlab/gitlab_wrapper.py index e0a1496..8b2db0a 100644 --- a/wherescape/connectors/gitlab/gitlab_wrapper.py +++ b/wherescape/connectors/gitlab/gitlab_wrapper.py @@ -1,9 +1,11 @@ """Module to fetch data (e.g. tickets, projects, pipelines) from the Gitlab API""" -import requests + import logging +import requests + +from ...helper_functions import fill_out_empty_keys, filter_dict, flatten_json from .gitlab_data_types_column_names import COLUMN_NAMES_AND_DATA_TYPES -from ...helper_functions import flatten_json, filter_dict, fill_out_empty_keys class Gitlab: @@ -88,9 +90,7 @@ def paginate_through_resource( break if response.status_code == 404: - logging.info( - f"{resource_api}\n Resource not found." 
- ) + logging.info(f"{resource_api}\n Resource not found.") break response.raise_for_status() @@ -123,13 +123,9 @@ def get_projects(self): keys_to_keep = COLUMN_NAMES_AND_DATA_TYPES["projects"].keys() resource_api = "projects" - params = { - "order_by": "id" - } + params = {"order_by": "id"} - all_projects = self.paginate_through_resource( - resource_api, keys_to_keep, params - ) + all_projects = self.paginate_through_resource(resource_api, keys_to_keep, params) return all_projects def get_release_tags(self): @@ -185,9 +181,7 @@ def get_issues(self): project_id = project[0] resource_api = f"projects/{project_id}/issues" - project_issues = self.paginate_through_resource( - resource_api, keys_to_keep, params - ) + project_issues = self.paginate_through_resource(resource_api, keys_to_keep, params) all_issues.extend(project_issues) return all_issues @@ -211,9 +205,7 @@ def get_pipelines(self): for project in self.projects: project_id = project[0] resource_api = f"projects/{project_id}/pipelines" - project_pipelines = self.paginate_through_resource( - resource_api, keys_to_keep, params - ) + project_pipelines = self.paginate_through_resource(resource_api, keys_to_keep, params) all_pipelines.extend(project_pipelines) return all_pipelines @@ -237,9 +229,7 @@ def get_merge_requests(self): for project in self.projects: project_id = project[0] resource_api = f"projects/{project_id}/merge_requests" - project_merge_requests = self.paginate_through_resource( - resource_api, keys_to_keep, params - ) + project_merge_requests = self.paginate_through_resource(resource_api, keys_to_keep, params) all_merge_requests.extend(project_merge_requests) return all_merge_requests diff --git a/wherescape/connectors/gitlab/python_gitlab_create_metadata.py b/wherescape/connectors/gitlab/python_gitlab_create_metadata.py index baae72d..f63f2d6 100644 --- a/wherescape/connectors/gitlab/python_gitlab_create_metadata.py +++ b/wherescape/connectors/gitlab/python_gitlab_create_metadata.py @@ -1,13 +1,13 @@ -from datetime import datetime import logging +from datetime import datetime -from .gitlab_data_types_column_names import COLUMN_NAMES_AND_DATA_TYPES from ... import WhereScape from ...helper_functions import ( - prepare_metadata_query, create_column_names, create_display_names, + prepare_metadata_query, ) +from .gitlab_data_types_column_names import COLUMN_NAMES_AND_DATA_TYPES def gitlab_create_metadata_smart(): diff --git a/wherescape/connectors/gitlab/python_gitlab_high_water_mark.py b/wherescape/connectors/gitlab/python_gitlab_high_water_mark.py index baf5179..cb8cbe2 100644 --- a/wherescape/connectors/gitlab/python_gitlab_high_water_mark.py +++ b/wherescape/connectors/gitlab/python_gitlab_high_water_mark.py @@ -1,5 +1,5 @@ -from datetime import datetime import logging +from datetime import datetime from ... 
import WhereScape @@ -14,17 +14,11 @@ def gitlab_next_high_water_mark(): """ wherescape_instance = WhereScape() next_high_water_mark = datetime.today().isoformat(timespec="seconds") - current_high_water_mark = wherescape_instance.read_parameter( - "gitlab_high_water_mark" - ) + current_high_water_mark = wherescape_instance.read_parameter("gitlab_high_water_mark") logging.info(f"Current high water mark is {current_high_water_mark}") logging.info(f"Next high water mark will be {next_high_water_mark}") - wherescape_instance.main_message = ( - f"Next high water mark will be {next_high_water_mark}" - ) - wherescape_instance.write_parameter( - "gitlab_high_water_mark_next", next_high_water_mark - ) + wherescape_instance.main_message = f"Next high water mark will be {next_high_water_mark}" + wherescape_instance.write_parameter("gitlab_high_water_mark_next", next_high_water_mark) def gitlab_update_high_water_mark(): @@ -35,11 +29,7 @@ def gitlab_update_high_water_mark(): """ wherescape_instance = WhereScape() - next_high_water_mark = wherescape_instance.read_parameter( - "gitlab_high_water_mark_next" - ) + next_high_water_mark = wherescape_instance.read_parameter("gitlab_high_water_mark_next") wherescape_instance.write_parameter("gitlab_high_water_mark", next_high_water_mark) - wherescape_instance.main_message = ( - f"High water mark is set to {next_high_water_mark}" - ) + wherescape_instance.main_message = f"High water mark is set to {next_high_water_mark}" logging.info(f"High water mark is set to {next_high_water_mark}") diff --git a/wherescape/connectors/gitlab/python_gitlab_load_data.py b/wherescape/connectors/gitlab/python_gitlab_load_data.py index 5a04bcb..88bae40 100644 --- a/wherescape/connectors/gitlab/python_gitlab_load_data.py +++ b/wherescape/connectors/gitlab/python_gitlab_load_data.py @@ -121,9 +121,7 @@ def gitlab_load_data(wherescape_instance, load_type, is_legacy=False): # Execute the sql wherescape_instance.push_many_to_target(sql, rows) logging.info(f"{len(rows)} rows successfully inserted in {table_name}") - wherescape_instance.main_message = ( - f"{load_type.capitalize()} successfully loaded {len(rows)} rows" - ) + wherescape_instance.main_message = f"{load_type.capitalize()} successfully loaded {len(rows)} rows" else: logging.info(f"No modified values found for {load_type.capitalize()}") wherescape_instance.main_message = f"No modified values found for {load_type.capitalize()}" diff --git a/wherescape/connectors/hubspot/__init__.py b/wherescape/connectors/hubspot/__init__.py index 6ac0880..8a13fca 100644 --- a/wherescape/connectors/hubspot/__init__.py +++ b/wherescape/connectors/hubspot/__init__.py @@ -1,4 +1,5 @@ """ Module that takes care of the connection to Hubspot """ + from .hubspot_wrapper import Hubspot diff --git a/wherescape/connectors/hubspot/collect_data.py b/wherescape/connectors/hubspot/collect_data.py index 6b79881..0988a62 100644 --- a/wherescape/connectors/hubspot/collect_data.py +++ b/wherescape/connectors/hubspot/collect_data.py @@ -35,9 +35,7 @@ def hubspot_load_data(): logging.info("hubspot update done") -def hubspot_get_token( - wherescape_instance: WhereScape, table_name: str, develop_env: bool -): +def hubspot_get_token(wherescape_instance: WhereScape, table_name: str, develop_env: bool): """ Function to get the hubspot access token from the table. First trying with environemnt specification from table_name. 
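For readers skimming the HubSpot connector changes: a hedged sketch of the environment-first token lookup that the `hubspot_get_token()` docstring above describes (try an environment-specific WhereScape parameter, then fall back to a generic one). The parameter names and the `table_name` suffix convention are illustrative assumptions, not the repository's actual implementation.

```python
def get_hubspot_token_sketch(wherescape_instance, table_name: str, develop_env: bool) -> str | None:
    """Illustrative only: resolve a HubSpot access token from WhereScape parameters."""
    # Assumed convention: the environment is the last suffix of the load table name,
    # e.g. "load_hubspot_companies_dev" -> "dev"; develop_env forces a development token.
    environment = "develop" if develop_env else table_name.rsplit("_", 1)[-1]
    # Try the environment-specific parameter first, then a generic fallback (names are hypothetical).
    for parameter_name in (f"hubspot_access_token_{environment}", "hubspot_access_token"):
        token = wherescape_instance.read_parameter(parameter_name)
        if token:
            return token
    return None
```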
diff --git a/wherescape/connectors/hubspot/hubspot_wrapper.py b/wherescape/connectors/hubspot/hubspot_wrapper.py index a46edb9..03e3403 100644 --- a/wherescape/connectors/hubspot/hubspot_wrapper.py +++ b/wherescape/connectors/hubspot/hubspot_wrapper.py @@ -5,12 +5,12 @@ import hubspot.crm from hubspot.client import Client from hubspot.crm import ( - AssociationType, - associations, - companies, - contacts, - deals, - properties, + AssociationType, + associations, + companies, + contacts, + deals, + properties, tickets, ) @@ -25,6 +25,7 @@ - batch_input_map (map) map of the batch_inputs referring to the different classes designed for the different HubSpot classes """ + class HubspotObjectEnum(StrEnum): COMPANIES = auto() CONTACTS = auto() @@ -72,9 +73,7 @@ def send_patch(self, properties: list, hs_object: str): batch_api = getattr(self.client.crm, hs_object).batch_api error_api = getattr(hubspot.crm, HubspotObjectEnum(hs_object)) try: - response = batch_api.update( - batch_input_simple_public_object_batch_input=batch_input - ) + response = batch_api.update(batch_input_simple_public_object_batch_input=batch_input) except error_api.ApiException as e: logging.error(f"Exception when calling batch_api->update: {e}\n") return None @@ -88,7 +87,7 @@ def send_patch(self, properties: list, hs_object: str): pass return response - def update_batch(self, object_items: list, hs_object:str): + def update_batch(self, object_items: list, hs_object: str): """ Method that updates a batch of items for a Hubspot object. @@ -110,7 +109,7 @@ def update_batch(self, object_items: list, hs_object:str): response.append(api_batch.update(input_batch)) except api_error.ApiException as e: logging.error(f"Exception when calling {hs_object} batch_api->update\n {e}") - return None #stopping the program + return None # stopping the program del object_items[:100] # When less than 100, do all in one go. try: @@ -132,7 +131,7 @@ def get_object(self, record_id: str, hs_object: str, properties: list = []): - properties(list): optional. list of properties retreived with the object. Returns: - - Hubspot object + - Hubspot object """ basic_api = getattr(self.client.crm, HubspotObjectEnum(hs_object)).basic_api api_error = getattr(hubspot.crm, HubspotObjectEnum(hs_object)) @@ -141,7 +140,7 @@ def get_object(self, record_id: str, hs_object: str, properties: list = []): return response except api_error.ApiException as e: logging.error(f"An exception occured when calling {hs_object} batch_api_>update\n {e}") - + def get_property_names(self, object_name: str): """ Function to get the property names of an object type (i.e. companies). @@ -154,9 +153,7 @@ def get_property_names(self, object_name: str): """ property_names = [] try: - api_response = self.client.crm.properties.core_api.get_all( - object_type=object_name - ) + api_response = self.client.crm.properties.core_api.get_all(object_type=object_name) api_results = api_response.to_dict() for result in api_results["results"]: @@ -204,16 +201,17 @@ def get_all( return results def get_associations( - self, id_: str, - object_type: str, - associated_object_type: str, + self, + id_: str, + object_type: str, + associated_object_type: str, ): """ Method to retreive all associations of a specified object type. Params: - id_ (str): hubspot record id of specified object. - - object-type (str): hubspot object type. + - object-type (str): hubspot object type. - associated_object_type (str): hubspot object of the associations to be retrieved. 
Returns: @@ -239,18 +237,18 @@ def get_associations( after=response.paging.next.after, ) results.extend(response.results) - + return results except error_api as e: logging.error(f"Exception when calling basic_api->create: {e}\n") - + def filtered_search( - self, - hs_object: str, - filters: list = [], - properties: list = [] , - associations: list = [], + self, + hs_object: str, + filters: list = [], + properties: list = [], + associations: list = [], ) -> list: """ Method to find one or more objects based on provided filters. @@ -269,14 +267,12 @@ def filtered_search( "limit": 100, "properties": properties, "associations": associations, - "filterGroups": [{ - "filters": filters - }] + "filterGroups": [{"filters": filters}], } simple_input_class = get_search_input_class(hs_object) if not simple_input_class: return - + error_api = getattr(hubspot.crm, HubspotObjectEnum(hs_object)) search_api = getattr(self.client.crm, hs_object).search_api try: @@ -285,21 +281,18 @@ def filtered_search( results = response.results while response.paging: search_request["after"] = response.paging.next.after - response = search_api.do_search( - public_object_search_request=search_request - ) + response = search_api.do_search(public_object_search_request=search_request) results.extend(response.results) - sleep(0.1) # too fast results in error - + sleep(0.1) # too fast results in error + if results: logging.info(f"{len(results)} items found.") return results - + except error_api.ApiException as e: logging.error(f"An error occured while doing a filtered search: {e}") - def merge_tickets(self, ticket_a, ticket_b) -> tuple: """ This method merges the properties into tickets into the ticket that is the oldest. @@ -319,9 +312,9 @@ def merge_tickets(self, ticket_a, ticket_b) -> tuple: for property_ in to_remove.properties: if to_remove.properties[property_] is not None and to_keep.properties[property_] is not None: if property_.startswith("hs_") or is_date(to_keep.properties[property_]): - pass # Ignoring dates and hubspot owned properties (set by hubspot). + pass # Ignoring dates and hubspot owned properties (set by hubspot). elif to_remove.properties[property_] == to_keep.properties[property_]: - pass # No change needed if the properties are the same. + pass # No change needed if the properties are the same. elif property_ == "content": # For content, we add them together to keep all content. to_keep.properties[property_] += to_remove.properties[property_] @@ -338,12 +331,12 @@ def merge_tickets(self, ticket_a, ticket_b) -> tuple: return to_keep, to_remove def add_association( - self, - association, - association_type: str, - object_, - object_type: str, - hubspot_defined: bool = True, + self, + association, + association_type: str, + object_, + object_type: str, + hubspot_defined: bool = True, ): """ Method to add association of an object to a new object using existing connection type. @@ -353,11 +346,11 @@ def add_association( - association_type (str): type of association using singular form (company instead of companies). - object_: object that the association will be associated to. - object_type (str): type of association using singular form (company instead of companies). 
- - hubspot_defined (str): + - hubspot_defined (str): """ association_type_id = getattr(AssociationType, association.type.upper()) association_id = association.id - object_id= object_.id + object_id = object_.id defined = "HUBSPOT_DEFINED" if hubspot_defined is True else "USER_DEFINED" try: @@ -365,23 +358,25 @@ def add_association( object_type=object_type, object_id=object_id, to_object_type=association_type, - to_object_id= association_id, - association_spec=[{ - "associationCategory": defined, - "associationTypeId": association_type_id, - }], + to_object_id=association_id, + association_spec=[ + { + "associationCategory": defined, + "associationTypeId": association_type_id, + } + ], ) except associations.ApiException as e: logging.error(f"Exception when calling batch_api->create: {e}") def create_association( - self, - from_object_id: str, - from_object_type: str, - to_object_id: str, - to_object_type: str , - association_type: str, - hubspot_defined: bool = True, + self, + from_object_id: str, + from_object_type: str, + to_object_id: str, + to_object_type: str, + association_type: str, + hubspot_defined: bool = True, ): """ Method that creates a new association. @@ -398,17 +393,19 @@ def create_association( - New association on succes. """ association_type_id = getattr(AssociationType, association_type.upper()) - association_spec = [{ - "associationCategory": ("HUBSPOT_DEFINED" if hubspot_defined is True else "USER_DEFINED"), - "associationTypeId": association_type_id, - }] + association_spec = [ + { + "associationCategory": ("HUBSPOT_DEFINED" if hubspot_defined is True else "USER_DEFINED"), + "associationTypeId": association_type_id, + } + ] try: response = self.client.crm.associations.v4.basic_api.create( - object_type= from_object_type, + object_type=from_object_type, object_id=from_object_id, - to_object_type= to_object_type, - to_object_id= to_object_id, - association_spec= association_spec, + to_object_type=to_object_type, + to_object_id=to_object_id, + association_spec=association_spec, ) return response except associations.ApiException as e: @@ -416,10 +413,10 @@ def create_association( return def remove_association( - self, - from_object_id: str, - from_object_type: str, - to_object_id: str, + self, + from_object_id: str, + from_object_type: str, + to_object_id: str, to_object_type: str, ): """ @@ -438,10 +435,10 @@ def remove_association( to_object_type=to_object_type, to_object_id=to_object_id, ) - return 1 # return something when success + return 1 # return something when success except associations.ApiException as e: logging.error(f"Exception while trying to archive an association: {e}") - return # None when fail + return # None when fail def archive_object(self, object_id: str, hs_object: str): """ @@ -460,7 +457,7 @@ def archive_object(self, object_id: str, hs_object: str): except error_api.ApiException as e: logging.error(f"Exception when calling basic_api->archive: {e}") - def batch_archive(self, object_ids: list, hs_object:str): + def batch_archive(self, object_ids: list, hs_object: str): """ Funtion to archive multiple objects at once. @@ -499,7 +496,8 @@ def log_errors(errors): f"The process was stopped prematurely resulting from an error of category {category} with record_ids: {context_ids} " ) -def get_batch_input_class(hs_object:str): + +def get_batch_input_class(hs_object: str): """ Method to check if object exists in batch_input_map. Logs as error if no input class was found. 
@@ -515,7 +513,8 @@ def get_batch_input_class(hs_object:str): logging.error(f"Invalid hs_object: {hs_object}") return batch_input_class -def get_search_input_class(hs_object:str): + +def get_search_input_class(hs_object: str): """ Method to check if object exists in batch_input_map. Logs as error if no input class was found. @@ -531,6 +530,7 @@ def get_search_input_class(hs_object:str): logging.error(f"Invalid hs_object: {hs_object}") return simple_input_class + def update_properties_list(hubspot_items: list) -> list: """ Function to set a collection of hubspot objects into a correct list for updating properties. @@ -564,12 +564,13 @@ def update_properties_list(hubspot_items: list) -> list: return final_list + def create_filter( - property_name: str, - operator: str, - property_value: str = None, - higher_value: str = "", - property_values: list = [], + property_name: str, + operator: str, + property_value: str = None, + higher_value: str = "", + property_values: list = [], ) -> dict: """ Method that returns a fitler that can be used for hubspot searches. @@ -578,14 +579,12 @@ def create_filter( - property_name (str): internal name of the property - operator (str): operator for the search - property_value (str): value the property must have. lower value if between 2 values. - - higer_value (str): higher value when between 2 values. + - higer_value (str): higher value when between 2 values. - property_values (list): list of values for IN or NOT_IN operators. Returns filter dict """ - if operator.upper() in [ - "LT", "LTE", "GT", "GTE", "EQ", "NEQ" - ]: + if operator.upper() in ["LT", "LTE", "GT", "GTE", "EQ", "NEQ"]: return { "propertyName": property_name, "operator": operator.upper(), @@ -598,9 +597,7 @@ def create_filter( "highValue": higher_value, "value": property_value, } - elif operator.upper() in [ - "IN", "NOT_IN" - ]: + elif operator.upper() in ["IN", "NOT_IN"]: return { "propertyName": property_name, "operator": operator.upper(), @@ -616,4 +613,4 @@ def create_filter( # ]: # pass # something special ? else: - logging.error("the operator doesn't exist") \ No newline at end of file + logging.error("the operator doesn't exist") diff --git a/wherescape/connectors/hubspot/process_data.py b/wherescape/connectors/hubspot/process_data.py index 33b5b85..b685663 100644 --- a/wherescape/connectors/hubspot/process_data.py +++ b/wherescape/connectors/hubspot/process_data.py @@ -9,9 +9,7 @@ """ -def hubspot_process_results( - access_token: str, results: list, column_names: list, table_name: str -): +def hubspot_process_results(access_token: str, results: list, column_names: list, table_name: str): """ function that handles the processing of the results for it to be send to Hubspot Function to process the results to be send to hubspot. @@ -66,7 +64,7 @@ def send_data_to_hubspot(object_type: str, properties: list, hubspot_instance: H def create_data_dict(result: list, column_names: list, known_names: list): """ - This Function processes a list of results into a dict to fit the needs and + This Function processes a list of results into a dict to fit the needs and expectations from HubSpot. Parameters: @@ -146,6 +144,4 @@ def get_object_name(table_name: str): elif "deals" in table_name or "deal" in table_name: return "deals" else: - logging.error( - "Could not identify the specific hubspot object type based of the table name." 
- ) + logging.error("Could not identify the specific hubspot object type based of the table name.") diff --git a/wherescape/connectors/hubspot/python_hubspot_check_missing_columns.py b/wherescape/connectors/hubspot/python_hubspot_check_missing_columns.py index b2b453c..d1a634c 100644 --- a/wherescape/connectors/hubspot/python_hubspot_check_missing_columns.py +++ b/wherescape/connectors/hubspot/python_hubspot_check_missing_columns.py @@ -13,8 +13,11 @@ 4. The ds table needs to resude in a 'datastore' schema. 5. After the load table script has run, the ds table needs to be processed. """ + import logging + from hubspot.crm.properties.exceptions import ForbiddenException + from ...helper_functions import prepare_metadata_query @@ -33,9 +36,7 @@ def compare_columns(wherescape_columns, hubspot_columns): return missing_in_wherescape, missing_in_hubspot -def create_table_rows( - missing_in_wherescape, missing_in_hubspot, environment, table_name -): +def create_table_rows(missing_in_wherescape, missing_in_hubspot, environment, table_name): """Function that creates the table row list for the insert query.""" table_rows = [] if len(missing_in_wherescape) > 0: @@ -60,9 +61,7 @@ def get_ds_rows(wherescape_instance): return rows -def hubspot_check_missing_columns( - wherescape_instance, hubspot_instance, table_name, object_type, environment -): +def hubspot_check_missing_columns(wherescape_instance, hubspot_instance, table_name, object_type, environment): """ Query the hubspot api for the supplied environment and object_type and compare with the wherescape objects. @@ -82,17 +81,13 @@ def hubspot_check_missing_columns( object_type=object_type, archived=False ) except ForbiddenException: - logging.info( - f"No access for type {object_type} via the Hubspot api for {environment}" - ) + logging.info(f"No access for type {object_type} via the Hubspot api for {environment}") else: api_results = api_response.to_dict() hubspot_columns = [] for result in api_results["results"]: hubspot_columns.append(result["name"]) - missing_in_wherescape, missing_in_hubspot = compare_columns( - wherescape_columns, hubspot_columns - ) + missing_in_wherescape, missing_in_hubspot = compare_columns(wherescape_columns, hubspot_columns) table_rows = create_table_rows( missing_in_wherescape, missing_in_hubspot, diff --git a/wherescape/connectors/hubspot/ticket_updates.py b/wherescape/connectors/hubspot/ticket_updates.py index 4a8f256..5dbb17f 100644 --- a/wherescape/connectors/hubspot/ticket_updates.py +++ b/wherescape/connectors/hubspot/ticket_updates.py @@ -5,6 +5,7 @@ from wherescape.connectors.hubspot.utils import get_double_nerd_ids, get_double_tickets from wherescape.wherescape import WhereScape + """ This module requires only the access token to update information """ @@ -19,6 +20,7 @@ "notes", ] + def merge_double_tickets(parameter_name: str): """ Function start the process of merging tickets with the same nerds ticket id. 
@@ -29,16 +31,14 @@ def merge_double_tickets(parameter_name: str): start_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S") logging.info("connecting to Wherescape") wherescape_instance = WhereScape() - logging.info( - f"Start time: {start_time} for hubspot merge_double_tickets" - ) + logging.info(f"Start time: {start_time} for hubspot merge_double_tickets") access_token = wherescape_instance.read_parameter(parameter_name) if access_token is None: logging.error(f"Nothing Token found under parameter: {parameter_name}.") - exit() + exit() hubspot = Hubspot(access_token) - + all_tickets = hubspot.get_all("tickets", ticket_properties) double_ticket_ids = get_double_nerd_ids(all_tickets) @@ -68,9 +68,9 @@ def merge_double_tickets(parameter_name: str): hubspot.batch_archive(batch_list, "tickets") del update_tickets[:100] - result = hubspot.update_batch(update_tickets, "tickets") + result = hubspot.update_batch(update_tickets, "tickets") if result is None: - exit() # exit if nothing was updated. to avoid archiving everything + exit() # exit if nothing was updated. to avoid archiving everything delete_ids = [] for ticket in delete_tickets: @@ -87,6 +87,7 @@ def merge_double_tickets(parameter_name: str): # delete remaining < 100 items hubspot.batch_archive(delete_ids, "tickets") + def hubspot_update_company_associaton(parameter_name: str): """ Function to set the right company to a hubspot ticket based on ticket propery nerds_customer_id and company property client_number. @@ -97,9 +98,7 @@ def hubspot_update_company_associaton(parameter_name: str): start_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S") logging.info("connecting to Wherescape") wherescape_instance = WhereScape() - logging.info( - f"Start time: {start_time} for hubspot merge_double_tickets" - ) + logging.info(f"Start time: {start_time} for hubspot merge_double_tickets") # get parameter access_token = wherescape_instance.read_parameter(parameter_name) if access_token is None: @@ -107,7 +106,7 @@ def hubspot_update_company_associaton(parameter_name: str): exit() hubspot = Hubspot(access_token) - + # Get Nerds company. filters = [] filters.append(create_filter("domain", "EQ", "nerds.nl")) @@ -117,7 +116,6 @@ def hubspot_update_company_associaton(parameter_name: str): if nerds_company is None: logging.error("problem while locating company") return - # Get tickets with Nerds as associated company. ticket_filters = [] @@ -125,7 +123,7 @@ def hubspot_update_company_associaton(parameter_name: str): ticket_filters.append(create_filter("nerds_customer_id", "HAS_PROPERTY")) ticket_filters.append(create_filter("nerds_customer_email", "EQ", "anoniem@voys.nerds.nl")) tickets = hubspot.filtered_search(hs_object="tickets", filters=ticket_filters, properties=["nerds_customer_id"]) - logging.info("%i tickets found with " % len(tickets) ) + logging.info("%i tickets found with " % len(tickets)) for ticket in tickets: correct_company = None @@ -138,13 +136,11 @@ def hubspot_update_company_associaton(parameter_name: str): associated_companies = hubspot.get_associations(ticket.id, "ticket", "company") # see if correct company is already there. 
for association in associated_companies: - company = hubspot.get_object( - association.to_object_id, "companies", ["client_id", "domain", "city"] - ) - if company is not None: # we can stop once we find the correct one + company = hubspot.get_object(association.to_object_id, "companies", ["client_id", "domain", "city"]) + if company is not None: # we can stop once we find the correct one correct_company = company break - + if correct_company is None: filters = [] # Client id is numeric, so if customer id is not, the search would give an error. @@ -154,16 +150,14 @@ def hubspot_update_company_associaton(parameter_name: str): if association is not None and len(association) == 1: response = hubspot.create_association( - from_object_id= company.id, - from_object_type= "companies", - to_object_id= ticket.id, + from_object_id=company.id, + from_object_type="companies", + to_object_id=ticket.id, to_object_type="tickets", - association_type="primary_company_to_ticket" + association_type="primary_company_to_ticket", ) - if response is None: # no response indicates it failed + if response is None: # no response indicates it failed logging.warning(f"Correct company could not be set for ticket with record id {ticket.id}.") - + else: logging.warning(f"customer_id could not be used: {customer_id}") - - diff --git a/wherescape/connectors/hubspot/utils.py b/wherescape/connectors/hubspot/utils.py index b749445..05f0f47 100644 --- a/wherescape/connectors/hubspot/utils.py +++ b/wherescape/connectors/hubspot/utils.py @@ -1,4 +1,3 @@ - def get_double_nerd_ids(tickets: list) -> list: """ Function to retrieve all nerd ticket id that appear multiple times. @@ -13,7 +12,6 @@ def get_double_nerd_ids(tickets: list) -> list: for ticket in tickets: nerds_ticket = ticket.properties["nerds_ticket_id"] if ticket.properties["nerds_ticket_id"] is not None: - if nerds_ticket not in seen_nerd_ticket_id: seen_nerd_ticket_id[nerds_ticket] = 1 else: @@ -21,6 +19,7 @@ def get_double_nerd_ids(tickets: list) -> list: return [k for k, count in seen_nerd_ticket_id.items() if count > 1] + def get_double_tickets(tickets: list, id_: str) -> list: """ Function to collect all tickets with the provided id. diff --git a/wherescape/connectors/jira/__init__.py b/wherescape/connectors/jira/__init__.py index f1d53b2..2e862ed 100644 --- a/wherescape/connectors/jira/__init__.py +++ b/wherescape/connectors/jira/__init__.py @@ -1,4 +1,5 @@ """ Module that takes care of the connection to JIRA. 
""" + from .jira_wrapper import Jira # noqa: E402 diff --git a/wherescape/connectors/jira/jira_wrapper.py b/wherescape/connectors/jira/jira_wrapper.py index 43ac8ad..b071c7a 100644 --- a/wherescape/connectors/jira/jira_wrapper.py +++ b/wherescape/connectors/jira/jira_wrapper.py @@ -2,9 +2,9 @@ import json import logging + import pandas as pd import requests - from requests.auth import HTTPBasicAuth from ...helper_functions import filter_dict, flatten_json @@ -105,14 +105,10 @@ def make_request(self, url, method, payload={}): headers = {"Accept": "application/json", "Content-Type": "application/json"} auth = HTTPBasicAuth(self.user, self.apikey) - response = requests.request( - method, url, data=payload, headers=headers, auth=auth - ) + response = requests.request(method, url, data=payload, headers=headers, auth=auth) if response.status_code != 200: - raise Exception( - f"JIRA connection error {response.status_code}: {response.content}" - ) + raise Exception(f"JIRA connection error {response.status_code}: {response.content}") return response def get_all_projects(self, as_numpy=True): @@ -148,9 +144,7 @@ def get_all_projects(self, as_numpy=True): columns=list(project_keys_list), ) - data_as_frame = self.clean_dataframe( - data_as_frame, KEYS_TO_KEEP_FROM_PROJECTS_JSON - ) + data_as_frame = self.clean_dataframe(data_as_frame, KEYS_TO_KEEP_FROM_PROJECTS_JSON) return data_as_frame.values.tolist() return json_response["values"] @@ -172,9 +166,7 @@ def get_all_issues(self, since=None): for project in projects: if project["isPrivate"]: continue - all_issues_per_project.extend( - self.get_issue_data_per_project(project["id"], since) - ) + all_issues_per_project.extend(self.get_issue_data_per_project(project["id"], since)) return all_issues_per_project def get_issue_data_per_project(self, project_id, since=None): @@ -241,22 +233,16 @@ def clean_dataframe(self, dataframe, properties_to_transform): for key, value in properties_to_transform.items(): # to make it a little bit more faster, let's skip object, since it is already an object (string) - if "object" == value: + if value == "object": continue try: - # When working with dates, we want to keep None values None and not NaT. Otherwise we get a 00:00:00 date in wherescape - if ( - value == "datetime64[ns]" - and dataframe[key].loc[dataframe.index[0]] is None - ): + if value == "datetime64[ns]" and dataframe[key].loc[dataframe.index[0]] is None: continue else: dataframe[key] = dataframe[key].astype(value, errors="ignore") except KeyError: - logging.info( - key + " key not in dataframe, skipping transforming datatype" - ) + logging.info(key + " key not in dataframe, skipping transforming datatype") dataframe[key] = "" # todo: check if now keys are missing. 
return dataframe @@ -283,19 +269,13 @@ def clean_issue_data(self, issues): if item["field"] == "status" and item["toString"] == "In Progress": history_list.append(history["created"]) - issue["status_in_progress_date"] = ( - min(history_list) if len(history_list) > 0 else None - ) + issue["status_in_progress_date"] = min(history_list) if len(history_list) > 0 else None flattend_dict = flatten_json(json_response=issue, name_to_skip="fields", legacy_list_handling=True) data[issue["id"]] = filter_dict(flattend_dict, issues_keys_list) - data_as_frame = pd.DataFrame( - data.values(), list(data.keys()), columns=list(issues_keys_list) - ) - data_as_frame = self.clean_dataframe( - data_as_frame, KEYS_TO_KEEP_FROM_TICKETS_JSON - ) + data_as_frame = pd.DataFrame(data.values(), list(data.keys()), columns=list(issues_keys_list)) + data_as_frame = self.clean_dataframe(data_as_frame, KEYS_TO_KEEP_FROM_TICKETS_JSON) try: issue_data_in_list = data_as_frame.values.tolist() except: diff --git a/wherescape/connectors/jira/python_jira_create_metadata.py b/wherescape/connectors/jira/python_jira_create_metadata.py index 4e278dc..04b9088 100644 --- a/wherescape/connectors/jira/python_jira_create_metadata.py +++ b/wherescape/connectors/jira/python_jira_create_metadata.py @@ -1,14 +1,14 @@ -from datetime import datetime import logging import os +from datetime import datetime -from .jira_wrapper import Jira -from ...wherescape import WhereScape from ...helper_functions import ( - prepare_metadata_query, create_column_names, create_display_names, + prepare_metadata_query, ) +from ...wherescape import WhereScape +from .jira_wrapper import Jira def jira_create_metadata_project(): diff --git a/wherescape/connectors/jira/python_jira_load_data.py b/wherescape/connectors/jira/python_jira_load_data.py index 7de5277..1f38c48 100644 --- a/wherescape/connectors/jira/python_jira_load_data.py +++ b/wherescape/connectors/jira/python_jira_load_data.py @@ -132,15 +132,11 @@ def jira_load_data(load_type, use_high_water_mark=False, since=None, is_legacy=F logging.info(f"Successfully inserted {len(rows)} rows in to the load table.") # Update the high_water_mark. Will also be updated if use_high_water_mark=False - wherescape_instance.write_parameter( - "jira_high_water_mark", start_time.strftime("%Y-%m-%d %H:%M") - ) + wherescape_instance.write_parameter("jira_high_water_mark", start_time.strftime("%Y-%m-%d %H:%M")) logging.info(f"New high water mark is: {start_time.strftime('%Y-%m-%d %H:%M')}") # Add success message - wherescape_instance.main_message = ( - f"Successfully inserted {len(rows)} rows in to the load table." - ) + wherescape_instance.main_message = f"Successfully inserted {len(rows)} rows in to the load table." 
else: logging.info("No object changes received from JIRA") diff --git a/wherescape/connectors/python_api_connection_no_browse.py b/wherescape/connectors/python_api_connection_no_browse.py index a602811..e5957f2 100644 --- a/wherescape/connectors/python_api_connection_no_browse.py +++ b/wherescape/connectors/python_api_connection_no_browse.py @@ -1,4 +1,2 @@ print(1) -print( - """{"treeViewLayout": "Tabular", "treeViewIcons": {"schema": "project.ico","table": "File.ico"}}""" -) +print("""{"treeViewLayout": "Tabular", "treeViewIcons": {"schema": "project.ico","table": "File.ico"}}""") diff --git a/wherescape/logging.py b/wherescape/logging.py index d40177f..31aabf4 100644 --- a/wherescape/logging.py +++ b/wherescape/logging.py @@ -1,8 +1,8 @@ -from datetime import time import logging -from logging.handlers import TimedRotatingFileHandler import os import sys +from datetime import time +from logging.handlers import TimedRotatingFileHandler class WhereScapeLogHandler(logging.Handler): @@ -88,9 +88,7 @@ def handle_exception(exc_type, exc_value, exc_traceback): https://docs.python.org/3/library/sys.html#sys.excepthook """ logger = logging.getLogger() - logger.critical( - "Unhandled exception", exc_info=(exc_type, exc_value, exc_traceback) - ) + logger.critical("Unhandled exception", exc_info=(exc_type, exc_value, exc_traceback)) def initialise_wherescape_logging(wherescape): diff --git a/wherescape/wherescape.py b/wherescape/wherescape.py index 43703a3..c55f28f 100644 --- a/wherescape/wherescape.py +++ b/wherescape/wherescape.py @@ -46,23 +46,17 @@ def __init__(self): wsl_meta_dns = os.getenv("WSL_META_DSN") wsl_meta_user = os.getenv("WSL_META_USER") wsl_meta_pwd = os.getenv("WSL_META_PWD") - self.meta_db_connection_string = ( - f"DSN={wsl_meta_dns};UID={wsl_meta_user};PWD={wsl_meta_pwd}" - ) + self.meta_db_connection_string = f"DSN={wsl_meta_dns};UID={wsl_meta_user};PWD={wsl_meta_pwd}" wsl_tgt_dns = os.getenv("WSL_TGT_DSN") wsl_tgt_user = os.getenv("WSL_TGT_USER") wsl_tgt_pwd = os.getenv("WSL_TGT_PWD") - self.target_db_connection_string = ( - f"DSN={wsl_tgt_dns};UID={wsl_tgt_user};PWD={wsl_tgt_pwd};sslmode=prefer" - ) + self.target_db_connection_string = f"DSN={wsl_tgt_dns};UID={wsl_tgt_user};PWD={wsl_tgt_pwd};sslmode=prefer" wsl_src_dns = os.getenv("WSL_SRC_DSN") wsl_src_user = os.getenv("WSL_SRC_USER") wsl_src_pwd = os.getenv("WSL_SRC_PWD") - self.source_db_connection_string = ( - f"DSN={wsl_src_dns};UID={wsl_src_user};PWD={wsl_src_pwd};sslmode=prefer" - ) + self.source_db_connection_string = f"DSN={wsl_src_dns};UID={wsl_src_user};PWD={wsl_src_pwd};sslmode=prefer" self.sequence = os.getenv("WSL_SEQUENCE") self.job_key = os.getenv("WSL_JOB_KEY", "no-job-key") @@ -384,9 +378,7 @@ def job_clear_logs_by_date(self, days_to_retain=90, job_to_clean="%"): , @p_result = @out2 OUTPUT; SELECT @out AS return_code,@out1 AS return_msg,@out2 AS return_result""" - return_values = self.query_meta( - sql, self.common_input_parameter_list + function_parameter_list - ) + return_values = self.query_meta(sql, self.common_input_parameter_list + function_parameter_list) return_code = return_values[0][0] return_message = return_values[0][1] result_number = int(return_values[0][2]) @@ -437,9 +429,7 @@ def job_clear_archive_by_date(self, days_to_retain=365, job_to_clean="%"): , @p_result = @out2 OUTPUT; SELECT @out AS return_code,@out1 AS return_msg,@out2 AS return_result;""" - return_values = self.query_meta( - sql, self.common_input_parameter_list + function_parameter_list - ) + return_values = self.query_meta(sql, 
self.common_input_parameter_list + function_parameter_list) return_code = return_values[0][0] return_message = return_values[0][1] result_number = int(return_values[0][2]) diff --git a/wherescape/ws_env_template.py b/wherescape/ws_env_template.py index 5911447..4a4ac11 100644 --- a/wherescape/ws_env_template.py +++ b/wherescape/ws_env_template.py @@ -8,6 +8,7 @@ 2) Save the file as ws_env.py (in your local development environment) 3) Make sure that ws_env.py is in .gitignore """ + import os @@ -49,9 +50,7 @@ def setup_env(tablename, schema="load", environment="dev1", source="dev1"): if source == "sgmt": os.environ["WSL_SRC_DSN"] = "wswh prod" os.environ["WSL_SRC_DSN_ARCH"] = "64" - os.environ[ - "WSL_SRC_SERVER" - ] = "xxxxxxx.xxxxxxx.eu-central-1.rds.amazonaws.com" + os.environ["WSL_SRC_SERVER"] = "xxxxxxx.xxxxxxx.eu-central-1.rds.amazonaws.com" os.environ["WSL_SRC_DBPORT"] = "" os.environ["WSL_SRC_DBID"] = "" os.environ["WSL_SRC_DB"] = "__db_name__" @@ -61,9 +60,7 @@ def setup_env(tablename, schema="load", environment="dev1", source="dev1"): else: os.environ["WSL_SRC_DSN"] = "wswh dev1" os.environ["WSL_SRC_DSN_ARCH"] = "64" - os.environ[ - "WSL_SRC_SERVER" - ] = "xxxxxx.xxxxxx.eu-central-1.rds.amazonaws.com" + os.environ["WSL_SRC_SERVER"] = "xxxxxx.xxxxxx.eu-central-1.rds.amazonaws.com" os.environ["WSL_SRC_DBPORT"] = "" os.environ["WSL_SRC_DBID"] = "" os.environ["WSL_SRC_DB"] = "__db_name__" @@ -100,9 +97,7 @@ def setup_env(tablename, schema="load", environment="dev1", source="dev1"): os.environ["WSL_BINDIR"] = "C:\\Program Files\\WhereScape\\RED\\" # os.environ['WSL_WORKDIR'] = f"C:\\WhereScape\\Scheduler\\Work\\{capped_environment}\\" - os.environ[ - "WSL_WORKDIR" - ] = "C:\\Users\\Bart\\Documents\\GitHub\\wherescape-warehouse\\" + os.environ["WSL_WORKDIR"] = "C:\\Users\\Bart\\Documents\\GitHub\\wherescape-warehouse\\" os.environ["WSL_SEQUENCE"] = "1234" os.environ["WSL_JOB_KEY"] = "9999" @@ -115,9 +110,7 @@ def setup_env(tablename, schema="load", environment="dev1", source="dev1"): # setup passwords for warehouse.py git_dir = r"C:\WhereScape\GitlabRepository" - os.environ["CONFIG_JSON_PATH"] = ( - git_dir + r"\wherescape-warehouse\python\config.wherescape.json" - ) + os.environ["CONFIG_JSON_PATH"] = git_dir + r"\wherescape-warehouse\python\config.wherescape.json" # -- set passwords # -- script can run in all environments
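For reference, a minimal, self-contained sketch of the JSON-fallback parsing pattern that `_flatten_chat()` in the AnythingLLM wrapper relies on: attempt to decode the `response` payload and fall back to the raw value when decoding fails. Field names are taken from the hunks above; the rest is simplified and not the module's full logic.

```python
import json


def parse_response(chat: dict) -> dict:
    """Simplified illustration of the _flatten_chat() response handling."""
    try:
        response_obj = json.loads(chat.get("response", "{}"))
        return {
            "response_text": response_obj.get("text", ""),
            "metrics_total_tokens": response_obj.get("metrics", {}).get("total_tokens"),
        }
    # Python 3 requires the parenthesized tuple form to catch multiple exception types.
    except (json.JSONDecodeError, TypeError):
        # If response is not valid JSON, store as-is
        return {"response_text": chat.get("response", ""), "metrics_total_tokens": None}


print(parse_response({"response": json.dumps({"text": "hi", "metrics": {"total_tokens": 3}})}))
print(parse_response({"response": "not valid json"}))
```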