Commit 25fa78ab authored by mjbonifa

added exceptions in phen

parent 1e8776a8
@@ -52,7 +52,7 @@ class Proto():
     def in_database(self, codes, db, col):
         return codes.isin(db[col])
-    def process(self, codes, codes_file, ignore_errors=False):
+    def process(self, codes, codes_file):
         """ identify issues that do not pass and fix them with define/d process """
         errors = []
         # Iter through each item in check.
@@ -66,10 +66,7 @@ class Proto():
                 codes = fix(codes, codes_file)
                 logger.debug(f"Check: Fixed")
             except InvalidCodesException as ex:
-                if ignore_errors:
-                    errors.append(ex)
-                else:
-                    raise ex
+                errors.append(ex)
             else:
                 logger.debug(f"Check: passed")
@@ -354,6 +354,7 @@ def log_invalid_code(codes, mask, code_type=None, file_path=None, cause=None):
 def preprocess_codes(df, file, target_code_type=None, codes_file=None):
     """ Parses each column individually - Order and length will not be preserved! """
     out = pd.DataFrame([]) # create output df to append to
+    code_errors = [] # list of errors from processing
     meta_columns = [] # meta columns to keep with codes
     if "actions" in file and "divide_col" in file["actions"]:
@@ -375,14 +376,15 @@ def preprocess_codes(df, file, target_code_type=None, codes_file=None):
             codes = codes.str.strip() # remove excess spaces
             # process codes, validating them using parser and returning the errors
-            codes, errors = code_type_parser.process(codes, codes_file, ignore_errors=True)
+            codes, errors = code_type_parser.process(codes, codes_file)
             if len(errors) > 0:
-                raise Exception(f"Code validation failed with {len(errors)} errors")
+                code_errors.extend(errors)
+                logger.warning(f"Code validation failed with {len(errors)} errors")
             # add metadata columns
             out = pd.concat([out, pd.DataFrame({code_type_name: codes}).join(metadata_df)], ignore_index=True)
-    return out, meta_columns
+    return out, meta_columns, code_errors
 # Translate Df with multiple codes into single code type Series
 def translate_codes(df, target_code_type):
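
One Python detail behind the accumulation lines above: list.append returns None and would store the incoming list as a single nested element, so a pattern like code_errors = code_errors.append(errors) silently rebinds code_errors to None. Merging the per-column errors into the running list is done with extend (or +=); a two-line illustration:

code_errors = []
errors = [ValueError("bad code"), ValueError("worse code")]

code_errors.extend(errors)                   # merges the items: len(code_errors) == 2
# code_errors = code_errors.append(errors)   # would leave code_errors == None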
@@ -460,6 +462,7 @@ def map(phen_dir, target_code_type):
     # Create output dataframe
     out = pd.DataFrame([])
+    code_errors = []
     # Process each folder in codes section
     for folder in codes:
@@ -478,11 +481,13 @@ def map(phen_dir, target_code_type):
             # Preprocessing & Validation Checks
             logger.debug("Processing and validating code formats")
-            df, meta_columns = preprocess_codes(df,
-                                                file,
-                                                codes_file=str(codes_file_path.resolve()),
-                                                target_code_type=target_code_type)
+            df, meta_columns, errors = preprocess_codes(
+                df,
+                file, codes_file=str(codes_file_path.resolve()),
+                target_code_type=target_code_type)
+            code_errors.extend(errors)
             # partition table by categorical column
             if ("actions" in file and "divide_col" in file["actions"] and len(df) > 0):
                 divide_col = file["actions"]["divide_col"]
@@ -492,8 +497,10 @@ def map(phen_dir, target_code_type):
             # Map to Concept/Phenotype
             if len(df.index) != 0:
                 if ("concept_set" in file) and isinstance(df, pd.core.frame.DataFrame):
-                    out = map_file(df,
-                                   target_code_type, out,
+                    out = map_file(
+                        df,
+                        target_code_type,
+                        out,
                         concepts=file["concept_set"],
                         meta_columns=meta_columns)
                 elif ("concept_set_categories" in file) and isinstance(df, pd.core.groupby.generic.DataFrameGroupBy):
@@ -502,7 +509,8 @@ def map(phen_dir, target_code_type):
                     if (cat in file["concept_set_categories"].keys()): # check if category is mapped
                         grp = grp.drop(columns=[divide_col]) # delete categorical column
                         logger.debug("Category:", cat)
-                        out = map_file(grp,
+                        out = map_file(
+                            grp,
                             target_code_type,
                             out,
                             concepts=file["concept_set_categories"][cat],
@@ -512,6 +520,9 @@ def map(phen_dir, target_code_type):
         else:
             logger.warning(f"File {file} has no output after preprocessing in config {str(config_path.resolve())}")
+    if len(code_errors) > 0:
+        logger.error(f"The map processing has {len(code_errors)} errors")
+
     # Check there is output from processing
     if len(out.index) == 0:
         raise Exception(f"No output after map processing, check config {str(config_path.resolve())}")