From a37374d1a4c2f463d3d66b747b4063265201d6d9 Mon Sep 17 00:00:00 2001 From: Connor McArthur Date: Wed, 26 Jul 2017 16:34:08 -0400 Subject: [PATCH] better error handling (#494) --- dbt/adapters/bigquery.py | 5 +- dbt/adapters/default.py | 1 + dbt/adapters/postgres.py | 6 +- dbt/adapters/redshift.py | 1 - dbt/adapters/snowflake.py | 15 +- dbt/clients/jinja.py | 12 +- dbt/context/common.py | 22 +-- dbt/context/parser.py | 6 +- dbt/context/runtime.py | 6 +- dbt/contracts/graph/parsed.py | 13 -- dbt/contracts/graph/unparsed.py | 1 + dbt/exceptions.py | 162 ++++++++++-------- dbt/graph/selector.py | 1 - .../macros/materializations/archive.sql | 5 + dbt/loader.py | 2 +- dbt/model.py | 9 +- dbt/node_runners.py | 20 ++- dbt/parser.py | 23 ++- dbt/runner.py | 5 +- dbt/runtime.py | 39 ----- dbt/ui/printer.py | 46 +++-- dbt/utils.py | 8 - requirements.txt | 3 +- setup.py | 4 +- .../023_exit_codes_test/test_exit_codes.py | 5 +- test/unit/test_compiler.py | 9 + test/unit/test_graph.py | 19 -- test/unit/test_parser.py | 69 ++++++++ 28 files changed, 298 insertions(+), 219 deletions(-) delete mode 100644 dbt/runtime.py diff --git a/dbt/adapters/bigquery.py b/dbt/adapters/bigquery.py index e41bad98c62..0a1eeee3e75 100644 --- a/dbt/adapters/bigquery.py +++ b/dbt/adapters/bigquery.py @@ -2,6 +2,7 @@ from contextlib import contextmanager +import dbt.compat import dbt.exceptions import dbt.flags as flags import dbt.clients.gcloud @@ -31,7 +32,7 @@ def handle_error(cls, error, message, sql): logger.debug(message.format(sql=sql)) logger.debug(error) error_msg = "\n".join([error['message'] for error in error.errors]) - raise dbt.exceptions.RuntimeException(error_msg) + raise dbt.exceptions.DatabaseException(error_msg) @classmethod @contextmanager @@ -51,7 +52,7 @@ def exception_handler(cls, profile, sql, model_name=None, except Exception as e: logger.debug("Unhandled error while running:\n{}".format(sql)) logger.debug(e) - raise dbt.exceptions.RuntimeException(e) + raise dbt.exceptions.RuntimeException(dbt.compat.to_string(e)) @classmethod def type(cls): diff --git a/dbt/adapters/default.py b/dbt/adapters/default.py index c4ba542140c..c21e1158cd5 100644 --- a/dbt/adapters/default.py +++ b/dbt/adapters/default.py @@ -38,6 +38,7 @@ class DefaultAdapter(object): "get_status", "get_result_from_cursor", "quote", + "quote_schema_and_table", ] ### diff --git a/dbt/adapters/postgres.py b/dbt/adapters/postgres.py index 719b7bf66d7..04491a88c16 100644 --- a/dbt/adapters/postgres.py +++ b/dbt/adapters/postgres.py @@ -3,6 +3,7 @@ from contextlib import contextmanager import dbt.adapters.default +import dbt.compat import dbt.exceptions from dbt.logger import GLOBAL_LOGGER as logger @@ -23,13 +24,14 @@ def exception_handler(cls, profile, sql, model_name=None, logger.debug('Postgres error: {}'.format(str(e))) cls.rollback(connection) - raise dbt.exceptions.RuntimeException(e) + raise dbt.exceptions.DatabaseException( + dbt.compat.to_string(e).strip()) except Exception as e: logger.debug("Error running SQL: %s", sql) logger.debug("Rolling back transaction.") cls.rollback(connection) - raise e + raise dbt.exceptions.RuntimeException(e) @classmethod def type(cls): diff --git a/dbt/adapters/redshift.py b/dbt/adapters/redshift.py index 1f8f87be847..5430873d5f4 100644 --- a/dbt/adapters/redshift.py +++ b/dbt/adapters/redshift.py @@ -2,7 +2,6 @@ from dbt.adapters.postgres import PostgresAdapter from dbt.logger import GLOBAL_LOGGER as logger # noqa -from dbt.compat import basestring drop_lock = multiprocessing.Lock() diff --git 
a/dbt/adapters/snowflake.py b/dbt/adapters/snowflake.py index dca1a830bf2..0f30531a081 100644 --- a/dbt/adapters/snowflake.py +++ b/dbt/adapters/snowflake.py @@ -7,6 +7,7 @@ from contextlib import contextmanager +import dbt.compat import dbt.exceptions import dbt.flags as flags @@ -26,26 +27,28 @@ def exception_handler(cls, profile, sql, model_name=None, try: yield except snowflake.connector.errors.ProgrammingError as e: - logger.debug('Snowflake error: {}'.format(str(e))) + msg = dbt.compat.to_string(e) - if 'Empty SQL statement' in e.msg: + logger.debug('Snowflake error: {}'.format(msg)) + + if 'Empty SQL statement' in msg: logger.debug("got empty sql statement, moving on") - elif 'This session does not have a current database' in e.msg: + elif 'This session does not have a current database' in msg: cls.rollback(connection) raise dbt.exceptions.FailedToConnectException( ('{}\n\nThis error sometimes occurs when invalid ' 'credentials are provided, or when your default role ' 'does not have access to use the specified database. ' 'Please double check your profile and try again.') - .format(str(e))) + .format(msg)) else: cls.rollback(connection) - raise dbt.exceptions.ProgrammingException(str(e)) + raise dbt.exceptions.DatabaseException(msg) except Exception as e: logger.debug("Error running SQL: %s", sql) logger.debug("Rolling back transaction.") cls.rollback(connection) - raise e + raise dbt.exceptions.RuntimeException(e.msg) @classmethod def type(cls): diff --git a/dbt/clients/jinja.py b/dbt/clients/jinja.py index c3edc11621e..cfdb50da56d 100644 --- a/dbt/clients/jinja.py +++ b/dbt/clients/jinja.py @@ -49,12 +49,12 @@ def call(*args, **kwargs): try: return macro(*args, **kwargs) - except (jinja2.exceptions.TemplateRuntimeError) as e: - raise dbt.exceptions.MacroRuntimeException( + except (TypeError, + jinja2.exceptions.TemplateRuntimeError) as e: + dbt.exceptions.raise_compiler_error( str(e), - context.get('model'), node) - except dbt.exceptions.MacroRuntimeException as e: + except dbt.exceptions.CompilationException as e: e.stack.append(node) raise e @@ -148,7 +148,7 @@ def get_template(string, ctx, node=None, capture_macros=False): except (jinja2.exceptions.TemplateSyntaxError, jinja2.exceptions.UndefinedError) as e: e.translated = False - dbt.exceptions.raise_compiler_error(node, str(e)) + dbt.exceptions.raise_compiler_error(str(e), node) def render_template(template, ctx, node=None): @@ -158,7 +158,7 @@ def render_template(template, ctx, node=None): except (jinja2.exceptions.TemplateSyntaxError, jinja2.exceptions.UndefinedError) as e: e.translated = False - dbt.exceptions.raise_compiler_error(node, str(e)) + dbt.exceptions.raise_compiler_error(str(e), node) def get_rendered(string, ctx, node=None, diff --git a/dbt/context/common.py b/dbt/context/common.py index 4a3c3bcaeaa..f6976e8e4ea 100644 --- a/dbt/context/common.py +++ b/dbt/context/common.py @@ -176,11 +176,11 @@ def pretty_dict(self, data): def assert_var_defined(self, var_name, default): if var_name not in self.local_vars and default is None: pretty_vars = self.pretty_dict(self.local_vars) - dbt.utils.compiler_error( - self.model, + dbt.exceptions.raise_compiler_error( self.UndefinedVarError.format( var_name, self.model_name, pretty_vars - ) + ), + self.model ) def assert_var_not_none(self, var_name): @@ -188,11 +188,11 @@ def assert_var_not_none(self, var_name): if raw is None: pretty_vars = self.pretty_dict(self.local_vars) model_name = dbt.utils.get_model_name_or_none(self.model) - dbt.utils.compiler_error( - self.model, + 
dbt.exceptions.raise_compiler_error( self.NoneVarError.format( var_name, model_name, pretty_vars - ) + ), + self.model ) def __call__(self, var_name, default=None): @@ -214,7 +214,9 @@ def __call__(self, var_name, default=None): def write(node, target_path, subdirectory): def fn(payload): - dbt.writer.write_node(node, target_path, subdirectory, payload) + node['build_path'] = dbt.writer.write_node( + node, target_path, subdirectory, payload) + return fn @@ -273,9 +275,7 @@ def generate(model, project, flat_graph, provider=None): schema, dbt.utils.model_immediate_name(model, dbt.flags.NON_DESTRUCTIVE), model.get('name') - ), - "var": Var(model, context=context), - "write": write(model, project.get('target-path'), 'run'), + ) }) context = _add_tracking(context) @@ -286,7 +286,9 @@ def generate(model, project, flat_graph, provider=None): context = _add_macros(context, model, flat_graph) + context["write"] = write(model, project.get('target-path'), 'run') context["render"] = render(context, model) + context["var"] = Var(model, context=context) context['context'] = context return context diff --git a/dbt/context/parser.py b/dbt/context/parser.py index 6f6d9f5029b..873f70e12a4 100644 --- a/dbt/context/parser.py +++ b/dbt/context/parser.py @@ -29,9 +29,9 @@ def __call__(self, *args, **kwargs): elif len(args) == 0 and len(kwargs) > 0: opts = kwargs else: - dbt.utils.compiler_error( - self.model.get('name'), - "Invalid model config given inline in {}".format(self.model)) + dbt.exceptions.raise_compiler_error( + "Invalid inline model config", + self.model) self.model['config_reference'].update_in_model_config(opts) return '' diff --git a/dbt/context/runtime.py b/dbt/context/runtime.py index 12347483e06..744421ac2c2 100644 --- a/dbt/context/runtime.py +++ b/dbt/context/runtime.py @@ -1,11 +1,13 @@ +import json + from dbt.adapters.factory import get_adapter +from dbt.compat import basestring import dbt.clients.jinja +import dbt.context.common import dbt.flags import dbt.utils -import dbt.context.common - from dbt.logger import GLOBAL_LOGGER as logger # noqa diff --git a/dbt/contracts/graph/parsed.py b/dbt/contracts/graph/parsed.py index 08c9a2606e6..e291fc0422c 100644 --- a/dbt/contracts/graph/parsed.py +++ b/dbt/contracts/graph/parsed.py @@ -72,9 +72,6 @@ def validate_nodes(parsed_nodes): validate_with(parsed_nodes_contract, parsed_nodes) - [validate_incremental(node) for unique_id, node - in parsed_nodes.items()] - def validate_macros(parsed_macros): validate_with(parsed_macros_contract, parsed_macros) @@ -82,13 +79,3 @@ def validate_macros(parsed_macros): def validate(parsed_graph): validate_with(parsed_graph_contract, parsed_graph) - - [validate_incremental(node) for unique_id, node - in parsed_graph.get('nodes').items()] - - -def validate_incremental(node): - if(node.get('resource_type') == NodeType.Model and - get_materialization(node) == 'incremental' and - node.get('config', {}).get('sql_where') is None): - dbt.exceptions.missing_sql_where(node) diff --git a/dbt/contracts/graph/unparsed.py b/dbt/contracts/graph/unparsed.py index 14075b1e3b7..1a7e75e50d9 100644 --- a/dbt/contracts/graph/unparsed.py +++ b/dbt/contracts/graph/unparsed.py @@ -13,6 +13,7 @@ # filesystem Required('root_path'): basestring, Required('path'): basestring, + Required('original_file_path'): basestring, Required('raw_sql'): basestring, }) diff --git a/dbt/exceptions.py b/dbt/exceptions.py index 1fb56f82815..cf139f0d5e1 100644 --- a/dbt/exceptions.py +++ b/dbt/exceptions.py @@ -10,81 +10,107 @@ class 
InternalException(Exception): class RuntimeException(RuntimeError, Exception): - pass + def __init__(self, msg, node=None): + self.stack = [] + self.node = node + self.msg = msg + @property + def type(self): + return 'Runtime' -class MacroRuntimeException(RuntimeException): - def __init__(self, msg, model, macro): - self.stack = [macro] - self.model = model - self.msg = msg + def node_to_string(self, node): + return "{} {} ({})".format( + node.get('resource_type'), + node.get('name', 'unknown'), + node.get('original_file_path')) - def __str__(self): - to_return = self.msg + def process_stack(self): + lines = [] + stack = self.stack + [self.node] + first = True - to_return += "\n in macro {} ({})".format( - self.stack[0].get('name'), self.stack[0].get('path')) + if len(stack) > 1: + lines.append("") - for item in self.stack[1:]: - to_return += "\n called by macro {} ({})".format( - item.get('name'), item.get('path')) + for item in stack: + msg = 'called by' - to_return += "\n called by model {} ({})".format( - self.model.get('name'), self.model.get('path')) + if first: + msg = 'in' + first = False - return to_return + lines.append("> {} {}".format( + msg, + self.node_to_string(item))) + return lines + + def __str__(self, prefix="! "): + node_string = "" + + if self.node is not None: + node_string = " in {}".format(self.node_to_string(self.node)) + + lines = ["{}{}".format(self.type + ' Error', + node_string)] + \ + self.msg.split("\n") + + lines += self.process_stack() + + return lines[0] + "\n" + "\n".join( + [" " + line for line in lines[1:]]) -class ValidationException(RuntimeException): - pass + +class DatabaseException(RuntimeException): + + def process_stack(self): + lines = [] + + if self.node is not None and self.node.get('build_path'): + lines.append( + "compiled SQL at {}".format(self.node.get('build_path'))) + + return lines + RuntimeException.process_stack(self) + + @property + def type(self): + return 'Database' class CompilationException(RuntimeException): - pass + @property + def type(self): + return 'Compilation' -class NotImplementedException(Exception): +class ValidationException(RuntimeException): pass -class ProgrammingException(Exception): +class NotImplementedException(Exception): pass -class FailedToConnectException(Exception): +class FailedToConnectException(DatabaseException): pass from dbt.utils import get_materialization # noqa -def raise_compiler_error(node, msg): - name = '' - node_type = 'model' +def raise_compiler_error(msg, node=None): + raise CompilationException(msg, node) - if node is None: - name = '' - elif isinstance(node, basestring): - name = node - elif isinstance(node, dict): - name = node.get('name') - node_type = node.get('resource_type') - if node_type == 'macro': - name = node.get('path') - else: - name = node.nice_name - - raise CompilationException( - "! Compilation error while compiling {} {}:\n! 
{}\n" - .format(node_type, name, msg)) +def raise_database_error(msg, node=None): + raise DatabaseException(msg, node) def ref_invalid_args(model, args): raise_compiler_error( - model, - "ref() takes at most two arguments ({} given)".format( - len(args))) + "ref() takes at most two arguments ({} given)".format(len(args)), + model) def ref_bad_context(model, target_model_name, target_model_package): @@ -105,8 +131,7 @@ def ref_bad_context(model, target_model_name, target_model_package): model_path=model['path'], ref_string=ref_string ) - raise_compiler_error( - model, error_msg) + raise_compiler_error(error_msg, model) def ref_target_not_found(model, target_model_name, target_model_package): @@ -116,26 +141,26 @@ def ref_target_not_found(model, target_model_name, target_model_package): target_package_string = "in package '{}' ".format(target_model_package) raise_compiler_error( - model, "Model '{}' depends on model '{}' {}which was not found." .format(model.get('unique_id'), target_model_name, - target_package_string)) + target_package_string), + model) def ref_disabled_dependency(model, target_model): raise_compiler_error( - model, "Model '{}' depends on model '{}' which is disabled in " "the project config".format(model.get('unique_id'), - target_model.get('unique_id'))) + target_model.get('unique_id')), + model) def dependency_not_found(model, target_model_name): raise_compiler_error( - model, "'{}' depends on '{}' which is not in the graph!" - .format(model.get('unique_id'), target_model_name)) + .format(model.get('unique_id'), target_model_name), + model) def macro_not_found(model, target_macro_id): @@ -149,9 +174,9 @@ def materialization_not_available(model, adapter_type): materialization = get_materialization(model) raise_compiler_error( - model, "Materialization '{}' is not available for {}!" - .format(materialization, adapter_type)) + .format(materialization, adapter_type), + model) def missing_materialization(model, adapter_type): @@ -163,34 +188,31 @@ def missing_materialization(model, adapter_type): valid_types = "'default' and '{}'".format(adapter_type) raise_compiler_error( - model, "No materialization '{}' was found for adapter {}! (searched types {})" - .format(materialization, adapter_type, valid_types)) - - -def missing_sql_where(model): - raise_compiler_error( - model, - "Model '{}' is materialized as 'incremental', but does not have a " - "sql_where defined in its config.".format(model.get('unique_id'))) + .format(materialization, adapter_type, valid_types), + model) def bad_package_spec(repo, spec, error_message): - raise RuntimeException( + raise InternalException( "Error checking out spec='{}' for repo {}\n{}".format( spec, repo, error_message)) def missing_config(model, name): raise_compiler_error( - model, "Model '{}' does not define a required config parameter '{}'." - .format(model.get('unique_id'), name)) + .format(model.get('unique_id'), name), + model) -def invalid_materialization_argument(name, argument): - msg = "Received an unknown argument '{}'.".format(argument) +def missing_relation(relation_name, model=None): + raise_compiler_error( + "Relation {} not found!".format(relation_name), + model) + - raise CompilationException( - "! Compilation error while compiling materialization {}:\n! {}\n" - .format(name, msg)) +def invalid_materialization_argument(name, argument): + raise_compiler_error( + "materialization '{}' received unknown argument '{}'." 
+ .format(name, argument)) diff --git a/dbt/graph/selector.py b/dbt/graph/selector.py index 44c9ea5576d..2974e1954ab 100644 --- a/dbt/graph/selector.py +++ b/dbt/graph/selector.py @@ -1,4 +1,3 @@ -# import dbt.utils.compiler_error import networkx as nx from dbt.logger import GLOBAL_LOGGER as logger diff --git a/dbt/include/global_project/macros/materializations/archive.sql b/dbt/include/global_project/macros/materializations/archive.sql index 18f7d0c70a7..ae3eed76458 100644 --- a/dbt/include/global_project/macros/materializations/archive.sql +++ b/dbt/include/global_project/macros/materializations/archive.sql @@ -74,6 +74,11 @@ {%- set config = model['config'] -%} {%- set source_schema = config.get('source_schema') -%} {%- set source_table = config.get('source_table') -%} + + {%- if not adapter.already_exists(source_schema, source_table) -%} + {{ exceptions.missing_relation(source_table) }} + {%- endif -%} + {%- set source_columns = adapter.get_columns_in_table(source_schema, source_table) -%} {%- set target_schema = config.get('target_schema') -%} {%- set target_table = config.get('target_table') -%} diff --git a/dbt/loader.py b/dbt/loader.py index a8f8b7e6b1a..4f63d305aba 100644 --- a/dbt/loader.py +++ b/dbt/loader.py @@ -28,7 +28,7 @@ def load_all(cls, root_project, all_projects): @classmethod def register(cls, loader, subgraph='nodes'): if subgraph not in ['nodes', 'macros']: - raise dbt.exceptions.ProgrammingException( + raise dbt.exceptions.InternalException( 'Invalid subgraph type {}, should be "nodes" or "macros"!' .format(subgraph)) diff --git a/dbt/model.py b/dbt/model.py index 57d2d90ce66..e3f8e11e206 100644 --- a/dbt/model.py +++ b/dbt/model.py @@ -1,8 +1,10 @@ import os.path +import dbt.exceptions + from dbt.compat import basestring -from dbt.utils import split_path, deep_merge, DBTConfigKeys, compiler_error +from dbt.utils import split_path, deep_merge, DBTConfigKeys class SourceConfig(object): @@ -101,9 +103,8 @@ def __get_hooks(self, relevant_configs, key): for hook in new_hooks: if not isinstance(hook, basestring): name = ".".join(self.fqn) - compiler_error(None, "{} for model {} is not a string!".format( - key, name - )) + dbt.exceptions.raise_compiler_error( + "{} for model {} is not a string!".format(key, name)) hooks.append(hook) return hooks diff --git a/dbt/node_runners.py b/dbt/node_runners.py index fddb8d54ea3..d7614c431e8 100644 --- a/dbt/node_runners.py +++ b/dbt/node_runners.py @@ -83,8 +83,7 @@ def is_ephemeral(self): def safe_run(self, flat_graph, existing): catchable_errors = (dbt.exceptions.CompilationException, - dbt.exceptions.RuntimeException, - dbt.exceptions.ProgrammingException) + dbt.exceptions.RuntimeException) result = RunModelResult(self.node) started = time.time() @@ -100,7 +99,10 @@ def safe_run(self, flat_graph, existing): result = self.run(compiled_node, existing, flat_graph) except catchable_errors as e: - result.error = str(e).strip() + if e.node is None: + e.node = result.node + + result.error = dbt.compat.to_string(e) result.status = 'ERROR' except dbt.exceptions.InternalException as e: @@ -113,7 +115,7 @@ def safe_run(self, flat_graph, existing): note=INTERNAL_ERROR_STRING) logger.debug(error) - result.error = str(e).strip() + result.error = dbt.compat.to_string(e) result.status = 'ERROR' except Exception as e: @@ -299,10 +301,16 @@ def print_results_line(cls, results, execution_time): nodes = [r.node for r in results] stat_line = dbt.ui.printer.get_counts(nodes) + execution = "" + + if execution_time is not None: + execution = " in 
{execution_time:0.2f}s".format( + execution_time=execution_time) + dbt.ui.printer.print_timestamped_line("") dbt.ui.printer.print_timestamped_line( - "Finished running {stat_line} in {execution_time:0.2f}s." - .format(stat_line=stat_line, execution_time=execution_time)) + "Finished running {stat_line}{execution}." + .format(stat_line=stat_line, execution=execution)) @classmethod def after_run(cls, project, adapter, results, flat_graph, elapsed): diff --git a/dbt/parser.py b/dbt/parser.py index 6ba6ad9063c..62a9c1470d9 100644 --- a/dbt/parser.py +++ b/dbt/parser.py @@ -135,8 +135,8 @@ def parse_macro_file(macro_file_path, context = {} base_node = { - 'name': 'macro', 'path': macro_file_path, + 'original_file_path': macro_file_path, 'resource_type': NodeType.Macro, 'package_name': package_name, 'depends_on': { @@ -144,8 +144,12 @@ def parse_macro_file(macro_file_path, } } - template = dbt.clients.jinja.get_template( - macro_file_contents, context, node=base_node) + try: + template = dbt.clients.jinja.get_template( + macro_file_contents, context, node=base_node) + except dbt.exceptions.CompilationException as e: + e.node = base_node + raise e for key, item in template.module.__dict__.items(): if type(item) == jinja2.runtime.Macro: @@ -162,6 +166,7 @@ def parse_macro_file(macro_file_path, 'tags': tags, 'root_path': root_path, 'path': macro_file_path, + 'original_file_path': macro_file_path, 'raw_sql': macro_file_contents, }) @@ -286,11 +291,16 @@ def load_and_parse_sql(package_name, root_project, all_projects, root_dir, else: path = file_match.get('relative_path') + original_file_path = os.path.join( + file_match.get('searched_path'), + path) + result.append({ 'name': name, 'root_path': root_dir, 'resource_type': resource_type, 'path': path, + 'original_file_path': original_file_path, 'package_name': package_name, 'raw_sql': file_contents }) @@ -337,6 +347,7 @@ def load_and_parse_run_hook_type(root_project, all_projects, hook_type): 'root_path': "{}/dbt_project.yml".format(project_name), 'resource_type': NodeType.Operation, 'path': hook_path, + 'original_file_path': hook_path, 'package_name': project_name, 'raw_sql': hooks }) @@ -503,6 +514,7 @@ def parse_schema_test(test_base, model_name, test_config, test_type, 'package_name': test_base.get('package_name'), 'root_path': test_base.get('root_path'), 'path': pseudo_path, + 'original_file_path': test_base.get('original_file_path'), 'raw_sql': raw_sql } @@ -534,6 +546,9 @@ def load_and_parse_yml(package_name, root_project, all_projects, root_dir, file_contents = dbt.clients.system.load_file_contents( file_match.get('absolute_path'), strip=False) + original_file_path = os.path.join(file_match.get('searched_path'), + file_match.get('relative_path')) + parts = dbt.utils.split_path(file_match.get('relative_path', '')) name, _ = os.path.splitext(parts[-1]) @@ -542,6 +557,7 @@ def load_and_parse_yml(package_name, root_project, all_projects, root_dir, 'root_path': root_dir, 'resource_type': NodeType.Test, 'path': file_match.get('relative_path'), + 'original_file_path': original_file_path, 'package_name': package_name, 'raw_yml': file_contents }) @@ -592,6 +608,7 @@ def parse_archives_from_project(project): 'root_path': project.get('project-root'), 'resource_type': NodeType.Archive, 'path': os.path.join('archive', *fake_path), + 'original_file_path': 'dbt_project.yml', 'package_name': project.get('name'), 'config': config, 'raw_sql': '{{config(materialized="archive")}} -- noop' diff --git a/dbt/runner.py b/dbt/runner.py index daf3bd3fc7a..c3ba00c76d0 
100644 --- a/dbt/runner.py +++ b/dbt/runner.py @@ -88,8 +88,6 @@ def call_runner(self, data): if result.errored and runner.raise_on_first_error(): raise dbt.exceptions.RuntimeException(result.error) - elif result.errored: - logger.info(result.error) return result @@ -162,6 +160,9 @@ def execute_nodes(self, linker, Runner, flat_graph, node_dependency_list): for conn_name in adapter.cancel_open_connections(profile): dbt.ui.printer.print_cancel_line(conn_name, schema_name) + dbt.ui.printer.print_run_end_messages(node_results, + early_exit=True) + pool.join() raise diff --git a/dbt/runtime.py b/dbt/runtime.py deleted file mode 100644 index 29e16b906f8..00000000000 --- a/dbt/runtime.py +++ /dev/null @@ -1,39 +0,0 @@ -from dbt.utils import compiler_error - - -class RuntimeContext(dict): - def __init__(self, model=None, *args, **kwargs): - super(RuntimeContext, self).__init__(*args, **kwargs) - - self.model = model - - def __getattr__(self, attr): - if attr in self: - return self.get(attr) - else: - compiler_error(self.model, "'{}' is undefined".format(attr)) - - def __setattr__(self, key, value): - self.__setitem__(key, value) - - def __setitem__(self, key, value): - super(RuntimeContext, self).__setitem__(key, value) - self.__dict__.update({key: value}) - - def __delattr__(self, item): - self.__delitem__(item) - - def __delitem__(self, key): - super(RuntimeContext, self).__delitem__(key) - del self.__dict__[key] - - def update_global(self, data): - self.update(data) - - def update_package(self, pkg_name, data): - if pkg_name not in self: - ctx = RuntimeContext(model=self.model) - - self[pkg_name] = ctx - - self[pkg_name].update(data) diff --git a/dbt/ui/printer.py b/dbt/ui/printer.py index 09a05d4d7c5..ea0de0bf9b3 100644 --- a/dbt/ui/printer.py +++ b/dbt/ui/printer.py @@ -205,35 +205,45 @@ def print_run_status_line(results): def print_run_result_error(result): - node = result.node + logger.info("") if result.failed: - status = 'FAIL {}'.format(result.status) - else: - status = result.status - - msg = " - {status} in {type} {package_name}.{node_name} ({path})".format( - status=red(status), - type=node.get('resource_type'), - package_name=node.get('package_name'), - node_name=node.get('name'), - path=node.get('build_path') - ) - logger.info(msg) + logger.info(yellow("Failure in {} {} ({})").format( + result.node.get('resource_type'), + result.node.get('name'), + result.node.get('original_file_path'))) + logger.info(" Got {} results, expected 0.".format(result.status)) + if result.node.get('build_path') is not None: + logger.info("") + logger.info(" compiled SQL at {}".format( + result.node.get('build_path'))) -def print_end_of_run_summary(num_errors): - if num_errors > 0: + else: + first = True + for line in result.error.split("\n"): + if first: + logger.info(yellow(line)) + first = False + else: + logger.info(line) + + +def print_end_of_run_summary(num_errors, early_exit=False): + if early_exit: + message = yellow('Exited because of keyboard interrupt.') + elif num_errors > 0: message = red('Completed with {} errors:'.format(num_errors)) else: message = green('Completed successfully') - logger.info('\n{}'.format(message)) + logger.info('') + logger.info('{}'.format(message)) -def print_run_end_messages(results): +def print_run_end_messages(results, early_exit=False): errors = [r for r in results if r.errored or r.failed] - print_end_of_run_summary(len(errors)) + print_end_of_run_summary(len(errors), early_exit) for error in errors: print_run_result_error(error) diff --git a/dbt/utils.py 
b/dbt/utils.py index 8ff65b2494c..c3bfd3764c3 100644 --- a/dbt/utils.py +++ b/dbt/utils.py @@ -56,14 +56,6 @@ def get_model_name_or_none(model): return name -def compiler_error(model, msg): - name = get_model_name_or_none(model) - raise RuntimeError( - "! Compilation error while compiling model {}:\n! {}\n" - .format(name, msg) - ) - - def compiler_warning(model, msg): name = get_model_name_or_none(model) logger.info( diff --git a/requirements.txt b/requirements.txt index 5cf0cdaea34..727cdd539bb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,6 +8,7 @@ csvkit==0.9.1 snowplow-tracker==0.7.2 celery==3.1.23 voluptuous==0.10.5 -snowflake-connector-python==1.3.16 +snowflake-connector-python==1.4.0 colorama==0.3.9 google-cloud-bigquery==0.24.0 +pyasn1==0.2.3 diff --git a/setup.py b/setup.py index 91ae872007f..830685563bc 100644 --- a/setup.py +++ b/setup.py @@ -17,6 +17,7 @@ package_data={ 'dbt': [ 'include/global_project/dbt_project.yml', + 'include/global_project/macros/*.sql', 'include/global_project/macros/**/*.sql', ] }, @@ -39,8 +40,9 @@ 'snowplow-tracker==0.7.2', 'celery==3.1.23', 'voluptuous==0.10.5', - 'snowflake-connector-python==1.3.16', + 'snowflake-connector-python==1.4.0', 'colorama==0.3.9', 'google-cloud-bigquery==0.24.0', + 'pyasn1==0.2.3', ] ) diff --git a/test/integration/023_exit_codes_test/test_exit_codes.py b/test/integration/023_exit_codes_test/test_exit_codes.py index 936df83b942..e3640b7e80c 100644 --- a/test/integration/023_exit_codes_test/test_exit_codes.py +++ b/test/integration/023_exit_codes_test/test_exit_codes.py @@ -1,6 +1,9 @@ from nose.plugins.attrib import attr from test.integration.base import DBTIntegrationTest, FakeArgs +import dbt.exceptions + + class TestExitCodes(DBTIntegrationTest): def setUp(self): @@ -171,7 +174,7 @@ def test_deps(self): try: _, success = self.run_dbt_and_check(['deps']) self.assertTrue(False) - except RuntimeError as e: + except dbt.exceptions.InternalException as e: pass class TestExitCodesSeed(DBTIntegrationTest): diff --git a/test/unit/test_compiler.py b/test/unit/test_compiler.py index db17d3f85cf..8cd991b4f24 100644 --- a/test/unit/test_compiler.py +++ b/test/unit/test_compiler.py @@ -65,6 +65,7 @@ def test__prepend_ctes__already_has_cte(self): 'config': self.model_config, 'tags': set(), 'path': 'view.sql', + 'original_file_path': 'view.sql', 'raw_sql': 'select * from {{ref("ephemeral")}}', 'compiled': True, 'extra_ctes_injected': False, @@ -92,6 +93,7 @@ def test__prepend_ctes__already_has_cte(self): 'config': ephemeral_config, 'tags': set(), 'path': 'ephemeral.sql', + 'original_file_path': 'ephemeral.sql', 'raw_sql': 'select * from source_table', 'compiled': True, 'compiled_sql': 'select * from source_table', @@ -141,6 +143,7 @@ def test__prepend_ctes__no_ctes(self): 'config': self.model_config, 'tags': set(), 'path': 'view.sql', + 'original_file_path': 'view.sql', 'raw_sql': ('with cte as (select * from something_else) ' 'select * from source_table'), 'compiled': True, @@ -166,6 +169,7 @@ def test__prepend_ctes__no_ctes(self): 'config': self.model_config, 'tags': set(), 'path': 'view.sql', + 'original_file_path': 'view.sql', 'raw_sql': 'select * from source_table', 'compiled': True, 'extra_ctes_injected': False, @@ -229,6 +233,7 @@ def test__prepend_ctes(self): 'config': self.model_config, 'tags': set(), 'path': 'view.sql', + 'original_file_path': 'view.sql', 'raw_sql': 'select * from {{ref("ephemeral")}}', 'compiled': True, 'extra_ctes_injected': False, @@ -254,6 +259,7 @@ def test__prepend_ctes(self): 'config': 
ephemeral_config, 'tags': set(), 'path': 'ephemeral.sql', + 'original_file_path': 'ephemeral.sql', 'raw_sql': 'select * from source_table', 'compiled': True, 'extra_ctes_injected': False, @@ -311,6 +317,7 @@ def test__prepend_ctes__multiple_levels(self): 'config': self.model_config, 'tags': set(), 'path': 'view.sql', + 'original_file_path': 'view.sql', 'raw_sql': 'select * from {{ref("ephemeral")}}', 'compiled': True, 'extra_ctes_injected': False, @@ -336,6 +343,7 @@ def test__prepend_ctes__multiple_levels(self): 'config': ephemeral_config, 'tags': set(), 'path': 'ephemeral.sql', + 'original_file_path': 'ephemeral.sql', 'raw_sql': 'select * from {{ref("ephemeral_level_two")}}', 'compiled': True, 'extra_ctes_injected': False, @@ -361,6 +369,7 @@ def test__prepend_ctes__multiple_levels(self): 'config': ephemeral_config, 'tags': set(), 'path': 'ephemeral_level_two.sql', + 'original_file_path': 'ephemeral_level_two.sql', 'raw_sql': 'select * from source_table', 'compiled': True, 'extra_ctes_injected': False, diff --git a/test/unit/test_graph.py b/test/unit/test_graph.py index ecfa34ec044..3ae465cafb9 100644 --- a/test/unit/test_graph.py +++ b/test/unit/test_graph.py @@ -219,25 +219,6 @@ def test__model_enabled(self): [('model.test_models_compile.model_one', 'model.test_models_compile.model_two',)]) - def test__model_incremental_without_sql_where_fails(self): - self.use_models({ - 'model_one': 'select * from events' - }) - - cfg = { - "models": { - "materialized": "table", - "test_models_compile": { - "model_one": {"materialized": "incremental"}, - } - } - } - - compiler = self.get_compiler(self.get_project(cfg)) - - with self.assertRaises(dbt.exceptions.CompilationException): - compiler.compile() - def test__model_incremental(self): self.use_models({ 'model_one': 'select * from events' diff --git a/test/unit/test_parser.py b/test/unit/test_parser.py index aa737f00c71..b54970aeb86 100644 --- a/test/unit/test_parser.py +++ b/test/unit/test_parser.py @@ -70,6 +70,7 @@ def test__single_model(self): 'name': 'model_one', 'resource_type': 'model', 'package_name': 'root', + 'original_file_path': 'model_one.sql', 'root_path': get_os_path('/usr/src/app'), 'path': 'model_one.sql', 'raw_sql': ("select * from events"), @@ -89,6 +90,7 @@ def test__single_model(self): 'fqn': ['root', 'model_one'], 'empty': False, 'package_name': 'root', + 'original_file_path': 'model_one.sql', 'root_path': get_os_path('/usr/src/app'), 'refs': [], 'depends_on': { @@ -109,6 +111,7 @@ def test__single_model__nested_configuration(self): 'name': 'model_one', 'resource_type': 'model', 'package_name': 'root', + 'original_file_path': 'nested/path/model_one.sql', 'root_path': get_os_path('/usr/src/app'), 'path': get_os_path('nested/path/model_one.sql'), 'raw_sql': ("select * from events"), @@ -144,6 +147,7 @@ def test__single_model__nested_configuration(self): 'fqn': ['root', 'nested', 'path', 'model_one'], 'empty': False, 'package_name': 'root', + 'original_file_path': 'nested/path/model_one.sql', 'root_path': get_os_path('/usr/src/app'), 'refs': [], 'depends_on': { @@ -165,6 +169,7 @@ def test__empty_model(self): 'resource_type': 'model', 'package_name': 'root', 'path': 'model_one.sql', + 'original_file_path': 'model_one.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': (" "), }] @@ -190,6 +195,7 @@ def test__empty_model(self): 'config': self.model_config, 'tags': set(), 'path': 'model_one.sql', + 'original_file_path': 'model_one.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': self.find_input_by_name( models, 
'model_one').get('raw_sql') @@ -203,6 +209,7 @@ def test__simple_dependency(self): 'resource_type': 'model', 'package_name': 'root', 'path': 'base.sql', + 'original_file_path': 'base.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': 'select * from events' }, { @@ -210,6 +217,7 @@ def test__simple_dependency(self): 'resource_type': 'model', 'package_name': 'root', 'path': 'events_tx.sql', + 'original_file_path': 'events_tx.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': "select * from {{ref('base')}}" }] @@ -236,6 +244,7 @@ def test__simple_dependency(self): 'config': self.model_config, 'tags': set(), 'path': 'base.sql', + 'original_file_path': 'base.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': self.find_input_by_name( models, 'base').get('raw_sql') @@ -255,6 +264,7 @@ def test__simple_dependency(self): 'config': self.model_config, 'tags': set(), 'path': 'events_tx.sql', + 'original_file_path': 'events_tx.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': self.find_input_by_name( models, 'events_tx').get('raw_sql') @@ -268,6 +278,7 @@ def test__multiple_dependencies(self): 'resource_type': 'model', 'package_name': 'root', 'path': 'events.sql', + 'original_file_path': 'events.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': 'select * from base.events', }, { @@ -275,6 +286,7 @@ def test__multiple_dependencies(self): 'resource_type': 'model', 'package_name': 'root', 'path': 'sessions.sql', + 'original_file_path': 'sessions.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': 'select * from base.sessions', }, { @@ -282,6 +294,7 @@ def test__multiple_dependencies(self): 'resource_type': 'model', 'package_name': 'root', 'path': 'events_tx.sql', + 'original_file_path': 'events_tx.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': ("with events as (select * from {{ref('events')}}) " "select * from events"), @@ -290,6 +303,7 @@ def test__multiple_dependencies(self): 'resource_type': 'model', 'package_name': 'root', 'path': 'sessions_tx.sql', + 'original_file_path': 'sessions_tx.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': ("with sessions as (select * from {{ref('sessions')}}) " "select * from sessions"), @@ -298,6 +312,7 @@ def test__multiple_dependencies(self): 'resource_type': 'model', 'package_name': 'root', 'path': 'multi.sql', + 'original_file_path': 'multi.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': ("with s as (select * from {{ref('sessions_tx')}}), " "e as (select * from {{ref('events_tx')}}) " @@ -326,6 +341,7 @@ def test__multiple_dependencies(self): 'config': self.model_config, 'tags': set(), 'path': 'events.sql', + 'original_file_path': 'events.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': self.find_input_by_name( models, 'events').get('raw_sql') @@ -345,6 +361,7 @@ def test__multiple_dependencies(self): 'config': self.model_config, 'tags': set(), 'path': 'sessions.sql', + 'original_file_path': 'sessions.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': self.find_input_by_name( models, 'sessions').get('raw_sql') @@ -364,6 +381,7 @@ def test__multiple_dependencies(self): 'config': self.model_config, 'tags': set(), 'path': 'events_tx.sql', + 'original_file_path': 'events_tx.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': self.find_input_by_name( models, 'events_tx').get('raw_sql') @@ -383,6 +401,7 @@ def test__multiple_dependencies(self): 'config': self.model_config, 'tags': set(), 'path': 'sessions_tx.sql', + 'original_file_path': 'sessions_tx.sql', 'root_path': 
get_os_path('/usr/src/app'), 'raw_sql': self.find_input_by_name( models, 'sessions_tx').get('raw_sql') @@ -402,6 +421,7 @@ def test__multiple_dependencies(self): 'config': self.model_config, 'tags': set(), 'path': 'multi.sql', + 'original_file_path': 'multi.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': self.find_input_by_name( models, 'multi').get('raw_sql') @@ -415,6 +435,7 @@ def test__multiple_dependencies__packages(self): 'resource_type': 'model', 'package_name': 'snowplow', 'path': 'events.sql', + 'original_file_path': 'events.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': 'select * from base.events', }, { @@ -422,6 +443,7 @@ def test__multiple_dependencies__packages(self): 'resource_type': 'model', 'package_name': 'snowplow', 'path': 'sessions.sql', + 'original_file_path': 'sessions.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': 'select * from base.sessions', }, { @@ -429,6 +451,7 @@ def test__multiple_dependencies__packages(self): 'resource_type': 'model', 'package_name': 'snowplow', 'path': 'events_tx.sql', + 'original_file_path': 'events_tx.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': ("with events as (select * from {{ref('events')}}) " "select * from events"), @@ -437,6 +460,7 @@ def test__multiple_dependencies__packages(self): 'resource_type': 'model', 'package_name': 'snowplow', 'path': 'sessions_tx.sql', + 'original_file_path': 'sessions_tx.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': ("with sessions as (select * from {{ref('sessions')}}) " "select * from sessions"), @@ -445,6 +469,7 @@ def test__multiple_dependencies__packages(self): 'resource_type': 'model', 'package_name': 'root', 'path': 'multi.sql', + 'original_file_path': 'multi.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': ("with s as " "(select * from {{ref('snowplow', 'sessions_tx')}}), " @@ -475,6 +500,7 @@ def test__multiple_dependencies__packages(self): 'config': self.model_config, 'tags': set(), 'path': 'events.sql', + 'original_file_path': 'events.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': self.find_input_by_name( models, 'events').get('raw_sql') @@ -494,6 +520,7 @@ def test__multiple_dependencies__packages(self): 'config': self.model_config, 'tags': set(), 'path': 'sessions.sql', + 'original_file_path': 'sessions.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': self.find_input_by_name( models, 'sessions').get('raw_sql') @@ -513,6 +540,7 @@ def test__multiple_dependencies__packages(self): 'config': self.model_config, 'tags': set(), 'path': 'events_tx.sql', + 'original_file_path': 'events_tx.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': self.find_input_by_name( models, 'events_tx').get('raw_sql') @@ -532,6 +560,7 @@ def test__multiple_dependencies__packages(self): 'config': self.model_config, 'tags': set(), 'path': 'sessions_tx.sql', + 'original_file_path': 'sessions_tx.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': self.find_input_by_name( models, 'sessions_tx').get('raw_sql') @@ -552,6 +581,7 @@ def test__multiple_dependencies__packages(self): 'config': self.model_config, 'tags': set(), 'path': 'multi.sql', + 'original_file_path': 'multi.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': self.find_input_by_name( models, 'multi').get('raw_sql') @@ -578,6 +608,7 @@ def test__process_refs__packages(self): 'config': self.disabled_config, 'tags': set(), 'path': 'events.sql', + 'original_file_path': 'events.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': 'does not matter' 
}, @@ -596,6 +627,7 @@ def test__process_refs__packages(self): 'config': self.model_config, 'tags': set(), 'path': 'events.sql', + 'original_file_path': 'events.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': 'does not matter' }, @@ -614,6 +646,7 @@ def test__process_refs__packages(self): 'config': self.model_config, 'tags': set(), 'path': 'multi.sql', + 'original_file_path': 'multi.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': 'does not matter' } @@ -640,6 +673,7 @@ def test__process_refs__packages(self): 'config': self.disabled_config, 'tags': set(), 'path': 'events.sql', + 'original_file_path': 'events.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': 'does not matter' }, @@ -658,6 +692,7 @@ def test__process_refs__packages(self): 'config': self.model_config, 'tags': set(), 'path': 'events.sql', + 'original_file_path': 'events.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': 'does not matter' }, @@ -676,6 +711,7 @@ def test__process_refs__packages(self): 'config': self.model_config, 'tags': set(), 'path': 'multi.sql', + 'original_file_path': 'multi.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': 'does not matter' } @@ -689,6 +725,7 @@ def test__in_model_config(self): 'resource_type': 'model', 'package_name': 'root', 'path': 'model_one.sql', + 'original_file_path': 'model_one.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': ("{{config({'materialized':'table'})}}" "select * from events"), @@ -721,6 +758,7 @@ def test__in_model_config(self): 'tags': set(), 'root_path': get_os_path('/usr/src/app'), 'path': 'model_one.sql', + 'original_file_path': 'model_one.sql', 'raw_sql': self.find_input_by_name( models, 'model_one').get('raw_sql') } @@ -742,6 +780,7 @@ def test__root_project_config(self): 'resource_type': 'model', 'package_name': 'root', 'path': 'table.sql', + 'original_file_path': 'table.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': ("{{config({'materialized':'table'})}}" "select * from events"), @@ -750,6 +789,7 @@ def test__root_project_config(self): 'resource_type': 'model', 'package_name': 'root', 'path': 'ephemeral.sql', + 'original_file_path': 'ephemeral.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': ("select * from events"), }, { @@ -757,6 +797,7 @@ def test__root_project_config(self): 'resource_type': 'model', 'package_name': 'root', 'path': 'view.sql', + 'original_file_path': 'view.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': ("select * from events"), }] @@ -795,6 +836,7 @@ def test__root_project_config(self): 'macros': [] }, 'path': 'table.sql', + 'original_file_path': 'table.sql', 'config': self.model_config, 'tags': set(), 'root_path': get_os_path('/usr/src/app'), @@ -814,6 +856,7 @@ def test__root_project_config(self): 'macros': [] }, 'path': 'ephemeral.sql', + 'original_file_path': 'ephemeral.sql', 'config': ephemeral_config, 'tags': set(), 'root_path': get_os_path('/usr/src/app'), @@ -833,6 +876,7 @@ def test__root_project_config(self): 'macros': [] }, 'path': 'view.sql', + 'original_file_path': 'view.sql', 'root_path': get_os_path('/usr/src/app'), 'config': view_config, 'tags': set(), @@ -881,6 +925,7 @@ def test__other_project_config(self): 'resource_type': 'model', 'package_name': 'root', 'path': 'table.sql', + 'original_file_path': 'table.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': ("{{config({'materialized':'table'})}}" "select * from events"), @@ -889,6 +934,7 @@ def test__other_project_config(self): 'resource_type': 'model', 'package_name': 'root', 
'path': 'ephemeral.sql', + 'original_file_path': 'ephemeral.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': ("select * from events"), }, { @@ -896,6 +942,7 @@ def test__other_project_config(self): 'resource_type': 'model', 'package_name': 'root', 'path': 'view.sql', + 'original_file_path': 'view.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': ("select * from events"), }, { @@ -903,6 +950,7 @@ def test__other_project_config(self): 'resource_type': 'model', 'package_name': 'snowplow', 'path': 'disabled.sql', + 'original_file_path': 'disabled.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': ("select * from events"), }, { @@ -910,6 +958,7 @@ def test__other_project_config(self): 'resource_type': 'model', 'package_name': 'snowplow', 'path': get_os_path('views/package.sql'), + 'original_file_path': get_os_path('views/package.sql'), 'root_path': get_os_path('/usr/src/app'), 'raw_sql': ("select * from events"), }, { @@ -917,6 +966,7 @@ def test__other_project_config(self): 'resource_type': 'model', 'package_name': 'snowplow', 'path': get_os_path('views/multi_sort.sql'), + 'original_file_path': get_os_path('views/multi_sort.sql'), 'root_path': get_os_path('/usr/src/app'), 'raw_sql': ("select * from events"), }] @@ -974,6 +1024,7 @@ def test__other_project_config(self): 'macros': [] }, 'path': 'table.sql', + 'original_file_path': 'table.sql', 'root_path': get_os_path('/usr/src/app'), 'config': self.model_config, 'tags': set(), @@ -993,6 +1044,7 @@ def test__other_project_config(self): 'macros': [] }, 'path': 'ephemeral.sql', + 'original_file_path': 'ephemeral.sql', 'root_path': get_os_path('/usr/src/app'), 'config': ephemeral_config, 'tags': set(), @@ -1012,6 +1064,7 @@ def test__other_project_config(self): 'macros': [] }, 'path': 'view.sql', + 'original_file_path': 'view.sql', 'root_path': get_os_path('/usr/src/app'), 'config': view_config, 'tags': set(), @@ -1031,6 +1084,7 @@ def test__other_project_config(self): 'macros': [] }, 'path': 'disabled.sql', + 'original_file_path': 'disabled.sql', 'root_path': get_os_path('/usr/src/app'), 'config': disabled_config, 'tags': set(), @@ -1050,6 +1104,7 @@ def test__other_project_config(self): 'macros': [] }, 'path': get_os_path('views/package.sql'), + 'original_file_path': get_os_path('views/package.sql'), 'root_path': get_os_path('/usr/src/app'), 'config': sort_config, 'tags': set(), @@ -1069,6 +1124,7 @@ def test__other_project_config(self): 'macros': [] }, 'path': get_os_path('views/multi_sort.sql'), + 'original_file_path': get_os_path('views/multi_sort.sql'), 'root_path': get_os_path('/usr/src/app'), 'config': multi_sort_config, 'tags': set(), @@ -1084,6 +1140,7 @@ def test__simple_schema_test(self): 'resource_type': 'test', 'package_name': 'root', 'root_path': get_os_path('/usr/src/app'), + 'original_file_path': 'test_one.yml', 'path': 'test_one.yml', 'raw_sql': None, 'raw_yml': ('{model_one: {constraints: {not_null: [id],' @@ -1119,6 +1176,7 @@ def test__simple_schema_test(self): 'macros': [] }, 'config': self.model_config, + 'original_file_path': 'test_one.yml', 'path': get_os_path( 'schema_test/not_null_model_one_id.sql'), 'tags': set(['schema']), @@ -1138,6 +1196,7 @@ def test__simple_schema_test(self): 'macros': [] }, 'config': self.model_config, + 'original_file_path': 'test_one.yml', 'path': get_os_path('schema_test/unique_model_one_id.sql'), 'tags': set(['schema']), 'raw_sql': unique_sql, @@ -1150,6 +1209,7 @@ def test__simple_schema_test(self): 'accepted_values_model_one_id__a__b'], 'empty': False, 
'package_name': 'root', + 'original_file_path': 'test_one.yml', 'root_path': get_os_path('/usr/src/app'), 'refs': [('model_one',)], 'depends_on': { @@ -1170,6 +1230,7 @@ def test__simple_schema_test(self): 'relationships_model_one_id__id__ref_model_two_'], 'empty': False, 'package_name': 'root', + 'original_file_path': 'test_one.yml', 'root_path': get_os_path('/usr/src/app'), 'refs': [('model_one',), ('model_two',)], 'depends_on': { @@ -1240,6 +1301,7 @@ def test__simple_data_test(self): 'resource_type': 'test', 'package_name': 'root', 'path': 'no_events.sql', + 'original_file_path': 'no_events.sql', 'root_path': get_os_path('/usr/src/app'), 'raw_sql': "select * from {{ref('base')}}" }] @@ -1265,6 +1327,7 @@ def test__simple_data_test(self): }, 'config': self.model_config, 'path': 'no_events.sql', + 'original_file_path': 'no_events.sql', 'root_path': get_os_path('/usr/src/app'), 'tags': set(), 'raw_sql': self.find_input_by_name( @@ -1301,6 +1364,7 @@ def test__simple_macro(self): 'depends_on': { 'macros': [] }, + 'original_file_path': 'simple_macro.sql', 'root_path': get_os_path('/usr/src/app'), 'tags': set(), 'path': 'simple_macro.sql', @@ -1337,6 +1401,7 @@ def test__simple_macro_used_in_model(self): 'depends_on': { 'macros': [] }, + 'original_file_path': 'simple_macro.sql', 'root_path': get_os_path('/usr/src/app'), 'tags': set(), 'path': 'simple_macro.sql', @@ -1349,6 +1414,7 @@ def test__simple_macro_used_in_model(self): 'name': 'model_one', 'resource_type': 'model', 'package_name': 'root', + 'original_file_path': 'model_one.sql', 'root_path': get_os_path('/usr/src/app'), 'path': 'model_one.sql', 'raw_sql': ("select *, {{package.simple(1, 2)}} from events"), @@ -1368,6 +1434,7 @@ def test__simple_macro_used_in_model(self): 'fqn': ['root', 'model_one'], 'empty': False, 'package_name': 'root', + 'original_file_path': 'model_one.sql', 'root_path': get_os_path('/usr/src/app'), 'refs': [], 'depends_on': { @@ -1390,6 +1457,7 @@ def test__macro_no_explicit_project_used_in_model(self): 'package_name': 'root', 'root_path': get_os_path('/usr/src/app'), 'path': 'model_one.sql', + 'original_file_path': 'model_one.sql', 'raw_sql': ("select *, {{ simple(1, 2) }} from events"), }] @@ -1416,6 +1484,7 @@ def test__macro_no_explicit_project_used_in_model(self): 'config': self.model_config, 'tags': set(), 'path': 'model_one.sql', + 'original_file_path': 'model_one.sql', 'raw_sql': self.find_input_by_name( models, 'model_one').get('raw_sql') }
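
Below is a minimal, self-contained sketch of the node-aware exception pattern this diff introduces in dbt/exceptions.py, assuming the class bodies shown above; the node dict used at the end is hypothetical and exists only to show the formatted output.

class RuntimeException(RuntimeError, Exception):
    def __init__(self, msg, node=None):
        self.stack = []   # macro frames appended as the error propagates upward
        self.node = node  # parsed-node dict the error is attributed to
        self.msg = msg

    @property
    def type(self):
        return 'Runtime'

    def node_to_string(self, node):
        return "{} {} ({})".format(
            node.get('resource_type'),
            node.get('name', 'unknown'),
            node.get('original_file_path'))

    def process_stack(self):
        lines = []
        stack = self.stack + [self.node]
        first = True
        if len(stack) > 1:
            lines.append("")
        for item in stack:
            msg = 'in' if first else 'called by'
            first = False
            lines.append("> {} {}".format(msg, self.node_to_string(item)))
        return lines

    def __str__(self):
        node_string = ""
        if self.node is not None:
            node_string = " in {}".format(self.node_to_string(self.node))
        lines = ["{}{}".format(self.type + ' Error', node_string)]
        lines += self.msg.split("\n")
        lines += self.process_stack()
        return lines[0] + "\n" + "\n".join("  " + line for line in lines[1:])


class CompilationException(RuntimeException):
    @property
    def type(self):
        return 'Compilation'


def raise_compiler_error(msg, node=None):
    raise CompilationException(msg, node)


if __name__ == '__main__':
    # Hypothetical parsed node, shaped like the dicts in test_parser.py above.
    node = {'resource_type': 'model', 'name': 'model_one',
            'original_file_path': 'model_one.sql'}
    try:
        raise_compiler_error("ref() takes at most two arguments (3 given)", node)
    except CompilationException as e:
        print(e)
        # Compilation Error in model model_one (model_one.sql)
        #   ref() takes at most two arguments (3 given)
        #   > in model model_one (model_one.sql)

Because the subclasses mostly override only `type` (and, for DatabaseException in the diff, `process_stack` to append the compiled SQL path), the single `__str__` implementation produces the consistent "Compilation Error" / "Database Error" / "Runtime Error" headers that dbt/ui/printer.py prints.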