From e2cd45b18a4edd28d912acbc7a1dc03e88f3b1d6 Mon Sep 17 00:00:00 2001 From: Jacob Beck Date: Tue, 19 May 2020 11:01:36 -0600 Subject: [PATCH] Fix some issues around global-scoped vars Fix global-level var rewriting from v2 -> v1 Add top-level vars to more lookup fields When a user calls var() in a `generate_*_name` macro, on failure it now raises (like at runtime) When a node has no FQN, pretend the FQN is just the package name when returning vars Add tests --- CHANGELOG.md | 1 + core/dbt/config/project.py | 55 +++++++----- core/dbt/config/runtime.py | 4 +- core/dbt/context/configured.py | 26 ++++-- core/dbt/context/providers.py | 34 +++++-- core/dbt/parser/base.py | 17 ++-- .../bad-generate-macros/generate_names.sql | 4 + .../dependency-data/root_model_expected.csv | 2 + .../dependency-models/inside/model.sql | 3 + .../data/first_dep_expected.csv | 2 + .../first_dependency/dbt_project.yml | 27 ++++++ .../models/nested/first_dep_model.sql | 3 + .../data/second_dep_expected.csv | 2 + .../second_dependency_v1/dbt_project.yml | 43 +++++++++ .../models/inner/second_dep_model.sql | 3 + .../test_context_vars.py | 89 +++++++++++++++++++ .../trivial-models/model.sql | 1 + test/unit/test_config.py | 80 +++++++++++++++++ 18 files changed, 350 insertions(+), 46 deletions(-) create mode 100644 test/integration/013_context_var_tests/bad-generate-macros/generate_names.sql create mode 100644 test/integration/013_context_var_tests/dependency-data/root_model_expected.csv create mode 100644 test/integration/013_context_var_tests/dependency-models/inside/model.sql create mode 100644 test/integration/013_context_var_tests/first_dependency/data/first_dep_expected.csv create mode 100644 test/integration/013_context_var_tests/first_dependency/dbt_project.yml create mode 100644 test/integration/013_context_var_tests/first_dependency/models/nested/first_dep_model.sql create mode 100644 test/integration/013_context_var_tests/second_dependency_v1/data/second_dep_expected.csv create mode 
100644 test/integration/013_context_var_tests/second_dependency_v1/dbt_project.yml create mode 100644 test/integration/013_context_var_tests/second_dependency_v1/models/inner/second_dep_model.sql create mode 100644 test/integration/013_context_var_tests/trivial-models/model.sql diff --git a/CHANGELOG.md b/CHANGELOG.md index 94e8d01e707..e183042a8da 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ - dbt now logs using the adapter plugin's ideas about how relations should be displayed ([dbt-spark/#74](https://github.com/fishtown-analytics/dbt-spark/issues/74), [#2450](https://github.com/fishtown-analytics/dbt/pull/2450)) - The create_adapter_plugin.py script creates a version 2 dbt_project.yml file ([#2451](https://github.com/fishtown-analytics/dbt/issues/2451), [#2455](https://github.com/fishtown-analytics/dbt/pull/2455)) - Fixed dbt crashing with an AttributeError on duplicate sources ([#2463](https://github.com/fishtown-analytics/dbt/issues/2463), [#2464](https://github.com/fishtown-analytics/dbt/pull/2464)) +- Fixed a number of issues with globally-scoped vars ([#2473](https://github.com/fishtown-analytics/dbt/issues/2473), [#2472](https://github.com/fishtown-analytics/dbt/issues/2472), [#2469](https://github.com/fishtown-analytics/dbt/issues/2469), [#2477](https://github.com/fishtown-analytics/dbt/pull/2477)) - Fixed DBT Docker entrypoint ([#2470](https://github.com/fishtown-analytics/dbt/issues/2470), [#2475](https://github.com/fishtown-analytics/dbt/pull/2475)) Contributors: diff --git a/core/dbt/config/project.py b/core/dbt/config/project.py index 157546093db..7bf61bcc1c8 100644 --- a/core/dbt/config/project.py +++ b/core/dbt/config/project.py @@ -2,7 +2,8 @@ from dataclasses import dataclass, field from itertools import chain from typing import ( - List, Dict, Any, Optional, TypeVar, Union, Tuple, Callable, Mapping + List, Dict, Any, Optional, TypeVar, Union, 
Tuple, Callable, Mapping, + Iterable, Set ) from typing_extensions import Protocol @@ -274,10 +275,8 @@ def vars_for( self, node: IsFQNResource, adapter_type: str ) -> Mapping[str, Any]: # in v2, vars are only either project or globally scoped - merged = MultiDict([self.vars]) - if node.package_name in self.vars: - merged.add(self.vars.get(node.package_name, {})) + merged.add(self.vars.get(node.package_name, {})) return merged def to_dict(self): @@ -634,7 +633,7 @@ def validate_version(self): ) raise DbtProjectError(msg) - def as_v1(self): + def as_v1(self, all_projects: Iterable[str]): if self.config_version == 1: return self @@ -647,21 +646,7 @@ def as_v1(self): common_config_keys = ['models', 'seeds', 'snapshots'] if 'vars' in dct and isinstance(dct['vars'], dict): - # stuff any 'vars' entries into the old-style - # models/seeds/snapshots dicts - for project_name, items in dct['vars'].items(): - if not isinstance(items, dict): - # can't translate top-level vars - continue - for cfgkey in ['models', 'seeds', 'snapshots']: - if project_name not in mutated[cfgkey]: - mutated[cfgkey][project_name] = {} - project_type_cfg = mutated[cfgkey][project_name] - if 'vars' not in project_type_cfg: - project_type_cfg['vars'] = {} - mutated[cfgkey][project_name]['vars'].update(items) - # remove this from the v1 form - mutated.pop('vars') + v2_vars_to_v1(mutated, dct['vars'], set(all_projects)) # ok, now we want to look through all the existing cfgkeys and mirror # it, except expand the '+' prefix. 
for cfgkey in common_config_keys: @@ -675,6 +660,36 @@ def as_v1(self): return project +def v2_vars_to_v1( + dst: Dict[str, Any], src_vars: Dict[str, Any], project_names: Set[str] +) -> None: + # stuff any 'vars' entries into the old-style + # models/seeds/snapshots dicts + common_config_keys = ['models', 'seeds', 'snapshots'] + for project_name in project_names: + for cfgkey in common_config_keys: + if cfgkey not in dst: + dst[cfgkey] = {} + if project_name not in dst[cfgkey]: + dst[cfgkey][project_name] = {} + project_type_cfg = dst[cfgkey][project_name] + + if 'vars' not in project_type_cfg: + project_type_cfg['vars'] = {} + project_type_vars = project_type_cfg['vars'] + + project_type_vars.update({ + k: v for k, v in src_vars.items() + if not isinstance(v, dict) + }) + + items = src_vars.get(project_name, None) + if isinstance(items, dict): + project_type_vars.update(items) + # remove this from the v1 form + dst.pop('vars') + + def _flatten_config(dct: Dict[str, Any]): result = {} for key, value in dct.items(): diff --git a/core/dbt/config/runtime.py b/core/dbt/config/runtime.py index 698c461a1eb..b5854322d3c 100644 --- a/core/dbt/config/runtime.py +++ b/core/dbt/config/runtime.py @@ -360,12 +360,12 @@ def _get_project_directories(self) -> Iterator[Path]: if path.is_dir() and not path.name.startswith('__'): yield path - def as_v1(self): + def as_v1(self, all_projects: Iterable[str]): if self.config_version == 1: return self return self.from_parts( - project=Project.as_v1(self), + project=Project.as_v1(self, all_projects), profile=self, args=self.args, dependencies=self.dependencies, diff --git a/core/dbt/context/configured.py b/core/dbt/context/configured.py index 7862fbdb655..a0a1510fdf8 100644 --- a/core/dbt/context/configured.py +++ b/core/dbt/context/configured.py @@ -6,6 +6,8 @@ from dbt.contracts.graph.parsed import ParsedMacro from dbt.include.global_project import PACKAGES from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME +from 
dbt.node_types import NodeType +from dbt.utils import MultiDict from dbt.context.base import contextproperty, Var from dbt.context.target import TargetContext @@ -25,6 +27,13 @@ def project_name(self) -> str: return self.config.project_name +class FQNLookup: + def __init__(self, package_name: str): + self.package_name = package_name + self.fqn = [package_name] + self.resource_type = NodeType.Model + + class ConfiguredVar(Var): def __init__( self, @@ -44,17 +53,16 @@ def __call__(self, var_name, default=Var._VAR_NOTSET): return self.config.cli_vars[var_name] if self.config.config_version == 2 and my_config.config_version == 2: - - active_vars = self.config.vars.to_dict() - active_vars = active_vars.get(self.project_name, {}) - if var_name in active_vars: - return active_vars[var_name] + adapter_type = self.config.credentials.type + lookup = FQNLookup(self.project_name) + active_vars = self.config.vars.vars_for(lookup, adapter_type) + all_vars = MultiDict([active_vars]) if self.config.project_name != my_config.project_name: - config_vars = my_config.vars.to_dict() - config_vars = config_vars.get(self.project_name, {}) - if var_name in config_vars: - return config_vars[var_name] + all_vars.add(my_config.vars.vars_for(lookup, adapter_type)) + + if var_name in all_vars: + return all_vars[var_name] if default is not Var._VAR_NOTSET: return default diff --git a/core/dbt/context/providers.py b/core/dbt/context/providers.py index 8b235feef62..0f8bf9a066d 100644 --- a/core/dbt/context/providers.py +++ b/core/dbt/context/providers.py @@ -15,14 +15,14 @@ from dbt.context.base import ( contextmember, contextproperty, Var ) -from dbt.context.configured import ManifestContext, MacroNamespace +from dbt.context.configured import ManifestContext, MacroNamespace, FQNLookup from dbt.context.context_config import ContextConfigType from dbt.contracts.graph.manifest import Manifest, Disabled from dbt.contracts.graph.compiled import ( - NonSourceNode, CompiledSeedNode, CompiledResource, 
CompiledNode + NonSourceNode, CompiledSeedNode, CompiledResource ) from dbt.contracts.graph.parsed import ( - ParsedMacro, ParsedSourceDefinition, ParsedSeedNode, ParsedNode + ParsedMacro, ParsedSourceDefinition, ParsedSeedNode ) from dbt.exceptions import ( InternalException, @@ -36,6 +36,7 @@ source_target_not_found, wrapped_exports, ) +from dbt.legacy_config_updater import IsFQNResource from dbt.logger import GLOBAL_LOGGER as logger # noqa from dbt.node_types import NodeType @@ -450,17 +451,17 @@ def packages_for_node(self) -> Iterable[Project]: yield self.config def _generate_merged(self) -> Mapping[str, Any]: - cli_vars = self.config.cli_vars - - # once sources have FQNs, add ParsedSourceDefinition - if not isinstance(self.node, (CompiledNode, ParsedNode)): - return cli_vars + search_node: IsFQNResource + if hasattr(self.node, 'fqn'): + search_node = self.node + else: + search_node = FQNLookup(self.node.package_name) adapter_type = self.config.credentials.type merged = MultiDict() for project in self.packages_for_node(): - merged.add(project.vars.vars_for(self.node, adapter_type)) + merged.add(project.vars.vars_for(search_node, adapter_type)) merged.add(self.cli_vars) return merged @@ -494,6 +495,10 @@ class ParseProvider(Provider): source = ParseSourceResolver +class GenerateNameProvider(ParseProvider): + Var = RuntimeVar + + class RuntimeProvider(Provider): execute = True Config = RuntimeConfigObject @@ -1120,6 +1125,17 @@ def generate_parser_macro( return ctx.to_dict() +def generate_generate_component_name_macro( + macro: ParsedMacro, + config: RuntimeConfig, + manifest: Manifest, +) -> Dict[str, Any]: + ctx = MacroContext( + macro, config, manifest, GenerateNameProvider(), None + ) + return ctx.to_dict() + + def generate_runtime_model( model: NonSourceNode, config: RuntimeConfig, diff --git a/core/dbt/parser/base.py b/core/dbt/parser/base.py index 03ed3a8a1ab..748c85172bb 100644 --- a/core/dbt/parser/base.py +++ b/core/dbt/parser/base.py @@ -9,7 +9,10 @@ 
from dbt.clients.jinja import MacroGenerator from dbt.clients.system import load_file_contents -from dbt.context.providers import generate_parser_model, generate_parser_macro +from dbt.context.providers import ( + generate_parser_model, + generate_generate_component_name_macro, +) import dbt.flags from dbt import hooks from dbt.adapters.factory import get_adapter @@ -107,7 +110,9 @@ def __init__( f'No macro with name generate_{component}_name found' ) - root_context = generate_parser_macro(macro, config, manifest, None) + root_context = generate_generate_component_name_macro( + macro, config, manifest + ) self.updater = MacroGenerator(macro, root_context) self.component = component @@ -324,12 +329,12 @@ def initial_config(self, fqn: List[str]) -> ContextConfigType: config_version = min( [self.project.config_version, self.root_project.config_version] ) - # it would be nice to assert that if the main config is v2, the - # dependencies are all v2. or vice-versa. + # grab a list of the existing project names. 
This is for var conversion + all_projects = self.root_project.load_dependencies() if config_version == 1: return LegacyContextConfig( - self.root_project.as_v1(), - self.project.as_v1(), + self.root_project.as_v1(all_projects), + self.project.as_v1(all_projects), fqn, self.resource_type, ) diff --git a/test/integration/013_context_var_tests/bad-generate-macros/generate_names.sql b/test/integration/013_context_var_tests/bad-generate-macros/generate_names.sql new file mode 100644 index 00000000000..77c78702068 --- /dev/null +++ b/test/integration/013_context_var_tests/bad-generate-macros/generate_names.sql @@ -0,0 +1,4 @@ +{% macro generate_schema_name(custom_schema_name, node) -%} + {% do var('somevar') %} + {% do return(dbt.generate_schema_name(custom_schema_name, node)) %} +{%- endmacro %} diff --git a/test/integration/013_context_var_tests/dependency-data/root_model_expected.csv b/test/integration/013_context_var_tests/dependency-data/root_model_expected.csv new file mode 100644 index 00000000000..37ecc3f8122 --- /dev/null +++ b/test/integration/013_context_var_tests/dependency-data/root_model_expected.csv @@ -0,0 +1,2 @@ +first_dep_global,from_root +dep_never_overridden,root_root_value diff --git a/test/integration/013_context_var_tests/dependency-models/inside/model.sql b/test/integration/013_context_var_tests/dependency-models/inside/model.sql new file mode 100644 index 00000000000..cf56838072d --- /dev/null +++ b/test/integration/013_context_var_tests/dependency-models/inside/model.sql @@ -0,0 +1,3 @@ +select + '{{ var("first_dep_override") }}' as first_dep_global, + '{{ var("from_root_to_root") }}' as from_root diff --git a/test/integration/013_context_var_tests/first_dependency/data/first_dep_expected.csv b/test/integration/013_context_var_tests/first_dependency/data/first_dep_expected.csv new file mode 100644 index 00000000000..5537cc39e15 --- /dev/null +++ b/test/integration/013_context_var_tests/first_dependency/data/first_dep_expected.csv @@ -0,0 +1,2 
@@ +first_dep_global,from_root +first_dep_global_value_overridden,root_first_value diff --git a/test/integration/013_context_var_tests/first_dependency/dbt_project.yml b/test/integration/013_context_var_tests/first_dependency/dbt_project.yml new file mode 100644 index 00000000000..7a3f3c77f63 --- /dev/null +++ b/test/integration/013_context_var_tests/first_dependency/dbt_project.yml @@ -0,0 +1,27 @@ + +name: 'first_dep' +version: '1.0' +config-version: 2 + +profile: 'default' + +source-paths: ["models"] +analysis-paths: ["analysis"] +test-paths: ["tests"] +data-paths: ["data"] +macro-paths: ["macros"] + +require-dbt-version: '>=0.1.0' + +target-path: "target" # directory which will store compiled SQL files +clean-targets: # directories to be removed by `dbt clean` + - "target" + - "dbt_modules" + +vars: + first_dep: + first_dep_global: 'first_dep_global_value_overridden' + + +seeds: + quote_columns: False diff --git a/test/integration/013_context_var_tests/first_dependency/models/nested/first_dep_model.sql b/test/integration/013_context_var_tests/first_dependency/models/nested/first_dep_model.sql new file mode 100644 index 00000000000..500f23dfe9a --- /dev/null +++ b/test/integration/013_context_var_tests/first_dependency/models/nested/first_dep_model.sql @@ -0,0 +1,3 @@ +select + '{{ var("first_dep_global") }}' as first_dep_global, + '{{ var("from_root_to_first") }}' as from_root diff --git a/test/integration/013_context_var_tests/second_dependency_v1/data/second_dep_expected.csv b/test/integration/013_context_var_tests/second_dependency_v1/data/second_dep_expected.csv new file mode 100644 index 00000000000..147bae14831 --- /dev/null +++ b/test/integration/013_context_var_tests/second_dependency_v1/data/second_dep_expected.csv @@ -0,0 +1,2 @@ +from_root,from_second +root_second_value,second_to_second_override_value diff --git a/test/integration/013_context_var_tests/second_dependency_v1/dbt_project.yml 
b/test/integration/013_context_var_tests/second_dependency_v1/dbt_project.yml new file mode 100644 index 00000000000..33caf2d8af2 --- /dev/null +++ b/test/integration/013_context_var_tests/second_dependency_v1/dbt_project.yml @@ -0,0 +1,43 @@ + +name: 'second_dep' +version: '1.0' + +profile: 'default' + +source-paths: ["models"] +analysis-paths: ["analysis"] +test-paths: ["tests"] +data-paths: ["data"] +macro-paths: ["macros"] + +require-dbt-version: '>=0.1.0' + +target-path: "target" # directory which will store compiled SQL files +clean-targets: # directories to be removed by `dbt clean` + - "target" + - "dbt_modules" + + +seeds: + quote_columns: False + + +models: + second_dep: + vars: + from_second_to_second: 'never_see_me' + inner: + vars: + from_second_to_second: 'second_to_second_override_value' + first_dep: + vars: + from_second_to_first: 'never_see_me_either' + nested: + vars: + from_second_to_first: 'second_to_first_override_value' + test: + vars: + from_second_to_root: 'also_never_see_me' + inside: + vars: + from_second_to_root: 'second_to_root_override_value' diff --git a/test/integration/013_context_var_tests/second_dependency_v1/models/inner/second_dep_model.sql b/test/integration/013_context_var_tests/second_dependency_v1/models/inner/second_dep_model.sql new file mode 100644 index 00000000000..d7a6f0b7e17 --- /dev/null +++ b/test/integration/013_context_var_tests/second_dependency_v1/models/inner/second_dep_model.sql @@ -0,0 +1,3 @@ +select + '{{ var("from_root_to_second") }}' as from_root, + '{{ var("from_second_to_second") }}' as from_second diff --git a/test/integration/013_context_var_tests/test_context_vars.py b/test/integration/013_context_var_tests/test_context_vars.py index 35c3a10c96d..d5de95fce3a 100644 --- a/test/integration/013_context_var_tests/test_context_vars.py +++ b/test/integration/013_context_var_tests/test_context_vars.py @@ -153,3 +153,92 @@ def test_postgres_warn(self): with pytest.raises(dbt.exceptions.CompilationException): 
self.run_dbt(['run'], strict=True) self.run_dbt(['run'], strict=False, expect_pass=True) + + +class TestVarDependencyInheritance(DBTIntegrationTest): + @property + def schema(self): + return "context_vars_013" + + @property + def models(self): + return 'dependency-models' + + @property + def packages_config(self): + return { + "packages": [ + {'local': 'first_dependency'}, + {'local': 'second_dependency_v1'}, + ] + } + + @property + def project_config(self): + return { + 'config-version': 2, + 'data-paths': ['dependency-data'], + 'vars': { + 'first_dep_override': 'dep_never_overridden', + 'test': { + 'from_root_to_root': 'root_root_value', + }, + 'first_dep': { + 'from_root_to_first': 'root_first_value', + }, + 'second_dep': { + 'from_root_to_second': 'root_second_value', + }, + }, + } + + @use_profile('postgres') + def test_postgres_var_mutual_overrides_v1_conversion(self): + self.run_dbt(['deps'], strict=False) + assert len(self.run_dbt(['seed'], strict=False)) == 3 + assert len(self.run_dbt(['run'], strict=False)) == 3 + self.assertTablesEqual('root_model_expected', 'model') + self.assertTablesEqual('first_dep_expected', 'first_dep_model') + self.assertTablesEqual('second_dep_expected', 'second_dep_model') + + +class TestMissingVarGenerateNameMacro(DBTIntegrationTest): + @property + def schema(self): + return "context_vars_013" + + @property + def models(self): + return 'trivial-models' + + @property + def project_config(self): + return { + 'macro-paths': ['bad-generate-macros'], + } + + @use_profile('postgres') + def test_postgres_generate_schema_name_var(self): + with self.assertRaises(dbt.exceptions.CompilationException) as exc: + self.run_dbt(['compile']) + + assert "Required var 'somevar' not found in config" in str(exc.exception) + + # globally scoped + self.use_default_project({ + 'vars': { + 'somevar': 1, + }, + 'macro-paths': ['bad-generate-macros'], + }) + self.run_dbt(['compile']) + # locally scoped + self.use_default_project({ + 'vars': { + 'test': { 
+ 'somevar': 1, + }, + }, + 'macro-paths': ['bad-generate-macros'], + }) + self.run_dbt(['compile']) diff --git a/test/integration/013_context_var_tests/trivial-models/model.sql b/test/integration/013_context_var_tests/trivial-models/model.sql new file mode 100644 index 00000000000..43258a71464 --- /dev/null +++ b/test/integration/013_context_var_tests/trivial-models/model.sql @@ -0,0 +1 @@ +select 1 as id diff --git a/test/unit/test_config.py b/test/unit/test_config.py index 3b5a589c020..11f8889e133 100644 --- a/test/unit/test_config.py +++ b/test/unit/test_config.py @@ -11,11 +11,13 @@ import dbt.config import dbt.exceptions +from dbt.adapters.factory import load_plugin from dbt.adapters.postgres import PostgresCredentials from dbt.adapters.redshift import RedshiftCredentials from dbt.context.base import generate_base_context from dbt.contracts.connection import QueryComment, DEFAULT_QUERY_COMMENT from dbt.contracts.project import PackageConfig, LocalPackage, GitPackage +from dbt.node_types import NodeType from dbt.semver import VersionSpecifier from dbt.task.run_operation import RunOperationTask @@ -1248,3 +1250,81 @@ def test_cli_and_env_vars(self): self.assertEqual(config.models['bar']['materialized'], 'default') # rendered! self.assertEqual(config.seeds['foo']['post-hook'], "{{ env_var('env_value_profile') }}") self.assertEqual(config.seeds['bar']['materialized'], 'default') # rendered! 
+ + +class TestV2V1Conversion(unittest.TestCase): + def setUp(self): + self.initial_src_vars = { + # globals + 'foo': 123, + 'bar': 'hello', + # project-scoped + 'my_project': { + 'bar': 'goodbye', + 'baz': True, + }, + 'other_project': { + 'foo': 456, + }, + } + self.src_vars = deepcopy(self.initial_src_vars) + self.dst = {'vars': deepcopy(self.initial_src_vars)} + + self.projects = ['my_project', 'other_project', 'third_project'] + load_plugin('postgres') + self.local_var_search = mock.MagicMock(fqn=['my_project', 'my_model'], resource_type=NodeType.Model, package_name='my_project') + self.other_var_search = mock.MagicMock(fqn=['other_project', 'model'], resource_type=NodeType.Model, package_name='other_project') + self.third_var_search = mock.MagicMock(fqn=['third_project', 'third_model'], resource_type=NodeType.Model, package_name='third_project') + + def test_v2_v1_dict(self): + dbt.config.project.v2_vars_to_v1(self.dst, self.src_vars, self.projects) + # make sure the input didn't get mutated. That would be bad! 
+ assert self.src_vars == self.initial_src_vars + # conversion should remove top-level 'vars' + assert 'vars' not in self.dst + + # when we convert, all of models/seeds/snapshots will have the same vars + for key in ['models', 'seeds', 'snapshots']: + assert key in self.dst + for project in self.projects: + assert project in self.dst[key] + assert 'vars' in self.dst[key][project] + if project == 'my_project': + assert self.dst[key][project]['vars'] == { + 'foo': 123, # override + 'bar': 'goodbye', + 'baz': True, # only in my-project + } + elif project == 'other_project': + assert self.dst[key][project]['vars'] == { + 'foo': 456, # override + 'bar': 'hello', + } + elif project == 'third_project': + assert self.dst[key][project]['vars'] == { + 'foo': 123, + 'bar': 'hello', + } + else: + assert False, f'extra project: {project}' + + def test_v2_v1_lookups(self): + dbt.config.project.v2_vars_to_v1(self.dst, self.src_vars, self.projects) + + v1_vars = dbt.config.project.V1VarProvider(**self.dst) + v2_vars = dbt.config.project.V2VarProvider(self.initial_src_vars) + + expected = [ + (self.local_var_search, 'foo', 123), + (self.other_var_search, 'foo', 456), + (self.third_var_search, 'foo', 123), + (self.local_var_search, 'bar', 'goodbye'), + (self.other_var_search, 'bar', 'hello'), + (self.third_var_search, 'bar', 'hello'), + (self.local_var_search, 'baz', True), + (self.other_var_search, 'baz', None), + (self.third_var_search, 'baz', None), + ] + for node, key, expected_value in expected: + assert v1_vars.vars_for(node, 'postgres').get(key) == expected_value + assert v2_vars.vars_for(node, 'postgres').get(key) == expected_value