Skip to content

Commit

Permalink
chore: prefer allow/deny terminology (apache#10320)
Browse files Browse the repository at this point in the history
* chore: prefer allow/deny terminology

* fix tests

* add PR reference
  • Loading branch information
villebro authored and auxten committed Nov 20, 2020
1 parent ee2980e commit ca29221
Show file tree
Hide file tree
Showing 10 changed files with 31 additions and 29 deletions.
2 changes: 2 additions & 0 deletions UPDATING.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,8 @@ assists people when migrating to a new version.

## Next

* [10320](https://github.com/apache/incubator-superset/pull/10320): References to blacklist/whitelist language have been replaced with more appropriate alternatives. All configs referencing `WHITE`/`BLACK` have been renamed to use `ALLOW`/`DENY`. Affected config variables that need to be updated: `TIME_GRAIN_BLACKLIST`, `VIZ_TYPE_BLACKLIST`, `DRUID_DATA_SOURCE_BLACKLIST`.

* [9964](https://github.com/apache/incubator-superset/pull/9964): Breaking change on Flask-AppBuilder 3. If you're using OAuth, find out what needs to be changed [here](https://github.com/dpgaspar/Flask-AppBuilder/blob/master/README.rst#change-log).

* [10233](https://github.com/apache/incubator-superset/pull/10233): a change which deprecates the `ENABLE_FLASK_COMPRESS` config option in favor of the Flask-Compress `COMPRESS_REGISTER` config option which serves the same purpose.
Expand Down
12 changes: 6 additions & 6 deletions superset/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -381,8 +381,8 @@ def _try_json_readsha( # pylint: disable=unused-argument
# List of time grains to disable in the application (see list of builtin
# time grains in superset/db_engine_specs.builtin_time_grains).
# For example: to disable 1 second time grain:
# TIME_GRAIN_BLACKLIST = ['PT1S']
TIME_GRAIN_BLACKLIST: List[str] = []
# TIME_GRAIN_DENYLIST = ['PT1S']
TIME_GRAIN_DENYLIST: List[str] = []

# Additional time grains to be supported using similar definitions as in
# superset/db_engine_specs.builtin_time_grains.
Expand All @@ -402,17 +402,17 @@ def _try_json_readsha( # pylint: disable=unused-argument

# ---------------------------------------------------
# List of viz_types not allowed in your environment
# For example: Blacklist pivot table and treemap:
# VIZ_TYPE_BLACKLIST = ['pivot_table', 'treemap']
# For example: Disable pivot table and treemap:
# VIZ_TYPE_DENYLIST = ['pivot_table', 'treemap']
# ---------------------------------------------------

VIZ_TYPE_BLACKLIST: List[str] = []
VIZ_TYPE_DENYLIST: List[str] = []

# ---------------------------------------------------
# List of data sources not to be refreshed in druid cluster
# ---------------------------------------------------

DRUID_DATA_SOURCE_BLACKLIST: List[str] = []
DRUID_DATA_SOURCE_DENYLIST: List[str] = []

# --------------------------------------------------
# Modules, datasources and middleware to be registered
Expand Down
6 changes: 3 additions & 3 deletions superset/connectors/druid/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -206,11 +206,11 @@ def refresh_datasources(
If ``datasource_name`` is specified, only that datasource is updated
"""
ds_list = self.get_datasources()
blacklist = conf.get("DRUID_DATA_SOURCE_BLACKLIST", [])
denylist = conf.get("DRUID_DATA_SOURCE_DENYLIST", [])
ds_refresh: List[str] = []
if not datasource_name:
ds_refresh = list(filter(lambda ds: ds not in blacklist, ds_list))
elif datasource_name not in blacklist and datasource_name in ds_list:
ds_refresh = list(filter(lambda ds: ds not in denylist, ds_list))
elif datasource_name not in denylist and datasource_name in ds_list:
ds_refresh.append(datasource_name)
else:
return
Expand Down
6 changes: 3 additions & 3 deletions superset/db_engine_specs/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -268,16 +268,16 @@ def get_time_grains(cls) -> Tuple[TimeGrain, ...]:
def get_time_grain_expressions(cls) -> Dict[Optional[str], str]:
"""
Return a dict of all supported time grains including any potential added grains
but excluding any potentially blacklisted grains in the config file.
but excluding any potentially disabled grains in the config file.
:return: All time grain expressions supported by the engine
"""
# TODO: use @memoize decorator or similar to avoid recomputation on every call
time_grain_expressions = cls._time_grain_expressions.copy()
grain_addon_expressions = config["TIME_GRAIN_ADDON_EXPRESSIONS"]
time_grain_expressions.update(grain_addon_expressions.get(cls.engine, {}))
blacklist: List[str] = config["TIME_GRAIN_BLACKLIST"]
for key in blacklist:
denylist: List[str] = config["TIME_GRAIN_DENYLIST"]
for key in denylist:
time_grain_expressions.pop(key)
return time_grain_expressions

Expand Down
6 changes: 3 additions & 3 deletions superset/models/datasource_access_request.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ class DatasourceAccessRequest(Model, AuditMixinNullable):
datasource_id = Column(Integer)
datasource_type = Column(String(200))

ROLES_BLACKLIST = set(config["ROBOT_PERMISSION_ROLES"])
ROLES_DENYLIST = set(config["ROBOT_PERMISSION_ROLES"])

@property
def cls_model(self) -> Type["BaseDatasource"]:
Expand Down Expand Up @@ -72,7 +72,7 @@ def roles_with_datasource(self) -> str:
perm = self.datasource.perm # pylint: disable=no-member
pv = security_manager.find_permission_view_menu("datasource_access", perm)
for role in pv.role:
if role.name in self.ROLES_BLACKLIST:
if role.name in self.ROLES_DENYLIST:
continue
# pylint: disable=no-member
href = (
Expand All @@ -95,7 +95,7 @@ def user_roles(self) -> str:
f"created_by={self.created_by.username}&role_to_extend={role.name}"
)
link = '<a href="{}">Extend {} Role</a>'.format(href, role.name)
if role.name in self.ROLES_BLACKLIST:
if role.name in self.ROLES_DENYLIST:
link = "{} Role".format(role.name)
action_list = action_list + "<li>" + link + "</li>"
return "<ul>" + action_list + "</ul>"
12 changes: 6 additions & 6 deletions superset/utils/pandas_postprocessing.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
from superset.exceptions import QueryObjectValidationError
from superset.utils.core import DTTM_ALIAS, PostProcessingContributionOrientation

WHITELIST_NUMPY_FUNCTIONS = (
ALLOWLIST_NUMPY_FUNCTIONS = (
"average",
"argmin",
"argmax",
Expand All @@ -49,7 +49,7 @@
"var",
)

WHITELIST_ROLLING_FUNCTIONS = (
DENYLIST_ROLLING_FUNCTIONS = (
"count",
"corr",
"cov",
Expand All @@ -65,7 +65,7 @@
"quantile",
)

WHITELIST_CUMULATIVE_FUNCTIONS = (
ALLOWLIST_CUMULATIVE_FUNCTIONS = (
"cummax",
"cummin",
"cumprod",
Expand Down Expand Up @@ -142,7 +142,7 @@ def _get_aggregate_funcs(
_("Operator undefined for aggregator: %(name)s", name=name,)
)
operator = agg_obj["operator"]
if operator not in WHITELIST_NUMPY_FUNCTIONS or not hasattr(np, operator):
if operator not in ALLOWLIST_NUMPY_FUNCTIONS or not hasattr(np, operator):
raise QueryObjectValidationError(
_("Invalid numpy function: %(operator)s", operator=operator,)
)
Expand Down Expand Up @@ -331,7 +331,7 @@ def rolling( # pylint: disable=too-many-arguments
kwargs["win_type"] = win_type

df_rolling = df_rolling.rolling(**kwargs)
if rolling_type not in WHITELIST_ROLLING_FUNCTIONS or not hasattr(
if rolling_type not in DENYLIST_ROLLING_FUNCTIONS or not hasattr(
df_rolling, rolling_type
):
raise QueryObjectValidationError(
Expand Down Expand Up @@ -422,7 +422,7 @@ def cum(df: DataFrame, columns: Dict[str, str], operator: str) -> DataFrame:
"""
df_cum = df[columns.keys()]
operation = "cum" + operator
if operation not in WHITELIST_CUMULATIVE_FUNCTIONS or not hasattr(
if operation not in ALLOWLIST_CUMULATIVE_FUNCTIONS or not hasattr(
df_cum, operation
):
raise QueryObjectValidationError(
Expand Down
2 changes: 1 addition & 1 deletion superset/viz.py
Original file line number Diff line number Diff line change
Expand Up @@ -2940,6 +2940,6 @@ def get_data(self, df: pd.DataFrame) -> VizData:
if (
inspect.isclass(o)
and issubclass(o, BaseViz)
and o.viz_type not in config["VIZ_TYPE_BLACKLIST"]
and o.viz_type not in config["VIZ_TYPE_DENYLIST"]
)
}
2 changes: 1 addition & 1 deletion superset/viz_sip38.py
Original file line number Diff line number Diff line change
Expand Up @@ -2823,6 +2823,6 @@ def get_data(self, df: pd.DataFrame) -> VizData:
if (
inspect.isclass(o)
and issubclass(o, BaseViz)
and o.viz_type not in config["VIZ_TYPE_BLACKLIST"]
and o.viz_type not in config["VIZ_TYPE_DENYLIST"]
)
}
4 changes: 2 additions & 2 deletions tests/db_engine_specs/base_engine_spec_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -151,9 +151,9 @@ def test_limit_with_non_token_limit(self):
"""SELECT 'LIMIT 777'""", """SELECT 'LIMIT 777'\nLIMIT 1000"""
)

def test_time_grain_blacklist(self):
def test_time_grain_denylist(self):
with app.app_context():
app.config["TIME_GRAIN_BLACKLIST"] = ["PT1M"]
app.config["TIME_GRAIN_DENYLIST"] = ["PT1M"]
time_grain_functions = SqliteEngineSpec.get_time_grain_expressions()
self.assertNotIn("PT1M", time_grain_functions)

Expand Down
8 changes: 4 additions & 4 deletions tests/security_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -784,9 +784,9 @@ def assert_can_all(view_menu):
def test_views_are_secured(self):
"""Preventing the addition of unsecured views without has_access decorator"""
# These FAB views are secured in their body as opposed to by decorators
method_whitelist = ("action", "action_post")
method_allowlist = ("action", "action_post")
# List of redirect & other benign views
views_whitelist = [
views_allowlist = [
["MyIndexView", "index"],
["UtilView", "back"],
["LocaleView", "index"],
Expand All @@ -807,8 +807,8 @@ def test_views_are_secured(self):
view_class, predicate=inspect.ismethod
):
if (
name not in method_whitelist
and [class_name, name] not in views_whitelist
name not in method_allowlist
and [class_name, name] not in views_allowlist
and hasattr(value, "_urls")
and not hasattr(value, "_permission_name")
):
Expand Down

0 comments on commit ca29221

Please sign in to comment.