diff --git a/requirements/base.txt b/requirements/base.txt
index 4b7363ca18c67..5e12f2faae071 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -68,7 +68,7 @@ dnspython==2.1.0
     # via email-validator
 email-validator==1.1.3
     # via flask-appbuilder
-flask==2.0.3
+flask==2.1.3
     # via
     #   apache-superset
     #   flask-appbuilder
@@ -86,11 +86,11 @@ flask-babel==1.0.0
     # via flask-appbuilder
 flask-caching==1.10.1
     # via apache-superset
-flask-compress==1.10.1
+flask-compress==1.13
     # via apache-superset
 flask-jwt-extended==4.3.1
     # via flask-appbuilder
-flask-login==0.4.1
+flask-login==0.6.0
     # via flask-appbuilder
 flask-migrate==3.1.0
     # via apache-superset
@@ -98,9 +98,9 @@ flask-sqlalchemy==2.5.1
     # via
     #   flask-appbuilder
     #   flask-migrate
-flask-talisman==0.8.1
+flask-talisman==1.0.0
     # via apache-superset
-flask-wtf==0.14.3
+flask-wtf==1.0.1
     # via
     #   apache-superset
     #   flask-appbuilder
@@ -144,10 +144,11 @@ mako==1.1.4
     # via alembic
 markdown==3.3.4
     # via apache-superset
-markupsafe==2.0.1
+markupsafe==2.1.1
     # via
     #   jinja2
     #   mako
+    #   werkzeug
     #   wtforms
 marshmallow==3.13.0
     # via
@@ -236,7 +237,6 @@ six==1.16.0
     # via
     #   bleach
     #   click-repl
-    #   flask-talisman
     #   isodate
     #   jsonschema
     #   paramiko
@@ -278,7 +278,7 @@ wcwidth==0.2.5
     # via prompt-toolkit
 webencodings==0.5.1
     # via bleach
-werkzeug==2.0.3
+werkzeug==2.1.2
     # via
     #   flask
     #   flask-jwt-extended
diff --git a/setup.py b/setup.py
index 3e017fe263d65..dc546e5a6030c 100644
--- a/setup.py
+++ b/setup.py
@@ -82,13 +82,14 @@ def get_git_sha() -> str:
         "cron-descriptor",
         "cryptography>=3.3.2",
         "deprecation>=2.1.0, <2.2.0",
-        "flask>=2.0.0, <3.0.0",
+        "flask>=2.1.3, <2.2",
         "flask-appbuilder>=4.1.6, <5.0.0",
-        "flask-caching>=1.10.0",
-        "flask-compress",
-        "flask-talisman",
-        "flask-migrate",
-        "flask-wtf",
+        "flask-caching>=1.10.1, <1.11",
+        "flask-compress>=1.13, <2.0",
+        "flask-talisman>=1.0.0, <2.0",
+        "flask-login==0.6.0",
+        "flask-migrate>=3.1.0, <4.0",
+        "flask-wtf>=1.0.1, <1.1",
         "func_timeout",
         "geopy",
         "graphlib-backport",
diff --git a/superset-frontend/src/components/Chart/DrillDetail/DrillDetailPane.tsx b/superset-frontend/src/components/Chart/DrillDetail/DrillDetailPane.tsx
index f3e33298d11ac..cf387b4e7e821 100644
--- a/superset-frontend/src/components/Chart/DrillDetail/DrillDetailPane.tsx
+++ b/superset-frontend/src/components/Chart/DrillDetail/DrillDetailPane.tsx
@@ -219,7 +219,7 @@ export default function DrillDetailPane({
   useEffect(() => {
     if (!responseError && !isLoading && !resultsPages.has(pageIndex)) {
       setIsLoading(true);
-      const jsonPayload = getDrillPayload(formData, filters);
+      const jsonPayload = getDrillPayload(formData, filters) ?? {};
       const cachePageLimit = Math.ceil(SAMPLES_ROW_LIMIT / PAGE_SIZE);
       getDatasourceSamples(
         datasourceType,
diff --git a/superset-frontend/src/explore/components/DataTablesPane/components/SamplesPane.tsx b/superset-frontend/src/explore/components/DataTablesPane/components/SamplesPane.tsx
index 0d1047c51d282..5c66075750dc5 100644
--- a/superset-frontend/src/explore/components/DataTablesPane/components/SamplesPane.tsx
+++ b/superset-frontend/src/explore/components/DataTablesPane/components/SamplesPane.tsx
@@ -61,7 +61,7 @@ export const SamplesPane = ({
 
     if (isRequest && !cache.has(datasource)) {
       setIsLoading(true);
-      getDatasourceSamples(datasource.type, datasource.id, queryForce)
+      getDatasourceSamples(datasource.type, datasource.id, queryForce, {})
         .then(response => {
           setData(ensureIsArray(response.data));
           setColnames(ensureIsArray(response.colnames));
diff --git a/superset/views/core.py b/superset/views/core.py
index 534f8f667d707..d0db5e9b2e94d 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -773,7 +773,12 @@ def get_redirect_url() -> str:
             query["form_data_key"] = [form_data_key]
             url = url._replace(query=parse.urlencode(query, True))
             redirect_url = parse.urlunparse(url)
-        return redirect_url
+
+        # Return a relative URL
+        url = parse.urlparse(redirect_url)
+        if url.query:
+            return f"{url.path}?{url.query}"
+        return url.path
 
     @has_access
     @event_logger.log_this
diff --git a/tests/integration_tests/conftest.py b/tests/integration_tests/conftest.py
index 8908c3e22782f..5c132381b1930 100644
--- a/tests/integration_tests/conftest.py
+++ b/tests/integration_tests/conftest.py
@@ -188,7 +188,11 @@ def remove(self) -> None:
 
 
 def setup_presto_if_needed():
-    backend = app.config["SQLALCHEMY_EXAMPLES_URI"].split("://")[0]
+    db_uri = (
+        app.config.get("SQLALCHEMY_EXAMPLES_URI")
+        or app.config["SQLALCHEMY_DATABASE_URI"]
+    )
+    backend = db_uri.split("://")[0]
     database = get_example_database()
     extra = database.get_extra()
 
diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py
index 6e9f1a8d33c48..708d8e2deedcc 100644
--- a/tests/integration_tests/core_tests.py
+++ b/tests/integration_tests/core_tests.py
@@ -626,7 +626,7 @@ def test_redirect_invalid(self):
 
         self.login(username="admin")
         response = self.client.get(f"/r/{model_url.id}")
-        assert response.headers["Location"] == "http://localhost/"
+        assert response.headers["Location"] == "/"
 
         db.session.delete(model_url)
         db.session.commit()
@@ -1671,7 +1671,9 @@ def test_explore_redirect(self, mock_command: mock.Mock):
         rv = self.client.get(
            f"/superset/explore/?form_data={quote(json.dumps(form_data))}"
         )
-        self.assertRedirects(rv, f"/explore/?form_data_key={random_key}")
+        self.assertEqual(
+            rv.headers["Location"], f"/explore/?form_data_key={random_key}"
+        )
 
 
 if __name__ == "__main__":
diff --git a/tests/integration_tests/dashboard_tests.py b/tests/integration_tests/dashboard_tests.py
index 3432b0fc16d88..973394a26d3f7 100644
--- a/tests/integration_tests/dashboard_tests.py
+++ b/tests/integration_tests/dashboard_tests.py
@@ -143,7 +143,7 @@ def test_new_dashboard(self):
         dash_count_after = db.session.query(func.count(Dashboard.id)).first()[0]
         self.assertEqual(dash_count_before + 1, dash_count_after)
         group = re.match(
-            r"http:\/\/localhost\/superset\/dashboard\/([0-9]*)\/\?edit=true",
+            r"\/superset\/dashboard\/([0-9]*)\/\?edit=true",
            response.headers["Location"],
         )
         assert group is not None
diff --git a/tests/integration_tests/datasource_tests.py b/tests/integration_tests/datasource_tests.py
index edee0028467f1..4969321a1c54b 100644
--- a/tests/integration_tests/datasource_tests.py
+++ b/tests/integration_tests/datasource_tests.py
@@ -466,9 +466,9 @@ def test_get_samples(test_client, login_as_admin, virtual_dataset):
         f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table"
     )
     # feeds data
-    test_client.post(uri)
+    test_client.post(uri, json={})
     # get from cache
-    rv = test_client.post(uri)
+    rv = test_client.post(uri, json={})
     assert rv.status_code == 200
     assert len(rv.json["result"]["data"]) == 10
     assert QueryCacheManager.has(
@@ -480,9 +480,9 @@ def test_get_samples(test_client, login_as_admin, virtual_dataset):
     # 2. should read through cache data
     uri2 = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table&force=true"
     # feeds data
-    test_client.post(uri2)
+    test_client.post(uri2, json={})
     # force query
-    rv2 = test_client.post(uri2)
+    rv2 = test_client.post(uri2, json={})
     assert rv2.status_code == 200
     assert len(rv2.json["result"]["data"]) == 10
     assert QueryCacheManager.has(
@@ -518,7 +518,7 @@ def test_get_samples_with_incorrect_cc(test_client, login_as_admin, virtual_data
     uri = (
         f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table"
     )
-    rv = test_client.post(uri)
+    rv = test_client.post(uri, json={})
     assert rv.status_code == 422
     assert "error" in rv.json
 
@@ -530,7 +530,7 @@ def test_get_samples_on_physical_dataset(test_client, login_as_admin, physical_d
     uri = (
         f"/datasource/samples?datasource_id={physical_dataset.id}&datasource_type=table"
     )
-    rv = test_client.post(uri)
+    rv = test_client.post(uri, json={})
     assert rv.status_code == 200
     assert QueryCacheManager.has(
         rv.json["result"]["cache_key"], region=CacheRegion.DATA
@@ -543,7 +543,7 @@ def test_get_samples_with_filters(test_client, login_as_admin, virtual_dataset):
         f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table"
     )
     rv = test_client.post(uri, json=None)
-    assert rv.status_code == 200
+    assert rv.status_code == 400
 
     rv = test_client.post(uri, json={})
     assert rv.status_code == 200
@@ -644,7 +644,7 @@ def test_get_samples_pagination(test_client, login_as_admin, virtual_dataset):
     uri = (
         f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table"
     )
-    rv = test_client.post(uri)
+    rv = test_client.post(uri, json={})
     assert rv.json["result"]["page"] == 1
     assert rv.json["result"]["per_page"] == app.config["SAMPLES_ROW_LIMIT"]
     assert rv.json["result"]["total_count"] == 10
@@ -653,28 +653,28 @@ def test_get_samples_pagination(test_client, login_as_admin, virtual_dataset):
     per_pages = (app.config["SAMPLES_ROW_LIMIT"] + 1, 0, "xx")
     for per_page in per_pages:
         uri = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table&per_page={per_page}"
-        rv = test_client.post(uri)
+        rv = test_client.post(uri, json={})
         assert rv.status_code == 400
 
     # 3. incorrect page or datasource_type
     uri = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table&page=xx"
-    rv = test_client.post(uri)
+    rv = test_client.post(uri, json={})
     assert rv.status_code == 400
 
     uri = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=xx"
-    rv = test_client.post(uri)
+    rv = test_client.post(uri, json={})
     assert rv.status_code == 400
 
     # 4. turning pages
     uri = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table&per_page=2&page=1"
-    rv = test_client.post(uri)
+    rv = test_client.post(uri, json={})
     assert rv.json["result"]["page"] == 1
     assert rv.json["result"]["per_page"] == 2
     assert rv.json["result"]["total_count"] == 10
     assert [row["col1"] for row in rv.json["result"]["data"]] == [0, 1]
 
     uri = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table&per_page=2&page=2"
-    rv = test_client.post(uri)
+    rv = test_client.post(uri, json={})
     assert rv.json["result"]["page"] == 2
     assert rv.json["result"]["per_page"] == 2
     assert rv.json["result"]["total_count"] == 10
@@ -682,7 +682,7 @@ def test_get_samples_pagination(test_client, login_as_admin, virtual_dataset):
 
     # 5. Exceeding the maximum pages
     uri = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table&per_page=2&page=6"
-    rv = test_client.post(uri)
+    rv = test_client.post(uri, json={})
    assert rv.json["result"]["page"] == 6
     assert rv.json["result"]["per_page"] == 2
     assert rv.json["result"]["total_count"] == 10
diff --git a/tests/integration_tests/thumbnails_tests.py b/tests/integration_tests/thumbnails_tests.py
index 332d71da331ec..0ee420fda8e76 100644
--- a/tests/integration_tests/thumbnails_tests.py
+++ b/tests/integration_tests/thumbnails_tests.py
@@ -368,7 +368,7 @@ def test_get_cached_chart_wrong_digest(self):
             id_, thumbnail_url = self._get_id_and_thumbnail_url(CHART_URL)
             rv = self.client.get(f"api/v1/chart/{id_}/thumbnail/1234/")
             self.assertEqual(rv.status_code, 302)
-            self.assertRedirects(rv, thumbnail_url)
+            self.assertEqual(rv.headers["Location"], thumbnail_url)
 
     @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
     @with_feature_flags(THUMBNAILS=True)
@@ -413,4 +413,4 @@ def test_get_cached_dashboard_wrong_digest(self):
             id_, thumbnail_url = self._get_id_and_thumbnail_url(DASHBOARD_URL)
             rv = self.client.get(f"api/v1/dashboard/{id_}/thumbnail/1234/")
             self.assertEqual(rv.status_code, 302)
-            self.assertRedirects(rv, thumbnail_url)
+            self.assertEqual(rv.headers["Location"], thumbnail_url)