Simplify
sbrunner committed Feb 15, 2019
1 parent cfb7e4b commit 0ccb10e
Showing 3 changed files with 53 additions and 78 deletions.
58 changes: 24 additions & 34 deletions geoportal/c2cgeoportal_geoportal/lib/checker.py
@@ -30,34 +30,24 @@
 import logging
 import subprocess
 from time import sleep
-from urllib.parse import urlsplit, urlunsplit
+from urllib.parse import urljoin
 
 import requests
 
 log = logging.getLogger(__name__)
 
 
-def build_url(name, url, request, headers=None):
-    settings = {
-        elem['netloc']: elem for elem in request.registry.settings["checker"].get("host_map", [])
-    }
-    url_fragments = urlsplit(url)
+def build_url(name, path, request, headers=None):
+    base_internal_url = request.registry.settings["checker"]["base_internal_url"]
+    url = urljoin(base_internal_url, path)
 
+    forward_host = request.registry.settings["checker"].get("forward_host", False)
     headers = _build_headers(request, headers)
-    netloc = '*' if '*' in settings and url_fragments.netloc not in settings else url_fragments.netloc
-    if netloc in settings:
-        mapped_host = settings[netloc]
-        url_ = urlunsplit((
-            mapped_host['to'].get('schema', url_fragments.scheme),
-            mapped_host['to'].get('netloc', url_fragments.netloc),
-            url_fragments.path, url_fragments.query, url_fragments.fragment
-        ))
-        if mapped_host.get('forward_host', False):
-            headers["Host"] = url_fragments.netloc
-    else:
-        url_ = url
 
-    log.debug("%s, URL: %s => %s", name, url, url_)
-    return {"url": url_, "headers": headers}
+    if forward_host:
+        headers["Host"] = request.host
+
+    log.debug("%s, URL: %s", name, url)
+    return {"url": url, "headers": headers}
 
 
 def _build_headers(request, headers=None):
@@ -79,7 +69,7 @@ def _routes(settings, health_check):
         name = "checker_routes_" + route.get("display_name", route["name"])
 
         def get_both(request):
-            return build_url("route", request.route_url(route["name"]), request)
+            return build_url("route", request.route_path(route["name"]), request)
 
         health_check.add_url_check(
             url=lambda r: get_both(r)["url"],
@@ -95,8 +85,8 @@ def _pdf3(settings, health_check):
     print_settings = settings["print"]
 
     def check(request):
-        url = request.route_url("printproxy_report_create", format="pdf")
-        url_headers = build_url("Check the printproxy request (create)", url, request)
+        path = request.route_path("printproxy_report_create", format="pdf")
+        url_headers = build_url("Check the printproxy request (create)", path, request)
 
         session = requests.session()
         resp = session.post(
@@ -108,8 +98,8 @@ def check(request):
 
         job = resp.json()
 
-        url = request.route_url("printproxy_status", ref=job["ref"])
-        url_headers = build_url("Check the printproxy pdf status", url, request)
+        path = request.route_path("printproxy_status", ref=job["ref"])
+        url_headers = build_url("Check the printproxy pdf status", path, request)
         done = False
         while not done:
             sleep(1)
@@ -124,8 +114,8 @@
                 raise Exception("Failed to do the printing: {0!s}".format(status["error"]))
             done = status["done"]
 
-        url = request.route_url("printproxy_report_get", ref=job["ref"])
-        url_headers = build_url("Check the printproxy pdf retrieve", url, request)
+        path = request.route_path("printproxy_report_get", ref=job["ref"])
+        url_headers = build_url("Check the printproxy pdf retrieve", path, request)
         resp = session.get(
             timeout=30,
             **url_headers
@@ -139,7 +129,7 @@ def _fts(settings, health_check):
     fts_settings = settings["fulltextsearch"]
 
     def get_both(request):
-        return build_url("Check the fulltextsearch", request.route_url("fulltextsearch"), request)
+        return build_url("Check the fulltextsearch", request.route_path("fulltextsearch"), request)
 
     def check(_request, response):
         if response.json()["features"] == 0:
@@ -166,15 +156,15 @@ def _themes_errors(settings, health_check):
     interfaces_settings = themes_settings["interfaces"]
 
     def check(request):
-        url = request.route_url("themes")
+        path = request.route_path("themes")
         session = requests.session()
         for interface, in DBSession.query(Interface.name).all():
             params = {}
             params.update(default_params)
             params.update(interfaces_settings.get(interface, {}).get("params", {}))
             params["interface"] = interface
 
-            interface_url_headers = build_url("checker_themes " + interface, url, request)
+            interface_url_headers = build_url("checker_themes " + interface, path, request)
 
             response = session.get(
                 params=params,
@@ -216,7 +206,7 @@ def _lang_files(global_settings, settings, health_check):
     def get_both(url, lang, request):
         return build_url(
             name,
-            request.static_url(url.format(package=global_settings["package"], lang=lang)),
+            request.static_path(url.format(package=global_settings["package"], lang=lang)),
             request
         )
 
@@ -235,8 +225,8 @@ def _phantomjs(settings, health_check):
            continue
 
        def check(request):
-           url = request.route_url(route["name"], _query=route.get("params", {}))
-           url = build_url("Check", url, request)["url"]
+           path = request.route_path(route["name"], _query=route.get("params", {}))
+           url = build_url("Check", path, request)["url"]
 
           cmd = [
               "phantomjs", "--local-to-remote-url-access=true",
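
In short, build_url no longer rewrites full URLs through a host_map: callers now pass a route path (request.route_path, request.static_path) and the checker joins it onto the configured base_internal_url, optionally forwarding the public Host header. Below is a minimal standalone sketch of the new flow outside Pyramid; build_url_sketch and public_host are illustrative names, and logging is omitted:

from urllib.parse import urljoin


def build_url_sketch(path, settings, public_host, headers=None):
    headers = dict(headers or {})
    # Resolve the route path against the configured internal base URL.
    url = urljoin(settings["base_internal_url"], path)
    if settings.get("forward_host", False):
        # Talk to the internal service while presenting the public host name.
        headers["Host"] = public_host
    return {"url": url, "headers": headers}


print(build_url_sketch(
    "/toto?titi#tutu",
    {"base_internal_url": "http://localhost:8080", "forward_host": True},
    "example.com",
))
# {'url': 'http://localhost:8080/toto?titi#tutu', 'headers': {'Host': 'example.com'}}
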
6 changes: 1 addition & 5 deletions
@@ -537,11 +537,7 @@ vars:
 
   # Checker configuration
   checker:
-    host_map:
-      - netloc: '*'
-        to:
-          schema: http
-          netloc: localhost:8080
+    base_internal_url: http://localhost:8080
     fulltextsearch:
       level: 1
       search: text to search
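
After this change the whole host_map block collapses into a single setting. A resulting checker block might look as follows (a sketch using the template defaults above; forward_host is optional, is read by the new build_url, and defaults to false):

checker:
  # Base URL that the checks are sent to, typically the service itself.
  base_internal_url: http://localhost:8080
  # Optional: forward the public Host header on internal requests.
  forward_host: false
  fulltextsearch:
    level: 1
    search: text to search
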
67 changes: 28 additions & 39 deletions geoportal/tests/test_checker.py
@@ -37,81 +37,69 @@
 
 class TestExportCSVView(TestCase):
 
-    @staticmethod
-    def _create_dummy_request(netloc="example.com", forward_host=True):
+    def test_build_url_docker(self):
         request = DummyRequest()
         request.registry.settings = {
             "checker": {
-                "host_map": [{
-                    "netloc": netloc,
-                    "to": {
-                        "netloc": "localhost",
-                        "schema": "http",
-                    },
-                    "forward_host": forward_host
-                }]
+                "base_internal_url": "http://localhost:8080",
             }
         }
-        return request
-
-    def test_build_url_star(self):
         self.assertEqual(
             build_url(
                 "Test",
-                "http://example.com/toto?titi#tutu",
-                self._create_dummy_request("*", False)
+                "/toto?titi#tutu",
+                request
             ),
             {
-                "url": "http://localhost/toto?titi#tutu",
+                "url": "http://localhost:8080/toto?titi#tutu",
                 "headers": {
                     "Cache-Control": "no-cache",
                 }
             }
         )
 
     def test_build_url_http(self):
+        request = DummyRequest()
+        request.registry.settings = {
+            "checker": {
+                "base_internal_url": "http://localhost",
+                "forward_host": True,
+            }
+        }
         self.assertEqual(
             build_url(
                 "Test",
-                "http://example.com/toto?titi#tutu",
-                self._create_dummy_request()
+                "/toto?titi#tutu",
+                request
             ),
             {
                 "url": "http://localhost/toto?titi#tutu",
                 "headers": {
                     "Cache-Control": "no-cache",
-                    "Host": "example.com"
+                    "Host": "example.com:80"
                 }
             }
         )
 
     def test_build_url_https(self):
-        self.assertEqual(
-            build_url(
-                "Test",
-                "https://example.com/toto?titi#tutu",
-                self._create_dummy_request()
-            ),
-            {
-                "url": "http://localhost/toto?titi#tutu",
-                "headers": {
-                    "Cache-Control": "no-cache",
-                    "Host": "example.com"
-                }
-            }
-        )
-
-    def test_build_url_other(self):
+        request = DummyRequest()
+        request.registry.settings = {
+            "checker": {
+                "base_internal_url": "https://localhost",
+                "forward_host": True,
+            }
+        }
         self.assertEqual(
             build_url(
                 "Test",
-                "https://camptocamp.com/toto?titi#tutu",
-                self._create_dummy_request()
+                "/toto?titi#tutu",
+                request
             ),
             {
-                "url": "https://camptocamp.com/toto?titi#tutu",
+                "url": "https://localhost/toto?titi#tutu",
                 "headers": {
                     "Cache-Control": "no-cache",
+                    "Host": "example.com:80"
                 }
             }
         )
@@ -120,18 +108,19 @@ def test_build_url_forward_headers(self):
         request = DummyRequest()
         request.registry.settings = {
             "checker": {
+                "base_internal_url": "http://localhost",
                 "forward_headers": ["Cookie"]
             }
         }
         request.headers["Cookie"] = "test"
         self.assertEqual(
             build_url(
                 "Test",
-                "https://camptocamp.com/toto?titi#tutu",
+                "/toto?titi#tutu",
                 request
             ),
             {
-                "url": "https://camptocamp.com/toto?titi#tutu",
+                "url": "http://localhost/toto?titi#tutu",
                 "headers": {
                     "Cache-Control": "no-cache",
                     "Cookie": "test",
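
A note on the expected "Host": "example.com:80" values: pyramid.testing.DummyRequest defaults request.host to "example.com:80", and the new build_url copies request.host verbatim when forward_host is enabled. A quick illustrative check:

from pyramid.testing import DummyRequest

request = DummyRequest()
print(request.host)  # prints "example.com:80", the DummyRequest default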
