diff --git a/generate_common.py b/generate_common.py index 805e548b8..98bf8f1a1 100644 --- a/generate_common.py +++ b/generate_common.py @@ -32,6 +32,8 @@ import datas.metadata as metadata +from spinorama import ray_setup_logger + MINIRAY = None try: import ray @@ -241,12 +243,15 @@ def cache_load_seq(filters, smoke_test): @ray.remote(num_cpus=1) -def cache_fetch(cachepath: str): +def cache_fetch(cachepath: str, level): + logger = logging.getLogger("spinorama") + ray_setup_logger(level) + logger.debug("Level of debug is %d", level) return fl.load(path=cachepath) -def cache_load_distributed_map(filters, smoke_test): - cache_files = glob("/home/pierre/src/spinorama/{}/*.h5".format(CACHE_DIR)) +def cache_load_distributed_map(filters, smoke_test, level): + cache_files = glob("./{}/*.h5".format(CACHE_DIR)) ids = [] # mapper read the cache and start 1 worker per file for cache in cache_files: @@ -254,7 +259,7 @@ def cache_load_distributed_map(filters, smoke_test): filters.get("speaker_name") ): continue - ids.append(cache_fetch.remote(cache)) + ids.append(cache_fetch.remote(cache, level)) print("(queued {} files)".format(len(cache_files))) return ids @@ -286,14 +291,14 @@ def cache_load_distributed_reduce(filters, smoke_test, ids): return df_all -def cache_load_distributed(filters, smoke_test): - ids = cache_load_distributed_map(filters, smoke_test) +def cache_load_distributed(filters, smoke_test, level): + ids = cache_load_distributed_map(filters, smoke_test, level) return cache_load_distributed_reduce(filters, smoke_test, ids) -def cache_load(filters, smoke_test): +def cache_load(filters, smoke_test, level): if ray.is_initialized and filters.get("speaker_name") is None: - return cache_load_distributed(filters, smoke_test) + return cache_load_distributed(filters, smoke_test, level) return cache_load_seq(filters, smoke_test) diff --git a/generate_meta.py b/generate_meta.py index c2cfc48b7..2bf120964 100755 --- a/generate_meta.py +++ b/generate_meta.py @@ -42,6 +42,7 @@ 
from hashlib import md5 from itertools import groupby import json +from glob import glob import math import os import sys @@ -52,6 +53,8 @@ from docopt import docopt +from spinorama import ray_setup_logger + try: import ray except ModuleNotFoundError: @@ -179,6 +182,8 @@ def reject(filters: dict, speaker_name: str) -> bool: @ray.remote(num_cpus=1) def queue_score(speaker_name, speaker_data): + ray_setup_logger(level) + logger.debug("Level of debug is %d", level) logger.info("Processing %s", speaker_name) results = [] for origin, measurements in speaker_data.items(): @@ -758,20 +763,27 @@ def dump_metadata(meta): def dict_to_json(filename, d): js = json.dumps(d) key = md5(js.encode("utf-8"), usedforsecurity=False).hexdigest()[0:5] - hashname = "{}-{}.json".format(filename[:-5], key) - if os.path.exists(hashname) and os.path.exists(hashname + ".zip"): + hashed_filename = "{}-{}.json".format(filename[:-5], key) + if os.path.exists(hashed_filename) and os.path.exists(hashed_filename + ".zip"): + logger.debug("skipping %s", hashed_filename) return - with open(hashname, "w", encoding="utf-8") as f: + # hash changed, remove old files + old_hash_pattern = "{}-*.json".format(filename[:-5]) + for fileold in glob(old_hash_pattern): + logger.debug("remove old file %s", fileold) + os.remove(fileold) + with open(hashed_filename, "w", encoding="utf-8") as f: f.write(js) f.close() with zipfile.ZipFile( - hashname + ".zip", + hashed_filename + ".zip", "w", compression=zipfile.ZIP_DEFLATED, allowZip64=True, ) as current_zip: - current_zip.writestr(hashname, js) + current_zip.writestr(hashed_filename, js) + logger.debug("generated %s and zip version", hashed_filename) meta_full = {k: v for k, v in meta.items() if not v.get("skip", False)} dict_to_json(metafile, meta_full) @@ -794,8 +806,8 @@ def by_year(key): def_m = m["default_measurement"] year = int(m["measurements"][def_m].get("review_published", "1970")[0:4]) # group together years without too many reviews - if year > 1970 and 
year < 2021: - return 2020 + if year > 1970 and year < 2020: + return 2019 return year grouped_by_year = groupby(meta_sorted_date_tail, by_year) @@ -827,7 +839,7 @@ def main(): "format": mformat, "version": mversion, } - main_df = cache_load(filters=filters, smoke_test=smoke_test) + main_df = cache_load(filters=filters, smoke_test=smoke_test, level=level) steps.append(("loaded", time.perf_counter())) if main_df is None: @@ -862,5 +874,6 @@ def main(): if __name__ == "__main__": args = docopt(__doc__, version="generate_meta.py version 1.6", options_first=True) - logger = get_custom_logger(level=args2level(args), duplicate=True) + level = args2level(args) + logger = get_custom_logger(level=level, duplicate=True) main() diff --git a/generate_peqs.py b/generate_peqs.py index c2f7f2ea8..5d02c632e 100755 --- a/generate_peqs.py +++ b/generate_peqs.py @@ -588,7 +588,7 @@ def main(): if disable_ray: df_all_speakers = cache_load_seq(filters=do_filters, smoke_test=smoke_test) else: - df_all_speakers = cache_load(filters=do_filters, smoke_test=smoke_test) + df_all_speakers = cache_load(filters=do_filters, smoke_test=smoke_test, level=level) except ValueError as v_e: if speaker_name is not None: print( diff --git a/package.json b/package.json index a049814cd..bb74c9d8d 100644 --- a/package.json +++ b/package.json @@ -8,15 +8,16 @@ "npm": "^10.2.3", "plotly": "^1.0.6", "sass": "^1.58.0", - "typescript-language-server": "^3.3.2" + "typescript-language-server": "^3.3.2", + "webpack": "^5.89.0" }, "devDependencies": { - "eslint": "^8.53.0", + "eslint": "^8.54.0", "flow": "^0.2.3", "flow-remove-types": "^2.222.0", "husky": "^8.0.3", "lint-staged": "^15.0.2", - "prettier": "^3.0.3", + "prettier": "^3.1.0", "pyright": "^1.1.337", "standard": "^17.1.0", "w3c-html-validator": "^1.6.1" diff --git a/src/spinorama/compute_estimates.py b/src/spinorama/compute_estimates.py index df3572c0c..2840f7148 100644 --- a/src/spinorama/compute_estimates.py +++ b/src/spinorama/compute_estimates.py @@ -130,8
+130,11 @@ def estimates(spin: pd.DataFrame, spl_h: pd.DataFrame, spl_v: pd.DataFrame) -> d est["dir_{}_p".format(orientation)] = dir_deg_p est["dir_{}_m".format(orientation)] = dir_deg_m est["dir_{}".format(orientation)] = dir_deg + except KeyError as error: + # missing data + logger.debug("Computing %s directivity failed! %s", orientation, error) except Exception as error: - logger.warning("Computing directivity failed! %s", error) + logger.warning("Computing %s directivity failed! %s", orientation, error) logger.debug("Estimates v3: %s", est) except TypeError as type_error: diff --git a/update_3rdparties.sh b/update_3rdparties.sh index e5de0801e..733c09db9 100755 --- a/update_3rdparties.sh +++ b/update_3rdparties.sh @@ -17,7 +17,6 @@ wget -O${ASSETS}/fuse-${FUSE}.min.js https://cdn.jsdelivr.net/npm/fuse.js@${FUSE wget -O${ASSETS}/fontawesome-${FONTAWESOME}.min.css https://cdn.jsdelivr.net/npm/@fortawesome/fontawesome-free@${FONTAWESOME}/css/all.min.css mkdir -p ${WEBFONTS} wget -O${WEBFONTS}/fa-brands-400.woff2 https://cdn.jsdelivr.net/npm/@fortawesome/fontawesome-free@${FONTAWESOME}/webfonts/fa-brands-400.woff2 -wget -O${WEBFONTS}/fa-solid-400.woff2 https://cdn.jsdelivr.net/npm/@fortawesome/fontawesome-free@${FONTAWESOME}/webfonts/fa-solid-400.woff2 wget -O${WEBFONTS}/fa-solid-900.woff2 https://cdn.jsdelivr.net/npm/@fortawesome/fontawesome-free@${FONTAWESOME}/webfonts/fa-solid-900.woff2 wget -O${WEBFONTS}/fa-regular-400.woff2 https://cdn.jsdelivr.net/npm/@fortawesome/fontawesome-free@${FONTAWESOME}/webfonts/fa-regular-400.woff2 diff --git a/update_website.sh b/update_website.sh index 778e33710..9edc83e4f 100755 --- a/update_website.sh +++ b/update_website.sh @@ -19,7 +19,7 @@ echo "Update starts" export PYTHONPATH=src:src/website:src/spinorama:. -PYTHON=python3.10 +PYTHON=python3.11 IP="127.0.0.1" case $HOSTNAME in @@ -40,7 +40,7 @@ esac #echo $IP # check meta -command=$(python3.10 ./check_meta.py) +command=$(python3.11 ./check_meta.py) status=$? 
if [ $status -ne 0 ]; then echo "KO checking metadata ($status)"; @@ -53,7 +53,7 @@ fi ./update_pictures.sh # generate all graphs if some are missing rm -fr /tmp/ray -command=$(python3.10 ./generate_graphs.py --dash-ip="$IP") +command=$(python3.11 ./generate_graphs.py --dash-ip="$IP") status=$? if [ $status -ne 0 ]; then echo "KO after generate graph!" @@ -63,7 +63,7 @@ else fi # recompute metadata for all speakers rm -f docs/assets/metadata.json -command=$(python3.10 ./generate_meta.py --dash-ip="$IP") +command=$(python3.11 ./generate_meta.py --dash-ip="$IP") status=$? if [ $status -ne 0 ]; then echo "KO after generate meta!" @@ -75,7 +75,7 @@ fi ./update_pictures.sh # generate radar # rm -f docs/speakers/*/spider* -command=$(python3.10 ./generate_radar.py) +command=$(python3.11 ./generate_radar.py) status=$? if [ $status -ne 0 ]; then echo "KO after generate radar!" @@ -85,7 +85,7 @@ else fi # generate eq_compare # rm -f docs/speakers/*/eq_compare* -command=$(python3.10 ./generate_eq_compare.py) +command=$(python3.11 ./generate_eq_compare.py) status=$? if [ $status -ne 0 ]; then echo "KO after generate EQ compare!" @@ -95,7 +95,7 @@ else fi # generate status rm -f docs/stats/*.json -command=$(python3.10 ./generate_stats.py) +command=$(python3.11 ./generate_stats.py) status=$? if [ $status -ne 0 ]; then echo "KO after generate statistics!" @@ -106,7 +106,7 @@ fi # generate website ./update_brands.sh ./update_reviewers.sh -command=$(python3.10 ./generate_html.py --dev --sitedev=https://dev.spinorama.org) +command=$(python3.11 ./generate_html.py --dev --sitedev=https://dev.spinorama.org) status=$? if [ $status -ne 0 ]; then echo "KO after generate HTML!"