Skip to content

Commit

Permalink
fix: logging in some places
Browse files Browse the repository at this point in the history
  • Loading branch information
pierrre committed Nov 29, 2023
1 parent 3283979 commit 55fbe4c
Show file tree
Hide file tree
Showing 7 changed files with 52 additions and 31 deletions.
21 changes: 13 additions & 8 deletions generate_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,8 @@

import datas.metadata as metadata

from spinorama import ray_setup_logger

MINIRAY = None
try:
import ray
Expand Down Expand Up @@ -241,20 +243,23 @@ def cache_load_seq(filters, smoke_test):


@ray.remote(num_cpus=1)
def cache_fetch(cachepath: str):
def cache_fetch(cachepath: str, level):
logger = logging.getLogger("spinorama")
ray_setup_logger(level)
logger.debug("Level of debug is %d", level)
return fl.load(path=cachepath)


def cache_load_distributed_map(filters, smoke_test):
cache_files = glob("/home/pierre/src/spinorama/{}/*.h5".format(CACHE_DIR))
def cache_load_distributed_map(filters, smoke_test, level):
cache_files = glob("./{}/*.h5".format(CACHE_DIR))
ids = []
# mapper read the cache and start 1 worker per file
for cache in cache_files:
if filters.get("speaker_name") is not None and cache[-5:-3] != cache_key(
filters.get("speaker_name")
):
continue
ids.append(cache_fetch.remote(cache))
ids.append(cache_fetch.remote(cache, level))

print("(queued {} files)".format(len(cache_files)))
return ids
Expand Down Expand Up @@ -286,14 +291,14 @@ def cache_load_distributed_reduce(filters, smoke_test, ids):
return df_all


def cache_load_distributed(filters, smoke_test):
ids = cache_load_distributed_map(filters, smoke_test)
def cache_load_distributed(filters, smoke_test, level):
ids = cache_load_distributed_map(filters, smoke_test, level)
return cache_load_distributed_reduce(filters, smoke_test, ids)


def cache_load(filters, smoke_test):
def cache_load(filters, smoke_test, level):
if ray.is_initialized and filters.get("speaker_name") is None:
return cache_load_distributed(filters, smoke_test)
return cache_load_distributed(filters, smoke_test, level)
return cache_load_seq(filters, smoke_test)


Expand Down
31 changes: 22 additions & 9 deletions generate_meta.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@
from hashlib import md5
from itertools import groupby
import json
from glob import glob
import math
import os
import sys
Expand All @@ -52,6 +53,8 @@

from docopt import docopt

from spinorama import ray_setup_logger

try:
import ray
except ModuleNotFoundError:
Expand Down Expand Up @@ -179,6 +182,8 @@ def reject(filters: dict, speaker_name: str) -> bool:

@ray.remote(num_cpus=1)
def queue_score(speaker_name, speaker_data):
ray_setup_logger(level)
logger.debug("Level of debug is %d", level)
logger.info("Processing %s", speaker_name)
results = []
for origin, measurements in speaker_data.items():
Expand Down Expand Up @@ -758,20 +763,27 @@ def dump_metadata(meta):
def dict_to_json(filename, d):
js = json.dumps(d)
key = md5(js.encode("utf-8"), usedforsecurity=False).hexdigest()[0:5]
hashname = "{}-{}.json".format(filename[:-5], key)
if os.path.exists(hashname) and os.path.exists(hashname + ".zip"):
hashed_filename = "{}-{}.json".format(filename[:-5], key)
if os.path.exists(hashed_filename) and os.path.exists(hashed_filename + ".zip"):
logger.debug("skipping %s", hashed_filename)
return
with open(hashname, "w", encoding="utf-8") as f:
# hash changed, remove old files
old_hash_pattern = "{}-*.json".format(filename[:-5])
for fileold in glob(old_hash_pattern):
logger.debug("remove old file %s", fileold)
os.remove(fileold)
with open(hashed_filename, "w", encoding="utf-8") as f:
f.write(js)
f.close()

with zipfile.ZipFile(
hashname + ".zip",
hashed_filename + ".zip",
"w",
compression=zipfile.ZIP_DEFLATED,
allowZip64=True,
) as current_zip:
current_zip.writestr(hashname, js)
current_zip.writestr(hashed_filename, js)
logger.debug("generated %s and zip version", hashed_filename)

meta_full = {k: v for k, v in meta.items() if not v.get("skip", False)}
dict_to_json(metafile, meta_full)
Expand All @@ -794,8 +806,8 @@ def by_year(key):
def_m = m["default_measurement"]
year = int(m["measurements"][def_m].get("review_published", "1970")[0:4])
# group together years without too many reviews
if year > 1970 and year < 2021:
return 2020
if year > 1970 and year < 2020:
return 2019
return year

grouped_by_year = groupby(meta_sorted_date_tail, by_year)
Expand Down Expand Up @@ -827,7 +839,7 @@ def main():
"format": mformat,
"version": mversion,
}
main_df = cache_load(filters=filters, smoke_test=smoke_test)
main_df = cache_load(filters=filters, smoke_test=smoke_test, level=level)
steps.append(("loaded", time.perf_counter()))

if main_df is None:
Expand Down Expand Up @@ -862,5 +874,6 @@ def main():

if __name__ == "__main__":
args = docopt(__doc__, version="generate_meta.py version 1.6", options_first=True)
logger = get_custom_logger(level=args2level(args), duplicate=True)
level = args2level(args)
logger = get_custom_logger(level=level, duplicate=True)
main()
2 changes: 1 addition & 1 deletion generate_peqs.py
Original file line number Diff line number Diff line change
Expand Up @@ -588,7 +588,7 @@ def main():
if disable_ray:
df_all_speakers = cache_load_seq(filters=do_filters, smoke_test=smoke_test)
else:
df_all_speakers = cache_load(filters=do_filters, smoke_test=smoke_test)
        df_all_speakers = cache_load(filters=do_filters, smoke_test=smoke_test, level=level)
except ValueError as v_e:
if speaker_name is not None:
print(
Expand Down
7 changes: 4 additions & 3 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -8,15 +8,16 @@
"npm": "^10.2.3",
"plotly": "^1.0.6",
"sass": "^1.58.0",
"typescript-language-server": "^3.3.2"
"typescript-language-server": "^3.3.2",
"webpack": "^5.89.0"
},
"devDependencies": {
"eslint": "^8.53.0",
"eslint": "^8.54.0",
"flow": "^0.2.3",
"flow-remove-types": "^2.222.0",
"husky": "^8.0.3",
"lint-staged": "^15.0.2",
"prettier": "^3.0.3",
"prettier": "^3.1.0",
"pyright": "^1.1.337",
"standard": "^17.1.0",
"w3c-html-validator": "^1.6.1"
Expand Down
5 changes: 4 additions & 1 deletion src/spinorama/compute_estimates.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,8 +130,11 @@ def estimates(spin: pd.DataFrame, spl_h: pd.DataFrame, spl_v: pd.DataFrame) -> d
est["dir_{}_p".format(orientation)] = dir_deg_p
est["dir_{}_m".format(orientation)] = dir_deg_m
est["dir_{}".format(orientation)] = dir_deg
except KeyError as error:
# missing data
logger.debug("Computing %s directivity failed! %s", orientation, error)
except Exception as error:
logger.warning("Computing directivity failed! %s", error)
logger.warning("Computing %s directivity failed! %s", orientation, error)

logger.debug("Estimates v3: %s", est)
except TypeError as type_error:
Expand Down
1 change: 0 additions & 1 deletion update_3rdparties.sh
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@ wget -O${ASSETS}/fuse-${FUSE}.min.js https://cdn.jsdelivr.net/npm/fuse.js@${FUSE
wget -O${ASSETS}/fontawesome-${FONTAWESOME}.min.css https://cdn.jsdelivr.net/npm/@fortawesome/fontawesome-free@${FONTAWESOME}/css/all.min.css
mkdir -p ${WEBFONTS}
wget -O${WEBFONTS}/fa-brands-400.woff2 https://cdn.jsdelivr.net/npm/@fortawesome/fontawesome-free@${FONTAWESOME}/webfonts/fa-brands-400.woff2
wget -O${WEBFONTS}/fa-solid-400.woff2 https://cdn.jsdelivr.net/npm/@fortawesome/fontawesome-free@${FONTAWESOME}/webfonts/fa-solid-400.woff2
wget -O${WEBFONTS}/fa-solid-900.woff2 https://cdn.jsdelivr.net/npm/@fortawesome/fontawesome-free@${FONTAWESOME}/webfonts/fa-solid-900.woff2
wget -O${WEBFONTS}/fa-regular-400.woff2 https://cdn.jsdelivr.net/npm/@fortawesome/fontawesome-free@${FONTAWESOME}/webfonts/fa-regular-400.woff2

16 changes: 8 additions & 8 deletions update_website.sh
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
echo "Update starts"
export PYTHONPATH=src:src/website:src/spinorama:.

PYTHON=python3.10
PYTHON=python3.11

IP="127.0.0.1"
case $HOSTNAME in
Expand All @@ -40,7 +40,7 @@ esac
#echo $IP

# check meta
command=$(python3.10 ./check_meta.py)
command=$(python3.11 ./check_meta.py)
status=$?
if [ $status -ne 0 ]; then
echo "KO checking metadata ($status)";
Expand All @@ -53,7 +53,7 @@ fi
./update_pictures.sh
# generate all graphs if some are missing
rm -fr /tmp/ray
command=$(python3.10 ./generate_graphs.py --dash-ip="$IP")
command=$(python3.11 ./generate_graphs.py --dash-ip="$IP")
status=$?
if [ $status -ne 0 ]; then
echo "KO after generate graph!"
Expand All @@ -63,7 +63,7 @@ else
fi
# recompute metadata for all speakers
rm -f docs/assets/metadata.json
command=$(python3.10 ./generate_meta.py --dash-ip="$IP")
command=$(python3.11 ./generate_meta.py --dash-ip="$IP")
status=$?
if [ $status -ne 0 ]; then
echo "KO after generate meta!"
Expand All @@ -75,7 +75,7 @@ fi
./update_pictures.sh
# generate radar
# rm -f docs/speakers/*/spider*
command=$(python3.10 ./generate_radar.py)
command=$(python3.11 ./generate_radar.py)
status=$?
if [ $status -ne 0 ]; then
echo "KO after generate radar!"
Expand All @@ -85,7 +85,7 @@ else
fi
# generate eq_compare
# rm -f docs/speakers/*/eq_compare*
command=$(python3.10 ./generate_eq_compare.py)
command=$(python3.11 ./generate_eq_compare.py)
status=$?
if [ $status -ne 0 ]; then
echo "KO after generate EQ compare!"
Expand All @@ -95,7 +95,7 @@ else
fi
# generate status
rm -f docs/stats/*.json
command=$(python3.10 ./generate_stats.py)
command=$(python3.11 ./generate_stats.py)
status=$?
if [ $status -ne 0 ]; then
echo "KO after generate statistics!"
Expand All @@ -106,7 +106,7 @@ fi
# generate website
./update_brands.sh
./update_reviewers.sh
command=$(python3.10 ./generate_html.py --dev --sitedev=https://dev.spinorama.org)
command=$(python3.11 ./generate_html.py --dev --sitedev=https://dev.spinorama.org)
status=$?
if [ $status -ne 0 ]; then
echo "KO after generate HTML!"
Expand Down

0 comments on commit 55fbe4c

Please sign in to comment.