nan values won't break charts down now #984

Merged · 2 commits · Jun 9, 2021
6 changes: 4 additions & 2 deletions frontend/package.json
@@ -31,11 +31,13 @@
"build": "./scripts/build.sh",
"build:core": "yarn workspace @visualdl/core build",
"build:demo": "yarn workspace @visualdl/demo build",
"build:wasm": "yarn workspace @visualdl/wasm build",
"clean": "rimraf output packages/*/dist packages/wasm/target",
"dev": "yarn dev:core",
"dev:core": "yarn workspace @visualdl/core dev",
"dev:demo": "yarn workspace @visualdl/server dev:demo",
"dev:server": "yarn workspace @visualdl/server dev",
"dev:wasm": "yarn workspace @visualdl/wasm dev",
"lint": "eslint --ext .tsx,.jsx.ts,.js,.mjs .",
"format": "prettier --write \"**/*.{ts,tsx,js,jsx}\"",
"test": "yarn workspaces run test",
@@ -44,8 +46,8 @@
"version": "yarn format && git add -A"
},
"devDependencies": {
"@typescript-eslint/eslint-plugin": "4.26.0",
"@typescript-eslint/parser": "4.26.0",
"@typescript-eslint/eslint-plugin": "4.26.1",
"@typescript-eslint/parser": "4.26.1",
"eslint": "7.28.0",
"eslint-config-prettier": "8.3.0",
"eslint-plugin-license-header": "0.2.0",
12 changes: 6 additions & 6 deletions frontend/packages/core/package.json
@@ -95,30 +95,30 @@
"@types/chai": "4.2.18",
"@types/d3": "6.7.0",
"@types/d3-format": "2.0.0",
"@types/echarts": "4.9.7",
"@types/echarts": "4.9.8",
"@types/file-saver": "2.0.2",
"@types/lodash": "4.14.170",
"@types/mime-types": "2.1.0",
"@types/nprogress": "0.2.0",
"@types/numeric": "1.2.1",
"@types/react": "17.0.9",
"@types/react-dom": "17.0.6",
"@types/react": "17.0.10",
"@types/react-dom": "17.0.7",
"@types/react-helmet": "6.1.1",
"@types/react-rangeslider": "2.2.3",
"@types/react-redux": "7.1.16",
"@types/react-router-dom": "5.1.7",
"@types/react-table": "7.7.1",
"@types/snowpack-env": "2.3.3",
"@types/styled-components": "5.1.9",
"@types/styled-components": "5.1.10",
"@types/three": "0.129.1",
"@visualdl/mock": "2.2.0-1",
"@web/test-runner": "0.13.5",
"@web/test-runner": "0.13.6",
"chai": "4.3.4",
"chalk": "4.1.1",
"dotenv": "10.0.0",
"enhanced-resolve": "5.8.2",
"html-minifier": "4.0.0",
"snowpack": "3.5.5",
"snowpack": "3.5.6",
"snowpack-plugin-copy": "1.0.1",
"typescript": "4.3.2"
},
@@ -85,7 +85,14 @@ const ScalarChart: FunctionComponent<ScalarChartProps> = ({

const xAxisType = useMemo(() => (xAxis === XAxis.WallTime ? XAxisType.time : XAxisType.value), [xAxis]);

const transformParams = useMemo(() => [datasets?.map(data => data ?? []) ?? [], smoothing], [datasets, smoothing]);
const transformParams = useMemo(
() => [
datasets?.map(data => data?.map(row => [row[0], row[1], Number.isFinite(row[2]) ? row[2] : null]) ?? []) ??
[],
smoothing
],
[datasets, smoothing]
);
const {data: smoothedDatasetsOrUndefined} = useWebAssembly<Dataset[]>('scalar_transform', transformParams);
const smoothedDatasets = useMemo<NonNullable<typeof smoothedDatasetsOrUndefined>>(
() => smoothedDatasetsOrUndefined ?? [],
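This hunk strips non-finite raw values before the datasets reach the `scalar_transform` WebAssembly routine, replacing them with `null` so the smoothing step no longer receives NaN or Infinity. A minimal sketch of that sanitization, assuming rows shaped `[wallTime, step, value]` (the helper name is illustrative, not part of the PR):

```typescript
// Illustrative sketch: map non-finite scalar values to null before handing
// datasets to the WASM transform. Row shape assumed: [wallTime, step, value].
type RawRow = [number, number, number | null];

function sanitizeDatasets(datasets?: (RawRow[] | undefined)[]): RawRow[][] {
    return (
        datasets?.map(
            data => data?.map((row): RawRow => [row[0], row[1], Number.isFinite(row[2]) ? row[2] : null]) ?? []
        ) ?? []
    );
}

// Mirrors the memoized transformParams above:
// useMemo(() => [sanitizeDatasets(datasets), smoothing], [datasets, smoothing]);
```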
2 changes: 1 addition & 1 deletion frontend/packages/core/src/resource/scalar/chart.ts
@@ -158,7 +158,7 @@ export const tooltip = (data: TooltipData[], stepLength: number, i18n: typeof I1
],
data: data.map(({min, max, item}) => [
valueFormatter(item[3] ?? Number.NaN),
valueFormatter(item[2] ?? Number.NaN),
valueFormatter(Number.isFinite(item[2]) ? (item[2] as number) : Number.NaN),
item[1],
valueFormatter(min ?? Number.NaN),
valueFormatter(max ?? Number.NaN),
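In the tooltip, `item[2] ?? Number.NaN` only replaced `null`/`undefined`; now that a raw value may also be NaN or a non-numeric sentinel, the formatter checks `Number.isFinite` before formatting. The difference in a nutshell (the values below are purely illustrative):

```typescript
// `??` only substitutes for null/undefined; NaN and sentinel strings pass through:
('Inf' as unknown) ?? Number.NaN; // 'Inf' — would reach the formatter unchanged

// `Number.isFinite` rejects null, NaN, ±Infinity and non-numbers alike:
const toFinite = (v: unknown): number => (Number.isFinite(v) ? (v as number) : Number.NaN);
[null, 'Inf', NaN, 0.5].map(toFinite); // [NaN, NaN, NaN, 0.5]
```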
56 changes: 33 additions & 23 deletions frontend/packages/core/src/resource/scalar/data.ts
@@ -16,7 +16,7 @@

// cSpell:words quantile accum debias exponentiated

import type {Dataset, ScalarDataset} from './types';
import type {Dataset, ScalarDataset, Value} from './types';

import BigNumber from 'bignumber.js';
import type {Run} from '~/types';
@@ -34,34 +34,38 @@ export const transform = ({datasets, smoothing}: {datasets: ScalarDataset[]; smo
let startValue = 0;
const bigSmoothing = new BigNumber(smoothing);
data.forEach((d, i) => {
const nextVal = new BigNumber(d[2]);
const millisecond = (d[0] = Math.floor(d[0]));
if (i === 0) {
startValue = millisecond;
}
// relative time in millisecond.
d[4] = Math.floor(millisecond - startValue);
if (!nextVal.isFinite()) {
d[3] = nextVal.toNumber();
if (!Number.isFinite(d[2])) {
d[3] = null;
} else {
// last = last * smoothing + (1 - smoothing) * nextVal;
last = last.multipliedBy(bigSmoothing).plus(bigSmoothing.minus(1).negated().multipliedBy(nextVal));
numAccum++;
let debiasWeight = new BigNumber(1);
if (!bigSmoothing.isEqualTo(1)) {
//debiasWeight = 1.0 - Math.pow(smoothing, numAccum);
debiasWeight = bigSmoothing.exponentiatedBy(numAccum).minus(1).negated();
const nextVal = new BigNumber(d[2] as number);
if (!nextVal.isFinite()) {
d[3] = nextVal.toNumber();
} else {
// last = last * smoothing + (1 - smoothing) * nextVal;
last = last.multipliedBy(bigSmoothing).plus(bigSmoothing.minus(1).negated().multipliedBy(nextVal));
numAccum++;
let debiasWeight = new BigNumber(1);
if (!bigSmoothing.isEqualTo(1)) {
//debiasWeight = 1.0 - Math.pow(smoothing, numAccum);
debiasWeight = bigSmoothing.exponentiatedBy(numAccum).minus(1).negated();
}
// d[3] = last / debiasWeight;
d[3] = last.dividedBy(debiasWeight).toNumber();
}
// d[3] = last / debiasWeight;
d[3] = last.dividedBy(debiasWeight).toNumber();
}
});
return data;
});

export const singlePointRange = (value: number) => ({
min: value ? Math.min(value * 2, 0) : -0.5,
max: value ? Math.max(value * 2, 0) : 0.5
export const singlePointRange = (value: Value) => ({
min: Number.isFinite(value) ? Math.min((value as number) * 2, 0) : -0.5,
max: Number.isFinite(value) ? Math.max((value as number) * 2, 0) : 0.5
});

export const range = ({datasets}: {datasets: Dataset[]}) => {
@@ -72,7 +76,7 @@ export const range = ({datasets}: {datasets: Dataset[]}) => {
max: Number.NaN
};
}
const values = dataset.map(v => v[2]);
const values = dataset.map(v => v[2]).filter(Number.isFinite) as number[];
return {
min: Math.min(...values) ?? Number.NaN,
max: Math.max(...values) ?? Number.NaN
@@ -86,7 +90,7 @@ export const axisRange = ({datasets, outlier}: {datasets: Dataset[]; outlier: bo
if (dataset.length === 0) {
return void 0;
}
const values = dataset.map(v => v[2]);
const values = dataset.map(v => v[2]).filter(Number.isFinite) as number[];
if (!outlier) {
// Get the origin data range.
return {
@@ -95,7 +99,10 @@
};
} else {
// Get the quantile range.
const sorted = dataset.map(v => v[2]).sort();
const sorted = dataset
.map(v => v[2])
.filter(Number.isFinite)
.sort() as number[];
return {
min: quantile(sorted, 0.05),
max: quantile(values, 0.95)
@@ -122,10 +129,13 @@ export const nearestPoint = (data: Dataset[], runs: Run[], idx: number, value: n
let d = Number.POSITIVE_INFINITY;
let dv = value;
for (let i = 0; i < series.length; i++) {
const dd = Math.abs(series[i][idx] - value);
if (d > dd) {
d = dd;
dv = series[i][idx];
const v = series[i][idx];
if (Number.isFinite(v)) {
const dd = Math.abs((v as number) - value);
if (d > dd) {
d = dd;
dv = v as number;
}
}
}
result.push(...series.filter(s => s[idx] === dv).map(item => ({run, item})));
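The heart of `data.ts` is the debiased exponential smoothing. It now writes `null` into the smoothed slot whenever the raw value is not a finite number, instead of letting a single NaN poison the running average; `range`, `axisRange` and `nearestPoint` likewise filter non-finite values before computing extrema, quantiles or distances. A plain-number sketch of the smoothing logic (the real code uses BigNumber and the five-element `Dataset` tuple; this is a simplification, not the literal implementation):

```typescript
// Sketch of NaN-aware, debiased exponential smoothing.
// Row layout assumed: [wallTime, step, value, smoothed, relativeTime].
type Row = [number, number, number | null, number | null, number];

function smooth(rows: Row[], smoothing: number): Row[] {
    let last = 0;
    let numAccum = 0;
    for (const row of rows) {
        const value = row[2];
        if (typeof value !== 'number' || !Number.isFinite(value)) {
            row[3] = null; // leave a gap instead of propagating NaN downstream
            continue;
        }
        // Exponential moving average: last = last * s + (1 - s) * value
        last = last * smoothing + (1 - smoothing) * value;
        numAccum += 1;
        // Debias early points, which are still dominated by the zero seed.
        const debiasWeight = smoothing !== 1 ? 1 - Math.pow(smoothing, numAccum) : 1;
        row[3] = last / debiasWeight;
    }
    return rows;
}
```

With `null` in the smoothed slot, ECharts renders a gap at the missing point rather than dropping the whole series, which is what lets the chart keep working in the presence of NaN values.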
3 changes: 2 additions & 1 deletion frontend/packages/core/src/resource/scalar/index.ts
@@ -31,7 +31,8 @@ export const sortingMethodMap: Record<SM, (points: TooltipData[], data: number[]
[SM.Descending]: (points: TooltipData[]) => sortBy(points, point => point.item[3]).reverse(),
[SM.Ascending]: (points: TooltipData[]) => sortBy(points, point => point.item[3]),
// Compare other points width the trigger point, calculate the nearest sort.
[SM.Nearest]: (points: TooltipData[], data: number[]) => sortBy(points, point => point.item[3] - data[2])
[SM.Nearest]: (points: TooltipData[], data: number[]) =>
sortBy(points, point => (point.item[3] ?? Number.NaN) - data[2])
} as const;

export type {Dataset, ScalarDataset, Range, TooltipData} from './types';
5 changes: 3 additions & 2 deletions frontend/packages/core/src/resource/scalar/types.ts
@@ -18,10 +18,11 @@ import {Run, TimeMode} from '~/types';

export type {Range} from '~/types';

type Value = number;
type InvalidValue = 'NaN' | 'Inf' | '-Inf';
export type Value = number | null | InvalidValue;
type WallTime = number;
type Step = number;
type Smoothed = number;
type Smoothed = number | null;
type Relative = number;

export type Dataset = [WallTime, Step, Value, Smoothed, Relative][];
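`Value` is widened to admit `null` and the string sentinels the backend emits for non-finite floats, and `Smoothed` may now be `null`. A hypothetical type guard (not part of this PR) shows how consumers can narrow a `Value` before doing arithmetic on it:

```typescript
// Hypothetical narrowing helper for the widened Value type; the name is illustrative.
type InvalidValue = 'NaN' | 'Inf' | '-Inf';
type Value = number | null | InvalidValue;

function isFiniteValue(value: Value): value is number {
    return typeof value === 'number' && Number.isFinite(value);
}

const samples: Value[] = [0.3, 'NaN', null, '-Inf', 1.2];
const finite = samples.filter(isFiniteValue); // [0.3, 1.2]
```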
2 changes: 1 addition & 1 deletion frontend/packages/demo/package.json
@@ -35,7 +35,7 @@
"devDependencies": {
"@types/express": "4.17.12",
"@types/mkdirp": "1.0.1",
"@types/node": "15.12.1",
"@types/node": "15.12.2",
"@types/node-fetch": "2.5.10",
"@types/rimraf": "3.0.0",
"cpy-cli": "3.1.1",
2 changes: 1 addition & 1 deletion frontend/packages/netron/package.json
@@ -52,7 +52,7 @@
"sass-loader": "12.0.0",
"terser": "5.7.0",
"webpack": "5.38.1",
"webpack-cli": "4.7.0"
"webpack-cli": "4.7.2"
},
"engines": {
"node": ">=12",
4 changes: 2 additions & 2 deletions frontend/packages/server/package.json
@@ -41,12 +41,12 @@
"enhanced-resolve": "5.8.2",
"express": "4.17.1",
"http-proxy-middleware": "2.0.0",
"pm2": "4.5.6"
"pm2": "5.0.4"
},
"devDependencies": {
"@types/enhanced-resolve": "3.0.6",
"@types/express": "4.17.12",
"@types/node": "15.12.1",
"@types/node": "15.12.2",
"@visualdl/mock": "2.2.0-1",
"cross-env": "7.0.3",
"nodemon": "2.0.7",
1 change: 1 addition & 0 deletions frontend/packages/wasm/package.json
@@ -33,6 +33,7 @@
"types": "dist/index.d.ts",
"scripts": {
"build": "wasm-pack build --release --out-dir dist --out-name index --target web .",
"dev": "wasm-pack build --dev --out-dir dist --out-name index --target web .",
"test": "echo \"Error: no test specified\" && exit 0"
},
"devDependencies": {
57 changes: 29 additions & 28 deletions frontend/packages/wasm/src/scalar.rs
@@ -1,8 +1,8 @@
#[derive(Serialize, Deserialize)]
pub struct Dataset(f64, i64, f64);
pub struct Dataset(f64, i64, Option<f64>);

#[derive(Serialize, Deserialize)]
pub struct Smoothed(i64, i64, f64, f64, i64);
pub struct Smoothed(i64, i64, Option<f64>, Option<f64>, i64);

#[derive(Serialize, Deserialize)]
pub struct Range {
@@ -33,6 +33,13 @@ fn quantile(values: &Vec<f64>, p: f64) -> f64 {
return value0 + (value1 - value0) * (i - (i0 as f64));
}

fn sort_values(data: &Vec<Smoothed>) -> (Vec<f64>, Vec<f64>) {
let values: Vec<f64> = data.iter().filter_map(|x| x.2).collect();
let mut sorted: Vec<f64> = values.clone();
sorted.sort_by(|a, b| a.partial_cmp(b).unwrap());
(sorted, values)
}

pub fn transform(datasets: &Vec<Vec<Dataset>>, smoothing: f64) -> Vec<Vec<Smoothed>> {
let mut result: Vec<Vec<Smoothed>> = vec![];
for dataset in datasets.iter() {
@@ -44,8 +51,7 @@ pub fn transform(datasets: &Vec<Vec<Dataset>>, smoothing: f64) -> Vec<Vec<Smooth
let mut num_accum: i32 = 0;
let mut start_value: i64 = 0;
for (i, d) in dataset.iter().enumerate() {
let mut r: Smoothed = Smoothed(0, d.1, d.2, 0.0, 0);
let next_val: f64 = d.2;
let mut r: Smoothed = Smoothed(0, d.1, d.2, Some(0.0), 0);
// second to millisecond.
let millisecond: i64 = d.0.floor() as i64;
r.0 = millisecond;
@@ -54,16 +60,20 @@ pub fn transform(datasets: &Vec<Vec<Dataset>>, smoothing: f64) -> Vec<Vec<Smooth
}
// Relative time in millisecond.
r.4 = millisecond - start_value;
if next_val.is_infinite() {
r.3 = next_val;
} else {
last = last * smoothing + (1.0 - smoothing) * next_val;
num_accum += 1;
let mut debias_weight: f64 = 1.0_f64;
if smoothing != 1.0 {
debias_weight = (1.0_f64 - smoothing.powi(num_accum)).into();
if let Some(next_val) = d.2 {
if next_val.is_infinite() || next_val.is_nan() {
r.3 = Some(next_val);
} else {
last = last * smoothing + (1.0 - smoothing) * next_val;
num_accum += 1;
let mut debias_weight: f64 = 1.0_f64;
if smoothing != 1.0 {
debias_weight = (1.0_f64 - smoothing.powi(num_accum)).into();
}
r.3 = Some(last / debias_weight);
}
r.3 = last / debias_weight;
} else {
r.3 = None;
}
row.push(r);
}
@@ -76,17 +86,12 @@ pub fn range(datasets: &Vec<Vec<Smoothed>>) -> Vec<Range> {
let mut ranges: Vec<Range> = vec![];

for data in datasets.iter() {
let n: usize = data.len();

if n == 0 {
if data.len() == 0 {
ranges.push(Range::new(f64::NAN, f64::NAN));
}

let values: Vec<f64> = data.iter().map(|x| x.2).collect();
let mut sorted: Vec<f64> = values.clone();
sorted.sort_by(|a, b| a.partial_cmp(b).unwrap());

ranges.push(Range::new(sorted[0], sorted[n - 1]));
let (sorted, _) = sort_values(data);
ranges.push(Range::new(sorted[0], sorted[sorted.len() - 1]));
}

return ranges;
@@ -96,18 +101,14 @@ pub fn axis_range(datasets: &Vec<Vec<Smoothed>>, outlier: bool) -> Range {
let mut ranges: Vec<Range> = vec![];

for data in datasets.iter() {
let n: usize = data.len();

if n == 0 {
if data.len() == 0 {
continue;
}

let values: Vec<f64> = data.iter().map(|x| x.2).collect();
let mut sorted: Vec<f64> = values.clone();
sorted.sort_by(|a, b| a.partial_cmp(b).unwrap());
let (sorted, values) = sort_values(data);

if !outlier {
ranges.push(Range::new(sorted[0], sorted[n - 1]));
ranges.push(Range::new(sorted[0], sorted[sorted.len() - 1]));
} else {
ranges.push(Range::new(
quantile(&sorted, 0.05_f64),
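On the Rust/WebAssembly side the same idea is expressed with `Option<f64>`: the new `sort_values` helper uses `filter_map(|x| x.2)` to drop `None` entries before ranges are computed, and `axis_range` clips outliers at the 5th/95th percentile with a linear-interpolation quantile. For reference, a TypeScript sketch of that quantile over an already sorted, finite-only array (edge-case handling here is an assumption; only the interpolation line is visible in the diff):

```typescript
// Linear-interpolation quantile over an ascending array of finite numbers.
function quantile(sorted: number[], p: number): number {
    const n = sorted.length;
    if (n === 0) return Number.NaN;
    if (p <= 0 || n < 2) return sorted[0];
    if (p >= 1) return sorted[n - 1];
    const i = (n - 1) * p;
    const i0 = Math.floor(i);
    return sorted[i0] + (sorted[i0 + 1] - sorted[i0]) * (i - i0);
}

// Outlier-clipped axis range after non-finite values have been filtered out:
const clippedRange = (sorted: number[]) => ({min: quantile(sorted, 0.05), max: quantile(sorted, 0.95)});
```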