Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions src/components/plots/AnalysisWG.tsx
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
"use client";
import { ArrayMinMax, GetCurrentArray, GetCurrentArrayWorkers } from '@/utils/HelperFuncs';
import { ArrayMinMax, GetCurrentArray } from '@/utils/HelperFuncs';
import * as THREE from 'three';
import React, { useEffect, useRef } from 'react';
import { DataReduction, Convolve, Multivariate2D, Multivariate3D, CUMSUM3D, Convolve2D, CustomShader } from '../computation/webGPU';
Expand Down Expand Up @@ -98,7 +98,7 @@ const AnalysisWG = ({ setTexture, }: { setTexture: React.Dispatch<React.SetState
}

// --- 2. Dispatch GPU computation based on the operation ---
const inputArray = analysisMode ? analysisArray : await GetCurrentArrayWorkers(analysisStore)
const inputArray = analysisMode ? analysisArray : await GetCurrentArray(analysisStore)
const shapeInfo = { shape: dataShape, strides};
const kernelParams = { kernelDepth, kernelSize };
// [1538316, 1481, 1]
Expand Down Expand Up @@ -204,7 +204,7 @@ const AnalysisWG = ({ setTexture, }: { setTexture: React.Dispatch<React.SetState

const is2D = outputShape.length === 2
async function Analyze(){
const dataArray = analysisMode ? analysisArray : await GetCurrentArrayWorkers(analysisStore)
const dataArray = analysisMode ? analysisArray : GetCurrentArray(analysisStore)
const newArray = await CustomShader(dataArray, shapeInfo, kernelParams, axis, customShader?? "") as Float16Array
const {minVal, maxVal} = valueScales
const textureData = new Uint8Array(newArray.length)
Expand Down
89 changes: 39 additions & 50 deletions src/components/plots/FlatMap.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ import { useZarrStore } from '@/GlobalStates/ZarrStore';
import { vertShader } from '@/components/computation/shaders'
import { useShallow } from 'zustand/shallow'
import { ThreeEvent } from '@react-three/fiber';
import { coarsenFlatArray, GetCurrentArray, GetCurrentArrayWorkers, GetTimeSeries, parseUVCoords, deg2rad } from '@/utils/HelperFuncs';
import { coarsenFlatArray, GetCurrentArray, GetTimeSeries, parseUVCoords, deg2rad } from '@/utils/HelperFuncs';
import { evaluate_cmap } from 'js-colormaps-es';
import { useCoordBounds } from '@/hooks/useCoordBounds';
import { GetFrag } from '../textures';
Expand Down Expand Up @@ -90,19 +90,8 @@ const FlatMap = ({textures, infoSetters} : {textures : THREE.DataTexture[] | THR
const infoRef = useRef<boolean>(false)
const lastUV = useRef<THREE.Vector2>(new THREE.Vector2(0,0))
const rotateMap = analysisMode && axis == 2;
// const sampleArray = useMemo(()=> analysisMode ? analysisArray : GetCurrentArray(),[analysisMode, analysisArray, textures])
const sampleArray = useMemo(()=> analysisMode ? analysisArray : GetCurrentArray(),[analysisMode, analysisArray, textures])
const analysisDims = useMemo(()=>dimArrays.length > 2 ? dimSlices.filter((_e,idx)=> idx != axis) : dimSlices,[dimSlices,axis])
const [sampleArray, setSampleArray] = useState<any | undefined>(undefined) // Moved this to a state as async functions cannot be used in useMemo
useEffect(()=>{
if (analysisMode){
setSampleArray(analysisArray)
return
}
else {
GetCurrentArrayWorkers().then(e=> setSampleArray(e))
return
}
},[analysisMode, analysisArray, textures])

const {lonBounds, latBounds} = useCoordBounds()

Expand Down Expand Up @@ -133,44 +122,44 @@ const FlatMap = ({textures, infoSetters} : {textures : THREE.DataTexture[] | THR


// ----- TIMESERIES ----- //
async function HandleTimeSeries(event: THREE.Intersection){
const uv = event.uv;
const normal = new THREE.Vector3(0,0,1)
if(uv){
const tsUV = flipY ? new THREE.Vector2(uv.x, 1-uv.y) : uv
const tempTS = GetTimeSeries({data:analysisMode ? analysisArray : await GetCurrentArrayWorkers(), shape:dataShape, stride:strides},{uv:tsUV,normal})
setPlotDim(0) //I think this 2 is only if there are 3-dims. Need to rework the logic

const coordUV = parseUVCoords({normal:normal,uv:uv})
let dimCoords = coordUV.map((val,idx)=>val ? dimSlices[idx][Math.round(val*dimSlices[idx].length)] : null)
const thisDimNames = dimNames.filter((_,idx)=> dimCoords[idx] !== null)
const thisDimUnits = dimUnits.filter((_,idx)=> dimCoords[idx] !== null)
dimCoords = dimCoords.filter(val => val !== null)
const tsID = `${dimCoords[0]}_${dimCoords[1]}`
const tsObj = {
color:evaluate_cmap(getColorIdx()/10,"Paired"),
data:tempTS
}
incrementColorIdx();
updateTimeSeries({ [tsID] : tsObj})
const dimObj = {
first:{
name:thisDimNames[0],
loc:dimCoords[0] ?? 0,
units:thisDimUnits[0]
},
second:{
name:thisDimNames[1],
loc:dimCoords[1] ?? 0,
units:thisDimUnits[1]
},
plot:{
units:dimUnits[0]
/**
 * Handle a click on the flat map: extract the time series at the clicked
 * UV location and register it — together with the dimension coordinates of
 * the click — in the global time-series stores.
 */
function HandleTimeSeries(event: THREE.Intersection){
const uv = event.uv;
const normal = new THREE.Vector3(0,0,1) // flat map always faces +Z
if(uv){
// Textures may be stored Y-flipped; mirror V so sampling matches the display.
const tsUV = flipY ? new THREE.Vector2(uv.x, 1-uv.y) : uv
// Sample the series from either the analysis result or the current raw array.
const tempTS = GetTimeSeries({data:analysisMode ? analysisArray : GetCurrentArray(), shape:dataShape, stride:strides},{uv:tsUV,normal})
setPlotDim(0) //I think this 2 is only if there are 3-dims. Need to rework the logic

const coordUV = parseUVCoords({normal:normal,uv:uv})
// Map each UV fraction to the nearest coordinate value along its dimension;
// null marks a dimension with no fixed coordinate. Clamp the index:
// Math.round(val*length) === length when val is ~1 (a click on the far edge),
// which would read out of bounds and put `undefined` into dimCoords/tsID.
let dimCoords = coordUV.map((val,idx)=>val ? dimSlices[idx][Math.min(dimSlices[idx].length - 1, Math.round(val*dimSlices[idx].length))] : null)
const thisDimNames = dimNames.filter((_,idx)=> dimCoords[idx] !== null)
const thisDimUnits = dimUnits.filter((_,idx)=> dimCoords[idx] !== null)
dimCoords = dimCoords.filter(val => val !== null)
// Key identifying this time series by the fixed coordinates of the click.
const tsID = `${dimCoords[0]}_${dimCoords[1]}`
const tsObj = {
color:evaluate_cmap(getColorIdx()/10,"Paired"), // cycle through the "Paired" colormap
data:tempTS
}
incrementColorIdx();
updateTimeSeries({ [tsID] : tsObj})
// Record where the sample was taken so the plot can label it.
const dimObj = {
first:{
name:thisDimNames[0],
loc:dimCoords[0] ?? 0,
units:thisDimUnits[0]
},
second:{
name:thisDimNames[1],
loc:dimCoords[1] ?? 0,
units:thisDimUnits[1]
},
plot:{
units:dimUnits[0]
}
}
updateDimCoords({[tsID] : dimObj})
}
}
Comment on lines +126 to 162
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

The indentation of the HandleTimeSeries function body is inconsistent with the rest of the file, using an excessive 8-space offset. This should be corrected to match the project's standard indentation (likely 2 spaces relative to the function declaration) to maintain readability.

      const uv = event.uv;
      const normal = new THREE.Vector3(0, 0, 1);
      if (uv) {
        const tsUV = flipY ? new THREE.Vector2(uv.x, 1 - uv.y) : uv;
        const tempTS = GetTimeSeries({ data: analysisMode ? analysisArray : GetCurrentArray(), shape: dataShape, stride: strides }, { uv: tsUV, normal });
        setPlotDim(0);

        const coordUV = parseUVCoords({ normal: normal, uv: uv });
        let dimCoords = coordUV.map((val, idx) => val ? dimSlices[idx][Math.round(val * dimSlices[idx].length)] : null);
        const thisDimNames = dimNames.filter((_, idx) => dimCoords[idx] !== null);
        const thisDimUnits = dimUnits.filter((_, idx) => dimCoords[idx] !== null);
        dimCoords = dimCoords.filter(val => val !== null);
        const tsID = `${dimCoords[0]}_${dimCoords[1]}`;
        const tsObj = {
          color: evaluate_cmap(getColorIdx() / 10, "Paired"),
          data: tempTS
        };
        incrementColorIdx();
        updateTimeSeries({ [tsID]: tsObj });
        const dimObj = {
          first: {
            name: thisDimNames[0],
            loc: dimCoords[0] ?? 0,
            units: thisDimUnits[0]
          },
          second: {
            name: thisDimNames[1],
            loc: dimCoords[1] ?? 0,
            units: thisDimUnits[1]
          },
          plot: {
            units: dimUnits[0]
          }
        };
        updateDimCoords({ [tsID]: dimObj });
      }
    }

}
updateDimCoords({[tsID] : dimObj})
}
}
// ----- SHADER MATERIAL ----- //
const shaderMaterial = useMemo(()=>new THREE.ShaderMaterial({
glslVersion: THREE.GLSL3,
Expand Down
3 changes: 1 addition & 2 deletions src/components/textures/TextureMakers.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,8 @@ function StoreData(array: Array, valueScales?: {maxVal: number, minVal: number})
const [minVal,maxVal] = valueScales ? [valueScales.minVal, valueScales.maxVal] : ArrayMinMax(data )
const textureData = new Uint8Array(data.length)
const range = (maxVal - minVal)
const multiplier = 1/range
for (let i = 0; i < data.length; i++){
const normed = (data[i] - minVal) * multiplier;
const normed = (data[i] - minVal) / range;
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

This change introduces a performance regression by moving the division operation inside the loop. In the previous version, a reciprocal multiplier (1 / range) was calculated once outside the loop, allowing for more efficient multiplication within the loop. For large data arrays, this can have a noticeable impact on performance.

if (isNaN(normed)){
textureData[i] = 255;
} else {
Expand Down
29 changes: 0 additions & 29 deletions src/components/workers/chunkWorker.ts

This file was deleted.

3 changes: 1 addition & 2 deletions src/components/zarr/GetArray.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,7 @@ export async function GetArray(varOveride?: string) {
const { idx4D, initStore, variable, setProgress, setStrides, setStatus } = useGlobalStore.getState();
const { compress, xSlice, ySlice, zSlice, coarsen, kernelSize, kernelDepth, fetchNC, setCurrentChunks, setArraySize } = useZarrStore.getState();
const { cache } = useCacheStore.getState();
const useNC = initStore.startsWith("local") && fetchNC // In case a user has NetCDF switched but then goes to a remote
const fetcher = useNC ? NCFetcher() : zarrFetcher()
const fetcher = fetchNC ? NCFetcher() : zarrFetcher()
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

The removal of the initStore.startsWith("local") check is a logic regression. This condition ensured that NCFetcher was only used for local stores, providing a fallback to zarrFetcher for remote URLs even if the NetCDF toggle was active. Reverting this may cause errors when users attempt to access remote data while the NetCDF setting is enabled.

Suggested change
const fetcher = fetchNC ? NCFetcher() : zarrFetcher()
const useNC = initStore.startsWith("local") && fetchNC;
const fetcher = useNC ? NCFetcher() : zarrFetcher();

const targetVariable = varOveride ?? variable;
const meta = await fetcher.getMetadata(targetVariable);
const { shape, chunkShape, fillValue, dtype } = meta;
Expand Down
4 changes: 1 addition & 3 deletions src/components/zarr/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -59,9 +59,7 @@ export function ToFloat16(array : Float32Array, scalingFactor: number | null) :
initialScale
denominator = Math.pow(10,newScalingFactor);
multiplier = 1/denominator;
// Need SharedArrayBuffer here or else chunks passed to workers will become detached on the main thread.
const buffer = new SharedArrayBuffer(array.length * 2) // 2 for float16
const newArray = new Float16Array(buffer)
const newArray = new Float16Array(array.length)
for (let i = 0; i < array.length; i++) {
newArray[i] = array[i] * multiplier;
}
Expand Down
95 changes: 0 additions & 95 deletions src/utils/HelperFuncs.ts
Original file line number Diff line number Diff line change
Expand Up @@ -301,101 +301,6 @@ export function GetCurrentArray(overrideStore?:string){
}
}

/**
 * Assemble the full current data array from cached chunks using a pool of
 * web workers that copy (and decompress) each chunk into a shared buffer.
 *
 * @param overrideStore - optional store key to read from instead of the
 *   globally selected `initStore`.
 * @returns a Float16Array view over the assembled data; for non-chunked
 *   cache entries, the cached (possibly decompressed) array is returned
 *   directly.
 */
export async function GetCurrentArrayWorkers(overrideStore?:string){
const { variable, is4D, idx4D, initStore, strides, dataShape, setStatus }= useGlobalStore.getState()
const { arraySize, currentChunks } = useZarrStore.getState()
const {cache} = useCacheStore.getState();
const store = overrideStore ? overrideStore : initStore

// Fast path: the whole variable is cached as a single (non-chunked) entry.
if (cache.has(is4D ? `${store}_${idx4D}_${variable}` : `${store}_${variable}`)){ // Non-chunked data
const chunk = cache.get(is4D ? `${store}_${idx4D}_${variable}` : `${store}_${variable}`)
const compressed = chunk.compressed
setStatus(compressed ? "Decompressing data..." : null)
const thisData = compressed ? DecompressArray(chunk.data) : chunk.data
setStatus(null)
return thisData
}

// SharedArrayBuffer so all workers can write their chunk into one output
// without the data becoming detached when posted across threads.
const sharedBuffer = new SharedArrayBuffer(arraySize * Float16Array.BYTES_PER_ELEMENT)
const hasZ = dataShape.length > 2;
const [xStartIdx, xEndIdx] = currentChunks.x
const [yStartIdx, yEndIdx] = currentChunks.y
const [zStartIdx, zEndIdx] = currentChunks.z

// One task per cached chunk in the current selection window.
type Task = {
chunkData: Float16Array | Uint8Array
chunkShape: number[]
chunkStride: number[]
chunkCoord: [number, number, number]
compressed: boolean
}
const tasks: Task[] = []

for (let z = zStartIdx; z < zEndIdx; z++) {
for (let y = yStartIdx; y < yEndIdx; y++) {
for (let x = xStartIdx; x < xEndIdx; x++) {
const chunkID = `z${z}_y${y}_x${x}`
const cacheName = is4D ? `${store}_${variable}_${idx4D}_chunk_${chunkID}` : `${store}_${variable}_chunk_${chunkID}`
const chunk = cache.get(cacheName);
const compressed = chunk.compressed;
tasks.push({
chunkData: chunk.data,
chunkShape: chunk.shape,
chunkStride: chunk.stride,
chunkCoord: [z, y, x],
compressed,
})
}
}
}
// Pool size: one worker per core, but never more workers than tasks.
const POOL_COUNT = navigator.hardwareConcurrency || 4;
const workers = Array.from({length: Math.min(POOL_COUNT, tasks.length)}, () =>
new Worker(new URL('../components/workers/chunkWorker.ts', import.meta.url), { type: 'module' })
)

const terminateAll = () => workers.forEach(w => w.terminate())

// Shared counters drive the work-stealing loop below.
let taskIndex = 0
let completed = 0

// Resolve once every task has been processed; any worker error rejects.
await new Promise<void>((resolve, reject) => {
const dispatch = (worker: Worker) => {
if (taskIndex >= tasks.length) return // this worker is done
const task = tasks[taskIndex++] // Next instruction set
worker.onmessage = () => {
completed++
if (completed === tasks.length) {
terminateAll()
resolve()
} else { // More tasks left. Send worker new instructions
dispatch(worker)
}
}
worker.onerror = (e) => {
terminateAll()
reject(e)
}
worker.postMessage({
sharedBuffer,
chunkData: task.chunkData,
chunkShape: task.chunkShape,
chunkStride: task.chunkStride,
dataShape,
strides,
hasZ,
compressed:task.compressed,
chunkCoord: task.chunkCoord,
startCoords: [zStartIdx, yStartIdx, xStartIdx],
})
}
workers.forEach(dispatch) // Distribute first tasks
}).catch((e) => {
// A deliberate cancellation is not an error; anything else propagates.
if (e.message === 'cancelled') return
throw e
})
setStatus(null)
return new Float16Array(sharedBuffer)
}

export function TwoDecimals(val: number){
return Math.round(val * 100)/100
Expand Down
Loading