refactor: Improve cache types
bprusinowski committed Feb 28, 2024
1 parent be30345 commit 9001565
Showing 3 changed files with 15 additions and 17 deletions.
4 changes: 2 additions & 2 deletions app/domain/data.ts
@@ -1,4 +1,4 @@
-import { Literal, NamedNode, Term } from "rdf-js";
+import { Literal, Term } from "rdf-js";

import { ComponentType } from "@/config-types";
import {
@@ -11,7 +11,7 @@ import {
} from "@/graphql/resolver-types";
import { ResolvedDimension } from "@/graphql/shared-types";

-export type RawObservationValue = Literal | NamedNode;
+export type RawObservationValue = Term;

export type RawObservation = Record<string, RawObservationValue>;

10 changes: 5 additions & 5 deletions app/rdf/query-cache.ts
@@ -4,22 +4,22 @@ import { LRUCache } from "typescript-lru-cache";

type SparqlClient = StreamClient | ParsingClient;

-export const executeWithCache = async <T>(
+export const executeWithCache = async <Executed, Parsed>(
sparqlClient: SparqlClient,
query: string,
-execute: () => Promise<any>,
-parse: (v: any) => T,
+execute: () => Promise<Executed>,
+parse: (result: Executed) => Parsed,
cache: LRUCache | undefined
) => {
const key = `${sparqlClient.query.endpoint.endpointUrl} - ${query}`;
const cached = cache?.get(key);

if (cached) {
-return cached as T;
+return cached as Parsed;
}

const result = await execute();
-const parsed = parse(result) as T;
+const parsed = parse(result) as Parsed;

if (cache) {
cache.set(key, parsed);
18 changes: 8 additions & 10 deletions app/rdf/query-dimension-values.ts
@@ -4,7 +4,6 @@ import keyBy from "lodash/keyBy";
import mapValues from "lodash/mapValues";
import sortBy from "lodash/sortBy";
import { CubeDimension } from "rdf-cube-view-query";
-import LiteralExt from "rdf-ext/lib/Literal";
import { NamedNode, Quad, Term } from "rdf-js";
import { ParsingClient } from "sparql-http-client/ParsingClient";
import { ResultRow } from "sparql-http-client/ResultParser";
@@ -249,14 +248,12 @@ ${getQueryFilters(filterList, cube, dimensionIri)}`
.THEN.BY(RDF.variable("position"), true)
.LIMIT(1).prologue`${pragmas}`.build();

-console.log(query);
-
try {
return await executeWithCache(
sparqlClient,
query,
() => sparqlClient.query.select(query, { operation: "postUrlencoded" }),
-(result: ResultRow[]) => result.map((d) => d.value.value),
+(result) => result.map((d) => d.value.value),
cache
);
} catch {
@@ -344,9 +341,7 @@ ${formatFilterIntoSparqlFilter(value, dimension, versioned, i + j)}`;
`;
};

-type MinMaxResult = [{ minValue: LiteralExt; maxValue: LiteralExt }];
-
-const parseMinMax = (result: MinMaxResult) => {
+const parseMinMax = (result: ResultRow[]) => {
const { minValue, maxValue } = result[0];
const min = parseObservationValue({ value: minValue }) ?? 0;
const max = parseObservationValue({ value: maxValue }) ?? 0;
@@ -368,13 +363,16 @@ export const loadMinMaxDimensionValues = async ({
const query = SELECT`(MIN(?value) as ?minValue) (MAX(?value) as ?maxValue)`
.WHERE`<${datasetIri}> ${ns.cube.observationSet}/${ns.cube.observation} ?observation .
?observation <${dimensionIri}> ?value .
-FILTER ( (STRLEN(STR(?value)) > 0) && (STR(?value) != "NaN") )`.build();
+FILTER ( (STRLEN(STR(?value)) > 0) && (STR(?value) != "NaN") )`;

try {
return await executeWithCache(
sparqlClient,
-query,
-() => sparqlClient.query.select(query, { operation: "postUrlencoded" }),
+query.build(),
+() =>
+  query.execute(sparqlClient.query, {
+    operation: "postUrlencoded",
+  }),
parseMinMax,
cache
);
