Skip to content

Commit

Permalink
WIP
Browse files — browse the repository at this point in the history
  • Loading branch information
bprusinowski committed Nov 16, 2023
1 parent bb76f90 commit 81fe169
Showing 1 changed file with 67 additions and 3 deletions.
70 changes: 67 additions & 3 deletions app/graphql/resolvers/rdf.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ import { truthy } from "@/domain/types";
import { Loaders } from "@/graphql/context";
import {
DataCubeComponentFilter,
DataCubeObservationFilter,
DataCubeResolvers,
DimensionResolvers,
QueryResolvers,
Expand Down Expand Up @@ -323,9 +324,15 @@ export const dataCubesMetadata: NonNullable<
export const dataCubesObservations: NonNullable<
QueryResolvers["dataCubesObservations"]
> = async (_, { locale, filters }, { setup }, info) => {
if (filters.length > 1 && filters.some((f) => f.joinBy === undefined)) {
throw new Error("Can't query multiple cubes observations without joinBy!");
}

const { loaders, sparqlClient, cache } = await setup(info);
// If the cube was updated, we need to also update the filter with the correct iri.
const filtersWithCorrectIri: DataCubeObservationFilter[] = [];

const data: Observation[] = [];
const dataByCubeIri: Record<string, Observation[]> = {};
const sparqlEditorUrls: DataCubesObservations["sparqlEditorUrls"] = [];

await Promise.all(
Expand All @@ -340,6 +347,11 @@ export const dataCubesObservations: NonNullable<
const cube = latest ? await getLatestCube(rawCube) : rawCube;
// TODO: optimize to avoid fetching the shape at all
await cube.fetchShape();
filtersWithCorrectIri.push({
...filter,
iri: cube.term?.value!,
});

const { query, observations } = await getCubeObservations({
cube,
locale,
Expand All @@ -348,7 +360,8 @@ export const dataCubesObservations: NonNullable<
componentIris,
cache,
});
data.push(...observations);

dataByCubeIri[cube.term?.value!] = observations;
sparqlEditorUrls.push({
cubeIri: cube.term?.value!,
url: getSparqlEditorUrl({
Expand All @@ -362,8 +375,59 @@ export const dataCubesObservations: NonNullable<
})
);

const joinBys = filtersWithCorrectIri.reduce((acc, f) => {
acc[f.iri] = f.joinBy!;

return acc;
}, {} as Record<string, string>);

// FIXME: handle situation where we have column of the same name in multiple cubes
const observations: Observation[] = [];
const allKeys = Object.entries(dataByCubeIri).flatMap(([_, obs]) => {
return Object.keys(obs[0]);
});

Object.entries(dataByCubeIri).forEach(([cubeIri, obs], i) => {
const rest = Object.entries(dataByCubeIri)
.filter((_, j) => j !== i)
.map((c) =>
c[1].map((d) => ({ ...d, cubeIri: c[0] } as Record<string, any>))
);

[...obs].forEach((ob) => {
for (const r of rest.length === 0 ? [[]] : [...rest]) {
const base = allKeys.reduce((acc, d) => {
acc[d] = null;

return acc;
}, {} as Record<string, any>);

const toMergeId = r.findIndex(
(o) => o[joinBys[o.cubeIri]] === ob[joinBys[cubeIri]]
);

if (toMergeId !== -1) {
const { cubeIri: otherCubeIri, ...other } = r[toMergeId];
const toPush = Object.assign(base, ob, other, {
joinBy: ob[joinBys[cubeIri]] ?? other[joinBys[otherCubeIri]],
});
rest.splice(toMergeId, 1);
observations.push(toPush);
} else {
observations.push(
Object.assign(base, ob, { joinBy: ob[joinBys[cubeIri]] })
);
}
}
obs.splice(obs.indexOf(ob), 1);
});
});

console.log(observations);

return {
data,
// data: Object.values(dataByCubeIri).flat(),
data: observations,
sparqlEditorUrls,
};
};
Expand Down

0 comments on commit 81fe169

Please sign in to comment.