Merge pull request #161 from HSLdevcom/development
Development
e-halinen authored Jun 6, 2024
2 parents eb0cf05 + 7cefb47 commit afeae19
Showing 6 changed files with 106 additions and 20 deletions.
13 changes: 13 additions & 0 deletions README.md
@@ -4,6 +4,19 @@ Data importer for [jore-graphql](https://github.com/HSLdevcom/jore-graphql)

Read more about the [JORE import process](https://github.com/HSLdevcom/hsl-map-documentation/blob/master/Process%20schema/README.md#jore-import-process).

## Development

### Creating custom functions with PostGraphile
When creating a custom function (a "computed column"), it must adhere to the following rules from the [PostGraphile documentation](https://www.graphile.org/postgraphile/computed-columns/); see the sketch after the list for an example.

In short:
- Must adhere to common PostGraphile function restrictions
- Name must begin with the name of the table it applies to, followed by an underscore (_)
- First parameter must be the table type
- Must NOT return VOID
- Must be marked as STABLE (or IMMUTABLE, though that tends to be less common)
- Must be defined in the same PostgreSQL schema as the table
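
A minimal sketch of such a computed column, assuming `jore.stop` has `name_fi` and `name_se` columns (adjust the names to the actual schema):

```sql
-- Sketch only: assumes jore.stop with name_fi and name_se columns.
-- PostGraphile picks this up as a computed column on Stop because the
-- function name starts with the table name plus an underscore ("stop_"),
-- the first parameter is the table type, it does not return VOID,
-- it is marked STABLE, and it is defined in the same schema as the table.
create or replace function jore.stop_display_name(stop jore.stop) returns text as
$$
  select coalesce(stop.name_fi, stop.name_se);
$$ language sql stable;
```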

## Prerequisites

1 change: 1 addition & 0 deletions src/constants.js
@@ -43,6 +43,7 @@ export const SERVER_PORT = secretsEnv.SERVER_PORT || 3000;
export const ADMIN_PASSWORD = secretsEnv.ADMIN_PASSWORD || "password";
export const PATH_PREFIX = secretsEnv.PATH_PREFIX || "/";
export const SCHEMA = "jore";
export const DEFAULT_DATABASE = "postgres";
export const INTERMEDIATE_SCHEMA = "jore_new";
export const AZURE_UPLOAD_CONTAINER = secretsEnv.AZURE_UPLOAD_CONTAINER || "joredumps";
export const AZURE_STORAGE_ACCOUNT = secretsEnv.AZURE_STORAGE_ACCOUNT || "";
93 changes: 81 additions & 12 deletions src/setup/createFunctions.sql
@@ -285,14 +285,14 @@ DO
$$
BEGIN
create type jore.terminus as (
line_id character varying(6),
stop_id character varying(6),
type character varying(6),
line_id text,
stop_id text,
type text,
lat numeric(9, 6),
lon numeric(9, 6),
stop_short_id character varying(6),
stop_area_id character varying(6),
terminal_id character varying(6),
stop_short_id text,
stop_area_id text,
terminal_id text,
point geometry
);
EXCEPTION
@@ -460,14 +460,14 @@
$$
BEGIN
create type jore.terminus_grouped as (
lines character varying(6)[],
lines text[],
lat numeric(9, 6),
lon numeric(9, 6),
stop_area_id character varying(6),
terminal_id character varying(6),
type character varying(6),
name_fi character varying(40),
name_se character varying(40)
stop_area_id text,
terminal_id text,
type text,
name_fi text,
name_se text
);
EXCEPTION
WHEN duplicate_object THEN null;
@@ -1073,6 +1073,11 @@ $$
select * from jore.line where line_id = id AND date_begin = line_date_begin AND date_end = line_date_end;
$$ language sql stable;

create or replace function jore.get_lines_with_id_and_user_date_range(id text, line_date_begin date, line_date_end date) returns setof jore.line as
$$
select * from jore.line where line_id = id AND NOT (date_begin < line_date_begin AND date_end < line_date_begin) AND NOT (date_begin > line_date_end AND date_end > line_date_end);
$$ language sql stable;

create or replace function jore.get_stops_by_ids(stop_ids text[]) returns setof jore.stop as
$$
select * from jore.stop stop where stop.stop_id = any(stop_ids)
@@ -1085,6 +1090,70 @@ select exists(
);
$$ language sql stable;

create or replace function jore.line_routes_for_date_range(line jore.line, route_date_begin date, route_date_end date) returns setof jore.route as
$$
select * from jore.route route
WHERE route.line_id = line.line_id
AND NOT (route.date_begin < route_date_begin AND route.date_end < route_date_begin)
AND NOT (route.date_begin > route_date_end AND route.date_end > route_date_end);
$$ language sql stable;

DO
$$
BEGIN
create type jore.route_timed_stop_departure as (
stop_id character varying(7),
route_id character varying(6),
direction text,
day_type character varying(2)[],
departure_id integer,
hours integer,
minutes integer,
is_next_day boolean,
timing_stop_type integer
);
EXCEPTION
WHEN duplicate_object THEN null;
END
$$;

create or replace function jore.route_timed_stops_departures(route jore.route, user_date_begin date, user_date_end date) returns setof jore.route_timed_stop_departure as
$$
SELECT departure.stop_id,
departure.route_id,
departure.direction,
array_agg(departure.day_type),
departure.departure_id,
departure.hours,
departure.minutes,
departure.is_next_day,
segment.timing_stop_type
FROM jore.departure departure
JOIN jore.route_segment segment
ON segment.stop_id = departure.stop_id AND segment.route_id = departure.route_id AND segment.direction = departure.direction
AND NOT (segment.date_begin < user_date_begin AND segment.date_end < user_date_begin)
AND NOT (segment.date_begin > user_date_end AND segment.date_end > user_date_end)
AND NOT (segment.date_begin < route.date_begin AND segment.date_end < route.date_begin)
AND NOT (segment.date_begin > route.date_end AND segment.date_end > route.date_end)
AND ((segment.timing_stop_type = 1) OR (segment.timing_stop_type = 2) OR (segment.stop_index = 1))
WHERE departure.route_id = route.route_id
AND NOT (departure.date_begin < route.date_begin AND departure.date_end < route.date_begin)
AND NOT (departure.date_begin > route.date_end AND departure.date_end > route.date_end)
AND NOT (segment.date_begin < user_date_begin AND segment.date_end < user_date_begin)
AND NOT (segment.date_begin > user_date_end AND segment.date_end > user_date_end)
GROUP BY departure.stop_id, departure.departure_id, departure.day_type,
departure.route_id, departure.direction, departure.hours, departure.minutes, departure.is_next_day,
segment.timing_stop_type;
$$ language sql stable;

create or replace function jore.route_all_timed_stops(route jore.route) returns setof jore.route_segment as
$$
SELECT * FROM jore.route_segment segment WHERE segment.route_id = route.route_id
AND segment.direction = route.direction
AND NOT (segment.date_begin < route.date_begin AND segment.date_end < route.date_begin)
AND NOT (segment.date_begin > route.date_end AND segment.date_end > route.date_end)
AND ((segment.timing_stop_type = 1) OR (segment.timing_stop_type = 2) OR (segment.stop_index = 1));
$$ language sql stable;

-- jorestatic functions

2 changes: 1 addition & 1 deletion src/sources/fetchExportFromFTP.js
@@ -38,7 +38,7 @@ export async function fetchExportFromFTP() {

await client.cd(FTP_PATH);
const files = await client.list();

const zips = files.filter(({ name }) => name.endsWith(".zip"));
const newestFile = orderBy(zips, "name", "desc")[0];
const newestExportName = get(newestFile, "name", "");
10 changes: 5 additions & 5 deletions src/utils/createDbDump.js
@@ -4,7 +4,7 @@ import fs from "fs-extra";
import { format } from "date-fns";
import pgConnectionString from "pg-connection-string";

import { PG_CONNECTION_STRING } from "../constants.js";
import { PG_CONNECTION_STRING, DEFAULT_DATABASE } from "../constants.js";

const { parse } = pgConnectionString;

@@ -15,7 +15,7 @@ const dumpsDir = path.join(cwd, "dumps");

export const deleteFiles = ({ filesDir, minFileCount }) => {
fs.readdir(filesDir, (err, files) => {
if (files.length < minFileCount) return;
if (!files || files.length < minFileCount) return;
files.forEach((file) => {
const removableFile = path.join(filesDir, file);
fs.stat(removableFile, async (err, stat) => {
@@ -58,8 +58,8 @@ export const createDbDump = async () => {
resolve(filePath);
} else {
const pgConnection = parse(PG_CONNECTION_STRING);
console.log(`Dumping the ${pgConnection.database} database into ${filePath}`);

const database = pgConnection.database ? pgConnection.database : DEFAULT_DATABASE;
console.log(`Dumping the ${database} database into ${filePath}`);
const dumpProcess = childProcess.spawn(
"pg_dump",
[`-f ${filePath}`, "-Fc", "-N '*old'", "-N '*new'"],
@@ -71,7 +71,7 @@
PGPASSWORD: pgConnection.password,
PGHOST: pgConnection.host,
PGPORT: pgConnection.port,
PGDATABASE: pgConnection.database,
PGDATABASE: database,
},
},
);
7 changes: 5 additions & 2 deletions src/utils/importDbDump.js
@@ -8,6 +8,7 @@ import {
AZURE_STORAGE_KEY,
AZURE_STORAGE_ACCOUNT,
AZURE_UPLOAD_CONTAINER,
DEFAULT_DATABASE
} from "../constants.js";
import { clearDb } from "../setup/clearDb.js";
import { deleteFiles } from "./createDbDump.js";
@@ -75,16 +76,18 @@ export const importDbDump = async () => {

if (fileExists) {
const pgConnection = parse(PG_CONNECTION_STRING);
const database = pgConnection.database ? pgConnection.database : DEFAULT_DATABASE;
console.log(`Restoring db with ${filePath}`);

const dumpProcess = childProcess.spawn(
"pg_restore",
[
"-c",
"--if-exists",
"--drop-cascade",
"--no-owner",
`-U ${pgConnection.user}`,
`-d ${pgConnection.database}`,
`-d ${database}`,
"--single-transaction",
`${filePath}`,
],
@@ -96,7 +99,7 @@
PGPASSWORD: pgConnection.password,
PGHOST: pgConnection.host,
PGPORT: pgConnection.port,
PGDATABASE: pgConnection.database,
PGDATABASE: database,
},
},
);
