Merge pull request #12 from redpencilio/feature/sparql-header-support
Add initial support for custom SPARQL headers
elpoelma authored Oct 4, 2022
2 parents e6cbef4 + 4ddfd50 commit 3c6b7f7
Showing 5 changed files with 24 additions and 19 deletions.
1 change: 1 addition & 0 deletions README.md
@@ -29,3 +29,4 @@ The service can be configured with the following environment variables:
- `CRON_PATTERN` [string]: the cron pattern which the cronjob should use. (default: `* 0 * * * *`)

- `LDES_ENDPOINT_HEADER_<key>` [string]: A header key-value combination which should be sent as part of the headers to the LDES ENDPOINT. E.g. `LDES_ENDPOINT_HEADER_X-API-KEY: <api_key>`.
+- `SPARQL_ENDPOINT_HEADER_<key>` [string]: A header key-value combination which should be sent as part of the headers to the SPARQL ENDPOINT.
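
For illustration, the `<key>` suffix becomes the header name (lower-cased) and the variable's value becomes the header value. A minimal sketch of that mapping, with a hypothetical key and value:

```ts
// Sketch only: the variable name and value below are illustrative.
const env: Record<string, string> = {
  "SPARQL_ENDPOINT_HEADER_X-API-KEY": "secret",
};
const prefix = "SPARQL_ENDPOINT_HEADER_";

const headers = Object.fromEntries(
  Object.entries(env)
    .filter(([key]) => key.startsWith(prefix))
    // Header names are lower-cased, matching extractEndpointHeadersFromEnv in utils.ts.
    .map(([key, value]) => [key.slice(prefix.length).toLowerCase(), value])
);

console.log(headers); // { "x-api-key": "secret" }
```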
11 changes: 8 additions & 3 deletions app.ts
@@ -8,9 +8,14 @@ import {
import { DataFactory } from "n3";
import * as RDF from "rdf-js";
import Consumer, { Member } from "ldes-consumer";
-import { convertBlankNodes, extractBaseResourceUri, extractLDESEndpointHeadersFromEnv } from "./utils";
+import { convertBlankNodes, extractBaseResourceUri, extractEndpointHeadersFromEnv } from "./utils";
import { CronJob } from "cron";
-import { CRON_PATTERN, LDES_ENDPOINT_VIEW, REPLACE_VERSIONS } from "./config";
+import {
+  CRON_PATTERN,
+  LDES_ENDPOINT_HEADER_PREFIX,
+  LDES_ENDPOINT_VIEW,
+  REPLACE_VERSIONS,
+} from "./config";
const { quad, variable } = DataFactory;

async function processMember(member: Member) {
@@ -42,7 +47,7 @@ const consumerJob = new CronJob(CRON_PATTERN, async () => {
  const consumer = new Consumer({
    endpoint,
    initialState,
-    requestHeaders: extractLDESEndpointHeadersFromEnv()
+    requestHeaders: extractEndpointHeadersFromEnv(LDES_ENDPOINT_HEADER_PREFIX)
  });
  consumer.listen(
    async (member) => {
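
The consumer side is otherwise unchanged: it still receives a plain headers object via `requestHeaders`, only the helper name and prefix argument differ. A condensed sketch of the construction (assuming `endpoint` derives from `LDES_ENDPOINT_VIEW`, and omitting `initialState`):

```ts
import Consumer from "ldes-consumer";
import { extractEndpointHeadersFromEnv } from "./utils";
import { LDES_ENDPOINT_HEADER_PREFIX, LDES_ENDPOINT_VIEW } from "./config";

// Every request the consumer makes to the LDES endpoint now carries the
// headers extracted from LDES_ENDPOINT_HEADER_* environment variables.
const consumer = new Consumer({
  endpoint: LDES_ENDPOINT_VIEW,
  requestHeaders: extractEndpointHeadersFromEnv(LDES_ENDPOINT_HEADER_PREFIX),
});
```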
1 change: 1 addition & 0 deletions config.ts
@@ -5,3 +5,4 @@ export const LDES_RELATION_PATH = "http://www.w3.org/ns/prov#generatedAtTime";
export const MU_APPLICATION_GRAPH = process.env.MU_APPLICATION_GRAPH;
export const CRON_PATTERN = process.env.CRON_PATTERN || "0 * * * * *";
export const LDES_ENDPOINT_HEADER_PREFIX = 'LDES_ENDPOINT_HEADER_'
+export const SPARQL_ENDPOINT_HEADER_PREFIX = 'SPARQL_ENDPOINT_HEADER_'
22 changes: 9 additions & 13 deletions sparql-queries.ts
@@ -1,5 +1,5 @@
import * as RDF from "rdf-js";
-import { fromDate, toString } from "./utils";
+import { extractEndpointHeadersFromEnv, fromDate, toString } from "./utils";
import { querySudo as query, updateSudo as update } from "@lblod/mu-auth-sudo";
import { DataFactory } from "n3";
import { LDES, PROV, PURL, TREE } from "./namespaces";
@@ -9,6 +9,8 @@ const { quad, namedNode, variable, literal } = DataFactory;

const stream = namedNode(LDES_STREAM);

+const SPARQL_ENDPOINT_HEADERS = extractEndpointHeadersFromEnv(SPARQL_ENDPOINT_HEADER_PREFIX);
+
function constructTriplesString(quads: RDF.Quad[]) {
  let triplesString = quads.map(toString).join("\n");
  return triplesString;
@@ -61,7 +63,7 @@ export function constructSelectQuery(
export async function executeInsertQuery(quads: RDF.Quad[]) {
  let queryStr = constructInsertQuery(quads);
  try {
-    await update(queryStr);
+    await update(queryStr, SPARQL_ENDPOINT_HEADERS);
  } catch (e) {
    console.error(e);
  }
@@ -70,7 +72,7 @@ export async function executeInsertQuery(quads: RDF.Quad[]) {
export async function executeDeleteQuery(quads: RDF.Quad[]) {
  let queryStr = constructDeleteQuery(quads);
  try {
-    await update(queryStr);
+    await update(queryStr, SPARQL_ENDPOINT_HEADERS);
  } catch (e) {
    console.error(e);
  }
@@ -80,14 +82,8 @@ export async function executeDeleteInsertQuery(
  quadsToDelete: RDF.Quad[],
  quadsToInsert: RDF.Quad[]
) {
-  let deleteQuery = constructDeleteQuery(quadsToDelete);
-  let insertQuery = constructInsertQuery(quadsToInsert);
-  try {
-    await update(deleteQuery);
-    await update(insertQuery);
-  } catch (e) {
-    console.error(e);
-  }
+  await executeDeleteQuery(quadsToDelete);
+  await executeInsertQuery(quadsToInsert);
}

export async function fetchState(): Promise<State | undefined> {
@@ -97,7 +93,7 @@ export async function fetchState(): Promise<State | undefined> {
  let variables = [variable("state")];
  const sparql_query = constructSelectQuery(variables, quads);
  try {
-    const response = await query(sparql_query);
+    const response = await query(sparql_query, SPARQL_ENDPOINT_HEADERS);
    const stateString = extractVariableFromResponse(response, "state")?.shift();
    if (stateString) {
      return JSON.parse(stateString);
@@ -125,7 +121,7 @@ export async function getVersion(resource: RDF.NamedNode) {
  const sparql_query = constructSelectQuery(variables, quads);

  try {
-    const response = await query(sparql_query);
+    const response = await query(sparql_query, SPARQL_ENDPOINT_HEADERS);
    const versionUris = extractVariableFromResponse(response, "v");
    if (versionUris) {
      return namedNode(versionUris[0]);
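
With `SPARQL_ENDPOINT_HEADERS` resolved once at module load, callers never handle headers themselves; the extra argument rides along on every `query`/`update` call via the two-argument form used in this diff. A usage sketch with a hypothetical triple:

```ts
import { DataFactory } from "n3";
import { executeInsertQuery } from "./sparql-queries";

const { quad, namedNode, literal } = DataFactory;

// Hypothetical data: the insert below is sent with SPARQL_ENDPOINT_HEADERS
// attached, without the caller passing them explicitly.
await executeInsertQuery([
  quad(
    namedNode("http://example.org/resource/1"),
    namedNode("http://purl.org/dc/terms/title"),
    literal("An example title")
  ),
]);
```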
8 changes: 5 additions & 3 deletions utils.ts
@@ -65,17 +65,19 @@ export function extractBaseResourceUri(
  return;
}

-export function extractLDESEndpointHeadersFromEnv() {
+export function extractEndpointHeadersFromEnv(prefix: string) {
  const headers: {
    [key: string]: number | string | string[];
  } = {};
  for (const [key, value] of Object.entries(process.env)) {
-    if (key.startsWith(LDES_ENDPOINT_HEADER_PREFIX)) {
-      const headerKey = key.split(LDES_ENDPOINT_HEADER_PREFIX).pop();
+    if (key.startsWith(prefix)) {
+      const headerKey = key.split(prefix).pop();
      if (headerKey && value) {
        headers[headerKey.toLowerCase()] = value;
      }
    }
  }
  return headers;
}
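
A quick usage sketch of the generalized helper (variable names and values are illustrative); note that header names come back lower-cased and that each prefix only picks up its own variables:

```ts
import { extractEndpointHeadersFromEnv } from "./utils";

// Illustrative values only.
process.env["LDES_ENDPOINT_HEADER_X-API-KEY"] = "ldes-secret";
process.env["SPARQL_ENDPOINT_HEADER_AUTHORIZATION"] = "Basic bXU6c2VjcmV0";

console.log(extractEndpointHeadersFromEnv("LDES_ENDPOINT_HEADER_"));
// -> { "x-api-key": "ldes-secret" }

console.log(extractEndpointHeadersFromEnv("SPARQL_ENDPOINT_HEADER_"));
// -> { "authorization": "Basic bXU6c2VjcmV0" }
```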

