Skip to content

Commit

Permalink
Merge branch 'main' into combined-prs-branch
Browse files Browse the repository at this point in the history
  • Loading branch information
Razzmatazzz authored May 9, 2024
2 parents 9e17ce8 + 3d2b46e commit 9c21c37
Show file tree
Hide file tree
Showing 6 changed files with 205 additions and 53 deletions.
22 changes: 20 additions & 2 deletions custom-endpoints/nightbot.mjs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import { v4 as uuidv4 } from 'uuid';

import cacheMachine from '../utils/cache-machine.mjs';
import graphqlUtil from '../utils/graphql-util.mjs';

const skipCache = false; //ENVIRONMENT !== 'production' || false;
Expand All @@ -25,6 +26,23 @@ export default async function (request, data) {
});
}

if (!skipCache) {
const cachedResponse = await cacheMachine.get(env, 'nightbot', { q: url.searchParams.get('q') });
if (cachedResponse) {
// Construct a new response with the cached data
const newResponse = new Response(cachedResponse);
// Add a custom 'X-CACHE: HIT' header so we know the request hit the cache
newResponse.headers.append('X-CACHE', 'HIT');
console.log(`Request served from cache: ${new Date() - requestStart} ms`);
// Return the new cached response
return newResponse;
} else {
console.log('no cached response');
}
} else {
//console.log(`Skipping cache in ${ENVIRONMENT} environment`);
}

const context = {
data,
util: graphqlUtil,
Expand Down Expand Up @@ -73,10 +91,10 @@ export default async function (request, data) {
delete data.requests[requestId];

// Update the cache with the results of the query
// don't update cache if result contained errors
const response = new Response(responseBody);
if (!skipCache && ttl > 0) {
response.headers.set('cache-ttl', String(ttl));
// using waitUntil doesn't hold up returning a response but keeps the worker alive as long as needed
ctx.waitUntil(cacheMachine.put(env, 'nightbot', {q: url.searchParams.get('q')}, response, String(ttl)));
}

return response;
Expand Down
90 changes: 40 additions & 50 deletions index.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import typeDefs from './schema.mjs';
import dynamicTypeDefs from './schema_dynamic.mjs';
import resolvers from './resolvers/index.mjs';
import graphqlUtil from './utils/graphql-util.mjs';
import cacheMachine from './utils/cache-machine.mjs';

import nightbot from './custom-endpoints/nightbot.mjs';
import twitch from './custom-endpoints/twitch.mjs';
Expand All @@ -21,9 +22,6 @@ let lastSchemaRefresh = 0;

const schemaRefreshInterval = 1000 * 60 * 10;

// If the environment is not production, skip using the caching service
const skipCache = true; //ENVIRONMENT !== 'production' || false;

// Example of how router can be used in an application
async function getSchema(data, context) {
if (schema && new Date() - lastSchemaRefresh < schemaRefreshInterval) {
Expand Down Expand Up @@ -80,19 +78,14 @@ async function getSchema(data, context) {
});
}

async function graphqlHandler(request, env, requestBody) {
async function graphqlHandler(request, env, ctx) {
const url = new URL(request.url);
let query = false;
let variables = false;

if (request.method === 'POST') {
try {
if (!requestBody) {
requestBody = await request.json();
}
if (typeof requestBody === 'string') {
requestBody = JSON.parse(requestBody);
}
const requestBody = await request.json();
query = requestBody.query;
variables = requestBody.variables;
} catch (jsonError) {
Expand Down Expand Up @@ -134,6 +127,28 @@ async function graphqlHandler(request, env, requestBody) {
//return new Response(JSON.stringify({}), responseOptions);
}

let specialCache = '';
const contentType = request.headers.get('content-type');
if (!contentType || !contentType.startsWith('application/json')) {
specialCache = 'application/json';
}

// Check the cache service for data first - If cached data exists, return it
if (env.SKIP_CACHE !== 'true') {
const cachedResponse = await cacheMachine.get(env, query, variables, specialCache);
if (cachedResponse) {
// Construct a new response with the cached data
const newResponse = new Response(cachedResponse, responseOptions);
// Add a custom 'X-CACHE: HIT' header so we know the request hit the cache
newResponse.headers.append('X-CACHE', 'HIT');
console.log('Request served from cache');
// Return the new cached response
return newResponse;
}
} else {
//console.log(`Skipping cache in ${ENVIRONMENT} environment`);
}

const context = { data: dataAPI, util: graphqlUtil, requestId, lang: {}, warnings: [], errors: [] };
let result = await graphql({schema: await getSchema(dataAPI, context), source: query, rootValue: {}, contextValue: context, variableValues: variables});
console.log('generated graphql response');
Expand All @@ -152,13 +167,21 @@ async function graphqlHandler(request, env, requestBody) {

let ttl = dataAPI.getRequestTtl(requestId);

if (specialCache === 'application/json') {
if (!result.warnings) {
result = Object.assign({warnings: []}, result);
}
ttl = 30 * 60;
result.warnings.push({message: `Your request does not have a "content-type" header set to "application/json". Requests missing this header are limited to resposnes that update every ${ttl/60} minutes.`});
}

const body = JSON.stringify(result);

const response = new Response(body, responseOptions)

// don't update cache if result contained errors
if (!skipCache && (!result.errors || result.errors.length === 0) && ttl > 0) {
response.headers.set('cache-ttl', String(ttl));
if (env.SKIP_CACHE !== 'true' && ttl > 0) {
// using waitUntil doesn't hold up returning a response but keeps the worker alive as long as needed
ctx.waitUntil(cacheMachine.put(env, query, variables, body, String(ttl), specialCache));
}

//console.log(`${requestId} kvs loaded: ${dataAPI.requests[requestId].kvLoaded.join(', ')}`);
Expand Down Expand Up @@ -192,17 +215,6 @@ const graphQLOptions = {
},
};

async function sha256(message) {
// encode as UTF-8
const msgBuffer = new TextEncoder().encode(message);
// hash the message
const hashBuffer = await crypto.subtle.digest('SHA-256', msgBuffer);
// convert bytes to hex string
return [...new Uint8Array(hashBuffer)]
.map((b) => b.toString(16).padStart(2, '0'))
.join('');
}

export default {
async fetch(request, env, ctx) {
if (!['GET', 'POST'].includes(request.method.toUpperCase())) {
Expand All @@ -214,21 +226,7 @@ export default {
const requestStart = new Date();
const url = new URL(request.url);

const cacheUrl = new URL(request.url);
let cacheKey = new Request(cacheUrl.toString().toLowerCase(), request);
const requestBody = await request.text();
if (request.method.toUpperCase() === 'POST') {
cacheUrl.pathname = '/posts' + cacheUrl.pathname + await sha256(requestBody);
cacheKey = new Request(cacheUrl.toString().toLowerCase(), {
headers: request.headers,
method: 'GET',
});
}
const cache = env.ENVIRONMENT === 'production' ? caches.default : await caches.open('dev:cache');
let response = await cache.match(cacheKey);
if (!skipCache && response) {
return response;
}
let response;

try {
if (url.pathname === '/twitch') {
Expand All @@ -239,7 +237,7 @@ export default {
}

if (url.pathname === graphQLOptions.playgroundEndpoint) {
return playground(request, graphQLOptions);
response = playground(request, graphQLOptions);
}

if (graphQLOptions.forwardUnmatchedRequestsToOrigin) {
Expand All @@ -258,7 +256,7 @@ export default {
}

if (url.pathname === graphQLOptions.baseEndpoint) {
response = await graphqlHandler(request, env, requestBody);
response = await graphqlHandler(request, env, ctx);
if (graphQLOptions.cors) {
setCors(response, graphQLOptions.cors);
}
Expand All @@ -267,18 +265,10 @@ export default {
if (!response) {
response = new Response('Not found', { status: 404 });
}
if (!skipCache && response.headers.has('cache-ttl')) {
const ttl = parseInt(response.headers.get('cache-ttl'));
response.headers.delete('cache-ttl');
if (ttl > 0) {
response.headers.set('Cache-Control', `s-maxage=${ttl}`);
//response.headers.delete('cache-ttl');
ctx.waitUntil(cache.put(cacheKey, response.clone()));
}
}
console.log(`Response time: ${new Date() - requestStart} ms`);
return response;
} catch (err) {
console.log(err);
return new Response(graphQLOptions.debug ? err : 'Something went wrong', { status: 500 });
}
},
Expand Down
3 changes: 3 additions & 0 deletions resolvers/traderResolver.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,9 @@ export default {
taskUnlock(data, args, context) {
if (data.vendor.taskUnlock) return context.data.task.get(context, data.vendor.taskUnlock);
return null;
},
buyLimit(data) {
return data.vendor.buyLimit;
}
},
TraderOffer: {
Expand Down
1 change: 1 addition & 0 deletions schema.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -1294,6 +1294,7 @@ type TraderCashOffer {
priceRUB: Int
#updated: String
taskUnlock: Task
buyLimit: Int
}
enum TraderName {
Expand Down
140 changes: 140 additions & 0 deletions utils/cache-machine.mjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,140 @@
// Base URL of the external caching service
const cacheUrl = 'https://cache.tarkov.dev'

// Count of consecutive failed (timed-out) cache requests; reset to 0 after a success
let cacheFailCount = 0;
// When true, all cache reads/writes are skipped until the pause expires (see pauseCache)
let cachePaused = false;

// Records a cache failure; once more than two failures accumulate,
// suspends all cache interaction for one minute before resetting.
function pauseCache() {
    cacheFailCount += 1;
    if (cacheFailCount > 2) {
        cachePaused = true;
        setTimeout(() => {
            cacheFailCount = 0;
            cachePaused = false;
        }, 60000);
    }
}

// fetch wrapper that aborts the request after a timeout
// :param resource: URL (or Request) to fetch
// :param options: standard fetch options, plus a non-standard timeout in ms (default 1000)
// :return: the fetch promise; rejects with a timeout AbortError if the request takes too long
async function fetchWithTimeout(resource, options = {}) {
    // Pull our custom timeout option out so it is not forwarded to fetch,
    // which does not understand a `timeout` key.
    const { timeout = 1000, ...fetchOptions } = options;
    return fetch(resource, {
        ...fetchOptions,
        signal: AbortSignal.timeout(timeout),
    });
}

// Helper function to create a hash from a string
// :param string: string to hash
// :return: SHA-256 hash of string
// Helper function to create a hash from a string
// :param string: string to hash
// :return: SHA-256 hash of string as a lowercase hex string
async function hash(string) {
    const encoded = new TextEncoder().encode(string);
    const digest = await crypto.subtle.digest('SHA-256', encoded);
    let hex = '';
    for (const byte of new Uint8Array(digest)) {
        hex += byte.toString(16).padStart(2, '0');
    }
    return hex;
}

// Updates the cache with the results of a query
// :param json: the incoming request in json
// :param body: the body to cache
// :return: true if successful, false if not
// Updates the cache with the results of a query
// :param env: worker environment; needs CACHE_BASIC_AUTH and ENVIRONMENT
// :param query: the GraphQL query string (part of the cache key)
// :param variables: the GraphQL variables object (part of the cache key)
// :param body: the response body to cache
// :param ttl: time-to-live for the cached value, in seconds (as a string)
// :param specialCache: extra cache-key discriminator (e.g. for non-JSON clients)
// :return: true if the cache was updated, false if not
async function updateCache(env, query, variables, body, ttl = '', specialCache = '') {
    try {
        if (!env.CACHE_BASIC_AUTH) {
            // fixed copy-paste from checkCache: this is the update path, not the check path
            console.warn('env.CACHE_BASIC_AUTH is not set; skipping cache update');
            return false;
        }
        if (cachePaused) {
            console.warn('Cache paused; skipping cache update');
            return false;
        }
        // Normalize the query so whitespace differences don't fragment the cache
        query = query.trim();
        console.log(`caching response for ${env.ENVIRONMENT} environment`);
        const cacheKey = await hash(env.ENVIRONMENT + query + JSON.stringify(variables) + specialCache);

        // headers and POST body
        const headersPost = {
            body: JSON.stringify({ key: cacheKey, value: body, ttl }),
            method: 'POST',
            headers: {
                'content-type': 'application/json;charset=UTF-8',
                'Authorization': `Basic ${env.CACHE_BASIC_AUTH}`
            },
            timeout: 10000,
        };

        // Update the cache
        const response = await fetchWithTimeout(`${cacheUrl}/api/cache`, headersPost);

        // Log non-200 responses
        if (response.status !== 200) {
            console.error(`failed to write to cache: ${response.status}`);
            return false;
        }
        cacheFailCount = 0;
        return true;
    } catch (error) {
        if (error.message === 'The operation was aborted due to timeout') {
            console.warn('Updating cache timed out');
            pauseCache();
            return false;
        }
        console.error('updateCache error: ' + error.message);
        return false;
    }
}

// Checks the caching service to see if a request has been cached
// :param json: the json payload of the incoming worker request
// :return: json results of the item found in the cache or false if not found
// Checks the caching service to see if a request has been cached
// :param env: worker environment; needs CACHE_BASIC_AUTH and ENVIRONMENT
// :param query: the GraphQL query string (part of the cache key)
// :param variables: the GraphQL variables object (part of the cache key)
// :param specialCache: extra cache-key discriminator
// :return: parsed JSON of the cached entry, or false if not found/unavailable
async function checkCache(env, query, variables, specialCache = '') {
    try {
        if (!env.CACHE_BASIC_AUTH) {
            console.warn('env.CACHE_BASIC_AUTH is not set; skipping cache check');
            return false;
        }
        if (cachePaused) {
            console.warn('Cache paused; skipping cache check');
            return false;
        }
        const trimmedQuery = query.trim();
        const cacheKey = await hash(env.ENVIRONMENT + trimmedQuery + JSON.stringify(variables) + specialCache);
        if (!cacheKey) {
            console.warn('Skipping cache check; key is empty');
            return false;
        }

        const response = await fetchWithTimeout(`${cacheUrl}/api/cache?key=${cacheKey}`, {
            headers: {
                'content-type': 'application/json;charset=UTF-8',
                'Authorization': `Basic ${env.CACHE_BASIC_AUTH}`
            },
        });
        // A response of any status means the cache service is reachable
        cacheFailCount = 0;
        if (response.status === 200) {
            return response.json();
        }
        if (response.status !== 404) {
            console.error(`failed to read from cache: ${response.status}`);
        }
        return false;
    } catch (error) {
        if (error.message === 'The operation was aborted due to timeout') {
            console.warn('Checking cache timed out');
            pauseCache();
            return false;
        }
        console.error('checkCache error: ' + error.message);
        return false;
    }
}

// Public cache-machine interface: read entries with get, write them with put.
const cacheMachine = {
    get: checkCache,
    put: updateCache,
};

export default cacheMachine;
2 changes: 1 addition & 1 deletion wrangler.toml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ vars = { ENVIRONMENT = "production" }
kv_namespaces = [
{ binding = "DATA_CACHE", id = "17fd725f04984e408d4a70b37c817171", preview_id = "17fd725f04984e408d4a70b37c817171" },
]
vars = { ENVIRONMENT = "development" }
vars = { ENVIRONMENT = "development", SKIP_CACHE = "false" }

# [secrets]
# CACHE_BASIC_AUTH
Expand Down

0 comments on commit 9c21c37

Please sign in to comment.