Feature/glblagctwo 1147 #1
@@ -0,0 +1,3 @@
*.iml
.idea
node_modules
@@ -0,0 +1,41 @@
{
    "version": "0.1.0",
    // List of configurations. Add new configurations or edit existing ones.
    "configurations": [
        {
            // Name of configuration; appears in the launch configuration drop down menu.
            "name": "Launch Mocha",
            // Type of configuration.
            "type": "node",
            // Workspace relative or absolute path to the program.
            "program": "node_modules/mocha/bin/_mocha",
            // Automatically stop program after launch.
            "stopOnEntry": false,
            // Command line arguments passed to the program.
            "args": ["--debug-brk", "--timeout", "10000"],
            // Workspace relative or absolute path to the working directory of the program being debugged. Default is the current workspace.
            "cwd": ".",
            // Workspace relative or absolute path to the runtime executable to be used. Default is the runtime executable on the PATH.
            "runtimeExecutable": null,
            // Optional arguments passed to the runtime executable.
            "runtimeArgs": ["--nolazy"],
            // Environment variables passed to the program.
            "env": {
                "NODE_ENV": "development"
            },
            // Use JavaScript source maps (if they exist).
            "sourceMaps": false,
            // If JavaScript source maps are enabled, the generated code is expected in this directory.
            "outDir": null
        },
        {
            "name": "Attach",
            "type": "node",
            // TCP/IP address. Default is "localhost".
            "address": "localhost",
            // Port to attach to.
            "port": 5858,
            "sourceMaps": false
        }
    ]
}
@@ -0,0 +1 @@
module.exports = require('./lib/plugin')
@@ -0,0 +1,6 @@
{
    "compilerOptions": {
        "target": "ES6",
        "module": "commonjs"
    }
}
@@ -0,0 +1,94 @@
'use strict'

const Hapi = require('hapi')
const _ = require('lodash')
const Hoek = require('hoek')
const mongoose = require('mongoose')
const uuid = require('node-uuid')

module.exports = function () {
    const toJsonApi = function (resources) {
        if (_.isArray(resources)) {
            return _.map(resources, (resource) => {
                return toJsonApiSingle(resource)
            })
        } else {
            return toJsonApiSingle(resources)
        }

        function toJsonApiSingle(resource) {
            var mapped = _.mapKeys(resource, function (val, key) {
                if (key === '_id') return 'id'
                else return key
            })
            return _.omit(mapped, '__v')
        }
    }

    const toMongooseModel = function (hhSchema) {

        const mongooseSchema = {}
        mongooseSchema._id = {
            type: String,
            default: () => {
                return uuid.v4()
            }
        }

        var schemaMap = {
            'string': String,
            'number': Number,
            'date': Date,
            'buffer': Buffer,
            'boolean': Boolean,
            'array': Array,
            'any': Object
        }

        mongooseSchema.type = 'string'
        mongooseSchema.attributes =
            _.mapValues(hhSchema.attributes, function (val) {
                Hoek.assert(val.isJoi, 'attribute values in the hh schema should be defined with Joi')
                return schemaMap[val._type]
            })

        const schema = mongoose.Schema(mongooseSchema)
        return mongoose.model(hhSchema.type, schema)
    }

    const toMongoosePredicate = function (query) {
        const mappedToModel = _.mapKeys(query.filter, function (val, key) {
            if (key === 'id') return '_id'
            else return `attributes.${key}`
        })

        return _.mapValues(mappedToModel, function (val, key) {
            const supportedComparators = ['lt', 'lte', 'gt', 'gte']

            // if it's a plain value string with commas, do an $in query
            if (_.isString(val) && val.indexOf(',') !== -1) {
                return {$in: val.split(',')}
            }

            // if it's a comparator, translate to $gt, $lt etc.
            const valueKey = _.keys(val)[0]
            if (_.contains(supportedComparators, valueKey)) {
                return {[`$${valueKey}`]: val[valueKey]}
            }

            else return val
        })
    }

    const toMongooseSort = function (sort) {
        if (!sort) return {'_id': -1}
        if (sort.indexOf('-') === 0) {
            return {[`attributes.${sort.substr(1)}`]: -1}
        }

        return {[`attributes.${sort}`]: 1}
    }

    return { toJsonApi, toMongooseModel, toMongoosePredicate, toMongooseSort }
}
@@ -0,0 +1,118 @@
'use strict'

const Hapi = require('hapi')
const Boom = require('boom')
const _ = require('lodash')
const mongoose = require('mongoose')
const converters = require('./converters')()
const utils = require('./utils')()

mongoose.Promise = require('bluebird')

module.exports = function (options) {

    const models = {}

    const connect = function (cb) {
        mongoose.connect(options.mongodbUrl, cb)
    }

    const disconnect = function (cb) {
        // clear out events
        mongoose.connection._events = {}
        mongoose.disconnect(cb)
    }

    mongoose.connection.on('error', connect)

    const find = function (type, req) {
        const model = models[type]
        const query = req.query
        const limit = (query.page && query.page.limit) || 1000
        const skip = (query.page && query.page.offset) || 0
        const sort = converters.toMongooseSort(query.sort)
        const sparse = query.fields && query.fields[type].split(',')
        var predicate = converters.toMongoosePredicate(query)
        return model.find(predicate).skip(skip).sort(sort).limit(limit).lean().exec()
            .then((resources) => {
                let data = converters.toJsonApi(resources)
                if (sparse) {
                    data = _.map(data, (datum) => {
                        datum.attributes = _.pick(datum.attributes, sparse)
                        return datum
                    })
                }

                return {data}
            })
    }

    const findById = function (type, req) {

        const model = models[type]
        return model.findById(req.params.id).lean().exec()
            .then((resources) => {
                if (!resources) {
                    return Boom.notFound()
                }
                return {data: converters.toJsonApi(resources)}
            })
    }

    const create = function (type, req) {
        const model = models[type]
        var data = utils.getPayload(req)
        return model.create(data)
            .then((created) => {
                return {data: converters.toJsonApi(created.toObject())}
            })
    }

    const update = function (type, req) {

        const model = models[type]
        var data = utils.getPayload(req)
        return model.findByIdAndUpdate(req.params.id, data)
            .then((resource) => {
                if (!resource) {
                    return Boom.notFound()
                }
                return findById(type, req)
            })
    }

    const del = function (type, req) {
        const model = models[type]
        var predicate = converters.toMongoosePredicate({id: req.params.id})
        return model.remove(predicate)
            .then(() => {
                return {}
            })
    }

    const processSchema = function (hhSchema) {

        if (!models[hhSchema.type]) {

            // clean up existing models and schemas
            delete mongoose.models[hhSchema.type]
            delete mongoose.modelSchemas[hhSchema.type]

            models[hhSchema.type] = converters.toMongooseModel(hhSchema)
        }
        return models[hhSchema.type]
    }

    return {
        connect,
        disconnect,
        find,
        findById,
        create,
        update,
        delete: del,
        models,
        processSchema
    }

}

Review thread attached to the return Boom.notFound() calls above:

"The adapter now has knowledge about Boom, which makes it a kind of leaky abstraction and would require re-implementation of these errors across all future adapters. Perhaps we can have the adapter just return the Promise, and remove the catch. The plugin.js code would then be responsible to either interpret the empty result or the error when the Promise is rejected."

"I've removed all … Not really sure about …"

"A bit conflicted about moving the interpretation of the empty result to plugin.js; however, sustaining that pattern might lead to inventing our own error model for the adapter, which is not something I want to get into. Let's keep it as is for now, we can talk about this a bit later."
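As a rough illustration of the alternative discussed in the thread above (a minimal sketch only, not part of this diff; it reuses the names server, adapter, routes, routeUtils and _ from the plugin.js register function, and assumes the adapter's findById would return the raw query Promise resolving to the resource or null): the route handler, rather than the adapter, would map an empty result to an HTTP error, keeping Boom out of the adapter entirely.

const Boom = require('boom')

// Hypothetical handler shape if the adapter only returned the raw Promise.
// Names outside this function come from the enclosing register() in plugin.js.
const getById = function (schema) {
    routeUtils.createOptionsRoute(server, schema)
    adapter.processSchema(schema)
    return _.merge(routes.getById(schema), {
        handler: (req, reply) => {
            adapter.findById(schema.type, req)
                .then((result) => {
                    // the adapter stays HTTP-agnostic; the plugin decides what an
                    // empty result means at the protocol level
                    reply(result || Boom.notFound())
                })
                .catch((err) => reply(Boom.badImplementation(err)))
        }
    })
}

The trade-off, as the thread notes, is that plugin.js would then have to interpret empty results and rejections for every adapter call, which edges toward inventing an adapter-level error model.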
@@ -0,0 +1,15 @@
'use strict'

const Hapi = require('hapi')
const _ = require('lodash')
const Hoek = require('hoek')
const mongoose = require('mongoose')
const uuid = require('node-uuid')

module.exports = function () {
    const getPayload = function (req) {
        return (req.payload) ? req.payload.data : {}
    }

    return { getPayload }
}
@@ -0,0 +1,88 @@
'use strict'

const _ = require('lodash')
const routes = require('./routes')()
const adapterUtils = require('./utils/adapter')()
const routeUtils = require('./utils/route')()

exports.register = function (server, opts, next) {
    server.expose('version', require('../package.json').version)

    const adapter = opts.adapter

    adapterUtils.checkValidAdapter(adapter)

    adapter.connect(() => {
        server.expose('adapter', adapter)
        next()
    })

    const get = function (schema) {
        routeUtils.createOptionsRoute(server, schema)
        adapter.processSchema(schema)
        return _.merge(routes.get(schema), {
            handler: (req, reply) => {
                routeUtils.parseComparators(req)
                reply(adapter.find(schema.type, req))
            }
        })
    }

    const getById = function (schema) {
        routeUtils.createOptionsRoute(server, schema)
        adapter.processSchema(schema)
        return _.merge(routes.getById(schema), {
            handler: (req, reply) => {
                reply(adapter.findById(schema.type, req))
            }
        })
    }

    const post = function (schema) {
        routeUtils.createOptionsRoute(server, schema)
        adapter.processSchema(schema)
        return _.merge(routes.post(schema), {
            handler: (req, reply) => {
                reply(adapter.create(schema.type, req)).code(201)
            }
        })
    }

    const patch = function (schema) {
        routeUtils.createOptionsRoute(server, schema)
        adapter.processSchema(schema)
        return _.merge(routes.patch(schema), {
            handler: (req, reply) => {
                reply(adapter.update(schema.type, req))
            }
        })
    }

    const del = function (schema) {
        routeUtils.createOptionsRoute(server, schema)
        adapter.processSchema(schema)
        return _.merge(routes.delete(schema), {
            handler: (req, reply) => {
                reply(adapter.delete(schema.type, req)).code(204)
            }
        })
    }

    server.expose('routes', {
        get: get,
        getById: getById,
        post: post,
        patch: patch,
        delete: del
    })

    server.ext('onPostStop', (server, next) => {
        adapter.disconnect(next)
    })
}

exports.register.attributes = {
    pkg: require('../package.json')
}

exports.getAdapter = adapterUtils.getStandardAdapter
Review comment: See the comment further down about the mixup of JSON API vocabulary: this is fields behaviour, not includes.
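For reference, the distinction the comment is drawing: in JSON API, fields selects which attributes of a resource are serialised (sparse fieldsets), while include pulls related resources into the response as a compound document. The sparse-fieldset path is what the adapter's find() implements via query.fields. A small self-contained sketch, with a hypothetical 'articles' resource and sample request URLs that are illustrative only:

const _ = require('lodash')

// Sparse fieldsets: GET /articles?fields[articles]=title,body
// (mirrors the query.fields handling in the adapter's find())
const query = { fields: { articles: 'title,body' } }
const type = 'articles'
const sparse = query.fields && query.fields[type].split(',')   // ['title', 'body']

const datum = { id: '1', type: 'articles', attributes: { title: 't', body: 'b', rating: 5 } }
datum.attributes = _.pick(datum.attributes, sparse)            // drops 'rating'

// Inclusion of related resources would be a different request entirely:
// GET /articles?include=author
// and would add the related author resources to the document, which this PR does not implement.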