/**
* @file PXDB Backbone API interface.
* @author Avi Blackmore <avi@pxdbproject.com>
* @copyright 2018, Project Phoenix, LLC. All rights reserved.
*
* @description This library provides an interface for connecting to a
* PXDB installation, and autogenerates Backbone classes to use with
* the GraphQL schema.
* @requires jquery
* @requires lodash
* @requires backbone
* @requires graphql
* @requires azure-storage.blob
*/
/**
* PXDB library namespace.
* @namespace pxdb
*/
pxdb = {
/**
* @class
* @description Creates a new PXDB connection engine.
* @classdesc The engine class is used to connect to a PXDB
* installation, authenticate, handle GraphQL queries, and
* generate Backbone model and collection classes to encapsulate
* those queries and their results. The engine is the basic
* mechanism for all interaction with PXDB.
* @param {string} baseurl - Base URL of the PXDB installation.
*/
engine: function (baseurl) {
/**
* @description These are callback bindings for the setup process of
* engine.init(), and for the refreshing of access tokens.
* The value of each key must be an object, with two
* properties: "success" and "error". These properties must
* be functions which take one parameter: "data" for success,
* "error" for error.
*
* Each is called after a specific stage of the init process:
* - setup: Retrieving the base config info from PXDB.
* - login: Logging in.
* - tenantbase: Retrieving the tenant base URL.
* - tenantroutes: Retrieving the routes defined for the tenant.
* - tenant_schema: Retrieving the tenant schema and generating
* the Backbone classes.
* - rpb_schema: Retrieving the roles_pbac schema and
* generating its Backbone classes.
*
* Additionally, token_refresh is called after calling the
* {@link pxdb.engine#token_refresh|token_refresh()} method to
* refresh the access token.
*
* Default error handlers are provided, which emit the error
* JSON to console.log.
*/
//Per-phase callback registry. Keys must match the phase names used
//by init()/login(); each entry carries a "success" and an "error"
//handler. Only error handlers are installed by default, and they
//simply log the server's error payload to the console.
this.callbacks = {
    setup: { success: null, error: (err) => console.log(err) },
    login: { success: null, error: (err) => console.log(err) },
    tenantbase: { success: null, error: (err) => console.log(err) },
    tenantroutes: { success: null, error: (err) => console.log(err) },
    token_refresh: { success: null, error: (err) => console.log(err) },
    tenant_schema: { success: null, error: (err) => console.log(err) },
    rpb_schema: { success: null, error: (err) => console.log(err) }
};
/**
* @description These are validation hooks for built-in
* datatypes in GraphQL, used by the Backbone Model classes'
* {@link pxdb.PXDB_MODEL.validate|validate()} method. The
* validations are simple type checks.
*
* This object can be extended with validation functions for
* new scalar data types that a GraphQL schema defines, for
* instance, DateTime or Geometry. New validation functions
* should be set on a property name that matches the type
* name, without the ! for "required"; the functions must take
* a single argument and return a boolean, true if the
* argument is properly formatted for the type, false
* otherwise.
*
* These functions are not meant for validating composite
* GraphQL types: those types correspond to models and
* collections, and the model validation method checks their
* validity.
*
* Predefined validations:
* - ID: Always true. IDs are never set by mutations.
* - Int: v => _.isInteger(v)
* - Float: v => _.isNumber(v)
* - Boolean: v => _.isBoolean(v),
* - String: v => _.isString(v)
*/
//Scalar type-check hooks, keyed by GraphQL scalar type name.
//Extend this object with entries for custom scalar types.
this.type_validations = {
    //IDs are server-assigned and never set by mutations, so any
    //value is accepted.
    ID: (v) => true,
    Int: (v) => _.isInteger(v),
    Float: (v) => _.isNumber(v),
    Boolean: (v) => _.isBoolean(v),
    String: (v) => _.isString(v)
};
/**
* @description These are error message templates defined for
* validation checking of the GraphQL queries and mutations.
* They are called when validations fail.
*/
//Validation error message builders; each returns the message used
//when the corresponding query/mutation check fails.
this.errorstrings = {
    badattr: (attr) => `Field '${attr}' is not valid for this type.`,
    badsubselect: (attr) => `Subselection '${attr}' is not valid for this type.`,
    wrongtype: (attr, type) => `Field '${attr}' must be of type '${type}'.`,
    notnull: (attr) => `Field '${attr}' cannot be set to null.`,
    notinput: (attr) => `Field '${attr}' cannot be modified (check input type).`,
    missingrequired: (attr) => `Field '${attr}' is required.`
};
/**
* @name roleid
* @memberof pxdb.engine
* @instance
* @description The ID of the logged in role using this
* engine. Set by {@link pxdb.engine#init|init()} after the
* tenantbase phase.
*/
/**
* @name classes
* @memberof pxdb.engine
* @instance
* @description Backbone classes generated by {@link
* pxdb.engine#init|init()} during the tenant_schema phase are
* stored on this member. See {@link
* pxdb.create_backbone_classes|pxdb.create_backbone_classes()}
* for information on the structure of this object.
*
* Each model class will be a subclass of {@link PXDB_MODEL},
* and each collection class a subclass of {@link
* PXDB_COLLECTION}.
*/
/**
* @name rpb_classes
* @memberof pxdb.engine
* @instance
* @description Backbone classes generated by {@link
* pxdb.engine#init|init()} during the rpb_schema phase are
* stored on this member. See {@link
* pxdb.create_backbone_classes|pxdb.create_backbone_classes()}
* for information on the structure of this object.
*
* Each model class will be a subclass of {@link PXDB_MODEL},
* and each collection class a subclass of {@link
* PXDB_COLLECTION}.
*/
/**
* The URLs configured for this engine are stored
* here. Before calling {@link pxdb.engine#init|init()}, the
* only property of this object will be 'base', the base URL
* passed in to the constructor. Once initialized, this
* object will have the following properties:
* - base: Base installation URL.
* - auth: Authentication URL.
* - tenant: Tenant urls:
* - base: Base of the tenant's URL schema.
* - query: GraphQL target for all database queries.
* - roles_pbac: GraphQL target for working with roles, role
* classes, and policy rules.
* - ows: URL for Mapserver OWS queries. Only enabled if this
* tenant has geospatial capabilities.
*/
//Until init()/login() completes, only the installation base URL
//is known; the remaining URL properties are filled in later.
this.urls = { base: baseurl };
/**
* @public
* @deprecated This method is deprecated in favor of {@link
* pxdb.engine#login|login}. Do not use this method for any
* new code.
*
* @description Log in and set up URLs, schema, and Backbone
* classes. Several AJAX requests are involved with this process:
*
* - setup: GET request is sent to the base URL provided to
* the constructor, to ensure it is a proper PXDB installation
* and get the auth URL path. Error at this point means the
* URL is wrong.
*
* - login: If login and password are provided, POST to the
* auth URL to log in. The request returns access and refresh
* JWT tokens. Error at this point means the credentials are
* incorrect.
*
* OR
*
* - token_refresh: If only a refresh token is provided in the
* login parameter, GET on the auth URL to refresh the access
* token. The request returns a fresh access token. Error at
* this point means the token is invalid or expired.
*
* - tenantbase: GET on the base URL again, with the access
* token set, returns the role's tenant URL path. Error is
* extremely unlikely and indicative of a server-side problem.
*
* - tenantroutes: GET on the tenant URL returns the tenant's
* available routes. Again, error is extremely unlikely and
* indicative of a server-side issue.
*
* - tenant_schema: GET on the tenant's query route to retrieve
* the GraphQL schema. Again, error is unlikely.
*
* The {@link pxdb.engine#callbacks|callbacks} object contains
* information on handling errors, as well as adding success
* callbacks.
*
* @param {string} login - Role login.
* @param {string} password - Role password.
* @param {int} tenantid - Tenant ID for the role.
*/
this.init = function (login, password, tenantid) {
    var obj = this;
    //Phase helpers. Each returns the jqXHR promise for one step of
    //the setup sequence described in the JSDoc above.
    var setup = function () {
        //Get the auth URL from base.
        return $.ajax(obj.urls.base);
    }
    var dologin = function () {
        //Do login and get tokens.
        return $.ajax(obj.urls.auth, {
            method: 'POST',
            data: {login: login,
                   password: password,
                   tenantid: tenantid}
        });
    }
    var tenantbase = function () {
        //Get base tenant URL.
        return $.ajax(obj.urls.base, {
            headers: {
                'Authorization': 'Bearer ' + obj.tokens.access_token
            }
        });
    }
    var tenantroutes = function () {
        //Get tenant routes.
        return $.ajax(obj.urls.tenant.base, {
            method: 'GET',
            headers: {
                'Authorization': 'Bearer ' + obj.tokens.access_token
            }
        });
    }
    var tenantschema = function () {
        //Get the tenant's GraphQL schema.
        return $.ajax(obj.urls.tenant.query, {
            method: 'GET',
            headers: {
                'Authorization': 'Bearer ' + obj.tokens.access_token
            }
        });
    }
    var rpb_schema = function () {
        //Get the roles_pbac GraphQL schema.
        return $.ajax(obj.urls.tenant.roles_pbac, {
            method: 'GET',
            headers: {
                'Authorization': 'Bearer ' + obj.tokens.access_token
            }
        });
    }
    //Run our promise chain, starting with setup. The chain is now
    //returned so callers can attach their own handlers (the return
    //value was previously undefined, so this is backward
    //compatible).
    return setup()
        .then(function (data, status, xhr) {
            //Got our base config data.
            obj.urls.auth = data.auth_url;
            if (_.isFunction(obj.callbacks.setup.success)) {
                obj.callbacks.setup.success(data);
            }
        }, function (xhr, status) {
            //URL must have been wrong somehow.
            if (_.isFunction(obj.callbacks.setup.error)) {
                obj.callbacks.setup.error(xhr.responseJSON);
            }
            throw(Error(status));
        })
        .then(function () {
            //No password means the login parameter holds a refresh
            //token; refresh the access token instead of logging in.
            if (password == undefined) {
                obj.tokens = {refresh_token: login};
                return obj.token_refresh();
            } else {
                return dologin();
            }
        })
        .then(function (data, status, xhr) {
            //If we used a refresh token, we skip this.
            if (password != undefined) {
                //Logged in, got our tokens.
                obj.tokens = data;
                if (_.isFunction(obj.callbacks.login.success)) {
                    obj.callbacks.login.success(data);
                }
            }
        }, function (xhr, status) {
            //Credentials must be invalid or something.
            if (_.isFunction(obj.callbacks.login.error)) {
                obj.callbacks.login.error(xhr.responseJSON);
            }
            throw(Error(status));
        })
        .then(function () { return tenantbase(); })
        .then(function (data, status, xhr) {
            //Got our tenant base URL.
            obj.urls.tenant = {
                base: data.tenant_url
            }
            obj.roleid = data.roleid;
            if (_.isFunction(obj.callbacks.tenantbase.success)) {
                obj.callbacks.tenantbase.success(data);
            }
        }, function (xhr, status) {
            //Honestly not sure how we'd get here. Something
            //went seriously wrong!
            if (_.isFunction(obj.callbacks.tenantbase.error)) {
                obj.callbacks.tenantbase.error(xhr.responseJSON);
            }
            throw(Error(status));
        })
        .then(function () { return tenantroutes(); })
        .then(function (data, status, xhr) {
            //Got our tenant routes; merge them onto urls.tenant.
            _.each(data.routes, function(v,k,c) {
                obj.urls.tenant[k] = v;
            });
            if (_.isFunction(obj.callbacks.tenantroutes.success)) {
                obj.callbacks.tenantroutes.success(data);
            }
        }, function (xhr, status) {
            //Again, how did we get here?
            if (_.isFunction(obj.callbacks.tenantroutes.error)) {
                obj.callbacks.tenantroutes.error(xhr.responseJSON);
            }
            throw(Error(status));
        })
        .then(function () { return tenantschema(); })
        .then(function (data, status, xhr) {
            //Finally, got our tenant schema. Build the classes.
            obj.gql_schema = data;
            obj._schema = pxdb.extract_types_from_schema(data['json']);
            obj._queries = pxdb.generate_gql_queries(obj._schema, 3);
            obj.classes = pxdb.create_backbone_classes(obj,
                                                       obj._schema,
                                                       obj._queries);
            if (_.isFunction(obj.callbacks.tenant_schema.success)) {
                obj.callbacks.tenant_schema.success(data);
            }
        }, function (xhr, status) {
            //Not sure how we'd get here, either.
            if (_.isFunction(obj.callbacks.tenant_schema.error)) {
                obj.callbacks.tenant_schema.error(xhr.responseJSON);
            }
            throw(Error(status));
        })
        .then(function () { return rpb_schema(); })
        .then(function (data, status, xhr) {
            //And now our roles_pbac schema. Build the
            //classes for those.
            obj.gql_rpb_schema = data;
            obj._rpbschema = pxdb.extract_types_from_schema(
                data['json']);
            obj._rpbqueries = pxdb.generate_gql_queries(obj._rpbschema, 1);
            obj.rpb_classes = pxdb.create_backbone_classes(
                obj,
                obj._rpbschema,
                obj._rpbqueries,
                true);
            if (_.isFunction(obj.callbacks.rpb_schema.success)) {
                obj.callbacks.rpb_schema.success(data);
            }
        }, function (xhr, status) {
            //BUG FIX: this handler previously invoked
            //callbacks.tenant_schema.error; rpb_schema failures are
            //now reported to the rpb_schema error callback.
            if (_.isFunction(obj.callbacks.rpb_schema.error)) {
                obj.callbacks.rpb_schema.error(xhr.responseJSON);
            }
            throw(Error(status));
        });
}
/**
* @public
* @description Log in and set up URLs, schema, and Backbone
* classes. Several phases are involved with this process:
*
* - login: If login and password are provided, POST to the
* auth URL to log in. The request returns all data required
* to set up the authentication tokens, URLs, schemas, and
* Backbone classes. Error at this point means the
* credentials are incorrect.
*
* OR
*
* - token_refresh: If only a refresh token is provided in the
* login parameter, GET on the auth URL to refresh the access
* token. The request returns a fresh access token. Error at
* this point means the token is invalid or expired.
*
* - tenantroutes: The routes are populated from the returned
* data.
*
* - tenant_schema: The GraphQL schema is parsed and Backbone
* classes are generated.
*
* - rpb_schema: The GraphQL roles_pbac schema is parsed and
* its Backbone classes are generated.
*
* The {@link pxdb.engine#callbacks|callbacks} object contains
* information on handling errors, as well as adding success
* callbacks.
*
* @param {string} login - Role login.
* @param {string} password - Role password.
* @param {int} tenantid - Tenant ID for the role.
*/
this.login = function (login, password, tenantid) {
    var obj = this;
    var dologin = function () {
        //Do login and get tokens.
        return $.ajax(obj.urls.auth, {
            method: 'POST',
            data: {login: login,
                   password: password,
                   tenantid: tenantid}
        });
    }
    //The auth endpoint lives at a fixed path under the base URL.
    obj.urls.auth = obj.urls.base + '/auth';
    //Return the promise chain so callers can attach handlers (the
    //return value was previously undefined; backward compatible).
    return (function () {
        //No password means the login parameter holds a refresh
        //token; refresh the access token instead of logging in.
        //NOTE(review): this path assumes the refresh response
        //carries the same routes/schema payload as a login
        //response; if it returns only an access token, the schema
        //parsing below will fail — confirm against the server.
        if (password == undefined) {
            obj.tokens = {refresh_token: login};
            return obj.token_refresh();
        } else {
            return dologin();
        }
    })()
        .then(function (data, status, xhr) {
            //Logged in, got our tokens.
            //BUG FIX: a token-refresh response carries no
            //refresh_token (see token_refresh()); keep the token we
            //already have instead of clobbering it with undefined.
            obj.tokens = {access_token: data.access_token,
                          refresh_token: data.refresh_token != undefined ?
                              data.refresh_token : obj.tokens.refresh_token}
            obj.roleid = data.roleid;
            if (_.isFunction(obj.callbacks.login.success)) {
                obj.callbacks.login.success(obj.tokens);
            }
            obj.urls.tenant = {}
            if (_.isFunction(obj.callbacks.tenantbase.success)) {
                obj.callbacks.tenantbase.success(data);
            }
            //Populate tenant routes from the login payload.
            _.each(data.routes, function(v,k,c) {
                obj.urls.tenant[k] = v;
            });
            if (_.isFunction(obj.callbacks.tenantroutes.success)) {
                obj.callbacks.tenantroutes.success(data.routes);
            }
            //Parse the tenant schema and build its Backbone classes.
            obj.gql_schema = data.schema;
            obj._schema = pxdb.extract_types_from_schema(obj.gql_schema['json']);
            obj._queries = pxdb.generate_gql_queries(obj._schema, 3);
            obj.classes = pxdb.create_backbone_classes(obj,
                                                       obj._schema,
                                                       obj._queries);
            if (_.isFunction(obj.callbacks.tenant_schema.success)) {
                obj.callbacks.tenant_schema.success(data.schema);
            }
            //Same for the roles_pbac schema (shallower recursion).
            obj.gql_rpb_schema = data.rpb_schema;
            obj._rpbschema = pxdb.extract_types_from_schema(
                obj.gql_rpb_schema['json']);
            obj._rpbqueries = pxdb.generate_gql_queries(obj._rpbschema, 1);
            obj.rpb_classes = pxdb.create_backbone_classes(
                obj,
                obj._rpbschema,
                obj._rpbqueries,
                true);
            if (_.isFunction(obj.callbacks.rpb_schema.success)) {
                obj.callbacks.rpb_schema.success(data.rpb_schema);
            }
        }, function (xhr, status) {
            //Credentials must be invalid or something.
            if (_.isFunction(obj.callbacks.login.error)) {
                obj.callbacks.login.error(xhr.responseJSON);
            }
            throw(Error(status));
        });
}
/**
* Refreshes the JWT access token using the refresh token.
* Access tokens are valid for one hour, so this will be
* needed in error handlers to handle 401 errors on query. If
* the response to this method is 401, the refresh token has
* expired; refresh tokens have a lifetime of 24 hours, so
* this is unlikely.
*/
this.token_refresh = function () {
    var obj = this;
    if (this.tokens == undefined) {
        //Can't refresh without a refresh token; report a synthetic
        //400 to the token_refresh error callback. Note this branch
        //returns undefined, so callers must not chain .then() when
        //not logged in.
        if (_.isFunction(obj.callbacks.token_refresh.error)) {
            obj.callbacks.token_refresh.error(
                {status: 400,
                 error: 'Not logged in.'});
        }
    } else {
        //GET the auth URL with the refresh token; the response
        //carries a fresh access token.
        return $.ajax(this.urls.auth, {
            method: 'GET',
            headers: {
                'Authorization': 'Bearer ' + obj.tokens.refresh_token
            },
            success: function (data, status, xhr) {
                obj.tokens.access_token = data.access_token;
                if (_.isFunction(obj.callbacks.token_refresh.success)) {
                    obj.callbacks.token_refresh.success(data);
                }
            },
            error: function (xhr, status, error) {
                //BUG FIX: previously tested the nonexistent
                //callbacks.token_refresh_error property, so the
                //error callback could never fire.
                if (_.isFunction(obj.callbacks.token_refresh.error)) {
                    obj.callbacks.token_refresh.error(xhr.responseJSON);
                }
            }
        });
    }
}
/**
* @private
* @description Returns the headers to be passed in to a
* GraphQL request. This is a utility function, not meant for
* external use.
*/
this._get_headers = function () {
    //Bearer-token Authorization header built from the current
    //access token.
    var headers = {
        'Authorization': 'Bearer ' + this.tokens.access_token
    };
    return headers;
}
/**
* Returns a GraphQL endpoint for use in making GraphQL
* queries. If the user of this library wishes to make
* GraphQL queries directly, instead of using the Backbone
* classes, this may be useful. Otherwise, no need to call it
* directly.
*/
this.graphql_endpoint = function (rpb=false) {
    //Target either the roles_pbac route or the regular query
    //route, then build an endpoint with auth headers attached.
    var target = rpb ? this.urls.tenant.roles_pbac
                     : this.urls.tenant.query;
    return graphql(target, {
        headers: this._get_headers(),
        alwaysAutodeclare: true
    });
}
/**
* @description Runs an aggregate query using the PxDB
* Aggregates function namespace; PxDB will run a subquery
* against a table and then pass the first column of that
* subquery to an aggregate function. Any additional
* columns are used to group the aggregate results, as a
* SQL "group by" clause. The JSON results will be returned
* as an array of objects.
*
* NB: If the columns parameter contains subselections, the
* return query will name the subselected columns based on the
* selection path. For instance, a selection like this:
* ['foo', {bar: ['baz']}] would yield a column named
* 'bar_baz' in the returned rows.
*
* @param {string} fname - Name of the aggregate function. Required.
* @param {string} tabname - Name of the table to query
* against. Required. Must be a table type in the GraphQL
* schema.
* @param {array} columns - Column selection list. Format is
* the same as for calling the {@link PXDB_COLLECTION#selection|selection()} method on a model or
* collection, including subselections. Required, and at
* least one column is required. The first named column will
* be the argument to the aggregate function, and must be a
* column in the target table, not a subselection.
* @param {object} Optional - Additional, optional arguments.
* @param {string} Optional.filter - A PxDB filter expression
* applied to the subquery. This will restrict what data the
* aggregate query sees.
* @param {string} Optional.having - PxDB filter expression
* applied to the aggregate query. This acts as a "having"
* clause, restricting output of the aggregate query.
* @param {array} Optional.ordering - Ordering spec for the aggregate query output.
* @param {integer} Optional.limit - Limits output to this number of rows.
* @param {integer} Optional.offset - Offset the first row returned by this number.
* @return {array} Array of row objects.
*/
this.aggregate_query = async function (fname, tabname, columns,
                                       {filter, having, ordering, limit, offset} = {}) {
    //All three positional arguments are mandatory.
    if (fname == undefined ||
        tabname == undefined ||
        columns == undefined) {
        throw new Error("All of fname, tabname, and columns arguments are required.");
    }
    //The legal aggregate names are the fields of the schema's
    //Aggregates type.
    var aggtype = this.gql_schema.json.__schema.types.find(
        e => e.name == 'Aggregates');
    var validfnames = aggtype.fields.map(e => e.name);
    if (!validfnames.includes(fname.toLowerCase())) {
        throw new Error(`Invalid aggregate ${fname}.`)
    }
    fname = fname.toLowerCase();
    if (!(tabname in this._queries)) {
        throw new Error(`Unknown table type ${tabname}.`);
    }
    var selectable = this._queries[tabname].read.all_allowed;
    //Validate the column selection against the selectable fields.
    for (const f of columns) {
        if (typeof(f) == 'string' &&
            _.indexOf(selectable, f) == -1) {
            throw new Error(this.errorstrings.badattr(f));
        } else if (typeof(f) == 'object' &&
                   _.find(selectable, (v) => {
                       var vkey = _.keys(v)[0];
                       var fkey = _.keys(f)[0];
                       return (typeof(v) == 'object' &&
                               vkey == fkey);
                   }) == undefined) {
            throw new Error(this.errorstrings.badsubselect(_.keys(f)[0]));
        }
    }
    var gql = this.graphql_endpoint();
    //The $subquery variable is the rendered read query for the
    //target table, restricted to the requested columns.
    var qstring = `query Fn ($subquery: String, $filter: String, $having: String, $ordering: [String], $limit: Int, $offset: Int) {
Aggregates {
${fname}(subquery: $subquery, filter: $filter, having: $having, ordering: $ordering, limit: $limit, offset:$offset)
}}`;
    var qvars = {
        subquery: this._queries[tabname].read.template(columns),
        filter: filter,
        having: having,
        ordering: ordering,
        limit: limit,
        offset: offset
    }
    //PxDB returns the aggregate rows as a JSON string; parse it
    //into an array of row objects.
    var result = await gql(qstring)(qvars);
    return JSON.parse(result.Aggregates[fname]);
}
},
//Some GraphQL utility functions for dealing with schema parsing.
/**
* @private
* @constant
* @description Built-in GraphQL scalar types. We don't need to
* reflect these.
*/
builtin_scalars: ['String', 'Boolean', 'Int', 'Float', 'ID'], //Membership list consumed by is_builtin_scalar(); order is irrelevant.
/**
* @private
* @description Built-in scalars are in our list.
*/
is_builtin_scalar: function (typename) {
return (_.indexOf(pxdb.builtin_scalars,
_.trim(typename, '!')) != -1);
},
/**
* @private
* @description Introspection types start with double underscores.
*/
is_introspection_type: function (typename) {
if (_.isArray(typename)) {
return false;
} else { //string case
return (_.trim(typename, '!').match('__') != undefined);
}
},
/**
* @private
* @description Defined types are neither built-in scalars nor
* introspection types.
*/
is_defined_type: function (typename) {
return !(pxdb.is_builtin_scalar(typename) ||
pxdb.is_introspection_type(typename));
},
/**
* @private
* @description "Special" types are types defined by the PxDB
* server that correspond to specially-handled columns or function
* calls, not to normal tables.
*/
is_special_type: function (typename) {
return (_.indexOf(
['Aggregates', 'Geometry', 'FileObject', 'GeometryInput', 'FileInput'],
_.trim(typename, "!")) != -1);
},
/**
* @description Returns an object containing the type
* specification for each defined type in the GraphQL schema.
* This excludes introspection and builtin scalar types. The type
* map will look like this:
* ```javascript
* {types: TYPENAME: TYPENAME | {
* ATTRIBNAME: ATTRIBTYPE | [ATTRIBTYPE],
* ...},
* ...},
* inputs: INPUT-TYPENAME: {
* ATTRIBNAME: ATTRIBTYPE | [ATTRIBTYPE],
* ...},
* ...},
* mutations: {MUTATIONNAME: {ARGNAME: ARGTYPE, ...}}
* }
* ```
* If TYPENAME names a scalar, its value will be the same as the
* key. If ATTRIBTYPE is a scalar or object type, it will be
* given as a string name; a list will be given as an array
* containing the element type.
*
* NOTE: This function is called automatically by {@link
* pxdb.engine#init|engine.init()}.
*
* @param {object} schema - GraphQL introspection schema.
* @return {object} Type specification for the schema.
*/
extract_types_from_schema: function (schema) {
var querytype = null;
var mutationtype = null;
var returns = {
types: {},
inputs: {}
}
//First pass to find our query and mutation types. We put any other x
_.each(schema.__schema.types, function (typespec, i) {
if (typespec.name == schema.__schema.queryType.name) {
querytype = typespec;
}
if (typespec.name == schema.__schema.mutationType.name) {
mutationtype = typespec;
}
if (querytype != null && mutationtype != null) {
return false;
}
});
var qtypes = []
var mtypes = []
var mutations = {}
//Grab the names of the query types.
_.each(querytype.fields, function (fspec, i) {
if (!pxdb.is_special_type(fspec.name)) {
qtypes.push(fspec.name);
}
});
//Grab other types.
_.each(schema.__schema.types, function (typespec, i) {
if (pxdb.is_defined_type(typespec.name) &&
!pxdb.is_special_type(typespec.name) &&
typespec.kind == 'OBJECT' &&
!_.includes(qtypes, typespec.name)) {
qtypes.push(typespec.name);
}
});
//Now the input types for mutations.
_.each(mutationtype.fields, function (fspec, i) {
//The types are the same for create and update, we don't
//need to duplicate.
if ((fspec.name.match('^create') != undefined) ||
((fspec.name.match('^create') == undefined &&
fspec.name.match('^update') != undefined))) {
mtypes.push(_.filter(fspec.args, (v,i,c) => v.name == 'input')[0].type.name);
}
var mutargs = {}
//Gather the mutation definitions.
_.each(fspec.args, function (arg, i) {
if (arg.type.kind == 'INPUT_OBJECT') {
mutargs[arg.name] = arg.type.name;
} else {
mutargs[arg.name] = arg.type.ofType.name;
}
});
mutations[fspec.name] = mutargs;
});
returns['mutations'] = mutations;
//Final pass to gather the type names and attributes.
_.each(schema.__schema.types, function (typespec, i) {
//We only want the defined types by the schema, and not
//the query or mutation types.
if (pxdb.is_defined_type(typespec.name) &&
typespec.name != schema.__schema.queryType.name &&
typespec.name != schema.__schema.mutationType.name) {
//The type name must be one we've already seen.
if ($.inArray(typespec.name, qtypes) != -1 ||
$.inArray(typespec.name, mtypes) != -1){
var attribs = {}
//We're looking at the fields for an
if (typespec.kind == 'OBJECT') {
thefields = typespec.fields;
} else {
thefields = typespec.inputFields;
}
//Get the type of each field.
$.each(thefields, function (i, fspec) {
var typename = '';
if (fspec.type.kind == 'LIST') {
typename = [fspec.type.ofType.name];
} else if (fspec.type.kind == 'OBJECT') {
typename = fspec.type.name;
} else {
if (fspec.type.ofType != null) {
typename = fspec.type.ofType.name;
if (fspec.type.kind == 'NON_NULL') {
typename += '!';
}
} else {
typename = fspec.type.name;
}
}
attribs[fspec.name] = typename;
});
if ($.inArray(typespec.name, qtypes) != -1) {
returns.types[typespec.name] = attribs;
} else {
returns.inputs[typespec.name] = attribs;
}
}
}
});
return returns;
},
/**
* @description Returns an object containing the query/mutation
* templates for each of the defined types (i.e., not
* introspection or scalar types) in the provided schema TYPESPEC.
* The GraphQL templates will have accompanying information on the
* available fields; for queries, this will be the available
* fields in the query type, while for mutations, the input fields
* will be provided.
*
* The resulting object will have this structure:
* ```javascript
* {
* TYPENAME: {
* create: MUTATION,
* read: QUERYSPEC,
* update: MUTATION,
* delete: MUTATION
* },
* ...
* }
* ```
* QUERYSPEC will be an object with this structure:
* ```javascript
* {
* query: QUERYSTRING,
* fields: FIELDLIST
* }
* ```
*
* MUTATION will be a templated string.
*
* The templates will be used to generate query functions for each
* query or mutation type by Backbone.sync. The fields list will
* be used to create the available field list for use by model or
* collection classes in the .fields() method.
*
* MAX_RECURSION, which defaults to Infinity, sets the maximum
* level to which the code will descend in "nested" types, such as
* collections or object references on a given type. If this is
* set to 0, no "nested" types, including types that represent
* complex scalar objects (such as FileObjects or Geometries) will
* be included; if set to 1, only the first "level" of nested
* types will be included, and so forth.
*
* NOTE: This function is called automatically by {@link
* pxdb.engine#init|engine.init()}.
*
* @param {object} typespec - Type specification from {@link
* extract_types_from_schema}
* @param {integer} max_recursion - Maximum level to recurse into nested types.
* @return {object} GraphQL query templates.
*/
generate_gql_queries: function (typespec, max_recursion=Infinity) {
var returns = {}
//This function recursively pulls together the type fields for
//a GraphQL type, and returns the array.
//seentypes tracks visited type names to avoid infinite recursion
//on cyclic references; rlevel is the current nesting depth,
//bounded by max_recursion.
var get_type_fields = function (typename, seentypes=null, rlevel=0) {
var fields = []
if (seentypes == null) {
seentypes = [];
}
seentypes.push(typename);
_.forOwn(typespec.types[typename], function (
type, name, obj) {
if (pxdb.is_defined_type(type) &&
_.indexOf(_.keys(typespec.types), type) != -1) {
//Reference to another schema type: recurse (depth permitting)
//and emit a {name: [subfields]} subselection.
if (_.indexOf(seentypes, type) == -1 && rlevel < max_recursion) {
var theobj = {}
//NOTE(review): this pushes the *current* typename again rather
//than `type`; the recursive call pushes `type` itself at its
//top, so the cycle check still holds, but the duplicate push
//looks unintentional — confirm.
seentypes.push(typename);
theobj[name] = get_type_fields(type, seentypes, rlevel + 1);
seentypes.pop();
fields.push(theobj);
}
} else if (_.isArray(type)) {
//List field, encoded as [elementTypeName]: recurse into
//non-scalar element types, otherwise select the field plainly.
if (rlevel < max_recursion) {
if (!pxdb.is_builtin_scalar(type[0])) {
var theobj = {}
seentypes.push(type[0]);
theobj[name] = get_type_fields(
type[0], seentypes, rlevel + 1);
seentypes.pop();
fields.push(theobj);
} else {
fields.push(name);
}
}
} else {
//Scalar field: plain selection, unless its type name collides
//with a seen or schema-defined type.
if (_.indexOf(seentypes, type) == -1 &&
_.indexOf(_.keys(typespec.types), type) == -1) {
fields.push(name);
}
}
});
seentypes.pop();
return fields;
}
//Renders one field selection: strings pass through unchanged;
//objects become 'name{ sub,fields }' recursively.
var field_descender = function (field, idx) {
if (_.isObject(field)) {
if (!_.isArray(field)) {
var spec = _.keys(field)[0] + '{ ';
} else {
var spec = '';
}
var farr = _.map(field, field_descender);
spec += farr.join(',');
if (!_.isArray(field)) {
spec += '}';
}
return spec;
} else {
return field;
}
}
//Generate the template string for the query.
//Tag function for tagged template literals: interleaves the
//literal chunks with the query name, type name, and the rendered
//field selection list.
var qtemplater = function (strings, qname, typename, fields) {
var header = strings[0] + qname + strings[1];
var typespec = typename + strings[2];
var closer = strings[3];
fields = _.map(fields, field_descender);
var fieldspec = fields.join(',');
return header + typespec + fieldspec + closer;
}
//Tag function for mutation templates: mutation name, parameter
//declarations, argument bindings, and the returned field
//selection. Note params/args are arrays; string concatenation
//joins their elements with commas.
var mtemplater = function (strings, mutname, params, mutname2, args, thefields) {
var header = strings[0] + mutname + strings[1] + params + strings[2];
var mutor = mutname + strings[3] + args + strings[4];
var fieldspec = (_.map(thefields, field_descender)).join(',');
var closer = strings[5];
return header + mutor + fieldspec + closer;
}
//Iterate over the types to create queries.
_.each(typespec.types, function (spec, typename, thetypes) {
returns[typename] = {}
var typefields = get_type_fields(typename);
returns[typename].read = {
template: function(fields) {
return qtemplater`query ${typename} ($filter: String, $ordering: [String], $limit: Int, $offset: Int) {
${typename} (filter: $filter, ordering: $ordering, limit: $limit, offset: $offset) {
${fields}
}
}`;
},
fields: _.cloneDeep(typefields),
all_allowed: _.cloneDeep(typefields)
}
});
//Iterate over mutations to create mutation queries.
_.each(typespec.mutations, function (spec, mutname, themuts) {
//Are we creating, updating, or deleting? Also need the
//base type.
if (_.startsWith(mutname, 'create_')) {
var typename = _.replace(mutname, 'create_', '')
var prop = 'create';
} else if (_.startsWith(mutname, 'update_')) {
var typename = _.replace(mutname, 'update_', '');
var prop = 'update';
} else {
var typename = _.replace(mutname, 'delete_', '');
var prop = 'delete';
}
var inputtype = typename + 'Input';
var idcol = '';
//What is our ID column name? It's going to vary by type.
//Returning false stops the lodash loop at the first ID field.
_.forOwn(typespec.types[typename], function (type, name, thespec) {
if (_.trim(type, '!') == 'ID') {
idcol = name;
return false;
}
});
//What parameter declarations and args should we use? And what
//return value? Deletes only echo back the ID column.
if (prop == 'create') {
var params = [`$input:${inputtype}!`];
var args = ['input:$input'];
var thefields = get_type_fields(typename);
} else if (prop == 'update') {
var params = [`$input:${inputtype}!`, '$id:ID!'];
var args = ['input:$input', `${idcol}:$id`];
var thefields = get_type_fields(typename);
} else {
var params = ['$id:ID!'];
var args = [`${idcol}:$id`];
var thefields = [`${idcol}`];
}
var mobj = {
template: function(fields) {
return mtemplater`mutation ${mutname} (${params}) {
${mutname} (${args}) {
${fields}
}
}`;
},
fields: thefields
}
//Only attach mutations for types we generated queries for.
if (returns[typename] != undefined) {
returns[typename][prop] = mobj;
}
});
return returns;
},
/**
* @description Generates Backbone model and collection classes
* using the types in TYPESPEC and the GraphQL queries in QUERIES.
* Returns an object with the model and collection classes, with
* the keys being the class names. The model classes will be
* named for the type; collection classes will have the type name
* with "_collection" appended.
*
* NOTE: This function is called automatically by {@link
* pxdb.engine#init|engine.init()}.
*
* @param {object} engine - PXDB engine instance.
* @param {object} typespec - Type specification from {@link
* extract_types_from_schema}
* @param {object} queries - GraphQL query templates from {@link
* generate_gql_queries}
* @param {boolean} rpb - If true, this is for the roles_pbac
* schema; false otherwise.
* @return {object} Backbone classes object.
*/
create_backbone_classes: function (engine, typespec, queries, rpb=false) {
var classes = {}
_.each(typespec.types, function (spec, typename, obj) {
var idcol = '';
_.each(spec, function (type, name, s) {
if (_.trim(type,'!') == 'ID') {
idcol = name;
}
});
//Sets the "selection" for the GraphQL query, the fields
//that will be requested.
var selectionfn = function (newselect) {
var thismodel = this;
if (newselect != undefined) {
var selectable = this.constructor.prototype._selectable;
var seenidcol = false;
_.each(newselect, function (f, i) {
if (typeof(f) == 'string' &&
_.indexOf(selectable, f) == -1) {
throw new Error(thismodel._engine.errorstrings.badattr(f));
} else if (typeof(f) == 'object' &&
_.find(selectable,
function(v,i,c) {
var vkey = _.keys(v)[0];
var fkey = _.keys(f)[0];
return (typeof(v) == 'object' &&
vkey == fkey);
}) == undefined) {
throw new Error(thismodel._engine.errorstrings.badsubselect(_.keys(f)[0]));
}
if (typeof(f) == 'string' && f == idcol) {
seenidcol = true;
}
});
//We have to have the ID field.
if (!seenidcol) {
throw new Error(this._engine.errorstrings.missingrequired(idcol));
}
this._selection = newselect;
if (this instanceof Backbone.Collection && this.inheritSelection) {
this.each(m => m.selection(_.cloneDeep(this._selection)));
}
}
return _.cloneDeep(this._selection);
}
/**
* @class PXDB_MODEL
* @extends Backbone.Model
* @classdesc A PXDB_MODEL class represents a single model
* of the corresponding GraphQL type. This is equivalent
* to a single database row.
*
* There is no class named PXDB_MODEL; this is a
* 'metaclass', a form of class that will be generated for
* each of the schema types in the GraphQL schema. The
* classes will be stored on the {@link
* pxdb.engine#classes|classes property} of the {@link
* pxdb.engine|engine} instance, with names specified as
* documented at {@link
* pxdb.create_backbone_classes|pxdb.create_backbone_classes()}.
*
* Except where specified, PXDB_MODEL classes work identically to standard
* {@link http://backbonejs.org/#Model|Backbone Models}.
*
* In this documentation, wherever PXDB_MODEL is stated,
* replace with the actual class name for the object type.
*/
var themodel = Backbone.Model.extend({
_rpb: rpb,
/**
* @memberof PXDB_MODEL
* @instance
* @description The {@link
* http://backbonejs.org/#Model-urlRoot|Backbone
* urlRoot property} is not used by PXDB_MODEL models.
* It is provided here as a reference to the tenant
* query URL, solely to prevent calls to the model's
* {@link http://backbonejs.org/#Model-url|url} method
* from signalling an error.
*/
urlRoot: engine.urls.tenant.query,
/**
* @method
* @memberof PXDB_MODEL
* @instance
* @description Returns the {@link PXDB_MODEL#urlRoot}.
* For GraphQL objects, all models have the same query
* root, so this method is not meaningful.
*/
url: function () {
return this.urlRoot;
},
/**
* @method
* @memberof PXDB_MODEL
* @description This is the overriden sync method,
* which calls {@link
* pxdb.backbone_sync|pxdb.backbone_sync()} to perform
* GraphQL queries and mutations. It should not be
* called directly.
*/
sync: pxdb.backbone_sync,
parse: function (resp, options) {
paths = pxdb.getPaths(resp);
for (p of paths) {
if (_.includes(p, 'thefiles') &&
_.includes(p, 'additional')) {
_.set(resp, p, JSON.parse(_.get(resp, p)));
}
}
return resp;
},
save: function (key, val, options) {
// Handle both `"key", value` and `{key: value}` -style arguments.
var attrs;
if (key == null || typeof key === 'object') {
attrs = key;
options = val;
} else {
(attrs = {})[key] = val;
}
options = _.extend({validate: true, parse: true}, options);
var wait = options.wait;
// If we're not waiting and attributes exist, save acts as
// `set(attr).save(null, opts)` with validation. Otherwise, check if
// the model will be valid when the attributes, if any, are set.
if (attrs && !wait) {
if (!this.set(attrs, options)) return false;
} else if (!this._validate(attrs, options)) {
return false;
}
// After a successful server-side save, the client is (optionally)
// updated with the server-side state.
var model = this;
var success = options.success;
var attributes = this.attributes;
options.success = function(resp) {
//Set model "newness" to false, clear allchanged.
model._isnew = false;
// Ensure attributes are restored during synchronous saves.
model.attributes = attributes;
var serverAttrs = options.parse ? model.parse(resp, options) : resp;
if (wait) serverAttrs = _.extend({}, attrs, serverAttrs);
if (serverAttrs && !model.set(serverAttrs, options)) return false;
model._allchanged = {};
if (success) success.call(options.context, model, resp, options);
model.trigger('sync', model, resp, options);
};
pxdb._wrapError(this, options);
// Set temporary attributes if `{wait: true}` to properly find new ids.
if (attrs && wait) this.attributes = _.extend({}, attributes, attrs);
var method = this.isNew() ? 'create' : (options.patch ? 'patch' : 'update');
if (method === 'patch' && !options.attrs) options.attrs = attrs;
var xhr = this.sync(method, this, options);
// Restore attributes.
this.attributes = attributes;
return xhr;
},
fetch: function (options) {
options = _.extend({parse: true}, options);
var model = this;
var success = options.success;
options.success = function(resp) {
if (resp != undefined) {
//Set model "newness" to false, clear allchanged.
model._isnew = false;
var serverAttrs = options.parse ? model.parse(resp, options) : resp;
if (!model.set(serverAttrs, options)) return false;
model._allchanged = {};
}
if (success) success.call(options.context, model, resp, options);
model.trigger('sync', model, resp, options);
};
pxdb._wrapError(this, options);
return this.sync('read', this, options);
},
//Set this to false in sync.
_isnew: true,
/**
* @memberof PXDB_MODEL
* @description Overridden {@link
* http://backbonejs.org/#Model-idAttribute|Backbone
* isNew()} method. This detects when a model is
* newly created, versus one that is persisted. It
* relies on the sync() semantics.
*/
isNew: function () {
return this._isnew;
},
_validattrs: _.clone(spec),
_inputattrs: _.clone(typespec.inputs[typename + 'Input']),
_allchanged: {},
/**
* @method
* @memberof PXDB_MODEL
* @instance
* @description This validation method automatically
* prevents invalid mutations. It disallows setting
* attributes that are not defind for the GraphQL
* schema type, setting attributes to the wrong
* underlying scalar type, and setting attributes that
* are only defined for the query type, but not the
* input type.
*
* Standard Backbone validation semantics, as
* described at {@link
* http://backbonejs.org/#Model-validate}.
*/
validate: function (attrs, options) {
var thismodel = this;
var proto = thismodel.constructor.prototype;
var errors = [];
//Make a pass over the attributes to ensure only
//valid attributes are set.
_.each(attrs, function (v,k,c) {
//Check attributes against type fields.
if (!_.has(proto._validattrs, k) &&
!_.has(proto._inputattrs, k)) {
errors.push(thismodel._engine.errorstrings.badattr(k));
}
//If the value is null, we need to check if this is a
//nullable field.
if (v == null) {
if (proto._validattrs[k].indexOf('!') != -1) {
errors.push(thismodel._engine.errorstrings.notnull(k));
}
} else { //Not null, so we check the type.
//What type is this field?
var thetype = (_.isArray(proto._validattrs[k]) ? 'Array' : _.trim(proto._validattrs[k], '!'));
//Input fields can have different types
//than the corresponding query fields.
//For non-input fields, this will be a
//no-op.
var inputtype = ((_.has(proto._inputattrs, k)) ? _.trim(proto._inputattrs[k], '!') : thetype);
//Grab a validator if one is defined;
//otherwise, we punt.
var validator = _.get(thismodel._engine.type_validations, thetype, val => true);
//Grab defined validator for the input type. It might be the same as the regular validator, or it might not.
var inputvalidator = _.get(thismodel._engine.type_validations, inputtype, val => true);
//If either validator returns true, we're good.
var correct = (validator(v) || inputvalidator(v));
if (!correct) {
errors.push(thismodel._engine.errorstrings.wrongtype(k, thetype));
}
}
});
//Check if any attrbutes are not defined that need to be.
_.each(proto._inputattrs, function (v,k,c) {
if (!_.has(attrs, k) && _.endsWith(v, '!')) {
errors.push(thismodel._engine.errorstrings.missingrequired(k));
}
});
if (errors.length > 0) {
return errors;
}
},
_typename: typename,
_selectable: _.cloneDeep(queries[typename].read.all_allowed),
/**
* @memberof PXDB_MODEL
* @description Standard {@link
* http://backbonejs.org/#Model-idAttribute|Backbone
* idAttribute} property, automatically set to the ID
* key of the GraphQL type.
*/
idAttribute: idcol,
_graphql: queries[typename],
_engine: engine,
/**
* @method
* @memberof PXDB_MODEL
* @instance
* @description Sets and/or returns the 'selection',
* that is, what fields are queried and returned on
* the model. This is checked against the schema, so
* only valid attributes can be queried.
* Additionally, the ID attribute is required.
*
* The selection must be formatted as follows: an
* array of either strings, for scalar fields, or
* objects, for subselection of linked types: the key
* is the linked type name, value is an array of
* fields to select on that type.
*
* For instance, consider a type "Person" that has
* fields "personid", "firstname", "lastname", and a
* linked type "EmailAddress" with fields "personid"
* and "email" on it; to select the personid,
* firstname, lastname, and EmailAddress_collection on
* the Person type, with the email on the EmailAddress
* linked type, the selection would be formatted like
* so:
* ```javascript
* ['personid', 'firstname', 'lastname', {EmailAddress_collection: ['email']}].
*```
*
* NB: This method sets the selection on a specific
* model object; the model's collection may have a
* different selection set, and is unaffected by
* calling selection() on the model object.
*
* @param {array} newselect - New selection to assign.
* If not provided, the existing selection is
* returned.
* @returns {array} Selection currently set on this model.
*/
selection: selectionfn,
/**
* @method
* @memberof PXDB_MODEL
* @instance
* @description Returns the filter expression used to
* retrieve this model. For a model, this is always a
* simple ID lookup; filter expressions cannot be set
* on models.
*/
expression: function () {
return `${idcol} = ${this.id}`;
},
initialize: function(atts, opts) {
if (this.collection != undefined && this.collection.inheritSelection) {
this.selection(this.collection.selection());
} else {
var sel = []
var thismodel = this;
var typename = thismodel.constructor.prototype._typename;
var thisengine = thismodel.constructor.prototype._engine;
var avail = _.cloneDeep(queries[typename].read.fields);
_.each(avail, function (elt, i) {
var colname = (typeof elt == 'string') ? elt : _.keys(elt)[0];
if (thismodel._rpb) {
var thetype = thisengine._rpbschema.types[typename][colname];
} else {
var thetype = thisengine._schema.types[typename][colname];
}
if (thetype instanceof Array) {
thetype = thetype[0];
}
if (pxdb.is_builtin_scalar(thetype) ||
pxdb.is_special_type(thetype)) {
sel.push(elt);
}
});
this.selection(sel);
}
this.listenTo(this, 'change', function (m, r, o) {
_.each(m.changedAttributes(), function (v, k) {
m._allchanged[k] = v;
});
});
}
});
/**
* @class PXDB_COLLECTION
* @extends Backbone.Collection
* @classdesc A PXDB_COLLECTION class represents a
* collection of the corresponding PXDB_MODEL objects.
* This is equivalent to the result set of a database
* query.
*
* There is no class named PXDB_COLLECTION; as for
* PXDB_MODEL, this is a 'metaclass', a form of class that
* will be generated for each of the schema types in the
* GraphQL schema. The classes will be stored on the
* {@link pxdb.engine#classes|classes property} of the
* {@link pxdb.engine|engine} instance, with names
* specified as documented at {@link
* pxdb.create_backbone_classes|pxdb.create_backbone_classes()}.
*
* Except where specified, PXDB_COLLECTION classes work
* identically to standard {@link
* http://backbonejs.org/#Collection|Backbone
* Collections}.
*
* In this documentation, wherever PXDB_COLLECTION is
* stated, replace with the actual class name for the
* object type.
*/
var thecollection = Backbone.Collection.extend({
/**
* @method
* @memberof PXDB_COLLECTION
* @description This is the overriden sync method,
* which calls {@link
* pxdb.backbone_sync|pxdb.backbone_sync()} to perform
* GraphQL queries and mutations. It should not be
* called directly.
*/
sync: pxdb.backbone_sync,
fetch: function(options) {
options = _.extend({parse: true}, options);
var success = options.success;
var collection = this;
options.success = function(resp) {
var method = options.reset ? 'reset' : 'set';
collection[method](resp, options);
collection.each(function (m) { m._isnew = false });
if (success) success.call(options.context, collection, resp, options);
collection.trigger('sync', collection, resp, options);
};
pxdb._wrapError(this, options);
return this.sync('read', this, options);
},
_rpb: rpb,
_expression: null,
_selectable: _.cloneDeep(queries[typename].read.all_allowed),
_orderby: [`${idcol}`],
_limit: null,
_offset: null,
/**
* @name url
* @memberof PXDB_COLLECTION
* @description PXDB_COLLECTION classes do not use or
* need the {@link
* http://backbonejs.org/#Collection-url|url property}
* to function. Its absence is documented here.
*/
/**
* @memberof PXDB_COLLECTION
* @description Boolean flag. If true, the selection
* set on this collection by the {@link
* PXDB_COLLECTION#selection|selection()} method will
* automatically be set on all models within this
* collection, when those models are added.
*
* This will occur regardless of whether or not the
* models were created by {@link
* https://backbonejs.org/#Collection-create|create()},
* created independently via constructors and then
* added via {@link
* https://backbonejs.org/#Collection-add|add()}, or
* fetched from the server via {@link
* https://backbonejs.org/#Collection-fetch|fetch()}.
*
* The models can subsequently have their selections
* changed by calling {@link
* PXDB_MODEL#selection|selection()} on them, but if
* the collection's selection is updated via the
* collection's {@link
* PXDB_COLLECTION#selection|selection()} method, the
* models will have their selections reset if this
* flag is true.
*/
inheritSelection: false,
/**
* @method
* @memberof PXDB_COLLECTION
* @instance
* @description Sets and/or returns the 'selection',
* that is, what fields are queried and returned on
* the model. This is checked against the schema, so
* only valid attributes can be queried.
*
* The selection must be formatted as follows: an
* array of either strings, for scalar fields, or
* objects, for subselection of linked types: the key
* is the linked type name, value is an array of
* fields to select on that type.
*
* For instance, consider a type "Person" that has
* fields "personid", "firstname", "lastname", and a
* linked type "EmailAddress" with fields "personid"
* and "email" on it; to select the personid,
* firstname, lastname, and EmailAddress_collection on
* the Person type, with the email on the EmailAddress
* linked type, the selection would be formatted like
* so:
*
* ```javascript
* ['personid', 'firstname', 'lastname', {EmailAddress_collection: ['email']}].
* ```
*
* NB: Setting the selection on a collection has no
* effect on the selection of the contained models,
* unless the collection attribute "inheritSelection"
* is set to true, in which case new models will have
* their selections set automatically to that of the
* containing collection.
*
* In any case, selections can be set independently for
* a collection and for individual models (the latter
* by calling "selection()" on a given model); this is
* useful for fetching a set of records with basic
* attributes, and then "drilling down" to get more
* information on specific records.
*
* @param {array} newselect - New selection to assign.
* If not passed in, the existing selection is
* returned.
* @returns {array} Selection currently set on this model.
*/
selection: selectionfn,
/**
* @method
* @memberof PXDB_COLLECTION
* @instance
* @description Sets and/or returns the filter
* expression for this collection. The filter
* expression is used in PXDB GraphQL queries to
* determine what rows are returned. This is a
* logical expression with a syntax similar to a SQL
* where clause, but with some differences; this
* syntax is documented at (TBD).
*
* @param {string} newexp - New filter expression to
* set. If not provided, the existing filter
* expression is returned.
* @returns {string} Filter expression currently set
* for this collection.
*/
expression: function (newexp) {
if (newexp != undefined) {
this._expression = newexp;
}
return this._expression;
},
/**
* @method
* @memberof PXDB_COLLECTION
* @instance
* @description Sets and/or returns the server-side
* ordering parameters for this collection.
* Server-side ordering is specified using an array of
* field names as strings, with the optional keywords'asc' and 'desc'
* appended to them, for ascending and descending
* order, respectively; if used, a space is required
* between the field name and the asc or desc keyword.
*
* NB: Client-side sorting using Backbone's standard
* {@link
* http://backbonejs.org/#Collection-comparator|comparator}
* facility is unaffected by this setting.
*
* @param {array} neworder - New ordering spec. If
* not provided, existing ordering is returned.
* @return {array} Ordering currently set for this
* collection.
*/
ordering: function (neworder) {
if (_.isArray(neworder)) {
this._orderby = neworder;
}
return this._orderby;
},
/**
* @method
* @memberof PXDB_COLLECTION
* @instance
* @description Sets and/or returns the server-side
* paging settings for this collection. Server-side
* paging is specified using a page size, with this
* method, and a page number, with the method {@link
* PXDB_COLLECTION#setPage|setPage()}. This instructs
* the server to return a maximum number of records
* equal to the page size, offset by the page number.
*
* Note that it is possible for a paged query to
* return fewer records than the page size; this
* indicates that the "last page" has been reached.
* Setting the page number past the "last page" will
* result in no records being returned.
*
* Server-side ordering should be used with this
* facility to ensure consistent results.
*
* Page size must be a positive integer.
*
* Calling this method will automatically reset the
* page number to 1.
*
* @param {integer} pagesize - Page size. If not
* provided, the current page size and number are
* returned.
* @return {array} Page size and number currently set
* for this collection, as an array: page size is the
* first element, page number the second.
*/
paging: function (pagesize) {
if ((_.isInteger(pagesize) && pagesize > 0)){
this._limit = pagesize;
this._offset = 0;
}
if (_.isInteger(this._limit)) {
return [this._limit, (this._offset / this._limit) + 1]
}
},
/**
* @method
* @memberof PXDB_COLLECTION
* @instance
* @description Unsets the page size and number for
* this collection, disabling server-side paging until
* and unless {@link PXDB_COLLECTION#paging|paging()}
* is called again.
*/
clearPaging: function () {
this._limit = null;
this._offset = null;
},
/**
* @method
* @memberof PXDB_COLLECTION
* @instance
* @description Sets the page number when using
* server-side paging. Page number must be a positive
* integer. Returns the page size and number as for
* {@link PXDB_COLLECTION#paging|paging()}.
*
* @param {integer} pagenum - Page number.
* @returns {array} See {@link
* PXDB_COLLECTION#paging|paging()} return value.
*/
setPage: function (pagenum) {
if (!_.isInteger(pagenum) || pagenum <= 0) {
throw Error("Cannot set page below 1.");
}
if (!_.isInteger(this._limit)) {
throw Error("No page size set. Call paging() first.");
}
this._offset = (pagenum - 1) * this._limit;
return this.paging();
},
model: themodel,
_graphql: queries[typename],
_engine: engine,
initialize: function (models, options) {
this.selection(_.cloneDeep(queries[typename].read.fields));
}
});
classes[typename] = themodel;
classes[typename + '_collection'] = thecollection;
});
return classes;
},
/**
* @private
* @description Backbone.sync override. This uses GraphQL queries
* to fetch and persist. Does not require a url property on the
* model or collection; instead, the required property is
* '_graphql', generated by pxdb.generate_gql_queries.
*
* This function is called by the Backbone classes, and must not
* be called directly.
*
* @param {string} method - CRUD method being called.
* @param {object} model - Model or collection for the method.
* @param {object} options - Options for processing. Success and
* error handlers can be passed in here.
*/
backbone_sync: function (method, model, options) {
//We don't need to validate after syncing.
options['validate'] = false;
gql = model._engine.graphql_endpoint(model._rpb);
if (method in model._graphql) {
//Enable selectors.
if (method == 'read') {
var sel = model.selection();
} else if (method == 'delete') {
var sel = [model.idAttribute];
} else {
var sel = (model.collection != undefined) ? model.collection.selection() : model.selection();
}
var fn = gql(model._graphql[method].template(sel));
} else {
throw Error(`Method ${method} not supported for this model.`);
}
var opts = {};
if (options.progress && typeof options.progress == 'function') {
opts.progress = options.progress;
}
switch (method) {
case 'read':
var args = {filter: model.expression()}
if (model instanceof Backbone.Collection) {
args.ordering = model._orderby;
args.limit = model._limit;
args.offset = model._offset;
}
break;
case 'create':
case 'update':
var input = {};
var fileinputs = [];
var theclasses = ((model._rpb) ? model.constructor.prototype._engine.rpb_classes :
model.constructor.prototype._engine.classes);
_.each(model.constructor.prototype._inputattrs, function (v,k,c) {
if ((method == 'create' && model.has(k)) ||
(_.includes(_.keys(model._allchanged), k) || _.includes(v, '!'))) {
if (_.includes(_.keys(theclasses), k)) {
var relclass = theclasses[k].prototype.model;
var idatt = relclass.prototype.idAttribute;
var inval = model.get(k);
input[k] = [];
_.each(inval, function (iv, ik, ic) {
if (_.isNumber(iv) || _.isString(iv)) {
input[k].push(iv);
} else if (iv instanceof relclass) {
input[k].push(iv.get(idatt));
} else {
input[k].push(iv[idatt]);
}
});
} else if (_.isArray(v) && v[0] == 'FileUpload') {
var thefiles = model.get(k);
if ((thefiles instanceof File)) {
thefiles = [thefiles]
}
input[k] = [];
_.each(thefiles, function(file, idx) {
if (file.hasOwnProperty('storagekey')) {
var fileupload = {
filename: file.filename,
mimetype: file.mimetype,
size: file.size,
storagekey: file.storagekey,
filenum: file.filenum
}
} else {
var fileupload = {
filename: file.name,
mimetype: file.type,
size: file.size
}
}
input[k].push(fileupload);
fileinputs.push(file);
});
} else {
input[k] = model.get(k);
}
}
});
var args = {input: input};
if (method == 'update') {
args['id'] = model.id;
}
break;
case 'delete':
var args = {id: model.id}
break;
}
return resp = fn(args, opts)
.then(function (data) {
//Querying
if (method == 'read') {
//Fetching for a collection.
if (model instanceof Backbone.Collection) {
var rows = _.values(data)[0];
} else { //Operating on an individual model, not a
//collection.
var rows = _.values(data)[0][0];
}
} else { //Mutating
var rows = data[method + '_' + model._typename];
}
//Any passed-in success handlers will receive the new data.
options.success(rows);
if (fileinputs != undefined && fileinputs.length > 0) {
var modelrefresh = function () {
setTimeout(function () {
model.fetch();
}, 10000);
}
_.each(fileinputs, function (thefile, i) {
var finderfn = function (ef) {
return (ef.filename == thefile.name &&
ef.mimetype == thefile.type &&
ef.size == thefile.size);
}
var modelfilelist = model.get('thefiles');
var fileobj = _.find(modelfilelist, finderfn);
if (fileobj) {
var fileopts = {};
if (options.fileprogress &&
typeof options.fileprogress == 'function') {
fileopts.fileprogress = options.fileprogress;
}
if (i == 0) {
fileopts.filedone = modelrefresh;
}
pxdb.upload_file_to_url(thefile, fileobj.url,
fileopts);
}
});
}
})
.catch(function (err) {
//Call any passed-in error handlers with the error message.
options.error(err);
});
},
_wrapError: function(model, options) {
var error = options.error;
options.error = function(resp) {
if (error) error.call(options.context, model, resp, options);
model.trigger('error', model, resp, options);
}
},
/**
* @private
* @description Handles signed URL file upload to backend storage
* services. This is done using XHR directly for most cases, but
* Azure Blob Storage, Amazon S3, and Rackspace Cloud Files
* require special handling; that is done in this function.
*
* This function could be called directly, but is automatically
* called by {@link pxdb.backbone_sync|pxdb.backbone_sync()}, so
* this is not likely to be necessary.
*
* @param {File} file - The file object to upload.
* @param {string} url - Signed URL to upload to.
* @param {object} [opts] - Object of options to be set.
* @param {string} [opts.service] - Name of the storage service.
* May be any supported by PXDB.
* @param {function} [opts.fileprogress] - Callback function for
* file upload progress. Will be called by the XHR or library
* functions; uses the XHR.upload.progress event syntax.
* @param {function} [opts.filedone] - Callback to run when upload
* is complete or errors out.
*/
upload_file_to_url: function(file, url, opts={}) {
//Change this later for more general use, but right now we're
//just using Azure.
var service = ((_.has(opts, 'service')) ?
opts['service'].toUpperCase() : 'AZURE');
var progressfn = ((_.has(opts, 'fileprogress')) ?
opts.fileprogress : e => e);
var completefn = ((_.has(opts, 'filedone')) ?
opts.filedone : e => e);
var errorfn = ((_.has(opts, 'file_error')) ?
opts.file_error : e => e);
switch (service) {
case 'AZURE':
var urlparts = /(https:\/\/.*?)\/(.*?)\/(.*?)\?(.*)/.exec(url);
var storageurl = urlparts[1];
var container = urlparts[2];
var blobname = urlparts[3];
var token = urlparts[4];
var svc = AzureStorage.Blob.createBlobServiceWithSas(
storageurl, token);
bs = parseInt(file.size / 100);
svc.singleBlobPutThresholdInBytes = bs;
var progress = function () {
progressfn({name: monitor.name,
loaded: monitor.completeSize,
total: monitor.totalSize});
}
var finisher = function (error, result, response) {
progress();
clearInterval(progupdate);
if (error) {
errorfn(error, response);
} else {
completefn(response);
}
}
var monitor = svc.createBlockBlobFromBrowserFile(
container, blobname, file,
{blockSize: bs}, finisher);
var progupdate = setInterval(progress, 200);
break;
case 'CLOUDFILES':
//Rackspace cloud files upload. TBI.
break;
case 'S3':
//AWS S3 upload. TBI.
break;
case 'GOOGLESTORAGE':
case 'LOCAL':
case 'MINIO':
default:
//Normal XHR upload. TBI.
break;
}
},
//Utility function
getPaths: function (object) {
return object && typeof object === 'object' && Object.keys(object).reduce(
(p, k) =>
(pxdb.getPaths(object[k]) || [[]]).reduce(
(r, a) =>
[...r, [k, ...a]], p), []);
}
}
//Node.js compatibility: export the namespace when running under
//CommonJS (strict equality instead of `==`).
if (typeof module === 'object' && module.exports) {
    module.exports = pxdb;
}