Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

alloy

Package Overview
Dependencies
Maintainers
4
Versions
269
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

alloy - npm Package Compare versions

Comparing version 0.1.21 to 0.1.22

Alloy/modules/ti.physicalSizeCategory-android-1.0.zip

405

Alloy/commands/compile/compilerUtils.js

@@ -20,3 +20,2 @@ var U = require('../../utils'),

JSON_NULL = JSON.parse('null'),
stylePrefix = '\t\t',
compilerConfig;

@@ -28,3 +27,2 @@

var STYLE_ALLOY_TYPE = '__ALLOY_TYPE__',
STYLE_CONST_PREFIX = '__ALLOY_CONST__--',
STYLE_EXPR_PREFIX = '__ALLOY_EXPR__--',

@@ -298,76 +296,17 @@ PLATFORMS = ['ios', 'android', 'mobileweb'],

exports.loadStyle = function(p) {
if (path.existsSync(p)) {
var f = fs.readFileSync(p, 'utf8');
// skip empty files
if (/^\s*$/.test(f)) {
return {};
exports.loadStyle = function(tssFile) {
if (path.existsSync(tssFile)) {
var contents = fs.readFileSync(tssFile, 'utf8');
if (!/^\s*$/.test(contents)) {
var code = processTssFile(contents);
var json = JSON.parse(code);
optimizer.optimizeStyle(json);
return json;
}
// Handle "call" ASTs, where we look for expr() syntax
function do_call() {
if (this[1][1] === 'expr') {
var code = pro.gen_code(this[2][0]);
var new_ast = ['string', STYLE_CONST_PREFIX + code];
return new_ast;
}
};
// Recursively assemble the full name of a dot-notation variable
function processDot(dot,name) {
switch(dot[0]) {
case 'dot':
return processDot(dot[1], '.' + (dot[2] || '') + name);
break;
case 'name':
var pre = dot[1];
if (pre === 'Ti' || pre === 'Titanium' || pre === 'Alloy') {
return pre + name;
} else {
return null;
}
break;
}
}
// Handle all AST "dot"s, looking for Titanium constants
function do_dot() {
var name = processDot(this,'');
if (name === null) {
return null;
} else {
return ['string', STYLE_CONST_PREFIX + name];
}
}
// Generate AST and add the handlers for "call" and "dot" to the AST walker
var ast = jsp.parse('module.exports = ' + f);
//console.log(require('util').inspect(ast, false, null));
var walker = pro.ast_walker();
var new_ast = walker.with_walkers({
"call": do_call,
"dot": do_dot
}, function(){
return walker.walk(ast);
});
// generate code based on the new AST. Make sure to keep keys quoted so the
// JSON parses without exception. The wild [1][0][1][3] array is how we grab
// just the style object from the AST, leaving behind the appended "module.exports = "
var code = pro.gen_code(new_ast[1][0][1][3], {
beautify: true,
quote_keys: true,
keep_zeroes: true,
double_quotes: true
});
try {
return JSON.parse(code);
} catch(E) {
console.error(code);
U.die("Error parsing style at "+p.yellow+". Error was: "+String(E).red);
}
}
return {};
};
// Load a .tss style file and return its rules sorted by selector priority
// (id > class > api type, then query specificity, then declaration order).
// Thin wrapper combining exports.loadStyle and sortStyles.
exports.loadAndSortStyle = function(tssFile) {
return sortStyles(exports.loadStyle(tssFile));
}

@@ -390,55 +329,133 @@

exports.addStyleById = function(styles, id, key, value) {
var idStr = '#' + id;
if (!styles[idStr]) {
styles[idStr] = {};
}
styles[idStr][key] = value;
return styles;
}
exports.generateStyleParams = function(styles,classes,id,apiName,extraStyle) {
var platform = compilerConfig && compilerConfig.alloyConfig && compilerConfig.alloyConfig.platform ? compilerConfig.alloyConfig.platform : undefined;
var regex = new RegExp('^' + STYLE_EXPR_PREFIX + '(.+)'),
styleCollection = [],
lastObj = {};
exports.generateStyleParams = function(styles,classes,id,className,extraStyle) {
var s = {};
extraStyle = extraStyle || {};
_.each(styles, function(style) {
if ((style.isId && style.key === id) ||
(style.isClass && _.contains(classes, style.key)) ||
(style.isApi && style.key === apiName)) {
// manage potential runtime conditions for the style
var conditionals = {
platform: [],
size: ''
};
// Start with any base View styles
mergeStyles(styles['View'],s);
if (style.queries) {
// handle platform device query
// - Make compile time comparison if possible
// - Add runtime conditional if platform is not known
var q = style.queries;
if (q.platform) {
if (platform) {
if (!_.contains(q.platform,platform)) {
return;
}
} else {
_.each(q.platform, function(p) {
conditionals.platform.push(CONDITION_MAP[p]['runtime']);
});
}
}
// Merge in styles based on UI component type
mergeStyles(styles[U.properCase(className)],s);
// handle size device query
if (q.size === 'tablet') {
conditionals.size = 'Alloy.isTablet';
} else if (q.size === 'handheld') {
conditionals.size = 'Alloy.isHandheld';
}
// Merge in styles based on associated classes
for (var c=0;c<classes.length;c++) {
var clsn = classes[c];
mergeStyles(styles['.'+clsn],s);
}
// assemble runtime query
var pcond = conditionals.platform.length > 0 ? '(' + conditionals.platform.join(' || ') + ')' : '';
var joinString = pcond && conditionals.size ? ' && ' : '';
var conditional = pcond + joinString + conditionals.size;
// Merge in styles based on the component's ID
mergeStyles(styles['#'+id],s);
if (id) s['id'] = id;
var str = [];
// push styles if we need to insert a conditional
if (conditional) {
if (lastObj) {
styleCollection.push({style:lastObj});
styleCollection.push({style:style.style, condition:conditional});
lastObj = {};
}
} else {
_.extend(lastObj,style.style);
}
} else {
_.extend(lastObj, style.style);
}
}
});
// Merge in any extra specified styles
mergeStyles(extraStyle,s);
// add in any final styles
_.extend(lastObj, extraStyle || {});
if (!_.isEmpty(lastObj)) { styleCollection.push({style:lastObj}); }
var regex = new RegExp('^' + STYLE_CONST_PREFIX + '(.+)');
for (var sn in s) {
var value = s[sn],
actualValue;
// console.log('--------' + id + ':' + classes + ':' + apiName + '-------------');
// console.log(require('util').inspect(styleCollection, false, null));
if (_.isString(value)) {
var matches = value.match(regex);
if (matches !== null) {
actualValue = matches[1]; // matched a constant
function processStyle(style) {
for (var sn in style) {
var value = style[sn],
actualValue;
if (_.isString(value)) {
var matches = value.match(regex);
if (matches !== null) {
code += sn + ':' + matches[1] + ','; // matched a constant or expr()
} else {
code += sn + ':"' + value + '",'; // just a string
}
} else if (_.isObject(value)) {
if (value[STYLE_ALLOY_TYPE] === 'var') {
code += sn + ':' + value.value + ','; // dynamic variable value
} else {
// recursively process objects
code += sn + ': {';
processStyle(value);
code += '},';
continue;
}
} else {
actualValue = '"' + value + '"'; // just a string
code += sn + ':' + JSON.stringify(value) + ','; // catch all, just stringify the value
}
} else if (_.isObject(value) && value[STYLE_ALLOY_TYPE] === 'var') {
actualValue = value.value; // dynamic variable value
}
}
// Let's assemble the fastest factory method object possible based on
// what we know about the style we just sorted and assembled
var code = '';
if (styleCollection.length === 0) {
// do nothing
} else if (styleCollection.length === 1) {
if (styleCollection[0].condition) {
// check the condition and return the object
code += styleCollection[0].condition + ' ? {' + processStyle(styleCollection[0].style) + '} : {}';
} else {
actualValue = JSON.stringify(value); // catch all, just stringify the value
// just return the object
code += '{';
processStyle(styleCollection[0].style);
code += '}';
}
str.push(stylePrefix + sn + ':' + actualValue);
} else if (styleCollection.length > 1) {
// construct self-executing function to merge styles based on runtime conditionals
code += '(function(){\n';
code += 'var o = {};\n';
for (var i = 0, l = styleCollection.length; i < l; i++) {
if (styleCollection[i].condition) {
code += 'if (' + styleCollection[i].condition + ') ';
}
code += '_.extend(o, {';
processStyle(styleCollection[i].style);
code += '});\n';
}
code += 'return o;\n'
code += '})()'
}
return str.join(',\n');
//console.log(code);
return code;
}

@@ -531,14 +548,154 @@

///////////////////////////////////////
var mergeStyles = function(from, to) {
if (from) {
for (var k in from) {
var v = from[k];
// for optimization, remove null or undefined values
if (v == JSON_NULL || typeof(v)==='undefined' || typeof(v)==='null') {
delete to[k];
} else {
to[k] = from[k];
// Convert the raw text of a .tss style file into a JSON-parseable string.
// The contents are parsed as a JS object literal with the uglify-js parser
// (jsp/pro), and two AST rewrites tag values for later expansion:
//   - expr(...) calls become strings prefixed with STYLE_EXPR_PREFIX
//   - Ti/Titanium/Alloy dot-chains become strings prefixed with STYLE_EXPR_PREFIX
function processTssFile(f) {
// Handle "call" ASTs, where we look for expr() syntax
function do_call() {
if (this[1][1] === 'expr') {
var code = pro.gen_code(this[2][0]);
var new_ast = ['string', STYLE_EXPR_PREFIX + code];
return new_ast;
}
};
// Recursively assemble the full name of a dot-notation variable
function processDot(dot,name) {
switch(dot[0]) {
case 'dot':
return processDot(dot[1], '.' + (dot[2] || '') + name);
break;
case 'name':
// Only the Ti/Titanium/Alloy namespaces are treated specially;
// anything else returns null and is left untouched.
var pre = dot[1];
if (pre === 'Ti' || pre === 'Titanium' || pre === 'Alloy') {
return pre + name;
} else {
return null;
}
break;
}
}
// Handle all AST "dot"s, looking for Titanium constants
function do_dot() {
var name = processDot(this,'');
if (name === null) {
return null;
} else {
return ['string', STYLE_EXPR_PREFIX + name];
}
}
// Generate AST and add the handlers for "call" and "dot" to the AST walker
var ast = jsp.parse('module.exports = ' + f);
var walker = pro.ast_walker();
var new_ast = walker.with_walkers({
"call": do_call,
"dot": do_dot
}, function(){
return walker.walk(ast);
});
// generate code based on the new AST. Make sure to keep keys quoted so the
// JSON parses without exception. The wild [1][0][1][3] array is how we grab
// just the style object from the AST, leaving behind the appended "module.exports = "
return pro.gen_code(new_ast[1][0][1][3], {
beautify: true,
quote_keys: true,
keep_zeroes: true,
double_quotes: true
}) || '';
}
// Flatten one component's style map (plus the app-global style, when
// present) into a list of style entries sorted by ascending priority.
// Weights: id (10000) > class (1000) > api name (100) > platform query
// (10) > any query (1), with declaration order (0.001 per entry) as the
// tie-breaker. Keys look like "#id", ".class" or "ApiName", optionally
// suffixed with "[query=value ...]".
function sortStyles(componentStyle) {
var mergedStyle = {},
regex = /^\s*([\#\.]{0,1})([^\[]+)(?:\[([^\]]+)\])*\s*$/,
// NOTE(review): self-referencing initializer — `extraStyle` is hoisted,
// so this always yields {}; looks like a leftover. Verify upstream.
extraStyle = extraStyle || {},
sortedStyles = [],
ctr = 1,
VALUES = {
ID: 10000,
CLASS: 1000,
API: 100,
PLATFORM: 10,
SUM: 1,
ORDER: 0.001
};
// add global style to processing, if present
var styleList = [];
if (compilerConfig && _.isObject(compilerConfig.globalStyle) && !_.isEmpty(compilerConfig.globalStyle)) {
styleList.push(compilerConfig.globalStyle);
}
if (_.isObject(componentStyle) && !_.isEmpty(componentStyle)) {
styleList.push(componentStyle);
}
// Calculate priority:
_.each(styleList, function(style) {
for (var key in style) {
var obj = {};
// later declarations get a slightly higher priority, preserving order
var priority = ctr++ * VALUES.ORDER;
var match = key.match(regex);
if (match === null) {
U.die('Invalid style specifier "' + key + '"');
}
var newKey = match[2];
switch(match[1]) {
case '#':
obj.isId = true;
priority += VALUES.ID;
break;
case '.':
obj.isClass = true;
priority += VALUES.CLASS;
break;
default:
if (match[2]) {
obj.isApi = true;
priority += VALUES.API;
}
break;
}
// parse device queries, e.g. "[platform=ios size=tablet]"
if (match[3]) {
obj.queries = {};
_.each(match[3].split(/\s+/), function(query) {
var parts = query.split('=');
var q = U.trim(parts[0]);
var v = U.trim(parts[1]);
if (q === 'platform') {
priority += VALUES.PLATFORM + VALUES.SUM;
v = v.split(','); // platform accepts a comma-separated list
} else {
priority += VALUES.SUM;
}
obj.queries[q] = v;
});
}
_.extend(obj, {
priority: priority,
key: newKey,
style: style[key]
});
sortedStyles.push(obj);
}
}
// NOTE(review): the closing braces here appear unbalanced in this view
// (two extra closers before the `});`) — likely a paste/diff artifact;
// verify against the repository source before relying on this block.
}
});
return _.sortBy(sortedStyles, 'priority');
}
// testing style priority
if (require.main === module) {
console.log(require('util').inspect(sortStyles({
"#myview": {},
"#myview[platform=ios]": {},
"#myview[size=tablet]": {},
".container[platform=android size=tablet]": {},
"View[platform=ios]": {},
"Label": {},
".container[platform=android size=handheld]": {},
".container": {}
}), false, null));
console.log('------------------------------');
}

@@ -81,2 +81,14 @@ var path = require('path'),

// TODO: remove this once this is merged: https://github.com/appcelerator/titanium_mobile/pull/2610
// Make sure that ti.physicalSizeCategory is installed
if (!path.existsSync(path.join(outputPath,'ti.physicalSizeCategory-android-1.0.zip')) &&
!path.existsSync(path.join(outputPath,'modules','android','ti.physicalsizecategory','1.0','timodule.xml'))) {
wrench.copyDirSyncRecursive(path.join(alloyRoot,'modules'), outputPath, {preserve:true})
}
U.installModule(outputPath, {
id: 'ti.physicalSizeCategory',
platform: 'android',
version: '1.0'
});
// create components directory for view/controller components

@@ -87,2 +99,10 @@ U.copyAlloyDir(alloyRoot, 'lib', compileConfig.dir.resources);

// create the global style, if it exists
// Load the optional app-global style (app.tss); a failure here is fatal.
try {
compileConfig.globalStyle = CU.loadStyle(path.join(inputPath,CONST.DIR.STYLE,CONST.GLOBAL_STYLE));
} catch(e) {
logger.error(e.stack);
// NOTE(review): `globalStylePath` is not defined anywhere in this hunk —
// the die() message may reference an undefined variable; verify in the
// full source (the loaded path above is built inline).
U.die('Error processing global style at "' + globalStylePath + '"');
}
// Process all models

@@ -95,8 +115,4 @@ var models = processModels();

_.each(viewCollection, function(collection) {
//_.each(fs.readdirSync(path.join(collection.dir,'views')), function(view) {
_.each(wrench.readdirSyncRecursive(path.join(collection.dir,CONST.DIR.VIEW)), function(view) {
if (viewRegex.test(view)) {
console.log(view);
// var basename = path.basename(view, '.'+CONST.FILE_EXT.VIEW);
// parseView(basename, collection.dir, basename, collection.manifest);
parseView(view, collection.dir, collection.manifest);

@@ -119,3 +135,3 @@ }

logger.error(code);
U.die(e);
U.die(e.stack);
}

@@ -178,3 +194,8 @@

// Load the style and update the state
state.styles = CU.loadStyle(files.STYLE);
try {
state.styles = CU.loadAndSortStyle(files.STYLE);
} catch (e) {
logger.error(e.stack);
U.die('Error processing style at "' + files.STYLE + '"');
}

@@ -181,0 +202,0 @@ // read and parse the view file

@@ -14,2 +14,3 @@ /**

var JSON_NULL = JSON.parse('null');
var EQUALITY_SIGNS = {

@@ -228,2 +229,13 @@ '==': 1,

exports.optimizeStyle = function(styleList) {
for (var style in styleList) {
for (var key in styleList[style]) {
var v = styleList[style][key];
if (v == JSON_NULL || typeof(v)==='undefined' || typeof(v)==='null') {
delete styleList[style][key];
}
}
}
}
function optimize(ast, defines, fn)

@@ -230,0 +242,0 @@ {

@@ -12,5 +12,5 @@ var CU = require('../compilerUtils');

// Generate runtime code
code += args.symbol + " = A$(" + args.ns + "." + createFunc + "({\n";
code += args.symbol + " = A$(" + args.ns + "." + createFunc + "(\n";
code += CU.generateStyleParams(state.styles, args.classes, args.id, node.nodeName, state.extraStyle) + '\n';
code += "}),'" + node.nodeName + "', " + (args.parent.symbol || 'null') + ");\n";
code += "),'" + node.nodeName + "', " + (args.parent.symbol || 'null') + ");\n";
if (args.parent.symbol) {

@@ -17,0 +17,0 @@ code += args.parent.symbol + ".add(" + args.symbol + ");\n";

@@ -12,3 +12,3 @@ var CU = require('../compilerUtils'),

if (nodeText) {
state.styles = CU.addStyleById(state.styles, args.id, 'title', nodeText);
state.extraStyle = CU.createVariableStyle('title', "'" + nodeText.replace(/'/g, "\\'") + "'");
}

@@ -15,0 +15,0 @@

@@ -12,3 +12,3 @@ var CU = require('../compilerUtils'),

if (nodeText) {
state.styles = CU.addStyleById(state.styles, args.id, 'text', nodeText);
state.extraStyle = CU.createVariableStyle('text', "'" + nodeText.replace(/'/g, "\\'") + "'");
}

@@ -15,0 +15,0 @@

@@ -1,7 +0,12 @@

var CONST = require('../../common/constants');
var basePath = '../../';
var path = require('path'),
fs = require('fs'),
wrench = require('wrench'),
alloyRoot = path.join(__dirname,'..','..'),
_ = require(basePath + 'lib/alloy/underscore')._,
U = require(basePath + 'utils'),
CONST = require(basePath + 'common/constants');
exports.pad = function(x)
{
if (x < 10)
{
function pad(x) {
if (x < 10) {
return '0' + x;

@@ -12,7 +17,31 @@ }

exports.generateMigrationFileName = function(t)
{
exports.generateMigrationFileName = function(t) {
var d = new Date;
var s = String(d.getUTCFullYear()) + String(exports.pad(d.getUTCMonth())) + String(exports.pad(d.getUTCDate())) + String(exports.pad(d.getUTCHours())) + String(exports.pad(d.getUTCMinutes())) + String(d.getUTCMilliseconds())
var s = String(d.getUTCFullYear()) + String(pad(d.getUTCMonth())) + String(pad(d.getUTCDate())) + String(pad(d.getUTCHours())) + String(pad(d.getUTCMinutes())) + String(d.getUTCMilliseconds())
return s + '_' + t + '.' + CONST.FILE_EXT.MIGRATION;
}
exports.generate = function(name, type, program, args) {
var ext = '.'+CONST.FILE_EXT[type];
var templatePath = path.join(alloyRoot,'template',type.toLowerCase()+ext);
var dir = path.join(program.outputPath,CONST.DIR[type],path.dirname(name));
var file = path.join(dir,path.basename(name,ext)+ext);
if (path.existsSync(file) && !program.force) {
U.die(" file already exists: " + file);
}
if (!path.existsSync(dir)) {
wrench.mkdirSyncRecursive(dir);
}
var templateContents = fs.readFileSync(templatePath,'utf8');
if (args.templateFunc) { templateContents = args.templateFunc(templateContents); }
var code = _.template(templateContents, args.template || {});
fs.writeFileSync(file, code);
return {
file: file,
dir: dir,
code: code
};
}

@@ -1,20 +0,8 @@

var path = require('path'),
fs = require('fs'),
U = require('../../../utils'),
_ = require("../../../lib/alloy/underscore")._,
CONST = require('../../../common/constants'),
var GU = require('../generateUtils'),
logger = require('../../../common/logger');
module.exports = function(name, args, program) {
var templatePath = path.join(__dirname,'..','..','..','template','controller.' + CONST.FILE_EXT.CONTROLLER);
var cn = path.join(program.outputPath,'controllers',name+'.'+CONST.FILE_EXT.CONTROLLER);
if (path.existsSync(cn) && !program.force) {
U.die("Controller file already exists: " + cn);
}
var code = _.template(fs.readFileSync(templatePath,'utf8'), {});
fs.writeFileSync(cn, code);
logger.info('Generated controller named '+name);
var type = 'CONTROLLER';
var info = GU.generate(name, type, program);
logger.info('Generated ' + type.toLowerCase() + ' named ' + name);
}
var path = require('path'),
fs = require('fs'),
U = require('../../../utils'),
GU = require('../generateUtils'),
_ = require("../../../lib/alloy/underscore")._,
CONST = require('../../../common/constants'),
logger = require('../../../common/logger');
module.exports = function(name, args, program) {
var migrationsDir = path.join(program.outputPath,'migrations');
U.ensureDir(migrationsDir);
var templatePath = path.join(__dirname,'..','..','..','template','migration.' + CONST.FILE_EXT.MIGRATION);
var mf = path.join(migrationsDir, GU.generateMigrationFileName(name));
var md = _.template(fs.readFileSync(templatePath,'utf8'),{});
fs.writeFileSync(mf,md);
logger.info('Generated empty migration named '+name);
module.exports = function(name, args, program, template) {
var type = 'MIGRATION';
var dir = path.dirname(name);
name = path.join(dir,
GU.generateMigrationFileName(path.basename(name,'.'+CONST.FILE_EXT.MIGRATION)));
var info = GU.generate(name, type, program, {
template: template || {
up:'',
down:''
}
});
logger.info('Generated ' + type.toLowerCase() + ' named ' + name);
}
var path = require('path'),
fs = require('fs'),
GU = require('../generateUtils'),
U = require('../../../utils'),
GU = require('../generateUtils'),
_ = require("../../../lib/alloy/underscore")._,
_ = require('../../../lib/alloy/underscore')._,
CONST = require('../../../common/constants'),

@@ -10,12 +9,5 @@ logger = require('../../../common/logger');

module.exports = function(name, args, program) {
var templatePath = path.join(__dirname,'..','..','..','template');
var modelTemplatePath = path.join(templatePath, 'model.' + CONST.FILE_EXT.MODEL);
var migrationTemplatePath = path.join(templatePath, 'migration.' + CONST.FILE_EXT.MIGRATION);
var migrationsDir = path.join(program.outputPath,'migrations');
var modelsDir = path.join(program.outputPath,'models');
var template = {
up: '',
down: ''
}
var type = 'MODEL';
var basename = path.basename(name,'.'+CONST.FILE_EXT.MODEL);
// validate arguments and paths

@@ -28,27 +20,22 @@ if (args.length === 0) {

}
U.ensureDir(migrationsDir);
U.ensureDir(modelsDir);
// Create model JSON from template and command line column arguments
var modelJsonCode = _.template(fs.readFileSync(modelTemplatePath, 'utf8'), {name:name});
var json = JSON.parse(modelJsonCode);
_.each(args, function(pair) {
var arr = pair.split(':');
json.columns[arr[0]] = arr[1];
// generate model file
var info = GU.generate(name, type, program, {
template: { name: basename },
templateFunc: function(contents) {
var json = JSON.parse(contents);
if (!json.columns) { json.columns = []; }
_.each(args, function(pair) {
var arr = pair.split(':');
json.columns[arr[0]] = arr[1];
});
return U.stringifyJSON(json);
}
});
logger.info('Generated ' + type.toLowerCase() + ' named ' + name);
// Write new model JSON to model file
var modelFile = path.join(modelsDir,name+'.' + CONST.FILE_EXT.MODEL);
if (path.existsSync(modelFile) && !program.force) {
U.die("Model file already exists: "+modelFile);
}
var code = U.stringifyJSON(json);
fs.writeFileSync(modelFile, code);
// generate migration file
var migrationFile = path.join(migrationsDir, GU.generateMigrationFileName(name));
var migrationCode = code.split("\n");
// Create the "up" and "down" template values
template.up = '\tdb.createTable("' + name + '",\n';
// generate associated migration
var template = { up: '', down: '' };
var migrationCode = info.code.split("\n");
template.up = '\tdb.createTable("' + basename + '",\n';
_.each(migrationCode, function(line) {

@@ -58,8 +45,6 @@ template.up += '\t\t' + line + '\n';

template.up += '\t);';
template.down = '\tdb.dropTable("' + name + '");'
template.down = '\tdb.dropTable("' + basename + '");';
// Write out migration via template
fs.writeFileSync(migrationFile, _.template(fs.readFileSync(migrationTemplatePath, 'utf8'), template));
logger.info('Generated model named '+name);
// run the migration generator with the template
(require('./migration'))(name, args, program, template);
}

@@ -1,30 +0,11 @@

var path = require('path'),
fs = require('fs'),
U = require('../../../utils'),
_ = require("../../../lib/alloy/underscore")._,
CONST = require('../../../common/constants'),
var GU = require('../generateUtils'),
logger = require('../../../common/logger');
module.exports = function(name, args, program) {
var vn = path.join(program.outputPath,'views',name+'.'+CONST.FILE_EXT.VIEW);
var sn = path.join(program.outputPath,'styles',name+'.'+CONST.FILE_EXT.STYLE);
var templatePath = path.join(__dirname,'..','..','..','template');
var viewTemplate = path.join(templatePath, 'view.' + CONST.FILE_EXT.VIEW);
var styleTemplate = path.join(templatePath, 'style.' + CONST.FILE_EXT.STYLE);
// validate paths
if (path.existsSync(vn) && !program.force) {
U.die("View file already exists: "+vn);
var types = ['VIEW','STYLE'];
for (var i = 0; i < types.length; i++) {
var type = types[i];
var info = GU.generate(name, type, program);
logger.info('Generated ' + type.toLowerCase() + ' named ' + name);
}
if (path.existsSync(sn) && !program.force) {
U.die("Style file already exists: "+sn);
}
// write out view and style files based on templates
var XML = _.template(fs.readFileSync(viewTemplate,'utf8'), {}),
JSON = _.template(fs.readFileSync(styleTemplate,'utf8'), {});
fs.writeFileSync(vn,XML);
fs.writeFileSync(sn,JSON);
logger.info('Generated view and styles named '+name);
}

@@ -35,3 +35,4 @@ var path = require('path'),

"min-alloy-version": "1.0",
"min-titanium-version":"2.0"
"min-titanium-version":"2.0",
"tags":""
}));

@@ -38,0 +39,0 @@ fs.writeFileSync(path.join(widgetPath, 'views', 'widget.' + CONST.FILE_EXT.VIEW), '<View id="defaultView"/>');

@@ -8,2 +8,3 @@ var path = require('path'),

U = require('../../utils'),
CONST = require('../../common/constants'),
logger = require('../../common/logger'),

@@ -94,5 +95,5 @@ alloyRoot = path.join(__dirname,'..', '..');

defaultDir = path.join(templateDir,'default'),
INDEX_XML = fs.readFileSync(path.join(defaultDir,'index.xml'),'utf8'),
INDEX_JSON = fs.readFileSync(path.join(defaultDir,'index.tss'),'utf8'),
INDEX_C = fs.readFileSync(path.join(defaultDir,'index.js'),'utf8'),
INDEX_XML = fs.readFileSync(path.join(defaultDir,'index.'+CONST.FILE_EXT.VIEW),'utf8'),
INDEX_JSON = fs.readFileSync(path.join(defaultDir,'index.'+CONST.FILE_EXT.STYLE),'utf8'),
INDEX_C = fs.readFileSync(path.join(defaultDir,'index.'+CONST.FILE_EXT.CONTROLLER),'utf8'),
README = fs.readFileSync(path.join(templateDir, 'README'),'utf8'),

@@ -128,8 +129,18 @@ defaultConfig = {},

// create default view, controller, style, and config.
fs.writeFileSync(path.join(appPath,'views','index.xml'),INDEX_XML,'utf-8');
fs.writeFileSync(path.join(appPath,'styles','index.tss'),INDEX_JSON,'utf-8');
fs.writeFileSync(path.join(appPath,'controllers','index.js'),INDEX_C,'utf-8');
fs.writeFileSync(path.join(appPath,'config','alloy.json'),U.stringifyJSON(defaultConfig),'utf-8');
fs.writeFileSync(path.join(appPath,'views','index.'+CONST.FILE_EXT.VIEW),INDEX_XML,'utf-8');
fs.writeFileSync(path.join(appPath,'styles','index.'+CONST.FILE_EXT.STYLE),INDEX_JSON,'utf-8');
fs.writeFileSync(path.join(appPath,'controllers','index.'+CONST.FILE_EXT.CONTROLLER),INDEX_C,'utf-8');
fs.writeFileSync(path.join(appPath,'config','alloy.'+CONST.FILE_EXT.CONFIG),U.stringifyJSON(defaultConfig),'utf-8');
fs.writeFileSync(path.join(appPath,'README'),README,'utf-8');
// copy in any modules
wrench.copyDirSyncRecursive(path.join(alloyRoot,'modules'), projectPath, {preserve:true});
// TODO: remove this once this is merged: https://github.com/appcelerator/titanium_mobile/pull/2610
U.installModule(projectPath, {
id: 'ti.physicalSizeCategory',
platform: 'android',
version: '1.0'
});
// write the build file

@@ -136,0 +147,0 @@ alloyJmkTemplate = fs.readFileSync(path.join(templateDir,'alloy.jmk'), 'utf8');

exports.ALLOY_DIR = 'app';
exports.GLOBAL_STYLE = 'app.tss';

@@ -3,0 +4,0 @@ exports.FILE_EXT = {

var _ = require("alloy/underscore")._,
Backbone = require("alloy/backbone"),
var _ = require('alloy/underscore')._,
Backbone = require('alloy/backbone'),
STR = require('alloy/string');

@@ -9,10 +9,3 @@

// TODO: we might want to eliminate this as all sync operations can be handled
// in the adapter-specific code
Backbone.Collection.notify = _.extend({}, Backbone.Events);
Backbone.sync = function(method, model, opts) {
// Ti.API.info("sync called with method="+method+", model="+JSON.stringify(model)+", opts="+JSON.stringify(opts));
// Ti.API.info("config => "+JSON.stringify(model.config));
var m = (model.config || {});

@@ -22,6 +15,2 @@ var type = (m.adapter ? m.adapter.type : null) || 'sql';

require('alloy/sync/'+type).sync(model,method,opts);
// TODO: we might want to eliminate this as all sync operations can be handled
// in the adapter-specific code
Backbone.Collection.notify.trigger('sync', {method:method,model:model});
};

@@ -48,8 +37,8 @@

var extendClass = {};
// construct the model based on the current adapter type
if (migrations) { extendObj.migrations = migrations; }
if (migrations) { extendClass.migrations = migrations; }
if (_.isFunction(adapter.beforeModelCreate)) { config = adapter.beforeModelCreate(config) || config; }
var Model = Backbone.Model.extend(extendObj);
config.Model = Model; // needed for fetch operations to initialize the collection from persistent store
config.data = {}; // for localStorage or case where entire collection is needed to maintain store
var Model = Backbone.Model.extend(extendObj, extendClass);
Model.prototype.config = config;

@@ -135,1 +124,36 @@ if (_.isFunction(adapter.afterModelCreate)) { adapter.afterModelCreate(Model); }

}
// Fallback heuristic: treat the device as a tablet when its smaller
// screen dimension is at least 700px.
function isTabletFallback() {
return !(Math.min(
Ti.Platform.displayCaps.platformHeight,
Ti.Platform.displayCaps.platformWidth
) < 700);
}
// true when running on a tablet-class device; computed once at load time.
exports.isTablet = (function() {
if (OS_IOS) {
// iOS: only the iPad family is a tablet.
return Ti.Platform.osname === 'ipad';
}
if (OS_ANDROID) {
// Android: prefer the ti.physicalSizeCategory module when installed,
// falling back to the pixel heuristic when the require throws.
try {
var psc = require('ti.physicalSizeCategory');
return psc.physicalSizeCategory === 'large' ||
psc.physicalSizeCategory === 'xlarge';
} catch(e) {
Ti.API.warn('Could not find ti.physicalSizeCategory module, using fallback for Alloy.isTablet');
return isTabletFallback();
}
}
// TODO: this needs some help
if (OS_MOBILEWEB) {
return !(Math.min(
Ti.Platform.displayCaps.platformHeight,
Ti.Platform.displayCaps.platformWidth
) < 700);
}
// Last resort. Don't worry, uglifyjs cleans up this dead code if necessary.
return isTabletFallback();
})();
// convenience inverse of isTablet
exports.isHandheld = !exports.isTablet;
/**
* HTML5 localStorage sync adapter
*/
var _ = require("alloy/underscore")._;
var _ = require('alloy/underscore')._;

@@ -11,7 +11,12 @@ function S4() {

function guid() {
return (S4()+S4()+"-"+S4()+"-"+S4()+"-"+S4()+"-"+S4()+S4()+S4());
return (S4()+S4()+'-'+S4()+'-'+S4()+'-'+S4()+'-'+S4()+S4()+S4());
};
function Sync(model, method, opts)
{
// localStorage adapter setup: localStorage only exists on MobileWeb, so
// fail fast on every other platform.
// NOTE(review): throws a string, not an Error — stack traces are lost;
// consider `throw new Error(...)` if no caller depends on the raw string.
function InitAdapter(config) {
if (Ti.Platform.osname !== 'mobileweb') {
throw 'No support for localStorage persistence in non MobileWeb environments.';
}
}
function Sync(model, method, opts) {
var name = model.config.adapter.name;

@@ -26,3 +31,3 @@ var data = model.config.data;

case "create":
case 'create':
if (!model.id) model.id = model.attributes.id = guid();

@@ -33,3 +38,3 @@ data[model.id] = model;

case "read":
case 'read':
var store = localStorage.getItem(name);

@@ -42,5 +47,6 @@ var store_data = (store && JSON.parse(store)) || {};

}
model.trigger('fetch');
break;
case "update":
case 'update':
data[model.id] = model;

@@ -50,3 +56,3 @@ storeModel(data);

case "delete":
case 'delete':
delete data[model.id];

@@ -59,1 +65,19 @@ storeModel(data);

module.exports.sync = Sync;
// Runs before Backbone.Model.extend: seeds the in-memory data store used
// to mirror the full localStorage collection, then validates the platform.
module.exports.beforeModelCreate = function(config) {
config = config || {};
config.data = {}; // for localStorage or case where entire collection is needed to maintain store
InitAdapter(config);
return config;
};
// Runs after the Model class is created: gives the config a back-reference
// to the class so fetch can construct models from the persistent store.
module.exports.afterModelCreate = function(Model) {
Model = Model || {};
Model.prototype.config.Model = Model; // needed for fetch operations to initialize the collection from persistent store
return Model;
};

@@ -5,17 +5,25 @@ /**

*/
var _ = require("alloy/underscore")._,
var _ = require('alloy/underscore')._,
db;
function InitDB(config) {
// One random 4-hex-digit segment.
function S4() {
	return (((1 + Math.random()) * 0x10000) | 0).toString(16).substring(1);
}
// Pseudo-random GUID in the usual 8-4-4-4-12 hex layout.
function guid() {
	return [S4() + S4(), S4(), S4(), S4(), S4() + S4() + S4()].join('-');
}
function InitAdapter(config) {
if (!db) {
if (Ti.Platform.osname === 'mobileweb' || typeof Ti.Database === 'undefined') {
throw "No support for Titanium.Database in MobileWeb environment";
throw 'No support for Titanium.Database in MobileWeb environment.';
}
else {
db = Ti.Database.open("_alloy_");
db = Ti.Database.open('_alloy_');
}
module.exports.db = db;
// create the table in case it doesn't exist
db.execute("CREATE TABLE IF NOT EXISTS migrations (latest TEXT, model TEXT)");
// create the migration table in case it doesn't exist
db.execute('CREATE TABLE IF NOT EXISTS migrations (latest TEXT, model TEXT)');
}

@@ -28,3 +36,3 @@ return {};

// get the latest migration
var rs = db.execute("SELECT latest FROM migrations where model = ?",table);
var rs = db.execute('SELECT latest FROM migrations where model = ?', table);
if (rs.isValidRow()) {

@@ -37,4 +45,3 @@ mid = rs.field(0);

function SQLiteMigrateDB()
{
function SQLiteMigrateDB() {
//TODO: we should move this into the codegen so we don't waste precious cpu cycles doing this

@@ -86,3 +93,3 @@ this.column = function(name)

this.createTable = function(name,config) {
Ti.API.info("create table migration called for "+config.adapter.tablename);
Ti.API.info('create table migration called for '+config.adapter.tablename);

@@ -93,6 +100,6 @@ var self = this,

for (var k in config.columns) {
columns.push(k + ' ' + self.column(config.columns[k]));
columns.push(k+" "+self.column(config.columns[k]));
}
var sql = "CREATE TABLE "+config.adapter.tablename+" ( " + columns.join(",")+",id" + " )";
var sql = 'CREATE TABLE '+config.adapter.tablename+' ( '+columns.join(',')+',id' + ' )';
Ti.API.info(sql);

@@ -104,81 +111,71 @@

this.dropTable = function(name) {
Ti.API.info("drop table migration called for "+name);
db.execute("DROP TABLE IF EXISTS "+name);
Ti.API.info('drop table migration called for '+name);
db.execute('DROP TABLE IF EXISTS '+name);
};
}
/**
 * Backbone sync adapter for the Alloy SQLite store.
 * Dispatches a Backbone CRUD method against the model's configured table.
 *
 * NOTE(review): this span previously contained the deleted class-based
 * SQLSync implementation interleaved with this switch-based version
 * (invalid syntax); only the switch-based version is kept.
 *
 * @param {Object} model  Backbone model (or collection for 'read') with
 *                        config.adapter.tablename and config.columns set.
 * @param {String} method One of 'create', 'read', 'update', 'delete'.
 * @param {Object} opts   Backbone sync options (currently unused).
 */
function Sync(model, method, opts) {
	var table = model.config.adapter.tablename;
	var columns = model.config.columns;
	switch (method) {
		case 'create':
			var names = [];
			var values = [];
			var q = [];
			for (var k in columns) {
				names.push(k);
				values.push(model.get(k));
				q.push('?');
			}
			// assign a client-generated id so the model leaves the "new" state
			var id = guid();
			var sql = 'INSERT INTO '+table+' ('+names.join(',')+',id) VALUES ('+q.join(',')+',?)';
			values.push(id);
			db.execute(sql, values);
			model.id = id;
			break;
		case 'read':
			var sql = 'SELECT * FROM '+table;
			var rs = db.execute(sql);
			while (rs.isValidRow()) {
				var o = {};
				_.times(rs.fieldCount(), function(c) {
					var fn = rs.fieldName(c);
					o[fn] = rs.fieldByName(fn);
				});
				// wrap each row in a model instance and add it to the collection
				var m = new model.config.Model(o);
				model.models.push(m);
				rs.next();
			}
			rs.close();
			model.trigger('fetch');
			break;
		case 'update':
			var names = [];
			var values = [];
			var q = [];
			for (var k in columns) {
				names.push(k+'=?');
				values.push(model.get(k));
				q.push('?');
			}
			var sql = 'UPDATE '+table+' SET '+names.join(',')+' WHERE id=?';
			values.push(model.id);
			db.execute(sql, values);
			break;
		case 'delete':
			var sql = 'DELETE FROM '+table+' WHERE id=?';
			db.execute(sql, model.id);
			model.id = null;
			break;
	}
}

@@ -199,8 +196,6 @@

var prev;
//TODO: check config for the right adapter and then delegate. for now just doing SQL
var sqlMigration = new SQLiteMigrateDB;
var migrationIds = {}; // cache for latest mid by model name
db.execute("BEGIN;");
db.execute('BEGIN;');

@@ -214,5 +209,5 @@ // iterate through all our migrations and call up/down and the last migration should

var mid = GetMigrationForCached(mctx.name,migrationIds);
Ti.API.info("mid = "+mid+", name = "+mctx.name);
Ti.API.info('mid = '+mid+', name = '+mctx.name);
if (!mid || mctx.id > mid) {
Ti.API.info("Migration starting to "+mctx.id+" for "+mctx.name);
Ti.API.info('Migration starting to '+mctx.id+' for '+mctx.name);
if (prev && _.isFunction(prev.down)) {

@@ -228,3 +223,3 @@ prev.down(sqlMigration);

else {
Ti.API.info("skipping migration "+mctx.id+", already performed");
Ti.API.info('skipping migration '+mctx.id+', already performed');
prev = null;

@@ -235,18 +230,27 @@ }

if (prev && prev.id) {
db.execute("DELETE FROM migrations where model = ?",prev.name);
db.execute("INSERT INTO migrations VALUES (?,?)",prev.id,prev.name);
db.execute('DELETE FROM migrations where model = ?', prev.name);
db.execute('INSERT INTO migrations VALUES (?,?)', prev.id,prev.name);
}
db.execute("COMMIT;");
db.execute('COMMIT;');
}
function Sync(model, method, opts) {
var sync = new SQLSync(model);
return sync[method](opts);
}
module.exports.sync = Sync;
module.exports.beforeModelCreate = InitDB;
module.exports.beforeModelCreate = function(config) {
config = config || {};
InitAdapter(config);
return config;
};
module.exports.afterModelCreate = function(Model) {
Model = Model || {};
Model.prototype.config.Model = Model; // needed for fetch operations to initialize the collection from persistent store
Migrate(Model.migrations);
return Model;
};
module.exports.sync = Sync;

@@ -45,2 +45,67 @@ // The island of misfit toys... for functions

exports.installModule = function(dir, opts)
{
var tiapp = path.join(dir,'tiapp.xml');
if (path.existsSync(tiapp))
{
var xml = fs.readFileSync(tiapp);
var doc = new DOMParser().parseFromString(String(xml));
var modules = doc.documentElement.getElementsByTagName("modules");
var found = false;
if (modules.length > 0)
{
var items = modules.item(0).getElementsByTagName('module');
if (items.length > 0)
{
for (var c=0;c<items.length;c++)
{
var mod = items.item(c);
var name = exports.XML.getNodeText(mod);
if (name == opts.id)
{
found = true;
break;
}
}
}
}
if (!found)
{
var node = doc.createElement('module');
if (opts.platform) {
node.setAttribute('platform',opts.platform);
}
node.setAttribute('version',opts.version || '1.0');
var text = doc.createTextNode(opts.id);
node.appendChild(text);
var pna = null;
// install the plugin into tiapp.xml
if (modules.length == 0)
{
var pn = doc.createElement('modules');
doc.documentElement.appendChild(pn);
doc.documentElement.appendChild(doc.createTextNode("\n"));
pna = pn;
}
else
{
pna = modules.item(0);
}
pna.appendChild(node);
pna.appendChild(doc.createTextNode("\n"));
var serializer = new XMLSerializer();
var newxml = serializer.serializeToString(doc);
fs.writeFileSync(tiapp,newxml,'utf-8');
logger.info("Installed '" + opts.id + "' module to "+tiapp);
}
}
}
exports.copyAlloyDir = function(appDir, sources, destDir) {

@@ -47,0 +112,0 @@ var sources = _.isArray(sources) ? sources : [sources];

@@ -16,3 +16,3 @@ {

],
"version": "0.1.21",
"version": "0.1.22",
"author": "Appcelerator, Inc. <info@appcelerator.com>",

@@ -19,0 +19,0 @@ "maintainers": [

@@ -24,2 +24,17 @@ Alloy

Quick Start
-----------
This quick start will give you the shortest path to installing Alloy and creating your first Alloy-driven project. It is assumed that you have a working Titanium (Studio) environment.
* Do this once:
1. Download and install [Node.js](http://nodejs.org/), if necessary
2. At the command line: `sudo npm install -g alloy`
* Do this for each project you create:
3. Create a new mobile project in Titanium Studio, we'll call its path **PATH/TO/PROJECT**.
4. `cd PATH/TO/PROJECT`
5. `alloy new .`
After these steps, you can now run your projects in Titanium Studio. Be aware when working with an Alloy project that all files in your **Resources** directory are subject to being overwritten. All your work should be done in your project's **app** folder.
Installation

@@ -26,0 +41,0 @@ -------------

@@ -1,7 +0,7 @@

/// Function to keep a Ti.TableView in sync with Backbone Model.
// Function to keep a Ti.TableView in sync with Backbone Model.
$.table.updateContent = function(collection) {
var rows = [];
for (var i = 0; i < collection.length; i++) {
var m = collection[i].attributes, title = "";
for(var key in m) { if (key !== "id") { title += m[key] + " " }}
var model = collection.at(i).attributes, title = "";
for (var key in model) { if (key !== "id") { title += model[key] + " " }}
rows.push(Ti.UI.createTableViewRow({"title":title}));

@@ -12,35 +12,27 @@ }

// CRUD ops handler, put any special model processing here, ignored for this sample
var CRUDops = {
"create": function(o) { Ti.API.info("create called with model="+JSON.stringify(o)); },
"read": function(o) { Ti.API.info("read called with model="+JSON.stringify(o)); },
"update": function(o) { Ti.API.info("update called with model="+JSON.stringify(o)); },
"delete": function(o) { Ti.API.info("delete called with model="+JSON.stringify(o)); }
};
// listener for server to persistent store sync requests
Alloy.getCollection('Book').notify.on('sync', function(e) {
CRUDops[e.method](e.model);
});
// Now let's create a Backbone collection that will hold our models,
// the classes that represent our model have been generated automatically.
// Use new on the generated classes to create the model or collection object.
// the classes that represent our model have been generated automatically
// as Alloy components. Use new on the component to create the model or
// collection.
var books = new (Alloy.getCollection('Book'));
// Fetch will load the models from persistent storage.
// You can bind any Backbone event to models or collections but fetch is convenient because
// fetch occurs when the persistent store is sync'd to the local Backbone server.
books.bind("fetch", function() { $.table.updateContent(books); });
// Fetch will load models from persistent storage, syncing Backbone and the persistent store.
books.fetch();
// Now we can add items to the model.
var book = new (Alloy.getModel('Book'))({ book:"Jungle Book", author:"Kipling" });
var book = new (Alloy.getModel('Book'))({book:"Jungle Book", author:"Kipling"});
books.add(book);
// Use Backbone shortcut to create a model and add to collection in single step. Does the same
// thing as creating a new model and then adding it to the collection.
// thing as the creating a new model and then adding it to the collection.
books.add({book:"War and Peace", author:"Tolstoy"});
// Add will add models to local server but save triggers the CRUD create opperation,
// During create an id is added to the model signaling that the model has been persisted
// and no longer in the new state.
books.forEach(function(model){model.save();});
// Add will add models to the local Backbone server but save triggers the CRUD create operation
// causing the model to get added to the persistent store. During create an id is added to the
// model signaling that the model has been persisted and no longer in the new state.
books.forEach(function(model){ model.save();});

@@ -50,8 +42,11 @@ // UPDATE - update the model save here triggers the CRUD update opperation

// Okay time to show the results. Here you could filter the results if you only wanted to render a subset
$.table.updateContent(books.models);
// Okay, time to show the results. Remember this syncs the local Backbone server with the persistent store.
books.fetch();
// DELETE - destroy triggers the CRUD delete opperation
//books.forEach(function(model){model.destroy({success: function(model, response) {}})}); // uncomment to remove store
for(i=books.length-1; i>=0; i--) {
var model = books.at(i);
model.destroy();
};
$.index.open();
$.index.open();

@@ -1,7 +0,7 @@

// Function to keep a Ti.TableView in sync with Backbone CRUD opperations
$.table.updateContent = function(o) {
// Function to keep a Ti.TableView in sync with Backbone Model.
$.table.updateContent = function(collection) {
var rows = [];
for (var i = 0; i < o.length; i++) {
var m = o.models[i], title = "";
for(var key in m.attributes) { title += m.attributes[key] + " " }
for (var i = 0; i < collection.length; i++) {
var model = collection.at(i).attributes, title = "";
for (var key in model) { if (key !== "id") { title += model[key] + " " }}
rows.push(Ti.UI.createTableViewRow({"title":title}));

@@ -12,46 +12,40 @@ }

// CRUD ops handler, put any special model processing here, some are ignored for this sample
var CRUDops = {
"create": function(o) { Ti.API.info("create called with model="+JSON.stringify(o)); },
"read": function(o) { $.table.updateContent(o); },
"update": function(o) { Ti.API.info("update called with model="+JSON.stringify(o)); },
"delete": function(o) { Ti.API.info("delete called with model="+JSON.stringify(o)); }
};
// Now let's create a Backbone collection that will hold our models,
// the classes that represent our model have been generated automatically
// as Alloy components. Use new on the component to create the model or
// collection.
var books = new (Alloy.getCollection('Book'));
// listener for server to persistent store sync requests
Alloy.getCollection('Book').notify.on('sync', function(e) {
CRUDops[e.method](e.model);
});
// You can bind any Backbone event to models or collections but fetch is convenient because
// fetch occurs when the persistent store is sync'd to the local Backbone server.
books.bind("fetch", function() { $.table.updateContent(books); });
// Now let's create a Backbone collection that will hold our models,
// the classes that represent our model have been generated automatically.
// Use new on the generated classes to create the model or collection object.
var books = new (Alloy.getCollection('Book'));
// Fetch will load models from persistent starage, sync'ing Backbone and persistent store.
books.fetch();
// create a model
// Now we can add items to the model.
var book = new (Alloy.getModel('Book'))({book:"Jungle Book", author:"Kipling"});
// Add a model to a Backbone collection.
books.add(book);
// Use Backbone shortcut to create a model and add to collection in single step.
// Use Backbone shortcut to create a model and add to collection in single step. Does the same
// thing as the creating a new model and then adding it to the collection.
books.add({book:"War and Peace", author:"Tolstoy"});
// fetch triggers the CRUD read operation causing a sever to persistent store sync up.
// Everything in the current Backbone model state will be overwritten with the
// fetched "server" state, triggering a "read" sync operation
books.fetch();
// Add will add models to local Backbone server but save triggers the CRUD create opperation
// causing the model to get added to the persistent store. During create an id is added to the
// model signaling that the model has been persisted and no longer in the new state.
books.forEach(function(model){ model.save();});
// Add will add models to local server but save triggers the CRUD create opperation,
// During create an id is added to the model signaling that the model has been persisted
// and no longer in the new state.
books.forEach(function(model){model.save();});
// UPDATE - update the model save here triggers the CRUD update opperation
book.save({author:"R Kipling"});
// Okay time to show the results. Remember this sync's local Backbone server with persitent store.
books.fetch();
// DELETE - destroy triggers the CRUD delete opperation
books.forEach(function(model){model.destroy();});
for(i=books.length-1; i>=0; i--) {
var model = books.at(i);
model.destroy();
};
$.main.open();
$.index.open();

@@ -9,3 +9,3 @@

db.createTable("todos",
db.createTable("books",
{

@@ -12,0 +12,0 @@ "columns":

@@ -9,3 +9,3 @@

db.createTable("todos",
db.createTable("books",
{

@@ -12,0 +12,0 @@ "columns":

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc