commit
dc8187c9e8
@ -0,0 +1,9 @@
|
||||
var/db/*
|
||||
temp*
|
||||
*.tmproj
|
||||
.makefiles*
|
||||
configs/*.js
|
||||
build/*
|
||||
*.node
|
||||
.DS_Store
|
||||
._*
|
@ -0,0 +1,10 @@
|
||||
[submodule "modules/node-hash"]
|
||||
path = modules/node-hash
|
||||
url = git://github.com/Marak/node_hash.git
|
||||
[submodule "modules/node-ldapsearch"]
|
||||
path = modules/node-ldapsearch
|
||||
url = git://github.com/xSmurf/node-ldapsearch.git
|
||||
[submodule "modules/ltx"]
|
||||
path = modules/node-ltx
|
||||
url = git://github.com/astro/ltx.git
|
||||
|
@ -0,0 +1,65 @@
|
||||
var sys = require("sys"),
|
||||
events = require("events"),
|
||||
fs = require("fs");
|
||||
|
||||
var Configs = this;
|
||||
|
||||
/**
 * load: reads every *.js file in ./configs/, requires each one and merges
 * its exported `Config` keys onto the shared `Configs` object, running and
 * then deleting any `init` hooks. Emits "config:loaded" when all files have
 * been processed. Inherits from EventEmitter (see sys.inherits below).
 *
 * @param {Boolean} [debug] - stored on Configs.debug (defaults to false)
 */
var load = function (debug) {
    events.EventEmitter.call(this);

    var self = this;
    Configs.debug = debug || false;

    // Was an implicit global; local now so concurrent/repeated loads can't clash.
    var dir = __dirname + "/configs/";
    fs.readdir(dir, function (err, files) {
        if (err) {
            sys.puts(("[ warn ] unable to load config directory: " + dir).magenta);
            return;
        }

        for (var k = 0, l = files.length; k < l; ++k) {
            // Only *.js files are config modules.
            if (!(/\.js$/.exec(files[k]))) {
                continue;
            }

            var filePath = dir + files[k];
            // NOTE(review): substr(1) drops the first char of an absolute
            // filePath — verify this actually matches the moduleCache keys.
            var fullPath = __dirname + filePath.substr(1);
            // `fileName` and `fileKey` were implicit globals; declared now.
            var fileName = filePath.replace(/(\.js)$/, "");
            var fileKey = fileName.replace(/^(.*)\//g, "");

            // Delete module cache so a reload picks up fresh file contents.
            if (typeof process.mainModule.moduleCache[fullPath] !== "undefined") {
                delete process.mainModule.moduleCache[fullPath];
            }

            var configFile = require(fileName).Config;

            Object.keys(configFile).forEach(function (key) {
                Configs[key] = configFile[key];

                if (Object.keys(configFile).length > 1) {
                    // Multi-key config: a top-level `init` runs once against Configs.
                    if (key === "init" && typeof Configs.init === "function") {
                        Configs.init.call(Configs, Configs);

                        delete Configs.init;
                    }
                } else {
                    // Single-key config: the value's own `init` runs against itself.
                    if (typeof Configs[key].init === "function") {
                        Configs[key].init.call(Configs, Configs[key]);

                        delete Configs[key].init;
                    }
                }
            });

            sys.puts(("[ config ] ./" + fileKey).magenta);
        }

        Configs.loaded = true;
        self.emit("config:loaded");
    });
};

sys.inherits(load, events.EventEmitter);

exports.load = load;
|
@ -0,0 +1,27 @@
|
||||
var path = require("path");
|
||||
|
||||
exports.Config = {
|
||||
version: "0.0.1",
|
||||
|
||||
responderDir: "responders",
|
||||
|
||||
color: "green",
|
||||
|
||||
colors: {
|
||||
commands: "red",
|
||||
modules: "green",
|
||||
configs: "magenta",
|
||||
success: "green",
|
||||
failure: "red"
|
||||
},
|
||||
|
||||
init: function(self)
|
||||
{
|
||||
if (this.debug === true) {
|
||||
}
|
||||
/*
|
||||
if (("fields" in self.ldap) === true) {
|
||||
self.ldap.users.replace(/(\?\*\?)/, self.ldap.fields.join(","));
|
||||
}*/
|
||||
}
|
||||
};
|
@ -0,0 +1,198 @@
|
||||
var debug = true;
|
||||
|
||||
var sys = require("sys"),
|
||||
util = require("util"),
|
||||
events = require("events"),
|
||||
colors = require("colors"),
|
||||
path = require("path"),
|
||||
hash = require("../deps/node-hash/lib/hash"),
|
||||
// We load this separately because we might need some things before the configs are loaded
|
||||
FreeNodeConfig = require("../configs/freenode").Config,
|
||||
Configs = require("../configs"),
|
||||
Responders = require("../responders"),
|
||||
LDAPClient = require("../deps/node-ldapsearch/build/default/ldap.node");
|
||||
|
||||
/**
 * FreeNode: main process object. Builds the dependency loader chain and
 * invokes onInited() once the initial "loaded" event fires.
 */
var FreeNode = function() {
    var self = this;

    this.uptime = (new Date().getTime());
    this.loaders = [];
    this.ConfigsLoader = null;
    this.RespondersLoader = null;

    process.title = "FreeNode";

    events.EventEmitter.call(this);

    // Always run from the project root, regardless of the launch directory.
    process.chdir(path.dirname(__dirname));

    // One-shot handler: detaches itself, then hands off to onInited().
    var onLoadedOnce = function() {
        self.removeListener("loaded", onLoadedOnce);
        self.onInited();
    };
    this.addListener("loaded", onLoadedOnce);

    this.addDependencies();

    return this;
};

sys.inherits(FreeNode, events.EventEmitter);
exports.FreeNode = FreeNode;
|
||||
|
||||
|
||||
/**
 * init: Processes the loader chain by starting each loader function
 * and waiting for the return object's loaded event before processing the next one.
 *
 * Each chain entry is {success, failure} as registered by addDependency().
 * When the head entry's success event fires, the entry is shifted off and
 * init() recurses; an empty chain emits "loaded" (or "reloaded" in reload mode).
 *
 * @param {Boolean} [reload] - carried through the recursion; selects the
 *                             terminal event ("loaded" vs "reloaded")
 * @returns {FreeNode} this, for chaining
 */
FreeNode.prototype.init = function(reload) {
    var self = this;

    this.reload = false;
    if (typeof reload !== "undefined") {
        this.reload = reload;
    }

    if (this.loaders.length > 0) {
        // Immediately-invoked wrapper: captures the head-of-chain entry as
        // `params` so the returned closure is bound to this specific loader.
        var setup = function(params) {
            return function() {
                var success = params.success;

                // success[2], when present, is a pre-existing object to bind
                // events on BEFORE invoking the loader function (see the
                // addDependency doc block).
                if (typeof success[2] === "undefined") {
                    var obj = success[0]();
                } else {
                    var obj = success[2];
                }

                if (typeof params.failure !== "undefined") {
                    var failure = params.failure;
                    var cbFailure = function(error) {
                        obj.removeListener(failure[1], cbFailure);
                        failure[0](error);
                    };
                }

                // On success: detach both listeners, drop this entry, and
                // continue with the rest of the chain.
                var cbSuccess = function() {
                    obj.removeListener(success[1], cbSuccess);
                    // `failure` exists via var hoisting only when a failure
                    // handler was supplied above.
                    if (typeof failure !== "undefined") {
                        obj.removeListener(failure[1], cbFailure);
                    }

                    self.loaders.shift();
                    self.init(self.reload);
                };

                if (typeof params.failure !== "undefined") {
                    obj.addListener(failure[1], cbFailure);
                }

                obj.addListener(success[1], cbSuccess);

                // Deferred-invocation form: listeners are attached, so it is
                // now safe to call the loader function.
                if (typeof success[2] !== "undefined") {
                    success[0]();
                }
            };
        }(this.loaders[0]);

        setup();
    } else if (this.reload === false) {
        this.emit("loaded");
    } else if (this.reload === true) {
        this.emit("reloaded");
    }

    return this;
};
|
||||
|
||||
/**
 * reinit: Prepares and initiates a process resource reload
 * (triggered by SIGHUP)
 *
 * @param {Function} [cbReturn] - optional callback fired once after the
 *                                reload completes
 */
FreeNode.prototype.reinit = function(cbReturn) {
    var self = this;

    this.addDependencies(true);

    // One-shot banner once the chain reports "reloaded".
    var onReloaded = function() {
        self.removeListener("reloaded", onReloaded);
        console.log(("\nFreeNode Reload Completed!\n\n")[Configs.color]);
    };
    this.addListener("reloaded", onReloaded);

    if (typeof cbReturn === "function") {
        // One-shot caller callback, detached after its first invocation.
        var cbFunc = function() {
            self.removeListener("reloaded", cbFunc);
            cbReturn();
        };
        this.addListener("reloaded", cbFunc);
    }

    this.init(true);
};
|
||||
|
||||
// onInited: intentionally empty hook, invoked once after the initial loader
// chain finishes (see the "loaded" listener in the constructor). Override
// on an instance or subclass to run post-startup code.
FreeNode.prototype.onInited = function() {

};
|
||||
|
||||
/**
 * addDependencies: loads a list of the process' dependencies in the loading chain.
 * Some dependencies are skipped if this is called in reload mode
 *
 * @param {Boolean} [reload] - true when rebuilding the chain for a SIGHUP reload
 */
FreeNode.prototype.addDependencies = function(reload) {
    var self = this;

    if (typeof reload === "undefined") {
        // Was `var reload = false`, which re-declared the parameter; a plain
        // assignment has the same effect without the shadowing confusion.
        reload = false;
    }

    this.addDependency(
        // Loader
        [function() {
            console.log(("Loading Configuration Files")[FreeNodeConfig.colors.configs]);
            self.ConfigsLoader = new Configs.load(debug);
            return self.ConfigsLoader;
        }, "config:loaded"]
    );

    if (reload === true) {
        // We also want to wait for the MUC to bind
        this.addDependency(
            // Loader
            [function() {
                // NOTE(review): Configs.colors.responders is not defined in the
                // visible configs/freenode.js colors map — confirm another
                // config supplies it, otherwise this indexes undefined.
                console.log(("Removing responders for reload")[Configs.colors.responders]);

                Responders.unload.apply(self.Responders, [self]);
            }, "responders:deloaded", this]
        );
    }

    this.addDependency(
        // Loader
        [function() {
            console.log(("Loading Responders")[Configs.colors.responders]);

            Responders.load.apply(self, [self]);
        }, "responders:loaded", this]
    );
};
|
||||
|
||||
/**
 * addDependency: appends one dependency entry to the loading chain.
 *
 * success[0] = The object loader function, should return the object on which to bind the loaded event
 * success[1] = The "loaded" event which the object will trigger when it's done loading
 * success[2] = Optionally, an object on which to bind the event before calling the loader function
 *              (useful when the loader triggers the event on an object that already exists)
 * Optional:
 * failure[0] = Callback function to bind to the object on failure
 * failure[1] = Error event name on which to bind the callback
 */
FreeNode.prototype.addDependency = function(success, failure) {
    this.loaders.push({ success: success, failure: failure });
};
|
||||
|
@ -0,0 +1,14 @@
|
||||
var FreeNodeHandler = require("./libs/freenode").FreeNode,
    Configs = require("./configs");

// Instantiate our main object
var FreeNode = new FreeNodeHandler();

// Kick off the loader chain (configs first, then responders).
FreeNode.init();

// Process SIGHUP by calling reinit (reloads configs/responders in place).
process.on("SIGHUP", function() {
    console.log(("\n\n**** Reloading FreeNode! ****\n")[Configs.color]);

    FreeNode.reinit();
});
|
@ -0,0 +1,133 @@
|
||||
/*
|
||||
* Date Format 1.2.3
|
||||
* (c) 2007-2009 Steven Levithan <stevenlevithan.com>
|
||||
* MIT license
|
||||
*
|
||||
* Includes enhancements by Scott Trenda <scott.trenda.net>
|
||||
* and Kris Kowal <cixar.com/~kris.kowal/>
|
||||
*
|
||||
* Accepts a date, a mask, or a date and a mask.
|
||||
* Returns a formatted version of the given date.
|
||||
* The date defaults to the current date/time.
|
||||
* The mask defaults to dateFormat.masks.default.
|
||||
*
|
||||
* http://blog.stevenlevithan.com/archives/date-time-format
|
||||
*
|
||||
*/
|
||||
|
||||
var dateFormat = function () {
|
||||
var token = /d{1,4}|m{1,4}|yy(?:yy)?|([HhMsTt])\1?|[LloSZ]|"[^"]*"|'[^']*'/g,
|
||||
timezone = /\b(?:[PMCEA][SDP]T|(?:Pacific|Mountain|Central|Eastern|Atlantic) (?:Standard|Daylight|Prevailing) Time|(?:GMT|UTC)(?:[-+]\d{4})?)\b/g,
|
||||
timezoneClip = /[^-+\dA-Z]/g,
|
||||
pad = function (val, len) {
|
||||
val = String(val);
|
||||
len = len || 2;
|
||||
while (val.length < len) val = "0" + val;
|
||||
return val;
|
||||
};
|
||||
|
||||
// Regexes and supporting functions are cached through closure
|
||||
return function (date, mask, utc) {
|
||||
var dF = dateFormat;
|
||||
|
||||
// You can't provide utc if you skip other args (use the "UTC:" mask prefix)
|
||||
if (arguments.length == 1 && Object.prototype.toString.call(date) == "[object String]" && !/\d/.test(date)) {
|
||||
mask = date;
|
||||
date = undefined;
|
||||
}
|
||||
|
||||
// Passing date through Date applies Date.parse, if necessary
|
||||
date = date ? new Date(date) : new Date;
|
||||
if (isNaN(date)) throw SyntaxError("invalid date");
|
||||
|
||||
mask = String(dF.masks[mask] || mask || dF.masks["default"]);
|
||||
|
||||
// Allow setting the utc argument via the mask
|
||||
if (mask.slice(0, 4) == "UTC:") {
|
||||
mask = mask.slice(4);
|
||||
utc = true;
|
||||
}
|
||||
|
||||
var _ = utc ? "getUTC" : "get",
|
||||
d = date[_ + "Date"](),
|
||||
D = date[_ + "Day"](),
|
||||
m = date[_ + "Month"](),
|
||||
y = date[_ + "FullYear"](),
|
||||
H = date[_ + "Hours"](),
|
||||
M = date[_ + "Minutes"](),
|
||||
s = date[_ + "Seconds"](),
|
||||
L = date[_ + "Milliseconds"](),
|
||||
o = utc ? 0 : date.getTimezoneOffset(),
|
||||
flags = {
|
||||
d: d,
|
||||
dd: pad(d),
|
||||
ddd: dF.i18n.dayNames[D],
|
||||
dddd: dF.i18n.dayNames[D + 7],
|
||||
m: m + 1,
|
||||
mm: pad(m + 1),
|
||||
mmm: dF.i18n.monthNames[m],
|
||||
mmmm: dF.i18n.monthNames[m + 12],
|
||||
yy: String(y).slice(2),
|
||||
yyyy: y,
|
||||
h: H % 12 || 12,
|
||||
hh: pad(H % 12 || 12),
|
||||
H: H,
|
||||
HH: pad(H),
|
||||
M: M,
|
||||
MM: pad(M),
|
||||
s: s,
|
||||
ss: pad(s),
|
||||
l: pad(L, 3),
|
||||
L: pad(L > 99 ? Math.round(L / 10) : L),
|
||||
t: H < 12 ? "a" : "p",
|
||||
tt: H < 12 ? "am" : "pm",
|
||||
T: H < 12 ? "A" : "P",
|
||||
TT: H < 12 ? "AM" : "PM",
|
||||
Z: utc ? "UTC" : (String(date).match(timezone) || [""]).pop().replace(timezoneClip, ""),
|
||||
o: (o > 0 ? "-" : "+") + pad(Math.floor(Math.abs(o) / 60) * 100 + Math.abs(o) % 60, 4),
|
||||
S: ["th", "st", "nd", "rd"][d % 10 > 3 ? 0 : (d % 100 - d % 10 != 10) * d % 10]
|
||||
};
|
||||
|
||||
return mask.replace(token, function ($0) {
|
||||
return $0 in flags ? flags[$0] : $0.slice(1, $0.length - 1);
|
||||
});
|
||||
};
|
||||
}();
|
||||
|
||||
// Some common format strings
|
||||
dateFormat.masks = {
|
||||
"default": "ddd mmm dd yyyy HH:MM:ss",
|
||||
httpHeaders: "UTC:ddd, dd mmm yyyy HH:MM:ss \"GMT\"",
|
||||
mediumFull: "ddd mmm dd HH:MM",
|
||||
mediumFile: "ddd_mmm_dd-HH-MM",
|
||||
shortDate: "m/d/yy",
|
||||
mediumDate: "mmm d, yyyy",
|
||||
longDate: "mmmm d, yyyy",
|
||||
fullDate: "dddd, mmmm d, yyyy",
|
||||
shortTime: "h:MM TT",
|
||||
mediumTime: "h:MM:ss TT",
|
||||
longTime: "h:MM:ss TT Z",
|
||||
isoDate: "yyyy-mm-dd",
|
||||
isoTime: "HH:MM:ss",
|
||||
isoDateTime: "yyyy-mm-dd'T'HH:MM:ss",
|
||||
isoUtcDateTime: "UTC:yyyy-mm-dd'T'HH:MM:ss'Z'"
|
||||
};
|
||||
|
||||
// Internationalization strings
|
||||
dateFormat.i18n = {
|
||||
dayNames: [
|
||||
"Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat",
|
||||
"Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"
|
||||
],
|
||||
monthNames: [
|
||||
"Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec",
|
||||
"January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"
|
||||
]
|
||||
};
|
||||
|
||||
exports.dateFormat = dateFormat;

// For convenience...
// NOTE(review): this extends the native Date prototype, affecting every Date
// in the process — confirm no collision with other libraries doing the same.
Date.prototype.format = function (mask, utc) {
    return dateFormat(this, mask, utc);
};
|
@ -0,0 +1,115 @@
|
||||
var sys = require("sys"),
|
||||
util = require("util"),
|
||||
events = require("events"),
|
||||
colors = require("colors"),
|
||||
fs = require("fs");
|
||||
|
||||
/**
 * read a directory (recursively deep)
 * data[] = an object for each element in the directory
 *  .name = item's name (file or folder name)
 *  .filepath = item's full path (path + "/" + name)
 *  .stat = item's stat (.stat.isDirectory() == true IF a folder)
 *  .children = another data[] for the children
 * filter = an object with various filter settings:
 *  .depth = max directory recursion depth to travel
 *   (0 or missing means: infinite)
 *   (1 means: only the folder passed in)
 *  .hidden = true means: process hidden files and folders (defaults to false)
 *  .callback = callback function: callback(name, stat, filter) -- returns truthy to keep the file
 *
 * @param path = path to directory to read (".", ".\apps")
 * @param callback = function to callback to: callback(err, data)
 * @param [filter] = (optional) filter object
 *
 * NOTE(review): the recursive call below does not forward `filter`, so
 * depth/hidden/callback filtering only applies to the top-level folder —
 * confirm whether that is intended.
 * NOTE(review): on fs errors, `callback(err)` can fire more than once
 * (once per failing stat) — callers should guard if that matters.
 *
 * Shamelessly snagged from:
 * http://utahjs.com/2010/09/16/nodejs-events-and-recursion-readdir/
 */
exports.recurseDir = function(path, callback, filter) {
    if (filter) {
        // process filter. are we too deep yet?
        if (!filter.depthAt) filter.depthAt = 1; // initialize what depth we are at
        if (filter.depth && filter.depth < filter.depthAt) {
            callback(undefined, []); // we are too deep. return "nothing found"
            return;
        }
    }

    // queue up a "readdir" file system call (and return)
    fs.readdir(path, function(err, files) {
        if (err) {
            callback(err);
            return;
        }
        var doHidden = false; // true means: process hidden files and folders
        if (filter && filter.hidden) {
            doHidden = true; // filter requests to process hidden files and folders
        }
        var count = 0; // count the number of "stat" calls queued up
        var countFolders = 0; // count the number of "folders" calls queued up
        var data = []; // the data to return

        // iterate over each file in the dir
        files.forEach(function (name) {
            // ignore files that start with a "." UNLESS requested to process hidden files and folders
            if (doHidden || name.indexOf(".") !== 0) {
                // queue up a "stat" file system call for every file (and return)
                // (all increments happen synchronously here, before any stat
                // callback can run, so the count is complete before it drains)
                count += 1;
                fs.stat(path + "/" + name, function(err, stat) {
                    if (err) {
                        callback(err);
                        return;
                    }
                    var processFile = true;
                    if (filter && filter.callback) {
                        processFile = filter.callback(name, stat, filter);
                    }
                    if (processFile) {
                        var obj = {};
                        obj.name = name;
                        obj.filepath = path + "/" + name;
                        obj.stat = stat;
                        data.push(obj);
                        if (stat.isDirectory()) {
                            countFolders += 1;
                            // perform "recurseDir" on each child folder (which queues up a readdir and returns)
                            (function(obj2) {
                                // obj2 = the "obj" object
                                exports.recurseDir(path + "/" + name, function(err, data2) {
                                    if (err) {
                                        callback(err);
                                        return;
                                    }
                                    // entire child folder info is in "data2" (1 fewer child folders to wait to be processed)
                                    countFolders -= 1;
                                    obj2.children = data2;
                                    if (countFolders <= 0) {
                                        // sub-folders found. This was the last sub-folder to processes.
                                        callback(undefined, data); // callback w/ data
                                    } else {
                                        // more children folders to be processed. do nothing here.
                                    }
                                });
                            })(obj);
                        }
                    }
                    // 1 more file has been processed (or skipped)
                    count -= 1;
                    if (count <= 0) {
                        // all files have been processed.
                        if (countFolders <= 0) {
                            // no sub-folders were found. DONE. no sub-folders found
                            callback(undefined, data); // callback w/ data
                        } else {
                            // children folders were found. do nothing here (we are waiting for the children to callback)
                        }
                    }
                });
            }
        });
        if (count <= 0) { // if no "stat" calls started, then this was an empty folder
            callback(undefined, []); // callback w/ empty
        }
    });
};
|
||||
|
@ -0,0 +1,141 @@
|
||||
var sys = require("sys"),
|
||||
util = require("util"),
|
||||
events = require("events"),
|
||||
colors = require("colors"),
|
||||
recurseDir = require("./deps/recurseDir").recurseDir,
|
||||
Configs = require("./configs");
|
||||
|
||||
/**
 * load: recursively scans the responders directory, require()s every *.js
 * file found and applies each exported Responder function onto a nested
 * namespace (this.Responders) mirroring the directory layout. Emits
 * "responders:loaded" on success or "responders:error" if the scan fails.
 *
 * @param {Object} Foulinks - handle passed through to each responder function
 */
var load = function (Foulinks) {
    var self = this;

    if (typeof this.Responders === "undefined") {
        this.Responders = {};
    }

    // Require one responder file and attach it under responderBase[fileKey].
    var loadResponder = function(filePath, responderBase) {
        var fileKey = filePath.replace(/^(.*)\/|\.js$/g, "");

        if (typeof responderBase[fileKey] === "undefined") {
            responderBase[fileKey] = {}; // was `new Object()`
        }

        try {
            var responderFile = require(filePath).Responder;
            responderFile.forEach(function (func) {
                func.apply(responderBase[fileKey], [Foulinks]);

                responderBase[fileKey].responderFile = filePath;

                if (typeof responderBase[fileKey].init === "function") {
                    responderBase[fileKey].init();
                }
            });

            sys.puts(("[ responders ] ." + filePath.replace(__dirname+"/"+Configs.responderDir, "").replace(/(\.js)$/, ""))[Configs.colors.responders]);
        } catch (err) {
            // Don't keep a cache of failed includes!
            if (typeof process.mainModule.moduleCache[filePath] !== "undefined") {
                delete process.mainModule.moduleCache[filePath];
            }

            delete responderBase[fileKey];

            sys.puts(("[ responders ] ERROR Loading ." +
                filePath.replace(__dirname+"/"+Configs.responderDir, "").replace(/(\.js)$/, "") +
                ": " + err.toString() + "\n" + err.stack.toString())[Configs.colors.failure]);
        }
    };

    // Walk the recurseDir result tree, descending into sub-directories.
    var loadResponders = function(data) {
        for (var fileIndex in data) { // `fileIndex` was an implicit global
            if (data[fileIndex].stat.isDirectory()) {
                loadResponders(data[fileIndex].children);
            } else {
                var baseStart = data[fileIndex].filepath.indexOf(Configs.responderDir) + Configs.responderDir.length + 1;
                var basePath = data[fileIndex].filepath.substr(baseStart);
                var responderBase = self.Responders;
                var baseSections = basePath.split("/");

                // Create/descend one namespace object per directory component
                // (the last section is the file name itself, hence the skip).
                for (var index in baseSections) { // `index` was an implicit global
                    if (index >= baseSections.length - 1) {
                        continue;
                    }

                    if (typeof responderBase[baseSections[index]] === "undefined") {
                        responderBase[baseSections[index]] = {};
                    }

                    responderBase = responderBase[baseSections[index]];
                }

                responderBase.isDir = true;

                loadResponder(data[fileIndex].filepath, responderBase);
            }
        }
    };

    // Keep directories (so recurseDir can descend) and *.js files only.
    var validFile = function(name, stat, filter) {
        if (stat.isDirectory() === true) {
            return true;
        } else {
            if (/\.js$/.test(name)) {
                return true;
            }
        }

        return false;
    };

    recurseDir(__dirname + "/" + Configs.responderDir, function(err, data) {
        if (typeof err === "undefined") {
            loadResponders(data);

            self.emit("responders:loaded");
        } else {
            self.emit("responders:error");
        }
    }, {callback: validFile});
};

exports.load = load;
|
||||
|
||||
/**
 * unload: walks the responder namespace tree (bound as `this`) and removes
 * every loaded responder — calling its deinit() hook when present and
 * purging the module cache so a later load() re-reads the file from disk.
 * Emits "responders:deloaded" on Foulinks once the top-level pass finishes.
 *
 * @param {Object} Foulinks - emitter that receives "responders:deloaded"
 * @param {Boolean} [recurse] - true on internal recursive calls (suppresses the event)
 */
var unload = function(Foulinks, recurse) {
    // We went too far! `this` is a responder itself, not a namespace object.
    if (typeof this.responderFile !== "undefined") {
        return;
    }

    for (var fileKey in this) { // `fileKey` was an implicit global
        if (typeof this[fileKey] === "object") {
            // This is a responder and we should remove it
            if (typeof this[fileKey].responderFile !== "undefined") {
                var filePath = this[fileKey].responderFile;
                if (typeof this[fileKey].deinit === "function") {
                    this[fileKey].deinit();
                }

                // Delete responder cache
                if (typeof process.mainModule.moduleCache[filePath] !== "undefined") {
                    delete process.mainModule.moduleCache[filePath];
                }

                // Single removal point (the original also deleted inside the
                // deinit branch, which was redundant).
                delete this[fileKey];

                // NOTE(review): Configs.colors.responders is not defined in the
                // visible configs/freenode.js — confirm another config supplies it.
                sys.puts(("[ responders ] removed: " + fileKey)[Configs.colors.responders]);
            // Sub responders follow... maybe
            } else {
                unload.apply(this[fileKey], [Foulinks, true]);
            }
        }
    }

    if (recurse !== true) {
        Foulinks.emit("responders:deloaded");
    }
};

exports.unload = unload;
|
||||
|
Loading…
Reference in new issue