events = require("events")
emitter = events.EventEmitter
|
_ = require "underscore"
|
colors = require "colors"
|
fs = require "fs"
|
mkdir = require( "mkdirp" ).mkdirp
|
path = require "path"
|
express = require "express"
# Console logger with colorized levels. onEvent / onStep honor the
# module-level `quiet` flag; completion and error messages always print.
class Log

	# Verbose progress detail (suppressed when quiet).
	onEvent: (x) ->
		unless quiet
			console.log "   #{x}"

	# Major build step, shown in blue (suppressed when quiet).
	onStep: (x) ->
		unless quiet
			console.log "#{x}".blue

	# Success message, shown in green — always printed.
	onComplete: (x) ->
		console.log "#{x}".green

	# Error message, shown in red — always printed.
	onError: (x) ->
		console.log "!!! #{x} !!!".red
# Shared module-level logger instance used throughout this file.
log = new Log()
exports.log = log
_ = require "underscore"
path = require "path"
Commander = require( "commander" ).Command
|
# Mutable module-wide build configuration; populated by Configuration and
# read by every pipeline class below.
config = { }

# Conventional layout for a "site" project: separate source / style / markup
# trees, each with its own output destination(s).
siteConfig =
	"source": "src"
	"style": "style"
	"markup": "markup"
	"output":
		{
			"source": [ "lib", "site/js" ],
			"style": [ "css", "site/css" ],
			"markup": "site/"
		}
	"spec": "spec"
	"ext": "ext"
	"lint": {}
	"uglify": {}
	"cssmin": {}
	"hosts": {
		"/": "site"
	}
# Conventional layout for a "lib" project: one source tree, one output folder.
libConfig =
	"source": "src"
	"output": "lib"
	"spec": "spec"
	"ext": "ext"
	"lint": {}
	"uglify": {}
	"hosts": {
		"/": "spec"
	}
# Default Mocha options, applied when --mocha is passed without overrides.
defaultMocha =
	growl: true
	ignoreLeaks: true
	reporter: "spec"
	ui: "bdd"
	colors: true
# Default documentation-generator settings.
defaultDoc =
	generator: "docco"
	output: "docs"
# Build-mode flags; all off until configuration decides otherwise.
continuous = test = inProcess = quiet = debug = false
# Suffixes used for post-processed file variants.
ext =
	gzip: "gz"
	uglify: "min"
	cssmin: "min"
# Maps a file extension to the content category it belongs to.
extensionLookup =
	".css": "style"
	".scss": "style"
	".sass": "style"
	".less": "style"
	".stylus": "style"
	".js": "source"
	".coffee": "source"
	".markdown": "markup"
	".md": "markup"
	".html": "markup"
|
# Builds the module-level `config` object: parses CLI options, loads or
# scaffolds build files, normalizes output settings, and resolves
# finalize / wrap content blocks before a build starts.
class Configuration
	constructor: ( @fp, @scheduler, @log ) ->

	# Parse argList (process.argv style) and invoke onConfig( config, stop )
	# once configuration is ready. `stop` is true for scaffolding commands
	# that should exit instead of building.
	configure: ( argList, onConfig ) ->
		self = this
		command = new Commander()
		command
			.version("0.7.7")
			.option( "-b, --build [build file]", "Use a custom build file", "./build.json" )
			.option( "--ci", "Run a continuous integration build" )
			.option( "--host", "Setup a static HTTP host" )
			.option( "--lib [project]", "Create a lib project at the folder [project]" )
			.option( "--libfile [file name]", "Create a new lib build file named [file name]" )
			.option( "--site [project]", "Create a site project at the folder [project]" )
			.option( "--sitefile [file name]", "Create a new site build file named [file name]" )
			.option( "--mocha", "Run specifications using Mocha" )

			.option( "--ape", "Create annotated source using ape" )
			.option( "-q, --quiet", "Only print completion and error messages" )
		command.parse( argList );
		# NOTE(review): -q/--quiet is parsed but the module-level `quiet`
		# flag is never updated from command.quiet — confirm intent.
		if command.libfile or command.sitefile
			# Only write a conventional build file, then stop.
			name = command.libfile or= command.sitefile
			type = if command.sitefile then 'site' else 'lib'
			@writeConfig type, "#{name}.json", () ->
				self.log.onComplete "Created #{ type } build file - #{ name }"
				onConfig config, true
		else if command.site or command.lib
			# Scaffold a new project directory tree plus its build.json.
			type = if command.site then 'site' else 'lib'
			scaffold = command.site or= command.lib
			config = if type == 'site' then siteConfig else libConfig
			@log.onStep "Creating scaffolding for new #{ type } project"

			self.ensurePaths( () ->
				self.writeConfig( type, scaffold + "/build.json", () ->
					self.log.onComplete "Scaffold ( #{ scaffold } ) created."
					onConfig config, true
				)
			, scaffold )
		else
			# Normal build: load the build file, or fall back to conventions.
			buildFile = command.build
			@log.onStep "Checking for #{ buildFile }"
			exists = @fp.pathExists buildFile
			@prepConfig exists, buildFile, () ->
				if command.host
					config.host = true
				if command.ci
					config.continuous = true
				if command.mocha
					config.mocha = defaultMocha
				if command.ape
					config.docs = defaultDoc
					config.docs.generator = "ape"
				if command.docco
					config.docs = defaultDoc

				self.ensurePaths () ->
					onConfig config

	# NOTE(review): relies on a `buildLibTemplate` free variable that is not
	# defined anywhere in this file — appears to be dead legacy code; confirm.
	createLibBuild: () ->

		if buildLibTemplate
			output = if buildLibTemplate == true then "build.json" else buildLibTemplate
			writeConfig "lib", output
			global.process.exit(0)
		config

	# NOTE(review): same situation as createLibBuild — `buildSiteTemplate`
	# is undefined here; appears to be dead legacy code.
	createSiteBuild: () ->

		if buildSiteTemplate
			output = if buildSiteTemplate == true then "build.json" else buildSiteTemplate
			writeConfig "site", output
			global.process.exit(0)
		config

	# Create every directory the configured build needs (under an optional
	# `prefix`, used when scaffolding). onComplete fires when all exist.
	ensurePaths: ( onComplete, prefix ) ->
		self = this
		prefix = prefix or= ""
		config.working = config.working || "./tmp"
		fp = @fp
		paths = [
			config[ "source" ]
			config[ "style" ]
			config[ "markup" ]
			config[ "spec" ]
			config[ "ext" ]
			config[ "working" ]
		]

		if config.docs
			paths.push config.docs.output
		outputList = []

		if _.isObject config.output
			outputList = _.flatten config.output
		else

			outputList = [ config.output ]
		paths = paths.concat outputList

		# When custom output names contain directories, ensure those nested
		# paths exist under every output folder as well.
		name = config.name
		if name
			for output in outputList
				if _.isString name
					nestedPath = path.dirname name
					if nestedPath
						paths.push path.join output, nestedPath
				else
					nestedPaths = _.map _.flatten( name ), ( x ) -> path.join output, path.dirname( x )
					paths = paths.concat nestedPaths
		# Ignore individual failures; missing dirs surface later anyway.
		worker = ( p, done ) ->
			try
				fp.ensurePath [ prefix, p ], () ->
					done()
			catch err
				done()
		@log.onStep "Ensuring project directory structure"
		@scheduler.parallel paths, worker, onComplete

	# Load the build file when it exists, otherwise fall back to project
	# conventions; normalize the result either way.
	prepConfig: ( exists, file, onComplete ) ->
		self = this
		onDone = () -> self.normalizeConfig onComplete
		unless exists
			@loadConvention( onDone )
		else
			@loadConfig( file, onDone )

	# Parse the JSON build file into the module-level `config`, honoring any
	# custom post-processing extensions.
	loadConfig: ( file, onComplete ) ->
		@log.onStep "Loading config..."
		fp = @fp
		fp.read file, ( content ) ->
			config = JSON.parse( content )
			if config.extensions
				ext.gzip = config.extensions.gzip || ext.gzip
				ext.uglify = config.extensions.uglify || ext.uglify

			onComplete()

	# No build file: pick site or lib conventions based on a ./site folder.
	loadConvention: ( onComplete ) ->
		isSite = @fp.pathExists "./site"
		conventionConfig = if isSite then siteConfig else libConfig
		@log.onStep "No build file found, using #{ if isSite then 'site' else 'lib' } conventions"
		config = conventionConfig
		onComplete()

	# Expand shorthand settings (single output string, default mocha / docs
	# options) and resolve finalize / wrap content blocks asynchronously.
	normalizeConfig: ( onComplete ) ->
		self = this
		fp = @fp
		config.output = config.output || "lib"
		# A single output string applies to all three content types.
		if _.isString config.output
			outputPath = config.output
			config.output =
				style: outputPath
				source: outputPath
				markup: outputPath
		calls = []

		finalize = config.finalize
		if finalize
			calls.push ( done ) ->
				self.getFinalization finalize, ( result ) ->
					config.finalize = result
					done()

		wrap = config.wrap
		if wrap
			calls.push ( done ) ->
				self.getWrap wrap, ( result ) ->
					config.wrap = result
					done()
		if config.mocha
			config.mocha = _.extend defaultMocha, config.mocha
		if config.docs
			config.docs = _.extend defaultDoc, config.docs

		if calls.length > 0
			@scheduler.parallel calls,
				( call, done ) ->
					call( done )
			, () -> onComplete()
		else
			onComplete()

	# Resolve the finalize (header / footer) specification: a flat block
	# yields { source }, otherwise per-type blocks are aggregated.
	# "*-file" variants read their content from disk.
	getFinalization: ( original, onComplete ) ->
		self = this
		finalization = {}
		result = {}
		aggregation = {}
		aggregate = @scheduler.aggregate

		if not original or _.isEqual original, {}
			onComplete finalization

		else if original.header or
			original["header-file"] or
			original.footer or
			original["footer-file"]

			@getContentBlock original, "header", aggregation
			@getContentBlock original, "footer", aggregation

			if _.isEqual aggregation, {}
				onComplete finalization
			else
				aggregate aggregation, ( constructed ) ->
					finalization.source = constructed
					onComplete finalization

		else
			sources = {}
			blocks = {
				"source": original[ "source" ],
				"style": original[ "style" ],
				"markup": original[ "markup" ]
			}
			_.each( blocks, ( block, name ) ->
				subAggregate = {}
				self.getContentBlock block, "header", subAggregate
				self.getContentBlock block, "footer", subAggregate
				sources[ name ] = ( done ) ->
					aggregate subAggregate, done
			)
			aggregate sources, onComplete

	# Same resolution as getFinalization, but for wrap (prefix / suffix).
	getWrap: ( original, onComplete ) ->
		self = this
		wrap = {}
		result = {}
		aggregation = {}
		aggregate = @scheduler.aggregate

		if not original or _.isEqual original, {}
			onComplete wrap

		else if original.prefix or
			original["prefix-file"] or
			original.suffix or
			original["suffix-file"]

			@getContentBlock original, "prefix", aggregation
			@getContentBlock original, "suffix", aggregation

			if _.isEqual aggregation, {}
				onComplete wrap
			else
				aggregate aggregation, ( constructed ) ->
					wrap.source = constructed
					onComplete wrap

		else
			sources = {}
			blocks = {
				"source": original[ "source" ],
				"style": original[ "style" ],
				"markup": original[ "markup" ]
			}
			_.each( blocks, ( block, name ) ->
				subAggregate = {}
				self.getContentBlock block, "prefix", subAggregate
				self.getContentBlock block, "suffix", subAggregate
				sources[ name ] = ( done ) -> aggregate subAggregate, done
			)
			aggregate sources, onComplete

	# Install a thunk under aggregation[ property ] that yields the block's
	# content: file contents when "<property>-file" points at a real file,
	# the literal value otherwise, or "" as the fallback.
	getContentBlock: ( source, property, aggregation ) ->
		aggregation[ property ] = ( done ) -> done ""
		fp = @fp
		if source
			propertyPath = source["#{ property }-file"]
			propertyValue = source[ property ]
			if propertyPath and @fp.pathExists propertyPath
				aggregation[ property ] = ( done ) ->
					fp.read propertyPath, ( content ) ->
						done content
			else if propertyValue
				aggregation[ property ] = ( done ) -> done propertyValue

	# Serialize the conventional config for `type` to `name` as pretty JSON.
	writeConfig: ( type, name, onComplete ) ->
		config = if type == "lib" then libConfig else siteConfig
		log = @log
		json = JSON.stringify( config, null, "\t" )
		@fp.write name, json, () ->
			log.onComplete "#{name} created successfully!"
			onComplete()
exports.configuration = Configuration
_ = require "underscore"
|
# Lightweight async flow-control helpers (parallel fan-out, sequential
# pipeline, named aggregation) used throughout the build.
class Scheduler
	constructor: () ->

	# Run worker( item, done ) for every item concurrently, collecting the
	# truthy results; onComplete( results ) fires once all workers report.
	parallel: ( items, worker, onComplete ) ->
		# Fix: guard must RETURN — the original fell through and dereferenced
		# a null/undefined `items` list after already calling onComplete.
		if not items or items.length == 0
			return onComplete []
		count = items.length
		results = []

		done = ( result ) ->
			count = count - 1
			if result
				results.push result
			if count == 0
				onComplete( results )

		worker( item, done ) for item in items
		return

	# Feed `item` through the workers in order; each worker receives the
	# previous worker's product. onComplete gets the final product.
	pipeline: ( item, workers, onComplete ) ->
		# Fix: guard must RETURN — the original continued on and shifted from
		# an empty worker list (calling `undefined item, done`), and could
		# invoke onComplete twice.
		if item == undefined or not workers or workers.length == 0
			return onComplete item || {}

		iterate = ( done ) ->
			worker = workers.shift()
			worker item, done

		# (dead `done = ->` pre-assignment from the original removed)
		done = ( product ) ->
			item = product
			if workers.length == 0
				onComplete( product )
			else
				iterate done

		iterate done

	# Invoke every function in the `calls` hash ( name -> fn( callback ) ) and
	# collect each callback's result under its name; onComplete( results )
	# fires once every key has reported a (non-undefined) value.
	aggregate: ( calls, onComplete ) ->
		results = {}

		isDone = () ->
			_.chain( calls ).keys().all( ( x ) -> results[ x ] != undefined ).value()

		getCallback = ( name ) ->
			( result ) ->
				results[ name ] = result
				if isDone()
					onComplete results

		# Fix: complete immediately for an empty call set — otherwise no
		# callback ever fires and onComplete is never invoked.
		if not calls or _.isEqual calls, {}
			return onComplete results

		_.each( calls, ( call, name ) ->
			callback = getCallback name
			call callback
		)
exports.scheduler = Scheduler
fs = require "fs"
path = require "path"
_ = require "underscore"
|
# Recursively walks a directory tree, producing a flat list of file paths.
class FSCrawler
	constructor: ( @scheduler ) ->
		_.bindAll( this )

	# Crawl `directory` recursively; onComplete receives every file found.
	# An empty / missing directory completes with an empty list.
	crawl: ( directory, onComplete ) ->
		self = this
		fileList = []
		forAll = @scheduler.parallel
		if directory and directory != ""

			directory = path.resolve directory

			fs.readdir directory, ( err, contents ) ->

				if not err and contents.length > 0
					qualified = []

					for item in contents
						qualified.push path.resolve directory, item

					# Split entries into files and sub-directories, then
					# recurse into the sub-directories in parallel.
					self.classifyHandles qualified, ( files, directories ) ->
						fileList = fileList.concat files

						if directories.length > 0
							forAll directories, self.crawl, ( files ) ->
								fileList = fileList.concat _.flatten files
								onComplete fileList

						else
							onComplete fileList

				else
					onComplete fileList

		else
			onComplete fileList

	# Stat every path in `list` and invoke onComplete( files, directories ).
	classifyHandles: ( list, onComplete ) ->
		if list and list.length > 0
			@scheduler.parallel list, @classifyHandle, ( classified ) ->
				files = []
				directories = []
				for item in classified
					if item.isDirectory
						directories.push item.file
					# Fix: classifyHandle reports failures under `err`, but
					# this previously tested `item.error` (always undefined),
					# so unreadable handles were misclassified as files.
					else if not item.err
						files.push item.file
				onComplete files, directories
		else
			onComplete [], []

	# Stat a single path; errors are reported in the result object ( `err` )
	# rather than thrown, so a bad entry cannot abort the crawl.
	classifyHandle: ( file, onComplete ) ->
		fs.stat file, ( err, stat ) ->
			if err
				onComplete { file: file, err: err }
			else
				onComplete { file: file, isDirectory: stat.isDirectory() }
exports.crawler = FSCrawler
fs = require "fs"
_ = require "underscore"
|
# Thin file-system facade: path building, existence checks, and read /
# write / copy / transform helpers used by the rest of the build.
# NOTE(review): several methods log through the module-level `log` instead
# of the injected @log — confirm that is intentional.
class FSProvider
	constructor: ( @crawler, @log ) ->
		_.bindAll this

	# Join an array path spec into a single path; strings pass through.
	buildPath: ( pathSpec ) ->
		if not pathSpec
			""
		else
			fullPath = pathSpec
			if _.isArray( pathSpec )
				fullPath = path.join.apply {}, pathSpec
			fullPath

	# Remove a file if it exists.
	# NOTE(review): onDeleted is never invoked when the path does not exist —
	# confirm callers tolerate the missing callback.
	delete: ( filePath, onDeleted ) ->
		filePath = @buildPath filePath
		if @pathExists filePath
			fs.unlink filePath, ( err ) ->
				onDeleted()

	# Create the directory (recursively, via mkdirp) unless it exists.
	# On failure the error is logged and onComplete is NOT called.
	ensurePath: ( pathSpec, onComplete ) ->
		pathSpec = @buildPath pathSpec
		path.exists pathSpec, ( exists ) ->
			unless exists

				mkdir pathSpec, "0755", ( err ) ->

					if err
						log.onError "Could not create #{pathSpec}. #{err}"
					else
						onComplete()
			else
				onComplete()

	# Recursively list all files beneath filePath (delegates to the crawler).
	getFiles: ( filePath, onFiles ) ->
		if not filePath
			onFiles []
		else
			filePath = @buildPath filePath
			files = []
			@crawler.crawl filePath, onFiles

	# Stream-copy `from` to `to`, invoking onComplete when the read ends.
	copy: ( from, to, onComplete ) ->
		from = this.buildPath from
		to = this.buildPath to
		readStream = undefined
		writeStream = fs.createWriteStream( to )
		( readStream = fs.createReadStream( from ) ).pipe( writeStream )
		readStream.on 'end', () ->
			if writeStream
				writeStream.destroySoon()
			onComplete()

	# Synchronous existence check for a path spec.
	pathExists: ( pathSpec ) ->
		pathSpec = this.buildPath pathSpec
		path.existsSync pathSpec

	# Read a UTF-8 file; on error, logs and yields ( "", err ).
	read: ( filePath, onContent ) ->
		filePath = @buildPath filePath
		fs.readFile filePath, "utf8", ( err, content ) ->
			if err
				log.onError "Could not read #{ filePath } : #{ err }"
				onContent "", err
			else
				onContent content

	# Synchronous read; returns the content, or the error object on failure.
	readSync: ( filePath ) ->
		filePath = @buildPath filePath
		try
			fs.readFileSync filePath, "utf8"
		catch err
			log.onError "Could not read #{ filePath } : #{ err }"
			err

	# Read filePath, run `transform( content, cb )` on it, and write the
	# result to outputPath; onComplete receives the error, if any.
	transform: ( filePath, transform, outputPath, onComplete ) ->
		self = this
		filePath = @buildPath filePath
		outputPath = @buildPath outputPath
		this.read(
			filePath,
			( content ) ->
				transform content, ( newContent, error ) ->
					if not error
						self.write outputPath, newContent, onComplete
					else
						onComplete error
		)

	# Write a UTF-8 file; on error, logs and passes the error to onComplete.
	write: ( filePath, content, onComplete ) ->
		filePath = @buildPath filePath
		fs.writeFile filePath, content, "utf8", ( err ) ->
			if err
				log.onError "Could not write #{ filePath } : #{ err }"
				onComplete err
			else
				onComplete()
exports.fsProvider = FSProvider
|
coffeeScript = require "coffee-script"
|
less = require( "less" )
|
stylus = require( "stylus" )
|
haml = require( "haml" )
|
marked = require( "marked" )
marked.setOptions { sanitize: false }
|
coffeeKup = require( "coffeekup" )
|
_ = require "underscore"
|
# Compiles working-directory files to their target format based on file
# extension; files without a registered compiler pass through untouched.
class Compiler
	constructor: (@fp, @log) ->
		_.bindAll( this )

	# Compile `file` in place within its working path. On success the file
	# descriptor is renamed to the compiled extension and passed on; on
	# failure the error is logged and passed to onComplete instead.
	compile: ( file, onComplete ) ->
		self = this
		ext = file.ext()
		newExt = @extensionMap[ ext ]
		newFile = file.name.replace ext, newExt
		log = @log
		log.onEvent "Compiling #{ file.name } to #{ newFile }"
		compiler = @compilers[ ext ]
		if compiler
			@fp.transform(
				[ file.workingPath, file.name ],
				compiler,
				[ file.workingPath, newFile ],
				( err ) ->
					unless err
						file.name = newFile
						onComplete file
					else
						log.onError "Error compiling #{ file.name }: \r\n #{ err }"
						onComplete err
			)
		else
			onComplete file

	# Source extension -> compiled extension.
	extensionMap:
		".js": ".js"
		".css": ".css"
		".html": ".html"
		".coffee" : ".js"
		".kup": ".html"
		".less": ".css"
		".styl": ".css"
		".sass": ".css"
		".scss": ".css"
		".haml": ".html"
		".md": ".html"
		".markdown": ".html"

	# Per-extension compile functions: ( content, onContent( output, err ) ).
	compilers:
		".coffee" : ( content, onContent ) ->
			try
				js = coffeeScript.compile content, { bare: true }
				onContent js
			catch error
				onContent "", error
		".less" : ( content, onContent ) ->
			try
				# Fix: forward less.render's error argument (previously
				# dropped, which silently wrote `undefined` output on failed
				# compiles); mirrors the ".styl" handler below.
				less.render( content, {}, (e, css) -> onContent( css, e ) )
			catch error
				onContent "", error
		".sass" : ( content, onContent ) ->
			try
				# NOTE(review): sass compilation is not implemented — content
				# passes through unchanged; confirm this is intended.
				onContent content
			catch error
				onContent "", error
		".scss" : ( content, onContent ) ->
			try
				# NOTE(review): scss compilation is not implemented — content
				# passes through unchanged; confirm this is intended.
				onContent content
			catch error
				onContent "", error
		".styl" : ( content, onContent ) ->
			try
				stylus.render( content, {}, (e, css) -> onContent( css, e ) )
			catch error
				onContent "", error
		".haml" : ( content, onContent ) ->
			try
				html = haml.render content
				onContent html
			catch error
				onContent "", error
		".md" : ( content, onContent ) ->
			try
				onContent( marked.parse( content ) )
			catch error
				onContent "", error
		".markdown" : ( content, onContent ) ->
			try
				onContent( marked.parse( content ) )
			catch error
				onContent "", error
		".kup" : ( content, onContent ) ->
			try
				html =( coffeeKup.compile content, {} )()
				onContent html
			catch error
				onContent "", error
exports.compiler = Compiler
_ = require "underscore"
path = require "path"
|
# Resolves //import( "..." ) style directives: builds the import graph for
# a file list, then splices imported content into each dependent file.
class Combiner
	constructor: ( @fp, @scheduler, @findPatterns, @replacePatterns ) ->

	# Find imports for every file, count dependents, then combine each file.
	combineList: ( list, onComplete ) ->
		self = this
		forAll = @scheduler.parallel

		findImports = _.bind( ( file, done ) ->
			self.findImports file, list, done
		, this )

		findDependents = _.bind( ( file, done ) ->
			self.findDependents file, list, done
		, this )

		combineFile = _.bind( ( file, done ) ->
			self.combineFile file, done
		, this )

		forAll list, findImports, () ->
			for f1 in list
				findDependents f1, list
			forAll list, combineFile, onComplete

	# Combine one file after recursively combining everything it imports;
	# `combined` guards against reprocessing shared dependencies.
	combineFile: ( file, onComplete ) ->
		self = this
		forAll = @scheduler.parallel

		if file.combined
			onComplete()

		else
			combineFile = ( file, done ) ->
				self.combineFile file, done
			dependencies = file.imports
			if dependencies and dependencies.length > 0
				forAll dependencies, combineFile, () ->
					self.combine file, () ->
						file.combined = true
						onComplete()
			else
				self.combine file, () ->
					file.combined = true
					onComplete()

	# Scan a file's content for import directives and record the matching
	# file descriptors from `list` on file.imports.
	# NOTE(review): an import that does not resolve within `list` pushes
	# `undefined` into file.imports, which would break combineFile later —
	# confirm every import resolves.
	findImports: ( file, list, onComplete ) ->
		self = this
		imports = []
		@fp.read [ file.workingPath, file.name ], ( content ) ->

			for pattern in self.findPatterns
				imports = imports.concat content.match pattern
			# match() yields null on no match; drop those entries
			imports = _.filter imports, ( x ) -> x

			for imported in imports
				# extract the quoted path, then match it against each list
				# entry's path relative to the importing file
				importName = ( imported.match ///['\"].*['\"]/// )[ 0 ].replace(///['\"]///g, "" )
				importedFile = _.find( list, ( i ) ->
					relativeImportPath = path.relative( path.dirname( file.fullPath ), path.dirname( i.fullPath ) )
					relativeImport = self.fp.buildPath( [ relativeImportPath, i.name ] )
					relativeImport == importName )
				file.imports.push importedFile
			onComplete()

	# Increment file.dependents for every list entry that imports `file`.
	findDependents: ( file, list ) ->
		imported = ( importFile ) ->
			file.fullPath == importFile.fullPath
		for item in list
			if _.any item.imports, imported then file.dependents++

	# Replace each import directive in `file` with the imported content,
	# piping the text through one replace step per import.
	combine: ( file, onComplete ) ->
		self = this
		unless file.combined
			pipe = @scheduler.pipeline
			fp = @fp
			if file.imports.length > 0

				steps = for imported in file.imports
					self.getStep file, imported
				fp.read [ file.workingPath, file.name ], ( main ) ->
					pipe main, steps, ( result ) ->
						fp.write [ file.workingPath, file.name ], result, () -> onComplete()
			else
				onComplete()
		else
			onComplete()

	# Build a pipeline step that splices `imported` into `file`'s content.
	getStep: ( file, imported ) ->
		self = this
		( text, onDone ) -> self.replace text, file, imported, onDone

	# Substitute the directive matching `imported` with its content,
	# carrying the directive's leading whitespace onto every spliced line.
	replace: ( content, file, imported, onComplete ) ->
		patterns = @replacePatterns
		pipe = @scheduler.pipeline
		source = imported.name
		working = imported.workingPath
		relativeImportPath = path.relative( path.dirname( file.fullPath ), path.dirname( imported.fullPath ) )
		relativeImport = @fp.buildPath( [ relativeImportPath, imported.name ] )
		@fp.read [ working, source ], ( newContent ) ->
			steps = for pattern in patterns

				( current, done ) ->
					# rebuild the pattern with the literal import path in
					# place of the "replace" placeholder
					stringified = pattern.toString().replace ///replace///, relativeImport
					stringified = stringified.substring( 1, stringified.length - 2 )
					fullPattern = new RegExp stringified, "g"
					capture = fullPattern.exec( content )
					if capture and capture.length > 1

						whiteSpace = capture[1]

						newContent = "#{ whiteSpace }" + newContent.replace ///\n///g, "\n#{ whiteSpace }"
					sanitized = current.replace( fullPattern, newContent.replace( "\$", "$" ) ).replace( "$", "$" )
					done sanitized
			pipe content, steps, ( result ) ->
				onComplete result
exports.combiner = Combiner
|
jsp = require( "uglify-js" ).parser
pro = require( "uglify-js" ).uglify
|
jslint = require( "readyjslint" ).JSLINT
|
cssminifier = require "cssmin"
|
# Post-processing pipeline for CSS output: wrap -> finalize -> optional
# cssmin minification (minified copies get their own finalize pass).
class StylePipeline
	constructor: ( @config, @fp, @minifier, @scheduler, @log ) ->
		_.bindAll( this )

	# Run the full pipeline over `files`; onComplete receives the original
	# descriptors plus any minified copies.
	process: ( files, onComplete ) ->
		self = this
		forAll = @scheduler.parallel
		forAll files, @wrap, () ->
			minified = []
			if self.config.cssmin
				# clone so minification does not rename the originals
				minified = _.map( files, ( x ) -> _.clone x )
			forAll files, self.finalize, () ->
				self.log.onStep "Finalizing CSS"
				forAll minified, self.minify, () ->
					if minified.length > 0
						self.log.onStep "Minifying CSS"
					forAll minified, self.finalize, () ->
						onComplete( files.concat minified )

	# Minify one file to <name>.min.css when cssmin is configured.
	minify: ( file, onComplete ) ->
		if @config.cssmin
			@log.onEvent "Minifying #{ file.name }"
			self = this
			ext = file.ext()
			newFile = file.name.replace ext, ".min.css"
			self.fp.transform(
				[ file.workingPath, file.name ],
				( content, onTransform ) ->
					onTransform( self.minifier.cssmin content )
				, [ file.workingPath, newFile ],
				( ) ->
					file.name = newFile
					onComplete()
			)
		else
			onComplete()

	# Prepend / append the configured style header / footer in place.
	finalize: ( file, onComplete ) ->
		self = this
		if @config.finalize and @config.finalize.style
			@log.onEvent "Finalizing #{ file.name }"
			header = @config.finalize.style.header
			footer = @config.finalize.style.footer
			@fp.transform(
				[ file.workingPath, file.name ],
				( content, onTransform ) ->
					if header
						content = header + content
					if footer
						content = content + footer
					onTransform content
				, [ file.workingPath, file.name ],
				onComplete
			)
		else
			onComplete()

	# Prepend / append the configured style prefix / suffix in place.
	wrap: ( file, onComplete ) ->
		self = this
		if @config.wrap and @config.wrap.style
			@log.onEvent "Wrapping #{ file.name }"
			prefix = @config.wrap.style.prefix
			suffix = @config.wrap.style.suffix
			@fp.transform(
				[ file.workingPath, file.name ],
				( content, onTransform ) ->
					if prefix
						content = prefix + content
					if suffix
						content = content + suffix
					onTransform content
				, [ file.workingPath, file.name ],
				onComplete
			)
		else
			onComplete()
|
# Post-processing pipeline for JavaScript output: wrap -> finalize ->
# optional uglify minification (minified copies get their own finalize).
class SourcePipeline
	constructor: ( @config, @fp, @minifier, @scheduler, @log ) ->
		_.bindAll( this )

	# Run the full pipeline over `files`; onComplete receives the original
	# descriptors plus any minified copies.
	process: ( files, onComplete ) ->
		self = this
		forAll = @scheduler.parallel
		forAll files, @wrap, () ->
			minify = []
			if self.config.uglify
				# clone so minification does not rename the originals
				minify = _.map( files, ( x ) -> _.clone x )
			forAll files, self.finalize, () ->
				self.log.onStep "Finalizing source files"
				forAll minify, self.minify, () ->
					if minify.length > 0
						self.log.onStep "Minifying source files"
					forAll minify, self.finalize, () ->
						onComplete( files.concat minify )

	# Minify one file to <name>.min.js unless excluded by config.uglify.
	# On a minifier error the original content is kept and the error logged.
	minify: ( file, onComplete ) ->
		exclusions = @config.uglify?.exclude || []
		isExcluded = _.any exclusions, ( x ) -> x == file.name
		if @config.uglify and not isExcluded
			self = this
			ext = file.ext()
			newFile = file.name.replace ext, ".min.js"
			@log.onEvent "Minifying #{ newFile }"
			@fp.transform(
				[ file.workingPath, file.name ],
				( content, onTransform ) ->
					self.minifier content, ( err, result ) ->
						if err
							self.log.onError "Error minifying #{ file.name } : \r\n\t #{ err }"
							result = content
						onTransform( result )
				, [ file.workingPath, newFile ],
				() ->
					file.name = newFile
					onComplete()
			)
		else
			onComplete()

	# Prepend / append the configured source header / footer in place.
	finalize: ( file, onComplete ) ->
		self = this
		if @config.finalize and @config.finalize.source
			@log.onEvent "Finalizing #{ file.name }"
			header = @config.finalize.source.header
			footer = @config.finalize.source.footer
			@fp.transform(
				[ file.workingPath, file.name ],
				( content, onTransform ) ->
					if header
						content = header + content
					if footer
						content = content + footer
					onTransform content
				, [ file.workingPath, file.name ],
				() ->
					onComplete()
			)
		else
			onComplete()

	# Prepend / append the configured source prefix / suffix in place.
	wrap: ( file, onComplete ) ->
		self = this
		if @config.wrap and @config.wrap.source
			@log.onEvent "Wrapping #{ file.name }"
			prefix = @config.wrap.source.prefix
			suffix = @config.wrap.source.suffix
			@fp.transform(
				[ file.workingPath, file.name ],
				( content, onTransform ) ->
					if prefix
						content = prefix + content
					if suffix
						content = content + suffix
					onTransform content
				, [ file.workingPath, file.name ],
				() ->
					onComplete()
			)
		else
			onComplete()
|
# Placeholder for a markup post-processing pipeline; currently unused —
# PostProcessor supplies a pass-through object for markup instead.
class MarkupPipeline
	constructor: () ->
|
# Aggregates the per-type post-processing pipelines ( style / source /
# markup ); markup is currently a pass-through.
class PostProcessor
	constructor: ( @config, @fp, @scheduler, @log ) ->
		# uglify adapter: parse -> mangle -> squeeze, reporting failures
		# through the ( err, code ) callback instead of throwing
		uglify = ( source, callback ) ->
			try
				ast = jsp.parse source
				ast = pro.ast_mangle ast
				ast = pro.ast_squeeze ast
				callback undefined, pro.gen_code ast
			catch err
				callback err, ""
		@style = new StylePipeline @config, @fp, cssminifier, @scheduler, @log
		@source = new SourcePipeline @config, @fp, uglify, @scheduler, @log
		@markup = {
			process: ( files, onComplete ) -> onComplete files
		}
exports.postProcessor = PostProcessor
|
|
ape = require "ape"
|
# Generates annotated source (via ape) for finished files when config.docs
# is enabled; otherwise installs a no-op generator.
class Documenter
	constructor: ( @config, @fp, @scheduler, @log ) ->
		self = this
		_.bindAll( this )
		if @config.docs

			@generator = @runApe
		else
			# NOTE(review): slice returns an ARRAY here, so `callback()`
			# would throw if this path ever ran with 5+ args. Anvil only
			# calls generate when config.docs is set, so this appears dead —
			# confirm.
			@generator = () ->
				callback = Array.prototype.slice.call arguments, 4
				if callback
					callback()

	# Document every file in parallel; logs completion when all are done.
	generate: ( files ) ->
		self = this
		if files && files.length > 0
			@log.onEvent "Creating annotated source for: #{ _.pluck( files, 'name' ).toString() }"
			@scheduler.parallel files, @document, () ->
				self.log.onComplete "Code annotation completed"

	# Annotate one file and write <name>.html to the docs output folder.
	document: ( file, onComplete ) ->
		self = this
		language = ape.get_language file.name
		ext = file.ext()
		newFile = file.name.replace ext, ".html"
		@log.onEvent "Annotation for #{ file.name }"
		@fp.read [ file.workingPath, file.name ], ( content ) ->
			self.generator language, ext, newFile, content, ( doc ) ->
				self.fp.write [ self.config.docs.output, newFile ], doc, onComplete

	# Delegate to ape's documentation generator (html output); ape errors
	# are ignored and the (possibly undefined) result is passed through.
	runApe: ( language, extension, newFile, code, onComplete ) ->
		ape.generate_doc code, language, 'html', null, ( err, result ) -> onComplete result
|
# Build orchestrator: runs the source and style pipelines, then markup
# (which may embed their output), then cleans the working directory and
# fires @callback.
class Anvil
	constructor: ( @fp, @compiler, @combiner, @documenter, @scheduler, @postProcessor, @log, @callback ) ->
		@buildNumber = 0
		@inProcess = false
	# Extensions eligible for the build. ( Fix: the original listed ".css"
	# twice; the duplicate was harmless but misleading and has been removed.
	# NOTE(review): ".sass"/".scss"/".kup" have compilers but are absent
	# here — confirm whether they should participate. )
	extensions: [ ".js", ".coffee", ".html", ".haml", ".markdown", ".md", ".css", ".styl", ".less" ]

	# Entry point: kick off the source and style pipelines for `config`.
	# Re-entrant calls are ignored while a build is in flight.
	build: ( config ) ->
		if not @inProcess
			@initialize( config )
			@log.onStep "Build #{ @buildNumber } initiated"
			@inProcess = true
			@buildSource()
			@buildStyle()

	# Markup runs last ( see stepComplete ) so compiled assets exist first.
	buildMarkup: () ->
		findPatterns = [ ///[\<][!][-]{2}.?import[(]?.?['\"].*['\"].?[)]?.?[-]{2}[\>]///g ]
		replacePatterns = [ ///([ \t]*)[\<][!][-]{2}.?import[(]?.?['\"]replace['\"].?[)]?.?[-]{2}[\>]///g ]
		@processType( "markup", findPatterns, replacePatterns )

	# Source import directives: // import "..." or ### import "..." ###.
	buildSource: () ->
		findPatterns = [ ///([/]{2}|[\#]{3}).?import.?[(]?.?[\"'].*[\"'].?[)]?[;]?.?([\#]{0,3})///g ]
		replacePatterns = [ ///([ \t]*)([/]{2}|[\#]{3}).?import.?[(]?.?[\"']replace[\"'].?[)]?[;]?.?[\#]{0,3}///g ]
		@processType( "source", findPatterns, replacePatterns )

	# Style import directives: // import "..." or /* import "..." */.
	buildStyle: () ->
		findPatterns = [ ///([/]{2}|[/][*]).?import[(]?.?[\"'].*[\"'].?[)]?([*][/])?///g ]
		replacePatterns = [ ///([ \t]*)([/]{2}|[/][*]).?import[(]?.?[\"']replace[\"'].?[)]?([*][/])?///g ]
		@processType( "style", findPatterns, replacePatterns )

	# Reset per-build state and the step-tracking record that decides when
	# markup may start and when the whole build is finished.
	initialize: ( config ) ->
		@config = config
		@filesBuilt = {}

		@steps =
			source: false
			style: false
			markup: false
			hasSource: config.source
			hasStyle: config.style
			hasMarkup: config.markup
			markupReady: () -> ( this.source or not this.hasSource ) and ( this.style or not this.hasStyle )
			allDone: () ->
				status = ( this.source or not this.hasSource ) and ( this.style or not this.hasStyle ) and ( this.markup or not this.hasMarkup )
				status

	# Full pipeline for one content type: prep -> copy to working dir ->
	# combine imports -> ( optional docs ) -> compile -> post-process ->
	# copy to final destinations.
	processType: ( type, findPatterns, replacePatterns ) ->
		self = this
		forAll = @scheduler.parallel
		compiler = @compiler
		combiner = new @combiner( @fp, @scheduler, findPatterns, replacePatterns )
		postProcessor = @postProcessor
		@log.onStep "Starting #{ type } pipe-line"
		self.prepFiles type, ( list ) ->
			if list and list.length > 0
				self.copyFiles list, () ->

					self.log.onStep "Combining #{ type } files"
					combiner.combineList list, () ->

						# only emit files nothing else depends on
						final = _.filter( list, ( x ) -> x.dependents == 0 )

						if self.config.docs
							self.documenter.generate final

						self.log.onStep "Compiling #{ type } files"
						forAll final, compiler.compile, ( compiled ) ->

							self.log.onStep "Post-process #{ type } files"
							postProcessor[ type ].process compiled, ( list ) ->

								self.log.onStep "Moving #{ type } files to destinations"
								self.finalOutput list, () ->
									self.stepComplete type
			else
				self.stepComplete type

	# Copy each finished file to every configured output path, applying any
	# custom output names from config.name.
	finalOutput: ( files, onComplete ) ->
		fp = @fp
		names = @config.name
		forAll = @scheduler.parallel
		copy = ( file, done ) ->
			forAll( file.outputPaths, ( destination, moved ) ->
				outputName = file.name
				if names
					if _.isString names
						outputName = names
					else
						custom = names[ file.name ]
						outputName = custom or= outputName
				fp.copy [ file.workingPath, file.name ], [ destination, outputName ], moved
			, done )
		forAll files, copy, onComplete

	# Mirror the files into the working directory before any mutation.
	copyFiles: ( files, onComplete ) ->
		fp = @fp
		copy = ( file, done ) ->
			fp.ensurePath file.workingPath, () ->
				fp.copy file.fullPath, [ file.workingPath, file.name ], done
		@scheduler.parallel files, copy, onComplete

	# Delete everything in the working directory.
	cleanWorking: ( onComplete ) ->
		fp = @fp
		forAll = @scheduler.parallel
		fp.getFiles @config.working, ( files ) ->
			forAll files, fp.delete, () ->
				onComplete()

	# Build file descriptors ( paths, extension helper, import metadata )
	# for every buildable file of the given type.
	prepFiles: ( type, onComplete ) ->
		self = this
		workingBase = @config.working
		typePath = @config[ type ]
		output = @config.output[ type ]
		output = if _.isArray( output ) then output else [ output ]
		log = @log
		@fp.getFiles typePath, ( files ) ->
			log.onEvent "Found #{ files.length } #{ type } files ..."
			list = for file in files
				name = path.basename file
				relative = path.dirname( file.replace( typePath, "") )
				# NOTE(review): buildPath takes a single pathSpec, so the
				# `relative` argument is silently ignored here — likely meant
				# buildPath( [ workingBase, relative ] ); confirm before
				# changing, as the working-dir layout depends on it.
				working = self.fp.buildPath( workingBase, relative )
				{
					dependents: 0
					ext: () -> path.extname this.name
					fullPath: file
					imports: []
					name: name
					originalName: name
					outputPaths: output
					relativePath: relative
					workingPath: working
				}
			filtered = _.filter list, ( x ) -> _.any self.extensions, ( y ) -> y == x.ext()
			onComplete filtered

	# Record step completion; start markup once source and style are done,
	# and finish the build after markup completes.
	stepComplete: ( step ) ->
		@steps[ step ] = true
		if step != "markup" and @steps.markupReady()
			@buildMarkup()
		if step == "markup" and @steps.allDone()
			@inProcess = false
			@cleanWorking @callback
|
# Watches the configured source trees and invokes @onChange on the first
# file event, tearing down all watchers until the next setup().
class Continuous
	constructor: ( @fp, @config, @onChange ) ->
		@style = @normalize @config.style
		@source = @normalize @config.source
		@markup = @normalize @config.markup
		@spec = @normalize @config.spec
		@watchers = []
		@watching = false
		_.bindAll( this )
		this

	# Coerce a single path into a one-element list.
	normalize: ( x ) -> if _.isArray x then x else [ x ]

	# Install watchers over every configured tree ( idempotent per build ).
	setup: () ->
		if not @watching
			@watching = true
			if @style then @watchPath p for p in @style
			if @source then @watchPath p for p in @source
			if @markup then @watchPath p for p in @markup
			if @spec then @watchPath p for p in @spec

	# Watch every file currently under `path`.
	watchPath: ( path ) ->
		@fp.getFiles path, @watchFiles

	watchFiles: ( files ) ->
		for file in files
			@watchers.push fs.watch file, @onEvent

	# First change wins: close every watcher, then notify the owner.
	onEvent: ( event, file ) ->
		if @watching
			@watching = false
			while @watchers.length > 0
				@watchers.pop().close()
			@onChange()
Mocha = require "mocha"
_ = require "underscore"
reporters = Mocha.reporters
interfaces = Mocha.interfaces
Context = Mocha.Context
Runner = Mocha.Runner
Suite = Mocha.Suite
path = require "path"
|
# Runs the configured spec files through Mocha using config.mocha options.
class MochaRunner
	constructor: ( @fp, @scheduler, @config, @onComplete ) ->
		_.bindAll( this )
	# Collect spec files, bust the require cache, and run Mocha; @onComplete
	# fires when the run finishes. No-op when config.spec is unset.
	run: () ->
		self = this
		if @config.spec
			forAll = @scheduler.parallel
			opts = @config.mocha or=
				growl: true
				ignoreLeaks: true
				reporter: "spec"
				ui: "bdd"
				colors: true
			# e.g. "spec" -> "Spec" to match Mocha's reporter class names
			reporterName = opts.reporter.toLowerCase().replace( ///([a-z])///, ( x ) -> x.toUpperCase() )
			uiName = opts.ui.toLowerCase()
			mocha = new Mocha( {
				ui: uiName
				ignoreLeaks: true
				colors: opts.colors
				growl: opts.growl
				slow: opts.slow
				timeout: opts.timeout
			} )
			mocha.reporter(reporterName)
			specs = if _.isString @config.spec then [ @config.spec ] else @config.spec
			forAll specs, @fp.getFiles, ( lists ) ->
				files = _.flatten lists
				for file in files
					# drop cached modules so continuous re-runs load fresh code
					delete require.cache[ file ]
					mocha.addFile file
				mocha.run () ->
					self.onComplete()
|
# Tracks connected socket.io clients so the build can push refresh
# notifications to hooked browsers.
class SocketServer
	constructor: ( app ) ->
		_.bindAll( this )
		@clients = []
		@io = require( "socket.io" ).listen(app)
		@io.set "log level", 1

		@io.sockets.on "connection", @addClient

	# Register a client and arrange its removal on end / disconnect.
	addClient: ( socket ) ->
		self = this
		@clients.push socket
		# Fix: "end"/"disconnect" handlers are NOT called with the socket as
		# an argument, so passing @removeClient directly made indexOf look up
		# `undefined`, return -1, and splice(-1, 1) evict the WRONG (last)
		# client. Bind the socket via a closure instead.
		remove = () -> self.removeClient socket
		socket.on "end", remove
		socket.on "disconnect", remove
		log.onEvent "client connected"

	# Remove a client; tolerates sockets that were already removed.
	removeClient: ( socket ) ->
		index = @clients.indexOf socket
		if index >= 0
			@clients.splice index, 1
			log.onEvent "client disconnected"

	# Ask every hooked client to reload.
	refreshClients: ->
		log.onEvent "Refreshing hooked clients"
		@notifyClients "refresh"

	# Emit `msg` (with an empty payload) to every connected client.
	notifyClients: ( msg ) ->
		for client in @clients
			client.emit msg, {}
express = require 'express'
|
# Static HTTP host for the built output, with compile-on-request support
# for formats the browser cannot use raw ( coffee, less, styl, haml, ... ).
class Host
	constructor: ( @fp, @scheduler, @compiler, @config ) ->
		self = this
		_.bindAll( this )
		@app = express.createServer()
		app = @app
		app.use express.bodyParser()
		app.use app.router
		hosts = @config.hosts

		if hosts
			# explicit mount-point -> folder mappings from the build file
			_.each( hosts, ( value, key ) ->
				app.use key, express.static( path.resolve value )
			)

		else
			# fall back to hosting the primary output folder at "/"
			output = @config.output
			target = ""
			if @config.markup
				if _.isString output
					target = output
				else if _.isArray output
					target = output[ 0 ]
				else
					target = output.markup
			else
				if _.isString output
					target = output
				else if _.isArray output
					target = output[ 0 ]
				else
					target = output.source
			app.use "/", express.static( path.resolve target )
		if @config.ext
			app.use "/ext", express.static( path.resolve @config.ext )
		if @config.spec
			app.use "/spec", express.static( path.resolve @config.spec )

		# anvil's own client-side prerequisites ( e.g. the refresh hook )
		anvilPath = path.resolve( path.dirname( fs.realpathSync( __filename ) ), "../ext" )
		console.log "Hosting anvil prerequisites from #{ anvilPath }"
		app.use "/anvil", express.static( anvilPath )

		# compile requested source files on the fly and serve the result
		app.get ///.*[.](coffee|kup|less|styl|md|markdown|haml)///, ( req, res ) ->
			fileName = ".#{ req.url }"
			ext = path.extname fileName
			mimeType = self.contentTypes[ ext ]
			res.header 'Content-Type', mimeType
			self.fp.read fileName, ( content ) ->
				self.compiler.compilers[ ext ] content, ( compiled ) ->
					res.send compiled
		port = if @config.port then @config.port else 3080
		app.listen port
	# response Content-Type per compiled-on-request extension
	contentTypes:
		".coffee": "application/javascript"
		".less": "text/css"
		".styl": "text/css"
		".md": "text/html"
		".markdown": "text/html"
		".haml": "text/html"
		".kup": "text/html"
|
# Wires every component together and drives the whole command-line run.
class Cli
	constructor: () ->
		@anvil = {}
		@ci = undefined
		@documenter = undefined
		@mochaRunner = undefined
		@socketServer = {}
		@postProcessor = {}
		@log = log
		@scheduler = new Scheduler()
		@crawler = new FSCrawler @scheduler
		@fp = new FSProvider @crawler, @log
		@configuration = new Configuration @fp, @scheduler, @log
		@compiler = new Compiler @fp, @log
		_.bindAll this

	# Watch source trees and rebuild on change ( --ci ).
	initCI: ( config ) ->
		@ci = new Continuous @fp, config, @onFileChange

	# Stand up the static host plus its web-socket refresh channel ( --host ).
	initHost: ( config ) ->
		@server = new Host @fp, @scheduler, @compiler, config
		@socketServer = new SocketServer @server.app
		@log.onStep "Static HTTP server listening on port #{ config.port }"

	initMocha: ( config ) ->
		@mochaRunner = new MochaRunner @fp, @scheduler, config, @onTestsComplete

	# Tell hooked browsers to refresh after a successful build.
	notifyHttpClients: () ->
		if @socketServer.refreshClients
			@log.onStep "Notifying clients of build completion"
			@socketServer.refreshClients()

	# After a build: run specs if configured, otherwise restart watchers.
	onBuildComplete: () ->
		self = this
		@log.onComplete "Build #{ @anvil.buildNumber++ } completed"
		if self.mochaRunner
			self.log.onStep "Running specifications with Mocha"
			self.mochaRunner.run()
		else
			self.startCI()
		self.notifyHttpClients()

	# Receives the parsed configuration and kicks off the build pipeline.
	# `stop` is set for scaffolding-only commands.
	onConfig: ( config, stop ) ->
		@config = config
		if stop then process.exit 0
		if config.continuous then @initCI config
		if config.mocha then @initMocha config
		if config.host then @initHost config
		@postProcessor = new PostProcessor config, @fp, @scheduler, @log
		@documenter = new Documenter config, @fp, @scheduler, @log
		@anvil = new Anvil @fp, @compiler, Combiner, @documenter, @scheduler, @postProcessor, @log, @onBuildComplete
		@anvil.build( config )
		@startCI()

	# Fired by the Continuous watcher when a watched file changes.
	# Fix: the original wrapped the rebuild in a function assigned to
	# @fileChange and never invoked it, so file changes logged a message
	# but never actually triggered a rebuild.
	onFileChange: () ->
		@log.onEvent "File change detected, starting build"
		@anvil.build( @config )

	onTestsComplete: () ->
		@log.onComplete "Tests completed"
		@startCI()
		@notifyHttpClients()

	# Parse process.argv and hand control to onConfig.
	run: () ->
		@configuration.configure process.argv, @onConfig

	# (Re)install file watchers when running in continuous mode.
	startCI: () ->
		if @ci
			@log.onStep "Starting file watchers"
			@ci.setup()
# Public entry point: construct the CLI driver and start a run.
exports.run = () ->
	( new Cli() ).run()
|