diff --git a/lib/tokenizer/Tokenizer.js b/lib/tokenizer/Tokenizer.js
index dbbbd3bf..21870e31 100644
--- a/lib/tokenizer/Tokenizer.js
+++ b/lib/tokenizer/Tokenizer.js
@@ -194,14 +194,15 @@ var Tokenizer = function(source, startOffset, startLine, startColumn) {
     this.lines = null;
     this.columns = null;
 
-    this.setSource(source || '', startOffset, startLine, startColumn);
+    this.setSource(source, startOffset, startLine, startColumn);
 };
 
 Tokenizer.prototype = {
     setSource: function(source, startOffset, startLine, startColumn) {
-        var start = firstCharOffset(source);
+        var safeSource = String(source || '');
+        var start = firstCharOffset(safeSource);
 
-        this.source = source;
+        this.source = safeSource;
         this.startOffset = typeof startOffset === 'undefined' ? 0 : startOffset;
         this.startLine = typeof startLine === 'undefined' ? 1 : startLine;
         this.startColumn = typeof startColumn === 'undefined' ? 1 : startColumn;
@@ -213,7 +214,7 @@ Tokenizer.prototype = {
         this.tokenStart = start;
         this.tokenEnd = start;
 
-        tokenLayout(this, source, start);
+        tokenLayout(this, safeSource, start);
         this.next();
     },
 
diff --git a/test/tokenizer.js b/test/tokenizer.js
index 7963e3f4..3ae192e6 100644
--- a/test/tokenizer.js
+++ b/test/tokenizer.js
@@ -39,6 +39,7 @@ describe('parser/tokenizer', function() {
 
         assert.equal(tokenizer.eof, true);
         assert.equal(tokenizer.tokenType, 0);
+        assert.equal(tokenizer.source, '');
     });
 
     it('edge case: empty input', function() {
@@ -46,6 +47,23 @@ describe('parser/tokenizer', function() {
 
         assert.equal(tokenizer.eof, true);
         assert.equal(tokenizer.tokenType, 0);
+        assert.equal(tokenizer.source, '');
+    });
+
+    it('should convert input to string', function() {
+        var tokenizer = new Tokenizer({
+            toString: function() {
+                return css;
+            }
+        });
+
+        assert.equal(tokenizer.source, css);
+    });
+
+    it('should accept a Buffer', function() {
+        var tokenizer = new Tokenizer(Buffer.from(css));
+
+        assert.equal(tokenizer.source, css);
     });
 
     it('getTypes()', function() {