Skip to content
This repository has been archived by the owner on Dec 15, 2022. It is now read-only.

Commit

Permalink
Atom packages can load cson grammars and TextMate packages can't
Browse files Browse the repository at this point in the history
  • Loading branch information
probablycorey-and-nathan committed Mar 21, 2013
0 parents commit 3126c0b
Show file tree
Hide file tree
Showing 2 changed files with 115 additions and 0 deletions.
50 changes: 50 additions & 0 deletions grammars/toml.cson
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
# TextMate-style grammar for TOML, written in CSON (Atom grammar format).
# Patterns are tried in order; earlier rules win on overlapping matches.
'name': 'TOML'
'scopeName': 'source.toml'
'fileTypes': ['toml']
'patterns': [
  {
    # [keygroup] section headers; capture 2 is the name inside the brackets.
    'match': '(?:^\\s*)(\\[([^\\]]+)\\])'
    'captures':
      '2': 'name': 'variable.keygroup.toml'
    'name': 'keygroup.toml'
  }
  {
    # A key on the left-hand side of an assignment, e.g. `title =`.
    'match': '(?:^\\s*)(\\S+)\\s*='
    'captures':
      '1': 'name': 'entity.key.toml'
    'name': 'key.toml'
  }
  {
    # Double-quoted strings, with backslash escapes scoped separately.
    'begin': '"'
    'beginCaptures':
      '0': 'name': 'string.begin.toml'
    'end': '"'
    'endCaptures':
      '0': 'name': 'string.end.toml'
    'name': 'string.toml'
    'patterns': [
      {
        'match': '\\\\[nt0r"\\\\]'
        'name' : 'constant.character.escape.toml'
      }
    ]
  }
  {
    # Comments run from `#` to end of line.
    'match': '#.*$'
    'name': 'comment.toml'
  }
  {
    # Word boundaries prevent matching `true`/`false` inside longer
    # identifiers (e.g. a key named `truename`).
    'match': '\\btrue\\b'
    'name': 'constant.language.boolean.true.toml'
  }
  {
    'match': '\\bfalse\\b'
    'name': 'constant.language.boolean.false.toml'
  }
  {
    # ISO 8601 UTC datetimes, e.g. 1979-05-27T07:32:00Z.
    'match': '\\d{4}-\\d{2}-\\d{2}(T)\\d{2}:\\d{2}:\\d{2}(Z)'
    'name': 'support.date.toml'
  }
  {
    # Integers and floats, with optional leading minus.
    'match': '-?\\d+(\\.\\d+)?'
    'name': 'constant.numeric.toml'
  }
]
65 changes: 65 additions & 0 deletions spec/toml-spec.coffee
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
# NOTE(review): TextMatePackage is never referenced below — presumably
# required for its load-time side effects; confirm before removing.
TextMatePackage = require 'text-mate-package'

# Specs for the TOML grammar defined in grammars/toml.cson. Each test
# tokenizes a single line and asserts the exact token values and scopes.
describe "TOML grammar", ->
  grammar = null

  beforeEach ->
    # Spy on the global syntax registry so we can capture the grammar
    # object that loading the "toml" package registers.
    spyOn(syntax, "addGrammar")
    window.loadPackage("toml")
    expect(syntax.addGrammar).toHaveBeenCalled()
    grammar = syntax.addGrammar.argsForCall[0][0]

  it "parses the grammar", ->
    expect(grammar).toBeTruthy()
    expect(grammar.scopeName).toBe "source.toml"

  it "tokenizes comments", ->
    {tokens} = grammar.tokenizeLine("# I am a comment")
    expect(tokens[0]).toEqual value: "# I am a comment", scopes: ["source.toml", "comment.toml"]

  it "tokenizes strings", ->
    # The opening and closing quotes get their own begin/end scopes.
    {tokens} = grammar.tokenizeLine('"I am a string"')
    expect(tokens[0]).toEqual value: '"', scopes: ["source.toml", "string.toml", "string.begin.toml"]
    expect(tokens[1]).toEqual value: 'I am a string', scopes: ["source.toml", "string.toml"]
    expect(tokens[2]).toEqual value: '"', scopes: ["source.toml", "string.toml","string.end.toml"]

    # Backslash escapes inside a string are scoped as escape constants.
    {tokens} = grammar.tokenizeLine('"I\'m \\n escaped"')
    expect(tokens[0]).toEqual value: '"', scopes: ["source.toml", "string.toml", "string.begin.toml"]
    expect(tokens[1]).toEqual value: "I'm ", scopes: ["source.toml", "string.toml"]
    expect(tokens[2]).toEqual value: "\\n", scopes: ["source.toml", "string.toml", "constant.character.escape.toml"]
    expect(tokens[3]).toEqual value: " escaped", scopes: ["source.toml", "string.toml"]
    expect(tokens[4]).toEqual value: '"', scopes: ["source.toml", "string.toml", "string.end.toml"]

  it "tokenizes booleans", ->
    {tokens} = grammar.tokenizeLine("true")
    expect(tokens[0]).toEqual value: "true", scopes: ["source.toml", "constant.language.boolean.true.toml"]
    {tokens} = grammar.tokenizeLine("false")
    expect(tokens[0]).toEqual value: "false", scopes: ["source.toml", "constant.language.boolean.false.toml"]

  it "tokenizes numbers", ->
    # Integers, negatives, and floats all share one numeric scope.
    {tokens} = grammar.tokenizeLine("123")
    expect(tokens[0]).toEqual value: "123", scopes: ["source.toml", "constant.numeric.toml"]

    {tokens} = grammar.tokenizeLine("-1")
    expect(tokens[0]).toEqual value: "-1", scopes: ["source.toml", "constant.numeric.toml"]

    {tokens} = grammar.tokenizeLine("3.14")
    expect(tokens[0]).toEqual value: "3.14", scopes: ["source.toml", "constant.numeric.toml"]

    {tokens} = grammar.tokenizeLine("-123.456")
    expect(tokens[0]).toEqual value: "-123.456", scopes: ["source.toml", "constant.numeric.toml"]

  it "tokenizes dates", ->
    # ISO 8601 UTC datetime is matched as a single token.
    {tokens} = grammar.tokenizeLine("1979-05-27T07:32:00Z")
    expect(tokens[0]).toEqual value: "1979-05-27T07:32:00Z", scopes: ["source.toml", "support.date.toml"]

  it "tokenizes keygroups", ->
    # Brackets carry only the keygroup scope; the inner name adds a
    # variable scope on top.
    {tokens} = grammar.tokenizeLine("[keygroup]")
    expect(tokens[0]).toEqual value: "[", scopes: ["source.toml", "keygroup.toml"]
    expect(tokens[1]).toEqual value: "keygroup", scopes: ["source.toml", "keygroup.toml", "variable.keygroup.toml"]
    expect(tokens[2]).toEqual value: "]", scopes: ["source.toml", "keygroup.toml"]

  it "tokenizes keys", ->
    {tokens} = grammar.tokenizeLine("key =")
    expect(tokens[0]).toEqual value: "key", scopes: ["source.toml", "key.toml", "entity.key.toml"]
    expect(tokens[1]).toEqual value: " =", scopes: ["source.toml", "key.toml"]

0 comments on commit 3126c0b

Please sign in to comment.