Skip to content
This repository has been archived by the owner on Dec 15, 2022. It is now read-only.

Commit

Permalink
Support hex, octal, binary, inf, and nan
Browse files Browse the repository at this point in the history
  • Loading branch information
Winston Liu authored and Winston Liu committed Mar 26, 2019
1 parent 5963fd6 commit 45f5308
Show file tree
Hide file tree
Showing 2 changed files with 77 additions and 2 deletions.
45 changes: 43 additions & 2 deletions grammars/toml.cson
Original file line number Diff line number Diff line change
Expand Up @@ -197,8 +197,11 @@
'include': '#date-time'
}
{
'match': '[+-]?(0|[1-9]\\d*)(_\\d+)*((\\.\\d+)(_\\d+)*)?([eE][+-]?\\d+(_\\d+)*)?'
'name': 'constant.numeric.toml'
'include': '#numbers'
}
{
'match': '.+'
'name': 'invalid.illegal.toml'
}
]
'date-time':
Expand Down Expand Up @@ -251,3 +254,41 @@
'name': 'constant.numeric.date.toml'
}
]
'numbers':
  # Every numeric form TOML allows: decimal integers/floats, the special
  # inf/nan floats, and hex/octal/binary integers.
  # Spec: https://github.com/toml-lang/toml#integer
  'patterns': [
    {
      # Handles decimal integers & floats.
      # Underscores are only legal *between* digits — hence (_?\d)* — and
      # leading zeros are rejected by the 0|[1-9]... alternation.
      # The trailing \b fails to match after a dangling underscore ("1_")
      # or before trailing junk, so such values fall through to the
      # invalid.illegal catch-all in the value context.
      'match': '''(?x)
        [+-]? # Optional +/-
        (
          0 # Just a zero
          |
          [1-9](_?\\d)* # Or a non-zero number (no leading zeros allowed)
        )
        (\\.\\d(_?\\d)*)? # Optional fractional portion
        ([eE][+-]?\\d(_?\\d)*)? # Optional exponent
        \\b
      '''
      'name': 'constant.numeric.toml'
    }
    {
      # Special floats; capture $1 puts "inf"/"nan" into the scope name
      # (constant.numeric.inf.toml / constant.numeric.nan.toml).
      'match': '[+-]?(inf|nan)\\b'
      'name': 'constant.numeric.$1.toml'
    }
    {
      # Hex — lowercase 0x prefix, either-case digits, underscores only
      # between digits; the pattern accepts no leading sign.
      'match': '0x[0-9A-Fa-f](_?[0-9A-Fa-f])*\\b'
      'name': 'constant.numeric.hex.toml'
    }
    {
      # Octal — 0o prefix, digits 0-7 only.
      'match': '0o[0-7](_?[0-7])*\\b'
      'name': 'constant.numeric.octal.toml'
    }
    {
      # Binary — 0b prefix, digits 0/1 only.
      'match': '0b[01](_?[01])*\\b'
      'name': 'constant.numeric.binary.toml'
    }
  ]
34 changes: 34 additions & 0 deletions spec/toml-spec.coffee
Original file line number Diff line number Diff line change
Expand Up @@ -90,11 +90,45 @@ describe "TOML grammar", ->
{tokens} = grammar.tokenizeLine("foo = #{int}")
expect(tokens[4]).toEqual value: int, scopes: ["source.toml", "constant.numeric.toml"]

it "does not tokenize a number with leading zeros as an integer", ->
  # TOML rejects decimal integers with leading zeros, so "01" must be
  # flagged invalid rather than scoped as a number.
  line = grammar.tokenizeLine("foo = 01")
  expect(line.tokens[4]).toEqual value: "01", scopes: ["source.toml", "invalid.illegal.toml"]

it "does not tokenize a number with an underscore not followed by a digit as an integer", ->
  # Underscores must sit between digits; both a doubled underscore and a
  # trailing underscore make the whole value invalid.
  for malformed in ["1__2", "1_"]
    {tokens} = grammar.tokenizeLine("foo = #{malformed}")
    expect(tokens[4]).toEqual value: malformed, scopes: ["source.toml", "invalid.illegal.toml"]

it "tokenizes hex integers", ->
  # Upper-case digits, lower-case digits, and digit-separating
  # underscores are all accepted under the hex scope.
  for hex in ["0xDEADBEEF", "0xdeadbeef", "0xdead_beef"]
    line = grammar.tokenizeLine("foo = #{hex}")
    expect(line.tokens[4]).toEqual value: hex, scopes: ["source.toml", "constant.numeric.hex.toml"]

it "tokenizes octal integers", ->
  # 0o-prefixed value lands in the dedicated octal scope.
  line = grammar.tokenizeLine("foo = 0o755")
  expect(line.tokens[4]).toEqual value: "0o755", scopes: ["source.toml", "constant.numeric.octal.toml"]

it "tokenizes binary integers", ->
  # 0b-prefixed value lands in the dedicated binary scope.
  line = grammar.tokenizeLine("foo = 0b11010110")
  expect(line.tokens[4]).toEqual value: "0b11010110", scopes: ["source.toml", "constant.numeric.binary.toml"]

it "does not tokenize a number followed by other characters as a number", ->
  # Trailing non-digit characters invalidate the entire value token,
  # not just the suffix.
  line = grammar.tokenizeLine("foo = 0xdeadbeefs")
  expect(line.tokens[4]).toEqual value: "0xdeadbeefs", scopes: ["source.toml", "invalid.illegal.toml"]

it "tokenizes floats", ->
  # Covers fractional, exponent-only, combined fraction+exponent, and
  # underscore-separated forms. The original list repeated "6.626e-34";
  # the redundant duplicate has been removed.
  for float in ["+1.0", "3.1415", "-0.01", "5e+22", "1e6", "-2E-2", "6.626e-34", "9_224_617.445_991_228_313", "1e1_000"]
    {tokens} = grammar.tokenizeLine("foo = #{float}")
    expect(tokens[4]).toEqual value: float, scopes: ["source.toml", "constant.numeric.toml"]

it "tokenizes inf and nan", ->
  # Each special float gets its own scope (constant.numeric.inf.toml /
  # constant.numeric.nan.toml), with an optional leading sign.
  for sign in ["+", "-", ""]
    for special in ["inf", "nan"]
      literal = "#{sign}#{special}"
      {tokens} = grammar.tokenizeLine("foo = #{literal}")
      expect(tokens[4]).toEqual value: literal, scopes: ["source.toml", "constant.numeric.#{special}.toml"]

it "tokenizes offset date-times", ->
{tokens} = grammar.tokenizeLine("foo = 1979-05-27T07:32:00Z")
expect(tokens[4]).toEqual value: "1979-05-27", scopes: ["source.toml", "constant.numeric.date.toml"]
Expand Down

0 comments on commit 45f5308

Please sign in to comment.