Merge pull request #6 from superhawk610/fix/handle-boundary-characters
superhawk610 authored Dec 29, 2020
2 parents 4fbf44d + e72e568 commit 513eb51
Showing 7 changed files with 82 additions and 16 deletions.
10 changes: 10 additions & 0 deletions CHANGELOG.md
@@ -0,0 +1,10 @@
# Changelog

## Version 0.0.2: Fix verb token leading character parsing

This version addresses a bug with custom format strings that include a verb token preceded
by a non-whitespace character, such as `(:bar)`.
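
For illustration, a minimal usage sketch of the fixed behavior (the import path, package alias, and numeric argument here are assumptions for this example, not part of the change):

```go
package main

import (
	progress "github.com/superhawk610/progressbar" // import path is an assumption
)

func main() {
	// A custom format where a verb token is preceded by "(": with this fix,
	// "(:bar)" tokenizes as the literal "(", the bar verb token, and the
	// literal ")" (see the new cases in tokens_test.go below).
	bar := progress.NewWithFormat(20, "(:bar)")
	_ = bar
}
```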

## Version 0.0.1: Initial Release

This version was released alongside [superhawk610/terminal](https://github.com/superhawk610/terminal).
4 changes: 4 additions & 0 deletions README.md
@@ -192,6 +192,10 @@ Debugging crowded layouts can be difficult, so this helper swaps each bar compon
<barToken p={4} t={10}> <percentToken "40.0%"> <customVerbToken verb="hello" value="Hello!">
```

## Changelog

See [CHANGELOG.md](CHANGELOG.md).

## License

Copyright &copy; 2019 Aaron Ross, all rights reserved. View the ISC license [here](LICENSE).
2 changes: 1 addition & 1 deletion bar.go
@@ -72,7 +72,7 @@ func New(t int) *Bar {
formatString: defaultFormat,
format: tokenize(defaultFormat, []string{}),
callback: noop,
output: &stdout{},
output: initializeStdout(),
}
}

4 changes: 2 additions & 2 deletions opts.go
@@ -40,7 +40,7 @@ func NewWithFormat(t int, f string) *Bar {
formatString: f,
format: tokenize(f, nil),
callback: noop,
output: &stdout{},
output: initializeStdout(),
}
}

@@ -55,7 +55,7 @@ func NewWithOpts(opts ...func(o *barOpts)) *Bar {
end: ")",
formatString: defaultFormat,
callback: noop,
output: &stdout{},
output: initializeStdout(),
}

for _, aug := range opts {
16 changes: 11 additions & 5 deletions output.go
@@ -12,15 +12,21 @@ type Output interface {
Printf(format string, vals ...interface{})
}

type stdout struct{}
type stdout struct {
terminal terminal.Terminal
}

func initializeStdout() *stdout {
return &stdout{}
}

// ClearLine clears the current output line and returns the cursor
// to the first index
func (stdout) ClearLine() {
terminal.ClearLine()
func (s *stdout) ClearLine() {
s.terminal.ClearLine()
}

// Printf accepts a format string and any number of input values
func (stdout) Printf(format string, vals ...interface{}) {
terminal.Overwritef(format, vals...)
func (s *stdout) Printf(format string, vals ...interface{}) {
s.terminal.Overwritef(format, vals...)
}
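
As an aside, a hypothetical sketch of what the `Output` interface buys here: any type implementing `ClearLine` and `Printf` can stand in for the terminal-backed `stdout`, for example in tests. This mock is illustrative only and not part of the commit:

```go
package progressbar // package name is an assumption

import "fmt"

// mockOutput is a hypothetical in-memory Output implementation; it records
// formatted writes instead of driving the terminal.
type mockOutput struct {
	lines []string
}

func (m *mockOutput) ClearLine() {}

func (m *mockOutput) Printf(format string, vals ...interface{}) {
	m.lines = append(m.lines, fmt.Sprintf(format, vals...))
}
```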
34 changes: 28 additions & 6 deletions tokens.go
@@ -33,6 +33,8 @@ type literalToken struct {
content string
}

// tokenize takes a format string and a slice of custom verbs (if any)
// and returns a slice of tokens that represent the format string.
func tokenize(f string, customVerbs []string) tokens {
var t tokens

@@ -53,6 +55,9 @@ func tokenize(f string, customVerbs []string) tokens {
}
}

// nextToken consumes characters from the input until a complete token is
// found and returned. If an error is encountered, it is returned alongside
// a `nil` token.
func (f *tokenFormat) nextToken(customVerbs []string) (token, error) {
for {
r, _, err := f.stream.ReadRune()
@@ -71,6 +76,9 @@ func (f *tokenFormat) nextToken(customVerbs []string) (token, error) {
}
}

// readAction will consume characters from the input until it finds a valid
// action verb, returning the corresponding verb token. If no valid verb is
// found when the input runs out, a literal token will be returned instead.
func (f *tokenFormat) readAction(customVerbs []string) (token, error) {
var verb bytes.Buffer

@@ -97,36 +105,42 @@ func (f *tokenFormat) readAction(customVerbs []string) (token, error) {
}
}

// readLiteral will consume characters from the input until it encounters
// a separator character (see `readSeparator`), returning a literal token
// containing the characters it consumed.
func (f *tokenFormat) readLiteral(prefix rune) (token, error) {
var value bytes.Buffer

value.Write([]byte(string([]rune{prefix})))

for {
if f.readSeparator() {
return literalToken{value.String()}, nil
}

r, _, err := f.stream.ReadRune()

if err != nil {
return nil, err
}

value.Write([]byte(string([]rune{r})))

if f.readSeparator() {
return literalToken{value.String()}, nil
}
}
}

// readSeparator looks for a separator character (one of ` `, `:`, or *EOF*), returning
// `true` if one is found and `false` otherwise. It does not consume any characters
// from the input.
func (f *tokenFormat) readSeparator() bool {
p, err := f.stream.Peek(1)
if err != nil || p[0] == ' ' || p[0] == ':' {
if err != nil || p[0] == byte(' ') || p[0] == byte(':') {
return true
}
return false
}

// tokenFromString will return the token parsed from s, as well as a
// bool determining whether a valid token was found
// bool determining whether a valid token was found.
func tokenFromString(s string, customVerbs []string) (token, bool) {
// check for standard verbs
switch s {
@@ -150,6 +164,10 @@ func tokenFromString(s string, customVerbs []string) (token, bool) {
return nil, false
}

//
// print implementations
//

func (t spaceToken) print(_ *Bar) string {
return " "
}
@@ -197,6 +215,10 @@ func (t literalToken) print(_ *Bar) string {
return t.content
}

//
// debug implementations
//

func (t spaceToken) debug(b *Bar) string {
return " "
}
28 changes: 26 additions & 2 deletions tokens_test.go
@@ -28,7 +28,31 @@ func TestTokenize(t *testing.T) {
got := tokenize(testCase.formatString, nil)
if !reflect.DeepEqual(got, testCase.expected) {
t.Errorf(
"[%d] tokenize(%#v, nil) = %#v; want %#v",
"[%d] tokenize(%#v, nil)\n\n got %#v\n want %#v",
i,
testCase.formatString,
got,
testCase.expected,
)
}
}
}

func TestTokenizeWithBoundaryCharacters(t *testing.T) {
var testCases = []struct {
formatString string
expected tokens
}{
{"(:bar", tokens{literalToken{"("}, barToken{}}},
{"(:bar)", tokens{literalToken{"("}, barToken{}, literalToken{")"}}},
{":bar (:eta remaining)", tokens{barToken{}, spaceToken{}, literalToken{"("}, etaToken{}, spaceToken{}, literalToken{"remaining)"}}},
}

for i, testCase := range testCases {
got := tokenize(testCase.formatString, nil)
if !reflect.DeepEqual(got, testCase.expected) {
t.Errorf(
"[%d] tokenize(%#v, nil)\n\n got %#v\n want %#v",
i,
testCase.formatString,
got,
@@ -53,7 +77,7 @@ func TestTokenizeWithCustomVerbs(t *testing.T) {
got := tokenize(testCase.formatString, testCase.customVerbs)
if !reflect.DeepEqual(got, testCase.expected) {
t.Errorf(
"[%d] tokenize(%#v, %#v) = %#v; want %#v",
"[%d] tokenize(%#v, %#v)\n\n got %#v\n want %#v",
i,
testCase.formatString,
testCase.customVerbs,
