Skip to content

Commit

Permalink
Merge pull request #26 from rsteube/wordbreak-prefix
Browse files Browse the repository at this point in the history
added wordbreak prefix
  • Loading branch information
rsteube authored Aug 11, 2023
2 parents bd636a9 + 7c8835a commit 0bd041d
Show file tree
Hide file tree
Showing 3 changed files with 118 additions and 90 deletions.
8 changes: 7 additions & 1 deletion cmd/carapace-shlex/cmd/root.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package cmd

import (
"encoding/json"
"fmt"

"github.com/rsteube/carapace"
"github.com/rsteube/carapace-bridge/pkg/actions/bridge"
Expand Down Expand Up @@ -31,6 +32,10 @@ var rootCmd = &cobra.Command{
if cmd.Flag("words").Changed {
tokens = tokens.Words()
}
if cmd.Flag("prefix").Changed {
fmt.Fprintln(cmd.OutOrStdout(), tokens.WordbreakPrefix())
return nil
}

encoder := json.NewEncoder(cmd.OutOrStdout())
encoder.SetEscapeHTML(false)
Expand All @@ -44,9 +49,10 @@ func Execute() error {
}

func init() {
rootCmd.Flags().Bool("args", false, "show words")
rootCmd.Flags().Bool("current", false, "show current pipeline")
rootCmd.Flags().Bool("prefix", false, "show wordbreak prefix")
rootCmd.Flags().Bool("words", false, "show words")
rootCmd.Flags().Bool("args", false, "show words")

carapace.Gen(rootCmd).PositionalCompletion(
bridge.ActionCarapaceBin().SplitP(),
Expand Down
89 changes: 0 additions & 89 deletions shlex.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ import (
"fmt"
"io"
"os"
"strconv"
"strings"
)

Expand Down Expand Up @@ -397,94 +396,6 @@ func (t *tokenizer) Next() (*Token, error) {
return token, err
}

// TokenSlice is a sequence of lexed tokens.
type TokenSlice []Token

// Strings returns the Value of each token in the slice, in order.
func (t TokenSlice) Strings() []string {
s := make([]string, 0, len(t))
for _, token := range t {
s = append(s, token.Value)
}
return s
}

// Pipelines splits the tokens into pipelines, using wordbreak tokens
// classified as pipeline delimiters as separators. The delimiter tokens
// themselves are dropped. At least one (possibly empty) pipeline is
// always returned.
func (t TokenSlice) Pipelines() []TokenSlice {
pipelines := make([]TokenSlice, 0)

pipeline := make(TokenSlice, 0)
for _, token := range t {
switch {
case token.Type == WORDBREAK_TOKEN && wordbreakType(token).IsPipelineDelimiter():
pipelines = append(pipelines, pipeline)
pipeline = make(TokenSlice, 0)
default:
pipeline = append(pipeline, token)
}
}
// the trailing (possibly empty) pipeline is always included
return append(pipelines, pipeline)
}

// CurrentPipeline returns the pipeline containing the last token.
// Safe on any slice: Pipelines never returns an empty result.
func (t TokenSlice) CurrentPipeline() TokenSlice {
pipelines := t.Pipelines()
return pipelines[len(pipelines)-1]
}

// Words merges runs of adjoining tokens (per Token.adjoins) into single
// word tokens, concatenating Value and RawValue and keeping the State of
// the last merged token.
func (t TokenSlice) Words() TokenSlice {
words := make(TokenSlice, 0)
for index, token := range t {
switch {
case index == 0:
words = append(words, token)
case t[index-1].adjoins(token):
// extend the previous word in place
words[len(words)-1].Value += token.Value
words[len(words)-1].RawValue += token.RawValue
words[len(words)-1].State = token.State
default:
words = append(words, token)
}
}
return words
}

// FilterRedirects removes redirect wordbreak tokens, the token following
// a redirect (its target), and a numeric token directly adjoining a
// redirect (presumably a file descriptor as in `2>`).
func (t TokenSlice) FilterRedirects() TokenSlice {
filtered := make(TokenSlice, 0)
for index, token := range t {
// drop the redirect operator itself
switch token.Type {
case WORDBREAK_TOKEN:
if wordbreakType(token).IsRedirect() {
continue
}
}

// drop the token immediately following a redirect operator
// NOTE(review): t[index-1].Type is not checked to be WORDBREAK_TOKEN
// before calling wordbreakType — presumably safe for non-wordbreak
// values; verify against wordbreakType's implementation.
if index > 0 {
if wordbreakType(t[index-1]).IsRedirect() {
continue
}
}

// drop a numeric token that adjoins a following redirect operator
// (e.g. the `2` in `2>`)
if index < len(t)-1 {
next := t[index+1]
if token.adjoins(next) {
if _, err := strconv.Atoi(token.RawValue); err == nil {
if wordbreakType(t[index+1]).IsRedirect() {
continue
}
}
}

}

filtered = append(filtered, token)
}
return filtered
}

// CurrentToken returns the last token, or the zero Token when the slice
// is empty.
func (t TokenSlice) CurrentToken() (token Token) {
if len(t) > 0 {
token = t[len(t)-1]
}
return
}

// Split partitions of a string into tokens.
func Split(s string) (TokenSlice, error) {
l := newLexer(strings.NewReader(s))
Expand Down
111 changes: 111 additions & 0 deletions tokenslice.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@
package shlex

import "strconv"

// TokenSlice is a sequence of lexed tokens.
type TokenSlice []Token

// Strings returns the Value of each token in the slice, in order.
func (t TokenSlice) Strings() []string {
	values := make([]string, len(t))
	for i, tok := range t {
		values[i] = tok.Value
	}
	return values
}

// Pipelines splits the tokens into pipelines, using wordbreak tokens
// classified as pipeline delimiters as separators. The delimiter tokens
// themselves are dropped. At least one (possibly empty) pipeline is
// always returned.
func (t TokenSlice) Pipelines() []TokenSlice {
	result := make([]TokenSlice, 0)
	current := make(TokenSlice, 0)
	for _, tok := range t {
		if tok.Type == WORDBREAK_TOKEN && wordbreakType(tok).IsPipelineDelimiter() {
			// close the current pipeline; the delimiter is dropped
			result = append(result, current)
			current = make(TokenSlice, 0)
			continue
		}
		current = append(current, tok)
	}
	// the trailing (possibly empty) pipeline is always included
	return append(result, current)
}

// CurrentPipeline returns the pipeline containing the last token.
func (t TokenSlice) CurrentPipeline() TokenSlice {
	all := t.Pipelines()
	// Pipelines never returns an empty result, so this index is safe.
	return all[len(all)-1]
}

// Words merges runs of adjoining tokens (per Token.adjoins) into single
// word tokens, concatenating Value and RawValue and keeping the State of
// the last merged token.
func (t TokenSlice) Words() TokenSlice {
	words := make(TokenSlice, 0)
	for i := range t {
		if i > 0 && t[i-1].adjoins(t[i]) {
			// extend the previous word in place
			last := &words[len(words)-1]
			last.Value += t[i].Value
			last.RawValue += t[i].RawValue
			last.State = t[i].State
			continue
		}
		words = append(words, t[i])
	}
	return words
}

// FilterRedirects removes redirect wordbreak tokens, the token following
// a redirect (its target), and a numeric token directly adjoining a
// redirect (presumably a file descriptor as in `2>`).
func (t TokenSlice) FilterRedirects() TokenSlice {
	filtered := make(TokenSlice, 0)
	for i, tok := range t {
		// drop the redirect operator itself
		if tok.Type == WORDBREAK_TOKEN && wordbreakType(tok).IsRedirect() {
			continue
		}

		// drop the token immediately following a redirect operator
		if i > 0 && wordbreakType(t[i-1]).IsRedirect() {
			continue
		}

		// drop a numeric token that adjoins a following redirect
		// operator (e.g. the `2` in `2>`)
		if i < len(t)-1 && tok.adjoins(t[i+1]) {
			if _, err := strconv.Atoi(tok.RawValue); err == nil && wordbreakType(t[i+1]).IsRedirect() {
				continue
			}
		}

		filtered = append(filtered, tok)
	}
	return filtered
}

// CurrentToken returns the last token, or the zero Token when the slice
// is empty.
func (t TokenSlice) CurrentToken() (token Token) {
	if len(t) == 0 {
		return
	}
	return t[len(t)-1]
}

// WordbreakPrefix returns the prefix of the word containing the last
// token, from the start of the run of adjoining tokens up to and
// including the last wordbreak token before the current token.
// It returns "" when that run contains no wordbreak token.
func (t TokenSlice) WordbreakPrefix() string {
	// Walk backwards over tokens adjoining the last one, remembering the
	// run start and the position of the last wordbreak token (if any).
	breakAt := -1
	start := len(t) - 1
	for start > 0 && t[start-1].adjoins(t[start]) {
		start--
		if breakAt == -1 && t[start].Type == WORDBREAK_TOKEN {
			breakAt = start
		}
	}
	if breakAt == -1 {
		return ""
	}
	prefix := ""
	for i := start; i <= breakAt; i++ {
		prefix += t[i].Value
	}
	return prefix
}

0 comments on commit 0bd041d

Please sign in to comment.