diff --git a/klog/app/cli/args/filter.go b/klog/app/cli/args/filter.go index f8825fd..d36a820 100644 --- a/klog/app/cli/args/filter.go +++ b/klog/app/cli/args/filter.go @@ -6,6 +6,7 @@ import ( "github.com/jotaen/klog/klog" "github.com/jotaen/klog/klog/app" "github.com/jotaen/klog/klog/service" + "github.com/jotaen/klog/klog/service/kfl" "github.com/jotaen/klog/klog/service/period" ) @@ -33,6 +34,8 @@ type FilterArgs struct { LastQuarter bool `hidden:"" name:"last-quarter" group:"Filter"` ThisYear bool `hidden:"" name:"this-year" group:"Filter"` LastYear bool `hidden:"" name:"last-year" group:"Filter"` + + FilterQuery string `name:"filter" placeholder:"KQL-FILTER-QUERY" group:"Filter" help:"(Experimental)"` } // FilterArgsCompletionOverrides enables/disables tab completion for @@ -51,6 +54,19 @@ var FilterArgsCompletionOverrides = map[string]bool{ } func (args *FilterArgs) ApplyFilter(now gotime.Time, rs []klog.Record) ([]klog.Record, app.Error) { + if args.FilterQuery != "" { + predicate, err := kfl.Parse(args.FilterQuery) + if err != nil { + return nil, app.NewErrorWithCode( + app.GENERAL_ERROR, + "Malformed filter query", + err.Error(), + err, + ) + } + rs = kfl.Filter(predicate, rs) + return rs, nil + } today := klog.NewDateFromGo(now) qry := service.FilterQry{ BeforeOrEqual: args.Until, diff --git a/klog/service/kfl/error.go b/klog/service/kfl/error.go new file mode 100644 index 0000000..bbc931a --- /dev/null +++ b/klog/service/kfl/error.go @@ -0,0 +1,49 @@ +package kfl + +import ( + "fmt" + "math" + "strings" + + tf "github.com/jotaen/klog/lib/terminalformat" +) + +type ParseError interface { + error + Original() error + Position() (int, int) +} + +type parseError struct { + err error + position int + length int + query string +} + +func (e parseError) Error() string { + errorLength := max(e.length, 1) + relevantQueryFragment, newStart := tf.TextSubstrWithContext(e.query, e.position, errorLength, 10, 20) + return fmt.Sprintf( + "%s\n\n%s\n%s%s%s\nCursor 
positions %d-%d in query.", + e.err, + relevantQueryFragment, + strings.Repeat("—", max(0, newStart)), + strings.Repeat("^", max(0, errorLength)), + strings.Repeat("—", max(0, len(relevantQueryFragment)-(newStart+errorLength))), + e.position, + e.position+errorLength, + ) +} + +func (e parseError) Original() error { + return e.err +} + +func (e parseError) Position() (int, int) { + return e.position, e.length +} + +func max(x int, y int) int { + return int(math.Max(float64(x), float64(y))) +} diff --git a/klog/service/kfl/filter.go b/klog/service/kfl/filter.go new file mode 100644 index 0000000..ca08686 --- /dev/null +++ b/klog/service/kfl/filter.go @@ -0,0 +1,23 @@ +package kfl + +import ( + "github.com/jotaen/klog/klog" +) + +func Filter(p Predicate, rs []klog.Record) []klog.Record { + var res []klog.Record + for _, r := range rs { + var es []klog.Entry + for i, e := range r.Entries() { + if p.Matches(queriedEntry{r, r.Entries()[i]}) { + es = append(es, e) + } + } + if len(es) == 0 { + continue + } + r.SetEntries(es) + res = append(res, r) + } + return res +} diff --git a/klog/service/kfl/filter_test.go b/klog/service/kfl/filter_test.go new file mode 100644 index 0000000..a79f581 --- /dev/null +++ b/klog/service/kfl/filter_test.go @@ -0,0 +1,154 @@ +package kfl + +import ( + "testing" + + "github.com/jotaen/klog/klog" + "github.com/jotaen/klog/klog/service" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func sampleRecordsForQuerying() []klog.Record { + return []klog.Record{ + func() klog.Record { + // Note that records without entries never match any query. 
+ r := klog.NewRecord(klog.Ɀ_Date_(1999, 12, 30)) + r.SetSummary(klog.Ɀ_RecordSummary_("Hello World", "#foo")) + return r + }(), func() klog.Record { + r := klog.NewRecord(klog.Ɀ_Date_(1999, 12, 31)) + r.AddDuration(klog.NewDuration(5, 0), klog.Ɀ_EntrySummary_("#bar")) + return r + }(), func() klog.Record { + r := klog.NewRecord(klog.Ɀ_Date_(2000, 1, 1)) + r.SetSummary(klog.Ɀ_RecordSummary_("#foo")) + r.AddDuration(klog.NewDuration(0, 15), nil) + r.AddDuration(klog.NewDuration(6, 0), klog.Ɀ_EntrySummary_("#bar")) + r.AddDuration(klog.NewDuration(0, -30), nil) + return r + }(), func() klog.Record { + r := klog.NewRecord(klog.Ɀ_Date_(2000, 1, 2)) + r.SetSummary(klog.Ɀ_RecordSummary_("#foo")) + r.AddDuration(klog.NewDuration(7, 0), nil) + return r + }(), func() klog.Record { + r := klog.NewRecord(klog.Ɀ_Date_(2000, 1, 3)) + r.SetSummary(klog.Ɀ_RecordSummary_("#foo=a")) + r.AddDuration(klog.NewDuration(4, 0), klog.Ɀ_EntrySummary_("test", "foo #bar=1")) + r.AddDuration(klog.NewDuration(4, 0), klog.Ɀ_EntrySummary_("#bar=2")) + r.Start(klog.NewOpenRange(klog.Ɀ_Time_(12, 00)), nil) + return r + }(), + } +} + +func TestQueryWithNoClauses(t *testing.T) { + rs := Filter(And{}, sampleRecordsForQuerying()) + require.Len(t, rs, 4) + assert.Equal(t, klog.NewDuration(5+6+7+8, -30+15), service.Total(rs...)) +} + +func TestQueryWithAtDate(t *testing.T) { + rs := Filter(IsInDateRange{ + From: klog.Ɀ_Date_(2000, 1, 2), + To: klog.Ɀ_Date_(2000, 1, 2), + }, sampleRecordsForQuerying()) + require.Len(t, rs, 1) + assert.Equal(t, klog.NewDuration(7, 0), service.Total(rs...)) +} + +func TestQueryWithAfter(t *testing.T) { + rs := Filter(IsInDateRange{ + From: klog.Ɀ_Date_(2000, 1, 1), + To: nil, + }, sampleRecordsForQuerying()) + require.Len(t, rs, 3) + assert.Equal(t, 1, rs[0].Date().Day()) + assert.Equal(t, 2, rs[1].Date().Day()) + assert.Equal(t, 3, rs[2].Date().Day()) +} + +func TestQueryWithBefore(t *testing.T) { + rs := Filter(IsInDateRange{ + From: nil, + To: klog.Ɀ_Date_(2000, 1, 1), 
+ }, sampleRecordsForQuerying()) + require.Len(t, rs, 2) + assert.Equal(t, 31, rs[0].Date().Day()) + assert.Equal(t, 1, rs[1].Date().Day()) +} + +func TestQueryWithTagOnEntries(t *testing.T) { + rs := Filter(HasTag{klog.NewTagOrPanic("bar", "")}, sampleRecordsForQuerying()) + require.Len(t, rs, 3) + assert.Equal(t, 31, rs[0].Date().Day()) + assert.Equal(t, 1, rs[1].Date().Day()) + assert.Equal(t, 3, rs[2].Date().Day()) + assert.Equal(t, klog.NewDuration(5+8+6, 0), service.Total(rs...)) +} + +func TestQueryWithTagOnOverallSummary(t *testing.T) { + rs := Filter(HasTag{klog.NewTagOrPanic("foo", "")}, sampleRecordsForQuerying()) + require.Len(t, rs, 3) + assert.Equal(t, 1, rs[0].Date().Day()) + assert.Equal(t, 2, rs[1].Date().Day()) + assert.Equal(t, 3, rs[2].Date().Day()) + assert.Equal(t, klog.NewDuration(6+7+8, -30+15), service.Total(rs...)) +} + +func TestQueryWithTagOnEntriesAndInSummary(t *testing.T) { + rs := Filter(And{[]Predicate{HasTag{klog.NewTagOrPanic("foo", "")}, HasTag{klog.NewTagOrPanic("bar", "")}}}, sampleRecordsForQuerying()) + require.Len(t, rs, 2) + assert.Equal(t, 1, rs[0].Date().Day()) + assert.Equal(t, 3, rs[1].Date().Day()) + assert.Equal(t, klog.NewDuration(8+6, 0), service.Total(rs...)) +} + +func TestQueryWithTagValues(t *testing.T) { + rs := Filter(HasTag{klog.NewTagOrPanic("foo", "a")}, sampleRecordsForQuerying()) + require.Len(t, rs, 1) + assert.Equal(t, 3, rs[0].Date().Day()) + assert.Equal(t, klog.NewDuration(8, 0), service.Total(rs...)) +} + +func TestQueryWithTagValuesInEntries(t *testing.T) { + rs := Filter(HasTag{klog.NewTagOrPanic("bar", "1")}, sampleRecordsForQuerying()) + require.Len(t, rs, 1) + assert.Equal(t, 3, rs[0].Date().Day()) + assert.Equal(t, klog.NewDuration(4, 0), service.Total(rs...)) +} + +func TestQueryWithTagNonMatchingValues(t *testing.T) { + rs := Filter(HasTag{klog.NewTagOrPanic("bar", "3")}, sampleRecordsForQuerying()) + require.Len(t, rs, 0) +} + +func TestQueryWithEntryTypes(t *testing.T) { + { + rs := 
Filter(IsEntryType{ENTRY_TYPE_DURATION}, sampleRecordsForQuerying()) + require.Len(t, rs, 4) + assert.Equal(t, klog.NewDuration(0, 1545), service.Total(rs...)) + } + { + rs := Filter(IsEntryType{ENTRY_TYPE_NEGATIVE_DURATION}, sampleRecordsForQuerying()) + require.Len(t, rs, 1) + assert.Equal(t, 1, rs[0].Date().Day()) + assert.Equal(t, klog.NewDuration(0, -30), service.Total(rs...)) + } + { + rs := Filter(IsEntryType{ENTRY_TYPE_POSITIVE_DURATION}, sampleRecordsForQuerying()) + require.Len(t, rs, 4) + assert.Equal(t, klog.NewDuration(0, 1575), service.Total(rs...)) + } + { + rs := Filter(IsEntryType{ENTRY_TYPE_RANGE}, sampleRecordsForQuerying()) + require.Len(t, rs, 0) + assert.Equal(t, klog.NewDuration(0, 0), service.Total(rs...)) + } + { + rs := Filter(IsEntryType{ENTRY_TYPE_OPEN_RANGE}, sampleRecordsForQuerying()) + require.Len(t, rs, 1) + assert.Equal(t, klog.NewDuration(0, 0), service.Total(rs...)) + } +} diff --git a/klog/service/kfl/parse.go b/klog/service/kfl/parse.go new file mode 100644 index 0000000..3040c32 --- /dev/null +++ b/klog/service/kfl/parse.go @@ -0,0 +1,189 @@ +package kfl + +import ( + "errors" + "fmt" + "strings" + + "github.com/jotaen/klog/klog" + "github.com/jotaen/klog/klog/service/period" +) + +var ( + ErrMalformedFilterQuery = errors.New("Malformed filter query") // This is only a just-in-case fallback. + ErrCannotMixAndOr = errors.New("Cannot mix && and || operators on the same level. Please use parenthesis () for grouping.") + errUnbalancedBrackets = errors.New("Missing") // Internal “base” class + ErrUnbalancedOpenBracket = fmt.Errorf("%w opening parenthesis. Please make sure that the number of opening and closing parentheses matches.", errUnbalancedBrackets) + ErrUnbalancedCloseBracket = fmt.Errorf("%w closing parenthesis. 
Please make sure that the number of opening and closing parentheses matches.", errUnbalancedBrackets) + errOperatorOperand = errors.New("Missing") // Internal “base” class + ErrOperatorExpected = fmt.Errorf("%w operator. Please put a logical operator ('&&' or '||') before this search operand.", errOperatorOperand) + ErrOperandExpected = fmt.Errorf("%w filter term. Please remove redundant logical operators.", errOperatorOperand) + ErrIllegalTokenValue = errors.New("Illegal value. Please make sure to use only valid operand values.") +) + +func Parse(filterQuery string) (Predicate, ParseError) { + p, pErr := func() (Predicate, ParseError) { + tokens, pErr := tokenise(filterQuery) + if pErr != nil { + return nil, pErr + } + tp := newTokenParser( + append(tokens, token{tokenCloseBracket, ")", len(filterQuery) - 1}), + ) + p, pErr := parseGroup(&tp) + if pErr != nil { + return nil, pErr + } + // Check whether there are tokens left, which would indicate + // unbalanced brackets. + if tp.next() != (token{}) { + return nil, parseError{ + err: ErrUnbalancedOpenBracket, + position: 0, + length: len(filterQuery), + } + } + return p, nil + }() + if pErr != nil { + if pErr, ok := pErr.(parseError); ok { + pErr.query = filterQuery + return nil, pErr + } + // Never drop a non-nil error: fall back to returning it as-is, + // even if it is not a concrete `parseError`. + return nil, pErr + } + return p, nil + } + +func parseGroup(tp *tokenParser) (Predicate, ParseError) { + g := newPredicateGroup() + + if pErr := tp.checkNextIsOperand(); pErr != nil { + return nil, pErr + } + + for { + tk := tp.next() + if tk == (token{}) { + return nil, parseError{ + err: ErrUnbalancedCloseBracket, + position: 0, + } + } + + switch tk.kind { + + case tokenOpenBracket: + if pErr := tp.checkNextIsOperand(); pErr != nil { + return nil, pErr + } + p, pErr := parseGroup(tp) + if pErr != nil { + return nil, pErr + } + g.append(p) + + case tokenCloseBracket: + if pErr := tp.checkNextIsOperatorOrEnd(); pErr != nil { + return nil, pErr + } + p, pErr := g.make() + return p, pErr + + case tokenDate: + if pErr := 
tp.checkNextIsOperatorOrEnd(); pErr != nil { + return nil, pErr + } + date, err := klog.NewDateFromString(tk.value) + if err != nil { + return nil, parseError{ + err: err, + position: tk.position, + length: len(tk.value), + } + } + g.append(IsInDateRange{date, date}) + + case tokenDateRange: + if pErr := tp.checkNextIsOperatorOrEnd(); pErr != nil { + return nil, pErr + } + dateBoundaries := []klog.Date{nil, nil} + bounds := strings.Split(tk.value, "...") + for i, v := range bounds { + if v == "" { + continue + } + date, err := klog.NewDateFromString(v) + if err != nil { + return nil, parseError{ + err: err, + position: tk.position, + length: len(tk.value), + } + } + dateBoundaries[i] = date + } + g.append(IsInDateRange{dateBoundaries[0], dateBoundaries[1]}) + + case tokenPeriod: + if pErr := tp.checkNextIsOperatorOrEnd(); pErr != nil { + return nil, pErr + } + prd, err := period.NewPeriodFromPatternString(tk.value) + if err != nil { + return nil, parseError{ + err: err, + position: tk.position, + length: len(tk.value), + } + } + g.append(IsInDateRange{prd.Since(), prd.Until()}) + + case tokenAnd, tokenOr: + if pErr := tp.checkNextIsOperand(); pErr != nil { + return nil, pErr + } + pErr := g.setOperator(tk, tk.position) + if pErr != nil { + return nil, pErr + } + + case tokenNot: + if pErr := tp.checkNextIsOperand(); pErr != nil { + return nil, pErr + } + g.negateNextOperand() + + case tokenEntryType: + if pErr := tp.checkNextIsOperatorOrEnd(); pErr != nil { + return nil, pErr + } + // TrimPrefix (not TrimLeft) removes the literal "type:" prefix; + // TrimLeft would strip any leading run of the characters t/y/p/e/:. + et, err := NewEntryTypeFromString(strings.TrimPrefix(tk.value, "type:")) + if err != nil { + return nil, parseError{ + err: err, + position: tk.position, + length: len(tk.value), + } + } + g.append(IsEntryType{et}) + + case tokenTag: + if pErr := tp.checkNextIsOperatorOrEnd(); pErr != nil { + return nil, pErr + } + tag, err := klog.NewTagFromString(tk.value) + if err != nil { + return nil, parseError{ + err: err, + position: tk.position, + length: len(tk.value), + } + } + 
g.append(HasTag{tag}) + + default: + panic("Unrecognized token") + } + } +} diff --git a/klog/service/kfl/parse_test.go b/klog/service/kfl/parse_test.go new file mode 100644 index 0000000..abe3114 --- /dev/null +++ b/klog/service/kfl/parse_test.go @@ -0,0 +1,239 @@ +package kfl + +import ( + "testing" + + "github.com/jotaen/klog/klog" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +type et struct { // “Error Test” + input string + pos int + len int +} + +func TestAtDate(t *testing.T) { + p, err := Parse("2020-03-01") + require.Nil(t, err) + assert.Equal(t, + IsInDateRange{klog.Ɀ_Date_(2020, 3, 1), klog.Ɀ_Date_(2020, 3, 1)}, + p) +} + +func TestAndOperator(t *testing.T) { + p, err := Parse("2020-01-01 && #hello") + require.Nil(t, err) + assert.Equal(t, + And{[]Predicate{ + IsInDateRange{klog.Ɀ_Date_(2020, 1, 1), klog.Ɀ_Date_(2020, 1, 1)}, + HasTag{klog.NewTagOrPanic("hello", "")}, + }}, p) +} + +func TestOrOperator(t *testing.T) { + p, err := Parse("#foo || 1999-12-31") + require.Nil(t, err) + assert.Equal(t, + Or{[]Predicate{ + HasTag{klog.NewTagOrPanic("foo", "")}, + IsInDateRange{klog.Ɀ_Date_(1999, 12, 31), klog.Ɀ_Date_(1999, 12, 31)}, + }}, p) +} + +func TestCannotMixAndOrOnSameLevel(t *testing.T) { + for _, tt := range []string{ + "#foo || 1999-12-31 && 2000-01-02", + "#foo || 1999-12-31 || 2000-01-02 && 2020-07-24", + "#foo && 1999-12-31 || 2000-01-02", + "#foo && 1999-12-31 && 2000-01-02 || 2020-07-24", + "#foo && (1999-12-31 || 2000-01-02 && 2021-05-17)", + "#foo || (1999-12-31 && 2000-01-02 || 2021-05-17)", + } { + t.Run(tt, func(t *testing.T) { + p, err := Parse(tt) + require.ErrorIs(t, err.Original(), ErrCannotMixAndOr) + require.Nil(t, p) + }) + } +} + +func TestNotOperator(t *testing.T) { + p, err := Parse("!2020-01-01 && !#hello && !(2021-04-05 || #foo)") + require.Nil(t, err) + assert.Equal(t, + And{[]Predicate{ + Not{ + IsInDateRange{klog.Ɀ_Date_(2020, 1, 1), klog.Ɀ_Date_(2020, 1, 1)}, + }, + Not{ + 
HasTag{klog.NewTagOrPanic("hello", "")}, + }, + Not{ + Or{[]Predicate{ + IsInDateRange{klog.Ɀ_Date_(2021, 4, 5), klog.Ɀ_Date_(2021, 4, 5)}, + HasTag{klog.NewTagOrPanic("foo", "")}, + }}, + }, + }}, p) +} + +func TestGrouping(t *testing.T) { + p, err := Parse("(#foo || #bar || #xyz) && 1999-12-31") + require.Nil(t, err) + assert.Equal(t, + And{[]Predicate{ + Or{[]Predicate{ + HasTag{klog.NewTagOrPanic("foo", "")}, + HasTag{klog.NewTagOrPanic("bar", "")}, + HasTag{klog.NewTagOrPanic("xyz", "")}, + }}, + IsInDateRange{klog.Ɀ_Date_(1999, 12, 31), klog.Ɀ_Date_(1999, 12, 31)}, + }}, p) +} + +func TestNestedGrouping(t *testing.T) { + p, err := Parse("((#foo && (#bar || #xyz)) && 1999-12-31) || 1970-03-12") + require.Nil(t, err) + assert.Equal(t, + Or{[]Predicate{ + And{[]Predicate{ + And{[]Predicate{ + HasTag{klog.NewTagOrPanic("foo", "")}, + Or{[]Predicate{ + HasTag{klog.NewTagOrPanic("bar", "")}, + HasTag{klog.NewTagOrPanic("xyz", "")}, + }}, + }}, + IsInDateRange{klog.Ɀ_Date_(1999, 12, 31), klog.Ɀ_Date_(1999, 12, 31)}, + }}, + IsInDateRange{klog.Ɀ_Date_(1970, 3, 12), klog.Ɀ_Date_(1970, 03, 12)}, + }}, p) +} + +func TestClosedDateRange(t *testing.T) { + p, err := Parse("2020-03-06...2020-04-22") + require.Nil(t, err) + assert.Equal(t, + IsInDateRange{klog.Ɀ_Date_(2020, 3, 6), klog.Ɀ_Date_(2020, 4, 22)}, + p) +} + +func TestOpenDateRangeSince(t *testing.T) { + p, err := Parse("2020-03-01...") + require.Nil(t, err) + assert.Equal(t, + IsInDateRange{klog.Ɀ_Date_(2020, 3, 1), nil}, + p) +} + +func TestOpenDateRangeUntil(t *testing.T) { + p, err := Parse("...2020-03-01") + require.Nil(t, err) + assert.Equal(t, + IsInDateRange{nil, klog.Ɀ_Date_(2020, 3, 1)}, + p) +} + +func TestPeriod(t *testing.T) { + p, err := Parse("2020 || 2021-Q2 || 2022-08 || 2023-W46") + require.Nil(t, err) + assert.Equal(t, + Or{[]Predicate{ + IsInDateRange{klog.Ɀ_Date_(2020, 1, 1), klog.Ɀ_Date_(2020, 12, 31)}, + IsInDateRange{klog.Ɀ_Date_(2021, 4, 1), klog.Ɀ_Date_(2021, 6, 30)}, + 
IsInDateRange{klog.Ɀ_Date_(2022, 8, 1), klog.Ɀ_Date_(2022, 8, 31)}, + IsInDateRange{klog.Ɀ_Date_(2023, 11, 13), klog.Ɀ_Date_(2023, 11, 19)}, + }}, p) +} + +func TestTags(t *testing.T) { + p, err := Parse("#tag || #tag-with=value || #tag-with='quoted value'") + require.Nil(t, err) + assert.Equal(t, + Or{[]Predicate{ + HasTag{klog.NewTagOrPanic("tag", "")}, + HasTag{klog.NewTagOrPanic("tag-with", "value")}, + HasTag{klog.NewTagOrPanic("tag-with", "quoted value")}, + }}, p) +} + +func TestOpeningBracketMismatch(t *testing.T) { + for _, tt := range []et{ + {"(2020-01", 0, 0}, + {"((2020-01", 0, 0}, + {"(2020-01-01 && (2020-02-02 || 2020-03-03", 0, 0}, + } { + t.Run(tt.input, func(t *testing.T) { + p, err := Parse(tt.input) + require.Nil(t, p) + require.ErrorIs(t, err.Original(), errUnbalancedBrackets) + pos, len := err.Position() + assert.Equal(t, tt.pos, pos) + assert.Equal(t, tt.len, len) + }) + } +} + +func TestClosingBracketMismatch(t *testing.T) { + for _, tt := range []et{ + {"(2020-01-01))", 0, 13}, + {"2020-01-01)", 0, 11}, + {"(2020-01-01 && (2020-02-02))) || 2020-03-03", 0, 43}, + } { + t.Run(tt.input, func(t *testing.T) { + p, err := Parse(tt.input) + require.Nil(t, p) + require.ErrorIs(t, err.Original(), errUnbalancedBrackets) + pos, len := err.Position() + assert.Equal(t, tt.pos, pos) + assert.Equal(t, tt.len, len) + }) + } +} + +func TestOperatorOperandSequence(t *testing.T) { + for _, tt := range []string{ + // Operands: (date, date-range, period, tag) + "2020-01-01 2020-02-02", + "2020-01-01 (#foo && #bar)", + "(#foo && #bar) 2020-01-01", + "(#foo && #bar) #foo", + "2020-01-01...2020-02-28 #foo", + "2020-01-01... 
#foo", + "...2020-01-01 #foo", + "2020-01 2020-02", + "2020-01-01 #foo", + "2020-01 #foo", + "#foo 2020-01-01", + "#foo 2020-01", + "#foo #foo", + "type:duration #foo", + "#foo type:duration", + "2020 type:duration", + "type:duration 2025-Q4", + + // And: + "2020-01-01 && #tag #foo", + "2020-01-01 && && 2020-02-02", + "2020-01-01 && ( && #foo)", + + // Or: + "2020-01-01 || #tag #foo", + "2020-01-01 || || 2020-02-02", + "2020-01-01 && ( || #foo)", + + // Not: + "!&& #foo", + "!|| #foo", + "(!) #foo", + "#foo !", + } { + t.Run(tt, func(t *testing.T) { + p, err := Parse(tt) + require.ErrorIs(t, err.Original(), errOperatorOperand) + require.Nil(t, p) + }) + } +} diff --git a/klog/service/kfl/predicate.go b/klog/service/kfl/predicate.go new file mode 100644 index 0000000..1fa3693 --- /dev/null +++ b/klog/service/kfl/predicate.go @@ -0,0 +1,128 @@ +package kfl + +import ( + "errors" + "strings" + + "github.com/jotaen/klog/klog" +) + +type queriedEntry struct { + parent klog.Record + entry klog.Entry +} + +type Predicate interface { + Matches(queriedEntry) bool +} + +type IsInDateRange struct { + From klog.Date + To klog.Date +} + +func (i IsInDateRange) Matches(e queriedEntry) bool { + isAfter := func() bool { + if i.From == nil { + return true + } + return e.parent.Date().IsAfterOrEqual(i.From) + }() + isBefore := func() bool { + if i.To == nil { + return true + } + return i.To.IsAfterOrEqual(e.parent.Date()) + }() + return isAfter && isBefore +} + +type HasTag struct { + Tag klog.Tag +} + +func (h HasTag) Matches(e queriedEntry) bool { + return e.parent.Summary().Tags().Contains(h.Tag) || e.entry.Summary().Tags().Contains(h.Tag) +} + +type And struct { + Predicates []Predicate +} + +func (a And) Matches(e queriedEntry) bool { + for _, p := range a.Predicates { + if !p.Matches(e) { + return false + } + } + return true +} + +type Or struct { + Predicates []Predicate +} + +func (o Or) Matches(e queriedEntry) bool { + for _, p := range o.Predicates { + if p.Matches(e) { + 
return true + } + } + return false +} + +type Not struct { + Predicate Predicate +} + +func (n Not) Matches(e queriedEntry) bool { + return !n.Predicate.Matches(e) +} + +type EntryType string + +const ( + ENTRY_TYPE_DURATION = EntryType("duration") + ENTRY_TYPE_POSITIVE_DURATION = EntryType("duration-positive") + ENTRY_TYPE_NEGATIVE_DURATION = EntryType("duration-negative") + ENTRY_TYPE_RANGE = EntryType("range") + ENTRY_TYPE_OPEN_RANGE = EntryType("open-range") +) + +func NewEntryTypeFromString(val string) (EntryType, error) { + for _, t := range []EntryType{ + ENTRY_TYPE_DURATION, + ENTRY_TYPE_POSITIVE_DURATION, + ENTRY_TYPE_NEGATIVE_DURATION, + ENTRY_TYPE_RANGE, + ENTRY_TYPE_OPEN_RANGE, + } { + if strings.ToLower(strings.ReplaceAll(val, "_", "-")) == string(t) { + return t, nil + } + } + return EntryType(""), errors.New("Illegal entry type") +} + +type IsEntryType struct { + Type EntryType +} + +func (t IsEntryType) Matches(e queriedEntry) bool { + return klog.Unbox[bool](&e.entry, func(r klog.Range) bool { + return t.Type == ENTRY_TYPE_RANGE + }, func(duration klog.Duration) bool { + if t.Type == ENTRY_TYPE_DURATION { + return true + } + if t.Type == ENTRY_TYPE_POSITIVE_DURATION && e.entry.Duration().InMinutes() >= 0 { + return true + } + if t.Type == ENTRY_TYPE_NEGATIVE_DURATION && e.entry.Duration().InMinutes() < 0 { + return true + } + return false + }, func(openRange klog.OpenRange) bool { + return t.Type == ENTRY_TYPE_OPEN_RANGE + }) +} diff --git a/klog/service/kfl/tokenise.go b/klog/service/kfl/tokenise.go new file mode 100644 index 0000000..f02e8bc --- /dev/null +++ b/klog/service/kfl/tokenise.go @@ -0,0 +1,151 @@ +package kfl + +import ( + "errors" + "regexp" +) + +type tokenKind int + +const ( + tokenOpenBracket tokenKind = iota + tokenCloseBracket + tokenAnd + tokenOr + tokenNot + tokenDate + tokenPeriod + tokenDateRange + tokenTag + tokenEntryType +) + +type token struct { + kind tokenKind + value string + position int +} + +var ( + tagRegex = 
regexp.MustCompile(`^(#([\p{L}\d_-]+)(=(("[^"]*")|('[^']*')|([\p{L}\d_-]*)))?)`) + dateRangeRegex = regexp.MustCompile(`^((\d{4}-\d{2}-\d{2})?\.\.\.(\d{4}-\d{2}-\d{2})?)`) + dateRegex = regexp.MustCompile(`^(\d{4}-\d{2}-\d{2})`) + periodRegex = regexp.MustCompile(`^((\d{4}-\p{L}?\d+)|(\d{4}))`) + typeRegex = regexp.MustCompile(`^(type:[\p{L}\-_]+)`) +) + +var ( + ErrMissingWhiteSpace = errors.New("Missing whitespace. Please separate operands and operators with whitespace.") + ErrUnrecognisedToken = errors.New("Unrecognised query token. Please make sure to use valid query syntax.") +) + +func tokenise(filterQuery string) ([]token, ParseError) { + txtParser := newTextParser(filterQuery) + tokens := []token{} + for { + if txtParser.isFinished() { + break + } + + if txtParser.peekString(" ") { + txtParser.advance(1) + } else if txtParser.peekString("(") { + tokens = append(tokens, token{tokenOpenBracket, "(", txtParser.pointer}) + txtParser.advance(1) + } else if txtParser.peekString(")") { + tokens = append(tokens, token{tokenCloseBracket, ")", txtParser.pointer}) + txtParser.advance(1) + if !txtParser.peekString(EOT, " ", ")") { + return nil, parseError{ + err: ErrMissingWhiteSpace, + position: txtParser.pointer - 1, + length: 1, + } + } + } else if txtParser.peekString("&&") { + tokens = append(tokens, token{tokenAnd, "&&", txtParser.pointer}) + txtParser.advance(2) + if !txtParser.peekString(EOT, " ") { + return nil, parseError{ + err: ErrMissingWhiteSpace, + position: txtParser.pointer, + length: 1, + } + } + } else if txtParser.peekString("||") { + tokens = append(tokens, token{tokenOr, "||", txtParser.pointer}) + txtParser.advance(2) + if !txtParser.peekString(EOT, " ") { + return nil, parseError{ + err: ErrMissingWhiteSpace, + position: txtParser.pointer, + length: 1, + } + } + } else if txtParser.peekString("!") { + tokens = append(tokens, token{tokenNot, "!", txtParser.pointer}) + txtParser.advance(1) + } else if tm := txtParser.peekRegex(tagRegex); tm != nil 
{ + value := tm[1] + tokens = append(tokens, token{tokenTag, value, txtParser.pointer}) + txtParser.advance(len(value)) + if !txtParser.peekString(EOT, " ", ")") { + return nil, parseError{ + err: ErrMissingWhiteSpace, + position: txtParser.pointer, + length: 1, + } + } + } else if ym := txtParser.peekRegex(typeRegex); ym != nil { + tokens = append(tokens, token{tokenEntryType, ym[1], txtParser.pointer}) + txtParser.advance(len(ym[1])) + if !txtParser.peekString(EOT, " ", ")") { + return nil, parseError{ + err: ErrMissingWhiteSpace, + position: txtParser.pointer - 1, + length: 1, + } + } + } else if rm := txtParser.peekRegex(dateRangeRegex); rm != nil { + value := rm[1] + tokens = append(tokens, token{tokenDateRange, value, txtParser.pointer}) + txtParser.advance(len(value)) + if !txtParser.peekString(EOT, " ", ")") { + return nil, parseError{ + err: ErrMissingWhiteSpace, + position: txtParser.pointer - 1, + length: 1, + } + } + } else if dm := txtParser.peekRegex(dateRegex); dm != nil { + value := dm[1] + tokens = append(tokens, token{tokenDate, value, txtParser.pointer}) + txtParser.advance(len(value)) + if !txtParser.peekString(EOT, " ", ")") { + return nil, parseError{ + err: ErrMissingWhiteSpace, + position: txtParser.pointer - 1, + length: 1, + } + } + } else if pm := txtParser.peekRegex(periodRegex); pm != nil { + value := pm[1] + tokens = append(tokens, token{tokenPeriod, value, txtParser.pointer}) + txtParser.advance(len(value)) + if !txtParser.peekString(EOT, " ", ")") { + return nil, parseError{ + err: ErrMissingWhiteSpace, + position: txtParser.pointer - 1, + length: 1, + } + } + } else { + return nil, parseError{ + err: ErrUnrecognisedToken, + position: txtParser.pointer, + length: 1, + } + } + } + return tokens, nil +} diff --git a/klog/service/kfl/tokenise_test.go b/klog/service/kfl/tokenise_test.go new file mode 100644 index 0000000..6280257 --- /dev/null +++ b/klog/service/kfl/tokenise_test.go @@ -0,0 +1,124 @@ +package kfl + +import ( + "testing" 
+ + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestTokeniseEmptyToken(t *testing.T) { + { // Empty + p, err := tokenise("") + require.Nil(t, err) + assert.Equal(t, p, []token{}) + } + { // Blank + p, err := tokenise(" ") + require.Nil(t, err) + assert.Equal(t, p, []token{}) + } +} + +func TestTokeniseAllTokens(t *testing.T) { + p, err := tokenise("2020-01-01 && #hello || (2020-02-02 && !2021-Q4) && type:duration") + require.Nil(t, err) + assert.Equal(t, []token{ + {tokenDate, "2020-01-01", 0}, + {tokenAnd, "&&", 11}, + {tokenTag, "#hello", 14}, + {tokenOr, "||", 21}, + {tokenOpenBracket, "(", 24}, + {tokenDate, "2020-02-02", 25}, + {tokenAnd, "&&", 36}, + {tokenNot, "!", 39}, + {tokenPeriod, "2021-Q4", 40}, + {tokenCloseBracket, ")", 47}, + {tokenAnd, "&&", 49}, + {tokenEntryType, "type:duration", 52}, + }, p) +} + +func TestDisregardWhitespaceBetweenTokens(t *testing.T) { + p, err := tokenise(" 2020-01-01 && #hello || ( 2020-02-02 && ! 2021-Q4 ) && type:duration") + require.Nil(t, err) + assert.Equal(t, []token{ + {tokenDate, "2020-01-01", 3}, + {tokenAnd, "&&", 17}, + {tokenTag, "#hello", 24}, + {tokenOr, "||", 34}, + {tokenOpenBracket, "(", 40}, + {tokenDate, "2020-02-02", 44}, + {tokenAnd, "&&", 57}, + {tokenNot, "!", 62}, + {tokenPeriod, "2021-Q4", 66}, + {tokenCloseBracket, ")", 75}, + {tokenAnd, "&&", 78}, + {tokenEntryType, "type:duration", 84}, + }, p) +} + +func TestFailsOnUnrecognisedToken(t *testing.T) { + for _, txt := range []string{ + "abcde", + "2020-01-01 & 2020-01-02", + "2020-01-01 * 2020-01-02", + "2020-01-01 {2020-01-02}", + } { + t.Run(txt, func(t *testing.T) { + p, err := tokenise(txt) + require.ErrorIs(t, err.Original(), ErrUnrecognisedToken) + assert.Nil(t, p) + }) + } +} + +func TestFailsOnMissingWhitespace(t *testing.T) { + for _, txt := range []string{ + "2021-12-12 &&&", + "2021-12-12 &&&&", + "2021-12-12 &&||", + "2021-12-12 &&2021-12-12", + "2021-12-12 &&#tag", + "2021-12-12 &&(2021-12-12 
|| #foo)", + + "2021-12-12 |||", + "2021-12-12 ||||", + "2021-12-12 ||&&", + "2021-12-12 ||2021-12-12", + "2021-12-12 ||#tag", + "2021-12-12 ||(2021-12-12 || #foo)", + + "(#foo)(#bar)", + "( #foo )( #bar )", + + "2020-01-01&&", + "2020-01-01||", + "2020-01-01( #foo )", + "2020-01-01#foo", + + "2020-01-01...2020-01-31&&", + "2020-01-01...2020-01-31( #foo )", + "2020-01-01...&&", + "2020-01-01...( #foo )", + + "(2021-12-12 || #foo)2020-01-01", + "(2021-12-12 || #foo)&& #foo", + "(2021-12-12 || #foo)#foo", + + "#tag&& #tag", + "#tag|| #tag", + "#tag( 2020-01-01)", + + "2020-Q4&&", + "2020-Q4||", + "2020-Q4( 2020-01-01 )", + "2020-Q4!( 2020-01-01 )", + } { + t.Run(txt, func(t *testing.T) { + p, err := tokenise(txt) + require.ErrorIs(t, err.Original(), ErrMissingWhiteSpace) + assert.Nil(t, p) + }) + } +} diff --git a/klog/service/kfl/util.go b/klog/service/kfl/util.go new file mode 100644 index 0000000..d357ff5 --- /dev/null +++ b/klog/service/kfl/util.go @@ -0,0 +1,170 @@ +package kfl + +import ( + "regexp" + "strings" +) + +const EOT = "" // End of text + +type textParser struct { + text string + pointer int +} + +func newTextParser(text string) textParser { + return textParser{ + text: text, + pointer: 0, + } +} + +func (t *textParser) isFinished() bool { + return t.pointer == len(t.text) +} + +func (t *textParser) peekString(lookup ...string) bool { + r := t.remainder() + for _, l := range lookup { + if l == EOT { + if r == EOT { + return true + } + } else if strings.HasPrefix(r, l) { + return true + } + } + return false +} + +func (t *textParser) peekRegex(lookup *regexp.Regexp) []string { + return lookup.FindStringSubmatch(t.remainder()) +} + +func (t *textParser) advance(i int) { + t.pointer += i +} + +func (t *textParser) remainder() string { + if t.isFinished() { + return "" + } + return t.text[t.pointer:] +} + +type tokenParser struct { + tokens []token + pointer int +} + +func newTokenParser(ts []token) tokenParser { + return tokenParser{ + tokens: ts, + 
pointer: 0, + } +} + +func (t *tokenParser) next() token { + if t.pointer >= len(t.tokens) { + return token{} + } + next := t.tokens[t.pointer] + t.pointer += 1 + return next +} + +func (t *tokenParser) checkNextIsOperand() ParseError { + if t.pointer >= len(t.tokens) { + return parseError{ + err: ErrOperandExpected, + position: t.tokens[len(t.tokens)-1].position, + length: 1, + } + } + for _, k := range []tokenKind{ + tokenOpenBracket, tokenTag, tokenDate, tokenDateRange, tokenPeriod, tokenNot, tokenEntryType, + } { + if t.tokens[t.pointer].kind == k { + return nil + } + } + return parseError{ + err: ErrOperandExpected, + position: t.tokens[t.pointer].position, + length: len(t.tokens[t.pointer].value), + } +} + +func (t *tokenParser) checkNextIsOperatorOrEnd() ParseError { + if t.pointer >= len(t.tokens) { + return nil + } + for _, k := range []tokenKind{ + tokenCloseBracket, tokenAnd, tokenOr, + } { + if t.tokens[t.pointer].kind == k { + return nil + } + } + return parseError{ + err: ErrOperatorExpected, + position: t.tokens[t.pointer].position, + length: len(t.tokens[t.pointer].value), + } +} + +type predicateGroup struct { + ps []Predicate + operator tokenKind // -1 (unset) or tokenAnd or tokenOr + isNextNegated bool +} + +func newPredicateGroup() predicateGroup { + return predicateGroup{ + ps: nil, + operator: -1, + isNextNegated: false, + } +} + +func (g *predicateGroup) append(p Predicate) { + if g.isNextNegated { + g.isNextNegated = false + p = Not{p} + } + g.ps = append(g.ps, p) +} + +func (g *predicateGroup) setOperator(operatorT token, position int) ParseError { + if g.operator == -1 { + g.operator = operatorT.kind + } + if g.operator != operatorT.kind { + return parseError{ + err: ErrCannotMixAndOr, + position: position, + length: 2, + } + } + return nil +} + +func (g *predicateGroup) negateNextOperand() { + g.isNextNegated = true +} + +func (g *predicateGroup) make() (Predicate, ParseError) { + if len(g.ps) == 1 { + return g.ps[0], nil + } else if 
g.operator == tokenAnd { + return And{g.ps}, nil + } else if g.operator == tokenOr { + return Or{g.ps}, nil + } else { + // This would happen for an empty group. + return nil, parseError{ + err: ErrMalformedFilterQuery, + position: 0, + } + } +} diff --git a/lib/terminalformat/substr.go b/lib/terminalformat/substr.go new file mode 100644 index 0000000..a28ff04 --- /dev/null +++ b/lib/terminalformat/substr.go @@ -0,0 +1,60 @@ +package terminalformat + +import "unicode/utf8" + +// TextSubstrWithContext returns a fragment of a string like a regular `substr` +// method would do. However, it returns a bit of surrounding text for context. +// The surrounding text is between `minSurroundingRunes` and `maxSurroundingRunes` +// long, and it tries to find a word boundary (space character) as natural cut-off. +// Only if it cannot find one, it makes a hard cut. +func TextSubstrWithContext(text string, start int, length int, minSurroundingRunes int, maxSurroundingRunes int) (string, int) { + if start < 0 || length < 0 || start >= len(text) { + return "", 0 + } + + end := start + length + if end > len(text) { + end = len(text) + } + + fuzzyStart := start + charCount := 0 + + for fuzzyStart > 0 && charCount < maxSurroundingRunes { + _, size := utf8.DecodeLastRuneInString(text[:fuzzyStart]) + if size == 0 { + break + } + fuzzyStart -= size + charCount++ + + if charCount >= minSurroundingRunes && text[fuzzyStart] == ' ' { + break + } + } + + if fuzzyStart < len(text) && text[fuzzyStart] == ' ' { + fuzzyStart++ + } + + fuzzyEnd := end + charCount = 0 + + for fuzzyEnd < len(text) && charCount < maxSurroundingRunes { + r, size := utf8.DecodeRuneInString(text[fuzzyEnd:]) + if r == utf8.RuneError && size == 1 { + break + } + + if charCount >= minSurroundingRunes && r == ' ' { + break + } + + fuzzyEnd += size + charCount++ + } + + translatedPos := start - fuzzyStart + + return text[fuzzyStart:fuzzyEnd], translatedPos +}