
Commit

WIP - Remove unused reverse prop
It hasn't been used in 3 years; it can die and come back if/when needed.
AndrewSisley committed Nov 27, 2024
1 parent 8509211 commit 32be1e0
Showing 10 changed files with 16 additions and 50 deletions.
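In short, this commit drops the never-used reverse parameter from Fetcher.Init, along with the matching reverse field and the plumbing that threaded it through every implementation and call site. A rough sketch of what the interface change amounts to — placeholder types stand in for the real client, mapper, and core packages, and the leading identity/txn/acp/col parameters (untouched by this diff) are elided:

// Sketch only: placeholder stand-ins so the signature shape compiles on its own.
package sketch

import "context"

type (
	FieldDefinition struct{}
	Filter          struct{}
	DocumentMapping struct{}
)

// Before this commit: the reverse flag existed but, per the commit message, had
// gone unused for years.
type fetcherBefore interface {
	Init(
		ctx context.Context,
		// ... identity, txn, acp, col elided ...
		fields []FieldDefinition,
		filter *Filter,
		docmapper *DocumentMapping,
		reverse bool,
		showDeleted bool,
	) error
}

// After this commit: the reverse flag is gone; everything else is unchanged.
type fetcherAfter interface {
	Init(
		ctx context.Context,
		// ... identity, txn, acp, col elided ...
		fields []FieldDefinition,
		filter *Filter,
		docmapper *DocumentMapping,
		showDeleted bool,
	) error
}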
internal/db/collection_get.go (2 changes: 1 addition & 1 deletion)
@@ -63,7 +63,7 @@ func (c *collection) get(
// create a new document fetcher
df := c.newFetcher()
// initialize it with the primary index
- err := df.Init(ctx, identity.FromContext(ctx), txn, c.db.acp, c, fields, nil, nil, false, showDeleted)
+ err := df.Init(ctx, identity.FromContext(ctx), txn, c.db.acp, c, fields, nil, nil, showDeleted)
if err != nil {
_ = df.Close()
return nil, err
internal/db/collection_index.go (1 change: 0 additions & 1 deletion)
@@ -310,7 +310,6 @@ func (c *collection) iterateAllDocs(
nil,
nil,
- false,
false,
)
if err != nil {
return errors.Join(err, df.Close())
internal/db/fetcher/fetcher.go (25 changes: 4 additions & 21 deletions)
@@ -70,7 +70,6 @@ type Fetcher interface {
fields []client.FieldDefinition,
filter *mapper.Filter,
docmapper *core.DocumentMapping,
- reverse bool,
showDeleted bool,
) error
Start(ctx context.Context, prefixes ...keys.Walkable) error
@@ -95,7 +94,6 @@ type DocumentFetcher struct {
passedPermissionCheck bool // have valid permission to access

col client.Collection
- reverse bool
deletedDocs bool

txn datastore.Txn
@@ -156,12 +154,11 @@ func (df *DocumentFetcher) Init(
fields []client.FieldDefinition,
filter *mapper.Filter,
docmapper *core.DocumentMapping,
- reverse bool,
showDeleted bool,
) error {
df.txn = txn

- err := df.init(identity, acp, col, fields, filter, docmapper, reverse)
+ err := df.init(identity, acp, col, fields, filter, docmapper)
if err != nil {
return err
}
@@ -171,7 +168,7 @@
df.deletedDocFetcher = new(DocumentFetcher)
df.deletedDocFetcher.txn = txn
}
- return df.deletedDocFetcher.init(identity, acp, col, fields, filter, docmapper, reverse)
+ return df.deletedDocFetcher.init(identity, acp, col, fields, filter, docmapper)
}

return nil
@@ -184,12 +181,10 @@ func (df *DocumentFetcher) init(
fields []client.FieldDefinition,
filter *mapper.Filter,
docMapper *core.DocumentMapping,
- reverse bool,
) error {
df.identity = identity
df.acp = acp
df.col = col
- df.reverse = reverse
df.initialized = true
df.filter = filter
df.isReadingDocument = false
@@ -300,20 +295,10 @@ func (df *DocumentFetcher) start(ctx context.Context, prefixes []keys.Walkable,
return strings.Compare(a.ToString(), b.ToString())
})

- if df.reverse {
- for i, j := 0, len(valuePrefixes)-1; i < j; i, j = i+1, j-1 {
- valuePrefixes[i], valuePrefixes[j] = valuePrefixes[j], valuePrefixes[i]
- }
- }
df.prefixes = valuePrefixes
}
df.curPrefixIndex = -1

- if df.reverse {
- df.order = []dsq.Order{dsq.OrderByKeyDescending{}}
- } else {
- df.order = []dsq.Order{dsq.OrderByKey{}}
- }
+ df.order = []dsq.Order{dsq.OrderByKey{}}

_, err := df.startNextPrefix(ctx)
return err
@@ -571,9 +556,7 @@ func (df *DocumentFetcher) FetchNext(ctx context.Context) (EncodedDocument, Exec
if ddf != nil {
// If we've reached the end of the deleted docs, we can skip to getting the next active docs.
if !ddf.kvEnd {
- if df.kvEnd ||
- (df.reverse && ddf.kv.Key.DocID > df.kv.Key.DocID) ||
- (!df.reverse && ddf.kv.Key.DocID < df.kv.Key.DocID) {
+ if df.kvEnd || ddf.kv.Key.DocID < df.kv.Key.DocID {
encdoc, execInfo, err := ddf.FetchNext(ctx)

if err != nil {
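Pieced together from the deleted lines above, the reverse support being removed looked roughly like this — a reconstruction for reference only (dsq is the datastore query package already imported by fetcher.go), kept here as a reminder of what would have to come back if reverse iteration is ever reintroduced:

// In start(): when reverse was requested, the sorted value prefixes were walked back to front...
if df.reverse {
	for i, j := 0, len(valuePrefixes)-1; i < j; i, j = i+1, j-1 {
		valuePrefixes[i], valuePrefixes[j] = valuePrefixes[j], valuePrefixes[i]
	}
}

// ...and the datastore query was ordered by key descending instead of ascending.
if df.reverse {
	df.order = []dsq.Order{dsq.OrderByKeyDescending{}}
} else {
	df.order = []dsq.Order{dsq.OrderByKey{}}
}

// In FetchNext(): when merging deleted and active documents, the DocID comparison
// had to match the iteration direction.
if df.kvEnd ||
	(df.reverse && ddf.kv.Key.DocID > df.kv.Key.DocID) ||
	(!df.reverse && ddf.kv.Key.DocID < df.kv.Key.DocID) {
	// yield the next document from the deleted-docs fetcher
}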
internal/db/fetcher/indexer.go (2 changes: 0 additions & 2 deletions)
@@ -65,7 +65,6 @@ func (f *IndexFetcher) Init(
fields []client.FieldDefinition,
filter *mapper.Filter,
docMapper *core.DocumentMapping,
- reverse bool,
showDeleted bool,
) error {
f.resetState()
@@ -118,7 +117,6 @@ outer:
filter,
f.mapping,
- false,
false,
)
}

internal/db/fetcher/mocks/fetcher.go (21 changes: 10 additions & 11 deletions)

Generated file — diff not rendered.

internal/db/fetcher/mocks/utils.go (1 change: 0 additions & 1 deletion)
@@ -28,7 +28,6 @@ func NewStubbedFetcher(t *testing.T) *Fetcher {
mock.Anything,
mock.Anything,
mock.Anything,
- mock.Anything,
).Maybe().Return(nil)
f.EXPECT().Start(mock.Anything, mock.Anything).Maybe().Return(nil)
f.EXPECT().FetchNext(mock.Anything).Maybe().Return(nil, nil)
internal/db/fetcher/versioned.go (2 changes: 0 additions & 2 deletions)
@@ -106,7 +106,6 @@ func (vf *VersionedFetcher) Init(
fields []client.FieldDefinition,
filter *mapper.Filter,
docmapper *core.DocumentMapping,
- reverse bool,
showDeleted bool,
) error {
vf.acp = acp
@@ -141,7 +140,6 @@ func (vf *VersionedFetcher) Init(
fields,
filter,
docmapper,
- reverse,
showDeleted,
)
}
internal/db/indexed_docs_test.go (8 changes: 1 addition & 7 deletions)
@@ -606,7 +606,6 @@ func TestNonUniqueCreate_IfUponIndexingExistingDocsFetcherFails_ReturnError(t *t
mock.Anything,
mock.Anything,
mock.Anything,
- mock.Anything,
).Unset()
f.EXPECT().Init(
mock.Anything,
@@ -618,7 +617,6 @@ func TestNonUniqueCreate_IfUponIndexingExistingDocsFetcherFails_ReturnError(t *t
mock.Anything,
mock.Anything,
mock.Anything,
- mock.Anything,
).Return(testError)
f.EXPECT().Close().Unset()
f.EXPECT().Close().Return(nil)
@@ -839,7 +837,6 @@ func TestNonUniqueUpdate_IfFetcherFails_ReturnError(t *testing.T) {
mock.Anything,
mock.Anything,
mock.Anything,
- mock.Anything,
).Unset()
f.EXPECT().Init(
mock.Anything,
@@ -851,7 +848,6 @@ func TestNonUniqueUpdate_IfFetcherFails_ReturnError(t *testing.T) {
mock.Anything,
mock.Anything,
mock.Anything,
- mock.Anything,
).Return(testError)
f.EXPECT().Close().Unset()
f.EXPECT().Close().Return(nil)
@@ -960,7 +956,6 @@ func TestNonUniqueUpdate_ShouldPassToFetcherOnlyRelevantFields(t *testing.T) {
mock.Anything,
mock.Anything,
mock.Anything,
- mock.Anything,
).Unset()
f.EXPECT().Init(
mock.Anything,
@@ -972,7 +967,6 @@ func TestNonUniqueUpdate_ShouldPassToFetcherOnlyRelevantFields(t *testing.T) {
mock.Anything,
mock.Anything,
mock.Anything,
- mock.Anything,
).
RunAndReturn(func(
ctx context.Context,
@@ -983,7 +977,7 @@ func TestNonUniqueUpdate_ShouldPassToFetcherOnlyRelevantFields(t *testing.T) {
fields []client.FieldDefinition,
filter *mapper.Filter,
mapping *core.DocumentMapping,
- reverse, showDeleted bool,
+ showDeleted bool,
) error {
require.Equal(t, 2, len(fields))
require.ElementsMatch(t,
internal/lens/fetcher.go (2 changes: 0 additions & 2 deletions)
@@ -70,7 +70,6 @@ func (f *lensedFetcher) Init(
fields []client.FieldDefinition,
filter *mapper.Filter,
docmapper *core.DocumentMapping,
- reverse bool,
showDeleted bool,
) error {
f.col = col
@@ -122,7 +121,6 @@ historyLoop:
innerFetcherFields,
filter,
docmapper,
- reverse,
showDeleted,
)
}
internal/planner/scan.go (2 changes: 0 additions & 2 deletions)
@@ -47,7 +47,6 @@ type scanNode struct {
showDeleted bool

prefixes []keys.Walkable
- reverse bool

filter *mapper.Filter
slct *mapper.Select
@@ -72,7 +71,6 @@ func (n *scanNode) Init() error {
n.fields,
n.filter,
n.slct.DocumentMapping,
- n.reverse,
n.showDeleted,
); err != nil {
return err
