From 9a228d2896acf7b51470fe5133a533f55a0954a3 Mon Sep 17 00:00:00 2001
From: Keenan Nemetz
Date: Mon, 26 Aug 2024 11:23:32 -0700
Subject: [PATCH 01/71] fix(i): Make peer connections deterministic (#2888)

## Relevant issue(s)

Resolves #2847
Resolves #1902

## Description

This PR fixes an issue where peer connections were not deterministic within the test framework.

A few other areas were also cleaned up:

- Bootstrap has been replaced with https://github.com/ipfs/boxo/blob/main/bootstrap/bootstrap.go
- Host setup logic was moved to `net/host.go`
- The persistent peer store was replaced with an in-memory-only version
- Context reuse within the `net` package has been refactored
- Removed an unused peer event

## Tasks

- [x] I made sure the code is well commented, particularly hard-to-understand areas.
- [x] I made sure the repository-held documentation is changed accordingly.
- [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
- [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ...

## How has this been tested?

`make test`

Specify the platform(s) on which this was tested:

- MacOS
---
 cli/start.go                  |  14 +-
 event/event.go                |   7 -
 go.mod                        |   1 -
 go.sum                        |   8 +-
 net/client.go                 |   6 +-
 net/client_test.go            |   8 +-
 net/config.go                 |   8 +
 net/config_test.go            |  12 ++
 net/dialer_test.go            |  47 ++---
 net/host.go                   |  72 +++++++
 net/host_test.go              |  29 +++
 net/peer.go                   | 347 +++++++++-------------------
 net/peer_test.go              | 224 +++-------------------
 net/server.go                 |  31 ++-
 net/server_test.go            |  55 +-----
 net/utils/util.go             |  53 ------
 node/node.go                  |  19 +-
 node/node_test.go             |  13 --
 tests/clients/cli/wrapper.go  |   4 +-
 tests/clients/clients.go      |   4 +-
 tests/clients/http/wrapper.go |   4 +-
 tests/integration/client.go   |   6 +-
 tests/integration/p2p.go      |  27 ++-
 tests/integration/utils.go    |  21 +-
 24 files changed, 313 insertions(+), 707 deletions(-)
 create mode 100644 net/host.go
 create mode 100644 net/host_test.go
 delete mode 100644 net/utils/util.go

diff --git a/cli/start.go b/cli/start.go
index 212202be80..641e743ee8 100644
--- a/cli/start.go
+++ b/cli/start.go
@@ -11,12 +11,10 @@
 package cli
 
 import (
-	"fmt"
 	"os"
 	"os/signal"
 	"syscall"
 
-	"github.com/libp2p/go-libp2p/core/peer"
 	"github.com/sourcenetwork/immutable"
 	"github.com/sourcenetwork/sourcehub/sdk"
 	"github.com/spf13/cobra"
@@ -26,7 +24,6 @@ import (
 	"github.com/sourcenetwork/defradb/internal/db"
 	"github.com/sourcenetwork/defradb/keyring"
 	"github.com/sourcenetwork/defradb/net"
-	netutils "github.com/sourcenetwork/defradb/net/utils"
 	"github.com/sourcenetwork/defradb/node"
 )
 
@@ -49,15 +46,6 @@ func MakeStartCommand() *cobra.Command {
 		RunE: func(cmd *cobra.Command, args []string) error {
 			cfg := mustGetContextConfig(cmd)
 
-			var peers []peer.AddrInfo
-			if val := cfg.GetStringSlice("net.peers"); len(val) > 0 {
-				addrs, err := netutils.ParsePeers(val)
-				if err != nil {
-					return errors.Wrap(fmt.Sprintf("failed to parse bootstrap peers %s", val), err)
-				}
-				peers = addrs
-			}
-
 			opts := []node.Option{
 				node.WithStorePath(cfg.GetString("datastore.badger.path")),
 				node.WithBadgerInMemory(cfg.GetString("datastore.store") == configStoreMemory),
 				node.WithSourceHubChainID(cfg.GetString("acp.sourceHub.ChainID")),
node.WithSourceHubGRPCAddress(cfg.GetString("acp.sourceHub.GRPCAddress")), node.WithSourceHubCometRPCAddress(cfg.GetString("acp.sourceHub.CometRPCAddress")), - node.WithPeers(peers...), // db options db.WithMaxRetries(cfg.GetInt("datastore.MaxTxnRetries")), // net node options net.WithListenAddresses(cfg.GetStringSlice("net.p2pAddresses")...), net.WithEnablePubSub(cfg.GetBool("net.pubSubEnabled")), net.WithEnableRelay(cfg.GetBool("net.relayEnabled")), + net.WithBootstrapPeers(cfg.GetStringSlice("net.peers")...), // http server options http.WithAddress(cfg.GetString("api.address")), http.WithAllowedOrigins(cfg.GetStringSlice("api.allowed-origins")...), diff --git a/event/event.go b/event/event.go index fa557cc03c..9d24a89c10 100644 --- a/event/event.go +++ b/event/event.go @@ -12,7 +12,6 @@ package event import ( "github.com/ipfs/go-cid" - "github.com/libp2p/go-libp2p/core/event" "github.com/libp2p/go-libp2p/core/peer" ) @@ -30,8 +29,6 @@ const ( UpdateName = Name("update") // PubSubName is the name of the network pubsub event. PubSubName = Name("pubsub") - // PeerName is the name of the network connect event. - PeerName = Name("peer") // P2PTopicName is the name of the network p2p topic update event. P2PTopicName = Name("p2p-topic") // PeerInfoName is the name of the network peer info event. @@ -44,10 +41,6 @@ const ( ReplicatorCompletedName = Name("replicator-completed") ) -// Peer is an event that is published when -// a peer connection has changed status. -type Peer = event.EvtPeerConnectednessChanged - // PubSub is an event that is published when // a pubsub message has been received from a remote peer. type PubSub struct { diff --git a/go.mod b/go.mod index 1fdba68594..8b94ad9378 100644 --- a/go.mod +++ b/go.mod @@ -200,7 +200,6 @@ require ( github.com/hashicorp/go-safetemp v1.0.0 // indirect github.com/hashicorp/go-version v1.6.0 // indirect github.com/hashicorp/golang-lru v1.0.2 // indirect - github.com/hashicorp/golang-lru/arc/v2 v2.0.7 // indirect github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/hashicorp/hcl v1.0.0 // indirect github.com/hashicorp/yamux v0.1.1 // indirect diff --git a/go.sum b/go.sum index 1165fcb757..c9ee7d5d1d 100644 --- a/go.sum +++ b/go.sum @@ -226,8 +226,6 @@ github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMb github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4= github.com/99designs/keyring v1.2.2 h1:pZd3neh/EmUzWONb35LxQfvuY7kiSXAq3HQd97+XBn0= github.com/99designs/keyring v1.2.2/go.mod h1:wes/FrByc8j7lFOAGLGSNEg8f/PaI3cgTBqhFkHUrPk= -github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96 h1:cTp8I5+VIoKjsnZuH8vjyaysT/ses3EvZeaV/1UkF2M= -github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8= github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0= github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= @@ -442,7 +440,6 @@ github.com/decred/dcrd/dcrec/secp256k1/v4 v4.3.0/go.mod h1:v57UDF4pDQJcEfFUCRop3 github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f h1:U5y3Y5UE0w7amNe7Z5G/twsBW0KEalRQXZzf8ufSh9I= github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f/go.mod h1:xH/i4TFMt8koVQZ6WFms69WAsDWr2XsYL3Hkl7jkoLE= github.com/dgraph-io/badger v1.6.2 
h1:mNw0qs90GVgGGWylh0umH5iag1j6n/PeJtNvL6KY/x8= -github.com/dgraph-io/badger v1.6.2/go.mod h1:JW2yswe3V058sS0kZ2h/AXeDSqFjxnZcRrVH//y2UQE= github.com/dgraph-io/badger/v2 v2.2007.4 h1:TRWBQg8UrlUhaFdco01nO2uXwzKS7zd+HVdwV/GHc4o= github.com/dgraph-io/badger/v2 v2.2007.4/go.mod h1:vSw/ax2qojzbN6eXHIx6KPKtCSHJN/Uz0X0VPruTIhk= github.com/dgraph-io/badger/v3 v3.2011.1 h1:Hmyof0WMEF/QtutX5SQHzIMnJQxb/IrSzhjckV2SD6g= @@ -869,10 +866,6 @@ github.com/ipfs/go-datastore v0.6.0 h1:JKyz+Gvz1QEZw0LsX1IBn+JFCJQH4SJVFtM4uWU0M github.com/ipfs/go-datastore v0.6.0/go.mod h1:rt5M3nNbSO/8q1t4LNkLyUwRs8HupMeN/8O4Vn9YAT8= github.com/ipfs/go-detect-race v0.0.1 h1:qX/xay2W3E4Q1U7d9lNs1sU9nvguX0a7319XbyQ6cOk= github.com/ipfs/go-detect-race v0.0.1/go.mod h1:8BNT7shDZPo99Q74BpGMK+4D8Mn4j46UU0LZ723meps= -github.com/ipfs/go-ds-badger v0.3.0 h1:xREL3V0EH9S219kFFueOYJJTcjgNSZ2HY1iSvN7U1Ro= -github.com/ipfs/go-ds-badger v0.3.0/go.mod h1:1ke6mXNqeV8K3y5Ak2bAA0osoTfmxUdupVCGm4QUIek= -github.com/ipfs/go-ds-leveldb v0.5.0 h1:s++MEBbD3ZKc9/8/njrn4flZLnCuY9I79v94gBUNumo= -github.com/ipfs/go-ds-leveldb v0.5.0/go.mod h1:d3XG9RUDzQ6V4SHi8+Xgj9j1XuEk1z82lquxrVbml/Q= github.com/ipfs/go-ipfs-blocksutil v0.0.1 h1:Eh/H4pc1hsvhzsQoMEP3Bke/aW5P5rVM1IWFJMcGIPQ= github.com/ipfs/go-ipfs-blocksutil v0.0.1/go.mod h1:Yq4M86uIOmxmGPUHv/uI7uKqZNtLb449gwKqXjIsnRk= github.com/ipfs/go-ipfs-delay v0.0.1 h1:r/UXYyRcddO6thwOnhiznIAiSvxMECGgtv35Xs1IeRQ= @@ -908,6 +901,7 @@ github.com/ipld/go-ipld-prime/storage/bsrvadapter v0.0.0-20240322071758-198d7dba github.com/ipld/go-ipld-prime/storage/bsrvadapter v0.0.0-20240322071758-198d7dba8fb8/go.mod h1:ej/GTRX+HjlHMs/M3zg9fM8mUlQXgHqRvPJjtp+atHw= github.com/jackpal/go-nat-pmp v1.0.2 h1:KzKSgb7qkJvOUTqYl9/Hg/me3pWgBmERKrTGD7BdWus= github.com/jackpal/go-nat-pmp v1.0.2/go.mod h1:QPH045xvCAeXUZOxsnwmrtiCoxIr9eob+4orBN1SBKc= +github.com/jbenet/go-cienv v0.1.0 h1:Vc/s0QbQtoxX8MwwSLWWh+xNNZvM3Lw7NsTcHrvvhMc= github.com/jbenet/go-cienv v0.1.0/go.mod h1:TqNnHUmJgXau0nCzC7kXWeotg3J9W34CUv5Djy1+FlA= github.com/jbenet/go-temp-err-catcher v0.1.0 h1:zpb3ZH6wIE8Shj2sKS+khgRvf7T7RABoLk/+KKHggpk= github.com/jbenet/go-temp-err-catcher v0.1.0/go.mod h1:0kJRvmDZXNMIiJirNPEYfhpPwbGVtZVWC34vc5WLsDk= diff --git a/net/client.go b/net/client.go index 9930710891..77eb28d4d6 100644 --- a/net/client.go +++ b/net/client.go @@ -31,7 +31,7 @@ var ( // pushLog creates a pushLog request and sends it to another node // over libp2p grpc connection -func (s *server) pushLog(ctx context.Context, evt event.Update, pid peer.ID) error { +func (s *server) pushLog(evt event.Update, pid peer.ID) error { body := &pb.PushLogRequest_Body{ DocID: []byte(evt.DocID), Cid: evt.Cid.Bytes(), @@ -50,10 +50,10 @@ func (s *server) pushLog(ctx context.Context, evt event.Update, pid peer.ID) err return NewErrPushLog(err) } - cctx, cancel := context.WithTimeout(ctx, PushTimeout) + ctx, cancel := context.WithTimeout(s.peer.ctx, PushTimeout) defer cancel() - if _, err := client.PushLog(cctx, req); err != nil { + if _, err := client.PushLog(ctx, req); err != nil { return NewErrPushLog( err, errors.NewKV("CID", evt.Cid), diff --git a/net/client_test.go b/net/client_test.go index 43c4ec1e01..629b176605 100644 --- a/net/client_test.go +++ b/net/client_test.go @@ -63,7 +63,7 @@ func TestPushlogWithDialFailure(t *testing.T) { grpc.WithCredentialsBundle(nil), ) - err = p.server.pushLog(ctx, event.Update{ + err = p.server.pushLog(event.Update{ DocID: id.String(), Cid: cid, SchemaRoot: "test", @@ -86,7 +86,7 @@ func TestPushlogWithInvalidPeerID(t *testing.T) { cid, err := 
createCID(doc) require.NoError(t, err) - err = p.server.pushLog(ctx, event.Update{ + err = p.server.pushLog(event.Update{ DocID: id.String(), Cid: cid, SchemaRoot: "test", @@ -100,11 +100,9 @@ func TestPushlogW_WithValidPeerID_NoError(t *testing.T) { db1, p1 := newTestPeer(ctx, t) defer db1.Close() defer p1.Close() - p1.Start() db2, p2 := newTestPeer(ctx, t) defer p2.Close() defer db2.Close() - p2.Start() err := p1.host.Connect(ctx, p2.PeerInfo()) require.NoError(t, err) @@ -139,7 +137,7 @@ func TestPushlogW_WithValidPeerID_NoError(t *testing.T) { b, err := db1.Blockstore().AsIPLDStorage().Get(ctx, headCID.KeyString()) require.NoError(t, err) - err = p1.server.pushLog(ctx, event.Update{ + err = p1.server.pushLog(event.Update{ DocID: doc.ID().String(), Cid: headCID, SchemaRoot: col.SchemaRoot(), diff --git a/net/config.go b/net/config.go index d2a3039d4a..8128aef215 100644 --- a/net/config.go +++ b/net/config.go @@ -24,6 +24,7 @@ type Options struct { EnableRelay bool GRPCServerOptions []grpc.ServerOption GRPCDialOptions []grpc.DialOption + BootstrapPeers []string } // DefaultOptions returns the default net options. @@ -64,3 +65,10 @@ func WithListenAddresses(addresses ...string) NodeOpt { opt.ListenAddresses = addresses } } + +// WithBootstrapPeers sets the bootstrap peer addresses to attempt to connect to. +func WithBootstrapPeers(peers ...string) NodeOpt { + return func(opt *Options) { + opt.BootstrapPeers = peers + } +} diff --git a/net/config_test.go b/net/config_test.go index 869c820788..b9920d8915 100644 --- a/net/config_test.go +++ b/net/config_test.go @@ -34,3 +34,15 @@ func TestWithEnablePubSub(t *testing.T) { WithEnablePubSub(true)(opts) assert.Equal(t, true, opts.EnablePubSub) } + +func TestWithBootstrapPeers(t *testing.T) { + opts := &Options{} + WithBootstrapPeers("/ip4/127.0.0.1/tcp/6666/p2p/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ")(opts) + assert.ElementsMatch(t, []string{"/ip4/127.0.0.1/tcp/6666/p2p/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ"}, opts.BootstrapPeers) +} + +func TestWithPrivateKey(t *testing.T) { + opts := &Options{} + WithPrivateKey([]byte("abc"))(opts) + assert.Equal(t, []byte("abc"), opts.PrivateKey) +} diff --git a/net/dialer_test.go b/net/dialer_test.go index 479d4d7e63..64060f2660 100644 --- a/net/dialer_test.go +++ b/net/dialer_test.go @@ -16,8 +16,6 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - - netutils "github.com/sourcenetwork/defradb/net/utils" ) func TestDial_WithConnectedPeer_NoError(t *testing.T) { @@ -28,27 +26,24 @@ func TestDial_WithConnectedPeer_NoError(t *testing.T) { ctx := context.Background() n1, err := NewPeer( ctx, - db1.Rootstore(), db1.Blockstore(), db1.Events(), - WithListenAddresses("/ip4/0.0.0.0/tcp/0"), + WithListenAddresses("/ip4/127.0.0.1/tcp/0"), ) assert.NoError(t, err) defer n1.Close() n2, err := NewPeer( ctx, - db2.Rootstore(), db2.Blockstore(), db2.Events(), - WithListenAddresses("/ip4/0.0.0.0/tcp/0"), + WithListenAddresses("/ip4/127.0.0.1/tcp/0"), ) assert.NoError(t, err) defer n2.Close() - addrs, err := netutils.ParsePeers([]string{n1.host.Addrs()[0].String() + "/p2p/" + n1.PeerID().String()}) - if err != nil { - t.Fatal(err) - } - n2.Bootstrap(addrs) + + err = n2.Connect(ctx, n1.PeerInfo()) + require.NoError(t, err) + _, err = n1.server.dial(n2.PeerID()) require.NoError(t, err) } @@ -61,27 +56,24 @@ func TestDial_WithConnectedPeerAndSecondConnection_NoError(t *testing.T) { ctx := context.Background() n1, err := NewPeer( ctx, - db1.Rootstore(), db1.Blockstore(), 
db1.Events(), - WithListenAddresses("/ip4/0.0.0.0/tcp/0"), + WithListenAddresses("/ip4/127.0.0.1/tcp/0"), ) assert.NoError(t, err) defer n1.Close() n2, err := NewPeer( ctx, - db2.Rootstore(), db2.Blockstore(), db2.Events(), - WithListenAddresses("/ip4/0.0.0.0/tcp/0"), + WithListenAddresses("/ip4/127.0.0.1/tcp/0"), ) assert.NoError(t, err) defer n2.Close() - addrs, err := netutils.ParsePeers([]string{n1.host.Addrs()[0].String() + "/p2p/" + n1.PeerID().String()}) - if err != nil { - t.Fatal(err) - } - n2.Bootstrap(addrs) + + err = n2.Connect(ctx, n1.PeerInfo()) + require.NoError(t, err) + _, err = n1.server.dial(n2.PeerID()) require.NoError(t, err) @@ -97,27 +89,24 @@ func TestDial_WithConnectedPeerAndSecondConnectionWithConnectionShutdown_Closing ctx := context.Background() n1, err := NewPeer( ctx, - db1.Rootstore(), db1.Blockstore(), db1.Events(), - WithListenAddresses("/ip4/0.0.0.0/tcp/0"), + WithListenAddresses("/ip4/127.0.0.1/tcp/0"), ) assert.NoError(t, err) defer n1.Close() n2, err := NewPeer( ctx, - db2.Rootstore(), db2.Blockstore(), db2.Events(), - WithListenAddresses("/ip4/0.0.0.0/tcp/0"), + WithListenAddresses("/ip4/127.0.0.1/tcp/0"), ) assert.NoError(t, err) defer n2.Close() - addrs, err := netutils.ParsePeers([]string{n1.host.Addrs()[0].String() + "/p2p/" + n1.PeerID().String()}) - if err != nil { - t.Fatal(err) - } - n2.Bootstrap(addrs) + + err = n2.Connect(ctx, n1.PeerInfo()) + require.NoError(t, err) + _, err = n1.server.dial(n2.PeerID()) require.NoError(t, err) diff --git a/net/host.go b/net/host.go new file mode 100644 index 0000000000..2de5ffe25c --- /dev/null +++ b/net/host.go @@ -0,0 +1,72 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package net + +import ( + "context" + "time" + + libp2p "github.com/libp2p/go-libp2p" + dht "github.com/libp2p/go-libp2p-kad-dht" + dualdht "github.com/libp2p/go-libp2p-kad-dht/dual" + record "github.com/libp2p/go-libp2p-record" + libp2pCrypto "github.com/libp2p/go-libp2p/core/crypto" + "github.com/libp2p/go-libp2p/core/host" + "github.com/libp2p/go-libp2p/core/routing" + "github.com/libp2p/go-libp2p/p2p/net/connmgr" +) + +// setupHost returns a host and router configured with the given options. +func setupHost(ctx context.Context, options *Options) (host.Host, *dualdht.DHT, error) { + connManager, err := connmgr.NewConnManager(100, 400, connmgr.WithGracePeriod(time.Second*20)) + if err != nil { + return nil, nil, err + } + + dhtOpts := []dualdht.Option{ + dualdht.DHTOption(dht.NamespacedValidator("pk", record.PublicKeyValidator{})), + dualdht.DHTOption(dht.Concurrency(10)), + dualdht.DHTOption(dht.Mode(dht.ModeAuto)), + } + + var ddht *dualdht.DHT + routing := func(h host.Host) (routing.PeerRouting, error) { + ddht, err = dualdht.New(ctx, h, dhtOpts...) 
+ return ddht, err + } + + libp2pOpts := []libp2p.Option{ + libp2p.ConnectionManager(connManager), + libp2p.DefaultTransports, + libp2p.ListenAddrStrings(options.ListenAddresses...), + libp2p.Routing(routing), + } + + // relay is enabled by default unless explicitly disabled + if !options.EnableRelay { + libp2pOpts = append(libp2pOpts, libp2p.DisableRelay()) + } + + // use the private key from options or generate a random one + if options.PrivateKey != nil { + privateKey, err := libp2pCrypto.UnmarshalEd25519PrivateKey(options.PrivateKey) + if err != nil { + return nil, nil, err + } + libp2pOpts = append(libp2pOpts, libp2p.Identity(privateKey)) + } + + h, err := libp2p.New(libp2pOpts...) + if err != nil { + return nil, nil, err + } + return h, ddht, nil +} diff --git a/net/host_test.go b/net/host_test.go new file mode 100644 index 0000000000..f7787e4c5e --- /dev/null +++ b/net/host_test.go @@ -0,0 +1,29 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package net + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestSetupHostWithDefaultOptions(t *testing.T) { + h, dht, err := setupHost(context.Background(), DefaultOptions()) + require.NoError(t, err) + + require.NotNil(t, h) + require.NotNil(t, dht) + + err = h.Close() + require.NoError(t, err) +} diff --git a/net/peer.go b/net/peer.go index 7222d7cf9f..301c080edb 100644 --- a/net/peer.go +++ b/net/peer.go @@ -14,41 +14,27 @@ package net import ( "context" - "fmt" - "sync" - "sync/atomic" + "io" "time" "github.com/ipfs/boxo/bitswap" "github.com/ipfs/boxo/bitswap/network" "github.com/ipfs/boxo/blockservice" + "github.com/ipfs/boxo/bootstrap" exchange "github.com/ipfs/boxo/exchange" - "github.com/ipfs/boxo/ipns" blocks "github.com/ipfs/go-block-format" "github.com/ipfs/go-cid" - ds "github.com/ipfs/go-datastore" - libp2p "github.com/libp2p/go-libp2p" gostream "github.com/libp2p/go-libp2p-gostream" - dht "github.com/libp2p/go-libp2p-kad-dht" - dualdht "github.com/libp2p/go-libp2p-kad-dht/dual" pubsub "github.com/libp2p/go-libp2p-pubsub" - record "github.com/libp2p/go-libp2p-record" - libp2pCrypto "github.com/libp2p/go-libp2p/core/crypto" - libp2pEvent "github.com/libp2p/go-libp2p/core/event" "github.com/libp2p/go-libp2p/core/host" "github.com/libp2p/go-libp2p/core/peer" "github.com/libp2p/go-libp2p/core/routing" - // @TODO: https://github.com/sourcenetwork/defradb/issues/1902 - //nolint:staticcheck - "github.com/libp2p/go-libp2p/p2p/host/peerstore/pstoreds" - "github.com/libp2p/go-libp2p/p2p/net/connmgr" "github.com/multiformats/go-multiaddr" "github.com/sourcenetwork/corelog" "google.golang.org/grpc" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/crypto" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/event" @@ -64,6 +50,9 @@ type Peer struct { bus *event.Bus updateSub *event.Subscription + ctx context.Context + cancel context.CancelFunc + host host.Host dht routing.Routing ps *pubsub.PubSub @@ -75,97 +64,48 @@ type Peer struct { exch exchange.Interface bserv blockservice.BlockService - ctx context.Context - cancel context.CancelFunc - dhtClose func() error + 
bootCloser io.Closer } // NewPeer creates a new instance of the DefraDB server as a peer-to-peer node. func NewPeer( ctx context.Context, - rootstore datastore.Rootstore, blockstore datastore.Blockstore, bus *event.Bus, opts ...NodeOpt, ) (p *Peer, err error) { - if rootstore == nil || blockstore == nil { - return nil, ErrNilDB - } - - options := DefaultOptions() - for _, opt := range opts { - opt(options) - } - - connManager, err := connmgr.NewConnManager(100, 400, connmgr.WithGracePeriod(time.Second*20)) - if err != nil { - return nil, err - } - - var listenAddresses []multiaddr.Multiaddr - for _, addr := range options.ListenAddresses { - listenAddress, err := multiaddr.NewMultiaddr(addr) - if err != nil { - return nil, err - } - listenAddresses = append(listenAddresses, listenAddress) - } - ctx, cancel := context.WithCancel(ctx) defer func() { if p == nil { cancel() + } else if err != nil { + p.Close() } }() - peerstore, err := pstoreds.NewPeerstore(ctx, rootstore, pstoreds.DefaultOpts()) - if err != nil { - return nil, err + if blockstore == nil { + return nil, ErrNilDB } - if options.PrivateKey == nil { - // generate an ephemeral private key - key, err := crypto.GenerateEd25519() + options := DefaultOptions() + for _, opt := range opts { + opt(options) + } + + peers := make([]peer.AddrInfo, len(options.BootstrapPeers)) + for i, p := range options.BootstrapPeers { + addr, err := peer.AddrInfoFromString(p) if err != nil { return nil, err } - options.PrivateKey = key + peers[i] = *addr } - // unmarshal the private key bytes - privateKey, err := libp2pCrypto.UnmarshalEd25519PrivateKey(options.PrivateKey) + h, ddht, err := setupHost(ctx, options) if err != nil { return nil, err } - var ddht *dualdht.DHT - - libp2pOpts := []libp2p.Option{ - libp2p.ConnectionManager(connManager), - libp2p.DefaultTransports, - libp2p.Identity(privateKey), - libp2p.ListenAddrs(listenAddresses...), - libp2p.Peerstore(peerstore), - libp2p.Routing(func(h host.Host) (routing.PeerRouting, error) { - // Delete this line and uncomment the next 6 lines once we remove batchable datastore support. - // var store ds.Batching - // // If `rootstore` doesn't implement `Batching`, `nil` will be passed - // // to newDHT which will cause the DHT to be stored in memory. - // if dsb, isBatching := rootstore.(ds.Batching); isBatching { - // store = dsb - // } - ddht, err = newDHT(ctx, h, rootstore) - return ddht, err - }), - } - if !options.EnableRelay { - libp2pOpts = append(libp2pOpts, libp2p.DisableRelay()) - } - - h, err := libp2p.New(libp2pOpts...) 
- if err != nil { - return nil, err - } log.InfoContext( ctx, "Created LibP2P host", @@ -173,9 +113,23 @@ func NewPeer( corelog.Any("Address", options.ListenAddresses), ) - var ps *pubsub.PubSub + bswapnet := network.NewFromIpfsHost(h, ddht) + bswap := bitswap.New(ctx, bswapnet, blockstore) + + p = &Peer{ + host: h, + dht: ddht, + blockstore: blockstore, + ctx: ctx, + cancel: cancel, + bus: bus, + p2pRPC: grpc.NewServer(options.GRPCServerOptions...), + bserv: blockservice.New(blockstore, bswap), + exch: bswap, + } + if options.EnablePubSub { - ps, err = pubsub.NewGossipSub( + p.ps, err = pubsub.NewGossipSub( ctx, h, pubsub.WithPeerExchange(true), @@ -184,119 +138,65 @@ func NewPeer( if err != nil { return nil, err } + p.updateSub, err = p.bus.Subscribe(event.UpdateName, event.P2PTopicName, event.ReplicatorName) + if err != nil { + return nil, err + } + log.Info("Starting internal broadcaster for pubsub network") + go p.handleMessageLoop() } + p.server, err = newServer(p, options.GRPCDialOptions...) if err != nil { return nil, err } - sub, err := h.EventBus().Subscribe(&libp2pEvent.EvtPeerConnectednessChanged{}) + p2plistener, err := gostream.Listen(h, corenet.Protocol) if err != nil { return nil, err } - // publish subscribed events to the event bus - go func() { - for { - select { - case <-ctx.Done(): - return - case val, isOpen := <-sub.Out(): - if !isOpen { - return - } - bus.Publish(event.NewMessage(event.PeerName, val)) - } - } - }() - - p = &Peer{ - host: h, - dht: ddht, - ps: ps, - blockstore: blockstore, - bus: bus, - p2pRPC: grpc.NewServer(options.GRPCServerOptions...), - ctx: ctx, - cancel: cancel, - } - p.server, err = newServer(p, options.GRPCDialOptions...) + p.bootCloser, err = bootstrap.Bootstrap(p.PeerID(), h, ddht, bootstrap.BootstrapConfigWithPeers(peers)) if err != nil { return nil, err } - p.setupBlockService() - - return p, nil -} - -// Start all the internal workers/goroutines/loops that manage the P2P state. -func (p *Peer) Start() error { - // reconnect to known peers - var wg sync.WaitGroup - for _, id := range p.host.Peerstore().PeersWithAddrs() { - if id == p.host.ID() { - continue - } - wg.Add(1) - go func(id peer.ID) { - defer wg.Done() - addr := p.host.Peerstore().PeerInfo(id) - err := p.host.Connect(p.ctx, addr) - if err != nil { - log.InfoContext( - p.ctx, - "Failure while reconnecting to a known peer", - corelog.Any("peer", id)) - } - }(id) - } - wg.Wait() - - p2plistener, err := gostream.Listen(p.host, corenet.Protocol) - if err != nil { - return err - } - - if p.ps != nil { - sub, err := p.bus.Subscribe(event.UpdateName, event.P2PTopicName, event.ReplicatorName) - if err != nil { - return err - } - p.updateSub = sub - log.InfoContext(p.ctx, "Starting internal broadcaster for pubsub network") - go p.handleMessageLoop() - } - - log.InfoContext( - p.ctx, - "Starting P2P node", - corelog.Any("P2P addresses", p.host.Addrs())) // register the P2P gRPC server go func() { pb.RegisterServiceServer(p.p2pRPC, p.server) if err := p.p2pRPC.Serve(p2plistener); err != nil && !errors.Is(err, grpc.ErrServerStopped) { - log.ErrorContextE(p.ctx, "Fatal P2P RPC server error", err) + log.ErrorE("Fatal P2P RPC server error", err) } }() - p.bus.Publish(event.NewMessage(event.PeerInfoName, event.PeerInfo{Info: p.PeerInfo()})) + bus.Publish(event.NewMessage(event.PeerInfoName, event.PeerInfo{Info: p.PeerInfo()})) - return nil + return p, nil } // Close the peer node and all its internal workers/goroutines/loops. 
func (p *Peer) Close() { - // close topics - if err := p.server.removeAllPubsubTopics(); err != nil { - log.ErrorContextE(p.ctx, "Error closing pubsub topics", err) + defer p.cancel() + + if p.bootCloser != nil { + // close bootstrap service + if err := p.bootCloser.Close(); err != nil { + log.ErrorE("Error closing bootstrap", err) + } } - // stop gRPC server - for _, c := range p.server.conns { - if err := c.Close(); err != nil { - log.ErrorContextE(p.ctx, "Failed closing server RPC connections", err) + if p.server != nil { + // close topics + if err := p.server.removeAllPubsubTopics(); err != nil { + log.ErrorE("Error closing pubsub topics", err) + } + + // stop gRPC server + for _, c := range p.server.conns { + if err := c.Close(); err != nil { + log.ErrorE("Failed closing server RPC connections", err) + } } } @@ -305,24 +205,25 @@ func (p *Peer) Close() { } if err := p.bserv.Close(); err != nil { - log.ErrorContextE(p.ctx, "Error closing block service", err) + log.ErrorE("Error closing block service", err) } if err := p.host.Close(); err != nil { - log.ErrorContextE(p.ctx, "Error closing host", err) - } - - if p.dhtClose != nil { - err := p.dhtClose() - if err != nil { - log.ErrorContextE(p.ctx, "Failed to close DHT", err) - } + log.ErrorE("Error closing host", err) } - stopGRPCServer(p.ctx, p.p2pRPC) - - if p.cancel != nil { - p.cancel() + stopped := make(chan struct{}) + go func() { + p.p2pRPC.GracefulStop() + close(stopped) + }() + timer := time.NewTimer(10 * time.Second) + select { + case <-timer.C: + p.p2pRPC.Stop() + log.Info("Peer gRPC server was shutdown ungracefully") + case <-stopped: + timer.Stop() } } @@ -345,7 +246,7 @@ func (p *Peer) handleMessageLoop() { } if err != nil { - log.ErrorContextE(p.ctx, "Error while handling broadcast log", err) + log.ErrorE("Error while handling broadcast log", err) } case event.P2PTopic: @@ -369,9 +270,9 @@ func (p *Peer) RegisterNewDocument( schemaRoot string, ) error { // register topic - if err := p.server.addPubSubTopic(docID.String(), !p.server.hasPubSubTopic(schemaRoot)); err != nil { - log.ErrorContextE( - p.ctx, + err := p.server.addPubSubTopic(docID.String(), !p.server.hasPubSubTopic(schemaRoot)) + if err != nil { + log.ErrorE( "Failed to create new pubsub topic", err, corelog.String("DocID", docID.String()), @@ -392,7 +293,7 @@ func (p *Peer) RegisterNewDocument( }, } - return p.server.publishLog(p.ctx, schemaRoot, req) + return p.server.publishLog(ctx, schemaRoot, req) } func (p *Peer) handleDocCreateLog(evt event.Update) error { @@ -449,9 +350,9 @@ func (p *Peer) handleDocUpdateLog(evt event.Update) error { func (p *Peer) pushLogToReplicators(lg event.Update) { // let the exchange know we have this block // this should speed up the dag sync process - err := p.bserv.Exchange().NotifyNewBlocks(p.ctx, blocks.NewBlock(lg.Block)) + err := p.bserv.Exchange().NotifyNewBlocks(context.Background(), blocks.NewBlock(lg.Block)) if err != nil { - log.ErrorContextE(p.ctx, "Failed to notify new blocks", err) + log.ErrorE("Failed to notify new blocks", err) } // push to each peer (replicator) @@ -475,9 +376,8 @@ func (p *Peer) pushLogToReplicators(lg event.Update) { continue } go func(peerID peer.ID) { - if err := p.server.pushLog(p.ctx, lg, peerID); err != nil { - log.ErrorContextE( - p.ctx, + if err := p.server.pushLog(lg, peerID); err != nil { + log.ErrorE( "Failed pushing log", err, corelog.String("DocID", lg.DocID), @@ -489,66 +389,11 @@ func (p *Peer) pushLogToReplicators(lg event.Update) { } } -func (p *Peer) setupBlockService() { - 
bswapnet := network.NewFromIpfsHost(p.host, p.dht) - bswap := bitswap.New(p.ctx, bswapnet, p.blockstore) - p.bserv = blockservice.New(p.blockstore, bswap) - p.exch = bswap -} - -func stopGRPCServer(ctx context.Context, server *grpc.Server) { - stopped := make(chan struct{}) - go func() { - server.GracefulStop() - close(stopped) - }() - timer := time.NewTimer(10 * time.Second) - select { - case <-timer.C: - server.Stop() - log.InfoContext(ctx, "Peer gRPC server was shutdown ungracefully") - case <-stopped: - timer.Stop() - } -} - // Connect initiates a connection to the peer with the given address. func (p *Peer) Connect(ctx context.Context, addr peer.AddrInfo) error { return p.host.Connect(ctx, addr) } -// Bootstrap connects to the given peers. -func (p *Peer) Bootstrap(addrs []peer.AddrInfo) { - var connected uint64 - - var wg sync.WaitGroup - for _, pinfo := range addrs { - wg.Add(1) - go func(pinfo peer.AddrInfo) { - defer wg.Done() - err := p.host.Connect(p.ctx, pinfo) - if err != nil { - log.InfoContext(p.ctx, "Cannot connect to peer", corelog.Any("Error", err)) - return - } - log.InfoContext(p.ctx, "Connected", corelog.Any("PeerID", pinfo.ID)) - atomic.AddUint64(&connected, 1) - }(pinfo) - } - - wg.Wait() - - if nPeers := len(addrs); int(connected) < nPeers/2 { - log.InfoContext(p.ctx, fmt.Sprintf("Only connected to %d bootstrap peers out of %d", connected, nPeers)) - } - - err := p.dht.Bootstrap(p.ctx) - if err != nil { - log.ErrorContextE(p.ctx, "Problem bootstraping using DHT", err) - return - } -} - func (p *Peer) PeerID() peer.ID { return p.host.ID() } @@ -563,17 +408,3 @@ func (p *Peer) PeerInfo() peer.AddrInfo { Addrs: p.host.Network().ListenAddresses(), } } - -func newDHT(ctx context.Context, h host.Host, dsb ds.Batching) (*dualdht.DHT, error) { - dhtOpts := []dualdht.Option{ - dualdht.DHTOption(dht.NamespacedValidator("pk", record.PublicKeyValidator{})), - dualdht.DHTOption(dht.NamespacedValidator("ipns", ipns.Validator{KeyBook: h.Peerstore()})), - dualdht.DHTOption(dht.Concurrency(10)), - dualdht.DHTOption(dht.Mode(dht.ModeAuto)), - } - if dsb != nil { - dhtOpts = append(dhtOpts, dualdht.DHTOption(dht.Datastore(dsb))) - } - - return dualdht.New(ctx, h, dhtOpts...) 
-} diff --git a/net/peer_test.go b/net/peer_test.go index cb8c8eab44..5322d32f6e 100644 --- a/net/peer_test.go +++ b/net/peer_test.go @@ -13,10 +13,8 @@ package net import ( "context" "testing" - "time" "github.com/ipfs/go-cid" - "github.com/libp2p/go-libp2p/core/peer" mh "github.com/multiformats/go-multihash" badger "github.com/sourcenetwork/badger/v4" rpc "github.com/sourcenetwork/go-libp2p-pubsub-rpc" @@ -31,7 +29,6 @@ import ( coreblock "github.com/sourcenetwork/defradb/internal/core/block" "github.com/sourcenetwork/defradb/internal/core/crdt" "github.com/sourcenetwork/defradb/internal/db" - netutils "github.com/sourcenetwork/defradb/net/utils" ) func emptyBlock() []byte { @@ -75,7 +72,6 @@ func newTestPeer(ctx context.Context, t *testing.T) (client.DB, *Peer) { n, err := NewPeer( ctx, - db.Rootstore(), db.Blockstore(), db.Events(), WithListenAddresses(randomMultiaddr), @@ -91,7 +87,7 @@ func TestNewPeer_NoError(t *testing.T) { db, err := db.NewDB(ctx, store, acp.NoACP, nil) require.NoError(t, err) defer db.Close() - p, err := NewPeer(ctx, db.Rootstore(), db.Blockstore(), db.Events()) + p, err := NewPeer(ctx, db.Blockstore(), db.Events()) require.NoError(t, err) p.Close() } @@ -102,16 +98,6 @@ func TestNewPeer_NoDB_NilDBError(t *testing.T) { require.ErrorIs(t, err, ErrNilDB) } -func TestStartAndClose_NoError(t *testing.T) { - ctx := context.Background() - db, p := newTestPeer(ctx, t) - defer db.Close() - defer p.Close() - - err := p.Start() - require.NoError(t, err) -} - func TestStart_WithKnownPeer_NoError(t *testing.T) { ctx := context.Background() store := memory.NewDatastore(ctx) @@ -126,75 +112,22 @@ func TestStart_WithKnownPeer_NoError(t *testing.T) { n1, err := NewPeer( ctx, - db1.Rootstore(), - db1.Blockstore(), - db1.Events(), - WithListenAddresses("/ip4/0.0.0.0/tcp/0"), - ) - require.NoError(t, err) - defer n1.Close() - n2, err := NewPeer( - ctx, - db2.Rootstore(), - db2.Blockstore(), - db2.Events(), - WithListenAddresses("/ip4/0.0.0.0/tcp/0"), - ) - require.NoError(t, err) - defer n2.Close() - - addrs, err := netutils.ParsePeers([]string{n1.host.Addrs()[0].String() + "/p2p/" + n1.PeerID().String()}) - if err != nil { - t.Fatal(err) - } - n2.Bootstrap(addrs) - - err = n2.Start() - require.NoError(t, err) -} - -func TestStart_WithOfflineKnownPeer_NoError(t *testing.T) { - ctx := context.Background() - store := memory.NewDatastore(ctx) - db1, err := db.NewDB(ctx, store, acp.NoACP, nil) - require.NoError(t, err) - defer db1.Close() - - store2 := memory.NewDatastore(ctx) - db2, err := db.NewDB(ctx, store2, acp.NoACP, nil) - require.NoError(t, err) - defer db2.Close() - - n1, err := NewPeer( - ctx, - db1.Rootstore(), db1.Blockstore(), db1.Events(), - WithListenAddresses("/ip4/0.0.0.0/tcp/0"), + WithListenAddresses("/ip4/127.0.0.1/tcp/0"), ) require.NoError(t, err) defer n1.Close() n2, err := NewPeer( ctx, - db2.Rootstore(), db2.Blockstore(), db2.Events(), - WithListenAddresses("/ip4/0.0.0.0/tcp/0"), + WithListenAddresses("/ip4/127.0.0.1/tcp/0"), ) require.NoError(t, err) defer n2.Close() - addrs, err := netutils.ParsePeers([]string{n1.host.Addrs()[0].String() + "/p2p/" + n1.PeerID().String()}) - if err != nil { - t.Fatal(err) - } - n2.Bootstrap(addrs) - n1.Close() - - // give time for n1 to close - time.Sleep(100 * time.Millisecond) - - err = n2.Start() + err = n2.Connect(ctx, n1.PeerInfo()) require.NoError(t, err) } @@ -317,10 +250,7 @@ func TestHandleDocCreateLog_WithExistingTopic_TopicExistsError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 
30}`), col.Definition()) require.NoError(t, err) - err = col.Create(ctx, doc) - require.NoError(t, err) - - _, err = rpc.NewTopic(ctx, p.ps, p.host.ID(), doc.ID().String(), true) + _, err = rpc.NewTopic(ctx, p.ps, p.host.ID(), "bae-7fca96a2-5f01-5558-a81f-09b47587f26d", true) require.NoError(t, err) err = p.handleDocCreateLog(event.Update{ @@ -348,20 +278,13 @@ func TestHandleDocUpdateLog_NoError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) - err = col.Create(ctx, doc) - require.NoError(t, err) - - headCID, err := getHead(ctx, db, doc.ID()) - require.NoError(t, err) - - b, err := db.Blockstore().AsIPLDStorage().Get(ctx, headCID.KeyString()) + cid, err := createCID(doc) require.NoError(t, err) err = p.handleDocUpdateLog(event.Update{ DocID: doc.ID().String(), - Cid: headCID, + Cid: cid, SchemaRoot: col.SchemaRoot(), - Block: b, }) require.NoError(t, err) } @@ -396,23 +319,16 @@ func TestHandleDocUpdateLog_WithExistingDocIDTopic_TopicExistsError(t *testing.T doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) - err = col.Create(ctx, doc) - require.NoError(t, err) - - headCID, err := getHead(ctx, db, doc.ID()) - require.NoError(t, err) - - b, err := db.Blockstore().AsIPLDStorage().Get(ctx, headCID.KeyString()) + cid, err := createCID(doc) require.NoError(t, err) - _, err = rpc.NewTopic(ctx, p.ps, p.host.ID(), doc.ID().String(), true) + _, err = rpc.NewTopic(ctx, p.ps, p.host.ID(), "bae-7fca96a2-5f01-5558-a81f-09b47587f26d", true) require.NoError(t, err) err = p.handleDocUpdateLog(event.Update{ DocID: doc.ID().String(), - Cid: headCID, + Cid: cid, SchemaRoot: col.SchemaRoot(), - Block: b, }) require.ErrorContains(t, err, "topic already exists") } @@ -435,23 +351,16 @@ func TestHandleDocUpdateLog_WithExistingSchemaTopic_TopicExistsError(t *testing. 
doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) - err = col.Create(ctx, doc) - require.NoError(t, err) - - headCID, err := getHead(ctx, db, doc.ID()) - require.NoError(t, err) - - b, err := db.Blockstore().AsIPLDStorage().Get(ctx, headCID.KeyString()) + cid, err := createCID(doc) require.NoError(t, err) - _, err = rpc.NewTopic(ctx, p.ps, p.host.ID(), col.SchemaRoot(), true) + _, err = rpc.NewTopic(ctx, p.ps, p.host.ID(), "bafkreia7ljiy5oief4dp5xsk7t7zlgfjzqh3537hw7rtttjzchybfxtn4u", true) require.NoError(t, err) err = p.handleDocUpdateLog(event.Update{ DocID: doc.ID().String(), - Cid: headCID, + Cid: cid, SchemaRoot: col.SchemaRoot(), - Block: b, }) require.ErrorContains(t, err, "topic already exists") } @@ -475,7 +384,6 @@ func TestNewPeer_WithEnableRelay_NoError(t *testing.T) { defer db.Close() n, err := NewPeer( context.Background(), - db.Rootstore(), db.Blockstore(), db.Events(), WithEnableRelay(true), @@ -484,23 +392,6 @@ func TestNewPeer_WithEnableRelay_NoError(t *testing.T) { n.Close() } -func TestNewPeer_WithDBClosed_NoError(t *testing.T) { - ctx := context.Background() - store := memory.NewDatastore(ctx) - - db, err := db.NewDB(ctx, store, acp.NoACP, nil) - require.NoError(t, err) - db.Close() - - _, err = NewPeer( - context.Background(), - db.Rootstore(), - db.Blockstore(), - db.Events(), - ) - require.ErrorContains(t, err, "datastore closed") -} - func TestNewPeer_NoPubSub_NoError(t *testing.T) { ctx := context.Background() store := memory.NewDatastore(ctx) @@ -510,7 +401,6 @@ func TestNewPeer_NoPubSub_NoError(t *testing.T) { n, err := NewPeer( context.Background(), - db.Rootstore(), db.Blockstore(), db.Events(), WithEnablePubSub(false), @@ -529,7 +419,6 @@ func TestNewPeer_WithEnablePubSub_NoError(t *testing.T) { n, err := NewPeer( ctx, - db.Rootstore(), db.Blockstore(), db.Events(), WithEnablePubSub(true), @@ -549,7 +438,6 @@ func TestNodeClose_NoError(t *testing.T) { defer db.Close() n, err := NewPeer( context.Background(), - db.Rootstore(), db.Blockstore(), db.Events(), ) @@ -557,92 +445,25 @@ func TestNodeClose_NoError(t *testing.T) { n.Close() } -func TestNewPeer_BootstrapWithNoPeer_NoError(t *testing.T) { - ctx := context.Background() - store := memory.NewDatastore(ctx) - db, err := db.NewDB(ctx, store, acp.NoACP, nil) - require.NoError(t, err) - defer db.Close() - - n1, err := NewPeer( - ctx, - db.Rootstore(), - db.Blockstore(), - db.Events(), - WithListenAddresses("/ip4/0.0.0.0/tcp/0"), - ) - require.NoError(t, err) - n1.Bootstrap([]peer.AddrInfo{}) - n1.Close() -} - -func TestNewPeer_BootstrapWithOnePeer_NoError(t *testing.T) { - ctx := context.Background() - store := memory.NewDatastore(ctx) - db, err := db.NewDB(ctx, store, acp.NoACP, nil) - require.NoError(t, err) - defer db.Close() - n1, err := NewPeer( - ctx, - db.Rootstore(), - db.Blockstore(), - db.Events(), - WithListenAddresses("/ip4/0.0.0.0/tcp/0"), - ) - require.NoError(t, err) - defer n1.Close() - n2, err := NewPeer( - ctx, - db.Rootstore(), - db.Blockstore(), - db.Events(), - WithListenAddresses("/ip4/0.0.0.0/tcp/0"), - ) - require.NoError(t, err) - defer n2.Close() - addrs, err := netutils.ParsePeers([]string{n1.host.Addrs()[0].String() + "/p2p/" + n1.PeerID().String()}) - if err != nil { - t.Fatal(err) - } - n2.Bootstrap(addrs) -} - -func TestNewPeer_BootstrapWithOneValidPeerAndManyInvalidPeers_NoError(t *testing.T) { +func TestListenAddrs_WithListenAddresses_NoError(t *testing.T) { ctx := context.Background() store := 
memory.NewDatastore(ctx) db, err := db.NewDB(ctx, store, acp.NoACP, nil) require.NoError(t, err) defer db.Close() - n1, err := NewPeer( - ctx, - db.Rootstore(), - db.Blockstore(), - db.Events(), - WithListenAddresses("/ip4/0.0.0.0/tcp/0"), - ) - require.NoError(t, err) - defer n1.Close() - n2, err := NewPeer( - ctx, - db.Rootstore(), + n, err := NewPeer( + context.Background(), db.Blockstore(), db.Events(), - WithListenAddresses("/ip4/0.0.0.0/tcp/0"), + WithListenAddresses("/ip4/127.0.0.1/tcp/0"), ) require.NoError(t, err) - defer n2.Close() - addrs, err := netutils.ParsePeers([]string{ - n1.host.Addrs()[0].String() + "/p2p/" + n1.PeerID().String(), - "/ip4/0.0.0.0/tcp/1234/p2p/" + "12D3KooWC8YY6Tx3uAeHsdBmoy7PJPwqXAHE4HkCZ5veankKWci6", - "/ip4/0.0.0.0/tcp/1235/p2p/" + "12D3KooWC8YY6Tx3uAeHsdBmoy7PJPwqXAHE4HkCZ5veankKWci5", - "/ip4/0.0.0.0/tcp/1236/p2p/" + "12D3KooWC8YY6Tx3uAeHsdBmoy7PJPwqXAHE4HkCZ5veankKWci4", - }) - require.NoError(t, err) - n2.Bootstrap(addrs) + require.Contains(t, n.ListenAddrs()[0].String(), "/tcp/") + n.Close() } -func TestListenAddrs_WithListenAddresses_NoError(t *testing.T) { +func TestPeer_WithBootstrapPeers_NoError(t *testing.T) { ctx := context.Background() store := memory.NewDatastore(ctx) db, err := db.NewDB(ctx, store, acp.NoACP, nil) @@ -651,12 +472,11 @@ func TestListenAddrs_WithListenAddresses_NoError(t *testing.T) { n, err := NewPeer( context.Background(), - db.Rootstore(), db.Blockstore(), db.Events(), - WithListenAddresses("/ip4/0.0.0.0/tcp/0"), + WithBootstrapPeers("/ip4/127.0.0.1/tcp/6666/p2p/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ"), ) require.NoError(t, err) - require.Contains(t, n.ListenAddrs()[0].String(), "/tcp/") + n.Close() } diff --git a/net/server.go b/net/server.go index 0e36eb7e3c..2f129d19cf 100644 --- a/net/server.go +++ b/net/server.go @@ -18,7 +18,6 @@ import ( "sync" cid "github.com/ipfs/go-cid" - "github.com/libp2p/go-libp2p/core/peer" libpeer "github.com/libp2p/go-libp2p/core/peer" "github.com/libp2p/go-libp2p/core/peerstore" "github.com/sourcenetwork/corelog" @@ -46,7 +45,7 @@ type server struct { topics map[string]pubsubTopic // replicators is a map from collectionName => peerId - replicators map[string]map[peer.ID]struct{} + replicators map[string]map[libpeer.ID]struct{} mu sync.Mutex conns map[libpeer.ID]*grpc.ClientConn @@ -68,7 +67,7 @@ func newServer(p *Peer, opts ...grpc.DialOption) (*server, error) { peer: p, conns: make(map[libpeer.ID]*grpc.ClientConn), topics: make(map[string]pubsubTopic), - replicators: make(map[string]map[peer.ID]struct{}), + replicators: make(map[string]map[libpeer.ID]struct{}), } cred := insecure.NewCredentials() @@ -224,7 +223,7 @@ func (s *server) removePubSubTopic(topic string) error { return nil } - log.InfoContext(s.peer.ctx, "Removing pubsub topic", + log.Info("Removing pubsub topic", corelog.String("PeerID", s.peer.PeerID().String()), corelog.String("Topic", topic)) @@ -242,7 +241,7 @@ func (s *server) removeAllPubsubTopics() error { return nil } - log.InfoContext(s.peer.ctx, "Removing all pubsub topics", + log.Info("Removing all pubsub topics", corelog.String("PeerID", s.peer.PeerID().String())) s.mu.Lock() @@ -282,7 +281,8 @@ func (s *server) publishLog(ctx context.Context, topic string, req *pb.PushLogRe return errors.Wrap("failed marshling pubsub message", err) } - if _, err := t.Publish(ctx, data, rpc.WithIgnoreResponse(true)); err != nil { + _, err = t.Publish(ctx, data, rpc.WithIgnoreResponse(true)) + if err != nil { return errors.Wrap(fmt.Sprintf("failed publishing to thread %s", 
topic), err) } return nil @@ -290,14 +290,14 @@ func (s *server) publishLog(ctx context.Context, topic string, req *pb.PushLogRe // pubSubMessageHandler handles incoming PushLog messages from the pubsub network. func (s *server) pubSubMessageHandler(from libpeer.ID, topic string, msg []byte) ([]byte, error) { - log.InfoContext(s.peer.ctx, "Received new pubsub event", + log.Info("Received new pubsub event", corelog.String("PeerID", s.peer.PeerID().String()), corelog.Any("SenderId", from), corelog.String("Topic", topic)) req := new(pb.PushLogRequest) if err := proto.Unmarshal(msg, req); err != nil { - log.ErrorContextE(s.peer.ctx, "Failed to unmarshal pubsub message %s", err) + log.ErrorE("Failed to unmarshal pubsub message %s", err) return nil, err } ctx := grpcpeer.NewContext(s.peer.ctx, &grpcpeer.Peer{ @@ -311,7 +311,7 @@ func (s *server) pubSubMessageHandler(from libpeer.ID, topic string, msg []byte) // pubSubEventHandler logs events from the subscribed DocID topics. func (s *server) pubSubEventHandler(from libpeer.ID, topic string, msg []byte) { - log.InfoContext(s.peer.ctx, "Received new pubsub event", + log.Info("Received new pubsub event", corelog.String("PeerID", s.peer.PeerID().String()), corelog.Any("SenderId", from), corelog.String("Topic", topic), @@ -349,14 +349,14 @@ func (s *server) updatePubSubTopics(evt event.P2PTopic) { for _, topic := range evt.ToAdd { err := s.addPubSubTopic(topic, true) if err != nil { - log.ErrorContextE(s.peer.ctx, "Failed to add pubsub topic.", err) + log.ErrorE("Failed to add pubsub topic.", err) } } for _, topic := range evt.ToRemove { err := s.removePubSubTopic(topic) if err != nil { - log.ErrorContextE(s.peer.ctx, "Failed to remove pubsub topic.", err) + log.ErrorE("Failed to remove pubsub topic.", err) } } s.peer.bus.Publish(event.NewMessage(event.P2PTopicCompletedName, nil)) @@ -371,7 +371,7 @@ func (s *server) updateReplicators(evt event.Replicator) { s.peer.host.Peerstore().AddAddrs(evt.Info.ID, evt.Info.Addrs, peerstore.PermanentAddrTTL) // connect to the peer if err := s.peer.Connect(s.peer.ctx, evt.Info); err != nil { - log.ErrorContextE(s.peer.ctx, "Failed to connect to replicator peer", err) + log.ErrorE("Failed to connect to replicator peer", err) } } @@ -389,7 +389,7 @@ func (s *server) updateReplicators(evt event.Replicator) { } for schema := range evt.Schemas { if _, exists := s.replicators[schema]; !exists { - s.replicators[schema] = make(map[peer.ID]struct{}) + s.replicators[schema] = make(map[libpeer.ID]struct{}) } s.replicators[schema][evt.Info.ID] = struct{}{} } @@ -397,9 +397,8 @@ func (s *server) updateReplicators(evt event.Replicator) { if evt.Docs != nil { for update := range evt.Docs { - if err := s.pushLog(s.peer.ctx, update, evt.Info.ID); err != nil { - log.ErrorContextE( - s.peer.ctx, + if err := s.pushLog(update, evt.Info.ID); err != nil { + log.ErrorE( "Failed to replicate log", err, corelog.Any("CID", update.Cid), diff --git a/net/server_test.go b/net/server_test.go index 1ac178a2d1..0e23e3b019 100644 --- a/net/server_test.go +++ b/net/server_test.go @@ -16,8 +16,6 @@ import ( "github.com/ipfs/go-cid" "github.com/ipfs/go-datastore/query" - "github.com/libp2p/go-libp2p/core/event" - "github.com/libp2p/go-libp2p/core/host" "github.com/stretchr/testify/require" grpcpeer "google.golang.org/grpc/peer" @@ -36,55 +34,6 @@ func TestNewServerSimple(t *testing.T) { require.NoError(t, err) } -var mockError = errors.New("mock error") - -type mockHost struct { - host.Host -} - -func (mH *mockHost) EventBus() event.Bus { - return 
&mockBus{} -} - -type mockBus struct { - event.Bus -} - -func (mB *mockBus) Emitter(eventType any, opts ...event.EmitterOpt) (event.Emitter, error) { - return nil, mockError -} - -func (mB *mockBus) Subscribe(eventType any, opts ...event.SubscriptionOpt) (event.Subscription, error) { - return nil, mockError -} - -func TestNewServerWithEmitterError(t *testing.T) { - ctx := context.Background() - db, p := newTestPeer(ctx, t) - defer db.Close() - defer p.Close() - - _, err := db.AddSchema(ctx, `type User { - name: String - age: Int - }`) - require.NoError(t, err) - - col, err := db.GetCollectionByName(ctx, "User") - require.NoError(t, err) - - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) - require.NoError(t, err) - - err = col.Create(ctx, doc) - require.NoError(t, err) - - p.host = &mockHost{p.host} - - _, err = newServer(p) - require.NoError(t, err) -} - func TestGetDocGraph(t *testing.T) { ctx := context.Background() db, p := newTestPeer(ctx, t) @@ -151,10 +100,8 @@ func TestPushLog(t *testing.T) { db, p := newTestPeer(ctx, t) defer db.Close() defer p.Close() - err := p.Start() - require.NoError(t, err) - _, err = db.AddSchema(ctx, `type User { + _, err := db.AddSchema(ctx, `type User { name: String age: Int }`) diff --git a/net/utils/util.go b/net/utils/util.go deleted file mode 100644 index 5e345cc12b..0000000000 --- a/net/utils/util.go +++ /dev/null @@ -1,53 +0,0 @@ -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/* -Package utils provides utility functions for the defradb networking facilities. -*/ -package utils - -import ( - "fmt" - - "github.com/libp2p/go-libp2p/core/peer" - ma "github.com/multiformats/go-multiaddr" - - "github.com/sourcenetwork/defradb/errors" -) - -func ParsePeers(addrs []string) ([]peer.AddrInfo, error) { - maddrs := make([]ma.Multiaddr, len(addrs)) - for i, addr := range addrs { - var err error - maddrs[i], err = ma.NewMultiaddr(addr) - if err != nil { - return nil, err - } - } - return peer.AddrInfosFromP2pAddrs(maddrs...) -} - -func TCPAddrFromMultiAddr(maddr ma.Multiaddr) (string, error) { - var addr string - if maddr == nil { - return addr, errors.New("address can't be empty") - } - ip4, err := maddr.ValueForProtocol(ma.P_IP4) - if err != nil { - return addr, err - } - tcp, err := maddr.ValueForProtocol(ma.P_TCP) - if err != nil { - return addr, err - } - return fmt.Sprintf("%s:%s", ip4, tcp), nil -} diff --git a/node/node.go b/node/node.go index 6dad35f593..5eea424956 100644 --- a/node/node.go +++ b/node/node.go @@ -16,7 +16,6 @@ import ( "fmt" gohttp "net/http" - "github.com/libp2p/go-libp2p/core/peer" "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/defradb/client" @@ -40,7 +39,6 @@ type Option any // Options contains start configuration values. type Options struct { - peers []peer.AddrInfo disableP2P bool disableAPI bool } @@ -67,13 +65,6 @@ func WithDisableAPI(disable bool) NodeOpt { } } -// WithPeers sets the bootstrap peers. 
-func WithPeers(peers ...peer.AddrInfo) NodeOpt { - return func(o *Options) { - o.peers = peers - } -} - // Node is a DefraDB instance with optional sub-systems. type Node struct { DB client.DB @@ -141,13 +132,10 @@ func NewNode(ctx context.Context, opts ...Option) (*Node, error) { var peer *net.Peer if !options.disableP2P { // setup net node - peer, err = net.NewPeer(ctx, db.Rootstore(), db.Blockstore(), db.Events(), netOpts...) + peer, err = net.NewPeer(ctx, db.Blockstore(), db.Events(), netOpts...) if err != nil { return nil, err } - if len(options.peers) > 0 { - peer.Bootstrap(options.peers) - } } var server *http.Server @@ -172,11 +160,6 @@ func NewNode(ctx context.Context, opts ...Option) (*Node, error) { // Start starts the node sub-systems. func (n *Node) Start(ctx context.Context) error { - if n.Peer != nil { - if err := n.Peer.Start(); err != nil { - return err - } - } if n.Server != nil { err := n.Server.SetListener() if err != nil { diff --git a/node/node_test.go b/node/node_test.go index d3bf4c5048..1aa1dac92a 100644 --- a/node/node_test.go +++ b/node/node_test.go @@ -13,9 +13,7 @@ package node import ( "testing" - "github.com/libp2p/go-libp2p/core/peer" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" ) func TestWithDisableP2P(t *testing.T) { @@ -29,14 +27,3 @@ func TestWithDisableAPI(t *testing.T) { WithDisableAPI(true)(options) assert.Equal(t, true, options.disableAPI) } - -func TestWithPeers(t *testing.T) { - peer, err := peer.AddrInfoFromString("/ip4/127.0.0.1/tcp/9000/p2p/QmYyQSo1c1Ym7orWxLYvCrM2EmxFTANf8wXmmE7DWjhx5N") - require.NoError(t, err) - - options := &Options{} - WithPeers(*peer)(options) - - require.Len(t, options.peers, 1) - assert.Equal(t, *peer, options.peers[0]) -} diff --git a/tests/clients/cli/wrapper.go b/tests/clients/cli/wrapper.go index 14e4df7cc4..8600448968 100644 --- a/tests/clients/cli/wrapper.go +++ b/tests/clients/cli/wrapper.go @@ -540,8 +540,8 @@ func (w *Wrapper) PrintDump(ctx context.Context) error { return w.node.DB.PrintDump(ctx) } -func (w *Wrapper) Bootstrap(addrs []peer.AddrInfo) { - w.node.Peer.Bootstrap(addrs) +func (w *Wrapper) Connect(ctx context.Context, addr peer.AddrInfo) error { + return w.node.Peer.Connect(ctx, addr) } func (w *Wrapper) Host() string { diff --git a/tests/clients/clients.go b/tests/clients/clients.go index f5d822ab39..2a67ed0812 100644 --- a/tests/clients/clients.go +++ b/tests/clients/clients.go @@ -11,6 +11,8 @@ package clients import ( + "context" + "github.com/libp2p/go-libp2p/core/peer" "github.com/sourcenetwork/defradb/client" @@ -20,5 +22,5 @@ import ( // required for testing. 
type Client interface { client.DB - Bootstrap([]peer.AddrInfo) + Connect(context.Context, peer.AddrInfo) error } diff --git a/tests/clients/http/wrapper.go b/tests/clients/http/wrapper.go index 76e31d9cbd..5a813c9265 100644 --- a/tests/clients/http/wrapper.go +++ b/tests/clients/http/wrapper.go @@ -233,8 +233,8 @@ func (w *Wrapper) PrintDump(ctx context.Context) error { return w.node.DB.PrintDump(ctx) } -func (w *Wrapper) Bootstrap(addrs []peer.AddrInfo) { - w.node.Peer.Bootstrap(addrs) +func (w *Wrapper) Connect(ctx context.Context, addr peer.AddrInfo) error { + return w.node.Peer.Connect(ctx, addr) } func (w *Wrapper) Host() string { diff --git a/tests/integration/client.go b/tests/integration/client.go index d9c9f74334..d37a9f22c0 100644 --- a/tests/integration/client.go +++ b/tests/integration/client.go @@ -11,6 +11,7 @@ package tests import ( + "context" "fmt" "os" "strconv" @@ -100,10 +101,11 @@ func newGoClientWrapper(n *node.Node) *goClientWrapper { } } -func (w *goClientWrapper) Bootstrap(addrs []peer.AddrInfo) { +func (w *goClientWrapper) Connect(ctx context.Context, addr peer.AddrInfo) error { if w.peer != nil { - w.peer.Bootstrap(addrs) + return w.peer.Connect(ctx, addr) } + return nil } func (w *goClientWrapper) Close() { diff --git a/tests/integration/p2p.go b/tests/integration/p2p.go index b1b79982cf..99c713bb79 100644 --- a/tests/integration/p2p.go +++ b/tests/integration/p2p.go @@ -16,7 +16,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/net" - "github.com/libp2p/go-libp2p/core/peer" "github.com/sourcenetwork/corelog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -147,9 +146,12 @@ func connectPeers( sourceNode := s.nodes[cfg.SourceNodeID] targetNode := s.nodes[cfg.TargetNodeID] - addrs := []peer.AddrInfo{targetNode.PeerInfo()} - log.InfoContext(s.ctx, "Bootstrapping with peers", corelog.Any("Addresses", addrs)) - sourceNode.Bootstrap(addrs) + log.InfoContext(s.ctx, "Connect peers", + corelog.Any("Source", sourceNode.PeerInfo()), + corelog.Any("Target", targetNode.PeerInfo())) + + err := sourceNode.Connect(s.ctx, targetNode.PeerInfo()) + require.NoError(s.t, err) s.nodeP2P[cfg.SourceNodeID].connections[cfg.TargetNodeID] = struct{}{} s.nodeP2P[cfg.TargetNodeID].connections[cfg.SourceNodeID] = struct{}{} @@ -287,6 +289,23 @@ func getAllP2PCollections( assert.Equal(s.t, expectedCollections, cols) } +// reconnectPeers makes sure that all peers are connected after a node restart action. +func reconnectPeers(s *state) { + for i, n := range s.nodeP2P { + for j := range n.connections { + sourceNode := s.nodes[i] + targetNode := s.nodes[j] + + log.InfoContext(s.ctx, "Connect peers", + corelog.Any("Source", sourceNode.PeerInfo()), + corelog.Any("Target", targetNode.PeerInfo())) + + err := sourceNode.Connect(s.ctx, targetNode.PeerInfo()) + require.NoError(s.t, err) + } + } +} + func RandomNetworkingConfig() ConfigureNode { return func() []net.NodeOpt { return []net.NodeOpt{ diff --git a/tests/integration/utils.go b/tests/integration/utils.go index f26ee4ed5b..2576f30762 100644 --- a/tests/integration/utils.go +++ b/tests/integration/utils.go @@ -709,15 +709,9 @@ func restartNodes( nodeOpts := s.nodeConfigs[i] nodeOpts = append(nodeOpts, net.WithListenAddresses(addresses...)) - p, err := net.NewPeer(s.ctx, node.DB.Rootstore(), node.DB.Blockstore(), node.DB.Events(), nodeOpts...) + node.Peer, err = net.NewPeer(s.ctx, node.DB.Blockstore(), node.DB.Events(), nodeOpts...) 
require.NoError(s.t, err) - if err := p.Start(); err != nil { - p.Close() - require.NoError(s.t, err) - } - node.Peer = p - c, err := setupClient(s, node) require.NoError(s.t, err) s.nodes[i] = c @@ -733,6 +727,7 @@ func restartNodes( // will reference the old (closed) database instances. refreshCollections(s) refreshIndexes(s) + reconnectPeers(s) } // refreshCollections refreshes all the collections of the given names, preserving order. @@ -783,20 +778,12 @@ func configureNode( nodeOpts := action() nodeOpts = append(nodeOpts, net.WithPrivateKey(privateKey)) - p, err := net.NewPeer(s.ctx, node.DB.Rootstore(), node.DB.Blockstore(), node.DB.Events(), nodeOpts...) + node.Peer, err = net.NewPeer(s.ctx, node.DB.Blockstore(), node.DB.Events(), nodeOpts...) require.NoError(s.t, err) - log.InfoContext(s.ctx, "Starting P2P node", corelog.Any("P2P address", p.PeerInfo())) - if err := p.Start(); err != nil { - p.Close() - require.NoError(s.t, err) - } - - s.nodeAddresses = append(s.nodeAddresses, p.PeerInfo()) + s.nodeAddresses = append(s.nodeAddresses, node.Peer.PeerInfo()) s.nodeConfigs = append(s.nodeConfigs, nodeOpts) - node.Peer = p - c, err := setupClient(s, node) require.NoError(s.t, err) From af15a33e32e1c5a51bc535b0cb7b8bc584752d88 Mon Sep 17 00:00:00 2001 From: Shahzad Lone Date: Mon, 26 Aug 2024 17:41:43 -0400 Subject: [PATCH 02/71] chore: Bump to GoLang v1.22 (#2913) ## Relevant issue(s) Resolves #2431 ## Description - This is a routine version bump of GoLang, the previous bump was done in (https://github.com/sourcenetwork/defradb/pull/2195) - Also updates the golang version for AWS AMI generation. - Update all loop todos for 1.22 by removing the copy variables. --- .github/workflows/check-tidy.yml | 4 ++-- .github/workflows/check-vulnerabilities.yml | 4 ++-- Makefile | 2 +- go.mod | 2 +- internal/lens/history.go | 6 +----- internal/planner/commit.go | 5 ++--- internal/request/graphql/schema/generate.go | 14 +++----------- tools/cloud/aws/packer/build_aws_ami.pkr.hcl | 4 ++-- tools/configs/golangci.yaml | 12 ++++-------- tools/defradb.containerfile | 2 +- 10 files changed, 19 insertions(+), 36 deletions(-) diff --git a/.github/workflows/check-tidy.yml b/.github/workflows/check-tidy.yml index bbcf2d620b..6ffea2e189 100644 --- a/.github/workflows/check-tidy.yml +++ b/.github/workflows/check-tidy.yml @@ -9,8 +9,8 @@ # licenses/APL.txt. # This workflow checks that go mod tidy command we have set for the specific -# go version is not broken, for example `go mod tidy -go=1.21.3`. This -# can cause some head scratching at times, so better catch this in the PR. +# go version is not broken. This can cause some head scratching at times, +# so better catch this in the PR. # # Inaddition to that also checks that we are currently in a `tidy` state. name: Check Tidy Workflow diff --git a/.github/workflows/check-vulnerabilities.yml b/.github/workflows/check-vulnerabilities.yml index 6f1b2fd35f..5ebb3192b1 100644 --- a/.github/workflows/check-vulnerabilities.yml +++ b/.github/workflows/check-vulnerabilities.yml @@ -33,7 +33,7 @@ jobs: - name: Run govulncheck uses: golang/govulncheck-action@v1 with: - go-version-input: "1.21" - go-package: ./... + go-version-file: 'go.mod' check-latest: true cache: false + go-package: ./... 
diff --git a/Makefile b/Makefile index cfc48d50c7..c84a0a118e 100644 --- a/Makefile +++ b/Makefile @@ -202,7 +202,7 @@ verify: .PHONY: tidy tidy: - go mod tidy -go=1.21.3 + go mod tidy -go=1.22 .PHONY: clean clean: diff --git a/go.mod b/go.mod index 8b94ad9378..8247f6a406 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/sourcenetwork/defradb -go 1.21.3 +go 1.22 require ( github.com/bits-and-blooms/bitset v1.13.0 diff --git a/internal/lens/history.go b/internal/lens/history.go index e4e04d657f..fbe8fa0cec 100644 --- a/internal/lens/history.go +++ b/internal/lens/history.go @@ -157,11 +157,7 @@ func getCollectionHistory( history := map[schemaVersionID]*collectionHistoryLink{} schemaVersionsByColID := map[uint32]schemaVersionID{} - for _, c := range cols { - // Todo - this `col := c` can be removed with Go 1.22: - // https://github.com/sourcenetwork/defradb/issues/2431 - col := c - + for _, col := range cols { // Convert the temporary types to the cleaner return type: history[col.SchemaVersionID] = &collectionHistoryLink{ collection: &col, diff --git a/internal/planner/commit.go b/internal/planner/commit.go index bbb5fdc09c..1e6a1f7b92 100644 --- a/internal/planner/commit.go +++ b/internal/planner/commit.go @@ -241,9 +241,8 @@ func (n *dagScanNode) Next() (bool, error) { // so that the last new cid will be at the front of the slice n.queuedCids = append(make([]*cid.Cid, len(heads)), n.queuedCids...) - for i, h := range heads { - link := h // TODO remove when Go 1.22 #2431 - n.queuedCids[len(heads)-i-1] = &link.Cid + for i, head := range heads { + n.queuedCids[len(heads)-i-1] = &head.Cid } } diff --git a/internal/request/graphql/schema/generate.go b/internal/request/graphql/schema/generate.go index 4d86a97204..d2d4841408 100644 --- a/internal/request/graphql/schema/generate.go +++ b/internal/request/graphql/schema/generate.go @@ -420,11 +420,7 @@ func (g *Generator) buildTypes( // get all the defined types from the AST objs := make([]*gql.Object, 0) - for _, c := range collections { - // Copy the loop variable before usage within the loop or it - // will be reassigned before the thunk is run - // TODO remove when Go 1.22 - collection := c + for _, collection := range collections { fieldDescriptions := collection.GetFields() isEmbeddedObject := !collection.Description.Name.HasValue() isQuerySource := len(collection.Description.QuerySources()) > 0 @@ -536,18 +532,14 @@ func (g *Generator) buildTypes( // buildMutationInputTypes creates the input object types // for collection create and update mutation operations. func (g *Generator) buildMutationInputTypes(collections []client.CollectionDefinition) error { - for _, c := range collections { - if !c.Description.Name.HasValue() { + for _, collection := range collections { + if !collection.Description.Name.HasValue() { // If the definition's collection is empty, this must be a collectionless // schema, in which case users cannot mutate documents through it and we // have no need to build mutation input types for it. 
continue
 		}
 
-		// Copy the loop variable before usage within the loop or it
-		// will be reassigned before the thunk is run
-		// TODO remove when Go 1.22
-		collection := c
 		mutationInputName := collection.Description.Name.Value() + mutationInputNameSuffix
 
 		// check if mutation input type exists
diff --git a/tools/cloud/aws/packer/build_aws_ami.pkr.hcl b/tools/cloud/aws/packer/build_aws_ami.pkr.hcl
index 4eb1579778..e089a1f3bc 100644
--- a/tools/cloud/aws/packer/build_aws_ami.pkr.hcl
+++ b/tools/cloud/aws/packer/build_aws_ami.pkr.hcl
@@ -66,8 +66,8 @@ build {
     inline = [
       "/usr/bin/cloud-init status --wait",
       "sudo apt-get update && sudo apt-get install make build-essential -y",
-      "curl -OL https://golang.org/dl/go1.21.6.linux-amd64.tar.gz",
-      "rm -rf /usr/local/go && sudo tar -C /usr/local -xzf go1.21.6.linux-amd64.tar.gz",
+      "curl -OL https://golang.org/dl/go1.22.6.linux-amd64.tar.gz",
+      "rm -rf /usr/local/go && sudo tar -C /usr/local -xzf go1.22.6.linux-amd64.tar.gz",
       "export PATH=$PATH:/usr/local/go/bin",
       "git clone \"https://git@$DEFRADB_GIT_REPO\"",
       "cd ./defradb || { printf \"\\\ncd into defradb failed.\\\n\" && exit 2; }",
diff --git a/tools/configs/golangci.yaml b/tools/configs/golangci.yaml
index e8fe63a1fc..0b92bd2d50 100644
--- a/tools/configs/golangci.yaml
+++ b/tools/configs/golangci.yaml
@@ -57,7 +57,7 @@ run:
 
   # Define the Go version limit.
   # Default: use Go version from the go.mod file, fallback on the env var `GOVERSION`.
-  go: "1.21"
+  go: "1.22"
 
 #=====================================================================================[ Output Configuration Options ]
 output:
@@ -105,15 +105,11 @@ linters:
 
   enable-all: false
 
-  disable:
-    # - prealloc
-
   disable-all: true
 
   enable:
     - errcheck
     - errorlint
-    - exportloopref
     - forbidigo
     - goconst
    - gofmt
@@ -263,7 +259,7 @@ linters-settings:
 
   gosimple:
     # Select the Go version to target.
-    go: "1.21"
+    go: "1.22"
 
     # https://staticcheck.io/docs/options#checks
     checks: ["all", "-S1038"]
     # Turn on all except (these are disabled):
@@ -356,13 +352,13 @@ linters-settings:
 
   staticcheck:
     # Select the Go version to target.
-    go: "1.21"
+    go: "1.22"
 
     # https://staticcheck.io/docs/options#checks
     checks: ["all"]
 
   unused:
     # Select the Go version to target.
-    go: "1.21"
+    go: "1.22"
 
   whitespace:
     # Enforces newlines (or comments) after every multi-line if statement.
diff --git a/tools/defradb.containerfile b/tools/defradb.containerfile
index 37b2275839..d6d7fc2f30 100644
--- a/tools/defradb.containerfile
+++ b/tools/defradb.containerfile
@@ -11,7 +11,7 @@ RUN npm run build
 
 # Stage: build
 # Several steps are involved to enable caching and because of the behavior of COPY regarding directories.
-FROM docker.io/golang:1.21 AS build
+FROM docker.io/golang:1.22 AS build
 WORKDIR /repo/
 COPY go.mod go.sum Makefile ./
 RUN make deps:modules

From d5b034f9658f4f734fed3a8b995b1ebc2d78d5ce Mon Sep 17 00:00:00 2001
From: AndrewSisley
Date: Mon, 26 Aug 2024 18:32:32 -0400
Subject: [PATCH 03/71] refactor: Make SourceHub dep internal-only (#2963)

## Relevant issue(s)

Resolves #2962

## Description

I was mostly curious if this will make the vuln checker go green (it doesn't, it just reports it from our internal acp package now), but it is a good change anyway IMO, as it saves embedded Go client users from having to directly import the dependency.
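As a rough sketch only (the `staticSigner` type, its fields, and the placeholder address below are hypothetical; the `node.TxSigner` methods and the `node.WithTxnSigner` option are the ones introduced in the diff that follows), an embedded client can now supply its own signer without importing the sourcehub SDK:

```go
package example

import (
	"github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1"
	cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
	"github.com/sourcenetwork/immutable"

	"github.com/sourcenetwork/defradb/node"
)

// staticSigner is a hypothetical signer backed by an in-memory key.
// It satisfies node.TxSigner, so no sourcehub SDK import is needed.
type staticSigner struct {
	key  *secp256k1.PrivKey
	addr string
}

func (s staticSigner) GetAccAddress() string              { return s.addr }
func (s staticSigner) GetPrivateKey() cryptotypes.PrivKey { return s.key }

func acpOpts() []node.ACPOpt {
	signer := staticSigner{
		key:  secp256k1.GenPrivKey(),
		addr: "source1...", // placeholder bech32 account address
	}
	return []node.ACPOpt{
		node.WithTxnSigner(immutable.Some[node.TxSigner](signer)),
	}
}
```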
--- cli/start.go | 3 +-- keyring/signer.go | 5 +++-- node/acp.go | 15 ++++++++++++--- tests/integration/acp.go | 3 +-- 4 files changed, 17 insertions(+), 9 deletions(-) diff --git a/cli/start.go b/cli/start.go index 641e743ee8..651360ab83 100644 --- a/cli/start.go +++ b/cli/start.go @@ -16,7 +16,6 @@ import ( "syscall" "github.com/sourcenetwork/immutable" - "github.com/sourcenetwork/sourcehub/sdk" "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/errors" @@ -101,7 +100,7 @@ func MakeStartCommand() *cobra.Command { if err != nil { return err } - opts = append(opts, node.WithTxnSigner(immutable.Some[sdk.TxSigner](signer))) + opts = append(opts, node.WithTxnSigner(immutable.Some[node.TxSigner](signer))) } } diff --git a/keyring/signer.go b/keyring/signer.go index 25b8db8db8..ebab954896 100644 --- a/keyring/signer.go +++ b/keyring/signer.go @@ -14,7 +14,8 @@ import ( "github.com/cosmos/cosmos-sdk/crypto/keys/secp256k1" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" cosmostypes "github.com/cosmos/cosmos-sdk/types" - "github.com/sourcenetwork/sourcehub/sdk" + + "github.com/sourcenetwork/defradb/node" ) type txnSigner struct { @@ -25,7 +26,7 @@ type txnSigner struct { accAddress string } -var _ sdk.TxSigner = (*txnSigner)(nil) +var _ node.TxSigner = (*txnSigner)(nil) // NewTxSignerFromKeyringKey creates a new TxSigner backed by a keyring. // diff --git a/node/acp.go b/node/acp.go index 4123df00f1..4092aa3532 100644 --- a/node/acp.go +++ b/node/acp.go @@ -13,8 +13,8 @@ package node import ( "context" + cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" "github.com/sourcenetwork/immutable" - "github.com/sourcenetwork/sourcehub/sdk" "github.com/sourcenetwork/defradb/acp" ) @@ -36,12 +36,21 @@ type ACPOptions struct { // This is only used for local acp. path string - signer immutable.Option[sdk.TxSigner] + signer immutable.Option[TxSigner] sourceHubChainID string sourceHubGRPCAddress string sourceHubCometRPCAddress string } +// TxSigner models an entity capable of providing signatures for a Tx. +// +// Effectively, it can be either a secp256k1 cosmos-sdk key or a pointer to a +// secp256k1 key in a cosmos-sdk like keyring. +type TxSigner interface { + GetAccAddress() string + GetPrivateKey() cryptotypes.PrivKey +} + // DefaultACPOptions returns new options with default values. func DefaultACPOptions() *ACPOptions { return &ACPOptions{ @@ -71,7 +80,7 @@ func WithACPPath(path string) ACPOpt { // WithKeyring sets the txn signer for Defra to use. // // It is only required when SourceHub ACP is active. 
-func WithTxnSigner(signer immutable.Option[sdk.TxSigner]) ACPOpt { +func WithTxnSigner(signer immutable.Option[TxSigner]) ACPOpt { return func(o *ACPOptions) { o.signer = signer } diff --git a/tests/integration/acp.go b/tests/integration/acp.go index 831e7f6cac..9242a266fc 100644 --- a/tests/integration/acp.go +++ b/tests/integration/acp.go @@ -27,7 +27,6 @@ import ( "github.com/decred/dcrd/dcrec/secp256k1/v4" toml "github.com/pelletier/go-toml" "github.com/sourcenetwork/immutable" - "github.com/sourcenetwork/sourcehub/sdk" "github.com/stretchr/testify/require" acpIdentity "github.com/sourcenetwork/defradb/acp/identity" @@ -352,7 +351,7 @@ cmdReaderLoop: } return []node.ACPOpt{ - node.WithTxnSigner(immutable.Some[sdk.TxSigner](signer)), + node.WithTxnSigner(immutable.Some[node.TxSigner](signer)), node.WithSourceHubChainID(chainID), node.WithSourceHubGRPCAddress(gRpcAddress), node.WithSourceHubCometRPCAddress(rpcAddress), From 2d1702c4e8bfcf0925e81b50540a9f6abf7a033b Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 28 Aug 2024 05:14:31 -0400 Subject: [PATCH 04/71] bot: Update dependencies (bulk dependabot PRs) 27-08-2024 (#2966) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by combining the following PRs: #2959 bot: Bump github.com/bits-and-blooms/bitset from 1.13.0 to 1.14.2 #2958 bot: Bump github.com/libp2p/go-libp2p-kad-dht from 0.25.2 to 0.26.1 #2957 bot: Bump github.com/vito/go-sse from 1.1.1 to 1.1.2 #2956 bot: Bump go.opentelemetry.io/otel/metric from 1.28.0 to 1.29.0 #2955 bot: Bump graphiql from 3.7.0 to 3.7.1 in /playground #2954 bot: Bump eslint from 9.9.0 to 9.9.1 in /playground --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- go.mod | 14 +- go.sum | 28 ++-- playground/package-lock.json | 266 ++++++++++++++++------------------- playground/package.json | 4 +- 4 files changed, 148 insertions(+), 164 deletions(-) diff --git a/go.mod b/go.mod index 8247f6a406..f3455ec9af 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,7 @@ module github.com/sourcenetwork/defradb go 1.22 require ( - github.com/bits-and-blooms/bitset v1.13.0 + github.com/bits-and-blooms/bitset v1.14.2 github.com/bxcodec/faker v2.0.1+incompatible github.com/cosmos/cosmos-sdk v0.50.9 github.com/cosmos/gogoproto v1.7.0 @@ -31,7 +31,7 @@ require ( github.com/lestrrat-go/jwx/v2 v2.1.1 github.com/libp2p/go-libp2p v0.36.2 github.com/libp2p/go-libp2p-gostream v0.6.0 - github.com/libp2p/go-libp2p-kad-dht v0.25.2 + github.com/libp2p/go-libp2p-kad-dht v0.26.1 github.com/libp2p/go-libp2p-pubsub v0.12.0 github.com/libp2p/go-libp2p-record v0.2.0 github.com/mr-tron/base58 v1.2.0 @@ -54,9 +54,9 @@ require ( github.com/stretchr/testify v1.9.0 github.com/tidwall/btree v1.7.0 github.com/valyala/fastjson v1.6.4 - github.com/vito/go-sse v1.1.1 + github.com/vito/go-sse v1.1.2 github.com/zalando/go-keyring v0.2.5 - go.opentelemetry.io/otel/metric v1.28.0 + go.opentelemetry.io/otel/metric v1.29.0 go.opentelemetry.io/otel/sdk/metric v1.28.0 go.uber.org/zap v1.27.0 golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa @@ -247,7 +247,7 @@ require ( github.com/libp2p/go-flow-metrics v0.1.0 // indirect github.com/libp2p/go-libp2p-asn-util v0.4.1 // indirect github.com/libp2p/go-libp2p-kbucket v0.6.3 // indirect - 
github.com/libp2p/go-libp2p-routing-helpers v0.7.3 // indirect + github.com/libp2p/go-libp2p-routing-helpers v0.7.4 // indirect github.com/libp2p/go-msgio v0.3.0 // indirect github.com/libp2p/go-nat v0.2.0 // indirect github.com/libp2p/go-netroute v0.2.1 // indirect @@ -349,9 +349,9 @@ require ( go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.52.0 // indirect - go.opentelemetry.io/otel v1.28.0 // indirect + go.opentelemetry.io/otel v1.29.0 // indirect go.opentelemetry.io/otel/sdk v1.28.0 // indirect - go.opentelemetry.io/otel/trace v1.28.0 // indirect + go.opentelemetry.io/otel/trace v1.29.0 // indirect go.uber.org/dig v1.18.0 // indirect go.uber.org/fx v1.22.2 // indirect go.uber.org/mock v0.4.0 // indirect diff --git a/go.sum b/go.sum index c9ee7d5d1d..59d3052b4e 100644 --- a/go.sum +++ b/go.sum @@ -294,8 +294,8 @@ github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 h1:41iFGWnSlI2gVpmOtVTJZNodLdLQLn/KsJqFvXwnd/s= github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= -github.com/bits-and-blooms/bitset v1.13.0 h1:bAQ9OPNFYbGHV6Nez0tmNI0RiEu7/hxlYJRUA0wFAVE= -github.com/bits-and-blooms/bitset v1.13.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= +github.com/bits-and-blooms/bitset v1.14.2 h1:YXVoyPndbdvcEVcseEovVfp0qjJp7S+i5+xgp/Nfbdc= +github.com/bits-and-blooms/bitset v1.14.2/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM= github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ= github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g= @@ -997,16 +997,16 @@ github.com/libp2p/go-libp2p-asn-util v0.4.1 h1:xqL7++IKD9TBFMgnLPZR6/6iYhawHKHl9 github.com/libp2p/go-libp2p-asn-util v0.4.1/go.mod h1:d/NI6XZ9qxw67b4e+NgpQexCIiFYJjErASrYW4PFDN8= github.com/libp2p/go-libp2p-gostream v0.6.0 h1:QfAiWeQRce6pqnYfmIVWJFXNdDyfiR/qkCnjyaZUPYU= github.com/libp2p/go-libp2p-gostream v0.6.0/go.mod h1:Nywu0gYZwfj7Jc91PQvbGU8dIpqbQQkjWgDuOrFaRdA= -github.com/libp2p/go-libp2p-kad-dht v0.25.2 h1:FOIk9gHoe4YRWXTu8SY9Z1d0RILol0TrtApsMDPjAVQ= -github.com/libp2p/go-libp2p-kad-dht v0.25.2/go.mod h1:6za56ncRHYXX4Nc2vn8z7CZK0P4QiMcrn77acKLM2Oo= +github.com/libp2p/go-libp2p-kad-dht v0.26.1 h1:AazV3LCImYVkDUGAHx5lIEgZ9iUI2QQKH5GMRQU8uEA= +github.com/libp2p/go-libp2p-kad-dht v0.26.1/go.mod h1:mqRUGJ/+7ziQ3XknU2kKHfsbbgb9xL65DXjPOJwmZF8= github.com/libp2p/go-libp2p-kbucket v0.6.3 h1:p507271wWzpy2f1XxPzCQG9NiN6R6lHL9GiSErbQQo0= github.com/libp2p/go-libp2p-kbucket v0.6.3/go.mod h1:RCseT7AH6eJWxxk2ol03xtP9pEHetYSPXOaJnOiD8i0= github.com/libp2p/go-libp2p-pubsub v0.12.0 h1:PENNZjSfk8KYxANRlpipdS7+BfLmOl3L2E/6vSNjbdI= github.com/libp2p/go-libp2p-pubsub v0.12.0/go.mod h1:Oi0zw9aw8/Y5GC99zt+Ef2gYAl+0nZlwdJonDyOz/sE= github.com/libp2p/go-libp2p-record v0.2.0 h1:oiNUOCWno2BFuxt3my4i1frNrt7PerzB3queqa1NkQ0= github.com/libp2p/go-libp2p-record v0.2.0/go.mod h1:I+3zMkvvg5m2OcSdoL0KPljyJyvNDFGKX7QdlpYUcwk= -github.com/libp2p/go-libp2p-routing-helpers v0.7.3 h1:u1LGzAMVRK9Nqq5aYDVOiq/HaB93U9WWczBzGyAC5ZY= -github.com/libp2p/go-libp2p-routing-helpers 
v0.7.3/go.mod h1:cN4mJAD/7zfPKXBcs9ze31JGYAZgzdABEm+q/hkswb8= +github.com/libp2p/go-libp2p-routing-helpers v0.7.4 h1:6LqS1Bzn5CfDJ4tzvP9uwh42IB7TJLNFJA6dEeGBv84= +github.com/libp2p/go-libp2p-routing-helpers v0.7.4/go.mod h1:we5WDj9tbolBXOuF1hGOkR+r7Uh1408tQbAKaT5n1LE= github.com/libp2p/go-libp2p-testing v0.12.0 h1:EPvBb4kKMWO29qP4mZGyhVzUyR25dvfUIK5WDu6iPUA= github.com/libp2p/go-libp2p-testing v0.12.0/go.mod h1:KcGDRXyN7sQCllucn1cOOS+Dmm7ujhfEyXQL5lvkcPg= github.com/libp2p/go-msgio v0.3.0 h1:mf3Z8B1xcFN314sWX+2vOTShIE0Mmn2TXn3YCUQGNj0= @@ -1486,8 +1486,8 @@ github.com/valyala/fastjson v1.6.4 h1:uAUNq9Z6ymTgGhcm0UynUAB6tlbakBrz6CQFax3BXV github.com/valyala/fastjson v1.6.4/go.mod h1:CLCAqky6SMuOcxStkYQvblddUtoRxhYMGLrsQns1aXY= github.com/viant/assertly v0.4.8/go.mod h1:aGifi++jvCrUaklKEKT0BU95igDNaqkvz+49uaYMPRU= github.com/viant/toolbox v0.24.0/go.mod h1:OxMCG57V0PXuIP2HNQrtJf2CjqdmbrOx5EkMILuUhzM= -github.com/vito/go-sse v1.1.1 h1:9TlsS/xk9++g+W61ZR8dusNq2BAsqR1Kq8NhMfqpzGI= -github.com/vito/go-sse v1.1.1/go.mod h1:2wkcaQ+jtlZ94Uve8gYZjFpL68luAjssTINA2hpgcZs= +github.com/vito/go-sse v1.1.2 h1:FLQ1J0tMGN7pKa3KOyZCHojYDR0Z/L/y+3ejUO3P+tM= +github.com/vito/go-sse v1.1.2/go.mod h1:2wkcaQ+jtlZ94Uve8gYZjFpL68luAjssTINA2hpgcZs= github.com/warpfork/go-testmark v0.12.1 h1:rMgCpJfwy1sJ50x0M0NgyphxYYPMOODIJHhsXyEHU0s= github.com/warpfork/go-testmark v0.12.1/go.mod h1:kHwy7wfvGSPh1rQJYKayD4AbtNaeyZdcGi9tNJTaa5Y= github.com/warpfork/go-wish v0.0.0-20220906213052-39a1cc7a02d0 h1:GDDkbFiaK8jsSDJfjId/PEGEShv6ugrt4kYsC5UIDaQ= @@ -1539,16 +1539,16 @@ go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.4 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.52.0 h1:9l89oX4ba9kHbBol3Xin3leYJ+252h0zszDtBwyKe2A= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.52.0/go.mod h1:XLZfZboOJWHNKUv7eH0inh0E9VV6eWDFB/9yJyTLPp0= -go.opentelemetry.io/otel v1.28.0 h1:/SqNcYk+idO0CxKEUOtKQClMK/MimZihKYMruSMViUo= -go.opentelemetry.io/otel v1.28.0/go.mod h1:q68ijF8Fc8CnMHKyzqL6akLO46ePnjkgfIMIjUIX9z4= -go.opentelemetry.io/otel/metric v1.28.0 h1:f0HGvSl1KRAU1DLgLGFjrwVyismPlnuU6JD6bOeuA5Q= -go.opentelemetry.io/otel/metric v1.28.0/go.mod h1:Fb1eVBFZmLVTMb6PPohq3TO9IIhUisDsbJoL/+uQW4s= +go.opentelemetry.io/otel v1.29.0 h1:PdomN/Al4q/lN6iBJEN3AwPvUiHPMlt93c8bqTG5Llw= +go.opentelemetry.io/otel v1.29.0/go.mod h1:N/WtXPs1CNCUEx+Agz5uouwCba+i+bJGFicT8SR4NP8= +go.opentelemetry.io/otel/metric v1.29.0 h1:vPf/HFWTNkPu1aYeIsc98l4ktOQaL6LeSoeV2g+8YLc= +go.opentelemetry.io/otel/metric v1.29.0/go.mod h1:auu/QWieFVWx+DmQOUMgj0F8LHWdgalxXqvp7BII/W8= go.opentelemetry.io/otel/sdk v1.28.0 h1:b9d7hIry8yZsgtbmM0DKyPWMMUMlK9NEKuIG4aBqWyE= go.opentelemetry.io/otel/sdk v1.28.0/go.mod h1:oYj7ClPUA7Iw3m+r7GeEjz0qckQRJK2B8zjcZEfu7Pg= go.opentelemetry.io/otel/sdk/metric v1.28.0 h1:OkuaKgKrgAbYrrY0t92c+cC+2F6hsFNnCQArXCKlg08= go.opentelemetry.io/otel/sdk/metric v1.28.0/go.mod h1:cWPjykihLAPvXKi4iZc1dpER3Jdq2Z0YLse3moQUCpg= -go.opentelemetry.io/otel/trace v1.28.0 h1:GhQ9cUuQGmNDd5BTCP2dAvv75RdMxEfTmYejp+lkx9g= -go.opentelemetry.io/otel/trace v1.28.0/go.mod h1:jPyXzNPg6da9+38HEwElrQiHlVMTnVfM3/yv2OlIHaI= +go.opentelemetry.io/otel/trace v1.29.0 h1:J/8ZNK4XgR7a21DZUAsbF8pZ5Jcw1VhACmnYt39JTi4= +go.opentelemetry.io/otel/trace v1.29.0/go.mod h1:eHl3w0sp3paPkYstJOmAimxhiFXPg+MMTlEh3nsQgWQ= go.opentelemetry.io/proto/otlp v0.7.0/go.mod 
h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= diff --git a/playground/package-lock.json b/playground/package-lock.json index 2fa682a907..872277b296 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -8,7 +8,7 @@ "name": "playground", "version": "0.0.0", "dependencies": { - "graphiql": "^3.7.0", + "graphiql": "^3.7.1", "graphql": "^16.9.0", "react": "^18.3.1", "react-dom": "^18.3.1", @@ -21,7 +21,7 @@ "@typescript-eslint/eslint-plugin": "^8.2.0", "@typescript-eslint/parser": "^8.2.0", "@vitejs/plugin-react-swc": "^3.7.0", - "eslint": "^9.9.0", + "eslint": "^9.9.1", "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-react-refresh": "^0.4.9", "typescript": "^5.5.4", @@ -59,11 +59,41 @@ "integrity": "sha512-NVf/1YycDMs6+FxS0Tb/W8MjJRDQdXF+tBfDtZ5UZeiRUkTmwKc4vmYCKZTyymfJk1gnMsauvZSX/HiV9jOABw==", "license": "MIT" }, + "node_modules/@codemirror/language": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.0.0.tgz", + "integrity": "sha512-rtjk5ifyMzOna1c7PBu7J1VCt0PvA5wy3o8eMVnxMKb7z8KA7JFecvD04dSn14vj/bBaAbqRsGed5OjtofEnLA==", + "peer": true, + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/common": "^1.0.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0", + "style-mod": "^4.0.0" + } + }, + "node_modules/@codemirror/state": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.4.1.tgz", + "integrity": "sha512-QkEyUiLhsJoZkbumGZlswmAhA7CBU02Wrz7zvH4SrcifbsqwlXShVXg65f3v/ts57W3dqyamEriMhij1Z3Zz4A==", + "peer": true + }, + "node_modules/@codemirror/view": { + "version": "6.33.0", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.33.0.tgz", + "integrity": "sha512-AroaR3BvnjRW8fiZBalAaK+ZzB5usGgI014YKElYZvQdNH5ZIidHlO+cyf/2rWzyBFRkvG6VhiXeAEbC53P2YQ==", + "peer": true, + "dependencies": { + "@codemirror/state": "^6.4.0", + "style-mod": "^4.1.0", + "w3c-keyname": "^2.2.4" + } + }, "node_modules/@emotion/is-prop-valid": { "version": "0.8.8", "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz", "integrity": "sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA==", - "license": "MIT", "optional": true, "dependencies": { "@emotion/memoize": "0.7.4" @@ -73,7 +103,6 @@ "version": "0.7.4", "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.4.tgz", "integrity": "sha512-Ja/Vfqe3HpuzRsG1oBtWTHk2PGZ7GR+2Vz5iYGelAw8dx32K0y7PjVuxK6z1nMpZOqAFsRUPCkK1YjJ56qJlgw==", - "license": "MIT", "optional": true }, "node_modules/@esbuild/aix-ppc64": { @@ -494,11 +523,10 @@ } }, "node_modules/@eslint/config-array": { - "version": "0.17.1", - "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.17.1.tgz", - "integrity": "sha512-BlYOpej8AQ8Ev9xVqroV7a02JK3SkBAaN9GfMMH9W6Ch8FlQlkjGw4Ir7+FgYwfirivAf4t+GtzuAxqfukmISA==", + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.18.0.tgz", + "integrity": "sha512-fTxvnS1sRMu3+JjXwJG0j/i4RT9u4qJ+lqS/yCGap4lH4zZGzQ7tu+xZqQmcMZq5OBZDL4QRxQzRjkWcGt8IVw==", "dev": true, - "license": "Apache-2.0", "dependencies": { "@eslint/object-schema": "^2.1.4", "debug": "^4.3.1", @@ -513,7 +541,6 @@ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", 
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "dev": true, - "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -524,7 +551,6 @@ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, - "license": "ISC", "dependencies": { "brace-expansion": "^1.1.7" }, @@ -581,11 +607,10 @@ } }, "node_modules/@eslint/js": { - "version": "9.9.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.9.0.tgz", - "integrity": "sha512-hhetes6ZHP3BlXLxmd8K2SNgkhNSi+UcecbnwWKwpP7kyi/uC75DJ1lOOBO3xrC4jyojtGE3YxKZPHfk4yrgug==", + "version": "9.9.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.9.1.tgz", + "integrity": "sha512-xIDQRsfg5hNBqHz04H1R3scSVwmI+KUbqjsQKHKQ1DAUSaUjYPReZZmS/5PNiKu1fUvzDd6H7DEDKACSEhu+TQ==", "dev": true, - "license": "MIT", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } @@ -595,7 +620,6 @@ "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.4.tgz", "integrity": "sha512-BsWiH1yFGjXXS2yvrf5LyuoSIIbPrGUWob917o+BTKuZ7qJdxX8aJLRxs1fS9n6r7vESrq1OUqb68dANcFXuQQ==", "dev": true, - "license": "Apache-2.0", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } @@ -604,7 +628,6 @@ "version": "1.6.7", "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.7.tgz", "integrity": "sha512-yDzVT/Lm101nQ5TCVeK65LtdN7Tj4Qpr9RTXJ2vPFLqtLxwOrpoxAHAJI8J3yYWUc40J0BDBheaitK5SJmno2g==", - "license": "MIT", "dependencies": { "@floating-ui/utils": "^0.2.7" } @@ -613,7 +636,6 @@ "version": "1.6.10", "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.10.tgz", "integrity": "sha512-fskgCFv8J8OamCmyun8MfjB1Olfn+uZKjOKZ0vhYF3gRmEUXcGOjxWL8bBr7i4kIuPZ2KD2S3EUIOxnjC8kl2A==", - "license": "MIT", "dependencies": { "@floating-ui/core": "^1.6.0", "@floating-ui/utils": "^0.2.7" @@ -623,7 +645,6 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.1.1.tgz", "integrity": "sha512-4h84MJt3CHrtG18mGsXuLCHMrug49d7DFkU0RMIyshRveBeyV2hmV/pDaF2Uxtu8kgq5r46llp5E5FQiR0K2Yg==", - "license": "MIT", "dependencies": { "@floating-ui/dom": "^1.0.0" }, @@ -635,16 +656,14 @@ "node_modules/@floating-ui/utils": { "version": "0.2.7", "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.7.tgz", - "integrity": "sha512-X8R8Oj771YRl/w+c1HqAC1szL8zWQRwFvgDwT129k9ACdBoud/+/rX9V0qiMl6LWUdP9voC2nDVZYPMQQsb6eA==", - "license": "MIT" + "integrity": "sha512-X8R8Oj771YRl/w+c1HqAC1szL8zWQRwFvgDwT129k9ACdBoud/+/rX9V0qiMl6LWUdP9voC2nDVZYPMQQsb6eA==" }, "node_modules/@graphiql/react": { - "version": "0.26.0", - "resolved": "https://registry.npmjs.org/@graphiql/react/-/react-0.26.0.tgz", - "integrity": "sha512-WMuv4/SPDw/+b5RuYX2/43SRknCiODQFIY1lYkUcAiU379gcX6QvezDkevrfFgWW8l4wb/pkQ6BR98Wjx9hXBw==", - "license": "MIT", + "version": "0.26.2", + "resolved": "https://registry.npmjs.org/@graphiql/react/-/react-0.26.2.tgz", + "integrity": "sha512-aO4GWf/kJmqrjO+PORT/NPxwGvPGlg+mwye1v8xAlf8Q9j7P0hVtVBawYaSLUCCfJ/QnH7JAP+0VRamyooZZCw==", "dependencies": { - "@graphiql/toolkit": "^0.10.0", + "@graphiql/toolkit": "^0.11.0", "@headlessui/react": "^1.7.15", "@radix-ui/react-dialog": "^1.0.4", "@radix-ui/react-dropdown-menu": "^2.0.5", @@ -653,7 +672,7 @@ "@types/codemirror": "^5.60.8", "clsx": "^1.2.1", "codemirror": "^5.65.3", - "codemirror-graphql": "^2.1.0", + 
"codemirror-graphql": "^2.1.1", "copy-to-clipboard": "^3.2.0", "framer-motion": "^6.5.1", "get-value": "^3.0.1", @@ -668,10 +687,9 @@ } }, "node_modules/@graphiql/toolkit": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/@graphiql/toolkit/-/toolkit-0.10.0.tgz", - "integrity": "sha512-tVWVYL4AKrkBr4f4Vw4/EV1NjstVYg3TGnatFVe6TdBW4kBeATBz5jyGJju/Oq2JrrrQ2LNXfhCDwfdjSlKyHg==", - "license": "MIT", + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@graphiql/toolkit/-/toolkit-0.11.0.tgz", + "integrity": "sha512-VqqQrvkMwgbGhj7J5907yfuAy5B1OCgOTIPi7gtRneG1jYmnqvSxi8Yrmu0B8G8fZxkxKVsYi8dE8EtsOBrTGQ==", "dependencies": { "@n1ru4l/push-pull-async-iterable-iterator": "^3.1.0", "meros": "^1.1.4" @@ -690,7 +708,6 @@ "version": "1.7.19", "resolved": "https://registry.npmjs.org/@headlessui/react/-/react-1.7.19.tgz", "integrity": "sha512-Ll+8q3OlMJfJbAKM/+/Y2q6PPYbryqNTXDbryx7SXLIDamkF6iQFbriYHga0dY44PvDhvvBWCx1Xj4U5+G4hOw==", - "license": "MIT", "dependencies": { "@tanstack/react-virtual": "^3.0.0-beta.60", "client-only": "^0.0.1" @@ -731,11 +748,34 @@ "url": "https://github.com/sponsors/nzakas" } }, + "node_modules/@lezer/common": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.1.tgz", + "integrity": "sha512-yemX0ZD2xS/73llMZIK6KplkjIjf2EvAHcinDi/TfJ9hS25G0388+ClHt6/3but0oOxinTcQHJLDXh6w1crzFQ==", + "peer": true + }, + "node_modules/@lezer/highlight": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.1.tgz", + "integrity": "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA==", + "peer": true, + "dependencies": { + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@lezer/lr": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.2.tgz", + "integrity": "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA==", + "peer": true, + "dependencies": { + "@lezer/common": "^1.0.0" + } + }, "node_modules/@motionone/animation": { "version": "10.18.0", "resolved": "https://registry.npmjs.org/@motionone/animation/-/animation-10.18.0.tgz", "integrity": "sha512-9z2p5GFGCm0gBsZbi8rVMOAJCtw1WqBTIPw3ozk06gDvZInBPIsQcHgYogEJ4yuHJ+akuW8g1SEIOpTOvYs8hw==", - "license": "MIT", "dependencies": { "@motionone/easing": "^10.18.0", "@motionone/types": "^10.17.1", @@ -747,7 +787,6 @@ "version": "10.12.0", "resolved": "https://registry.npmjs.org/@motionone/dom/-/dom-10.12.0.tgz", "integrity": "sha512-UdPTtLMAktHiqV0atOczNYyDd/d8Cf5fFsd1tua03PqTwwCe/6lwhLSQ8a7TbnQ5SN0gm44N1slBfj+ORIhrqw==", - "license": "MIT", "dependencies": { "@motionone/animation": "^10.12.0", "@motionone/generators": "^10.12.0", @@ -761,7 +800,6 @@ "version": "10.18.0", "resolved": "https://registry.npmjs.org/@motionone/easing/-/easing-10.18.0.tgz", "integrity": "sha512-VcjByo7XpdLS4o9T8t99JtgxkdMcNWD3yHU/n6CLEz3bkmKDRZyYQ/wmSf6daum8ZXqfUAgFeCZSpJZIMxaCzg==", - "license": "MIT", "dependencies": { "@motionone/utils": "^10.18.0", "tslib": "^2.3.1" @@ -771,7 +809,6 @@ "version": "10.18.0", "resolved": "https://registry.npmjs.org/@motionone/generators/-/generators-10.18.0.tgz", "integrity": "sha512-+qfkC2DtkDj4tHPu+AFKVfR/C30O1vYdvsGYaR13W/1cczPrrcjdvYCj0VLFuRMN+lP1xvpNZHCRNM4fBzn1jg==", - "license": "MIT", "dependencies": { "@motionone/types": "^10.17.1", "@motionone/utils": "^10.18.0", @@ -781,14 +818,12 @@ "node_modules/@motionone/types": { "version": "10.17.1", "resolved": 
"https://registry.npmjs.org/@motionone/types/-/types-10.17.1.tgz", - "integrity": "sha512-KaC4kgiODDz8hswCrS0btrVrzyU2CSQKO7Ps90ibBVSQmjkrt2teqta6/sOG59v7+dPnKMAg13jyqtMKV2yJ7A==", - "license": "MIT" + "integrity": "sha512-KaC4kgiODDz8hswCrS0btrVrzyU2CSQKO7Ps90ibBVSQmjkrt2teqta6/sOG59v7+dPnKMAg13jyqtMKV2yJ7A==" }, "node_modules/@motionone/utils": { "version": "10.18.0", "resolved": "https://registry.npmjs.org/@motionone/utils/-/utils-10.18.0.tgz", "integrity": "sha512-3XVF7sgyTSI2KWvTf6uLlBJ5iAgRgmvp3bpuOiQJvInd4nZ19ET8lX5unn30SlmRH7hXbBbH+Gxd0m0klJ3Xtw==", - "license": "MIT", "dependencies": { "@motionone/types": "^10.17.1", "hey-listen": "^1.0.8", @@ -799,7 +834,6 @@ "version": "3.2.0", "resolved": "https://registry.npmjs.org/@n1ru4l/push-pull-async-iterable-iterator/-/push-pull-async-iterable-iterator-3.2.0.tgz", "integrity": "sha512-3fkKj25kEjsfObL6IlKPAlHYPq/oYwUkkQ03zsTTiDjD7vg/RxjdiLeCydqtxHZP0JgsXL3D/X5oAkMGzuUp/Q==", - "license": "MIT", "engines": { "node": ">=12" } @@ -845,14 +879,12 @@ "node_modules/@radix-ui/primitive": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.0.tgz", - "integrity": "sha512-4Z8dn6Upk0qk4P74xBhZ6Hd/w0mPEzOOLxy4xiPXOXqjF7jZS0VAKk7/x/H6FyY2zCkYJqePf1G5KmkmNJ4RBA==", - "license": "MIT" + "integrity": "sha512-4Z8dn6Upk0qk4P74xBhZ6Hd/w0mPEzOOLxy4xiPXOXqjF7jZS0VAKk7/x/H6FyY2zCkYJqePf1G5KmkmNJ4RBA==" }, "node_modules/@radix-ui/react-arrow": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.1.0.tgz", "integrity": "sha512-FmlW1rCg7hBpEBwFbjHwCW6AmWLQM6g/v0Sn8XbP9NvmSZ2San1FpQeyPtufzOMSIx7Y4dzjlHoifhp+7NkZhw==", - "license": "MIT", "dependencies": { "@radix-ui/react-primitive": "2.0.0" }, @@ -875,7 +907,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.1.0.tgz", "integrity": "sha512-GZsZslMJEyo1VKm5L1ZJY8tGDxZNPAoUeQUIbKeJfoi7Q4kmig5AsgLMYYuyYbfjd8fBmFORAIwYAkXMnXZgZw==", - "license": "MIT", "dependencies": { "@radix-ui/react-compose-refs": "1.1.0", "@radix-ui/react-context": "1.1.0", @@ -901,7 +932,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.0.tgz", "integrity": "sha512-b4inOtiaOnYf9KWyO3jAeeCG6FeyfY6ldiEPanbUjWd+xIk5wZeHa8yVwmrJ2vderhu/BQvzCrJI0lHd+wIiqw==", - "license": "MIT", "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" @@ -916,7 +946,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.0.tgz", "integrity": "sha512-OKrckBy+sMEgYM/sMmqmErVn0kZqrHPJze+Ql3DzYsDDp0hl0L62nx/2122/Bvps1qz645jlcu2tD9lrRSdf8A==", - "license": "MIT", "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" @@ -931,7 +960,6 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.1.1.tgz", "integrity": "sha512-zysS+iU4YP3STKNS6USvFVqI4qqx8EpiwmT5TuCApVEBca+eRCbONi4EgzfNSuVnOXvC5UPHHMjs8RXO6DH9Bg==", - "license": "MIT", "dependencies": { "@radix-ui/primitive": "1.1.0", "@radix-ui/react-compose-refs": "1.1.0", @@ -967,7 +995,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-direction/-/react-direction-1.1.0.tgz", "integrity": "sha512-BUuBvgThEiAXh2DWu93XsT+a3aWrGqolGlqqw5VU1kG7p/ZH2cuDlM1sRLNnY3QcBS69UIz2mcKhMxDsdewhjg==", - "license": "MIT", "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 
|| ^19.0.0-rc" @@ -982,7 +1009,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.1.0.tgz", "integrity": "sha512-/UovfmmXGptwGcBQawLzvn2jOfM0t4z3/uKffoBlj724+n3FvBbZ7M0aaBOmkp6pqFYpO4yx8tSVJjx3Fl2jig==", - "license": "MIT", "dependencies": { "@radix-ui/primitive": "1.1.0", "@radix-ui/react-compose-refs": "1.1.0", @@ -1009,7 +1035,6 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-2.1.1.tgz", "integrity": "sha512-y8E+x9fBq9qvteD2Zwa4397pUVhYsh9iq44b5RD5qu1GMJWBCBuVg1hMyItbc6+zH00TxGRqd9Iot4wzf3OoBQ==", - "license": "MIT", "dependencies": { "@radix-ui/primitive": "1.1.0", "@radix-ui/react-compose-refs": "1.1.0", @@ -1038,7 +1063,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.1.0.tgz", "integrity": "sha512-w6XZNUPVv6xCpZUqb/yN9DL6auvpGX3C/ee6Hdi16v2UUy25HV2Q5bcflsiDyT/g5RwbPQ/GIT1vLkeRb+ITBw==", - "license": "MIT", "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" @@ -1053,7 +1077,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.1.0.tgz", "integrity": "sha512-200UD8zylvEyL8Bx+z76RJnASR2gRMuxlgFCPAe/Q/679a/r0eK3MBVYMb7vZODZcffZBdob1EGnky78xmVvcA==", - "license": "MIT", "dependencies": { "@radix-ui/react-compose-refs": "1.1.0", "@radix-ui/react-primitive": "2.0.0", @@ -1078,7 +1101,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.1.0.tgz", "integrity": "sha512-EJUrI8yYh7WOjNOqpoJaf1jlFIH2LvtgAl+YcFqNCa+4hj64ZXmPkAKOFs/ukjz3byN6bdb/AVUqHkI8/uWWMA==", - "license": "MIT", "dependencies": { "@radix-ui/react-use-layout-effect": "1.1.0" }, @@ -1096,7 +1118,6 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-2.1.1.tgz", "integrity": "sha512-oa3mXRRVjHi6DZu/ghuzdylyjaMXLymx83irM7hTxutQbD+7IhPKdMdRHD26Rm+kHRrWcrUkkRPv5pd47a2xFQ==", - "license": "MIT", "dependencies": { "@radix-ui/primitive": "1.1.0", "@radix-ui/react-collection": "1.1.0", @@ -1136,7 +1157,6 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.2.0.tgz", "integrity": "sha512-ZnRMshKF43aBxVWPWvbj21+7TQCvhuULWJ4gNIKYpRlQt5xGRhLx66tMp8pya2UkGHTSlhpXwmjqltDYHhw7Vg==", - "license": "MIT", "dependencies": { "@floating-ui/react-dom": "^2.0.0", "@radix-ui/react-arrow": "1.1.0", @@ -1168,7 +1188,6 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.1.1.tgz", "integrity": "sha512-A3UtLk85UtqhzFqtoC8Q0KvR2GbXF3mtPgACSazajqq6A41mEQgo53iPzY4i6BwDxlIFqWIhiQ2G729n+2aw/g==", - "license": "MIT", "dependencies": { "@radix-ui/react-primitive": "2.0.0", "@radix-ui/react-use-layout-effect": "1.1.0" @@ -1192,7 +1211,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.0.tgz", "integrity": "sha512-Gq6wuRN/asf9H/E/VzdKoUtT8GC9PQc9z40/vEr0VCJ4u5XvvhWIrSsCB6vD2/cH7ugTdSfYq9fLJCcM00acrQ==", - "license": "MIT", "dependencies": { "@radix-ui/react-compose-refs": "1.1.0", "@radix-ui/react-use-layout-effect": "1.1.0" @@ -1216,7 +1234,6 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.0.0.tgz", "integrity": "sha512-ZSpFm0/uHa8zTvKBDjLFWLo8dkr4MBsiDLz0g3gMUwqgLHz9rTaRRGYDgvZPtBJgYCBKXkS9fzmoySgr8CO6Cw==", - "license": "MIT", "dependencies": 
{ "@radix-ui/react-slot": "1.1.0" }, @@ -1239,7 +1256,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.0.tgz", "integrity": "sha512-EA6AMGeq9AEeQDeSH0aZgG198qkfHSbvWTf1HvoDmOB5bBG/qTxjYMWUKMnYiV6J/iP/J8MEFSuB2zRU2n7ODA==", - "license": "MIT", "dependencies": { "@radix-ui/primitive": "1.1.0", "@radix-ui/react-collection": "1.1.0", @@ -1270,7 +1286,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.1.0.tgz", "integrity": "sha512-FUCf5XMfmW4dtYl69pdS4DbxKy8nj4M7SafBgPllysxmdachynNflAdp/gCsnYWNDnge6tI9onzMp5ARYc1KNw==", - "license": "MIT", "dependencies": { "@radix-ui/react-compose-refs": "1.1.0" }, @@ -1288,7 +1303,6 @@ "version": "1.1.2", "resolved": "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.1.2.tgz", "integrity": "sha512-9XRsLwe6Yb9B/tlnYCPVUd/TFS4J7HuOZW345DCeC6vKIxQGMZdx21RK4VoZauPD5frgkXTYVS5y90L+3YBn4w==", - "license": "MIT", "dependencies": { "@radix-ui/primitive": "1.1.0", "@radix-ui/react-compose-refs": "1.1.0", @@ -1322,7 +1336,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.1.0.tgz", "integrity": "sha512-CasTfvsy+frcFkbXtSJ2Zu9JHpN8TYKxkgJGWbjiZhFivxaeW7rMeZt7QELGVLaYVfFMsKHjb7Ak0nMEe+2Vfw==", - "license": "MIT", "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" @@ -1337,7 +1350,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.1.0.tgz", "integrity": "sha512-MtfMVJiSr2NjzS0Aa90NPTnvTSg6C/JLCV7ma0W6+OMV78vd8OyRpID+Ng9LxzsPbLeuBnWBA1Nq30AtBIDChw==", - "license": "MIT", "dependencies": { "@radix-ui/react-use-callback-ref": "1.1.0" }, @@ -1355,7 +1367,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.1.0.tgz", "integrity": "sha512-L7vwWlR1kTTQ3oh7g1O0CBF3YCyyTj8NmhLR+phShpyA50HCfBFKVJTpshm9PzLiKmehsrQzTYTpX9HvmC9rhw==", - "license": "MIT", "dependencies": { "@radix-ui/react-use-callback-ref": "1.1.0" }, @@ -1373,7 +1384,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.1.0.tgz", "integrity": "sha512-+FPE0rOdziWSrH9athwI1R0HDVbWlEhd+FR+aSDk4uWGmSJ9Z54sdZVDQPZAinJhJXwfT+qnj969mCsT2gfm5w==", - "license": "MIT", "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" @@ -1388,7 +1398,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-1.1.0.tgz", "integrity": "sha512-0Fmkebhr6PiseyZlYAOtLS+nb7jLmpqTrJyv61Pe68MKYW6OWdRE2kI70TaYY27u7H0lajqM3hSMMLFq18Z7nQ==", - "license": "MIT", "dependencies": { "@radix-ui/rect": "1.1.0" }, @@ -1406,7 +1415,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-1.1.0.tgz", "integrity": "sha512-XW3/vWuIXHa+2Uwcc2ABSfcCledmXhhQPlGbfcRXbiUQI5Icjcg19BGCZVKKInYbvUCut/ufbbLLPFC5cbb1hw==", - "license": "MIT", "dependencies": { "@radix-ui/react-use-layout-effect": "1.1.0" }, @@ -1424,7 +1432,6 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.1.0.tgz", "integrity": "sha512-N8MDZqtgCgG5S3aV60INAB475osJousYpZ4cTJ2cFbMpdHS5Y6loLTH8LPtkj2QN0x93J30HT/M3qJXM0+lyeQ==", - "license": "MIT", "dependencies": { "@radix-ui/react-primitive": "2.0.0" }, 
@@ -1446,8 +1453,7 @@ "node_modules/@radix-ui/rect": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/rect/-/rect-1.1.0.tgz", - "integrity": "sha512-A9+lCBZoaMJlVKcRBz2YByCG+Cp2t6nAnMnNba+XiWxnj6r4JUFqfsgwocMBZU9LPtdxC6wB56ySYpc7LQIoJg==", - "license": "MIT" + "integrity": "sha512-A9+lCBZoaMJlVKcRBz2YByCG+Cp2t6nAnMnNba+XiWxnj6r4JUFqfsgwocMBZU9LPtdxC6wB56ySYpc7LQIoJg==" }, "node_modules/@rollup/rollup-android-arm-eabi": { "version": "4.21.0", @@ -2386,12 +2392,11 @@ } }, "node_modules/@tanstack/react-virtual": { - "version": "3.10.2", - "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.10.2.tgz", - "integrity": "sha512-RXOj33/xYgylGNczUrufi/ZbpUreBQmlD9ewz1PNZ4NIe6clTMh5NqAL9gXjRBy59UyZSlqo3c1p3EEzEX27oA==", - "license": "MIT", + "version": "3.10.4", + "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.10.4.tgz", + "integrity": "sha512-Y2y1QJN3e5gNTG4wlZcoW2IAFrVCuho80oyeODKKFVSbAhJAXmkDNH3ZztM6EQij5ueqpqgz5FlsgKP9TGjImA==", "dependencies": { - "@tanstack/virtual-core": "3.10.2" + "@tanstack/virtual-core": "3.10.4" }, "funding": { "type": "github", @@ -2403,10 +2408,9 @@ } }, "node_modules/@tanstack/virtual-core": { - "version": "3.10.2", - "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.10.2.tgz", - "integrity": "sha512-nQXJnavN0D5PcKW2XL+w05aO/bxnuBq0+p3X+RG+R+lTHluNSWp5ePNbz0wIfg7U4HPrX/LBE9llMOvS3/6Cuw==", - "license": "MIT", + "version": "3.10.4", + "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.10.4.tgz", + "integrity": "sha512-yHyli4RHVsI+eJ0RjmOsjA9RpHp3/Zah9t+iRjmFa72dq00TeG/NwuLYuCV6CB4RkWD4i5RD421j1eb6BdKgvQ==", "funding": { "type": "github", "url": "https://github.com/sponsors/tannerlinsley" @@ -2416,7 +2420,6 @@ "version": "5.60.15", "resolved": "https://registry.npmjs.org/@types/codemirror/-/codemirror-5.60.15.tgz", "integrity": "sha512-dTOvwEQ+ouKJ/rE9LT1Ue2hmP6H1mZv5+CCnNWu2qtiOe2LQa9lCprEY20HxiDmV/Bxh+dXjywmy5aKvoGjULA==", - "license": "MIT", "dependencies": { "@types/tern": "*" } @@ -2440,7 +2443,7 @@ "version": "15.7.12", "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.12.tgz", "integrity": "sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q==", - "dev": true, + "devOptional": true, "license": "MIT" }, "node_modules/@types/ramda": { @@ -2456,7 +2459,7 @@ "version": "18.3.4", "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.4.tgz", "integrity": "sha512-J7W30FTdfCxDDjmfRM+/JqLHBIyl7xUIp9kwK637FGmY7+mkSFSe6L4jpZzhj5QMfLssSDP4/i75AKkrdC7/Jw==", - "dev": true, + "devOptional": true, "license": "MIT", "dependencies": { "@types/prop-types": "*", @@ -2467,7 +2470,7 @@ "version": "18.3.0", "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.0.tgz", "integrity": "sha512-EhwApuTmMBmXuFOikhQLIBUn6uFg81SwLMOAUgodJF14SOBOCMdU04gDoYi0WOJJHD144TL32z4yDqCW3dnkQg==", - "dev": true, + "devOptional": true, "license": "MIT", "dependencies": { "@types/react": "*" @@ -2487,7 +2490,6 @@ "version": "0.23.9", "resolved": "https://registry.npmjs.org/@types/tern/-/tern-0.23.9.tgz", "integrity": "sha512-ypzHFE/wBzh+BlH6rrBgS5I/Z7RD21pGhZ2rltb/+ZrVM1awdZwjx7hE5XfuYgHWk9uvV5HLZN3SloevCAp3Bw==", - "license": "MIT", "dependencies": { "@types/estree": "*" } @@ -2789,7 +2791,6 @@ "version": "1.2.4", "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.4.tgz", "integrity": 
"sha512-y+CcFFwelSXpLZk/7fMB2mUbGtX9lKycf1MWJ7CaTIERyitVlyQx6C+sxcROU2BAJ24OiZyK+8wj2i8AlBoS3A==", - "license": "MIT", "dependencies": { "tslib": "^2.0.0" }, @@ -2991,14 +2992,12 @@ "node_modules/client-only": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/client-only/-/client-only-0.0.1.tgz", - "integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==", - "license": "MIT" + "integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==" }, "node_modules/clsx": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz", "integrity": "sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==", - "license": "MIT", "engines": { "node": ">=6" } @@ -3006,14 +3005,12 @@ "node_modules/codemirror": { "version": "5.65.17", "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-5.65.17.tgz", - "integrity": "sha512-1zOsUx3lzAOu/gnMAZkQ9kpIHcPYOc9y1Fbm2UVk5UBPkdq380nhkelG0qUwm1f7wPvTbndu9ZYlug35EwAZRQ==", - "license": "MIT" + "integrity": "sha512-1zOsUx3lzAOu/gnMAZkQ9kpIHcPYOc9y1Fbm2UVk5UBPkdq380nhkelG0qUwm1f7wPvTbndu9ZYlug35EwAZRQ==" }, "node_modules/codemirror-graphql": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/codemirror-graphql/-/codemirror-graphql-2.1.0.tgz", - "integrity": "sha512-Lm5augEJOd+7L6HXzBDmx/n7ViL8P/Dt0ba21X0JLeaMMEWtcG1SOvIdeI35St9ADOGdu/eMOg7aciX/RnWDFA==", - "license": "MIT", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/codemirror-graphql/-/codemirror-graphql-2.1.1.tgz", + "integrity": "sha512-qVNd+H4OqkeBLDztB5bYllAmToxmZASOoELgbf+csNcoovIHKqSB/eppkzWI5jdQGd8bvLK1lTePfqXsCBFryw==", "dependencies": { "@types/codemirror": "^0.0.90", "graphql-language-service": "5.3.0" @@ -3028,7 +3025,6 @@ "version": "0.0.90", "resolved": "https://registry.npmjs.org/@types/codemirror/-/codemirror-0.0.90.tgz", "integrity": "sha512-8Z9+tSg27NPRGubbUPUCrt5DDG/OWzLph5BvcDykwR5D7RyZh5mhHG0uS1ePKV1YFCA+/cwc4Ey2AJAEFfV3IA==", - "license": "MIT", "dependencies": { "@types/tern": "*" } @@ -3136,14 +3132,13 @@ "version": "3.1.3", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", - "dev": true, + "devOptional": true, "license": "MIT" }, "node_modules/debounce-promise": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/debounce-promise/-/debounce-promise-3.1.2.tgz", - "integrity": "sha512-rZHcgBkbYavBeD9ej6sP56XfG53d51CD4dnaw989YX/nZ/ZJfgRx/9ePKmTNiUiyQvh4mtrMoS3OAWW+yoYtpg==", - "license": "MIT" + "integrity": "sha512-rZHcgBkbYavBeD9ej6sP56XfG53d51CD4dnaw989YX/nZ/ZJfgRx/9ePKmTNiUiyQvh4mtrMoS3OAWW+yoYtpg==" }, "node_modules/debug": { "version": "4.3.6", @@ -3226,8 +3221,7 @@ "node_modules/detect-node-es": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", - "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==", - "license": "MIT" + "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==" }, "node_modules/dir-glob": { "version": "3.0.1", @@ -3271,7 +3265,6 @@ "version": "4.5.0", "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", - "license": 
"BSD-2-Clause", "engines": { "node": ">=0.12" }, @@ -3332,17 +3325,16 @@ } }, "node_modules/eslint": { - "version": "9.9.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.9.0.tgz", - "integrity": "sha512-JfiKJrbx0506OEerjK2Y1QlldtBxkAlLxT5OEcRF8uaQ86noDe2k31Vw9rnSWv+MXZHj7OOUV/dA0AhdLFcyvA==", + "version": "9.9.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.9.1.tgz", + "integrity": "sha512-dHvhrbfr4xFQ9/dq+jcVneZMyRYLjggWjk6RVsIiHsP8Rz6yZ8LvZ//iU4TrZF+SXWG+JkNF2OyiZRvzgRDqMg==", "dev": true, - "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.11.0", - "@eslint/config-array": "^0.17.1", + "@eslint/config-array": "^0.18.0", "@eslint/eslintrc": "^3.1.0", - "@eslint/js": "9.9.0", + "@eslint/js": "9.9.1", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.3.0", "@nodelib/fs.walk": "^1.2.8", @@ -3758,7 +3750,6 @@ "version": "6.5.1", "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-6.5.1.tgz", "integrity": "sha512-o1BGqqposwi7cgDrtg0dNONhkmPsUFDaLcKXigzuTFC5x58mE8iyTazxSudFzmT6MEyJKfjjU8ItoMe3W+3fiw==", - "license": "MIT", "dependencies": { "@motionone/dom": "10.12.0", "framesync": "6.0.1", @@ -3779,7 +3770,6 @@ "version": "6.0.1", "resolved": "https://registry.npmjs.org/framesync/-/framesync-6.0.1.tgz", "integrity": "sha512-fUY88kXvGiIItgNC7wcTOl0SNRCVXMKSWW2Yzfmn7EKNc+MpCzcz9DhdHcdjbrtN3c6R4H5dTY2jiCpPdysEjA==", - "license": "MIT", "dependencies": { "tslib": "^2.1.0" } @@ -3810,7 +3800,6 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz", "integrity": "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==", - "license": "MIT", "engines": { "node": ">=6" } @@ -3819,7 +3808,6 @@ "version": "3.0.1", "resolved": "https://registry.npmjs.org/get-value/-/get-value-3.0.1.tgz", "integrity": "sha512-mKZj9JLQrwMBtj5wxi6MH8Z5eSKaERpAwjg43dPtlGI1ZVEgH/qC7T8/6R2OBSUA+zzHBZgICsVJaEIV2tKTDA==", - "license": "MIT", "dependencies": { "isobject": "^3.0.1" }, @@ -3889,12 +3877,11 @@ "license": "MIT" }, "node_modules/graphiql": { - "version": "3.7.0", - "resolved": "https://registry.npmjs.org/graphiql/-/graphiql-3.7.0.tgz", - "integrity": "sha512-M38uOeD8y0M85VnrifhpXtcgGshQG2dtQGJ6fPZB9c659sA6y2Yh9aDnE055/n2ricidwSLrKmsiDXrvDuoU1A==", - "license": "MIT", + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/graphiql/-/graphiql-3.7.1.tgz", + "integrity": "sha512-kmummedOrFYs0BI5evrVY0AerOYlaMt/Sc/e+Sta1x8X6vEMYWNeUUz/kKF2NQT5BcsR3FnNdFt1Gk2QMgueGQ==", "dependencies": { - "@graphiql/react": "^0.26.0" + "@graphiql/react": "^0.26.2" }, "peerDependencies": { "graphql": "^15.5.0 || ^16.0.0 || ^17.0.0-alpha.2", @@ -3915,7 +3902,6 @@ "version": "5.3.0", "resolved": "https://registry.npmjs.org/graphql-language-service/-/graphql-language-service-5.3.0.tgz", "integrity": "sha512-gCQIIy7lM9CB1KPLEb+DNZLczA9zuTLEOJE2hEQZTFYInogdmMDRa6RAkvM4LL0LcgcS+3uPs6KtHlcjCqRbUg==", - "license": "MIT", "dependencies": { "debounce-promise": "^3.1.2", "nullthrows": "^1.0.0", @@ -3968,8 +3954,7 @@ "node_modules/hey-listen": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/hey-listen/-/hey-listen-1.0.8.tgz", - "integrity": "sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q==", - "license": "MIT" + "integrity": "sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q==" }, "node_modules/highlight.js": { "version": 
"10.7.3", @@ -4159,7 +4144,6 @@ "version": "2.0.4", "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", - "license": "MIT", "dependencies": { "isobject": "^3.0.1" }, @@ -4171,7 +4155,6 @@ "version": "3.0.1", "resolved": "https://registry.npmjs.org/is-primitive/-/is-primitive-3.0.1.tgz", "integrity": "sha512-GljRxhWvlCNRfZyORiH77FwdFwGcMO620o37EOYC0ORWdq+WYNVqW0w2Juzew4M+L81l6/QS3t5gkkihyRqv9w==", - "license": "MIT", "engines": { "node": ">=0.10.0" } @@ -4187,7 +4170,6 @@ "version": "3.0.1", "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", "integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==", - "license": "MIT", "engines": { "node": ">=0.10.0" } @@ -4265,7 +4247,6 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", - "license": "MIT", "dependencies": { "uc.micro": "^2.0.0" } @@ -4335,7 +4316,6 @@ "version": "14.1.0", "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz", "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==", - "license": "MIT", "dependencies": { "argparse": "^2.0.1", "entities": "^4.4.0", @@ -4351,8 +4331,7 @@ "node_modules/mdurl": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", - "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==", - "license": "MIT" + "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==" }, "node_modules/merge2": { "version": "1.4.1", @@ -4368,7 +4347,6 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/meros/-/meros-1.3.0.tgz", "integrity": "sha512-2BNGOimxEz5hmjUG2FwoxCt5HN7BXdaWyFqEwxPTrJzVdABtrL4TiHTcsWSFAxPQ/tOnEaQEJh3qWq71QRMY+w==", - "license": "MIT", "engines": { "node": ">=13" }, @@ -4588,8 +4566,7 @@ "node_modules/nullthrows": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/nullthrows/-/nullthrows-1.1.1.tgz", - "integrity": "sha512-2vPPEi+Z7WqML2jZYddDIfy5Dqb0r2fze2zTxNNknZaFpVHU3mFB3R+DWeJWGVx0ecvttSGlJTI+WG+8Z4cDWw==", - "license": "MIT" + "integrity": "sha512-2vPPEi+Z7WqML2jZYddDIfy5Dqb0r2fze2zTxNNknZaFpVHU3mFB3R+DWeJWGVx0ecvttSGlJTI+WG+8Z4cDWw==" }, "node_modules/object-assign": { "version": "4.1.1", @@ -4769,7 +4746,6 @@ "version": "11.0.3", "resolved": "https://registry.npmjs.org/popmotion/-/popmotion-11.0.3.tgz", "integrity": "sha512-Y55FLdj3UxkR7Vl3s7Qr4e9m0onSnP8W7d/xQLsoJM40vs6UKHFdygs6SWryasTZYqugMjm3BepCF4CWXDiHgA==", - "license": "MIT", "dependencies": { "framesync": "6.0.1", "hey-listen": "^1.0.8", @@ -4916,7 +4892,6 @@ "version": "2.3.1", "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz", "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==", - "license": "MIT", "engines": { "node": ">=6" } @@ -5138,7 +5113,6 @@ "version": "2.5.7", "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.5.7.tgz", "integrity": "sha512-FnrTWO4L7/Bhhf3CYBNArEG/yROV0tKmTv7/3h9QCFvH6sndeFf1wPqOcbFVu5VAulS5dV1wGT3GZZ/1GawqiA==", - "license": "MIT", "dependencies": { "react-remove-scroll-bar": "^2.3.4", 
"react-style-singleton": "^2.2.1", @@ -5163,7 +5137,6 @@ "version": "2.3.6", "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.6.tgz", "integrity": "sha512-DtSYaao4mBmX+HDo5YWYdBWQwYIQQshUV/dVxFxK+KM26Wjwp1gZ6rv6OC3oujI6Bfu6Xyg3TwK533AQutsn/g==", - "license": "MIT", "dependencies": { "react-style-singleton": "^2.2.1", "tslib": "^2.0.0" @@ -5185,7 +5158,6 @@ "version": "2.2.1", "resolved": "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.1.tgz", "integrity": "sha512-ZWj0fHEMyWkHzKYUr2Bs/4zU6XLmq9HsgBURm7g5pAVfyn49DgUiNgY2d4lXRlYSiCif9YBGpQleewkcqddc7g==", - "license": "MIT", "dependencies": { "get-nonce": "^1.0.0", "invariant": "^2.2.4", @@ -5482,7 +5454,6 @@ "https://paypal.me/jonathanschlinkert", "https://jonschlinkert.dev/sponsor" ], - "license": "MIT", "dependencies": { "is-plain-object": "^2.0.4", "is-primitive": "^3.0.1" @@ -5656,11 +5627,16 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/style-mod": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.2.tgz", + "integrity": "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw==", + "peer": true + }, "node_modules/style-value-types": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/style-value-types/-/style-value-types-5.0.0.tgz", "integrity": "sha512-08yq36Ikn4kx4YU6RD7jWEv27v4V+PUsOGa4n/as8Et3CuODMJQ00ENeAVXAeydX4Z2j1XHZF1K2sX4mGl18fA==", - "license": "MIT", "dependencies": { "hey-listen": "^1.0.8", "tslib": "^2.1.0" @@ -5933,8 +5909,7 @@ "node_modules/uc.micro": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", - "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==", - "license": "MIT" + "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==" }, "node_modules/unraw": { "version": "3.0.0", @@ -5966,7 +5941,6 @@ "version": "1.3.2", "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.2.tgz", "integrity": "sha512-elOQwe6Q8gqZgDA8mrh44qRTQqpIHDcZ3hXTLjBe1i4ph8XpNJnO+aQf3NaG+lriLopI4HMx9VjQLfPQ6vhnoA==", - "license": "MIT", "dependencies": { "tslib": "^2.0.0" }, @@ -5987,7 +5961,6 @@ "version": "1.1.2", "resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.2.tgz", "integrity": "sha512-epTbsLuzZ7lPClpz2TyryBfztm7m+28DlEv2ZCQ3MDr5ssiwyOwGH/e5F9CkfWjJ1t4clvI58yF822/GUkjjhw==", - "license": "MIT", "dependencies": { "detect-node-es": "^1.1.0", "tslib": "^2.0.0" @@ -6084,8 +6057,19 @@ "node_modules/vscode-languageserver-types": { "version": "3.17.5", "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz", - "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==", - "license": "MIT" + "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==" + }, + "node_modules/w3c-keyname": { + "version": "2.2.8", + "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz", + "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==", + "peer": true + }, + "node_modules/w3c-keyname": { + "version": "2.2.8", + "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz", + "integrity": 
"sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==", + "peer": true }, "node_modules/web-streams-polyfill": { "version": "3.3.3", diff --git a/playground/package.json b/playground/package.json index 3a98aa92e5..9a10720778 100644 --- a/playground/package.json +++ b/playground/package.json @@ -10,7 +10,7 @@ "preview": "vite preview" }, "dependencies": { - "graphiql": "^3.7.0", + "graphiql": "^3.7.1", "graphql": "^16.9.0", "react": "^18.3.1", "react-dom": "^18.3.1", @@ -23,7 +23,7 @@ "@typescript-eslint/eslint-plugin": "^8.2.0", "@typescript-eslint/parser": "^8.2.0", "@vitejs/plugin-react-swc": "^3.7.0", - "eslint": "^9.9.0", + "eslint": "^9.9.1", "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-react-refresh": "^0.4.9", "typescript": "^5.5.4", From fdf4047483d196c5a78c83f14541061893283e66 Mon Sep 17 00:00:00 2001 From: ONLYUSEmePHONE Date: Wed, 28 Aug 2024 16:47:36 -0400 Subject: [PATCH 05/71] fix: Ignore badger path if in-memory (#2967) ## Relevant issue(s) Resolves #2964 ## Description Small change that now only sets the badger options `Dir` and `ValueDir` to the `path` parameter if the badger option `InMemory` is not `true`. --- datastore/badger/v4/datastore.go | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/datastore/badger/v4/datastore.go b/datastore/badger/v4/datastore.go index caac4144a2..5f2864458e 100644 --- a/datastore/badger/v4/datastore.go +++ b/datastore/badger/v4/datastore.go @@ -142,8 +142,11 @@ func NewDatastore(path string, options *Options) (*Datastore, error) { gcSleep = gcInterval } - opt.Dir = path - opt.ValueDir = path + if !opt.InMemory { + opt.Dir = path + opt.ValueDir = path + } + opt.Logger = &compatLogger{ SugaredLogger: *log.Desugar().WithOptions(zap.AddCallerSkip(1)).Sugar(), skipLogger: *log.Desugar().WithOptions(zap.AddCallerSkip(2)).Sugar(), From 9256357cb679947fc2f11f56b6f0c95420e9abbc Mon Sep 17 00:00:00 2001 From: Fred Carle Date: Sun, 1 Sep 2024 10:24:06 -0400 Subject: [PATCH 06/71] chore: Change from ipld traversal to direct link access (#2931) ## Relevant issue(s) Resolves #2930 ## Description This PR removes the use of the ipld traversal function in favor of direct link access. This makes the sync process a bit more efficient and will make it easier to initiate encryption key exchange. 
--- go.sum | 2 -- net/sync_dag.go | 85 +++++++++++++++++++++++++++---------------------- 2 files changed, 47 insertions(+), 40 deletions(-) diff --git a/go.sum b/go.sum index 59d3052b4e..f1d63d2b20 100644 --- a/go.sum +++ b/go.sum @@ -1488,8 +1488,6 @@ github.com/viant/assertly v0.4.8/go.mod h1:aGifi++jvCrUaklKEKT0BU95igDNaqkvz+49u github.com/viant/toolbox v0.24.0/go.mod h1:OxMCG57V0PXuIP2HNQrtJf2CjqdmbrOx5EkMILuUhzM= github.com/vito/go-sse v1.1.2 h1:FLQ1J0tMGN7pKa3KOyZCHojYDR0Z/L/y+3ejUO3P+tM= github.com/vito/go-sse v1.1.2/go.mod h1:2wkcaQ+jtlZ94Uve8gYZjFpL68luAjssTINA2hpgcZs= -github.com/warpfork/go-testmark v0.12.1 h1:rMgCpJfwy1sJ50x0M0NgyphxYYPMOODIJHhsXyEHU0s= -github.com/warpfork/go-testmark v0.12.1/go.mod h1:kHwy7wfvGSPh1rQJYKayD4AbtNaeyZdcGi9tNJTaa5Y= github.com/warpfork/go-wish v0.0.0-20220906213052-39a1cc7a02d0 h1:GDDkbFiaK8jsSDJfjId/PEGEShv6ugrt4kYsC5UIDaQ= github.com/warpfork/go-wish v0.0.0-20220906213052-39a1cc7a02d0/go.mod h1:x6AKhvSSexNrVSrViXSHUEbICjmGXhtgABaHIySUSGw= github.com/wasmerio/wasmer-go v1.0.4 h1:MnqHoOGfiQ8MMq2RF6wyCeebKOe84G88h5yv+vmxJgs= diff --git a/net/sync_dag.go b/net/sync_dag.go index d856f36b2a..e9c17035bf 100644 --- a/net/sync_dag.go +++ b/net/sync_dag.go @@ -12,20 +12,13 @@ package net import ( "context" + "sync" "time" "github.com/ipfs/boxo/blockservice" - "github.com/ipld/go-ipld-prime" - "github.com/ipld/go-ipld-prime/datamodel" "github.com/ipld/go-ipld-prime/linking" cidlink "github.com/ipld/go-ipld-prime/linking/cid" - "github.com/ipld/go-ipld-prime/linking/preload" - basicnode "github.com/ipld/go-ipld-prime/node/basic" - "github.com/ipld/go-ipld-prime/schema" "github.com/ipld/go-ipld-prime/storage/bsrvadapter" - "github.com/ipld/go-ipld-prime/traversal" - "github.com/ipld/go-ipld-prime/traversal/selector" - "github.com/ipld/go-ipld-prime/traversal/selector/builder" coreblock "github.com/sourcenetwork/defradb/internal/core/block" ) @@ -40,9 +33,6 @@ var syncDAGTimeout = 60 * time.Second // This process walks the entire DAG until the issue below is resolved. 
// https://github.com/sourcenetwork/defradb/issues/2722 func syncDAG(ctx context.Context, bserv blockservice.BlockService, block *coreblock.Block) error { - ctx, cancel := context.WithTimeout(ctx, syncDAGTimeout) - defer cancel() - // use a session to make remote fetches more efficient ctx = blockservice.ContextWithSession(ctx, bserv) store := &bsrvadapter.Adapter{Wrapped: bserv} @@ -58,37 +48,56 @@ func syncDAG(ctx context.Context, bserv blockservice.BlockService, block *corebl return err } - ssb := builder.NewSelectorSpecBuilder(basicnode.Prototype.Any) - matchAllSelector, err := ssb.ExploreRecursive(selector.RecursionLimitNone(), ssb.ExploreUnion( - ssb.Matcher(), - ssb.ExploreAll(ssb.ExploreRecursiveEdge()), - )).Selector() + err = loadBlockLinks(ctx, lsys, block) if err != nil { return err } + return nil +} - // prototypeChooser returns the node prototype to use when traversing - prototypeChooser := func(lnk ipld.Link, lnkCtx ipld.LinkContext) (ipld.NodePrototype, error) { - if tlnkNd, ok := lnkCtx.LinkNode.(schema.TypedLinkNode); ok { - return tlnkNd.LinkTargetNodePrototype(), nil - } - return basicnode.Prototype.Any, nil - } - // preloader is used to asynchronously load blocks before traversing - // - // any errors encountered during preload are ignored - preloader := func(pctx preload.PreloadContext, l preload.Link) { - go lsys.Load(linking.LinkContext{Ctx: pctx.Ctx}, l.Link, coreblock.SchemaPrototype) //nolint:errcheck - } - config := traversal.Config{ - Ctx: ctx, - LinkSystem: lsys, - LinkVisitOnlyOnce: true, - LinkTargetNodePrototypeChooser: prototypeChooser, - Preloader: preloader, +// loadBlockLinks loads the links of a block recursively. +// +// If it encounters errors in the concurrent loading of links, it will return +// the first error it encountered. 
+func loadBlockLinks(ctx context.Context, lsys linking.LinkSystem, block *coreblock.Block) error { + ctx, cancel := context.WithTimeout(ctx, syncDAGTimeout) + defer cancel() + + var wg sync.WaitGroup + var asyncErr error + var asyncErrOnce sync.Once + + setAsyncErr := func(err error) { + asyncErr = err + cancel() } - visit := func(p traversal.Progress, n datamodel.Node) error { - return nil + + for _, lnk := range block.Links { + wg.Add(1) + go func(lnk coreblock.DAGLink) { + defer wg.Done() + if ctx.Err() != nil { + return + } + nd, err := lsys.Load(linking.LinkContext{Ctx: ctx}, lnk, coreblock.SchemaPrototype) + if err != nil { + asyncErrOnce.Do(func() { setAsyncErr(err) }) + return + } + linkBlock, err := coreblock.GetFromNode(nd) + if err != nil { + asyncErrOnce.Do(func() { setAsyncErr(err) }) + return + } + err = loadBlockLinks(ctx, lsys, linkBlock) + if err != nil { + asyncErrOnce.Do(func() { setAsyncErr(err) }) + return + } + }(lnk) } - return traversal.Progress{Cfg: &config}.WalkMatching(block.GenerateNode(), matchAllSelector, visit) + + wg.Wait() + + return asyncErr } From f1489efca6351f9b89515eb0feb9ad0254b6c1a4 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 11:30:54 -0400 Subject: [PATCH 07/71] bot: Update dependencies (bulk dependabot PRs) 02-09-2024 (#2975) --- go.mod | 6 +- go.sum | 12 +- playground/package-lock.json | 320 ++++++++++++++--------------------- playground/package.json | 6 +- 4 files changed, 140 insertions(+), 204 deletions(-) diff --git a/go.mod b/go.mod index f3455ec9af..55d5e4f047 100644 --- a/go.mod +++ b/go.mod @@ -57,11 +57,11 @@ require ( github.com/vito/go-sse v1.1.2 github.com/zalando/go-keyring v0.2.5 go.opentelemetry.io/otel/metric v1.29.0 - go.opentelemetry.io/otel/sdk/metric v1.28.0 + go.opentelemetry.io/otel/sdk/metric v1.29.0 go.uber.org/zap v1.27.0 golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa golang.org/x/term v0.23.0 - google.golang.org/grpc v1.65.0 + google.golang.org/grpc v1.66.0 google.golang.org/protobuf v1.34.2 ) @@ -350,7 +350,7 @@ require ( go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.52.0 // indirect go.opentelemetry.io/otel v1.29.0 // indirect - go.opentelemetry.io/otel/sdk v1.28.0 // indirect + go.opentelemetry.io/otel/sdk v1.29.0 // indirect go.opentelemetry.io/otel/trace v1.29.0 // indirect go.uber.org/dig v1.18.0 // indirect go.uber.org/fx v1.22.2 // indirect diff --git a/go.sum b/go.sum index f1d63d2b20..d826c43cdf 100644 --- a/go.sum +++ b/go.sum @@ -1541,10 +1541,10 @@ go.opentelemetry.io/otel v1.29.0 h1:PdomN/Al4q/lN6iBJEN3AwPvUiHPMlt93c8bqTG5Llw= go.opentelemetry.io/otel v1.29.0/go.mod h1:N/WtXPs1CNCUEx+Agz5uouwCba+i+bJGFicT8SR4NP8= go.opentelemetry.io/otel/metric v1.29.0 h1:vPf/HFWTNkPu1aYeIsc98l4ktOQaL6LeSoeV2g+8YLc= go.opentelemetry.io/otel/metric v1.29.0/go.mod h1:auu/QWieFVWx+DmQOUMgj0F8LHWdgalxXqvp7BII/W8= -go.opentelemetry.io/otel/sdk v1.28.0 h1:b9d7hIry8yZsgtbmM0DKyPWMMUMlK9NEKuIG4aBqWyE= -go.opentelemetry.io/otel/sdk v1.28.0/go.mod h1:oYj7ClPUA7Iw3m+r7GeEjz0qckQRJK2B8zjcZEfu7Pg= -go.opentelemetry.io/otel/sdk/metric v1.28.0 h1:OkuaKgKrgAbYrrY0t92c+cC+2F6hsFNnCQArXCKlg08= -go.opentelemetry.io/otel/sdk/metric v1.28.0/go.mod h1:cWPjykihLAPvXKi4iZc1dpER3Jdq2Z0YLse3moQUCpg= +go.opentelemetry.io/otel/sdk v1.29.0 h1:vkqKjk7gwhS8VaWb0POZKmIEDimRCMsopNYnriHyryo= +go.opentelemetry.io/otel/sdk v1.29.0/go.mod 
h1:pM8Dx5WKnvxLCb+8lG1PRNIDxu9g9b9g59Qr7hfAAok= +go.opentelemetry.io/otel/sdk/metric v1.29.0 h1:K2CfmJohnRgvZ9UAj2/FhIf/okdWcNdBwe1m8xFXiSY= +go.opentelemetry.io/otel/sdk/metric v1.29.0/go.mod h1:6zZLdCl2fkauYoZIOn/soQIDSWFmNSRcICarHfuhNJQ= go.opentelemetry.io/otel/trace v1.29.0 h1:J/8ZNK4XgR7a21DZUAsbF8pZ5Jcw1VhACmnYt39JTi4= go.opentelemetry.io/otel/trace v1.29.0/go.mod h1:eHl3w0sp3paPkYstJOmAimxhiFXPg+MMTlEh3nsQgWQ= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= @@ -2204,8 +2204,8 @@ google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACu google.golang.org/grpc v1.49.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= google.golang.org/grpc v1.50.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= google.golang.org/grpc v1.50.1/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= -google.golang.org/grpc v1.65.0 h1:bs/cUb4lp1G5iImFFd3u5ixQzweKizoZJAwBNLR42lc= -google.golang.org/grpc v1.65.0/go.mod h1:WgYC2ypjlB0EiQi6wdKixMqukr6lBc0Vo+oOgjrM5ZQ= +google.golang.org/grpc v1.66.0 h1:DibZuoBznOxbDQxRINckZcUvnCEvrW9pcWIE2yF9r1c= +google.golang.org/grpc v1.66.0/go.mod h1:s3/l6xSSCURdVfAnL+TqCNMyTDAGN6+lZeVxnZR128Y= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= diff --git a/playground/package-lock.json b/playground/package-lock.json index 872277b296..884f3c8bb5 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -15,11 +15,11 @@ "swagger-ui-react": "^5.17.14" }, "devDependencies": { - "@types/react": "^18.3.3", + "@types/react": "^18.3.5", "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^8.2.0", - "@typescript-eslint/parser": "^8.2.0", + "@typescript-eslint/eslint-plugin": "^8.3.0", + "@typescript-eslint/parser": "^8.3.0", "@vitejs/plugin-react-swc": "^3.7.0", "eslint": "^9.9.1", "eslint-plugin-react-hooks": "^4.6.2", @@ -59,37 +59,6 @@ "integrity": "sha512-NVf/1YycDMs6+FxS0Tb/W8MjJRDQdXF+tBfDtZ5UZeiRUkTmwKc4vmYCKZTyymfJk1gnMsauvZSX/HiV9jOABw==", "license": "MIT" }, - "node_modules/@codemirror/language": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.0.0.tgz", - "integrity": "sha512-rtjk5ifyMzOna1c7PBu7J1VCt0PvA5wy3o8eMVnxMKb7z8KA7JFecvD04dSn14vj/bBaAbqRsGed5OjtofEnLA==", - "peer": true, - "dependencies": { - "@codemirror/state": "^6.0.0", - "@codemirror/view": "^6.0.0", - "@lezer/common": "^1.0.0", - "@lezer/highlight": "^1.0.0", - "@lezer/lr": "^1.0.0", - "style-mod": "^4.0.0" - } - }, - "node_modules/@codemirror/state": { - "version": "6.4.1", - "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.4.1.tgz", - "integrity": "sha512-QkEyUiLhsJoZkbumGZlswmAhA7CBU02Wrz7zvH4SrcifbsqwlXShVXg65f3v/ts57W3dqyamEriMhij1Z3Zz4A==", - "peer": true - }, - "node_modules/@codemirror/view": { - "version": "6.33.0", - "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.33.0.tgz", - "integrity": "sha512-AroaR3BvnjRW8fiZBalAaK+ZzB5usGgI014YKElYZvQdNH5ZIidHlO+cyf/2rWzyBFRkvG6VhiXeAEbC53P2YQ==", - "peer": true, - "dependencies": { - "@codemirror/state": "^6.4.0", - "style-mod": "^4.1.0", - "w3c-keyname": "^2.2.4" - } - }, "node_modules/@emotion/is-prop-valid": { "version": 
"0.8.8", "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz", @@ -748,30 +717,6 @@ "url": "https://github.com/sponsors/nzakas" } }, - "node_modules/@lezer/common": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.1.tgz", - "integrity": "sha512-yemX0ZD2xS/73llMZIK6KplkjIjf2EvAHcinDi/TfJ9hS25G0388+ClHt6/3but0oOxinTcQHJLDXh6w1crzFQ==", - "peer": true - }, - "node_modules/@lezer/highlight": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.1.tgz", - "integrity": "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA==", - "peer": true, - "dependencies": { - "@lezer/common": "^1.0.0" - } - }, - "node_modules/@lezer/lr": { - "version": "1.4.2", - "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.2.tgz", - "integrity": "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA==", - "peer": true, - "dependencies": { - "@lezer/common": "^1.0.0" - } - }, "node_modules/@motionone/animation": { "version": "10.18.0", "resolved": "https://registry.npmjs.org/@motionone/animation/-/animation-10.18.0.tgz", @@ -2443,7 +2388,7 @@ "version": "15.7.12", "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.12.tgz", "integrity": "sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q==", - "devOptional": true, + "dev": true, "license": "MIT" }, "node_modules/@types/ramda": { @@ -2456,11 +2401,10 @@ } }, "node_modules/@types/react": { - "version": "18.3.4", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.4.tgz", - "integrity": "sha512-J7W30FTdfCxDDjmfRM+/JqLHBIyl7xUIp9kwK637FGmY7+mkSFSe6L4jpZzhj5QMfLssSDP4/i75AKkrdC7/Jw==", - "devOptional": true, - "license": "MIT", + "version": "18.3.5", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.5.tgz", + "integrity": "sha512-WeqMfGJLGuLCqHGYRGHxnKrXcTitc6L/nBUWfWPcTarG3t9PsquqUMuVeXZeca+mglY4Vo5GZjCi0A3Or2lnxA==", + "dev": true, "dependencies": { "@types/prop-types": "*", "csstype": "^3.0.2" @@ -2470,7 +2414,7 @@ "version": "18.3.0", "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.0.tgz", "integrity": "sha512-EhwApuTmMBmXuFOikhQLIBUn6uFg81SwLMOAUgodJF14SOBOCMdU04gDoYi0WOJJHD144TL32z4yDqCW3dnkQg==", - "devOptional": true, + "dev": true, "license": "MIT", "dependencies": { "@types/react": "*" @@ -2507,17 +2451,17 @@ "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.2.0.tgz", - "integrity": "sha512-02tJIs655em7fvt9gps/+4k4OsKULYGtLBPJfOsmOq1+3cdClYiF0+d6mHu6qDnTcg88wJBkcPLpQhq7FyDz0A==", + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.4.0.tgz", + "integrity": "sha512-rg8LGdv7ri3oAlenMACk9e+AR4wUV0yrrG+XKsGKOK0EVgeEDqurkXMPILG2836fW4ibokTB5v4b6Z9+GYQDEw==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.2.0", - "@typescript-eslint/type-utils": "8.2.0", - "@typescript-eslint/utils": "8.2.0", - "@typescript-eslint/visitor-keys": "8.2.0", + "@typescript-eslint/scope-manager": "8.4.0", + "@typescript-eslint/type-utils": "8.4.0", + "@typescript-eslint/utils": "8.4.0", + "@typescript-eslint/visitor-keys": "8.4.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", 
"natural-compare": "^1.4.0", @@ -2541,16 +2485,15 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.2.0.tgz", - "integrity": "sha512-j3Di+o0lHgPrb7FxL3fdEy6LJ/j2NE8u+AP/5cQ9SKb+JLH6V6UHDqJ+e0hXBkHP1wn1YDFjYCS9LBQsZDlDEg==", + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.3.0.tgz", + "integrity": "sha512-h53RhVyLu6AtpUzVCYLPhZGL5jzTD9fZL+SYf/+hYOx2bDkyQXztXSc4tbvKYHzfMXExMLiL9CWqJmVz6+78IQ==", "dev": true, - "license": "BSD-2-Clause", "dependencies": { - "@typescript-eslint/scope-manager": "8.2.0", - "@typescript-eslint/types": "8.2.0", - "@typescript-eslint/typescript-estree": "8.2.0", - "@typescript-eslint/visitor-keys": "8.2.0", + "@typescript-eslint/scope-manager": "8.3.0", + "@typescript-eslint/types": "8.3.0", + "@typescript-eslint/typescript-estree": "8.3.0", + "@typescript-eslint/visitor-keys": "8.3.0", "debug": "^4.3.4" }, "engines": { @@ -2569,15 +2512,90 @@ } } }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.3.0.tgz", + "integrity": "sha512-mz2X8WcN2nVu5Hodku+IR8GgCOl4C0G/Z1ruaWN4dgec64kDBabuXyPAr+/RgJtumv8EEkqIzf3X2U5DUKB2eg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.3.0", + "@typescript-eslint/visitor-keys": "8.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.3.0.tgz", + "integrity": "sha512-y6sSEeK+facMaAyixM36dQ5NVXTnKWunfD1Ft4xraYqxP0lC0POJmIaL/mw72CUMqjY9qfyVfXafMeaUj0noWw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.3.0.tgz", + "integrity": "sha512-Mq7FTHl0R36EmWlCJWojIC1qn/ZWo2YiWYc1XVtasJ7FIgjo0MVv9rZWXEE7IK2CGrtwe1dVOxWwqXUdNgfRCA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.3.0", + "@typescript-eslint/visitor-keys": "8.3.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.3.0.tgz", + "integrity": "sha512-RmZwrTbQ9QveF15m/Cl28n0LXD6ea2CjkhH5rQ55ewz3H24w+AMCJHPVYaZ8/0HoG8Z3cLLFFycRXxeO2tz9FA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.3.0", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": 
"opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/scope-manager": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.2.0.tgz", - "integrity": "sha512-OFn80B38yD6WwpoHU2Tz/fTz7CgFqInllBoC3WP+/jLbTb4gGPTy9HBSTsbDWkMdN55XlVU0mMDYAtgvlUspGw==", + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.4.0.tgz", + "integrity": "sha512-n2jFxLeY0JmKfUqy3P70rs6vdoPjHK8P/w+zJcV3fk0b0BwRXC/zxRTEnAsgYT7MwdQDt/ZEbtdzdVC+hcpF0A==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.2.0", - "@typescript-eslint/visitor-keys": "8.2.0" + "@typescript-eslint/types": "8.4.0", + "@typescript-eslint/visitor-keys": "8.4.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2588,14 +2606,14 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.2.0.tgz", - "integrity": "sha512-g1CfXGFMQdT5S+0PSO0fvGXUaiSkl73U1n9LTK5aRAFnPlJ8dLKkXr4AaLFvPedW8lVDoMgLLE3JN98ZZfsj0w==", + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.4.0.tgz", + "integrity": "sha512-pu2PAmNrl9KX6TtirVOrbLPLwDmASpZhK/XU7WvoKoCUkdtq9zF7qQ7gna0GBZFN0hci0vHaSusiL2WpsQk37A==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/typescript-estree": "8.2.0", - "@typescript-eslint/utils": "8.2.0", + "@typescript-eslint/typescript-estree": "8.4.0", + "@typescript-eslint/utils": "8.4.0", "debug": "^4.3.4", "ts-api-utils": "^1.3.0" }, @@ -2613,9 +2631,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.2.0.tgz", - "integrity": "sha512-6a9QSK396YqmiBKPkJtxsgZZZVjYQ6wQ/TlI0C65z7vInaETuC6HAHD98AGLC8DyIPqHytvNuS8bBVvNLKyqvQ==", + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.4.0.tgz", + "integrity": "sha512-T1RB3KQdskh9t3v/qv7niK6P8yvn7ja1mS7QK7XfRVL6wtZ8/mFs/FHf4fKvTA0rKnqnYxl/uHFNbnEt0phgbw==", "dev": true, "license": "MIT", "engines": { @@ -2627,16 +2645,16 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.2.0.tgz", - "integrity": "sha512-kiG4EDUT4dImplOsbh47B1QnNmXSoUqOjWDvCJw/o8LgfD0yr7k2uy54D5Wm0j4t71Ge1NkynGhpWdS0dEIAUA==", + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.4.0.tgz", + "integrity": "sha512-kJ2OIP4dQw5gdI4uXsaxUZHRwWAGpREJ9Zq6D5L0BweyOrWsL6Sz0YcAZGWhvKnH7fm1J5YFE1JrQL0c9dd53A==", "dev": true, "license": "BSD-2-Clause", "dependencies": { - "@typescript-eslint/types": "8.2.0", - "@typescript-eslint/visitor-keys": "8.2.0", + "@typescript-eslint/types": "8.4.0", + "@typescript-eslint/visitor-keys": "8.4.0", "debug": "^4.3.4", - "globby": "^11.1.0", + "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", @@ -2656,16 +2674,16 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.2.0.tgz", - "integrity": "sha512-O46eaYKDlV3TvAVDNcoDzd5N550ckSe8G4phko++OCSC1dYIb9LTc3HDGYdWqWIAT5qDUKphO6sd9RrpIJJPfg==", + "version": "8.4.0", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.4.0.tgz", + "integrity": "sha512-swULW8n1IKLjRAgciCkTCafyTHHfwVQFt8DovmaF69sKbOxTSFMmIZaSHjqO9i/RV0wIblaawhzvtva8Nmm7lQ==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", - "@typescript-eslint/scope-manager": "8.2.0", - "@typescript-eslint/types": "8.2.0", - "@typescript-eslint/typescript-estree": "8.2.0" + "@typescript-eslint/scope-manager": "8.4.0", + "@typescript-eslint/types": "8.4.0", + "@typescript-eslint/typescript-estree": "8.4.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2679,13 +2697,13 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.2.0.tgz", - "integrity": "sha512-sbgsPMW9yLvS7IhCi8IpuK1oBmtbWUNP+hBdwl/I9nzqVsszGnNGti5r9dUtF5RLivHUFFIdRvLiTsPhzSyJ3Q==", + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.4.0.tgz", + "integrity": "sha512-zTQD6WLNTre1hj5wp09nBIDiOc2U5r/qmzo7wxPn4ZgAjHql09EofqhF9WF+fZHzL5aCyaIpPcT2hyxl73kr9A==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.2.0", + "@typescript-eslint/types": "8.4.0", "eslint-visitor-keys": "^3.4.3" }, "engines": { @@ -2798,16 +2816,6 @@ "node": ">=10" } }, - "node_modules/array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -3132,7 +3140,7 @@ "version": "3.1.3", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", - "devOptional": true, + "dev": true, "license": "MIT" }, "node_modules/debounce-promise": { @@ -3223,19 +3231,6 @@ "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==" }, - "node_modules/dir-glob": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "dev": true, - "license": "MIT", - "dependencies": { - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/dompurify": { "version": "3.1.4", "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.1.4.tgz", @@ -3848,27 +3843,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/globby": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", - "dev": true, - "license": "MIT", - "dependencies": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/graphemer": { "version": "1.4.0", "resolved": 
"https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", @@ -4712,16 +4686,6 @@ "node": ">=8" } }, - "node_modules/path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/picocolors": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", @@ -5555,16 +5519,6 @@ "simple-concat": "^1.0.0" } }, - "node_modules/slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/source-map-js": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", @@ -5627,12 +5581,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/style-mod": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.2.tgz", - "integrity": "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw==", - "peer": true - }, "node_modules/style-value-types": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/style-value-types/-/style-value-types-5.0.0.tgz", @@ -6059,18 +6007,6 @@ "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz", "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==" }, - "node_modules/w3c-keyname": { - "version": "2.2.8", - "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz", - "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==", - "peer": true - }, - "node_modules/w3c-keyname": { - "version": "2.2.8", - "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz", - "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==", - "peer": true - }, "node_modules/web-streams-polyfill": { "version": "3.3.3", "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", diff --git a/playground/package.json b/playground/package.json index 9a10720778..cf14266447 100644 --- a/playground/package.json +++ b/playground/package.json @@ -17,11 +17,11 @@ "swagger-ui-react": "^5.17.14" }, "devDependencies": { - "@types/react": "^18.3.3", + "@types/react": "^18.3.5", "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^8.2.0", - "@typescript-eslint/parser": "^8.2.0", + "@typescript-eslint/eslint-plugin": "^8.3.0", + "@typescript-eslint/parser": "^8.3.0", "@vitejs/plugin-react-swc": "^3.7.0", "eslint": "^9.9.1", "eslint-plugin-react-hooks": "^4.6.2", From 04531c3a83a93fd0dc9522adc99bfb4637306439 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Sat, 7 Sep 2024 12:43:30 -0400 Subject: [PATCH 08/71] fix: Rework relation field kinds (#2961) ## Relevant issue(s) Resolves #2619 #2493 ## Description Reworks relation field kinds so that relations are not defined by names (which are mutable). This PR replaces the old `ObjectKind` and `ObjectArrayKind`s with `CollectionKind`, `SchemaKind` and `SelfKind`. 
`NamedKind` has also been added - it allows users to interact with the definitions using their names (as before); the kind is then translated into one of the other new kinds prior to storage. Because Schemas must form a DAG (a consequence of their CIDs, SchemaVersionID and Root), Schemas defined within the same action that form circular relations (e.g. User=>Dog=>User) have their CIDs grouped into a set with the same base ID, plus an index relative to their lexicographical order (name) within the set suffixed onto their CIDs. This change is not user-visible, apart from the `-[index]` suffix on their CIDs. Quite a lot had to change to facilitate this, and only a handful of things have been broken out to separate commits. Most of the work is in commit `Rework relation field kinds` - when reviewing that commit, I suggest starting with `tests/integration/collection_description/updates/replace/name_one_many_test.go` to see the original bug, followed by a quick scan of `client/schema_field_description.go`, then a look at `tests/integration/schema/self_ref_test.go` before properly reviewing the changes. The bulk of the self-reference logic is in `internal/db/schema_id.go`. --- client/db.go | 3 + client/definitions.go | 171 ++++ client/errors.go | 9 + client/normal_value_test.go | 4 +- client/request/consts.go | 33 +- client/schema_field_description.go | 256 ++++-- .../i2619-relation-rework.md | 3 + internal/core/key.go | 65 ++ internal/db/backup.go | 19 +- internal/db/backup_test.go | 2 +- internal/db/collection.go | 19 +- internal/db/collection_define.go | 80 +- internal/db/collection_id.go | 127 +++ internal/db/collection_update.go | 10 +- internal/db/definition_validation.go | 240 +++--- internal/db/description/collection.go | 49 ++ internal/db/description/schema.go | 23 +- internal/db/errors.go | 9 + internal/db/schema.go | 303 +++---- internal/db/schema_id.go | 393 ++++++++++ internal/planner/mapper/mapper.go | 7 +- internal/request/graphql/schema/collection.go | 17 +- .../graphql/schema/descriptions_test.go | 697 ----------------- internal/request/graphql/schema/generate.go | 11 +- tests/gen/gen_auto.go | 30 +- tests/gen/gen_auto_configurator.go | 105 ++- tests/gen/gen_auto_test.go | 22 +- tests/gen/schema_parser.go | 39 +- .../backup/one_to_many/export_test.go | 4 +- .../backup/one_to_many/import_test.go | 12 +- .../backup/one_to_one/export_test.go | 6 +- .../backup/one_to_one/import_test.go | 22 +- .../backup/self_reference/export_test.go | 8 +- .../backup/self_reference/import_test.go | 47 +- .../updates/replace/id_test.go | 2 +- .../updates/replace/name_one_many_test.go | 159 ++++ .../encryption/commit_relation_test.go | 22 +- .../explain/execute/dagscan_test.go | 10 +- .../explain/execute/delete_test.go | 3 +- tests/integration/explain/execute/fixture.go | 50 +- .../explain/execute/type_join_test.go | 12 +- .../explain/execute/with_average_test.go | 6 + .../explain/execute/with_count_test.go | 2 + .../explain/execute/with_limit_test.go | 5 + .../explain/execute/with_order_test.go | 9 +- .../explain/execute/with_sum_test.go | 5 + ...uery_with_compound_filter_relation_test.go | 8 +- .../index/query_with_relation_filter_test.go | 14 +- .../one_to_many/with_alias_test.go | 2 +- .../one_to_one_to_one/with_txn_test.go | 36 +- .../one_to_many/with_show_deleted_test.go | 12 +- .../one_to_one_to_one/with_id_test.go | 18 +- .../one_to_one_to_one/with_txn_test.go | 52 +- .../field_kinds/one_to_one/with_alias_test.go | 16 +- .../one_to_one/with_self_ref_test.go | 36 +- .../one_to_one/with_simple_test.go
| 18 +- .../query/one_to_many/with_cid_doc_id_test.go | 16 +- .../with_count_limit_offset_test.go | 8 +- .../one_to_many/with_count_limit_test.go | 2 +- .../query/one_to_many/with_doc_id_test.go | 2 +- .../query/one_to_many/with_doc_ids_test.go | 6 +- .../with_filter_related_id_test.go | 12 +- .../one_to_many/with_group_filter_test.go | 8 +- .../with_group_related_id_alias_test.go | 150 ++-- .../one_to_many/with_group_related_id_test.go | 68 +- .../query/one_to_many/with_group_test.go | 22 +- .../query/one_to_many/with_related_id_test.go | 18 +- .../one_to_many/with_sum_limit_offset_test.go | 2 +- .../query/one_to_many_to_many/joins_test.go | 44 +- .../query/one_to_many_to_one/fixture.go | 10 +- .../query/one_to_many_to_one/joins_test.go | 34 +- .../query/one_to_many_to_one/simple_test.go | 4 +- .../one_to_many_to_one/with_filter_test.go | 12 +- .../query/one_to_one/simple_test.go | 10 +- .../one_to_one/with_clashing_id_field_test.go | 2 +- .../with_group_related_id_alias_test.go | 20 +- .../one_to_one/with_group_related_id_test.go | 20 +- .../query/one_to_one_multiple/simple_test.go | 12 +- .../query/one_to_one_to_one/simple_test.go | 24 +- .../query/one_to_two_many/simple_test.go | 80 +- tests/integration/schema/one_many_test.go | 132 +++- tests/integration/schema/one_one_test.go | 133 ++++ tests/integration/schema/relations_test.go | 144 ---- tests/integration/schema/self_ref_test.go | 737 ++++++++++++++++++ .../field/kind/foreign_object_array_test.go | 2 +- .../schema/updates/remove/simple_test.go | 4 +- .../schema/updates/replace/simple_test.go | 58 -- tests/integration/state.go | 8 +- tests/integration/utils.go | 14 +- tests/predefined/gen_predefined.go | 106 +-- tests/predefined/gen_predefined_test.go | 17 +- 91 files changed, 3333 insertions(+), 1950 deletions(-) create mode 100644 docs/data_format_changes/i2619-relation-rework.md create mode 100644 internal/db/collection_id.go create mode 100644 internal/db/schema_id.go delete mode 100644 internal/request/graphql/schema/descriptions_test.go create mode 100644 tests/integration/collection_description/updates/replace/name_one_many_test.go delete mode 100644 tests/integration/schema/relations_test.go create mode 100644 tests/integration/schema/self_ref_test.go diff --git a/client/db.go b/client/db.go index ad2229cdb0..e77dd6cb87 100644 --- a/client/db.go +++ b/client/db.go @@ -283,6 +283,9 @@ type CollectionFetchOptions struct { // If provided, only collections with schemas of this root will be returned. SchemaRoot immutable.Option[string] + // If provided, only collections with this root will be returned. + Root immutable.Option[uint32] + // If provided, only collections with this name will be returned. Name immutable.Option[string] diff --git a/client/definitions.go b/client/definitions.go index c32fd41b4f..af571d5983 100644 --- a/client/definitions.go +++ b/client/definitions.go @@ -11,9 +11,15 @@ package client import ( + "context" + "errors" + "fmt" "strings" + "github.com/sourcenetwork/immutable" + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/datastore" ) // CollectionDefinition contains the metadata defining what a Collection is. @@ -195,3 +201,168 @@ func (f FieldDefinition) GetSecondaryRelationField(c CollectionDefinition) (Fiel secondary, valid := c.GetFieldByName(strings.TrimSuffix(f.Name, request.RelatedObjectID)) return secondary, valid && !secondary.IsPrimaryRelation } + +// DefinitionCache is an object providing easy access to cached collection definitions. 
+type DefinitionCache struct { + // The full set of [CollectionDefinition]s within this cache + Definitions []CollectionDefinition + + // The cached Definitions mapped by the Root of their [SchemaDescription] + DefinitionsBySchemaRoot map[string]CollectionDefinition + + // The cached Definitions mapped by the Root of their [CollectionDescription] + DefinitionsByCollectionRoot map[uint32]CollectionDefinition +} + +// NewDefinitionCache creates a new [DefinitionCache] populated with the given [CollectionDefinition]s. +func NewDefinitionCache(definitions []CollectionDefinition) DefinitionCache { + definitionsBySchemaRoot := make(map[string]CollectionDefinition, len(definitions)) + definitionsByCollectionRoot := make(map[uint32]CollectionDefinition, len(definitions)) + + for _, def := range definitions { + definitionsBySchemaRoot[def.Schema.Root] = def + definitionsByCollectionRoot[def.Description.RootID] = def + } + + return DefinitionCache{ + Definitions: definitions, + DefinitionsBySchemaRoot: definitionsBySchemaRoot, + DefinitionsByCollectionRoot: definitionsByCollectionRoot, + } +} + +// GetDefinition returns the definition that the given [FieldKind] points to, if it is found in the +// given [DefinitionCache]. +// +// If the related definition is not found, default and false will be returned. +func GetDefinition( + cache DefinitionCache, + host CollectionDefinition, + kind FieldKind, +) (CollectionDefinition, bool) { + switch typedKind := kind.(type) { + case *NamedKind: + for _, def := range cache.Definitions { + if def.GetName() == typedKind.Name { + return def, true + } + } + + return CollectionDefinition{}, false + + case *SchemaKind: + def, ok := cache.DefinitionsBySchemaRoot[typedKind.Root] + return def, ok + + case *CollectionKind: + def, ok := cache.DefinitionsByCollectionRoot[typedKind.Root] + return def, ok + + case *SelfKind: + if host.Description.RootID != 0 { + return host, true + } + + if typedKind.RelativeID == "" { + return host, true + } + + hostIDBase := strings.Split(host.Schema.Root, "-")[0] + targetID := fmt.Sprintf("%s-%s", hostIDBase, typedKind.RelativeID) + + def, ok := cache.DefinitionsBySchemaRoot[targetID] + return def, ok + + default: + // no-op + } + + return CollectionDefinition{}, false +} + +// GetDefinitionFromStore returns the definition that the given [FieldKind] points to, if it is found +// in the given store. +// +// If the related definition is not found, or an error occurs, default and false will be returned. 
+func GetDefinitionFromStore( + ctx context.Context, + store Store, + host CollectionDefinition, + kind FieldKind, +) (CollectionDefinition, bool, error) { + switch typedKind := kind.(type) { + case *NamedKind: + col, err := store.GetCollectionByName(ctx, typedKind.Name) + if errors.Is(err, datastore.ErrNotFound) { + schemas, err := store.GetSchemas(ctx, SchemaFetchOptions{ + Name: immutable.Some(typedKind.Name), + }) + if len(schemas) == 0 || err != nil { + return CollectionDefinition{}, false, err + } + + return CollectionDefinition{ + // todo - returning the first is a temporary simplification until + // https://github.com/sourcenetwork/defradb/issues/2934 + Schema: schemas[0], + }, true, nil + } else if err != nil { + return CollectionDefinition{}, false, err + } + + return col.Definition(), true, nil + + case *SchemaKind: + schemas, err := store.GetSchemas(ctx, SchemaFetchOptions{ + Root: immutable.Some(typedKind.Root), + }) + if len(schemas) == 0 || err != nil { + return CollectionDefinition{}, false, err + } + + return CollectionDefinition{ + // todo - returning the first is a temporary simplification until + // https://github.com/sourcenetwork/defradb/issues/2934 + Schema: schemas[0], + }, true, nil + + case *CollectionKind: + cols, err := store.GetCollections(ctx, CollectionFetchOptions{ + Root: immutable.Some(typedKind.Root), + }) + + if len(cols) == 0 || err != nil { + return CollectionDefinition{}, false, err + } + + return cols[0].Definition(), true, nil + + case *SelfKind: + if host.Description.RootID != 0 { + return host, true, nil + } + + if typedKind.RelativeID == "" { + return host, true, nil + } + + hostIDBase := strings.Split(host.Schema.Root, "-")[0] + targetID := fmt.Sprintf("%s-%s", hostIDBase, typedKind.RelativeID) + + cols, err := store.GetCollections(ctx, CollectionFetchOptions{ + SchemaRoot: immutable.Some(targetID), + }) + if len(cols) == 0 || err != nil { + return CollectionDefinition{}, false, err + } + def := cols[0].Definition() + def.Description = CollectionDescription{} + + return def, true, nil + + default: + // no-op + } + + return CollectionDefinition{}, false, nil +} diff --git a/client/errors.go b/client/errors.go index dac8ebcc87..46b598b52c 100644 --- a/client/errors.go +++ b/client/errors.go @@ -32,6 +32,7 @@ const ( errCanNotNormalizeValue string = "can not normalize value" errCanNotTurnNormalValueIntoArray string = "can not turn normal value into array" errCanNotMakeNormalNilFromFieldKind string = "can not make normal nil from field kind" + errFailedToParseKind string = "failed to parse kind" ) // Errors returnable from this package. @@ -57,6 +58,7 @@ var ( ErrCanNotTurnNormalValueIntoArray = errors.New(errCanNotTurnNormalValueIntoArray) ErrCanNotMakeNormalNilFromFieldKind = errors.New(errCanNotMakeNormalNilFromFieldKind) ErrCollectionNotFound = errors.New(errCollectionNotFound) + ErrFailedToParseKind = errors.New(errFailedToParseKind) ) // NewErrFieldNotExist returns an error indicating that the given field does not exist. 
@@ -165,3 +167,10 @@ func NewErrCRDTKindMismatch(cType, kind string) error { func NewErrInvalidJSONPaylaod(payload string) error { return errors.New(errInvalidJSONPayload, errors.NewKV("Payload", payload)) } + +func NewErrFailedToParseKind(kind []byte) error { + return errors.New( + errFailedToParseKind, + errors.NewKV("Kind", kind), + ) +} diff --git a/client/normal_value_test.go b/client/normal_value_test.go index 73e9def5d6..ce454a55b4 100644 --- a/client/normal_value_test.go +++ b/client/normal_value_test.go @@ -1393,8 +1393,8 @@ func TestNormalValue_NewNormalNil(t *testing.T) { for _, kind := range FieldKindStringToEnumMapping { fieldKinds = append(fieldKinds, kind) } - fieldKinds = append(fieldKinds, ObjectKind("Object")) - fieldKinds = append(fieldKinds, ObjectArrayKind("ObjectArr")) + fieldKinds = append(fieldKinds, NewCollectionKind(1, false)) + fieldKinds = append(fieldKinds, NewCollectionKind(1, true)) for _, kind := range fieldKinds { if kind.IsNillable() { diff --git a/client/request/consts.go b/client/request/consts.go index ed39cfd4a7..8b98199827 100644 --- a/client/request/consts.go +++ b/client/request/consts.go @@ -72,6 +72,12 @@ const ( DeltaArgPriority = "Priority" DeltaArgDocID = "DocID" + // SelfTypeName is the name given to relation field types that reference the host type. + // + // For example, when a `User` collection contains a relation to the `User` collection the field + // will be of type [SelfTypeName]. + SelfTypeName = "Self" + LinksNameFieldName = "name" LinksCidFieldName = "cid" @@ -85,15 +91,24 @@ var ( string(DESC): DESC, } - ReservedFields = map[string]bool{ - TypeNameFieldName: true, - VersionFieldName: true, - GroupFieldName: true, - CountFieldName: true, - SumFieldName: true, - AverageFieldName: true, - DocIDFieldName: true, - DeletedFieldName: true, + // ReservedTypeNames is the set of type names reserved by the system. + // + // Users cannot define types using these names. + // + // For example, collections and schemas may not be defined using these names. + ReservedTypeNames = map[string]struct{}{ + SelfTypeName: {}, + } + + ReservedFields = map[string]struct{}{ + TypeNameFieldName: {}, + VersionFieldName: {}, + GroupFieldName: {}, + CountFieldName: {}, + SumFieldName: {}, + AverageFieldName: {}, + DocIDFieldName: {}, + DeletedFieldName: {}, } Aggregates = map[string]struct{}{ diff --git a/client/schema_field_description.go b/client/schema_field_description.go index 4c8f0f72d0..cc5690b72c 100644 --- a/client/schema_field_description.go +++ b/client/schema_field_description.go @@ -12,8 +12,10 @@ package client import ( "encoding/json" + "fmt" "strconv" - "strings" + + "github.com/sourcenetwork/defradb/client/request" ) // FieldKind describes the type of a field. type FieldKind interface { // String returns the string representation of this FieldKind. String() string - // Underlying returns the underlying Kind as a string. - // - // If this is an array, it will return the element kind, else it will return the same as - // [String()]. - Underlying() string - // IsNillable returns true if this kind supports nil values. IsNillable() bool @@ -62,16 +58,63 @@ type ScalarKind uint8 // ScalarArrayKind represents arrays of simple scalar field kinds, such as `[Int]`. type ScalarArrayKind uint8 -// ObjectKind represents singular objects (foreign and embedded), such as `User`. -type ObjectKind string +// CollectionKind represents a relationship with a [CollectionDescription].
+type CollectionKind struct { + // If true, this side of the relationship points to many related records. + Array bool + + // The root ID of the related [CollectionDescription]. + Root uint32 +} -// ObjectKind represents arrays of objects (foreign and embedded), such as `[User]`. -type ObjectArrayKind string +// SchemaKind represents a relationship with a [SchemaDescription]. +type SchemaKind struct { + // If true, this side of the relationship points to many related records. + Array bool + + // The root ID of the related [SchemaDescription]. + Root string +} + +// NamedKind represents a temporary declaration of a relationship to another +// [CollectionDefinition]. +// +// This is used only to temporarily describe a relationship, this kind will +// never be persisted in the store and instead will be converted to one of +// [CollectionKind], [SchemaKind] or [SelfKind] first. +type NamedKind struct { + // The current name of the related [CollectionDefinition]. + Name string + + // If true, this side of the relationship points to many related records. + Array bool +} + +// SelfKind represents a relationship with the host. +// +// This includes any other schema that formed a circular dependency with the +// host at the point at which they were created. +// +// For example: the relations in User=>Dog=>User form a circle, and would be +// defined using [SelfKind] instead of [SchemaKind]. +// +// This is because schema IDs are content IDs and cannot be generated for a +// single element within a circular dependency tree. +type SelfKind struct { + // The relative ID to the related type. If this points at its host this + // will be empty. + RelativeID string + + // If true, this side of the relationship points to many related records. + Array bool +} var _ FieldKind = ScalarKind(0) var _ FieldKind = ScalarArrayKind(0) -var _ FieldKind = ObjectKind("") -var _ FieldKind = ObjectArrayKind("") +var _ FieldKind = (*CollectionKind)(nil) +var _ FieldKind = (*SchemaKind)(nil) +var _ FieldKind = (*SelfKind)(nil) +var _ FieldKind = (*NamedKind)(nil) func (k ScalarKind) String() string { switch k { @@ -96,10 +139,6 @@ func (k ScalarKind) String() string { } } -func (k ScalarKind) Underlying() string { - return k.String() -} - func (k ScalarKind) IsNillable() bool { return true } @@ -135,10 +174,6 @@ func (k ScalarArrayKind) String() string { } } -func (k ScalarArrayKind) Underlying() string { - return strings.Trim(k.String(), "[]") -} - func (k ScalarArrayKind) IsNillable() bool { return true } @@ -151,48 +186,115 @@ func (k ScalarArrayKind) IsArray() bool { return true } -func (k ObjectKind) String() string { - return string(k) +func NewCollectionKind(root uint32, isArray bool) *CollectionKind { + return &CollectionKind{ + Root: root, + Array: isArray, + } } -func (k ObjectKind) Underlying() string { - return k.String() +func (k *CollectionKind) String() string { + if k.Array { + return fmt.Sprintf("[%v]", k.Root) + } + return strconv.FormatInt(int64(k.Root), 10) } -func (k ObjectKind) IsNillable() bool { +func (k *CollectionKind) IsNillable() bool { return true } -func (k ObjectKind) IsObject() bool { +func (k *CollectionKind) IsObject() bool { return true } -func (k ObjectKind) IsArray() bool { - return false +func (k *CollectionKind) IsArray() bool { + return k.Array +} + +func NewSchemaKind(root string, isArray bool) *SchemaKind { + return &SchemaKind{ + Root: root, + Array: isArray, + } +} + +func (k *SchemaKind) String() string { + if k.Array { + return fmt.Sprintf("[%v]", k.Root) + } + return k.Root 
+} + +func (k *SchemaKind) IsNillable() bool { + return true } -func (k ObjectArrayKind) String() string { - return "[" + string(k) + "]" +func (k *SchemaKind) IsObject() bool { + return true +} + +func (k *SchemaKind) IsArray() bool { + return k.Array } -func (k ObjectArrayKind) Underlying() string { - return strings.Trim(k.String(), "[]") +func NewSelfKind(relativeID string, isArray bool) *SelfKind { + return &SelfKind{ + RelativeID: relativeID, + Array: isArray, + } +} + +func (k *SelfKind) String() string { + var relativeName string + if k.RelativeID != "" { + relativeName = fmt.Sprintf("%s-%s", request.SelfTypeName, k.RelativeID) + } else { + relativeName = request.SelfTypeName + } + + if k.Array { + return fmt.Sprintf("[%s]", relativeName) + } + return relativeName } -func (k ObjectArrayKind) IsNillable() bool { +func (k *SelfKind) IsNillable() bool { return true } -func (k ObjectArrayKind) IsObject() bool { +func (k *SelfKind) IsObject() bool { return true } -func (k ObjectArrayKind) IsArray() bool { +func (k *SelfKind) IsArray() bool { + return k.Array +} + +func NewNamedKind(name string, isArray bool) *NamedKind { + return &NamedKind{ + Name: name, + Array: isArray, + } +} + +func (k *NamedKind) String() string { + if k.Array { + return fmt.Sprintf("[%v]", k.Name) + } + return k.Name +} + +func (k *NamedKind) IsNillable() bool { return true } -func (k ObjectArrayKind) MarshalJSON() ([]byte, error) { - return []byte(`"` + k.String() + `"`), nil +func (k *NamedKind) IsObject() bool { + return true +} + +func (k *NamedKind) IsArray() bool { + return k.Array } // Note: These values are serialized and persisted in the database, avoid modifying existing values. @@ -229,22 +331,24 @@ const ( // in the future. They currently roughly correspond to the GQL field types, but this // equality is not guaranteed. var FieldKindStringToEnumMapping = map[string]FieldKind{ - "ID": FieldKind_DocID, - "Boolean": FieldKind_NILLABLE_BOOL, - "[Boolean]": FieldKind_NILLABLE_BOOL_ARRAY, - "[Boolean!]": FieldKind_BOOL_ARRAY, - "Int": FieldKind_NILLABLE_INT, - "[Int]": FieldKind_NILLABLE_INT_ARRAY, - "[Int!]": FieldKind_INT_ARRAY, - "DateTime": FieldKind_NILLABLE_DATETIME, - "Float": FieldKind_NILLABLE_FLOAT, - "[Float]": FieldKind_NILLABLE_FLOAT_ARRAY, - "[Float!]": FieldKind_FLOAT_ARRAY, - "String": FieldKind_NILLABLE_STRING, - "[String]": FieldKind_NILLABLE_STRING_ARRAY, - "[String!]": FieldKind_STRING_ARRAY, - "Blob": FieldKind_NILLABLE_BLOB, - "JSON": FieldKind_NILLABLE_JSON, + "ID": FieldKind_DocID, + "Boolean": FieldKind_NILLABLE_BOOL, + "[Boolean]": FieldKind_NILLABLE_BOOL_ARRAY, + "[Boolean!]": FieldKind_BOOL_ARRAY, + "Int": FieldKind_NILLABLE_INT, + "[Int]": FieldKind_NILLABLE_INT_ARRAY, + "[Int!]": FieldKind_INT_ARRAY, + "DateTime": FieldKind_NILLABLE_DATETIME, + "Float": FieldKind_NILLABLE_FLOAT, + "[Float]": FieldKind_NILLABLE_FLOAT_ARRAY, + "[Float!]": FieldKind_FLOAT_ARRAY, + "String": FieldKind_NILLABLE_STRING, + "[String]": FieldKind_NILLABLE_STRING_ARRAY, + "[String!]": FieldKind_STRING_ARRAY, + "Blob": FieldKind_NILLABLE_BLOB, + "JSON": FieldKind_NILLABLE_JSON, + request.SelfTypeName: NewSelfKind("", false), + fmt.Sprintf("[%s]", request.SelfTypeName): NewSelfKind("", true), } // IsRelation returns true if this field is a relation. @@ -279,11 +383,40 @@ func (f *SchemaFieldDescription) UnmarshalJSON(bytes []byte) error { return nil } +// objectKind is a private type used to facilitate the unmarshalling +// of json to a [FieldKind]. 
+type objectKind struct { + Array bool + Root any + RelativeID string +} + func parseFieldKind(bytes json.RawMessage) (FieldKind, error) { if len(bytes) == 0 { return FieldKind_None, nil } + if bytes[0] == '{' { + var objKind objectKind + err := json.Unmarshal(bytes, &objKind) + if err != nil { + return nil, err + } + + if objKind.Root == nil { + return NewSelfKind(objKind.RelativeID, objKind.Array), nil + } + + switch root := objKind.Root.(type) { + case float64: + return NewCollectionKind(uint32(root), objKind.Array), nil + case string: + return NewSchemaKind(root, objKind.Array), nil + default: + return nil, NewErrFailedToParseKind(bytes) + } + } + if bytes[0] != '"' { // If the Kind is not represented by a string, assume try to parse it to an int, as // that is the only other type we support. @@ -313,12 +446,13 @@ func parseFieldKind(bytes json.RawMessage) (FieldKind, error) { return kind, nil } - // If we don't find the string representation of this type in the - // scalar mapping, assume it is an object - if it is not, validation - // will catch this later. If it is unknown we have no way of telling - // as to whether the user thought it was a scalar or an object anyway. - if strKind[0] == '[' { - return ObjectArrayKind(strings.Trim(strKind, "[]")), nil + isArray := strKind[0] == '[' + if isArray { + // Strip the brackets + strKind = strKind[1 : len(strKind)-1] } - return ObjectKind(strKind), nil + + // This is used by patch schema/collection, where new fields added + // by users will be initially added as [NamedKind]s. + return NewNamedKind(strKind, isArray), nil } diff --git a/docs/data_format_changes/i2619-relation-rework.md b/docs/data_format_changes/i2619-relation-rework.md new file mode 100644 index 0000000000..af0f8b56af --- /dev/null +++ b/docs/data_format_changes/i2619-relation-rework.md @@ -0,0 +1,3 @@ +# Rework relation field kinds + +The way relations are modelled has been reworked, this affects the format in which they are persisted in storage. diff --git a/internal/core/key.go b/internal/core/key.go index 8f0ab3fd4e..ecbe3fd0d7 100644 --- a/internal/core/key.go +++ b/internal/core/key.go @@ -47,6 +47,7 @@ const ( COLLECTION_ID = "/collection/id" COLLECTION_NAME = "/collection/name" COLLECTION_SCHEMA_VERSION = "/collection/version" + COLLECTION_ROOT = "/collection/root" COLLECTION_INDEX = "/collection/index" SCHEMA_VERSION = "/schema/version/v" SCHEMA_VERSION_ROOT = "/schema/version/r" @@ -142,6 +143,17 @@ type CollectionSchemaVersionKey struct { var _ Key = (*CollectionSchemaVersionKey)(nil) +// CollectionRootKey points to nil, but the keys/prefix can be used +// to get collections that are of a given RootID. +// +// It is stored in the format `/collection/root/[RootID]/[CollectionID]`. +type CollectionRootKey struct { + RootID uint32 + CollectionID uint32 +} + +var _ Key = (*CollectionRootKey)(nil) + // CollectionIndexKey to a stored description of an index type CollectionIndexKey struct { // CollectionID is the id of the collection that the index is on @@ -288,6 +300,37 @@ func NewCollectionSchemaVersionKeyFromString(key string) (CollectionSchemaVersio }, nil } +func NewCollectionRootKey(rootID uint32, collectionID uint32) CollectionRootKey { + return CollectionRootKey{ + RootID: rootID, + CollectionID: collectionID, + } +} + +// NewCollectionRootKeyFromString creates a new [CollectionRootKey]. +// +// It expects the key to be in the format `/collection/root/[RootID]/[CollectionID]`. 
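+//
+// For example (illustrative values): "/collection/root/2/7" parses to
+// CollectionRootKey{RootID: 2, CollectionID: 7}.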
+func NewCollectionRootKeyFromString(key string) (CollectionRootKey, error) { + keyArr := strings.Split(key, "/") + if len(keyArr) != 5 || keyArr[1] != COLLECTION || keyArr[2] != "root" { + return CollectionRootKey{}, ErrInvalidKey + } + rootID, err := strconv.Atoi(keyArr[3]) + if err != nil { + return CollectionRootKey{}, err + } + + collectionID, err := strconv.Atoi(keyArr[4]) + if err != nil { + return CollectionRootKey{}, err + } + + return CollectionRootKey{ + RootID: uint32(rootID), + CollectionID: uint32(collectionID), + }, nil +} + // NewCollectionIndexKey creates a new CollectionIndexKey from a collection name and index name. func NewCollectionIndexKey(colID immutable.Option[uint32], indexName string) CollectionIndexKey { return CollectionIndexKey{CollectionID: colID, IndexName: indexName} @@ -588,6 +631,28 @@ func (k CollectionSchemaVersionKey) ToDS() ds.Key { return ds.NewKey(k.ToString()) } +func (k CollectionRootKey) ToString() string { + result := COLLECTION_ROOT + + if k.RootID != 0 { + result = fmt.Sprintf("%s/%s", result, strconv.Itoa(int(k.RootID))) + } + + if k.CollectionID != 0 { + result = fmt.Sprintf("%s/%s", result, strconv.Itoa(int(k.CollectionID))) + } + + return result +} + +func (k CollectionRootKey) Bytes() []byte { + return []byte(k.ToString()) +} + +func (k CollectionRootKey) ToDS() ds.Key { + return ds.NewKey(k.ToString()) +} + func (k SchemaVersionKey) ToString() string { result := SCHEMA_VERSION diff --git a/internal/db/backup.go b/internal/db/backup.go index 1353376f34..e41a29178d 100644 --- a/internal/db/backup.go +++ b/internal/db/backup.go @@ -134,10 +134,12 @@ func (db *db) basicExport(ctx context.Context, config *client.BackupConfig) (err cols = append(cols, col) } } - colNameCache := map[string]struct{}{} + + definitions := make([]client.CollectionDefinition, 0, len(cols)) for _, col := range cols { - colNameCache[col.Name().Value()] = struct{}{} + definitions = append(definitions, col.Definition()) } + definitionCache := client.NewDefinitionCache(definitions) tempFile := config.Filepath + ".temp" f, err := os.Create(tempFile) @@ -213,9 +215,6 @@ func (db *db) basicExport(ctx context.Context, config *client.BackupConfig) (err // replace any foreign key if it needs to be changed for _, field := range col.Schema().Fields { if field.Kind.IsObject() && !field.Kind.IsArray() { - if _, ok := colNameCache[field.Kind.Underlying()]; !ok { - continue - } if foreignKey, err := doc.Get(field.Name + request.RelatedObjectID); err == nil { if newKey, ok := keyChangeCache[foreignKey.(string)]; ok { err := doc.Set(field.Name+request.RelatedObjectID, newKey) @@ -227,10 +226,14 @@ func (db *db) basicExport(ctx context.Context, config *client.BackupConfig) (err refFieldName = field.Name + request.RelatedObjectID } } else { - foreignCol, err := db.getCollectionByName(ctx, field.Kind.Underlying()) - if err != nil { - return NewErrFailedToGetCollection(field.Kind.Underlying(), err) + foreignDef, ok := client.GetDefinition(definitionCache, col.Definition(), field.Kind) + if !ok { + // If the collection is not in the cache the backup was not configured to + // handle this collection. 
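+					// (Illustrative: a backup configured to export only `User` will skip
+					// rewriting foreign keys that point into an unexported `Book` collection.)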
+ continue } + foreignCol := db.newCollection(foreignDef.Description, foreignDef.Schema) + foreignDocID, err := client.NewDocIDFromString(foreignKey.(string)) if err != nil { return err diff --git a/internal/db/backup_test.go b/internal/db/backup_test.go index 0d84a394d1..033f95bcd7 100644 --- a/internal/db/backup_test.go +++ b/internal/db/backup_test.go @@ -287,7 +287,7 @@ func TestBasicExport_WithMultipleCollectionsAndUpdate_NoError(t *testing.T) { require.NoError(t, err) expectedMap := map[string]any{} - data := []byte(`{"Book":[{"_docID":"bae-45c92e9c-4d31-5e96-8bd7-3d532734e117", "_docIDNew":"bae-3ca9a4c3-6240-5e86-a00f-9590d2f2ecf3", "author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f", "name":"John and the sourcerers' stone"}, {"_docID":"bae-8fc3d148-869b-5629-ae22-5423c73f709b", "_docIDNew":"bae-33c136bd-4375-54a0-81ff-54ca560c7bb8", "author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f", "name":"Game of chains"}], "User":[{"_docID":"bae-7fca96a2-5f01-5558-a81f-09b47587f26d", "_docIDNew":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f", "age":31, "name":"John"}, {"_docID":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f", "_docIDNew":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f", "age":31, "name":"Bob"}]}`) + data := []byte(`{"Book":[{"_docID":"bae-4a28c746-ccbf-5511-91a9-391036f42f80", "_docIDNew":"bae-d821f684-47de-5b63-b9c7-6eccec368e52", "author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f", "name":"Game of chains"}, {"_docID":"bae-8c8be5c6-d26b-50d4-9378-2acd5fe6959d", "_docIDNew":"bae-c94e52f8-6e91-522c-b6a6-38346a06b3d2", "author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f", "name":"John and the sourcerers' stone"}], "User":[{"_docID":"bae-7fca96a2-5f01-5558-a81f-09b47587f26d", "_docIDNew":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f", "age":31, "name":"John"}, {"_docID":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f", "_docIDNew":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f", "age":31, "name":"Bob"}]}`) err = json.Unmarshal(data, &expectedMap) require.NoError(t, err) require.EqualValues(t, expectedMap, fileMap) diff --git a/internal/db/collection.go b/internal/db/collection.go index 088e5075fa..e6205fecd9 100644 --- a/internal/db/collection.go +++ b/internal/db/collection.go @@ -129,6 +129,13 @@ func (db *db) getCollections( var cols []client.CollectionDescription switch { + case options.Root.HasValue(): + var err error + cols, err = description.GetCollectionsByRoot(ctx, txn, options.Root.Value()) + if err != nil { + return nil, err + } + case options.Name.HasValue(): col, err := description.GetCollectionByName(ctx, txn, options.Name.Value()) if err != nil { @@ -173,6 +180,13 @@ func (db *db) getCollections( continue } } + + if options.Root.HasValue() { + if col.RootID != options.Root.Value() { + continue + } + } + // By default, we don't return inactive collections unless a specific version is requested. 
if !options.IncludeInactive.Value() && !col.Name.HasValue() && !options.SchemaVersionID.HasValue() { continue @@ -745,11 +759,12 @@ func (c *collection) validateOneToOneLinkDoesntAlreadyExist( return nil } - otherCol, err := c.db.getCollectionByName(ctx, objFieldDescription.Kind.Underlying()) + otherCol, _, err := client.GetDefinitionFromStore(ctx, c.db, c.Definition(), objFieldDescription.Kind) if err != nil { return err } - otherObjFieldDescription, _ := otherCol.Description().GetFieldByRelation( + + otherObjFieldDescription, _ := otherCol.Description.GetFieldByRelation( fieldDescription.RelationName, c.Name().Value(), objFieldDescription.Name, diff --git a/internal/db/collection_define.go b/internal/db/collection_define.go index 6eb8d9dddb..357ab07d61 100644 --- a/internal/db/collection_define.go +++ b/internal/db/collection_define.go @@ -20,8 +20,6 @@ import ( "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/client/request" - "github.com/sourcenetwork/defradb/internal/core" "github.com/sourcenetwork/defradb/internal/db/description" ) @@ -36,69 +34,24 @@ func (db *db) createCollections( return nil, err } - txn := mustGetContextTxn(ctx) - + newSchemas := make([]client.SchemaDescription, len(newDefinitions)) for i, def := range newDefinitions { - schemaByName := map[string]client.SchemaDescription{} - for _, existingDefinition := range existingDefinitions { - schemaByName[existingDefinition.Schema.Name] = existingDefinition.Schema - } - for _, newDefinition := range newDefinitions { - schemaByName[newDefinition.Schema.Name] = newDefinition.Schema - } - - schema, err := description.CreateSchemaVersion(ctx, txn, def.Schema) - if err != nil { - return nil, err - } - newDefinitions[i].Description.SchemaVersionID = schema.VersionID - newDefinitions[i].Schema = schema + newSchemas[i] = def.Schema } - for i, def := range newDefinitions { - if len(def.Description.Fields) == 0 { - // This is a schema-only definition, we should not create a collection for it - continue - } - - colSeq, err := db.getSequence(ctx, core.CollectionIDSequenceKey{}) - if err != nil { - return nil, err - } - colID, err := colSeq.next(ctx) - if err != nil { - return nil, err - } - - fieldSeq, err := db.getSequence(ctx, core.NewFieldIDSequenceKey(uint32(colID))) - if err != nil { - return nil, err - } + err = setSchemaIDs(newSchemas) + if err != nil { + return nil, err + } - newDefinitions[i].Description.ID = uint32(colID) - newDefinitions[i].Description.RootID = newDefinitions[i].Description.ID - - for _, localField := range def.Description.Fields { - var fieldID uint64 - if localField.Name == request.DocIDFieldName { - // There is no hard technical requirement for this, we just think it looks nicer - // if the doc id is at the zero index. It makes it look a little nicer in commit - // queries too. 
- fieldID = 0 - } else { - fieldID, err = fieldSeq.next(ctx) - if err != nil { - return nil, err - } - } + for i := range newDefinitions { + newDefinitions[i].Description.SchemaVersionID = newSchemas[i].VersionID + newDefinitions[i].Schema = newSchemas[i] + } - for j := range def.Description.Fields { - if def.Description.Fields[j].Name == localField.Name { - newDefinitions[i].Description.Fields[j].ID = client.FieldID(fieldID) - break - } - } - } + err = db.setCollectionIDs(ctx, newDefinitions) + if err != nil { + return nil, err } err = db.validateNewCollection( @@ -116,7 +69,14 @@ func (db *db) createCollections( return nil, err } + txn := mustGetContextTxn(ctx) + for _, def := range newDefinitions { + _, err := description.CreateSchemaVersion(ctx, txn, def.Schema) + if err != nil { + return nil, err + } + if len(def.Description.Fields) == 0 { // This is a schema-only definition, we should not create a collection for it returnDescriptions = append(returnDescriptions, def) diff --git a/internal/db/collection_id.go b/internal/db/collection_id.go new file mode 100644 index 0000000000..e635a4477f --- /dev/null +++ b/internal/db/collection_id.go @@ -0,0 +1,127 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package db + +import ( + "context" + + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/internal/core" +) + +// setCollectionIDs sets the IDs on a collection description, including field IDs, mutating the input set. +func (db *db) setCollectionIDs(ctx context.Context, newCollections []client.CollectionDefinition) error { + err := db.setCollectionID(ctx, newCollections) + if err != nil { + return err + } + + return db.setFieldIDs(ctx, newCollections) +} + +// setCollectionID sets the IDs directly on a collection description, excluding stuff like field IDs, +// mutating the input set. +func (db *db) setCollectionID(ctx context.Context, newCollections []client.CollectionDefinition) error { + colSeq, err := db.getSequence(ctx, core.CollectionIDSequenceKey{}) + if err != nil { + return err + } + + for i := range newCollections { + if len(newCollections[i].Description.Fields) == 0 { + // This is a schema-only definition, we should not create a collection for it + continue + } + + colID, err := colSeq.next(ctx) + if err != nil { + return err + } + + // Unlike schema, collections can be mutated and thus we need to make sure this function + // does not assign new IDs to existing collections. + if newCollections[i].Description.ID == 0 { + newCollections[i].Description.ID = uint32(colID) + } + + if newCollections[i].Description.RootID == 0 { + newCollections[i].Description.RootID = uint32(colID) + } + } + + return nil +} + +// setFieldIDs sets the field IDs hosted on the given collections, mutating the input set. 
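+//
+// Sketch of the expected outcome (field names are illustrative): for a new
+// collection with fields [_docID, name, owner, owner_id], _docID keeps ID 0 and
+// the remaining fields receive sequential IDs from the root's field ID sequence;
+// fields that already carry a non-zero ID keep it.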
+func (db *db) setFieldIDs(ctx context.Context, definitions []client.CollectionDefinition) error { + collectionsByName := map[string]client.CollectionDescription{} + schemasByName := map[string]client.SchemaDescription{} + for _, def := range definitions { + if def.Description.Name.HasValue() { + collectionsByName[def.Description.Name.Value()] = def.Description + } + schemasByName[def.Schema.Name] = def.Schema + } + + for i := range definitions { + fieldSeq, err := db.getSequence(ctx, core.NewFieldIDSequenceKey(definitions[i].Description.RootID)) + if err != nil { + return err + } + + for j := range definitions[i].Description.Fields { + var fieldID client.FieldID + if definitions[i].Description.Fields[j].ID != client.FieldID(0) { + fieldID = definitions[i].Description.Fields[j].ID + } else if definitions[i].Description.Fields[j].Name == request.DocIDFieldName { + // There is no hard technical requirement for this, we just think it looks nicer + // if the doc id is at the zero index. It makes it look a little nicer in commit + // queries too. + fieldID = 0 + } else { + nextID, err := fieldSeq.next(ctx) + if err != nil { + return err + } + fieldID = client.FieldID(nextID) + } + + if definitions[i].Description.Fields[j].Kind.HasValue() { + switch kind := definitions[i].Description.Fields[j].Kind.Value().(type) { + case *client.NamedKind: + var newKind client.FieldKind + if kind.Name == definitions[i].Description.Name.Value() { + newKind = client.NewSelfKind("", kind.IsArray()) + } else if otherCol, ok := collectionsByName[kind.Name]; ok { + newKind = client.NewCollectionKind(otherCol.RootID, kind.IsArray()) + } else if otherSchema, ok := schemasByName[kind.Name]; ok { + newKind = client.NewSchemaKind(otherSchema.Root, kind.IsArray()) + } else { + // Continue, and let the validation stage return user friendly errors + // if appropriate + continue + } + + definitions[i].Description.Fields[j].Kind = immutable.Some(newKind) + default: + // no-op + } + } + + definitions[i].Description.Fields[j].ID = fieldID + } + } + + return nil +} diff --git a/internal/db/collection_update.go b/internal/db/collection_update.go index d29e562977..9b44f217b1 100644 --- a/internal/db/collection_update.go +++ b/internal/db/collection_update.go @@ -151,12 +151,12 @@ func (c *collection) patchPrimaryDoc( return err } - primaryCol, err := c.db.getCollectionByName(ctx, relationFieldDescription.Kind.Underlying()) + primaryDef, _, err := client.GetDefinitionFromStore(ctx, c.db, c.Definition(), relationFieldDescription.Kind) if err != nil { return err } - primaryField, ok := primaryCol.Description().GetFieldByRelation( + primaryField, ok := primaryDef.Description.GetFieldByRelation( relationFieldDescription.RelationName, secondaryCollectionName, relationFieldDescription.Name, @@ -165,11 +165,12 @@ func (c *collection) patchPrimaryDoc( return client.NewErrFieldNotExist(relationFieldDescription.RelationName) } - primaryIDField, ok := primaryCol.Definition().GetFieldByName(primaryField.Name + request.RelatedObjectID) + primaryIDField, ok := primaryDef.GetFieldByName(primaryField.Name + request.RelatedObjectID) if !ok { return client.NewErrFieldNotExist(primaryField.Name + request.RelatedObjectID) } + primaryCol := c.db.newCollection(primaryDef.Description, primaryDef.Schema) doc, err := primaryCol.Get( ctx, primaryDocID, @@ -185,8 +186,7 @@ func (c *collection) patchPrimaryDoc( return nil } - pc := c.db.newCollection(primaryCol.Description(), primaryCol.Schema()) - err = pc.validateOneToOneLinkDoesntAlreadyExist( + err = 
primaryCol.validateOneToOneLinkDoesntAlreadyExist( ctx, primaryDocID.String(), primaryIDField, diff --git a/internal/db/definition_validation.go b/internal/db/definition_validation.go index 9e28624982..f58e75fa7e 100644 --- a/internal/db/definition_validation.go +++ b/internal/db/definition_validation.go @@ -30,44 +30,62 @@ type definitionState struct { schemaByName map[string]client.SchemaDescription definitionsByName map[string]client.CollectionDefinition + definitionCache client.DefinitionCache } -// newDefinitionState creates a new definitionState object given the provided -// descriptions. -func newDefinitionState( +// newDefinitionStateFromCols creates a new definitionState object given the provided +// collection descriptions. +func newDefinitionStateFromCols( collections []client.CollectionDescription, - schemasByID map[string]client.SchemaDescription, ) *definitionState { collectionsByID := map[uint32]client.CollectionDescription{} definitionsByName := map[string]client.CollectionDefinition{} + definitions := []client.CollectionDefinition{} schemaByName := map[string]client.SchemaDescription{} - schemaVersionsAdded := map[string]struct{}{} for _, col := range collections { if len(col.Fields) == 0 { continue } - schema := schemasByID[col.SchemaVersionID] definition := client.CollectionDefinition{ Description: col, - Schema: schema, } definitionsByName[definition.GetName()] = definition - schemaVersionsAdded[schema.VersionID] = struct{}{} + definitions = append(definitions, definition) collectionsByID[col.ID] = col } - for _, schema := range schemasByID { - schemaByName[schema.Name] = schema + return &definitionState{ + collections: collections, + collectionsByID: collectionsByID, + schemaByID: map[string]client.SchemaDescription{}, + schemaByName: schemaByName, + definitionsByName: definitionsByName, + definitionCache: client.NewDefinitionCache(definitions), + } +} - if _, ok := schemaVersionsAdded[schema.VersionID]; ok { - continue - } +// newDefinitionState creates a new definitionState object given the provided +// definitions. +func newDefinitionState( + definitions []client.CollectionDefinition, +) *definitionState { + collectionsByID := map[uint32]client.CollectionDescription{} + schemasByID := map[string]client.SchemaDescription{} + definitionsByName := map[string]client.CollectionDefinition{} + collections := []client.CollectionDescription{} + schemaByName := map[string]client.SchemaDescription{} + + for _, def := range definitions { + definitionsByName[def.GetName()] = def + schemasByID[def.Schema.VersionID] = def.Schema + schemaByName[def.Schema.Name] = def.Schema - definitionsByName[schema.Name] = client.CollectionDefinition{ - Schema: schema, + if len(def.Description.Fields) != 0 { + collectionsByID[def.Description.ID] = def.Description + collections = append(collections, def.Description) } } @@ -77,6 +95,7 @@ func newDefinitionState( schemaByID: schemasByID, schemaByName: schemaByName, definitionsByName: definitionsByName, + definitionCache: client.NewDefinitionCache(definitions), } } @@ -93,7 +112,7 @@ type definitionValidator = func( // they will not be executed for updates to existing records. var createOnlyValidators = []definitionValidator{} -// createOnlyValidators are executed on the update of existing descriptions only +// updateOnlyValidators are executed on the update of existing descriptions only // they will not be executed for new records. 
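+// (For example, validateSourcesNotRedefined below can only apply when an existing
+// collection is being patched; it is never run for newly created records.)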
var updateOnlyValidators = []definitionValidator{ validateSourcesNotRedefined, @@ -102,10 +121,7 @@ var updateOnlyValidators = []definitionValidator{ validatePolicyNotModified, validateIDNotZero, validateIDUnique, - validateIDExists, validateRootIDNotMutated, - validateSchemaVersionIDNotMutated, - validateCollectionNotRemoved, validateSingleVersionActive, validateSchemaNotAdded, validateSchemaFieldNotDeleted, @@ -113,6 +129,27 @@ var updateOnlyValidators = []definitionValidator{ validateFieldNotMoved, } +var schemaUpdateValidators = append( + append( + []definitionValidator{}, + updateOnlyValidators..., + ), + globalValidators..., +) + +var collectionUpdateValidators = append( + append( + append( + []definitionValidator{}, + updateOnlyValidators..., + ), + validateIDExists, + validateSchemaVersionIDNotMutated, + validateCollectionNotRemoved, + ), + globalValidators..., +) + // globalValidators are run on create and update of records. var globalValidators = []definitionValidator{ validateCollectionNameUnique, @@ -126,32 +163,23 @@ var globalValidators = []definitionValidator{ validateTypeSupported, validateTypeAndKindCompatible, validateFieldNotDuplicated, + validateSelfReferences, } -var updateValidators = append( - append([]definitionValidator{}, updateOnlyValidators...), - globalValidators..., -) - var createValidators = append( append([]definitionValidator{}, createOnlyValidators...), globalValidators..., ) -func (db *db) validateCollectionChanges( +func (db *db) validateSchemaUpdate( ctx context.Context, - oldCols []client.CollectionDescription, - newColsByID map[uint32]client.CollectionDescription, + oldDefinitions []client.CollectionDefinition, + newDefinitions []client.CollectionDefinition, ) error { - newCols := make([]client.CollectionDescription, 0, len(newColsByID)) - for _, col := range newColsByID { - newCols = append(newCols, col) - } + newState := newDefinitionState(newDefinitions) + oldState := newDefinitionState(oldDefinitions) - newState := newDefinitionState(newCols, map[string]client.SchemaDescription{}) - oldState := newDefinitionState(oldCols, map[string]client.SchemaDescription{}) - - for _, validator := range updateValidators { + for _, validator := range schemaUpdateValidators { err := validator(ctx, db, newState, oldState) if err != nil { return err @@ -161,38 +189,20 @@ func (db *db) validateCollectionChanges( return nil } -func (db *db) validateNewCollection( +func (db *db) validateCollectionChanges( ctx context.Context, - newDefinitions []client.CollectionDefinition, - oldDefinitions []client.CollectionDefinition, + oldCols []client.CollectionDescription, + newColsByID map[uint32]client.CollectionDescription, ) error { - newCollections := []client.CollectionDescription{} - newSchemasByID := map[string]client.SchemaDescription{} - - for _, def := range newDefinitions { - if len(def.Description.Fields) != 0 { - newCollections = append(newCollections, def.Description) - } - - newSchemasByID[def.Schema.VersionID] = def.Schema - } - - newState := newDefinitionState(newCollections, newSchemasByID) - - oldCollections := []client.CollectionDescription{} - oldSchemasByID := map[string]client.SchemaDescription{} - - for _, def := range oldDefinitions { - if len(def.Description.Fields) != 0 { - oldCollections = append(oldCollections, def.Description) - } - - oldSchemasByID[def.Schema.VersionID] = def.Schema + newCols := make([]client.CollectionDescription, 0, len(newColsByID)) + for _, col := range newColsByID { + newCols = append(newCols, col) } - oldState := 
newDefinitionState(oldCollections, oldSchemasByID) + newState := newDefinitionStateFromCols(newCols) + oldState := newDefinitionStateFromCols(oldCols) - for _, validator := range createValidators { + for _, validator := range collectionUpdateValidators { err := validator(ctx, db, newState, oldState) if err != nil { return err @@ -202,24 +212,15 @@ func (db *db) validateNewCollection( return nil } -func (db *db) validateSchemaUpdate( +func (db *db) validateNewCollection( ctx context.Context, - newSchemaByName map[string]client.SchemaDescription, - oldSchemaByName map[string]client.SchemaDescription, + newDefinitions []client.CollectionDefinition, + oldDefinitions []client.CollectionDefinition, ) error { - newSchemaByID := make(map[string]client.SchemaDescription, len(newSchemaByName)) - oldSchemaByID := make(map[string]client.SchemaDescription, len(oldSchemaByName)) - for _, schema := range newSchemaByName { - newSchemaByID[schema.VersionID] = schema - } - for _, schema := range oldSchemaByName { - oldSchemaByID[schema.VersionID] = schema - } - - newState := newDefinitionState([]client.CollectionDescription{}, newSchemaByID) - oldState := newDefinitionState([]client.CollectionDescription{}, oldSchemaByID) + newState := newDefinitionState(newDefinitions) + oldState := newDefinitionState(oldDefinitions) - for _, validator := range updateValidators { + for _, validator := range createValidators { err := validator(ctx, db, newState, oldState) if err != nil { return err @@ -245,10 +246,10 @@ func validateRelationPointsToValidKind( continue } - underlying := field.Kind.Value().Underlying() - _, ok := newState.definitionsByName[underlying] + definition := newState.definitionsByName[newCollection.Name.Value()] + _, ok := client.GetDefinition(newState.definitionCache, definition, field.Kind.Value()) if !ok { - return NewErrFieldKindNotFound(field.Name, underlying) + return NewErrFieldKindNotFound(field.Name, field.Kind.Value().String()) } } } @@ -259,10 +260,9 @@ func validateRelationPointsToValidKind( continue } - underlying := field.Kind.Underlying() - _, ok := newState.definitionsByName[underlying] + _, ok := client.GetDefinition(newState.definitionCache, client.CollectionDefinition{Schema: schema}, field.Kind) if !ok { - return NewErrFieldKindNotFound(field.Name, underlying) + return NewErrFieldKindNotFound(field.Name, field.Kind.String()) } } } @@ -305,8 +305,7 @@ func validateSecondaryFieldsPairUp( continue } - underlying := field.Kind.Value().Underlying() - otherDef, ok := newState.definitionsByName[underlying] + otherDef, ok := client.GetDefinition(newState.definitionCache, definition, field.Kind.Value()) if !ok { continue } @@ -322,13 +321,13 @@ func validateSecondaryFieldsPairUp( field.Name, ) if !ok { - return NewErrRelationMissingField(underlying, field.RelationName.Value()) + return NewErrRelationMissingField(otherDef.GetName(), field.RelationName.Value()) } _, ok = otherDef.Schema.GetFieldByName(otherField.Name) if !ok { // This secondary is paired with another secondary, which is invalid - return NewErrRelationMissingField(underlying, field.RelationName.Value()) + return NewErrRelationMissingField(otherDef.GetName(), field.RelationName.Value()) } } } @@ -367,8 +366,7 @@ func validateSingleSidePrimary( continue } - underlying := field.Kind.Underlying() - otherDef, ok := newState.definitionsByName[underlying] + otherDef, ok := client.GetDefinition(newState.definitionCache, definition, field.Kind) if !ok { continue } @@ -609,7 +607,11 @@ func validateRootIDNotMutated( } for _, newSchema 
:= range newState.schemaByName { - oldSchema := oldState.schemaByName[newSchema.Name] + oldSchema, ok := oldState.schemaByName[newSchema.Name] + if !ok { + continue + } + if newSchema.Root != oldSchema.Root { return NewErrSchemaRootDoesntMatch( newSchema.Name, @@ -843,6 +845,58 @@ func validateFieldNotDuplicated( return nil } +func validateSelfReferences( + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, +) error { + for _, schema := range newState.schemaByName { + for _, field := range schema.Fields { + if _, ok := field.Kind.(*client.SelfKind); ok { + continue + } + + otherDef, ok := client.GetDefinition( + newState.definitionCache, + client.CollectionDefinition{Schema: schema}, + field.Kind, + ) + if !ok { + continue + } + + if otherDef.Schema.Root == schema.Root { + return NewErrSelfReferenceWithoutSelf(field.Name) + } + } + } + + for _, col := range newState.collections { + for _, field := range col.Fields { + if !field.Kind.HasValue() { + continue + } + + if _, ok := field.Kind.Value().(*client.SelfKind); ok { + continue + } + + definition := newState.definitionsByName[col.Name.Value()] + otherDef, ok := client.GetDefinition(newState.definitionCache, definition, field.Kind.Value()) + if !ok { + continue + } + + if otherDef.Description.RootID == col.RootID { + return NewErrSelfReferenceWithoutSelf(field.Name) + } + } + } + + return nil +} + func validateSecondaryNotOnSchema( ctx context.Context, db *db, @@ -896,6 +950,12 @@ func validateSchemaNotAdded( oldState *definitionState, ) error { for _, newSchema := range newState.schemaByName { + if newSchema.Name == "" { + // continue, and allow a more appropriate rule to return a nicer error + // for the user + continue + } + if _, exists := oldState.schemaByName[newSchema.Name]; !exists { return NewErrAddSchemaWithPatch(newSchema.Name) } diff --git a/internal/db/description/collection.go b/internal/db/description/collection.go index 90ef594a39..20f652888e 100644 --- a/internal/db/description/collection.go +++ b/internal/db/description/collection.go @@ -97,6 +97,12 @@ func SaveCollection( return client.CollectionDescription{}, err } + rootKey := core.NewCollectionRootKey(desc.RootID, desc.ID) + err = txn.Systemstore().Put(ctx, rootKey.ToDS(), []byte{}) + if err != nil { + return client.CollectionDescription{}, err + } + return desc, nil } @@ -143,6 +149,49 @@ func GetCollectionByName( return GetCollectionByID(ctx, txn, id) } +func GetCollectionsByRoot( + ctx context.Context, + txn datastore.Txn, + root uint32, +) ([]client.CollectionDescription, error) { + rootKey := core.NewCollectionRootKey(root, 0) + + rootQuery, err := txn.Systemstore().Query(ctx, query.Query{ + Prefix: rootKey.ToString(), + KeysOnly: true, + }) + if err != nil { + return nil, NewErrFailedToCreateCollectionQuery(err) + } + + cols := []client.CollectionDescription{} + for res := range rootQuery.Next() { + if res.Error != nil { + if err := rootQuery.Close(); err != nil { + return nil, NewErrFailedToCloseSchemaQuery(err) + } + return nil, err + } + + rootKey, err := core.NewCollectionRootKeyFromString(string(res.Key)) + if err != nil { + if err := rootQuery.Close(); err != nil { + return nil, NewErrFailedToCloseSchemaQuery(err) + } + return nil, err + } + + col, err := GetCollectionByID(ctx, txn, rootKey.CollectionID) + if err != nil { + return nil, err + } + + cols = append(cols, col) + } + + return cols, nil +} + // GetCollectionsBySchemaVersionID returns all collections that use the given // schemaVersionID. 
// diff --git a/internal/db/description/schema.go b/internal/db/description/schema.go index 6f5a782ec7..f9d5935770 100644 --- a/internal/db/description/schema.go +++ b/internal/db/description/schema.go @@ -19,7 +19,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/internal/core" - "github.com/sourcenetwork/defradb/internal/core/cid" ) // CreateSchemaVersion creates and saves to the store a new schema version. @@ -35,31 +34,13 @@ func CreateSchemaVersion( return client.SchemaDescription{}, err } - scid, err := cid.NewSHA256CidV1(buf) - if err != nil { - return client.SchemaDescription{}, err - } - versionID := scid.String() - isNew := desc.Root == "" - - desc.VersionID = versionID - if isNew { - // If this is a new schema, the Root will match the version ID - desc.Root = versionID - } - - // Rebuild the json buffer to include the newly set ID properties - buf, err = json.Marshal(desc) - if err != nil { - return client.SchemaDescription{}, err - } - - key := core.NewSchemaVersionKey(versionID) + key := core.NewSchemaVersionKey(desc.VersionID) err = txn.Systemstore().Put(ctx, key.ToDS(), buf) if err != nil { return client.SchemaDescription{}, err } + isNew := desc.Root == desc.VersionID if !isNew { // We don't need to add a root key if this is the first version schemaVersionHistoryKey := core.NewSchemaRootKey(desc.Root, desc.VersionID) diff --git a/internal/db/errors.go b/internal/db/errors.go index e8a835f3f2..71f7978a1b 100644 --- a/internal/db/errors.go +++ b/internal/db/errors.go @@ -101,6 +101,7 @@ const ( errReplicatorCollections string = "failed to get collections for replicator" errReplicatorNotFound string = "replicator not found" errCanNotEncryptBuiltinField string = "can not encrypt build-in field" + errSelfReferenceWithoutSelf string = "must specify 'Self' kind for self referencing relations" ) var ( @@ -141,6 +142,7 @@ var ( ErrReplicatorCollections = errors.New(errReplicatorCollections) ErrReplicatorNotFound = errors.New(errReplicatorNotFound) ErrCanNotEncryptBuiltinField = errors.New(errCanNotEncryptBuiltinField) + ErrSelfReferenceWithoutSelf = errors.New(errSelfReferenceWithoutSelf) ) // NewErrFailedToGetHeads returns a new error indicating that the heads of a document @@ -650,3 +652,10 @@ func NewErrReplicatorDocID(inner error, kv ...errors.KV) error { func NewErrReplicatorCollections(inner error, kv ...errors.KV) error { return errors.Wrap(errReplicatorCollections, inner, kv...) 
} + +func NewErrSelfReferenceWithoutSelf(fieldName string) error { + return errors.New( + errSelfReferenceWithoutSelf, + errors.NewKV("Field", fieldName), + ) +} diff --git a/internal/db/schema.go b/internal/db/schema.go index d2aeb8bcb9..d9b9a4055c 100644 --- a/internal/db/schema.go +++ b/internal/db/schema.go @@ -23,7 +23,6 @@ import ( "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/internal/core" "github.com/sourcenetwork/defradb/internal/db/description" ) @@ -131,18 +130,15 @@ func (db *db) patchSchema( return err } - for _, schema := range newSchemaByName { - err := db.updateSchema( - ctx, - existingSchemaByName, - newSchemaByName, - schema, - migration, - setAsDefaultVersion, - ) - if err != nil { - return err - } + err = db.updateSchema( + ctx, + existingSchemaByName, + newSchemaByName, + migration, + setAsDefaultVersion, + ) + if err != nil { + return err } return db.loadSchema(ctx) @@ -332,192 +328,202 @@ func (db *db) updateSchema( ctx context.Context, existingSchemaByName map[string]client.SchemaDescription, proposedDescriptionsByName map[string]client.SchemaDescription, - schema client.SchemaDescription, migration immutable.Option[model.Lens], setAsActiveVersion bool, ) error { - previousSchema := existingSchemaByName[schema.Name] + newSchemas := []client.SchemaDescription{} + for _, schema := range proposedDescriptionsByName { + previousSchema := existingSchemaByName[schema.Name] - areEqual := areSchemasEqual(schema, previousSchema) - if areEqual { - return nil - } - - err := db.validateSchemaUpdate(ctx, proposedDescriptionsByName, existingSchemaByName) - if err != nil { - return err - } + previousFieldNames := make(map[string]struct{}, len(previousSchema.Fields)) + for _, field := range previousSchema.Fields { + previousFieldNames[field.Name] = struct{}{} + } - for _, field := range schema.Fields { - if field.Kind.IsObject() && !field.Kind.IsArray() { - idFieldName := field.Name + "_id" - if _, ok := schema.GetFieldByName(idFieldName); !ok { - schema.Fields = append(schema.Fields, client.SchemaFieldDescription{ - Name: idFieldName, - Kind: client.FieldKind_DocID, - }) + for i, field := range schema.Fields { + if _, existed := previousFieldNames[field.Name]; !existed && field.Typ == client.NONE_CRDT { + // If no CRDT Type has been provided, default to LWW_REGISTER. + schema.Fields[i].Typ = client.LWW_REGISTER } } - } - - previousFieldNames := make(map[string]struct{}, len(previousSchema.Fields)) - for _, field := range previousSchema.Fields { - previousFieldNames[field.Name] = struct{}{} - } - for i, field := range schema.Fields { - if _, existed := previousFieldNames[field.Name]; !existed && field.Typ == client.NONE_CRDT { - // If no CRDT Type has been provided, default to LWW_REGISTER. 
- field.Typ = client.LWW_REGISTER - schema.Fields[i] = field + for _, field := range schema.Fields { + if field.Kind.IsObject() && !field.Kind.IsArray() { + idFieldName := field.Name + "_id" + if _, ok := schema.GetFieldByName(idFieldName); !ok { + schema.Fields = append(schema.Fields, client.SchemaFieldDescription{ + Name: idFieldName, + Kind: client.FieldKind_DocID, + }) + } + } } - } - txn := mustGetContextTxn(ctx) - previousVersionID := schema.VersionID - schema, err = description.CreateSchemaVersion(ctx, txn, schema) - if err != nil { - return err + newSchemas = append(newSchemas, schema) } - // After creating the new schema version, we need to create new collection versions for - // any collection using the previous version. These will be inactive unless [setAsActiveVersion] - // is true. - - cols, err := description.GetCollectionsBySchemaVersionID(ctx, txn, previousVersionID) + err := setSchemaIDs(newSchemas) if err != nil { return err } - colSeq, err := db.getSequence(ctx, core.CollectionIDSequenceKey{}) - if err != nil { - return err + for _, schema := range newSchemas { + proposedDescriptionsByName[schema.Name] = schema } - for _, col := range cols { - previousID := col.ID + for _, schema := range proposedDescriptionsByName { + previousSchema := existingSchemaByName[schema.Name] - existingCols, err := description.GetCollectionsBySchemaVersionID(ctx, txn, schema.VersionID) + areEqual := areSchemasEqual(schema, previousSchema) + if areEqual { + continue + } + + txn := mustGetContextTxn(ctx) + schema, err = description.CreateSchemaVersion(ctx, txn, schema) if err != nil { return err } - // The collection version may exist before the schema version was created locally. This is - // because migrations for the globally known schema version may have been registered locally - // (typically to handle documents synced over P2P at higher versions) before the local schema - // was updated. We need to check for them now, and update them instead of creating new ones - // if they exist. - var isExistingCol bool - existingColLoop: - for _, existingCol := range existingCols { - sources := existingCol.CollectionSources() - for _, source := range sources { - // Make sure that this collection is the parent of the current [col], and not part of - // another collection set that happens to be using the same schema. - if source.SourceCollectionID == previousID { - if existingCol.RootID == client.OrphanRootID { - existingCol.RootID = col.RootID - } + // After creating the new schema version, we need to create new collection versions for + // any collection using the previous version. These will be inactive unless [setAsActiveVersion] + // is true. - fieldSeq, err := db.getSequence(ctx, core.NewFieldIDSequenceKey(existingCol.RootID)) - if err != nil { - return err - } + previousVersionID := existingSchemaByName[schema.Name].VersionID + cols, err := description.GetCollectionsBySchemaVersionID(ctx, txn, previousVersionID) + if err != nil { + return err + } - for _, globalField := range schema.Fields { - var fieldID client.FieldID - // We must check the source collection if the field already exists, and take its ID - // from there, otherwise the field must be generated by the sequence. 
- existingField, ok := col.GetFieldByName(globalField.Name) - if ok { - fieldID = existingField.ID - } else { - nextFieldID, err := fieldSeq.next(ctx) - if err != nil { - return err + existingCols, err := description.GetCollectionsBySchemaVersionID(ctx, txn, schema.VersionID) + if err != nil { + return err + } + + definitions := make([]client.CollectionDefinition, 0, len(cols)) + + for _, col := range cols { + previousID := col.ID + + // The collection version may exist before the schema version was created locally. This is + // because migrations for the globally known schema version may have been registered locally + // (typically to handle documents synced over P2P at higher versions) before the local schema + // was updated. We need to check for them now, and update them instead of creating new ones + // if they exist. + var isExistingCol bool + existingColLoop: + for _, existingCol := range existingCols { + sources := existingCol.CollectionSources() + for _, source := range sources { + // Make sure that this collection is the parent of the current [col], and not part of + // another collection set that happens to be using the same schema. + if source.SourceCollectionID == previousID { + if existingCol.RootID == client.OrphanRootID { + existingCol.RootID = col.RootID + } + + for _, globalField := range schema.Fields { + var fieldID client.FieldID + // We must check the source collection if the field already exists, and take its ID + // from there, otherwise the field must be generated by the sequence. + existingField, ok := col.GetFieldByName(globalField.Name) + if ok { + fieldID = existingField.ID } - fieldID = client.FieldID(nextFieldID) + + existingCol.Fields = append( + existingCol.Fields, + client.CollectionFieldDescription{ + Name: globalField.Name, + ID: fieldID, + }, + ) } - existingCol.Fields = append( - existingCol.Fields, + definitions = append(definitions, client.CollectionDefinition{ + Description: existingCol, + Schema: schema, + }) + + isExistingCol = true + break existingColLoop + } + } + } + + if !isExistingCol { + // Create any new collections without a name (inactive), if [setAsActiveVersion] is true + // they will be activated later along with any existing collection versions. + col.ID = 0 + col.Name = immutable.None[string]() + col.SchemaVersionID = schema.VersionID + col.Sources = []any{ + &client.CollectionSource{ + SourceCollectionID: previousID, + Transform: migration, + }, + } + + for _, globalField := range schema.Fields { + _, exists := col.GetFieldByName(globalField.Name) + if !exists { + col.Fields = append( + col.Fields, client.CollectionFieldDescription{ Name: globalField.Name, - ID: fieldID, }, ) } - existingCol, err = description.SaveCollection(ctx, txn, existingCol) - if err != nil { - return err - } - isExistingCol = true - break existingColLoop } - } - } - if !isExistingCol { - colID, err := colSeq.next(ctx) - if err != nil { - return err + definitions = append(definitions, client.CollectionDefinition{ + Description: col, + Schema: schema, + }) } + } - fieldSeq, err := db.getSequence(ctx, core.NewFieldIDSequenceKey(col.RootID)) - if err != nil { - return err - } + err = db.setCollectionIDs(ctx, definitions) + if err != nil { + return err + } - // Create any new collections without a name (inactive), if [setAsActiveVersion] is true - // they will be activated later along with any existing collection versions. 
- col.Name = immutable.None[string]() - col.ID = uint32(colID) - col.SchemaVersionID = schema.VersionID - col.Sources = []any{ - &client.CollectionSource{ - SourceCollectionID: previousID, - Transform: migration, - }, - } + allExistingCols, err := db.getCollections(ctx, client.CollectionFetchOptions{}) + if err != nil { + return err + } - for _, globalField := range schema.Fields { - _, exists := col.GetFieldByName(globalField.Name) - if !exists { - fieldID, err := fieldSeq.next(ctx) - if err != nil { - return err - } + oldDefs := make([]client.CollectionDefinition, 0, len(allExistingCols)) + for _, col := range allExistingCols { + oldDefs = append(oldDefs, col.Definition()) + } - col.Fields = append( - col.Fields, - client.CollectionFieldDescription{ - Name: globalField.Name, - ID: client.FieldID(fieldID), - }, - ) - } - } + err = db.validateSchemaUpdate(ctx, oldDefs, definitions) + if err != nil { + return err + } - _, err = description.SaveCollection(ctx, txn, col) + for _, def := range definitions { + _, err = description.SaveCollection(ctx, txn, def.Description) if err != nil { return err } if migration.HasValue() { - err = db.LensRegistry().SetMigration(ctx, col.ID, migration.Value()) + err = db.LensRegistry().SetMigration(ctx, def.Description.ID, migration.Value()) if err != nil { return err } } } - } - if setAsActiveVersion { - // activate collection versions using the new schema ID. This call must be made after - // all new collection versions have been saved. - err = db.setActiveSchemaVersion(ctx, schema.VersionID) - if err != nil { - return err + if setAsActiveVersion { + // activate collection versions using the new schema ID. This call must be made after + // all new collection versions have been saved. + err = db.setActiveSchemaVersion(ctx, schema.VersionID) + if err != nil { + return err + } } } @@ -536,6 +542,5 @@ func areSchemasEqual(this client.SchemaDescription, that client.SchemaDescriptio } return this.Name == that.Name && - this.Root == that.Root && - this.VersionID == that.VersionID + this.Root == that.Root } diff --git a/internal/db/schema_id.go b/internal/db/schema_id.go new file mode 100644 index 0000000000..86415d6894 --- /dev/null +++ b/internal/db/schema_id.go @@ -0,0 +1,393 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package db + +import ( + "encoding/json" + "fmt" + "slices" + "strings" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/internal/core/cid" +) + +const schemaSetDeliminator string = "-" + +// setSchemaIDs sets all ID fields on a schema description, mutating the input parameter. +// +// This includes RootID (if not already set), VersionID, and relational fields. +func setSchemaIDs(newSchemas []client.SchemaDescription) error { + // We need to group the inputs and then mutate them, so we temporarily + // map them to pointers. 
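+	// Illustrative example of the grouping below: schemas User and Dog that
+	// reference each other form a single circular set sharing one base set ID,
+	// while an unrelated Address schema forms its own single-element set.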
+	newSchemaPtrs := make([]*client.SchemaDescription, len(newSchemas))
+	for i := range newSchemas {
+		schema := newSchemas[i]
+		newSchemaPtrs[i] = &schema
+	}
+
+	schemaSets := getSchemaSets(newSchemaPtrs)
+
+	for _, schemaSet := range schemaSets {
+		setID, err := generateSetID(schemaSet)
+		if err != nil {
+			return err
+		}
+
+		assignIDs(setID, schemaSet)
+	}
+
+	for i := range newSchemaPtrs {
+		newSchemas[i] = *newSchemaPtrs[i]
+	}
+
+	substituteRelationFieldKinds(newSchemas)
+
+	return nil
+}
+
+// schemaRelations is a trimmed down [client.SchemaDescription] containing
+// only the information useful to the functions in this file.
+type schemaRelations struct {
+	// The name of this schema.
+	name string
+
+	// The schema names of the primary relations from this schema.
+	relations []string
+}
+
+// getSchemaSets groups schemas into sets.
+//
+// Most sets will contain a single schema; however, if a circular dependency chain is found,
+// all elements within that chain will be grouped together into a single set.
+//
+// For example, if User contains a relation *to* Dog, and Dog contains a relation *to*
+// User, they will be grouped into the same set.
+func getSchemaSets(newSchemas []*client.SchemaDescription) [][]*client.SchemaDescription {
+	schemasWithRelations := map[string]schemaRelations{}
+	for _, schema := range newSchemas {
+		relations := []string{}
+		for _, field := range schema.Fields {
+			switch kind := field.Kind.(type) {
+			case *client.NamedKind:
+				// We only need to worry about user-provided `NamedKind` relations in this scope.
+				// Other relation kinds can either not be circular, or are relative to the host.
+				relations = append(relations, kind.Name)
+			default:
+				// no-op
+			}
+		}
+
+		if len(relations) == 0 {
+			// If a schema is defined with no relations, then it is not relevant to this function
+			// and can be skipped.
+			continue
+		}
+
+		schemasWithRelations[schema.Name] = schemaRelations{
+			name:      schema.Name,
+			relations: relations,
+		}
+	}
+
+	changedInLoop := true
+	for changedInLoop {
+		// This loop strips out schemas from `schemasWithRelations` that do not form circular
+		// schema sets (e.g. User=>Dog=>User). This allows later logic, which figures out the
+		// exact paths that the circles form, to operate on a minimal set of data, reducing its
+		// cost and complexity.
+		//
+		// Some non-circular relations may still remain after this first pass, for example
+		// one-directional relations between two circles.
+		changedInLoop = false
+		for _, schema := range schemasWithRelations {
+			i := 0
+			relation := ""
+			deleteI := false
+			for i, relation = range schema.relations {
+				if _, ok := schemasWithRelations[relation]; !ok {
+					// If the related schema is not in `schemasWithRelations` it must have been removed
+					// in a previous iteration of the schemasWithRelations loop; this will have been
+					// done because it had no relevant remaining relations and thus could not be part
+					// of a circular schema set. If this is the case, this `relation` is also irrelevant
+					// here and can be removed, as it too cannot form part of a circular schema set.
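+					// (Illustrative: with User=>Dog=>User plus Dog=>Food, where Food itself
+					// has no relations, Food never enters `schemasWithRelations`, so this
+					// branch strips Dog's relation to Food, leaving only the User/Dog circle.)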
+					changedInLoop = true
+					deleteI = true
+					break
+				}
+			}
+
+			if deleteI {
+				old := schema.relations
+				schema.relations = make([]string, len(schema.relations)-1)
+				if i > 0 {
+					copy(schema.relations, old[:i])
+				}
+				copy(schema.relations[i:], old[i+1:])
+				schemasWithRelations[schema.name] = schema
+			}
+
+			if len(schema.relations) == 0 {
+				// If there are no relevant relations from this schema, remove the schema from
+				// `schemasWithRelations` as the schema cannot form part of a circular schema
+				// set.
+				changedInLoop = true
+				delete(schemasWithRelations, schema.name)
+				break
+			}
+		}
+	}
+
+	// If len(schemasWithRelations) > 0 here, there are circular relations.
+	// We then need to traverse them all to break the remaining set down into
+	// subsets of non-overlapping circles - we want this because the self-referencing
+	// set must be as small as possible, so that users providing multiple SDL/schema operations
+	// will result in the same IDs as a single large operation, provided that the individual schema
+	// declarations remain the same.
+
+	circularSchemaNames := make([]string, 0, len(schemasWithRelations))
+	for name := range schemasWithRelations {
+		circularSchemaNames = append(circularSchemaNames, name)
+	}
+	// The order in which ID indexes are assigned must be deterministic, so
+	// we must loop through a sorted slice instead of the map.
+	slices.Sort(circularSchemaNames)
+
+	var i int
+	schemaSetIds := map[string]int{}
+	schemasHit := map[string]struct{}{}
+	for _, name := range circularSchemaNames {
+		schema := schemasWithRelations[name]
+		mapSchemaSetIDs(&i, schema, schemaSetIds, schemasWithRelations, schemasHit)
+	}
+
+	schemaSetsByID := map[int][]*client.SchemaDescription{}
+	for _, schema := range newSchemas {
+		schemaSetId, ok := schemaSetIds[schema.Name]
+		if !ok {
+			// In most cases, if a schema does not form a circular set then it will not be in
+			// schemaSetIds, and we can assign it a new, unused setID.
+			i++
+			schemaSetId = i
+		}
+
+		schemaSet, ok := schemaSetsByID[schemaSetId]
+		if !ok {
+			schemaSet = make([]*client.SchemaDescription, 0, 1)
+		}
+
+		schemaSet = append(schemaSet, schema)
+		schemaSetsByID[schemaSetId] = schemaSet
+	}
+
+	schemaSets := [][]*client.SchemaDescription{}
+	for _, schemaSet := range schemaSetsByID {
+		schemaSets = append(schemaSets, schemaSet)
+	}
+
+	return schemaSets
+}
+
+// mapSchemaSetIDs recursively scans through a schema and its relations, assigning each schema to a temporary setID.
+//
+// If a set of schemas forms a circular dependency, all involved schemas will be assigned the same setID. Assigned setIDs
+// will be added to the input param `schemaSetIds`.
+//
+// This function will return when all descendants of the initial schema have been processed.
+//
+// Parameters:
+// - i: The largest setID so far assigned. This parameter is mutated by this function.
+// - schema: The current schema to process.
+// - schemaSetIds: The set of already assigned setIDs mapped by schema name - this parameter will be mutated by this
+// function.
+// - schemasRelationsBySchemaName: The full set of relevant schemas/relations mapped by schema name.
+// - schemasFullyProcessed: The set of schema names that have already been completely processed. If `schema` is in
+// this set the function will return. This parameter is mutated by this function.
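+//
+// Sketch of the expected behaviour (names illustrative): for two circles A=>B=>A
+// and C=>D=>C joined by a one-way relation A=>C, A and B end up sharing one
+// setID, while C and D share another.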
+func mapSchemaSetIDs(
+	i *int,
+	schema schemaRelations,
+	schemaSetIds map[string]int,
+	schemasRelationsBySchemaName map[string]schemaRelations,
+	schemasFullyProcessed map[string]struct{},
+) {
+	if _, ok := schemasFullyProcessed[schema.name]; ok {
+		// we've circled all the way through and already processed this schema
+		return
+	}
+	schemasFullyProcessed[schema.name] = struct{}{}
+
+	for _, relation := range schema.relations {
+		// If there is more than one relation we need to find out whether this particular
+		// relation loops back here - it might instead connect to a separate circle.
+		circlesBackHere := circlesBack(schema.name, relation, schemasRelationsBySchemaName, map[string]struct{}{})
+
+		var circleID int
+		if circlesBackHere {
+			if id, ok := schemaSetIds[relation]; ok {
+				// If the related schema has already been assigned a setID, use that
+				circleID = id
+			} else {
+				schemaSetId, ok := schemaSetIds[schema.name]
+				if !ok {
+					// If this schema has not already been assigned a setID, it must be
+					// the first discovered node in a new circle. Assign it a new setID;
+					// this will be picked up by its circle-forming descendants.
+					*i = *i + 1
+					schemaSetId = *i
+				}
+				schemaSetIds[schema.name] = schemaSetId
+				circleID = schemaSetId
+			}
+		} else {
+			// If this relation does not circle back to this schema, we increment `i`
+			// and assign the new value to the related schema *only*
+			*i = *i + 1
+			circleID = *i
+		}
+
+		schemaSetIds[relation] = circleID
+		mapSchemaSetIDs(
+			i,
+			schemasRelationsBySchemaName[relation],
+			schemaSetIds,
+			schemasRelationsBySchemaName,
+			schemasFullyProcessed,
+		)
+	}
+}
+
+// circlesBack returns true if any path from this schema through its relations (and their relations) circles
+// back to this schema.
+//
+// Parameters:
+// - originalSchemaName: The original start schema of this recursive check - this will not change as this function
+// recursively checks the relations on `currentSchemaName`.
+// - currentSchemaName: The current schema to process.
+// - schemasWithRelations: The full set of relevant schemas that may be referenced by this schema or its descendants.
+// - schemasFullyProcessed: The set of schema names that have already been completely processed. If `currentSchemaName`
+// is in this set the function will return false. This parameter is mutated by this function.
+func circlesBack(
+	originalSchemaName string,
+	currentSchemaName string,
+	schemasWithRelations map[string]schemaRelations,
+	schemasFullyProcessed map[string]struct{},
+) bool {
+	if _, ok := schemasFullyProcessed[currentSchemaName]; ok {
+		// we've circled all the way through and not found the original
+		return false
+	}
+
+	if currentSchemaName == originalSchemaName {
+		return true
+	}
+
+	schemasFullyProcessed[currentSchemaName] = struct{}{}
+
+	for _, relation := range schemasWithRelations[currentSchemaName].relations {
+		circlesBackToOriginal := circlesBack(originalSchemaName, relation, schemasWithRelations, schemasFullyProcessed)
+		if circlesBackToOriginal {
+			return true
+		}
+	}
+
+	return false
+}
+
+func generateSetID(schemaSet []*client.SchemaDescription) (string, error) {
+	// The schemas within each set must be in a deterministic order to ensure that
+	// their IDs are deterministic.
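+	// (E.g. a set containing User and Dog always hashes in the order [Dog, User],
+	// regardless of the order in which the schemas were declared.)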
+ slices.SortFunc(schemaSet, func(a, b *client.SchemaDescription) int {
+ return strings.Compare(a.Name, b.Name)
+ })
+
+ var cidComponents any
+ if len(schemaSet) == 1 {
+ cidComponents = schemaSet[0]
+ } else {
+ cidComponents = schemaSet
+ }
+
+ buf, err := json.Marshal(cidComponents)
+ if err != nil {
+ return "", err
+ }
+
+ scid, err := cid.NewSHA256CidV1(buf)
+ if err != nil {
+ return "", err
+ }
+ return scid.String(), nil
+}
+
+func assignIDs(baseID string, schemaSet []*client.SchemaDescription) {
+ if len(schemaSet) == 1 {
+ schemaSet[0].VersionID = baseID
+ if schemaSet[0].Root == "" {
+ // Schema Root remains constant through all versions; if it is set at this
+ // point, do not update it.
+ schemaSet[0].Root = baseID
+ }
+ return
+ }
+
+ for i := range schemaSet {
+ id := fmt.Sprintf("%s%v%v", baseID, schemaSetDeliminator, i)
+
+ schemaSet[i].VersionID = id
+ if schemaSet[i].Root == "" {
+ // Schema Root remains constant through all versions; if it is set at this
+ // point, do not update it.
+ schemaSet[i].Root = id
+ }
+ }
+}
+
+// substituteRelationFieldKinds substitutes relations defined using [NamedKind]s with their long-term
+// types.
+//
+// Using names to reference other types is unsuitable as the names may change over time.
+func substituteRelationFieldKinds(schemas []client.SchemaDescription) {
+ schemasByName := map[string]client.SchemaDescription{}
+ for _, schema := range schemas {
+ schemasByName[schema.Name] = schema
+ }
+
+ for i := range schemas {
+ rootComponents := strings.Split(schemas[i].Root, schemaSetDeliminator)
+ rootBase := rootComponents[0]
+
+ for j := range schemas[i].Fields {
+ switch kind := schemas[i].Fields[j].Kind.(type) {
+ case *client.NamedKind:
+ relationSchema, ok := schemasByName[kind.Name]
+ if !ok {
+ // Continue, and let the validation step pick up whatever went wrong later.
+ continue
+ }
+
+ relationRootComponents := strings.Split(relationSchema.Root, schemaSetDeliminator)
+ if relationRootComponents[0] == rootBase {
+ if len(relationRootComponents) == 2 {
+ schemas[i].Fields[j].Kind = client.NewSelfKind(relationRootComponents[1], kind.IsArray())
+ } else {
+ // If the relation root is simple and does not contain a relative index, then this relation
+ // must point to the host schema (self-reference, e.g. User=>User).
+ schemas[i].Fields[j].Kind = client.NewSelfKind("", kind.IsArray())
+ }
+ } else {
+ schemas[i].Fields[j].Kind = client.NewSchemaKind(relationSchema.Root, kind.IsArray())
+ }
+
+ default:
+ // no-op
+ }
+ }
+ }
+}
diff --git a/internal/planner/mapper/mapper.go b/internal/planner/mapper/mapper.go
index 7542449727..706f9235de 100644
--- a/internal/planner/mapper/mapper.go
+++ b/internal/planner/mapper/mapper.go
@@ -821,9 +821,17 @@ func getCollectionName(
 hostFieldDesc, parentHasField := parentCollection.Definition().GetFieldByName(selectRequest.Name)
 if parentHasField && hostFieldDesc.Kind.IsObject() {
+ def, found, err := client.GetDefinitionFromStore(ctx, store, parentCollection.Definition(), hostFieldDesc.Kind)
+ if err != nil {
+ return "", err
+ }
+ if !found {
+ return "", NewErrTypeNotFound(hostFieldDesc.Kind.String())
+ }
+
 // If this field exists on the parent, and it is a child object
 // then this collection name is the collection name of the child.
- return hostFieldDesc.Kind.Underlying(), nil + return def.GetName(), err } } diff --git a/internal/request/graphql/schema/collection.go b/internal/request/graphql/schema/collection.go index be361e0ec2..5f8d121b62 100644 --- a/internal/request/graphql/schema/collection.go +++ b/internal/request/graphql/schema/collection.go @@ -371,8 +371,8 @@ func fieldsFromAST( schemaFieldDescriptions := []client.SchemaFieldDescription{} collectionFieldDescriptions := []client.CollectionFieldDescription{} - if kind.IsObject() { - relationName, err := getRelationshipName(field, hostObjectName, kind.Underlying()) + if namedKind, ok := kind.(*client.NamedKind); ok { + relationName, err := getRelationshipName(field, hostObjectName, namedKind.Name) if err != nil { return nil, nil, err } @@ -544,7 +544,7 @@ func astTypeToKind(t ast.Type) (client.FieldKind, error) { case typeString: return client.FieldKind_NILLABLE_STRING_ARRAY, nil default: - return client.ObjectArrayKind(astTypeVal.Type.(*ast.Named).Name.Value), nil + return client.NewNamedKind(astTypeVal.Type.(*ast.Named).Name.Value, true), nil } } @@ -567,7 +567,7 @@ func astTypeToKind(t ast.Type) (client.FieldKind, error) { case typeJSON: return client.FieldKind_NILLABLE_JSON, nil default: - return client.ObjectKind(astTypeVal.Name.Value), nil + return client.NewNamedKind(astTypeVal.Name.Value, false), nil } case *ast.NonNull: @@ -644,7 +644,12 @@ func finalizeRelations( } for _, field := range definition.Description.Fields { - if !field.Kind.HasValue() || !field.Kind.Value().IsObject() || field.Kind.Value().IsArray() { + if !field.Kind.HasValue() { + continue + } + + namedKind, ok := field.Kind.Value().(*client.NamedKind) + if !ok || namedKind.IsArray() { // We only need to process the primary side of a relation here, if the field is not a relation // or if it is an array, we can skip it. continue @@ -654,7 +659,7 @@ func finalizeRelations( for _, otherDef := range definitions { // Check the 'other' schema name, there can only be a one-one mapping in an SDL // appart from embedded, which will be schema only. - if otherDef.Schema.Name == field.Kind.Value().Underlying() { + if otherDef.Schema.Name == namedKind.Name { otherColDefinition = immutable.Some(otherDef) break } diff --git a/internal/request/graphql/schema/descriptions_test.go b/internal/request/graphql/schema/descriptions_test.go deleted file mode 100644 index 320bef158a..0000000000 --- a/internal/request/graphql/schema/descriptions_test.go +++ /dev/null @@ -1,697 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package schema - -import ( - "context" - "testing" - - "github.com/sourcenetwork/immutable" - "github.com/stretchr/testify/assert" - - "github.com/sourcenetwork/defradb/client" -) - -func TestSingleSimpleType(t *testing.T) { - cases := []descriptionTestCase{ - { - description: "Single simple type", - sdl: ` - type User { - name: String - age: Int - verified: Boolean - } - `, - targetDescs: []client.CollectionDefinition{ - { - Description: client.CollectionDescription{ - Name: immutable.Some("User"), - Indexes: []client.IndexDescription{}, - Fields: []client.CollectionFieldDescription{ - { - Name: "_docID", - }, - { - Name: "age", - }, - { - Name: "name", - }, - { - Name: "verified", - }, - }, - }, - Schema: client.SchemaDescription{ - Name: "User", - Fields: []client.SchemaFieldDescription{ - { - Name: "_docID", - Kind: client.FieldKind_DocID, - Typ: client.NONE_CRDT, - }, - { - Name: "age", - Kind: client.FieldKind_NILLABLE_INT, - Typ: client.LWW_REGISTER, - }, - { - Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, - }, - { - Name: "verified", - Kind: client.FieldKind_NILLABLE_BOOL, - Typ: client.LWW_REGISTER, - }, - }, - }, - }, - }, - }, - { - description: "Multiple simple types", - sdl: ` - type User { - name: String - age: Int - verified: Boolean - } - - type Author { - name: String - publisher: String - rating: Float - } - `, - targetDescs: []client.CollectionDefinition{ - { - Description: client.CollectionDescription{ - Name: immutable.Some("User"), - Indexes: []client.IndexDescription{}, - Fields: []client.CollectionFieldDescription{ - { - Name: "_docID", - }, - { - Name: "age", - }, - { - Name: "name", - }, - { - Name: "verified", - }, - }, - }, - Schema: client.SchemaDescription{ - Name: "User", - Fields: []client.SchemaFieldDescription{ - { - Name: "_docID", - Kind: client.FieldKind_DocID, - Typ: client.NONE_CRDT, - }, - { - Name: "age", - Kind: client.FieldKind_NILLABLE_INT, - Typ: client.LWW_REGISTER, - }, - { - Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, - }, - { - Name: "verified", - Kind: client.FieldKind_NILLABLE_BOOL, - Typ: client.LWW_REGISTER, - }, - }, - }, - }, - { - Description: client.CollectionDescription{ - Name: immutable.Some("Author"), - Indexes: []client.IndexDescription{}, - Fields: []client.CollectionFieldDescription{ - { - Name: "_docID", - }, - { - Name: "name", - }, - { - Name: "publisher", - }, - { - Name: "rating", - }, - }, - }, - Schema: client.SchemaDescription{ - Name: "Author", - Fields: []client.SchemaFieldDescription{ - { - Name: "_docID", - Kind: client.FieldKind_DocID, - Typ: client.NONE_CRDT, - }, - { - Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, - }, - { - Name: "publisher", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, - }, - { - Name: "rating", - Kind: client.FieldKind_NILLABLE_FLOAT, - Typ: client.LWW_REGISTER, - }, - }, - }, - }, - }, - }, - { - description: "Multiple types with relations (one-to-one)", - sdl: ` - type Book { - name: String - rating: Float - author: Author - } - - type Author { - name: String - age: Int - published: Book @primary - } - `, - targetDescs: []client.CollectionDefinition{ - { - Description: client.CollectionDescription{ - Name: immutable.Some("Book"), - Indexes: []client.IndexDescription{}, - Fields: []client.CollectionFieldDescription{ - { - Name: "_docID", - }, - { - Name: "author", - Kind: immutable.Some[client.FieldKind](client.ObjectKind("Author")), - RelationName: 
immutable.Some("author_book"), - }, - { - Name: "author_id", - Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), - RelationName: immutable.Some("author_book"), - }, - { - Name: "name", - }, - { - Name: "rating", - }, - }, - }, - Schema: client.SchemaDescription{ - Name: "Book", - Fields: []client.SchemaFieldDescription{ - { - Name: "_docID", - Kind: client.FieldKind_DocID, - Typ: client.NONE_CRDT, - }, - { - Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, - }, - { - Name: "rating", - Kind: client.FieldKind_NILLABLE_FLOAT, - Typ: client.LWW_REGISTER, - }, - }, - }, - }, - { - Description: client.CollectionDescription{ - Name: immutable.Some("Author"), - Indexes: []client.IndexDescription{}, - Fields: []client.CollectionFieldDescription{ - { - Name: "_docID", - }, - { - Name: "age", - }, - { - Name: "name", - }, - { - Name: "published", - Kind: immutable.Some[client.FieldKind](client.ObjectKind("Book")), - RelationName: immutable.Some("author_book"), - }, - { - Name: "published_id", - Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), - RelationName: immutable.Some("author_book"), - }, - }, - }, - Schema: client.SchemaDescription{ - Name: "Author", - Fields: []client.SchemaFieldDescription{ - { - Name: "_docID", - Kind: client.FieldKind_DocID, - Typ: client.NONE_CRDT, - }, - { - Name: "age", - Kind: client.FieldKind_NILLABLE_INT, - Typ: client.LWW_REGISTER, - }, - { - Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, - }, - { - Name: "published", - Kind: client.ObjectKind("Book"), - Typ: client.LWW_REGISTER, - }, - { - Name: "published_id", - Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, - }, - }, - }, - }, - }, - }, - { - description: "Multiple types with relations (one-to-one)", - sdl: ` - type Book { - name: String - rating: Float - author: Author @relation(name:"book_authors") - } - - type Author { - name: String - age: Int - published: Book @relation(name:"book_authors") @primary - } - `, - targetDescs: []client.CollectionDefinition{ - { - Description: client.CollectionDescription{ - Name: immutable.Some("Book"), - Indexes: []client.IndexDescription{}, - Fields: []client.CollectionFieldDescription{ - { - Name: "_docID", - }, - { - Name: "author", - Kind: immutable.Some[client.FieldKind](client.ObjectKind("Author")), - RelationName: immutable.Some("book_authors"), - }, - { - Name: "author_id", - Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), - RelationName: immutable.Some("book_authors"), - }, - { - Name: "name", - }, - { - Name: "rating", - }, - }, - }, - Schema: client.SchemaDescription{ - Name: "Book", - Fields: []client.SchemaFieldDescription{ - { - Name: "_docID", - Kind: client.FieldKind_DocID, - Typ: client.NONE_CRDT, - }, - { - Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, - }, - { - Name: "rating", - Kind: client.FieldKind_NILLABLE_FLOAT, - Typ: client.LWW_REGISTER, - }, - }, - }, - }, - { - Description: client.CollectionDescription{ - Name: immutable.Some("Author"), - Indexes: []client.IndexDescription{}, - Fields: []client.CollectionFieldDescription{ - { - Name: "_docID", - }, - { - Name: "age", - }, - { - Name: "name", - }, - { - Name: "published", - Kind: immutable.Some[client.FieldKind](client.ObjectKind("Book")), - RelationName: immutable.Some("book_authors"), - }, - { - Name: "published_id", - Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), - RelationName: immutable.Some("book_authors"), - }, - 
}, - }, - Schema: client.SchemaDescription{ - Name: "Author", - Fields: []client.SchemaFieldDescription{ - { - Name: "_docID", - Kind: client.FieldKind_DocID, - Typ: client.NONE_CRDT, - }, - { - Name: "age", - Kind: client.FieldKind_NILLABLE_INT, - Typ: client.LWW_REGISTER, - }, - { - Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, - }, - { - Name: "published", - Kind: client.ObjectKind("Book"), - Typ: client.LWW_REGISTER, - }, - { - Name: "published_id", - Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, - }, - }, - }, - }, - }, - }, - { - description: "Multiple types with relations (one-to-one) with directive", - sdl: ` - type Book { - name: String - rating: Float - author: Author @primary - } - - type Author { - name: String - age: Int - published: Book - } - `, - targetDescs: []client.CollectionDefinition{ - { - Description: client.CollectionDescription{ - Name: immutable.Some("Book"), - Indexes: []client.IndexDescription{}, - Fields: []client.CollectionFieldDescription{ - { - Name: "_docID", - }, - { - Name: "author", - Kind: immutable.Some[client.FieldKind](client.ObjectKind("Author")), - RelationName: immutable.Some("author_book"), - }, - { - Name: "author_id", - Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), - RelationName: immutable.Some("author_book"), - }, - { - Name: "name", - }, - { - Name: "rating", - }, - }, - }, - Schema: client.SchemaDescription{ - Name: "Book", - Fields: []client.SchemaFieldDescription{ - { - Name: "_docID", - Kind: client.FieldKind_DocID, - Typ: client.NONE_CRDT, - }, - { - Name: "author", - Kind: client.ObjectKind("Author"), - Typ: client.LWW_REGISTER, - }, - { - Name: "author_id", - Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, - }, - { - Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, - }, - { - Name: "rating", - Kind: client.FieldKind_NILLABLE_FLOAT, - Typ: client.LWW_REGISTER, - }, - }, - }, - }, - { - Description: client.CollectionDescription{ - Name: immutable.Some("Author"), - Indexes: []client.IndexDescription{}, - Fields: []client.CollectionFieldDescription{ - { - Name: "_docID", - }, - { - Name: "age", - }, - { - Name: "name", - }, - { - Name: "published", - Kind: immutable.Some[client.FieldKind](client.ObjectKind("Book")), - RelationName: immutable.Some("author_book"), - }, - { - Name: "published_id", - Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), - RelationName: immutable.Some("author_book"), - }, - }, - }, - Schema: client.SchemaDescription{ - Name: "Author", - Fields: []client.SchemaFieldDescription{ - { - Name: "_docID", - Kind: client.FieldKind_DocID, - Typ: client.NONE_CRDT, - }, - { - Name: "age", - Kind: client.FieldKind_NILLABLE_INT, - Typ: client.LWW_REGISTER, - }, - { - Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, - }, - }, - }, - }, - }, - }, - { - description: "Multiple types with relations (one-to-many)", - sdl: ` - type Book { - name: String - rating: Float - author: Author - } - - type Author { - name: String - age: Int - published: [Book] - } - `, - targetDescs: []client.CollectionDefinition{ - { - Description: client.CollectionDescription{ - Name: immutable.Some("Book"), - Indexes: []client.IndexDescription{}, - Fields: []client.CollectionFieldDescription{ - { - Name: "_docID", - }, - { - Name: "author", - Kind: immutable.Some[client.FieldKind](client.ObjectKind("Author")), - RelationName: immutable.Some("author_book"), - }, - { - Name: "author_id", - Kind: 
immutable.Some[client.FieldKind](client.FieldKind_DocID), - RelationName: immutable.Some("author_book"), - }, - { - Name: "name", - }, - { - Name: "rating", - }, - }, - }, - Schema: client.SchemaDescription{ - Name: "Book", - Fields: []client.SchemaFieldDescription{ - { - Name: "_docID", - Kind: client.FieldKind_DocID, - Typ: client.NONE_CRDT, - }, - { - Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, - }, - { - Name: "rating", - Kind: client.FieldKind_NILLABLE_FLOAT, - Typ: client.LWW_REGISTER, - }, - { - Name: "author", - Kind: client.ObjectKind("Author"), - Typ: client.LWW_REGISTER, - }, - { - Name: "author_id", - Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, - }, - }, - }, - }, - { - Description: client.CollectionDescription{ - Name: immutable.Some("Author"), - Indexes: []client.IndexDescription{}, - Fields: []client.CollectionFieldDescription{ - { - Name: "_docID", - }, - { - Name: "age", - }, - { - Name: "name", - }, - { - Name: "published", - Kind: immutable.Some[client.FieldKind](client.ObjectArrayKind("Book")), - RelationName: immutable.Some("author_book"), - }, - }, - }, - Schema: client.SchemaDescription{ - Name: "Author", - Fields: []client.SchemaFieldDescription{ - { - Name: "_docID", - Kind: client.FieldKind_DocID, - Typ: client.NONE_CRDT, - }, - { - Name: "age", - Kind: client.FieldKind_NILLABLE_INT, - Typ: client.LWW_REGISTER, - }, - { - Name: "name", - Kind: client.FieldKind_NILLABLE_STRING, - Typ: client.LWW_REGISTER, - }, - }, - }, - }, - }, - }, - } - - for _, test := range cases { - runCreateDescriptionTest(t, test) - } -} - -func runCreateDescriptionTest(t *testing.T, testcase descriptionTestCase) { - ctx := context.Background() - - descs, err := FromString(ctx, testcase.sdl) - assert.NoError(t, err, testcase.description) - assert.Equal(t, len(descs), len(testcase.targetDescs), testcase.description) - - for i, d := range descs { - assert.Equal(t, testcase.targetDescs[i].Description, d.Description, testcase.description) - assert.Equal(t, testcase.targetDescs[i].Schema, d.Schema, testcase.description) - } -} - -type descriptionTestCase struct { - description string - sdl string - targetDescs []client.CollectionDefinition -} diff --git a/internal/request/graphql/schema/generate.go b/internal/request/graphql/schema/generate.go index d2d4841408..b851261c01 100644 --- a/internal/request/graphql/schema/generate.go +++ b/internal/request/graphql/schema/generate.go @@ -416,6 +416,8 @@ func (g *Generator) createExpandedFieldList( func (g *Generator) buildTypes( collections []client.CollectionDefinition, ) ([]*gql.Object, error) { + definitionCache := client.NewDefinitionCache(collections) + // @todo: Check for duplicate named defined types in the TypeMap // get all the defined types from the AST objs := make([]*gql.Object, 0) @@ -467,11 +469,10 @@ func (g *Generator) buildTypes( } var ttype gql.Type - if field.Kind.IsObject() { - var ok bool - ttype, ok = g.manager.schema.TypeMap()[field.Kind.Underlying()] + if otherDef, ok := client.GetDefinition(definitionCache, collection, field.Kind); ok { + ttype, ok = g.manager.schema.TypeMap()[otherDef.GetName()] if !ok { - return nil, NewErrTypeNotFound(field.Kind.Underlying()) + return nil, NewErrTypeNotFound(field.Kind.String()) } if field.Kind.IsArray() { ttype = gql.NewList(ttype) @@ -480,7 +481,7 @@ func (g *Generator) buildTypes( var ok bool ttype, ok = fieldKindToGQLType[field.Kind] if !ok { - return nil, NewErrTypeNotFound(fmt.Sprint(field.Kind)) + return nil, 
NewErrTypeNotFound(field.Kind.String()) } } diff --git a/tests/gen/gen_auto.go b/tests/gen/gen_auto.go index 487558e934..5b00d5f5b2 100644 --- a/tests/gen/gen_auto.go +++ b/tests/gen/gen_auto.go @@ -38,7 +38,7 @@ func AutoGenerateFromSDL(gqlSDL string, options ...Option) ([]GeneratedDoc, erro if err != nil { return nil, err } - typeDefs, err := parseSDL(gqlSDL) + typeDefs, err := ParseSDL(gqlSDL) if err != nil { return nil, err } @@ -119,9 +119,15 @@ func (g *randomDocGenerator) getMaxTotalDemand() int { } // getNextPrimaryDocID returns the docID of the next primary document to be used as a relation. -func (g *randomDocGenerator) getNextPrimaryDocID(secondaryType string, field *client.FieldDefinition) string { +func (g *randomDocGenerator) getNextPrimaryDocID( + host client.CollectionDefinition, + secondaryType string, + field *client.FieldDefinition, +) string { ind := g.configurator.usageCounter.getNextTypeIndForField(secondaryType, field) - return g.generatedDocs[field.Kind.Underlying()][ind].docID + otherDef, _ := client.GetDefinition(g.configurator.definitionCache, host, field.Kind) + + return g.generatedDocs[otherDef.GetName()][ind].docID } func (g *randomDocGenerator) generateRandomDocs(order []string) error { @@ -141,9 +147,9 @@ func (g *randomDocGenerator) generateRandomDocs(order []string) error { if field.IsRelation() { if field.IsPrimaryRelation && field.Kind.IsObject() { if strings.HasSuffix(field.Name, request.RelatedObjectID) { - newDoc[field.Name] = g.getNextPrimaryDocID(typeName, &field) + newDoc[field.Name] = g.getNextPrimaryDocID(typeDef, typeName, &field) } else { - newDoc[field.Name+request.RelatedObjectID] = g.getNextPrimaryDocID(typeName, &field) + newDoc[field.Name+request.RelatedObjectID] = g.getNextPrimaryDocID(typeDef, typeName, &field) } } } else { @@ -210,7 +216,8 @@ func (g *randomDocGenerator) getValueGenerator(fieldKind client.FieldKind, field func validateDefinitions(definitions []client.CollectionDefinition) error { colIDs := make(map[uint32]struct{}) colNames := make(map[string]struct{}) - fieldRefs := []string{} + defCache := client.NewDefinitionCache(definitions) + for _, def := range definitions { if def.Description.Name.Value() == "" { return NewErrIncompleteColDefinition("description name is empty") @@ -226,17 +233,16 @@ func validateDefinitions(definitions []client.CollectionDefinition) error { return NewErrIncompleteColDefinition("field name is empty") } if field.Kind.IsObject() { - fieldRefs = append(fieldRefs, field.Kind.Underlying()) + _, found := client.GetDefinition(defCache, def, field.Kind) + if !found { + return NewErrIncompleteColDefinition("field schema references unknown collection") + } } } colNames[def.Description.Name.Value()] = struct{}{} colIDs[def.Description.ID] = struct{}{} } - for _, ref := range fieldRefs { - if _, ok := colNames[ref]; !ok { - return NewErrIncompleteColDefinition("field schema references unknown collection") - } - } + if len(colIDs) != len(definitions) { return NewErrIncompleteColDefinition("duplicate collection IDs") } diff --git a/tests/gen/gen_auto_configurator.go b/tests/gen/gen_auto_configurator.go index ec8c1ea881..ce567fd385 100644 --- a/tests/gen/gen_auto_configurator.go +++ b/tests/gen/gen_auto_configurator.go @@ -15,6 +15,8 @@ import ( "math/rand" "time" + "github.com/sourcenetwork/immutable" + "github.com/sourcenetwork/defradb/client" ) @@ -35,7 +37,9 @@ func (d typeDemand) getAverage() int { // demand for each type, setting up the relation usage counters, and setting up // the random seed. 
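+// The definitionCache allows relation field kinds (for example a
+// *client.CollectionKind) to be resolved to the related collection's
+// definition via client.GetDefinition, without re-deriving names from kinds.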
type docsGenConfigurator struct { - types map[string]client.CollectionDefinition + types map[string]client.CollectionDefinition + definitionCache client.DefinitionCache + config configsMap primaryGraph map[string][]string typesOrder []string @@ -44,6 +48,8 @@ type docsGenConfigurator struct { random *rand.Rand } +type collectionID = uint32 + // typeUsageCounters is a map of primary type to secondary type to field name to // relation usage. This is used to keep track of the usage of each relation. // Each foreign field has a tracker that keeps track of which and how many of primary @@ -51,13 +57,13 @@ type docsGenConfigurator struct { // number of documents generated for each primary type is within the range of the // demand for that type and to guarantee a uniform distribution of the documents. type typeUsageCounters struct { - m map[string]map[string]map[string]*relationUsage + m map[collectionID]map[string]map[string]*relationUsage random *rand.Rand } func newTypeUsageCounter(random *rand.Rand) typeUsageCounters { return typeUsageCounters{ - m: make(map[string]map[string]map[string]*relationUsage), + m: make(map[collectionID]map[string]map[string]*relationUsage), random: random, } } @@ -68,21 +74,35 @@ func (c *typeUsageCounters) addRelationUsage( field client.FieldDefinition, minPerDoc, maxPerDoc, numDocs int, ) { - primaryType := field.Kind.Underlying() - if _, ok := c.m[primaryType]; !ok { - c.m[primaryType] = make(map[string]map[string]*relationUsage) + var collectionRoot uint32 + switch kind := field.Kind.(type) { + case *client.CollectionKind: + collectionRoot = kind.Root + + default: + return + } + + if _, ok := c.m[collectionRoot]; !ok { + c.m[collectionRoot] = make(map[string]map[string]*relationUsage) } - if _, ok := c.m[primaryType][secondaryType]; !ok { - c.m[primaryType][secondaryType] = make(map[string]*relationUsage) + if _, ok := c.m[collectionRoot][secondaryType]; !ok { + c.m[collectionRoot][secondaryType] = make(map[string]*relationUsage) } - if _, ok := c.m[primaryType][secondaryType][field.Name]; !ok { - c.m[primaryType][secondaryType][field.Name] = newRelationUsage(minPerDoc, maxPerDoc, numDocs, c.random) + if _, ok := c.m[collectionRoot][secondaryType][field.Name]; !ok { + c.m[collectionRoot][secondaryType][field.Name] = newRelationUsage(minPerDoc, maxPerDoc, numDocs, c.random) } } // getNextTypeIndForField returns the next index to be used for a foreign field. 
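+// Note: this assumes the field's Kind is a *client.CollectionKind; for any
+// other kind, collectionRoot stays at its zero value.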
func (c *typeUsageCounters) getNextTypeIndForField(secondaryType string, field *client.FieldDefinition) int { - current := c.m[field.Kind.Underlying()][secondaryType][field.Name] + var collectionRoot uint32 + switch kind := field.Kind.(type) { + case *client.CollectionKind: + collectionRoot = kind.Root + } + + current := c.m[collectionRoot][secondaryType][field.Name] return current.useNextDocIDIndex() } @@ -156,10 +176,16 @@ func (u *relationUsage) allocateIndexes() { } func newDocGenConfigurator(types map[string]client.CollectionDefinition, config configsMap) docsGenConfigurator { + defs := make([]client.CollectionDefinition, 0, len(types)) + for _, def := range types { + defs = append(defs, def) + } + return docsGenConfigurator{ - types: types, - config: config, - docsDemand: make(map[string]typeDemand), + types: types, + definitionCache: client.NewDefinitionCache(defs), + config: config, + docsDemand: make(map[string]typeDemand), } } @@ -185,7 +211,7 @@ func (g *docsGenConfigurator) Configure(options ...Option) error { g.usageCounter = newTypeUsageCounter(g.random) - g.primaryGraph = getRelationGraph(g.types) + g.primaryGraph = g.getRelationGraph(g.types) g.typesOrder = getTopologicalOrder(g.primaryGraph, g.types) if len(g.docsDemand) == 0 { @@ -252,7 +278,10 @@ func (g *docsGenConfigurator) allocateUsageCounterIndexes() { demand.min = max g.docsDemand[typeName] = demand } - for _, usage := range g.usageCounter.m[typeName] { + + def := g.types[typeName] + + for _, usage := range g.usageCounter.m[def.Description.RootID] { for _, field := range usage { if field.numAvailablePrimaryDocs == math.MaxInt { field.numAvailablePrimaryDocs = max @@ -272,8 +301,16 @@ func (g *docsGenConfigurator) getDemandForPrimaryType( primaryGraph map[string][]string, ) (typeDemand, error) { primaryTypeDef := g.types[primaryType] + secondaryTypeDef := g.types[secondaryType] + for _, field := range primaryTypeDef.GetFields() { - if field.Kind.IsObject() && field.Kind.Underlying() == secondaryType { + var otherRoot immutable.Option[uint32] + switch kind := field.Kind.(type) { + case *client.CollectionKind: + otherRoot = immutable.Some(kind.Root) + } + + if otherRoot.HasValue() && otherRoot.Value() == secondaryTypeDef.Description.RootID { primaryDemand := typeDemand{min: secondaryDemand.min, max: secondaryDemand.max} minPerDoc, maxPerDoc := 1, 1 @@ -312,7 +349,7 @@ func (g *docsGenConfigurator) getDemandForPrimaryType( return typeDemand{}, NewErrCanNotSupplyTypeDemand(primaryType) } g.docsDemand[primaryType] = primaryDemand - g.initRelationUsages(field.Kind.Underlying(), primaryType, minPerDoc, maxPerDoc) + g.initRelationUsages(secondaryTypeDef.GetName(), primaryType, minPerDoc, maxPerDoc) } } return secondaryDemand, nil @@ -344,7 +381,8 @@ func (g *docsGenConfigurator) calculateDemandForSecondaryTypes( newSecDemand := typeDemand{min: primaryDocDemand.min, max: primaryDocDemand.max} minPerDoc, maxPerDoc := 1, 1 - curSecDemand, hasSecDemand := g.docsDemand[field.Kind.Underlying()] + otherType, _ := client.GetDefinition(g.definitionCache, typeDef, field.Kind) + curSecDemand, hasSecDemand := g.docsDemand[otherType.GetName()] if field.Kind.IsArray() { fieldConf := g.config.ForField(typeName, field.Name) @@ -368,23 +406,23 @@ func (g *docsGenConfigurator) calculateDemandForSecondaryTypes( if hasSecDemand { if curSecDemand.min < newSecDemand.min || curSecDemand.max > newSecDemand.max { - return NewErrCanNotSupplyTypeDemand(field.Kind.Underlying()) + return NewErrCanNotSupplyTypeDemand(otherType.GetName()) } } else { - 
g.docsDemand[field.Kind.Underlying()] = newSecDemand + g.docsDemand[otherType.GetName()] = newSecDemand } - g.initRelationUsages(field.Kind.Underlying(), typeName, minPerDoc, maxPerDoc) + g.initRelationUsages(otherType.GetName(), typeName, minPerDoc, maxPerDoc) - err := g.calculateDemandForSecondaryTypes(field.Kind.Underlying(), primaryGraph) + err := g.calculateDemandForSecondaryTypes(otherType.GetName(), primaryGraph) if err != nil { return err } - for _, primaryTypeName := range primaryGraph[field.Kind.Underlying()] { + for _, primaryTypeName := range primaryGraph[otherType.GetName()] { if _, ok := g.docsDemand[primaryTypeName]; !ok { primaryDemand, err := g.getDemandForPrimaryType( primaryTypeName, - field.Kind.Underlying(), + otherType.GetName(), newSecDemand, primaryGraph, ) @@ -401,15 +439,22 @@ func (g *docsGenConfigurator) calculateDemandForSecondaryTypes( func (g *docsGenConfigurator) initRelationUsages(secondaryType, primaryType string, minPerDoc, maxPerDoc int) { secondaryTypeDef := g.types[secondaryType] + primaryTypeDef := g.types[primaryType] for _, secondaryTypeField := range secondaryTypeDef.GetFields() { - if secondaryTypeField.Kind.Underlying() == primaryType { + var otherRoot immutable.Option[uint32] + switch kind := secondaryTypeField.Kind.(type) { + case *client.CollectionKind: + otherRoot = immutable.Some(kind.Root) + } + + if otherRoot.HasValue() && otherRoot.Value() == primaryTypeDef.Description.RootID { g.usageCounter.addRelationUsage(secondaryType, secondaryTypeField, minPerDoc, maxPerDoc, g.docsDemand[primaryType].getAverage()) } } } -func getRelationGraph(types map[string]client.CollectionDefinition) map[string][]string { +func (g *docsGenConfigurator) getRelationGraph(types map[string]client.CollectionDefinition) map[string][]string { primaryGraph := make(map[string][]string) appendUnique := func(slice []string, val string) []string { @@ -424,10 +469,12 @@ func getRelationGraph(types map[string]client.CollectionDefinition) map[string][ for typeName, typeDef := range types { for _, field := range typeDef.GetFields() { if field.Kind.IsObject() { + otherDef, _ := client.GetDefinition(g.definitionCache, typeDef, field.Kind) + if field.IsPrimaryRelation { - primaryGraph[typeName] = appendUnique(primaryGraph[typeName], field.Kind.Underlying()) + primaryGraph[typeName] = appendUnique(primaryGraph[typeName], otherDef.GetName()) } else { - primaryGraph[field.Kind.Underlying()] = appendUnique(primaryGraph[field.Kind.Underlying()], typeName) + primaryGraph[otherDef.GetName()] = appendUnique(primaryGraph[otherDef.GetName()], typeName) } } } diff --git a/tests/gen/gen_auto_test.go b/tests/gen/gen_auto_test.go index 02cb45331b..612b244030 100644 --- a/tests/gen/gen_auto_test.go +++ b/tests/gen/gen_auto_test.go @@ -1209,7 +1209,7 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing. }, { Name: "device", - Kind: immutable.Some[client.FieldKind](client.ObjectKind("Device")), + Kind: immutable.Some[client.FieldKind](client.NewNamedKind("Device", false)), }, }, }, @@ -1233,7 +1233,7 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing. }, { Name: "owner", - Kind: immutable.Some[client.FieldKind](client.ObjectKind("User")), + Kind: immutable.Some[client.FieldKind](client.NewNamedKind("User", false)), }, }, }, @@ -1246,7 +1246,7 @@ func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing. 
 },
 {
 Name: "owner",
- Kind: client.ObjectKind("User"),
+ Kind: client.NewNamedKind("User", false),
 },
 },
 },
@@ -1316,8 +1316,9 @@ func TestAutoGenerate_IfColDefinitionsAreValid_ShouldGenerate(t *testing.T) {
 defs := []client.CollectionDefinition{
 {
 Description: client.CollectionDescription{
- Name: immutable.Some("User"),
- ID: 0,
+ Name: immutable.Some("User"),
+ ID: 0,
+ RootID: 0,
 Fields: []client.CollectionFieldDescription{
 {
 Name: "name",
@@ -1330,7 +1331,7 @@ func TestAutoGenerate_IfColDefinitionsAreValid_ShouldGenerate(t *testing.T) {
 },
 {
 Name: "devices",
- Kind: immutable.Some[client.FieldKind](client.ObjectArrayKind("Device")),
+ Kind: immutable.Some[client.FieldKind](client.NewCollectionKind(1, true)),
 RelationName: immutable.Some("Device_owner"),
 },
 },
@@ -1355,15 +1356,16 @@ func TestAutoGenerate_IfColDefinitionsAreValid_ShouldGenerate(t *testing.T) {
 },
 {
 Description: client.CollectionDescription{
- Name: immutable.Some("Device"),
- ID: 1,
+ Name: immutable.Some("Device"),
+ ID: 1,
+ RootID: 1,
 Fields: []client.CollectionFieldDescription{
 {
 Name: "model",
 },
 {
 Name: "owner",
- Kind: immutable.Some[client.FieldKind](client.ObjectKind("User")),
+ Kind: immutable.Some[client.FieldKind](client.NewCollectionKind(0, false)),
 RelationName: immutable.Some("Device_owner"),
 },
 {
@@ -1381,7 +1383,7 @@ func TestAutoGenerate_IfColDefinitionsAreValid_ShouldGenerate(t *testing.T) {
 },
 {
 Name: "owner",
- Kind: client.ObjectKind("User"),
+ Kind: client.NewNamedKind("User", false),
 Typ: client.LWW_REGISTER,
 },
 {
diff --git a/tests/gen/schema_parser.go b/tests/gen/schema_parser.go
index fbd82bc47b..3e08212b5c 100644
--- a/tests/gen/schema_parser.go
+++ b/tests/gen/schema_parser.go
@@ -17,21 +17,48 @@ import (
 "unicode"
 
 "github.com/sourcenetwork/defradb/client"
- "github.com/sourcenetwork/defradb/internal/request/graphql"
+ "github.com/sourcenetwork/defradb/node"
 )
 
-func parseSDL(gqlSDL string) (map[string]client.CollectionDefinition, error) {
- parser, err := graphql.NewParser()
+func ParseSDL(gqlSDL string) (map[string]client.CollectionDefinition, error) {
+ ctx := context.Background()
+
+ // Spinning up a temporary in-memory node with all extras disabled is the
+ // most reliable and cheapest (maintenance-wise) way to fully parse
+ // the SDL and correctly link all relations.
+ node, err := node.NewNode(
+ ctx,
+ node.WithBadgerInMemory(true),
+ node.WithDisableAPI(true),
+ node.WithDisableP2P(true),
+ )
 if err != nil {
 return nil, err
 }
- cols, err := parser.ParseSDL(context.Background(), gqlSDL)
+
+ err = node.Start(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ _, err = node.DB.AddSchema(ctx, gqlSDL)
+ if err != nil {
+ return nil, err
+ }
+
+ cols, err := node.DB.GetCollections(ctx, client.CollectionFetchOptions{})
 if err != nil {
 return nil, err
 }
- result := make(map[string]client.CollectionDefinition)
+
+ err = node.Close(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ result := make(map[string]client.CollectionDefinition, len(cols))
 for _, col := range cols {
- result[col.Description.Name.Value()] = col
+ result[col.Definition().GetName()] = col.Definition()
 }
 return result, nil
 }
diff --git a/tests/integration/backup/one_to_many/export_test.go b/tests/integration/backup/one_to_many/export_test.go
index 9126456149..ae81942b77 100644
--- a/tests/integration/backup/one_to_many/export_test.go
+++ b/tests/integration/backup/one_to_many/export_test.go
@@ -60,7 +60,7 @@ func TestBackupExport_AllCollectionsMultipleDocsAndDocUpdate_NoError(t *testing.
Doc: `{"age": 31}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_docID":"bae-7fca96a2-5f01-5558-a81f-09b47587f26d","_docIDNew":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","age":31,"name":"John"},{"_docID":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","_docIDNew":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","age":31,"name":"Bob"}],"Book":[{"_docID":"bae-45c92e9c-4d31-5e96-8bd7-3d532734e117","_docIDNew":"bae-3ca9a4c3-6240-5e86-a00f-9590d2f2ecf3","author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","name":"John and the sourcerers' stone"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-7fca96a2-5f01-5558-a81f-09b47587f26d","_docIDNew":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","age":31,"name":"John"},{"_docID":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","_docIDNew":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","age":31,"name":"Bob"}],"Book":[{"_docID":"bae-8c8be5c6-d26b-50d4-9378-2acd5fe6959d","_docIDNew":"bae-c94e52f8-6e91-522c-b6a6-38346a06b3d2","author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","name":"John and the sourcerers' stone"}]}`, }, }, } @@ -99,7 +99,7 @@ func TestBackupExport_AllCollectionsMultipleDocsAndMultipleDocUpdate_NoError(t * Doc: `{"age": 31}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_docID":"bae-7fca96a2-5f01-5558-a81f-09b47587f26d","_docIDNew":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","age":31,"name":"John"},{"_docID":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","_docIDNew":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","age":31,"name":"Bob"}],"Book":[{"_docID":"bae-45c92e9c-4d31-5e96-8bd7-3d532734e117","_docIDNew":"bae-3ca9a4c3-6240-5e86-a00f-9590d2f2ecf3","author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","name":"John and the sourcerers' stone"},{"_docID":"bae-8fc3d148-869b-5629-ae22-5423c73f709b","_docIDNew":"bae-33c136bd-4375-54a0-81ff-54ca560c7bb8","author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","name":"Game of chains"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-7fca96a2-5f01-5558-a81f-09b47587f26d","_docIDNew":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","age":31,"name":"John"},{"_docID":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","_docIDNew":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","age":31,"name":"Bob"}],"Book":[{"_docID":"bae-4a28c746-ccbf-5511-91a9-391036f42f80","_docIDNew":"bae-d821f684-47de-5b63-b9c7-6eccec368e52","author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","name":"Game of chains"},{"_docID":"bae-8c8be5c6-d26b-50d4-9378-2acd5fe6959d","_docIDNew":"bae-c94e52f8-6e91-522c-b6a6-38346a06b3d2","author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","name":"John and the sourcerers' stone"}]}`, }, }, } diff --git a/tests/integration/backup/one_to_many/import_test.go b/tests/integration/backup/one_to_many/import_test.go index 2a48fb878f..39458f9869 100644 --- a/tests/integration/backup/one_to_many/import_test.go +++ b/tests/integration/backup/one_to_many/import_test.go @@ -88,14 +88,14 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndUpdatedDocs_NoEr ImportContent: `{ "Book":[ { - "_docID":"bae-8fc3d148-869b-5629-ae22-5423c73f709b", - "_docIDNew":"bae-33c136bd-4375-54a0-81ff-54ca560c7bb8", + "_docID":"bae-4a28c746-ccbf-5511-91a9-391036f42f80", + "_docIDNew":"bae-d821f684-47de-5b63-b9c7-6eccec368e52", "author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f", "name":"Game of chains" }, { - "_docID":"bae-45c92e9c-4d31-5e96-8bd7-3d532734e117", - "_docIDNew":"bae-3ca9a4c3-6240-5e86-a00f-9590d2f2ecf3", + "_docID":"bae-8c8be5c6-d26b-50d4-9378-2acd5fe6959d", + 
"_docIDNew":"bae-c94e52f8-6e91-522c-b6a6-38346a06b3d2", "author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f", "name":"John and the sourcerers' stone" } @@ -150,13 +150,13 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndUpdatedDocs_NoEr Results: map[string]any{ "Book": []map[string]any{ { - "name": "Game of chains", + "name": "John and the sourcerers' stone", "author": map[string]any{ "_docID": "bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f", }, }, { - "name": "John and the sourcerers' stone", + "name": "Game of chains", "author": map[string]any{ "_docID": "bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f", }, diff --git a/tests/integration/backup/one_to_one/export_test.go b/tests/integration/backup/one_to_one/export_test.go index fb63d52208..f765d634a9 100644 --- a/tests/integration/backup/one_to_one/export_test.go +++ b/tests/integration/backup/one_to_one/export_test.go @@ -60,7 +60,7 @@ func TestBackupExport_AllCollectionsMultipleDocsAndDocUpdate_NoError(t *testing. Doc: `{"age": 31}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_docID":"bae-7fca96a2-5f01-5558-a81f-09b47587f26d","_docIDNew":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","age":31,"name":"John"},{"_docID":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","_docIDNew":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","age":31,"name":"Bob"}],"Book":[{"_docID":"bae-f33a7110-fb6f-57aa-9501-df0111427315","_docIDNew":"bae-c9c1a385-afce-5ef7-8b98-9369b157fd97","author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","name":"John and the sourcerers' stone"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-7fca96a2-5f01-5558-a81f-09b47587f26d","_docIDNew":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","age":31,"name":"John"},{"_docID":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","_docIDNew":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","age":31,"name":"Bob"}],"Book":[{"_docID":"bae-af59fdc4-e495-5fd3-a9a6-386249aafdbb","_docIDNew":"bae-d374c406-c6ea-51cd-9e9b-dd44a97b499c","author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","name":"John and the sourcerers' stone"}]}`, }, }, } @@ -108,7 +108,7 @@ func TestBackupExport_DoubleReletionship_NoError(t *testing.T) { Doc: `{"age": 31}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_docID":"bae-7fca96a2-5f01-5558-a81f-09b47587f26d","_docIDNew":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","age":31,"name":"John"},{"_docID":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","_docIDNew":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","age":31,"name":"Bob"}],"Book":[{"_docID":"bae-ccf9da82-8ed6-5133-b64f-558c21bc8dfd","_docIDNew":"bae-27ae099a-fa7d-5a66-a919-6c3b0322d17c","author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","favourite_id":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","name":"John and the sourcerers' stone"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-7fca96a2-5f01-5558-a81f-09b47587f26d","_docIDNew":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","age":31,"name":"John"},{"_docID":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","_docIDNew":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","age":31,"name":"Bob"}],"Book":[{"_docID":"bae-bddb7139-7035-5fff-a118-3fc2033723b3","_docIDNew":"bae-6972e51f-a8cd-59eb-9a34-8a37058ddf4e","author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","favourite_id":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","name":"John and the sourcerers' stone"}]}`, }, }, } @@ -160,7 +160,7 @@ func TestBackupExport_DoubleReletionshipWithUpdate_NoError(t *testing.T) { Doc: `{"age": 31}`, }, testUtils.BackupExport{ - ExpectedContent: 
`{"User":[{"_docID":"bae-7fca96a2-5f01-5558-a81f-09b47587f26d","_docIDNew":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","age":31,"name":"John"},{"_docID":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","_docIDNew":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","age":31,"name":"Bob"}],"Book":[{"_docID":"bae-ccf9da82-8ed6-5133-b64f-558c21bc8dfd","_docIDNew":"bae-27ae099a-fa7d-5a66-a919-6c3b0322d17c","author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","favourite_id":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","name":"John and the sourcerers' stone"},{"_docID":"bae-ffba7007-d4d4-5630-be53-d66f56da57fd","_docIDNew":"bae-ffba7007-d4d4-5630-be53-d66f56da57fd","name":"Game of chains"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-7fca96a2-5f01-5558-a81f-09b47587f26d","_docIDNew":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","age":31,"name":"John"},{"_docID":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","_docIDNew":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","age":31,"name":"Bob"}],"Book":[{"_docID":"bae-0aa10275-4f6e-5b38-9915-5664dd4c7802","_docIDNew":"bae-0aa10275-4f6e-5b38-9915-5664dd4c7802","name":"Game of chains"},{"_docID":"bae-bddb7139-7035-5fff-a118-3fc2033723b3","_docIDNew":"bae-6972e51f-a8cd-59eb-9a34-8a37058ddf4e","author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","favourite_id":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","name":"John and the sourcerers' stone"}]}`, }, }, } diff --git a/tests/integration/backup/one_to_one/import_test.go b/tests/integration/backup/one_to_one/import_test.go index 15bdad354b..41f1f4d47d 100644 --- a/tests/integration/backup/one_to_one/import_test.go +++ b/tests/integration/backup/one_to_one/import_test.go @@ -67,10 +67,10 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollections_NoError(t *testing Results: map[string]any{ "Book": []map[string]any{ { - "name": "Game of chains", + "name": "John and the sourcerers' stone", }, { - "name": "John and the sourcerers' stone", + "name": "Game of chains", }, }, }, @@ -88,8 +88,8 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndUpdatedDocs_NoEr ImportContent: `{ "Book":[ { - "_docID":"bae-f33a7110-fb6f-57aa-9501-df0111427315", - "_docIDNew":"bae-c9c1a385-afce-5ef7-8b98-9369b157fd97", + "_docID":"bae-af59fdc4-e495-5fd3-a9a6-386249aafdbb", + "_docIDNew":"bae-d374c406-c6ea-51cd-9e9b-dd44a97b499c", "author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f", "name":"John and the sourcerers' stone" } @@ -171,8 +171,8 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndMultipleUpdatedD "name":"Game of chains" }, { - "_docID":"bae-f33a7110-fb6f-57aa-9501-df0111427315", - "_docIDNew":"bae-c9c1a385-afce-5ef7-8b98-9369b157fd97", + "_docID":"bae-af59fdc4-e495-5fd3-a9a6-386249aafdbb", + "_docIDNew":"bae-d374c406-c6ea-51cd-9e9b-dd44a97b499c", "author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f", "name":"John and the sourcerers' stone" } @@ -219,7 +219,7 @@ func TestBackupImport_DoubleRelationshipWithUpdate_NoError(t *testing.T) { `, }, testUtils.BackupImport{ - ImportContent: `{"Book":[{"_docID":"bae-236c14bd-4621-5d43-bc03-4442f3b8719e","_docIDNew":"bae-6dbb3738-d3db-5121-acee-6fbdd97ff7a8","author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","favourite_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","name":"John and the sourcerers' stone"},{"_docID":"bae-ffba7007-d4d4-5630-be53-d66f56da57fd","_docIDNew":"bae-ffba7007-d4d4-5630-be53-d66f56da57fd","name":"Game of 
chains"}],"User":[{"_docID":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","_docIDNew":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","age":31,"name":"Bob"},{"_docID":"bae-7fca96a2-5f01-5558-a81f-09b47587f26d","_docIDNew":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","age":31,"name":"John"}]}`, + ImportContent: `{"Book":[{"_docID":"bae-236c14bd-4621-5d43-bc03-4442f3b8719e","_docIDNew":"bae-6dbb3738-d3db-5121-acee-6fbdd97ff7a8","author_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","favourite_id":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","name":"John and the sourcerers' stone"},{"_docID":"bae-0aa10275-4f6e-5b38-9915-5664dd4c7802","_docIDNew":"bae-0aa10275-4f6e-5b38-9915-5664dd4c7802","name":"Game of chains"}],"User":[{"_docID":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","_docIDNew":"bae-ebfe11e2-045d-525d-9fb7-2abb961dc84f","age":31,"name":"Bob"},{"_docID":"bae-7fca96a2-5f01-5558-a81f-09b47587f26d","_docIDNew":"bae-9918e1ec-c62b-5de2-8fbf-c82795b8ac7f","age":31,"name":"John"}]}`, }, testUtils.Request{ Request: ` @@ -236,6 +236,10 @@ func TestBackupImport_DoubleRelationshipWithUpdate_NoError(t *testing.T) { }`, Results: map[string]any{ "Book": []map[string]any{ + { + "name": "Game of chains", + "author": nil, + }, { "name": "John and the sourcerers' stone", "author": map[string]any{ @@ -245,10 +249,6 @@ func TestBackupImport_DoubleRelationshipWithUpdate_NoError(t *testing.T) { }, }, }, - { - "name": "Game of chains", - "author": nil, - }, }, }, }, diff --git a/tests/integration/backup/self_reference/export_test.go b/tests/integration/backup/self_reference/export_test.go index b31216a1ad..8736c73660 100644 --- a/tests/integration/backup/self_reference/export_test.go +++ b/tests/integration/backup/self_reference/export_test.go @@ -26,13 +26,13 @@ func TestBackupExport_Simple_NoError(t *testing.T) { }, testUtils.CreateDoc{ CollectionID: 0, - Doc: `{"name": "Bob", "age": 31, "boss": "bae-a2162ff0-3257-50f1-ba2f-39c299921220"}`, + Doc: `{"name": "Bob", "age": 31, "boss": "bae-8096e3d7-41ab-5afe-ad88-481150483db1"}`, }, testUtils.BackupExport{ Config: client.BackupConfig{ Collections: []string{"User"}, }, - ExpectedContent: `{"User":[{"_docID":"bae-a2162ff0-3257-50f1-ba2f-39c299921220","_docIDNew":"bae-a2162ff0-3257-50f1-ba2f-39c299921220","age":30,"name":"John"},{"_docID":"bae-f4def2b3-2fe8-5e3b-838e-b9d9f8aca102","_docIDNew":"bae-f4def2b3-2fe8-5e3b-838e-b9d9f8aca102","age":31,"boss_id":"bae-a2162ff0-3257-50f1-ba2f-39c299921220","name":"Bob"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-0dfbaf9f-3c58-5133-aa07-a9f25d792f4e","_docIDNew":"bae-0dfbaf9f-3c58-5133-aa07-a9f25d792f4e","age":31,"boss_id":"bae-8096e3d7-41ab-5afe-ad88-481150483db1","name":"Bob"},{"_docID":"bae-8096e3d7-41ab-5afe-ad88-481150483db1","_docIDNew":"bae-8096e3d7-41ab-5afe-ad88-481150483db1","age":30,"name":"John"}]}`, }, }, } @@ -49,7 +49,7 @@ func TestBackupExport_MultipleDocsAndDocUpdate_NoError(t *testing.T) { }, testUtils.CreateDoc{ CollectionID: 0, - Doc: `{"name": "Bob", "age": 31, "boss": "bae-a2162ff0-3257-50f1-ba2f-39c299921220"}`, + Doc: `{"name": "Bob", "age": 31, "boss": "bae-8096e3d7-41ab-5afe-ad88-481150483db1"}`, }, testUtils.UpdateDoc{ CollectionID: 0, @@ -57,7 +57,7 @@ func TestBackupExport_MultipleDocsAndDocUpdate_NoError(t *testing.T) { Doc: `{"age": 31}`, }, testUtils.BackupExport{ - ExpectedContent: 
`{"User":[{"_docID":"bae-a2162ff0-3257-50f1-ba2f-39c299921220","_docIDNew":"bae-99fbc678-167f-5325-bdf1-79fa76039125","age":31,"name":"John"},{"_docID":"bae-f4def2b3-2fe8-5e3b-838e-b9d9f8aca102","_docIDNew":"bae-98531af8-dda5-5993-b140-1495fa8f1576","age":31,"boss_id":"bae-99fbc678-167f-5325-bdf1-79fa76039125","name":"Bob"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-0dfbaf9f-3c58-5133-aa07-a9f25d792f4e","_docIDNew":"bae-f3c5fc81-300f-5dd7-aeb3-20dd15883930","age":31,"boss_id":"bae-e4423c73-b867-511b-a5f1-565bd87d9c53","name":"Bob"},{"_docID":"bae-8096e3d7-41ab-5afe-ad88-481150483db1","_docIDNew":"bae-e4423c73-b867-511b-a5f1-565bd87d9c53","age":31,"name":"John"}]}`, }, }, } diff --git a/tests/integration/backup/self_reference/import_test.go b/tests/integration/backup/self_reference/import_test.go index 1d0b39d2e7..712b191b00 100644 --- a/tests/integration/backup/self_reference/import_test.go +++ b/tests/integration/backup/self_reference/import_test.go @@ -25,13 +25,13 @@ func TestBackupSelfRefImport_Simple_NoError(t *testing.T) { ImportContent: `{ "User":[ { - "_docID":"bae-f4def2b3-2fe8-5e3b-838e-b9d9f8aca102", + "_docID":"bae-0dfbaf9f-3c58-5133-aa07-a9f25d792f4e", "age":31, - "boss_id":"bae-a2162ff0-3257-50f1-ba2f-39c299921220", + "boss_id":"bae-8096e3d7-41ab-5afe-ad88-481150483db1", "name":"Bob" }, { - "_docID":"bae-a2162ff0-3257-50f1-ba2f-39c299921220", + "_docID":"bae-8096e3d7-41ab-5afe-ad88-481150483db1", "age":30, "name":"John" } @@ -50,16 +50,16 @@ func TestBackupSelfRefImport_Simple_NoError(t *testing.T) { }`, Results: map[string]any{ "User": []map[string]any{ - { - "name": "John", - "boss": nil, - }, { "name": "Bob", "boss": map[string]any{ "name": "John", }, }, + { + "name": "John", + "boss": nil, + }, }, }, }, @@ -73,10 +73,10 @@ func TestBackupSelfRefImport_SelfRef_NoError(t *testing.T) { expectedExportData := `{` + `"User":[` + `{` + - `"_docID":"bae-20631b3d-1498-51f1-be29-5c0effbfa646",` + - `"_docIDNew":"bae-20631b3d-1498-51f1-be29-5c0effbfa646",` + + `"_docID":"bae-b9449db8-3894-5701-84ce-ee96a3eafc9c",` + + `"_docIDNew":"bae-b9449db8-3894-5701-84ce-ee96a3eafc9c",` + `"age":31,` + - `"boss_id":"bae-20631b3d-1498-51f1-be29-5c0effbfa646",` + + `"boss_id":"bae-b9449db8-3894-5701-84ce-ee96a3eafc9c",` + `"name":"Bob"` + `}` + `]` + @@ -100,7 +100,7 @@ func TestBackupSelfRefImport_SelfRef_NoError(t *testing.T) { testUtils.UpdateDoc{ NodeID: immutable.Some(0), Doc: `{ - "boss_id": "bae-20631b3d-1498-51f1-be29-5c0effbfa646" + "boss_id": "bae-b9449db8-3894-5701-84ce-ee96a3eafc9c" }`, }, testUtils.BackupExport{ @@ -277,18 +277,18 @@ func TestBackupSelfRefImport_SplitPrimaryRelationWithSecondCollection_NoError(t expectedExportData := `{` + `"Author":[` + `{` + - `"_docID":"bae-069af8c0-9728-5dde-84ff-ab2dd836f165",` + - `"_docIDNew":"bae-f2e84aeb-decc-5e40-94ff-e365f0ed0f4b",` + - `"book_id":"bae-006376a9-5ceb-5bd0-bfed-6ff5afd3eb93",` + + `"_docID":"bae-bf1f16db-3c02-5759-8127-7d73346442cc",` + + `"_docIDNew":"bae-bf1f16db-3c02-5759-8127-7d73346442cc",` + + `"book_id":"bae-89136f56-3779-5656-b8a6-f76a1c262f37",` + `"name":"John"` + `}` + `],` + `"Book":[` + `{` + - `"_docID":"bae-2b931633-22bf-576f-b788-d8098b213e5a",` + - `"_docIDNew":"bae-c821a0a9-7afc-583b-accb-dc99a09c1ff8",` + + `"_docID":"bae-89136f56-3779-5656-b8a6-f76a1c262f37",` + + `"_docIDNew":"bae-66b0f769-c743-5a50-ae6d-1dcd978e2404",` + `"name":"John and the sourcerers' stone",` + - `"reviewedBy_id":"bae-069af8c0-9728-5dde-84ff-ab2dd836f165"` + + `"reviewedBy_id":"bae-bf1f16db-3c02-5759-8127-7d73346442cc"` + `}` + 
`]` + `}` @@ -316,7 +316,7 @@ func TestBackupSelfRefImport_SplitPrimaryRelationWithSecondCollection_NoError(t testUtils.CreateDoc{ NodeID: immutable.Some(0), CollectionID: 1, - // bae-2b931633-22bf-576f-b788-d8098b213e5a + // bae-89136f56-3779-5656-b8a6-f76a1c262f37 Doc: `{ "name": "John and the sourcerers' stone" }`, @@ -326,7 +326,7 @@ func TestBackupSelfRefImport_SplitPrimaryRelationWithSecondCollection_NoError(t CollectionID: 0, Doc: `{ "name": "John", - "book": "bae-2b931633-22bf-576f-b788-d8098b213e5a" + "book": "bae-89136f56-3779-5656-b8a6-f76a1c262f37" }`, }, testUtils.UpdateDoc{ @@ -334,7 +334,7 @@ func TestBackupSelfRefImport_SplitPrimaryRelationWithSecondCollection_NoError(t CollectionID: 1, DocID: 0, Doc: `{ - "reviewedBy_id": "bae-069af8c0-9728-5dde-84ff-ab2dd836f165" + "reviewedBy_id": "bae-bf1f16db-3c02-5759-8127-7d73346442cc" }`, }, /* @@ -366,11 +366,8 @@ func TestBackupSelfRefImport_SplitPrimaryRelationWithSecondCollection_NoError(t Results: map[string]any{ "Book": []map[string]any{ { - "name": "John and the sourcerers' stone", - "author": map[string]any{ - "name": "John", - "reviewed": nil, - }, + "name": "John and the sourcerers' stone", + "author": nil, }, }, }, diff --git a/tests/integration/collection_description/updates/replace/id_test.go b/tests/integration/collection_description/updates/replace/id_test.go index a89dad193b..7538f2161e 100644 --- a/tests/integration/collection_description/updates/replace/id_test.go +++ b/tests/integration/collection_description/updates/replace/id_test.go @@ -115,7 +115,7 @@ func TestColDescrUpdateReplaceID_WithExistingDifferentRoot_Errors(t *testing.T) { "op": "replace", "path": "/2/ID", "value": 1 } ] `, - ExpectedError: "collection root ID cannot be mutated.", + ExpectedError: "collection root ID cannot be mutated. CollectionID:", }, }, } diff --git a/tests/integration/collection_description/updates/replace/name_one_many_test.go b/tests/integration/collection_description/updates/replace/name_one_many_test.go new file mode 100644 index 0000000000..0993af66c0 --- /dev/null +++ b/tests/integration/collection_description/updates/replace/name_one_many_test.go @@ -0,0 +1,159 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package replace + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplaceNameOneToMany_GivenExistingName(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Author { + name: String + books: [Book] + } + + type Book { + name: String + author: Author + } + `, + }, + testUtils.CreateDoc{ + DocMap: map[string]any{ + "name": "John Grisham", + }, + }, + testUtils.CreateDoc{ + CollectionID: 1, + DocMap: map[string]any{ + "name": "Painted House", + "author": testUtils.NewDocIndex(0, 0), + }, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/Name", "value": "Writer" } + ] + `, + }, + testUtils.Request{ + Request: `query { + Book { + name + author { + name + } + } + }`, + Results: map[string]any{ + "Book": []map[string]any{ + { + "name": "Painted House", + "author": map[string]any{ + "name": "John Grisham", + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplaceNameOneToMany_GivenExistingNameReplacedBeforeAndAfterCreate(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Author { + name: String + books: [Book] + } + + type Book { + name: String + author: Author + } + `, + }, + testUtils.CreateDoc{ + DocMap: map[string]any{ + "name": "John Grisham", + }, + }, + testUtils.CreateDoc{ + CollectionID: 1, + DocMap: map[string]any{ + "name": "Painted House", + "author": testUtils.NewDocIndex(0, 0), + }, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/Name", "value": "Writer" } + ] + `, + }, + testUtils.CreateDoc{ + DocMap: map[string]any{ + "name": "Cornelia Funke", + }, + }, + testUtils.CreateDoc{ + CollectionID: 1, + DocMap: map[string]any{ + "name": "Theif Lord", + "author": testUtils.NewDocIndex(0, 1), + }, + }, + testUtils.Request{ + Request: `query { + Book { + name + author { + name + } + } + }`, + // This test ensures that documents created before and after the collection rename + // are correctly fetched together + Results: map[string]any{ + "Book": []map[string]any{ + { + "name": "Painted House", + "author": map[string]any{ + "name": "John Grisham", + }, + }, + { + "name": "Theif Lord", + "author": map[string]any{ + "name": "Cornelia Funke", + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/encryption/commit_relation_test.go b/tests/integration/encryption/commit_relation_test.go index 7097c0fb56..b75d42580e 100644 --- a/tests/integration/encryption/commit_relation_test.go +++ b/tests/integration/encryption/commit_relation_test.go @@ -18,7 +18,7 @@ import ( func TestDocEncryption_WithEncryptionSecondaryRelations_ShouldStoreEncryptedCommit(t *testing.T) { const userDocID = "bae-4d563681-e131-5e01-8ab4-6c65ac0d0478" - const deviceDocID = "bae-50211587-fde7-5d75-8034-e7040dfba203" + const deviceDocID = "bae-29ab9ee8-80cb-53eb-a467-f96a170f4cb7" test := testUtils.TestCase{ Actions: []any{ @@ -64,16 +64,6 @@ func TestDocEncryption_WithEncryptionSecondaryRelations_ShouldStoreEncryptedComm `, Results: map[string]any{ "commits": []map[string]any{ - { - "delta": encrypt(testUtils.CBORValue("Chris"), userDocID, ""), - "docID": userDocID, - "fieldName": "name", - }, - { - "delta": nil, - "docID": userDocID, - "fieldName": nil, - }, { "delta": encrypt(testUtils.CBORValue("Sony"), deviceDocID, ""), "docID": deviceDocID, @@ -94,6 +84,16 @@ func 
TestDocEncryption_WithEncryptionSecondaryRelations_ShouldStoreEncryptedComm "docID": deviceDocID, "fieldName": nil, }, + { + "delta": encrypt(testUtils.CBORValue("Chris"), userDocID, ""), + "docID": userDocID, + "fieldName": "name", + }, + { + "delta": nil, + "docID": userDocID, + "fieldName": nil, + }, }, }, }, diff --git a/tests/integration/explain/execute/dagscan_test.go b/tests/integration/explain/execute/dagscan_test.go index 8093accba4..4860051439 100644 --- a/tests/integration/explain/execute/dagscan_test.go +++ b/tests/integration/explain/execute/dagscan_test.go @@ -25,12 +25,13 @@ func TestExecuteExplainCommitsDagScan(t *testing.T) { Actions: []any{ explainUtils.SchemaForExplainTests, - // Authors + create2AddressDocuments(), + create2AuthorContactDocuments(), create2AuthorDocuments(), testUtils.ExplainRequest{ Request: `query @explain(type: execute) { - commits (docID: "bae-dcdbc1dc-8428-592d-ad9d-ca0f1430e1bf") { + commits (docID: "bae-333455ca-1563-54c3-85a4-1db7ea4e9c59") { links { cid } @@ -72,12 +73,13 @@ func TestExecuteExplainLatestCommitsDagScan(t *testing.T) { Actions: []any{ explainUtils.SchemaForExplainTests, - // Author + create2AddressDocuments(), + create2AuthorContactDocuments(), create2AuthorDocuments(), testUtils.ExplainRequest{ Request: `query @explain(type: execute) { - latestCommits(docID: "bae-dcdbc1dc-8428-592d-ad9d-ca0f1430e1bf") { + latestCommits(docID: "bae-333455ca-1563-54c3-85a4-1db7ea4e9c59") { cid links { cid diff --git a/tests/integration/explain/execute/delete_test.go b/tests/integration/explain/execute/delete_test.go index 2d29f3a276..e80189fd8a 100644 --- a/tests/integration/explain/execute/delete_test.go +++ b/tests/integration/explain/execute/delete_test.go @@ -76,7 +76,8 @@ func TestExecuteExplainMutationRequestWithDeleteUsingFilter(t *testing.T) { Actions: []any{ explainUtils.SchemaForExplainTests, - // Author + create2AddressDocuments(), + create2AuthorContactDocuments(), create2AuthorDocuments(), testUtils.ExplainRequest{ diff --git a/tests/integration/explain/execute/fixture.go b/tests/integration/explain/execute/fixture.go index 17a172552f..0592352de0 100644 --- a/tests/integration/explain/execute/fixture.go +++ b/tests/integration/explain/execute/fixture.go @@ -80,23 +80,21 @@ func create2AuthorDocuments() []testUtils.CreateDoc { return []testUtils.CreateDoc{ { CollectionID: 2, - // _docID: "bae-7f54d9e0-cbde-5320-aa6c-5c8895a89138" - Doc: `{ - "name": "John Grisham", - "age": 65, - "verified": true, - "contact_id": "bae-819c9c03-9d49-5fd5-aaee-0dc5a70bbe44" - }`, + DocMap: map[string]any{ + "name": "John Grisham", + "age": 65, + "verified": true, + "contact_id": testUtils.NewDocIndex(3, 0), + }, }, { CollectionID: 2, - // _docID: "bae-68cb395d-df73-5bcb-b623-615a140dee12" - Doc: `{ - "name": "Cornelia Funke", - "age": 62, - "verified": false, - "contact_id": "bae-9bf0272a-c521-5bef-a7ba-642e8be6e433" - }`, + DocMap: map[string]any{ + "name": "Cornelia Funke", + "age": 62, + "verified": false, + "contact_id": testUtils.NewDocIndex(3, 1), + }, }, } } @@ -105,23 +103,19 @@ func create2AuthorContactDocuments() []testUtils.CreateDoc { return []testUtils.CreateDoc{ { CollectionID: 3, - // "author_id": "bae-7f54d9e0-cbde-5320-aa6c-5c8895a89138" - // _docID: "bae-819c9c03-9d49-5fd5-aaee-0dc5a70bbe44" - Doc: `{ - "cell": "5197212301", - "email": "john_grisham@example.com", - "address_id": "bae-14f20db7-3654-58de-9156-596ef2cfd790" - }`, + DocMap: map[string]any{ + "cell": "5197212301", + "email": "john_grisham@example.com", + "address_id": 
testUtils.NewDocIndex(4, 0), + }, }, { CollectionID: 3, - // "author_id": "bae-68cb395d-df73-5bcb-b623-615a140dee12", - // _docID: "bae-9bf0272a-c521-5bef-a7ba-642e8be6e433" - Doc: `{ - "cell": "5197212302", - "email": "cornelia_funke@example.com", - "address_id": "bae-49f715e7-7f01-5509-a213-ed98cb81583f" - }`, + DocMap: map[string]any{ + "cell": "5197212302", + "email": "cornelia_funke@example.com", + "address_id": testUtils.NewDocIndex(4, 1), + }, }, } } diff --git a/tests/integration/explain/execute/type_join_test.go b/tests/integration/explain/execute/type_join_test.go index 60f717a84f..511c3498a9 100644 --- a/tests/integration/explain/execute/type_join_test.go +++ b/tests/integration/explain/execute/type_join_test.go @@ -25,11 +25,9 @@ func TestExecuteExplainRequestWithAOneToOneJoin(t *testing.T) { Actions: []any{ explainUtils.SchemaForExplainTests, - // Authors - create2AuthorDocuments(), - - // Contacts + create2AddressDocuments(), create2AuthorContactDocuments(), + create2AuthorDocuments(), testUtils.ExplainRequest{ Request: `query @explain(type: execute) { @@ -87,11 +85,9 @@ func TestExecuteExplainWithMultipleOneToOneJoins(t *testing.T) { Actions: []any{ explainUtils.SchemaForExplainTests, - // Authors - create2AuthorDocuments(), - - // Contacts + create2AddressDocuments(), create2AuthorContactDocuments(), + create2AuthorDocuments(), testUtils.ExplainRequest{ Request: `query @explain(type: execute) { diff --git a/tests/integration/explain/execute/with_average_test.go b/tests/integration/explain/execute/with_average_test.go index 1c81ce5021..fdadf6fcaa 100644 --- a/tests/integration/explain/execute/with_average_test.go +++ b/tests/integration/explain/execute/with_average_test.go @@ -24,6 +24,9 @@ func TestExecuteExplainAverageRequestOnArrayField(t *testing.T) { Actions: []any{ explainUtils.SchemaForExplainTests, + + create2AddressDocuments(), + create2AuthorContactDocuments(), create2AuthorDocuments(), create3BookDocuments(), @@ -81,6 +84,9 @@ func TestExplainExplainAverageRequestOnJoinedField(t *testing.T) { Actions: []any{ explainUtils.SchemaForExplainTests, + + create2AddressDocuments(), + create2AuthorContactDocuments(), create2AuthorDocuments(), create3BookDocuments(), diff --git a/tests/integration/explain/execute/with_count_test.go b/tests/integration/explain/execute/with_count_test.go index 81dafce0d9..190a685592 100644 --- a/tests/integration/explain/execute/with_count_test.go +++ b/tests/integration/explain/execute/with_count_test.go @@ -25,6 +25,8 @@ func TestExecuteExplainRequestWithCountOnOneToManyRelation(t *testing.T) { Actions: []any{ explainUtils.SchemaForExplainTests, + create2AddressDocuments(), + create2AuthorContactDocuments(), create2AuthorDocuments(), create3BookDocuments(), diff --git a/tests/integration/explain/execute/with_limit_test.go b/tests/integration/explain/execute/with_limit_test.go index 1f40902525..6a930067df 100644 --- a/tests/integration/explain/execute/with_limit_test.go +++ b/tests/integration/explain/execute/with_limit_test.go @@ -24,6 +24,9 @@ func TestExecuteExplainRequestWithBothLimitAndOffsetOnParent(t *testing.T) { Actions: []any{ explainUtils.SchemaForExplainTests, + + create2AddressDocuments(), + create2AuthorContactDocuments(), create2AuthorDocuments(), create3BookDocuments(), @@ -75,6 +78,8 @@ func TestExecuteExplainRequestWithBothLimitAndOffsetOnParentAndLimitOnChild(t *t Actions: []any{ explainUtils.SchemaForExplainTests, + create2AddressDocuments(), + create2AuthorContactDocuments(), create2AuthorDocuments(), 
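These explain fixtures previously pinned relation links with hard-coded docID strings; they now use testUtils.NewDocIndex, which the framework resolves at execution time to the docID of the N-th document created in the given collection. That is why create2AddressDocuments and create2AuthorContactDocuments must now run before create2AuthorDocuments wherever authors are created. A sketch of the pattern, with values taken from the fixture change above (the resolution semantics are inferred from usage):

// Link an Author (collection index 2) to the first Contact (collection
// index 3) without hard-coding a docID: NewDocIndex(3, 0) resolves to the
// docID of document 0 in collection 3, so the Contact must exist first.
johnGrisham := testUtils.CreateDoc{
	CollectionID: 2,
	DocMap: map[string]any{
		"name":       "John Grisham",
		"age":        65,
		"verified":   true,
		"contact_id": testUtils.NewDocIndex(3, 0),
	},
}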
create3ArticleDocuments(), diff --git a/tests/integration/explain/execute/with_order_test.go b/tests/integration/explain/execute/with_order_test.go index 8b27cdbf04..55f0745928 100644 --- a/tests/integration/explain/execute/with_order_test.go +++ b/tests/integration/explain/execute/with_order_test.go @@ -25,7 +25,8 @@ func TestExecuteExplainRequestWithOrderFieldOnParent(t *testing.T) { Actions: []any{ explainUtils.SchemaForExplainTests, - // Authors + create2AddressDocuments(), + create2AuthorContactDocuments(), create2AuthorDocuments(), testUtils.ExplainRequest{ @@ -163,6 +164,8 @@ func TestExecuteExplainRequestWithOrderFieldOnChild(t *testing.T) { Actions: []any{ explainUtils.SchemaForExplainTests, + create2AddressDocuments(), + create2AuthorContactDocuments(), create2AuthorDocuments(), create3ArticleDocuments(), @@ -223,6 +226,8 @@ func TestExecuteExplainRequestWithOrderFieldOnBothParentAndChild(t *testing.T) { Actions: []any{ explainUtils.SchemaForExplainTests, + create2AddressDocuments(), + create2AuthorContactDocuments(), create2AuthorDocuments(), create3ArticleDocuments(), @@ -287,6 +292,8 @@ func TestExecuteExplainRequestWhereParentFieldIsOrderedByChildField(t *testing.T Actions: []any{ explainUtils.SchemaForExplainTests, + create2AddressDocuments(), + create2AuthorContactDocuments(), create2AuthorDocuments(), create3ArticleDocuments(), diff --git a/tests/integration/explain/execute/with_sum_test.go b/tests/integration/explain/execute/with_sum_test.go index dbc6b3761f..23dc85ce38 100644 --- a/tests/integration/explain/execute/with_sum_test.go +++ b/tests/integration/explain/execute/with_sum_test.go @@ -24,6 +24,9 @@ func TestExecuteExplainRequestWithSumOfInlineArrayField(t *testing.T) { Actions: []any{ explainUtils.SchemaForExplainTests, + + create2AddressDocuments(), + create2AuthorContactDocuments(), create2AuthorDocuments(), create3BookDocuments(), @@ -75,6 +78,8 @@ func TestExecuteExplainRequestSumOfRelatedOneToManyField(t *testing.T) { Actions: []any{ explainUtils.SchemaForExplainTests, + create2AddressDocuments(), + create2AuthorContactDocuments(), create2AuthorDocuments(), create3ArticleDocuments(), diff --git a/tests/integration/index/query_with_compound_filter_relation_test.go b/tests/integration/index/query_with_compound_filter_relation_test.go index 4708bb2c2a..98a57b6889 100644 --- a/tests/integration/index/query_with_compound_filter_relation_test.go +++ b/tests/integration/index/query_with_compound_filter_relation_test.go @@ -196,10 +196,10 @@ func TestIndex_QueryWithIndexOnOneToManyRelationAndFilter_Data(t *testing.T) { Results: map[string]any{ "Program": []map[string]any{ { - "name": "LensVM", + "name": "DefraDB", }, { - "name": "DefraDB", + "name": "LensVM", }, }, }, @@ -280,13 +280,13 @@ func TestIndex_QueryWithIndexOnOneToManyRelationOrFilter_Data(t *testing.T) { Results: map[string]any{ "Program": []map[string]any{ { - "name": "Zanzi", + "name": "DefraDB", }, { "name": "LensVM", }, { - "name": "DefraDB", + "name": "Zanzi", }, }, }, diff --git a/tests/integration/index/query_with_relation_filter_test.go b/tests/integration/index/query_with_relation_filter_test.go index c9798903b3..9428626810 100644 --- a/tests/integration/index/query_with_relation_filter_test.go +++ b/tests/integration/index/query_with_relation_filter_test.go @@ -56,8 +56,8 @@ func TestQueryWithIndexOnOneToManyRelation_IfFilterOnIndexedRelation_ShouldFilte Request: req1, Results: map[string]any{ "User": []map[string]any{ - {"name": "Islam"}, {"name": "Shahzad"}, + {"name": "Islam"}, {"name": 
"Keenan"}, }, }, @@ -124,8 +124,8 @@ func TestQueryWithIndexOnOneToManyRelation_IfFilterOnIndexedRelation_ShouldFilte Request: req1, Results: map[string]any{ "User": []map[string]any{ - {"name": "Islam"}, {"name": "Shahzad"}, + {"name": "Islam"}, {"name": "Keenan"}, }, }, @@ -204,8 +204,8 @@ func TestQueryWithIndexOnOneToOnesSecondaryRelation_IfFilterOnIndexedRelation_Sh Request: req2, Results: map[string]any{ "User": []map[string]any{ - {"name": "Shahzad"}, {"name": "John"}, + {"name": "Shahzad"}, {"name": "Fred"}, }, }, @@ -490,11 +490,11 @@ func TestQueryWithIndexOnOneToMany_IfFilterOnIndexedRelation_ShouldFilter(t *tes "devices": []map[string]any{ { "model": "Walkman", - "manufacturer": "The Proclaimers", + "manufacturer": "Sony", }, { "model": "Walkman", - "manufacturer": "Sony", + "manufacturer": "The Proclaimers", }, // The filter is on User, so all devices belonging to it will be returned { @@ -580,11 +580,11 @@ func TestQueryWithIndexOnOneToMany_IfFilterOnIndexedRelation_ShouldFilterWithExp "devices": []map[string]any{ { "model": "Walkman", - "manufacturer": "The Proclaimers", + "manufacturer": "Sony", }, { "model": "Walkman", - "manufacturer": "Sony", + "manufacturer": "The Proclaimers", }, { "model": "Running Man", diff --git a/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go b/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go index 6fedc0c8c2..fe66f58d98 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go @@ -191,7 +191,7 @@ func TestMutationCreateOneToMany_AliasedRelationNameToLinkFromManySide(t *testin func TestMutationUpdateOneToMany_AliasRelationNameAndInternalIDBothProduceSameDocID(t *testing.T) { // These IDs MUST be shared by both tests below. - bookID := "bae-e4888569-d423-58b7-94c5-5886e3cffe22" + bookID := "bae-1d943ec7-1701-5910-a467-7d863beada5d" nonAliasedTest := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from single side (wrong)", diff --git a/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go b/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go index f626483375..dcbee56e37 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go @@ -49,7 +49,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. Results: map[string]any{ "create_Book": []map[string]any{ { - "_docID": "bae-e7943028-5c74-5fd4-9661-0a233edcd287", + "_docID": "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87", }, }, }, @@ -64,7 +64,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. Results: map[string]any{ "create_Book": []map[string]any{ { - "_docID": "bae-7f6a5a76-b90d-5715-a452-708ded9e7ae7", + "_docID": "bae-787391fb-86f8-5cbe-8fc2-ad59f90e267a", }, }, }, @@ -88,7 +88,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. "_docID": "bae-07fd000a-d023-54b9-b8f3-a4318fac8fed", "name": "Website", "published": map[string]any{ - "_docID": "bae-e7943028-5c74-5fd4-9661-0a233edcd287", + "_docID": "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87", "name": "Book By Website", }, }, @@ -125,7 +125,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. 
"_docID": "bae-21084f46-b12a-53ab-94dd-04d075b4218c", "name": "Online", "published": map[string]any{ - "_docID": "bae-7f6a5a76-b90d-5715-a452-708ded9e7ae7", + "_docID": "bae-787391fb-86f8-5cbe-8fc2-ad59f90e267a", "name": "Book By Online", }, }, @@ -154,19 +154,19 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. Results: map[string]any{ "Book": []map[string]any{ { - "_docID": "bae-7f6a5a76-b90d-5715-a452-708ded9e7ae7", - "name": "Book By Online", + "_docID": "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87", + "name": "Book By Website", "publisher": map[string]any{ - "_docID": "bae-21084f46-b12a-53ab-94dd-04d075b4218c", - "name": "Online", + "_docID": "bae-07fd000a-d023-54b9-b8f3-a4318fac8fed", + "name": "Website", }, }, { - "_docID": "bae-e7943028-5c74-5fd4-9661-0a233edcd287", - "name": "Book By Website", + "_docID": "bae-787391fb-86f8-5cbe-8fc2-ad59f90e267a", + "name": "Book By Online", "publisher": map[string]any{ - "_docID": "bae-07fd000a-d023-54b9-b8f3-a4318fac8fed", - "name": "Website", + "_docID": "bae-21084f46-b12a-53ab-94dd-04d075b4218c", + "name": "Online", }, }, }, @@ -209,7 +209,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing Results: map[string]any{ "create_Book": []map[string]any{ { - "_docID": "bae-e7943028-5c74-5fd4-9661-0a233edcd287", + "_docID": "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87", }, }, }, @@ -224,7 +224,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing Results: map[string]any{ "create_Book": []map[string]any{ { - "_docID": "bae-7f6a5a76-b90d-5715-a452-708ded9e7ae7", + "_docID": "bae-787391fb-86f8-5cbe-8fc2-ad59f90e267a", }, }, }, @@ -245,7 +245,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing Results: map[string]any{ "Book": []map[string]any{ { - "_docID": "bae-e7943028-5c74-5fd4-9661-0a233edcd287", + "_docID": "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87", "name": "Book By Website", "publisher": map[string]any{ "_docID": "bae-07fd000a-d023-54b9-b8f3-a4318fac8fed", @@ -271,7 +271,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing Results: map[string]any{ "Book": []map[string]any{ { - "_docID": "bae-7f6a5a76-b90d-5715-a452-708ded9e7ae7", + "_docID": "bae-787391fb-86f8-5cbe-8fc2-ad59f90e267a", "name": "Book By Online", "publisher": map[string]any{ "_docID": "bae-21084f46-b12a-53ab-94dd-04d075b4218c", @@ -306,7 +306,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing "_docID": "bae-07fd000a-d023-54b9-b8f3-a4318fac8fed", "name": "Website", "published": map[string]any{ - "_docID": "bae-e7943028-5c74-5fd4-9661-0a233edcd287", + "_docID": "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87", "name": "Book By Website", }, }, @@ -315,7 +315,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing "_docID": "bae-21084f46-b12a-53ab-94dd-04d075b4218c", "name": "Online", "published": map[string]any{ - "_docID": "bae-7f6a5a76-b90d-5715-a452-708ded9e7ae7", + "_docID": "bae-787391fb-86f8-5cbe-8fc2-ad59f90e267a", "name": "Book By Online", }, }, diff --git a/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go b/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go index de72ebf893..0c94141b25 100644 --- a/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go @@ -61,14 +61,14 @@ func 
TestDeletionOfADocumentUsingSingleDocIDWithShowDeletedDocumentQuery(t *test }, testUtils.Request{ Request: `mutation { - delete_Book(docID: "bae-b5c56d8f-b2f5-57f9-b371-4e9e04903e91") { + delete_Book(docID: "bae-39db1d4b-72c0-5b7b-b6f2-c20870982128") { _docID } }`, Results: map[string]any{ "delete_Book": []map[string]any{ { - "_docID": "bae-b5c56d8f-b2f5-57f9-b371-4e9e04903e91", + "_docID": "bae-39db1d4b-72c0-5b7b-b6f2-c20870982128", }, }, }, @@ -94,13 +94,13 @@ func TestDeletionOfADocumentUsingSingleDocIDWithShowDeletedDocumentQuery(t *test "age": int64(30), "published": []map[string]any{ { - "_deleted": false, - "name": "John has a chamber of secrets", + "_deleted": true, + "name": "John and the philosopher are stoned", "rating": 9.9, }, { - "_deleted": true, - "name": "John and the philosopher are stoned", + "_deleted": false, + "name": "John has a chamber of secrets", "rating": 9.9, }, }, diff --git a/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_id_test.go b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_id_test.go index fefcc350af..2f8d5dd9f9 100644 --- a/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_id_test.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_id_test.go @@ -34,7 +34,7 @@ func TestRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { testUtils.CreateDoc{ // Authors CollectionID: 1, - // bae-455081f4-b810-5363-ab95-50dbd2ec03d0 + // bae-b4f1fb22-52f2-5e3d-950c-f6a4033d8f48 Doc: `{ "name": "Teiva Harsanyi", "age": 48, @@ -53,14 +53,14 @@ func TestRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_Author(docID: "bae-455081f4-b810-5363-ab95-50dbd2ec03d0") { + delete_Author(docID: "bae-b4f1fb22-52f2-5e3d-950c-f6a4033d8f48") { _docID } }`, Results: map[string]any{ "delete_Author": []map[string]any{ { - "_docID": "bae-455081f4-b810-5363-ab95-50dbd2ec03d0", + "_docID": "bae-b4f1fb22-52f2-5e3d-950c-f6a4033d8f48", }, }, }, @@ -84,7 +84,7 @@ func TestRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { testUtils.CreateDoc{ // Authors CollectionID: 1, - // bae-455081f4-b810-5363-ab95-50dbd2ec03d0 + // bae-b4f1fb22-52f2-5e3d-950c-f6a4033d8f48 Doc: `{ "name": "Teiva Harsanyi", "age": 48, @@ -103,14 +103,14 @@ func TestRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_Author(docID: "bae-455081f4-b810-5363-ab95-50dbd2ec03d0") { + delete_Author(docID: "bae-b4f1fb22-52f2-5e3d-950c-f6a4033d8f48") { AliasOfKey: _docID } }`, Results: map[string]any{ "delete_Author": []map[string]any{ { - "AliasOfKey": "bae-455081f4-b810-5363-ab95-50dbd2ec03d0", + "AliasOfKey": "bae-b4f1fb22-52f2-5e3d-950c-f6a4033d8f48", }, }, }, @@ -134,7 +134,7 @@ func TestRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { testUtils.CreateDoc{ // Authors CollectionID: 1, - // bae-455081f4-b810-5363-ab95-50dbd2ec03d0 + // bae-b4f1fb22-52f2-5e3d-950c-f6a4033d8f48 Doc: `{ "name": "Teiva Harsanyi", "age": 48, @@ -170,14 +170,14 @@ func TestRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_Author(docID: "bae-455081f4-b810-5363-ab95-50dbd2ec03d0") { + delete_Author(docID: "bae-b4f1fb22-52f2-5e3d-950c-f6a4033d8f48") { Key: _docID } }`, Results: map[string]any{ "delete_Author": []map[string]any{ { - "Key": "bae-455081f4-b810-5363-ab95-50dbd2ec03d0", + "Key": 
"bae-b4f1fb22-52f2-5e3d-950c-f6a4033d8f48", }, }, }, diff --git a/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go index d7be187d49..6a423255d8 100644 --- a/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go @@ -22,16 +22,6 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideForwardDirection(t *testing.T) { test := testUtils.TestCase{ Description: "Delete related doc with transaction from primary side (forward).", Actions: []any{ - testUtils.CreateDoc{ - // books - CollectionID: 0, - // "_docID": "bae-e7943028-5c74-5fd4-9661-0a233edcd287", - Doc: `{ - "name": "Book By Website", - "rating": 4.0, - "publisher_id": "bae-07fd000a-d023-54b9-b8f3-a4318fac8fed" - }`, - }, testUtils.CreateDoc{ // publishers CollectionID: 2, @@ -41,18 +31,28 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideForwardDirection(t *testing.T) { "address": "Manning Publications" }`, }, + testUtils.CreateDoc{ + // books + CollectionID: 0, + // "_docID": "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87", + Doc: `{ + "name": "Book By Website", + "rating": 4.0, + "publisher_id": "bae-07fd000a-d023-54b9-b8f3-a4318fac8fed" + }`, + }, testUtils.Request{ // Delete a linked book that exists. TransactionID: immutable.Some(0), Request: `mutation { - delete_Book(docID: "bae-e7943028-5c74-5fd4-9661-0a233edcd287") { + delete_Book(docID: "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87") { _docID } }`, Results: map[string]any{ "delete_Book": []map[string]any{ { - "_docID": "bae-e7943028-5c74-5fd4-9661-0a233edcd287", + "_docID": "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87", }, }, }, @@ -95,7 +95,7 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideBackwardDirection(t *testing.T) { testUtils.CreateDoc{ // books CollectionID: 0, - // "_docID": "bae-e7943028-5c74-5fd4-9661-0a233edcd287", + // "_docID": "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87", Doc: `{ "name": "Book By Website", "rating": 4.0, @@ -115,14 +115,14 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideBackwardDirection(t *testing.T) { // Delete a linked book that exists. TransactionID: immutable.Some(0), Request: `mutation { - delete_Book(docID: "bae-e7943028-5c74-5fd4-9661-0a233edcd287") { + delete_Book(docID: "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87") { _docID } }`, Results: map[string]any{ "delete_Book": []map[string]any{ { - "_docID": "bae-e7943028-5c74-5fd4-9661-0a233edcd287", + "_docID": "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87", }, }, }, @@ -159,7 +159,7 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnForwardDirection(t *tes testUtils.CreateDoc{ // books CollectionID: 0, - // "_docID": "bae-e7943028-5c74-5fd4-9661-0a233edcd287", + // "_docID": "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87", Doc: `{ "name": "Book By Website", "rating": 4.0, @@ -179,14 +179,14 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnForwardDirection(t *tes // Delete a linked book that exists. 
TransactionID: immutable.Some(0), Request: `mutation { - delete_Book(docID: "bae-e7943028-5c74-5fd4-9661-0a233edcd287") { + delete_Book(docID: "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87") { _docID } }`, Results: map[string]any{ "delete_Book": []map[string]any{ { - "_docID": "bae-e7943028-5c74-5fd4-9661-0a233edcd287", + "_docID": "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87", }, }, }, @@ -210,7 +210,7 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnForwardDirection(t *tes "_docID": "bae-07fd000a-d023-54b9-b8f3-a4318fac8fed", "name": "Website", "published": map[string]any{ - "_docID": "bae-e7943028-5c74-5fd4-9661-0a233edcd287", + "_docID": "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87", "name": "Book By Website", }, }, @@ -255,7 +255,7 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnBackwardDirection(t *te testUtils.CreateDoc{ // books CollectionID: 0, - // "_docID": "bae-e7943028-5c74-5fd4-9661-0a233edcd287", + // "_docID": "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87", Doc: `{ "name": "Book By Website", "rating": 4.0, @@ -275,14 +275,14 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnBackwardDirection(t *te // Delete a linked book that exists in transaction 0. TransactionID: immutable.Some(0), Request: `mutation { - delete_Book(docID: "bae-e7943028-5c74-5fd4-9661-0a233edcd287") { + delete_Book(docID: "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87") { _docID } }`, Results: map[string]any{ "delete_Book": []map[string]any{ { - "_docID": "bae-e7943028-5c74-5fd4-9661-0a233edcd287", + "_docID": "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87", }, }, }, @@ -303,7 +303,7 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnBackwardDirection(t *te Results: map[string]any{ "Book": []map[string]any{ { - "_docID": "bae-e7943028-5c74-5fd4-9661-0a233edcd287", + "_docID": "bae-5a378128-1b3f-50e7-a5ff-027e707c4b87", "name": "Book By Website", "publisher": map[string]any{ "_docID": "bae-07fd000a-d023-54b9-b8f3-a4318fac8fed", @@ -345,7 +345,7 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideForwardDirection(t *testing.T) testUtils.CreateDoc{ // books CollectionID: 0, - // "_docID": "bae-7f6a5a76-b90d-5715-a452-708ded9e7ae7", + // "_docID": "bae-787391fb-86f8-5cbe-8fc2-ad59f90e267a", Doc: `{ "name": "Book By Online", "rating": 4.0, @@ -410,7 +410,7 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideBackwardDirection(t *testing.T testUtils.CreateDoc{ // books CollectionID: 0, - // "_docID": "bae-7f6a5a76-b90d-5715-a452-708ded9e7ae7", + // "_docID": "bae-787391fb-86f8-5cbe-8fc2-ad59f90e267a", Doc: `{ "name": "Book By Online", "rating": 4.0, @@ -461,7 +461,7 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideBackwardDirection(t *testing.T Results: map[string]any{ "Book": []map[string]any{ { - "_docID": "bae-7f6a5a76-b90d-5715-a452-708ded9e7ae7", + "_docID": "bae-787391fb-86f8-5cbe-8fc2-ad59f90e267a", "name": "Book By Online", "publisher": nil, }, diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go index 5996dde5ea..eda176bbcc 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go @@ -20,8 +20,8 @@ import ( ) func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromPrimarySide(t *testing.T) { - author1ID := "bae-42d197b8-d14f-5570-a55d-9e8714b2a82a" - bookID := "bae-dfce6a1a-27fa-5dde-bea7-44df2dffac1a" + author1ID := 
"bae-53eff350-ad8e-532c-b72d-f95c4f47909c" + bookID := "bae-89d64ba1-44e3-5d75-a610-7226077ece48" test := testUtils.TestCase{ Description: "One to one update mutation using alias relation id from single side", @@ -66,8 +66,8 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromPrimarySide(t *testin } func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySide(t *testing.T) { - author1ID := "bae-42d197b8-d14f-5570-a55d-9e8714b2a82a" - author2ID := "bae-a34d8759-e549-5083-8ba6-e04038c41caa" + author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" + author2ID := "bae-c058cfd4-259f-5b08-975d-106f13a143d5" test := testUtils.TestCase{ Description: "One to one update mutation using alias relation id from secondary side", @@ -112,7 +112,7 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySide(t *test } func TestMutationUpdateOneToOne_AliasWithInvalidLengthRelationIDToLink_Error(t *testing.T) { - author1ID := "bae-42d197b8-d14f-5570-a55d-9e8714b2a82a" + author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" invalidLenSubID := "35953ca-518d-9e6b-9ce6cd00eff5" invalidAuthorID := "bae-" + invalidLenSubID @@ -153,7 +153,7 @@ func TestMutationUpdateOneToOne_AliasWithInvalidLengthRelationIDToLink_Error(t * } func TestMutationUpdateOneToOne_InvalidAliasRelationNameToLinkFromSecondarySide_Error(t *testing.T) { - author1ID := "bae-42d197b8-d14f-5570-a55d-9e8714b2a82a" + author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" invalidAuthorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ee" test := testUtils.TestCase{ @@ -193,8 +193,8 @@ func TestMutationUpdateOneToOne_InvalidAliasRelationNameToLinkFromSecondarySide_ } func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySideWithWrongField_Error(t *testing.T) { - author1ID := "bae-42d197b8-d14f-5570-a55d-9e8714b2a82a" - author2ID := "bae-a34d8759-e549-5083-8ba6-e04038c41caa" + author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" + author2ID := "bae-c058cfd4-259f-5b08-975d-106f13a143d5" test := testUtils.TestCase{ Description: "One to one update mutation using relation alias name from secondary side, with a wrong field.", diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_self_ref_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_self_ref_test.go index 841d30d80c..dd630c0721 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_one/with_self_ref_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_self_ref_test.go @@ -18,7 +18,7 @@ import ( ) func TestMutationUpdateOneToOne_SelfReferencingFromPrimary(t *testing.T) { - user1ID := "bae-ec56fb02-88fb-5113-b4d8-1b9be5f2217b" + user1ID := "bae-93b58e20-b3e1-55b9-b5b8-0617fabe710e" test := testUtils.TestCase{ Description: "One to one update mutation, self referencing from primary", @@ -63,16 +63,16 @@ func TestMutationUpdateOneToOne_SelfReferencingFromPrimary(t *testing.T) { }`, Results: map[string]any{ "User": []map[string]any{ + { + "name": "John", + "boss": nil, + }, { "name": "Fred", "boss": map[string]any{ "name": "John", }, }, - { - "name": "John", - "boss": nil, - }, }, }, }, @@ -88,16 +88,16 @@ func TestMutationUpdateOneToOne_SelfReferencingFromPrimary(t *testing.T) { }`, Results: map[string]any{ "User": []map[string]any{ - { - "name": "Fred", - "underling": nil, - }, { "name": "John", "underling": map[string]any{ "name": "Fred", }, }, + { + "name": "Fred", + "underling": nil, + }, }, }, }, @@ -108,7 +108,7 @@ func TestMutationUpdateOneToOne_SelfReferencingFromPrimary(t 
*testing.T) { } func TestMutationUpdateOneToOne_SelfReferencingFromSecondary(t *testing.T) { - user1ID := "bae-12a7f594-f02e-53b7-81c4-aba27b2e7ea7" + user1ID := "bae-a86ab69e-a2be-54b9-b66e-4e30d6778ffe" test := testUtils.TestCase{ Description: "One to one update mutation, self referencing from secondary", @@ -154,16 +154,16 @@ func TestMutationUpdateOneToOne_SelfReferencingFromSecondary(t *testing.T) { }`, Results: map[string]any{ "User": []map[string]any{ - { - "name": "John", - "boss": nil, - }, { "name": "Fred", "boss": map[string]any{ "name": "John", }, }, + { + "name": "John", + "boss": nil, + }, }, }, }, @@ -179,16 +179,16 @@ func TestMutationUpdateOneToOne_SelfReferencingFromSecondary(t *testing.T) { }`, Results: map[string]any{ "User": []map[string]any{ + { + "name": "Fred", + "underling": nil, + }, { "name": "John", "underling": map[string]any{ "name": "Fred", }, }, - { - "name": "Fred", - "underling": nil, - }, }, }, }, diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go index 8cbbe963f3..b2b3859d2d 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go @@ -139,7 +139,7 @@ func TestMutationUpdateOneToOne(t *testing.T) { } func TestMutationUpdateOneToOneSecondarySide(t *testing.T) { - authorID := "bae-42d197b8-d14f-5570-a55d-9e8714b2a82a" + authorID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" test := testUtils.TestCase{ Description: "One to one create mutation, from the secondary side", @@ -214,8 +214,8 @@ func TestMutationUpdateOneToOneSecondarySide(t *testing.T) { } func TestMutationUpdateOneToOne_RelationIDToLinkFromPrimarySide(t *testing.T) { - author1ID := "bae-42d197b8-d14f-5570-a55d-9e8714b2a82a" - bookID := "bae-dfce6a1a-27fa-5dde-bea7-44df2dffac1a" + author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" + bookID := "bae-89d64ba1-44e3-5d75-a610-7226077ece48" test := testUtils.TestCase{ Description: "One to one update mutation using relation id from single side (wrong)", @@ -260,8 +260,8 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromPrimarySide(t *testing.T) { } func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySide(t *testing.T) { - author1ID := "bae-42d197b8-d14f-5570-a55d-9e8714b2a82a" - author2ID := "bae-a34d8759-e549-5083-8ba6-e04038c41caa" + author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" + author2ID := "bae-c058cfd4-259f-5b08-975d-106f13a143d5" test := testUtils.TestCase{ Description: "One to one update mutation using relation id from secondary side", @@ -306,7 +306,7 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySide(t *testing.T) } func TestMutationUpdateOneToOne_InvalidLengthRelationIDToLink_Error(t *testing.T) { - author1ID := "bae-42d197b8-d14f-5570-a55d-9e8714b2a82a" + author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" invalidLenSubID := "35953ca-518d-9e6b-9ce6cd00eff5" invalidAuthorID := "bae-" + invalidLenSubID @@ -347,7 +347,7 @@ func TestMutationUpdateOneToOne_InvalidLengthRelationIDToLink_Error(t *testing.T } func TestMutationUpdateOneToOne_InvalidRelationIDToLinkFromSecondarySide_Error(t *testing.T) { - author1ID := "bae-42d197b8-d14f-5570-a55d-9e8714b2a82a" + author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" invalidAuthorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ee" test := testUtils.TestCase{ @@ -387,8 +387,8 @@ func 
TestMutationUpdateOneToOne_InvalidRelationIDToLinkFromSecondarySide_Error(t } func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySideWithWrongField_Error(t *testing.T) { - author1ID := "bae-42d197b8-d14f-5570-a55d-9e8714b2a82a" - author2ID := "bae-a34d8759-e549-5083-8ba6-e04038c41caa" + author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" + author2ID := "bae-c058cfd4-259f-5b08-975d-106f13a143d5" test := testUtils.TestCase{ Description: "One to one update mutation using relation id from secondary side, with a wrong field.", diff --git a/tests/integration/query/one_to_many/with_cid_doc_id_test.go b/tests/integration/query/one_to_many/with_cid_doc_id_test.go index becc516dbc..9811a4c54a 100644 --- a/tests/integration/query/one_to_many/with_cid_doc_id_test.go +++ b/tests/integration/query/one_to_many/with_cid_doc_id_test.go @@ -104,8 +104,8 @@ func TestQueryOneToManyWithCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafyreicjhmyweoyzopsqf7qc4uqqpq7mwnqlpsfb2rzk3j2jg3a4d6fqy4" - docID: "bae-5366ba09-54e8-5381-8169-a770aa9282ae" + cid: "bafyreieytzivxtdjslivrsim22xkszg7sxy4onmp737u5uxf7v2cxvzikm" + docID: "bae-064f13c1-7726-5d53-8eec-c395d94da4d0" ) { name author { @@ -181,8 +181,8 @@ func TestQueryOneToManyWithChildUpdateAndFirstCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafyreicjhmyweoyzopsqf7qc4uqqpq7mwnqlpsfb2rzk3j2jg3a4d6fqy4", - docID: "bae-5366ba09-54e8-5381-8169-a770aa9282ae" + cid: "bafyreieytzivxtdjslivrsim22xkszg7sxy4onmp737u5uxf7v2cxvzikm", + docID: "bae-064f13c1-7726-5d53-8eec-c395d94da4d0" ) { name author { @@ -256,8 +256,8 @@ func TestQueryOneToManyWithParentUpdateAndFirstCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafyreicjhmyweoyzopsqf7qc4uqqpq7mwnqlpsfb2rzk3j2jg3a4d6fqy4", - docID: "bae-5366ba09-54e8-5381-8169-a770aa9282ae" + cid: "bafyreieytzivxtdjslivrsim22xkszg7sxy4onmp737u5uxf7v2cxvzikm", + docID: "bae-064f13c1-7726-5d53-8eec-c395d94da4d0" ) { name rating @@ -331,8 +331,8 @@ func TestQueryOneToManyWithParentUpdateAndLastCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafyreihylh2iftquu5vukm2myjrfbkjnpr5vonlp5s5oo22bfrhddkju6e", - docID: "bae-5366ba09-54e8-5381-8169-a770aa9282ae" + cid: "bafyreia2sayewutxhcewm2ek2p6nwwg6zzeugrxsnwjyvam4pplydkjmz4", + docID: "bae-064f13c1-7726-5d53-8eec-c395d94da4d0" ) { name rating diff --git a/tests/integration/query/one_to_many/with_count_limit_offset_test.go b/tests/integration/query/one_to_many/with_count_limit_offset_test.go index ee0dc26e61..b4c55ed720 100644 --- a/tests/integration/query/one_to_many/with_count_limit_offset_test.go +++ b/tests/integration/query/one_to_many/with_count_limit_offset_test.go @@ -98,10 +98,10 @@ func TestQueryOneToManyWithCountAndLimitAndOffset(t *testing.T) { "_count": 4, "published": []map[string]any{ { - "name": "Painted House", + "name": "The Pelican Brief", }, { - "name": "The Pelican Brief", + "name": "The Firm", }, }, }, @@ -192,10 +192,10 @@ func TestQueryOneToManyWithCountAndDifferentOffsets(t *testing.T) { "_count": 2, "published": []map[string]any{ { - "name": "The Associate", + "name": "Painted House", }, { - "name": "Painted House", + "name": "The Associate", }, }, }, diff --git a/tests/integration/query/one_to_many/with_count_limit_test.go b/tests/integration/query/one_to_many/with_count_limit_test.go index 0b1cdaae96..0611226cc2 100644 --- a/tests/integration/query/one_to_many/with_count_limit_test.go +++ 
b/tests/integration/query/one_to_many/with_count_limit_test.go @@ -177,7 +177,7 @@ func TestQueryOneToManyWithCountAndDifferentLimits(t *testing.T) { "_count": 2, "published": []map[string]any{ { - "name": "The Associate", + "name": "Painted House", }, }, }, diff --git a/tests/integration/query/one_to_many/with_doc_id_test.go b/tests/integration/query/one_to_many/with_doc_id_test.go index 86551c5934..7bb494dba3 100644 --- a/tests/integration/query/one_to_many/with_doc_id_test.go +++ b/tests/integration/query/one_to_many/with_doc_id_test.go @@ -49,7 +49,7 @@ func TestQueryOneToManyWithChildDocID(t *testing.T) { Author { name published ( - docID: "bae-5366ba09-54e8-5381-8169-a770aa9282ae" + docID: "bae-064f13c1-7726-5d53-8eec-c395d94da4d0" ) { name } diff --git a/tests/integration/query/one_to_many/with_doc_ids_test.go b/tests/integration/query/one_to_many/with_doc_ids_test.go index b704cd0ddd..5b561fe98e 100644 --- a/tests/integration/query/one_to_many/with_doc_ids_test.go +++ b/tests/integration/query/one_to_many/with_doc_ids_test.go @@ -65,7 +65,7 @@ func TestQueryOneToManyWithChildDocIDs(t *testing.T) { Author { name published ( - docIDs: ["bae-5366ba09-54e8-5381-8169-a770aa9282ae", "bae-1ccf3043-d760-543e-be1b-6691fa6aa7a8"] + docIDs: ["bae-064f13c1-7726-5d53-8eec-c395d94da4d0", "bae-649c8101-76b8-5d18-a701-21c97a5c66b3"] ) { name } @@ -77,10 +77,10 @@ func TestQueryOneToManyWithChildDocIDs(t *testing.T) { "name": "John Grisham", "published": []map[string]any{ { - "name": "The Associate", + "name": "Painted House", }, { - "name": "Painted House", + "name": "The Associate", }, }, }, diff --git a/tests/integration/query/one_to_many/with_filter_related_id_test.go b/tests/integration/query/one_to_many/with_filter_related_id_test.go index 4f9bffaffb..a05a042817 100644 --- a/tests/integration/query/one_to_many/with_filter_related_id_test.go +++ b/tests/integration/query/one_to_many/with_filter_related_id_test.go @@ -100,8 +100,8 @@ func TestQueryFromManySideWithEqFilterOnRelatedType(t *testing.T) { }`, Results: map[string]any{ "Book": []map[string]any{ - {"name": "The Client"}, {"name": "Painted House"}, + {"name": "The Client"}, {"name": "A Time for Mercy"}, }, }, @@ -196,8 +196,8 @@ func TestQueryFromManySideWithFilterOnRelatedObjectID(t *testing.T) { }`, Results: map[string]any{ "Book": []map[string]any{ - {"name": "The Client"}, {"name": "Painted House"}, + {"name": "The Client"}, {"name": "A Time for Mercy"}, }, }, @@ -297,8 +297,8 @@ func TestQueryFromManySideWithSameFiltersInDifferentWayOnRelatedType(t *testing. 
}`, Results: map[string]any{ "Book": []map[string]any{ - {"name": "The Client"}, {"name": "Painted House"}, + {"name": "The Client"}, {"name": "A Time for Mercy"}, }, }, @@ -387,7 +387,7 @@ func TestQueryFromSingleSideWithEqFilterOnRelatedType(t *testing.T) { }, testUtils.Request{ Request: `query { - Author(filter: {published: {_docID: {_eq: "bae-96c9de0f-2903-5589-9604-b42882afde8c"}}}) { + Author(filter: {published: {_docID: {_eq: "bae-064f13c1-7726-5d53-8eec-c395d94da4d0"}}}) { name } }`, @@ -483,11 +483,11 @@ func TestQueryFromSingleSideWithFilterOnRelatedObjectID_Error(t *testing.T) { }, testUtils.Request{ Request: `query { - Author(filter: {published_id: {_eq: "bae-5366ba09-54e8-5381-8169-a770aa9282ae"}}) { + Author(filter: {published_id: {_eq: "bae-064f13c1-7726-5d53-8eec-c395d94da4d0"}}) { name } }`, - ExpectedError: "Argument \"filter\" has invalid value {published_id: {_eq: \"bae-5366ba09-54e8-5381-8169-a770aa9282ae\"}}.\nIn field \"published_id\": Unknown field.", + ExpectedError: "Argument \"filter\" has invalid value {published_id: {_eq: \"bae-064f13c1-7726-5d53-8eec-c395d94da4d0\"}}.\nIn field \"published_id\": Unknown field.", }, }, } diff --git a/tests/integration/query/one_to_many/with_group_filter_test.go b/tests/integration/query/one_to_many/with_group_filter_test.go index 5071b963cf..9356e4f334 100644 --- a/tests/integration/query/one_to_many/with_group_filter_test.go +++ b/tests/integration/query/one_to_many/with_group_filter_test.go @@ -273,14 +273,14 @@ func TestQueryOneToManyWithParentJoinGroupNumberAndNumberFilterOnGroup(t *testin { "name": "John Grisham", "published": []map[string]any{ - { - "name": "The Client", - "rating": 4.5, - }, { "name": "Painted House", "rating": 4.9, }, + { + "name": "The Client", + "rating": 4.5, + }, { "name": "A Time for Mercy", "rating": 4.5, diff --git a/tests/integration/query/one_to_many/with_group_related_id_alias_test.go b/tests/integration/query/one_to_many/with_group_related_id_alias_test.go index 2847b0da2c..ad35a32470 100644 --- a/tests/integration/query/one_to_many/with_group_related_id_alias_test.go +++ b/tests/integration/query/one_to_many/with_group_related_id_alias_test.go @@ -128,16 +128,16 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeFromManySideUsingAlias(t *t "author_id": "bae-e1ea288f-09fa-55fa-b0b5-0ac8941ea35b", "_group": []map[string]any{ { - "name": "The Client", - "rating": 4.5, + "name": "Painted House", + "rating": 4.9, "author": map[string]any{ "age": int64(65), "name": "John Grisham", }, }, { - "name": "Painted House", - "rating": 4.9, + "name": "The Client", + "rating": 4.5, "author": map[string]any{ "age": int64(65), "name": "John Grisham", @@ -153,19 +153,6 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeFromManySideUsingAlias(t *t }, }, }, - { - "author_id": "bae-34a9bd41-1f0d-5748-8446-48fc36ef2614", - "_group": []map[string]any{ - { - "name": "Histoiare des Celtes et particulierement des Gaulois et des Germains depuis les temps fabuleux jusqua la prise de Roze par les Gaulois", - "rating": 2.0, - "author": map[string]any{ - "age": int64(327), - "name": "Simon Pelloutier", - }, - }, - }, - }, { "author_id": "bae-1594d2aa-d63c-51d2-8e5e-06ee0c9e2e8c", "_group": []map[string]any{ @@ -187,6 +174,19 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeFromManySideUsingAlias(t *t }, }, }, + { + "author_id": "bae-34a9bd41-1f0d-5748-8446-48fc36ef2614", + "_group": []map[string]any{ + { + "name": "Histoiare des Celtes et particulierement des Gaulois et des Germains depuis les temps 
fabuleux jusqua la prise de Roze par les Gaulois", + "rating": 2.0, + "author": map[string]any{ + "age": int64(327), + "name": "Simon Pelloutier", + }, + }, + }, + }, }, }, }, @@ -314,16 +314,16 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeFromManySideUsingAliasAndRe }, "_group": []map[string]any{ { - "name": "The Client", - "rating": 4.5, + "name": "Painted House", + "rating": 4.9, "author": map[string]any{ "age": int64(65), "name": "John Grisham", }, }, { - "name": "Painted House", - "rating": 4.9, + "name": "The Client", + "rating": 4.5, "author": map[string]any{ "age": int64(65), "name": "John Grisham", @@ -339,22 +339,6 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeFromManySideUsingAliasAndRe }, }, }, - { - "author": map[string]any{ - "name": "Simon Pelloutier", - "_docID": "bae-34a9bd41-1f0d-5748-8446-48fc36ef2614", - }, - "_group": []map[string]any{ - { - "name": "Histoiare des Celtes et particulierement des Gaulois et des Germains depuis les temps fabuleux jusqua la prise de Roze par les Gaulois", - "rating": 2.0, - "author": map[string]any{ - "age": int64(327), - "name": "Simon Pelloutier", - }, - }, - }, - }, { "author": map[string]any{ "name": "Voltaire", @@ -379,6 +363,22 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeFromManySideUsingAliasAndRe }, }, }, + { + "author": map[string]any{ + "name": "Simon Pelloutier", + "_docID": "bae-34a9bd41-1f0d-5748-8446-48fc36ef2614", + }, + "_group": []map[string]any{ + { + "name": "Histoiare des Celtes et particulierement des Gaulois et des Germains depuis les temps fabuleux jusqua la prise de Roze par les Gaulois", + "rating": 2.0, + "author": map[string]any{ + "age": int64(327), + "name": "Simon Pelloutier", + }, + }, + }, + }, }, }, }, @@ -500,16 +500,16 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeWithIDSelectionFromManySide "author_id": "bae-e1ea288f-09fa-55fa-b0b5-0ac8941ea35b", "_group": []map[string]any{ { - "name": "The Client", - "rating": 4.5, + "name": "Painted House", + "rating": 4.9, "author": map[string]any{ "age": int64(65), "name": "John Grisham", }, }, { - "name": "Painted House", - "rating": 4.9, + "name": "The Client", + "rating": 4.5, "author": map[string]any{ "age": int64(65), "name": "John Grisham", @@ -525,19 +525,6 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeWithIDSelectionFromManySide }, }, }, - { - "author_id": "bae-34a9bd41-1f0d-5748-8446-48fc36ef2614", - "_group": []map[string]any{ - { - "name": "Histoiare des Celtes et particulierement des Gaulois et des Germains depuis les temps fabuleux jusqua la prise de Roze par les Gaulois", - "rating": 2.0, - "author": map[string]any{ - "age": int64(327), - "name": "Simon Pelloutier", - }, - }, - }, - }, { "author_id": "bae-1594d2aa-d63c-51d2-8e5e-06ee0c9e2e8c", "_group": []map[string]any{ @@ -559,6 +546,19 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeWithIDSelectionFromManySide }, }, }, + { + "author_id": "bae-34a9bd41-1f0d-5748-8446-48fc36ef2614", + "_group": []map[string]any{ + { + "name": "Histoiare des Celtes et particulierement des Gaulois et des Germains depuis les temps fabuleux jusqua la prise de Roze par les Gaulois", + "rating": 2.0, + "author": map[string]any{ + "age": int64(327), + "name": "Simon Pelloutier", + }, + }, + }, + }, }, }, }, @@ -688,16 +688,16 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeWithIDSelectionFromManySide }, "_group": []map[string]any{ { - "name": "The Client", - "rating": 4.5, + "name": "Painted House", + "rating": 4.9, "author": map[string]any{ "age": 
int64(65), "name": "John Grisham", }, }, { - "name": "Painted House", - "rating": 4.9, + "name": "The Client", + "rating": 4.5, "author": map[string]any{ "age": int64(65), "name": "John Grisham", @@ -713,23 +713,6 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeWithIDSelectionFromManySide }, }, }, - { - "author_id": "bae-34a9bd41-1f0d-5748-8446-48fc36ef2614", - "author": map[string]any{ - "name": "Simon Pelloutier", - "_docID": "bae-34a9bd41-1f0d-5748-8446-48fc36ef2614", - }, - "_group": []map[string]any{ - { - "name": "Histoiare des Celtes et particulierement des Gaulois et des Germains depuis les temps fabuleux jusqua la prise de Roze par les Gaulois", - "rating": 2.0, - "author": map[string]any{ - "age": int64(327), - "name": "Simon Pelloutier", - }, - }, - }, - }, { "author_id": "bae-1594d2aa-d63c-51d2-8e5e-06ee0c9e2e8c", "author": map[string]any{ @@ -755,6 +738,23 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeWithIDSelectionFromManySide }, }, }, + { + "author_id": "bae-34a9bd41-1f0d-5748-8446-48fc36ef2614", + "author": map[string]any{ + "name": "Simon Pelloutier", + "_docID": "bae-34a9bd41-1f0d-5748-8446-48fc36ef2614", + }, + "_group": []map[string]any{ + { + "name": "Histoiare des Celtes et particulierement des Gaulois et des Germains depuis les temps fabuleux jusqua la prise de Roze par les Gaulois", + "rating": 2.0, + "author": map[string]any{ + "age": int64(327), + "name": "Simon Pelloutier", + }, + }, + }, + }, }, }, }, diff --git a/tests/integration/query/one_to_many/with_group_related_id_test.go b/tests/integration/query/one_to_many/with_group_related_id_test.go index 86d391a2f6..5ab39a909b 100644 --- a/tests/integration/query/one_to_many/with_group_related_id_test.go +++ b/tests/integration/query/one_to_many/with_group_related_id_test.go @@ -112,16 +112,16 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeIDFromManySide(t *testing.T "author_id": "bae-e1ea288f-09fa-55fa-b0b5-0ac8941ea35b", "_group": []map[string]any{ { - "name": "The Client", - "rating": 4.5, + "name": "Painted House", + "rating": 4.9, "author": map[string]any{ "age": int64(65), "name": "John Grisham", }, }, { - "name": "Painted House", - "rating": 4.9, + "name": "The Client", + "rating": 4.5, "author": map[string]any{ "age": int64(65), "name": "John Grisham", @@ -137,19 +137,6 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeIDFromManySide(t *testing.T }, }, }, - { - "author_id": "bae-34a9bd41-1f0d-5748-8446-48fc36ef2614", - "_group": []map[string]any{ - { - "name": "Histoiare des Celtes et particulierement des Gaulois et des Germains depuis les temps fabuleux jusqua la prise de Roze par les Gaulois", - "rating": 2.0, - "author": map[string]any{ - "age": int64(327), - "name": "Simon Pelloutier", - }, - }, - }, - }, { "author_id": "bae-1594d2aa-d63c-51d2-8e5e-06ee0c9e2e8c", "_group": []map[string]any{ @@ -171,6 +158,19 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeIDFromManySide(t *testing.T }, }, }, + { + "author_id": "bae-34a9bd41-1f0d-5748-8446-48fc36ef2614", + "_group": []map[string]any{ + { + "name": "Histoiare des Celtes et particulierement des Gaulois et des Germains depuis les temps fabuleux jusqua la prise de Roze par les Gaulois", + "rating": 2.0, + "author": map[string]any{ + "age": int64(327), + "name": "Simon Pelloutier", + }, + }, + }, + }, }, }, }, @@ -276,16 +276,16 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeIDWithIDSelectionFromManySi "author_id": "bae-e1ea288f-09fa-55fa-b0b5-0ac8941ea35b", "_group": []map[string]any{ { - "name": "The 
Client", - "rating": 4.5, + "name": "Painted House", + "rating": 4.9, "author": map[string]any{ "age": int64(65), "name": "John Grisham", }, }, { - "name": "Painted House", - "rating": 4.9, + "name": "The Client", + "rating": 4.5, "author": map[string]any{ "age": int64(65), "name": "John Grisham", @@ -301,19 +301,6 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeIDWithIDSelectionFromManySi }, }, }, - { - "author_id": "bae-34a9bd41-1f0d-5748-8446-48fc36ef2614", - "_group": []map[string]any{ - { - "name": "Histoiare des Celtes et particulierement des Gaulois et des Germains depuis les temps fabuleux jusqua la prise de Roze par les Gaulois", - "rating": 2.0, - "author": map[string]any{ - "age": int64(327), - "name": "Simon Pelloutier", - }, - }, - }, - }, { "author_id": "bae-1594d2aa-d63c-51d2-8e5e-06ee0c9e2e8c", "_group": []map[string]any{ @@ -335,6 +322,19 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeIDWithIDSelectionFromManySi }, }, }, + { + "author_id": "bae-34a9bd41-1f0d-5748-8446-48fc36ef2614", + "_group": []map[string]any{ + { + "name": "Histoiare des Celtes et particulierement des Gaulois et des Germains depuis les temps fabuleux jusqua la prise de Roze par les Gaulois", + "rating": 2.0, + "author": map[string]any{ + "age": int64(327), + "name": "Simon Pelloutier", + }, + }, + }, + }, }, }, }, diff --git a/tests/integration/query/one_to_many/with_group_test.go b/tests/integration/query/one_to_many/with_group_test.go index 421c785dbe..b3e259b261 100644 --- a/tests/integration/query/one_to_many/with_group_test.go +++ b/tests/integration/query/one_to_many/with_group_test.go @@ -103,21 +103,21 @@ func TestQueryOneToManyWithInnerJoinGroupNumber(t *testing.T) { "age": int64(65), "published": []map[string]any{ { - "rating": 4.5, + "rating": 4.9, "_group": []map[string]any{ { - "name": "The Client", - }, - { - "name": "A Time for Mercy", + "name": "Painted House", }, }, }, { - "rating": 4.9, + "rating": 4.5, "_group": []map[string]any{ { - "name": "Painted House", + "name": "The Client", + }, + { + "name": "A Time for Mercy", }, }, }, @@ -259,14 +259,14 @@ func TestQueryOneToManyWithParentJoinGroupNumber(t *testing.T) { { "name": "John Grisham", "published": []map[string]any{ - { - "name": "The Client", - "rating": 4.5, - }, { "name": "Painted House", "rating": 4.9, }, + { + "name": "The Client", + "rating": 4.5, + }, { "name": "A Time for Mercy", "rating": 4.5, diff --git a/tests/integration/query/one_to_many/with_related_id_test.go b/tests/integration/query/one_to_many/with_related_id_test.go index 906719c532..ddc12c56f6 100644 --- a/tests/integration/query/one_to_many/with_related_id_test.go +++ b/tests/integration/query/one_to_many/with_related_id_test.go @@ -101,22 +101,14 @@ func TestQueryOneToManyWithRelatedTypeIDFromManySide(t *testing.T) { }`, Results: map[string]any{ "Book": []map[string]any{ - { - "name": "The Client", - "author_id": "bae-e1ea288f-09fa-55fa-b0b5-0ac8941ea35b", - }, { "name": "Painted House", "author_id": "bae-e1ea288f-09fa-55fa-b0b5-0ac8941ea35b", }, { - "name": "A Time for Mercy", + "name": "The Client", "author_id": "bae-e1ea288f-09fa-55fa-b0b5-0ac8941ea35b", }, - { - "name": "Histoiare des Celtes et particulierement des Gaulois et des Germains depuis les temps fabuleux jusqua la prise de Roze par les Gaulois", - "author_id": "bae-34a9bd41-1f0d-5748-8446-48fc36ef2614", - }, { "name": "Candide", "author_id": "bae-1594d2aa-d63c-51d2-8e5e-06ee0c9e2e8c", @@ -125,6 +117,14 @@ func TestQueryOneToManyWithRelatedTypeIDFromManySide(t *testing.T) { 
"name": "Zadig", "author_id": "bae-1594d2aa-d63c-51d2-8e5e-06ee0c9e2e8c", }, + { + "name": "Histoiare des Celtes et particulierement des Gaulois et des Germains depuis les temps fabuleux jusqua la prise de Roze par les Gaulois", + "author_id": "bae-34a9bd41-1f0d-5748-8446-48fc36ef2614", + }, + { + "name": "A Time for Mercy", + "author_id": "bae-e1ea288f-09fa-55fa-b0b5-0ac8941ea35b", + }, }, }, }, diff --git a/tests/integration/query/one_to_many/with_sum_limit_offset_test.go b/tests/integration/query/one_to_many/with_sum_limit_offset_test.go index 420deeca19..0bfddf48e7 100644 --- a/tests/integration/query/one_to_many/with_sum_limit_offset_test.go +++ b/tests/integration/query/one_to_many/with_sum_limit_offset_test.go @@ -83,7 +83,7 @@ func TestQueryOneToManyWithSumWithLimitAndOffset(t *testing.T) { }, { "name": "John Grisham", - "_sum": 9.4, + "_sum": 8.7, }, }, }, diff --git a/tests/integration/query/one_to_many_to_many/joins_test.go b/tests/integration/query/one_to_many_to_many/joins_test.go index 7fe84162e1..6aad836940 100644 --- a/tests/integration/query/one_to_many_to_many/joins_test.go +++ b/tests/integration/query/one_to_many_to_many/joins_test.go @@ -213,12 +213,26 @@ func TestOneToManyToManyJoinsAreLinkedProperly(t *testing.T) { "_docID": testUtils.NewDocIndex(0, 0), "book": []map[string]any{ { - "_docID": testUtils.NewDocIndex(1, 1), - "name": "Theif Lord", + "_docID": testUtils.NewDocIndex(1, 3), + "name": "Painted House", "publisher": []map[string]any{ { - "_docID": testUtils.NewDocIndex(2, 1), - "name": "Only Publisher of Theif Lord", + "_docID": testUtils.NewDocIndex(2, 2), + "name": "Only Publisher of Painted House", + }, + }, + }, + { + "_docID": testUtils.NewDocIndex(1, 5), + "name": "Sooley", + "publisher": []map[string]any{ + { + "_docID": testUtils.NewDocIndex(2, 5), + "name": "Second of Two Publishers of Sooley", + }, + { + "_docID": testUtils.NewDocIndex(2, 4), + "name": "First of Two Publishers of Sooley", }, }, }, @@ -228,12 +242,12 @@ func TestOneToManyToManyJoinsAreLinkedProperly(t *testing.T) { "publisher": []map[string]any{}, }, { - "_docID": testUtils.NewDocIndex(1, 3), - "name": "Painted House", + "_docID": testUtils.NewDocIndex(1, 1), + "name": "Theif Lord", "publisher": []map[string]any{ { - "_docID": testUtils.NewDocIndex(2, 2), - "name": "Only Publisher of Painted House", + "_docID": testUtils.NewDocIndex(2, 1), + "name": "Only Publisher of Theif Lord", }, }, }, @@ -247,20 +261,6 @@ func TestOneToManyToManyJoinsAreLinkedProperly(t *testing.T) { }, }, }, - { - "_docID": testUtils.NewDocIndex(1, 5), - "name": "Sooley", - "publisher": []map[string]any{ - { - "_docID": testUtils.NewDocIndex(2, 5), - "name": "Second of Two Publishers of Sooley", - }, - { - "_docID": testUtils.NewDocIndex(2, 4), - "name": "First of Two Publishers of Sooley", - }, - }, - }, }, }, }, diff --git a/tests/integration/query/one_to_many_to_one/fixture.go b/tests/integration/query/one_to_many_to_one/fixture.go index ac66aa098d..9fe62d6776 100644 --- a/tests/integration/query/one_to_many_to_one/fixture.go +++ b/tests/integration/query/one_to_many_to_one/fixture.go @@ -84,7 +84,7 @@ func createDocsWith6BooksAnd5Publishers() []testUtils.CreateDoc { }, { CollectionID: 1, - // "bae-7697f14d-7b32-5884-8677-344e183c14bf", Has 1 Publisher + // "bae-86f7a96a-be15-5b4d-91c7-bb6047aa4008", Has 1 Publisher DocMap: map[string]any{ "name": "Theif Lord", "rating": 4.8, @@ -93,7 +93,7 @@ func createDocsWith6BooksAnd5Publishers() []testUtils.CreateDoc { }, { CollectionID: 1, - // 
"bae-374998e0-e84d-5f6b-9e87-5edaaa2d9c7d", Has no Publisher. + // "bae-5ce5698b-5af6-5f50-a6fb-633252be8d12", Has no Publisher. DocMap: map[string]any{ "name": "The Associate", "rating": 4.2, @@ -102,7 +102,7 @@ func createDocsWith6BooksAnd5Publishers() []testUtils.CreateDoc { }, { CollectionID: 1, - // "bae-aef1d940-5ac1-5924-a87f-63ac40758b22", Has 1 Publisher + // "bae-d890c705-8a7a-57ce-88b1-ddd7827438ea", Has 1 Publisher DocMap: map[string]any{ "name": "Painted House", "rating": 4.9, @@ -111,7 +111,7 @@ func createDocsWith6BooksAnd5Publishers() []testUtils.CreateDoc { }, { CollectionID: 1, - // "bae-1d0dcbed-300a-567a-9b48-c23cd026d165", Has 1 Publisher + // "bae-fc61b19e-646a-5537-82d6-69259e4f959a", Has 1 Publisher DocMap: map[string]any{ "name": "A Time for Mercy", "rating": 4.5, @@ -120,7 +120,7 @@ func createDocsWith6BooksAnd5Publishers() []testUtils.CreateDoc { }, { CollectionID: 1, - // "bae-ee6b8339-8a9e-58a9-9a0d-dbd8d44fa149", Has 1 Publisher + // "bae-fc9f77fd-7b26-58c3-ad29-b2bd58a877be", Has 1 Publisher DocMap: map[string]any{ "name": "Sooley", "rating": 3.2, diff --git a/tests/integration/query/one_to_many_to_one/joins_test.go b/tests/integration/query/one_to_many_to_one/joins_test.go index 1699096357..9dc921f18f 100644 --- a/tests/integration/query/one_to_many_to_one/joins_test.go +++ b/tests/integration/query/one_to_many_to_one/joins_test.go @@ -177,39 +177,39 @@ func TestOneToManyToOneJoinsAreLinkedProperly(t *testing.T) { "_docID": "bae-7aabc9d2-fbbc-5911-b0d0-b49a2a1d0e84", "book": []map[string]any{ { - "_docID": "bae-1d0dcbed-300a-567a-9b48-c23cd026d165", - "name": "A Time for Mercy", - "publisher": map[string]any{ - "_docID": "bae-2bad7de3-0f1a-56c0-b499-a552debef4b8", - "name": "Only Publisher of A Time for Mercy", - }, - }, - { - "_docID": "bae-374998e0-e84d-5f6b-9e87-5edaaa2d9c7d", + "_docID": "bae-5ce5698b-5af6-5f50-a6fb-633252be8d12", "name": "The Associate", "publisher": nil, }, { - "_docID": "bae-7697f14d-7b32-5884-8677-344e183c14bf", + "_docID": "bae-86f7a96a-be15-5b4d-91c7-bb6047aa4008", "name": "Theif Lord", "publisher": map[string]any{ - "_docID": "bae-d43823c0-0bb6-58a9-a098-1826dffa4e4a", + "_docID": "bae-6223fba1-5461-5e47-9682-6c769c8e5518", "name": "Only Publisher of Theif Lord", }, }, { - "_docID": "bae-aef1d940-5ac1-5924-a87f-63ac40758b22", + "_docID": "bae-d890c705-8a7a-57ce-88b1-ddd7827438ea", "name": "Painted House", "publisher": map[string]any{ - "_docID": "bae-a104397b-7804-5cd0-93e5-c3986b4e5e71", + "_docID": "bae-de7d087b-d33f-5b4b-b0e4-79de4335d9ed", "name": "Only Publisher of Painted House", }, }, { - "_docID": "bae-ee6b8339-8a9e-58a9-9a0d-dbd8d44fa149", + "_docID": "bae-fc61b19e-646a-5537-82d6-69259e4f959a", + "name": "A Time for Mercy", + "publisher": map[string]any{ + "_docID": "bae-5fd29915-86c6-5e9f-863a-a03292206b8c", + "name": "Only Publisher of A Time for Mercy", + }, + }, + { + "_docID": "bae-fc9f77fd-7b26-58c3-ad29-b2bd58a877be", "name": "Sooley", "publisher": map[string]any{ - "_docID": "bae-efeca601-cce1-5289-b392-85fa5b7bc0f7", + "_docID": "bae-e2cc19bd-4b3e-5cbe-9146-fb24f5913566", "name": "Only Publisher of Sooley", }, }, @@ -220,10 +220,10 @@ func TestOneToManyToOneJoinsAreLinkedProperly(t *testing.T) { "_docID": "bae-fb2a1852-3951-5ce9-a3bf-6825202f201b", "book": []map[string]any{ { - "_docID": "bae-1867d7cb-01b3-572f-a993-1c3f22f46526", + "_docID": "bae-5a5ef6dd-0c2b-5cd0-a644-f0c47a640565", "name": "The Rooster Bar", "publisher": map[string]any{ - "_docID": "bae-09af7e39-8596-584f-8825-cb430c4156b3", + "_docID": 
"bae-0020b43b-500c-57d0-81b3-43342c9d8d1d", "name": "Only Publisher of The Rooster Bar", }, }, diff --git a/tests/integration/query/one_to_many_to_one/simple_test.go b/tests/integration/query/one_to_many_to_one/simple_test.go index da6206c1e7..d87d3d7f23 100644 --- a/tests/integration/query/one_to_many_to_one/simple_test.go +++ b/tests/integration/query/one_to_many_to_one/simple_test.go @@ -61,7 +61,7 @@ func TestQueryOneToOneRelations(t *testing.T) { }, testUtils.CreateDoc{ CollectionID: 1, - // "bae-7697f14d-7b32-5884-8677-344e183c14bf", Has 1 Publisher + // "bae-86f7a96a-be15-5b4d-91c7-bb6047aa4008", Has 1 Publisher DocMap: map[string]any{ "name": "Theif Lord", "rating": 4.8, @@ -70,7 +70,7 @@ func TestQueryOneToOneRelations(t *testing.T) { }, testUtils.CreateDoc{ CollectionID: 1, - // "bae-374998e0-e84d-5f6b-9e87-5edaaa2d9c7d", Has no Publisher. + // "bae-5ce5698b-5af6-5f50-a6fb-633252be8d12", Has no Publisher. DocMap: map[string]any{ "name": "The Associate", "rating": 4.2, diff --git a/tests/integration/query/one_to_many_to_one/with_filter_test.go b/tests/integration/query/one_to_many_to_one/with_filter_test.go index 147f71f790..07f4400091 100644 --- a/tests/integration/query/one_to_many_to_one/with_filter_test.go +++ b/tests/integration/query/one_to_many_to_one/with_filter_test.go @@ -241,12 +241,6 @@ func TestOneToManyToOneWithTwoLevelDeepFilter(t *testing.T) { "Author": []map[string]any{ { "book": []map[string]any{ - { - "name": "A Time for Mercy", - "publisher": map[string]any{ - "yearOpened": int64(2013), - }, - }, { "name": "The Associate", "publisher": nil, @@ -263,6 +257,12 @@ func TestOneToManyToOneWithTwoLevelDeepFilter(t *testing.T) { "yearOpened": int64(1995), }, }, + { + "name": "A Time for Mercy", + "publisher": map[string]any{ + "yearOpened": int64(2013), + }, + }, { "name": "Sooley", "publisher": map[string]any{ diff --git a/tests/integration/query/one_to_one/simple_test.go b/tests/integration/query/one_to_one/simple_test.go index 4433c3ff9c..91ad7265af 100644 --- a/tests/integration/query/one_to_one/simple_test.go +++ b/tests/integration/query/one_to_one/simple_test.go @@ -257,15 +257,15 @@ func TestQueryOneToOneWithMultipleRecordsSecondaryDirection(t *testing.T) { Results: map[string]any{ "Author": []map[string]any{ { - "name": "Cornelia Funke", + "name": "John Grisham", "published": map[string]any{ - "name": "Theif Lord", + "name": "Painted House", }, }, { - "name": "John Grisham", + "name": "Cornelia Funke", "published": map[string]any{ - "name": "Painted House", + "name": "Theif Lord", }, }, }, @@ -437,7 +437,7 @@ func TestQueryOneToOne_WithRelationIDFromSecondarySide(t *testing.T) { "Book": []map[string]any{ { "name": "Painted House", - "author_id": "bae-420e72a6-e0c6-5a06-a958-2cc7adb7b3d0", + "author_id": "bae-35fc1c36-4347-5bf4-a41f-bf676b145075", }, }, }, diff --git a/tests/integration/query/one_to_one/with_clashing_id_field_test.go b/tests/integration/query/one_to_one/with_clashing_id_field_test.go index 077416084f..9097b80b4b 100644 --- a/tests/integration/query/one_to_one/with_clashing_id_field_test.go +++ b/tests/integration/query/one_to_one/with_clashing_id_field_test.go @@ -63,7 +63,7 @@ func TestQueryOneToOneWithClashingIdFieldOnSecondary(t *testing.T) { "Book": []map[string]any{ { "name": "Painted House", - "author_id": "bae-1a0405fa-e17d-5b0f-8fe2-eb966938df1c", + "author_id": "bae-5da9ad38-0a01-51ad-b54f-43eb9288d4f5", "author": map[string]any{ "name": "John Grisham", }, diff --git 
a/tests/integration/query/one_to_one/with_group_related_id_alias_test.go b/tests/integration/query/one_to_one/with_group_related_id_alias_test.go index 9def4654f4..0ae9548536 100644 --- a/tests/integration/query/one_to_one/with_group_related_id_alias_test.go +++ b/tests/integration/query/one_to_one/with_group_related_id_alias_test.go @@ -74,7 +74,7 @@ func TestQueryOneToOneWithGroupRelatedIDAlias(t *testing.T) { Results: map[string]any{ "Book": []map[string]any{ { - "author_id": "bae-077b5e8d-5a86-5ae7-a321-ac7e423bb260", + "author_id": "bae-fc7bf08d-9117-5acd-8b49-bc7431b1b238", "author": map[string]any{ "name": "John Grisham", }, @@ -85,7 +85,7 @@ func TestQueryOneToOneWithGroupRelatedIDAlias(t *testing.T) { }, }, { - "author_id": "bae-b11e00fc-340f-558b-909d-2ab94601570b", + "author_id": "bae-fcb12812-4c38-574e-bc8b-91b37ee6cd9b", "author": map[string]any{ "name": "Andrew Lone", }, @@ -156,10 +156,10 @@ func TestQueryOneToOneWithGroupRelatedIDAliasFromSecondaryWithoutInnerGroup(t *t Results: map[string]any{ "Book": []map[string]any{ { - "author_id": "bae-3c308f94-dc9e-5262-b0ce-ef4e8e545820", + "author_id": "bae-23a33112-7345-52f1-8816-0481747645f2", }, { - "author_id": "bae-420e72a6-e0c6-5a06-a958-2cc7adb7b3d0", + "author_id": "bae-35fc1c36-4347-5bf4-a41f-bf676b145075", }, }, }, @@ -225,13 +225,13 @@ func TestQueryOneToOneWithGroupRelatedIDAliasFromSecondaryWithoutInnerGroupWithJ Results: map[string]any{ "Book": []map[string]any{ { - "author_id": "bae-3c308f94-dc9e-5262-b0ce-ef4e8e545820", + "author_id": "bae-23a33112-7345-52f1-8816-0481747645f2", "author": map[string]any{ "name": "Andrew Lone", }, }, { - "author_id": "bae-420e72a6-e0c6-5a06-a958-2cc7adb7b3d0", + "author_id": "bae-35fc1c36-4347-5bf4-a41f-bf676b145075", "author": map[string]any{ "name": "John Grisham", }, @@ -300,7 +300,7 @@ func TestQueryOneToOneWithGroupRelatedIDAliasFromSecondaryWithInnerGroup(t *test Results: map[string]any{ "Book": []map[string]any{ { - "author_id": "bae-3c308f94-dc9e-5262-b0ce-ef4e8e545820", + "author_id": "bae-23a33112-7345-52f1-8816-0481747645f2", "_group": []map[string]any{ { "name": "Go Guide for Rust developers", @@ -308,7 +308,7 @@ func TestQueryOneToOneWithGroupRelatedIDAliasFromSecondaryWithInnerGroup(t *test }, }, { - "author_id": "bae-420e72a6-e0c6-5a06-a958-2cc7adb7b3d0", + "author_id": "bae-35fc1c36-4347-5bf4-a41f-bf676b145075", "_group": []map[string]any{ { "name": "Painted House", @@ -382,7 +382,7 @@ func TestQueryOneToOneWithGroupRelatedIDAliasFromSecondaryWithInnerGroupWithJoin Results: map[string]any{ "Book": []map[string]any{ { - "author_id": "bae-3c308f94-dc9e-5262-b0ce-ef4e8e545820", + "author_id": "bae-23a33112-7345-52f1-8816-0481747645f2", "author": map[string]any{ "name": "Andrew Lone", }, @@ -393,7 +393,7 @@ func TestQueryOneToOneWithGroupRelatedIDAliasFromSecondaryWithInnerGroupWithJoin }, }, { - "author_id": "bae-420e72a6-e0c6-5a06-a958-2cc7adb7b3d0", + "author_id": "bae-35fc1c36-4347-5bf4-a41f-bf676b145075", "author": map[string]any{ "name": "John Grisham", }, diff --git a/tests/integration/query/one_to_one/with_group_related_id_test.go b/tests/integration/query/one_to_one/with_group_related_id_test.go index 7607965203..5b1aa09dce 100644 --- a/tests/integration/query/one_to_one/with_group_related_id_test.go +++ b/tests/integration/query/one_to_one/with_group_related_id_test.go @@ -71,7 +71,7 @@ func TestQueryOneToOneWithGroupRelatedID(t *testing.T) { Results: map[string]any{ "Book": []map[string]any{ { - "author_id": "bae-077b5e8d-5a86-5ae7-a321-ac7e423bb260", + 
"author_id": "bae-fc7bf08d-9117-5acd-8b49-bc7431b1b238", "_group": []map[string]any{ { "name": "Painted House", @@ -79,7 +79,7 @@ func TestQueryOneToOneWithGroupRelatedID(t *testing.T) { }, }, { - "author_id": "bae-cfee1ed9-ede8-5b80-a6fa-78c727a076ac", + "author_id": "bae-f2dcf043-d24d-5885-9a0a-60196094c782", "_group": []map[string]any{ { "name": "Go Guide for Rust developers", @@ -147,10 +147,10 @@ func TestQueryOneToOneWithGroupRelatedIDFromSecondaryWithoutGroup(t *testing.T) Results: map[string]any{ "Book": []map[string]any{ { - "author_id": "bae-3c308f94-dc9e-5262-b0ce-ef4e8e545820", + "author_id": "bae-23a33112-7345-52f1-8816-0481747645f2", }, { - "author_id": "bae-420e72a6-e0c6-5a06-a958-2cc7adb7b3d0", + "author_id": "bae-35fc1c36-4347-5bf4-a41f-bf676b145075", }, }, }, @@ -216,13 +216,13 @@ func TestQueryOneToOneWithGroupRelatedIDFromSecondaryWithoutGroupWithJoin(t *tes Results: map[string]any{ "Book": []map[string]any{ { - "author_id": "bae-3c308f94-dc9e-5262-b0ce-ef4e8e545820", + "author_id": "bae-23a33112-7345-52f1-8816-0481747645f2", "author": map[string]any{ "name": "Andrew Lone", }, }, { - "author_id": "bae-420e72a6-e0c6-5a06-a958-2cc7adb7b3d0", + "author_id": "bae-35fc1c36-4347-5bf4-a41f-bf676b145075", "author": map[string]any{ "name": "John Grisham", }, @@ -291,7 +291,7 @@ func TestQueryOneToOneWithGroupRelatedIDFromSecondaryWithGroup(t *testing.T) { Results: map[string]any{ "Book": []map[string]any{ { - "author_id": "bae-bb4d6e89-e8b4-5eec-bfeb-6f7aa4840950", + "author_id": "bae-b6aedb41-3a00-54dd-9a84-78d0654dbc42", "_group": []map[string]any{ { "name": "Go Guide for Rust developers", @@ -299,7 +299,7 @@ func TestQueryOneToOneWithGroupRelatedIDFromSecondaryWithGroup(t *testing.T) { }, }, { - "author_id": "bae-420e72a6-e0c6-5a06-a958-2cc7adb7b3d0", + "author_id": "bae-35fc1c36-4347-5bf4-a41f-bf676b145075", "_group": []map[string]any{ { "name": "Painted House", @@ -373,7 +373,7 @@ func TestQueryOneToOneWithGroupRelatedIDFromSecondaryWithGroupWithJoin(t *testin Results: map[string]any{ "Book": []map[string]any{ { - "author_id": "bae-3c308f94-dc9e-5262-b0ce-ef4e8e545820", + "author_id": "bae-23a33112-7345-52f1-8816-0481747645f2", "author": map[string]any{ "name": "Andrew Lone", }, @@ -384,7 +384,7 @@ func TestQueryOneToOneWithGroupRelatedIDFromSecondaryWithGroupWithJoin(t *testin }, }, { - "author_id": "bae-420e72a6-e0c6-5a06-a958-2cc7adb7b3d0", + "author_id": "bae-35fc1c36-4347-5bf4-a41f-bf676b145075", "author": map[string]any{ "name": "John Grisham", }, diff --git a/tests/integration/query/one_to_one_multiple/simple_test.go b/tests/integration/query/one_to_one_multiple/simple_test.go index 66b823138a..4696db5dcf 100644 --- a/tests/integration/query/one_to_one_multiple/simple_test.go +++ b/tests/integration/query/one_to_one_multiple/simple_test.go @@ -302,21 +302,21 @@ func TestQueryOneToOneMultiple_FromSecondary(t *testing.T) { Results: map[string]any{ "Book": []map[string]any{ { - "name": "Painted House", + "name": "Theif Lord", "publisher": map[string]any{ - "name": "Old Publisher", + "name": "New Publisher", }, "author": map[string]any{ - "name": "John Grisham", + "name": "Cornelia Funke", }, }, { - "name": "Theif Lord", + "name": "Painted House", "publisher": map[string]any{ - "name": "New Publisher", + "name": "Old Publisher", }, "author": map[string]any{ - "name": "Cornelia Funke", + "name": "John Grisham", }, }, }, diff --git a/tests/integration/query/one_to_one_to_one/simple_test.go b/tests/integration/query/one_to_one_to_one/simple_test.go index 
bb054c4380..0486d3db77 100644 --- a/tests/integration/query/one_to_one_to_one/simple_test.go +++ b/tests/integration/query/one_to_one_to_one/simple_test.go @@ -94,20 +94,20 @@ func TestQueryOneToOneToOne(t *testing.T) { Results: map[string]any{ "Publisher": []map[string]any{ { - "name": "New Publisher", + "name": "Old Publisher", "printed": map[string]any{ - "name": "Theif Lord", + "name": "Painted House", "author": map[string]any{ - "name": "Cornelia Funke", + "name": "John Grisham", }, }, }, { - "name": "Old Publisher", + "name": "New Publisher", "printed": map[string]any{ - "name": "Painted House", + "name": "Theif Lord", "author": map[string]any{ - "name": "John Grisham", + "name": "Cornelia Funke", }, }, }, @@ -302,20 +302,20 @@ func TestQueryOneToOneToOnePrimaryThenSecondary(t *testing.T) { Results: map[string]any{ "Publisher": []map[string]any{ { - "name": "New Publisher", + "name": "Old Publisher", "printed": map[string]any{ - "name": "Theif Lord", + "name": "Painted House", "author": map[string]any{ - "name": "Cornelia Funke", + "name": "John Grisham", }, }, }, { - "name": "Old Publisher", + "name": "New Publisher", "printed": map[string]any{ - "name": "Painted House", + "name": "Theif Lord", "author": map[string]any{ - "name": "John Grisham", + "name": "Cornelia Funke", }, }, }, diff --git a/tests/integration/query/one_to_two_many/simple_test.go b/tests/integration/query/one_to_two_many/simple_test.go index 47e946b6e4..c8106f30a3 100644 --- a/tests/integration/query/one_to_two_many/simple_test.go +++ b/tests/integration/query/one_to_two_many/simple_test.go @@ -97,17 +97,6 @@ func TestQueryOneToTwoManyWithNilUnnamedRelationship_FromOneSide(t *testing.T) { }`, Results: map[string]any{ "Book": []map[string]any{ - { - "name": "Painted House", - "rating": 4.9, - "author": map[string]any{ - "name": "John Grisham", - }, - "reviewedBy": map[string]any{ - "name": "Cornelia Funke", - "age": int64(62), - }, - }, { "name": "A Time for Mercy", "rating": 4.5, @@ -130,6 +119,17 @@ func TestQueryOneToTwoManyWithNilUnnamedRelationship_FromOneSide(t *testing.T) { "age": int64(65), }, }, + { + "name": "Painted House", + "rating": 4.9, + "author": map[string]any{ + "name": "John Grisham", + }, + "reviewedBy": map[string]any{ + "name": "Cornelia Funke", + "age": int64(62), + }, + }, }, }, }, @@ -224,14 +224,14 @@ func TestQueryOneToTwoManyWithNilUnnamedRelationship_FromManySide(t *testing.T) "name": "Cornelia Funke", "age": int64(62), "reviewed": []map[string]any{ - { - "name": "Painted House", - "rating": 4.9, - }, { "name": "A Time for Mercy", "rating": 4.5, }, + { + "name": "Painted House", + "rating": 4.9, + }, }, "written": []map[string]any{ { @@ -250,10 +250,10 @@ func TestQueryOneToTwoManyWithNilUnnamedRelationship_FromManySide(t *testing.T) }, "written": []map[string]any{ { - "name": "Painted House", + "name": "A Time for Mercy", }, { - "name": "A Time for Mercy", + "name": "Painted House", }, }, }, @@ -375,21 +375,6 @@ func TestQueryOneToTwoManyWithNamedAndUnnamedRelationships(t *testing.T) { }`, Results: map[string]any{ "Book": []map[string]any{ - { - "name": "Painted House", - "rating": 4.9, - "author": map[string]any{ - "name": "John Grisham", - }, - "reviewedBy": map[string]any{ - "name": "Cornelia Funke", - "age": int64(62), - }, - "price": map[string]any{ - "currency": "GBP", - "value": 12.99, - }, - }, { "name": "A Time for Mercy", "rating": 4.5, @@ -420,6 +405,21 @@ func TestQueryOneToTwoManyWithNamedAndUnnamedRelationships(t *testing.T) { "value": 12.99, }, }, + { + "name": "Painted 
House", + "rating": 4.9, + "author": map[string]any{ + "name": "John Grisham", + }, + "reviewedBy": map[string]any{ + "name": "Cornelia Funke", + "age": int64(62), + }, + "price": map[string]any{ + "currency": "GBP", + "value": 12.99, + }, + }, }, }, }, @@ -541,14 +541,14 @@ func TestQueryOneToTwoManyWithNamedAndUnnamedRelationships_FromManySide(t *testi "name": "Cornelia Funke", "age": int64(62), "reviewed": []map[string]any{ - { - "name": "Painted House", - "rating": 4.9, - }, { "name": "A Time for Mercy", "rating": 4.5, }, + { + "name": "Painted House", + "rating": 4.9, + }, }, "written": []map[string]any{ { @@ -570,15 +570,15 @@ func TestQueryOneToTwoManyWithNamedAndUnnamedRelationships_FromManySide(t *testi }, "written": []map[string]any{ { - "name": "Painted House", + "name": "A Time for Mercy", "price": map[string]any{ - "value": 12.99, + "value": float64(129), }, }, { - "name": "A Time for Mercy", + "name": "Painted House", "price": map[string]any{ - "value": float64(129), + "value": 12.99, }, }, }, diff --git a/tests/integration/schema/one_many_test.go b/tests/integration/schema/one_many_test.go index 6783a17e7b..f29d0bf34d 100644 --- a/tests/integration/schema/one_many_test.go +++ b/tests/integration/schema/one_many_test.go @@ -16,6 +16,7 @@ import ( "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/client/request" testUtils "github.com/sourcenetwork/defradb/tests/integration" ) @@ -43,7 +44,7 @@ func TestSchemaOneMany_Primary(t *testing.T) { { Name: "dogs", ID: 1, - Kind: immutable.Some[client.FieldKind](client.ObjectArrayKind("Dog")), + Kind: immutable.Some[client.FieldKind](client.NewCollectionKind(2, true)), RelationName: immutable.Some("dog_user"), }, { @@ -65,7 +66,7 @@ func TestSchemaOneMany_Primary(t *testing.T) { { Name: "owner", ID: 2, - Kind: immutable.Some[client.FieldKind](client.ObjectKind("User")), + Kind: immutable.Some[client.FieldKind](client.NewCollectionKind(1, false)), RelationName: immutable.Some("dog_user"), }, { @@ -104,7 +105,7 @@ func TestSchemaOneMany_SelfReferenceOneFieldLexographicallyFirst(t *testing.T) { { Name: "a", ID: 1, - Kind: immutable.Some[client.FieldKind](client.ObjectKind("User")), + Kind: immutable.Some[client.FieldKind](client.NewSelfKind("", false)), RelationName: immutable.Some("user_user"), }, { @@ -116,7 +117,7 @@ func TestSchemaOneMany_SelfReferenceOneFieldLexographicallyFirst(t *testing.T) { { Name: "b", ID: 3, - Kind: immutable.Some[client.FieldKind](client.ObjectArrayKind("User")), + Kind: immutable.Some[client.FieldKind](client.NewSelfKind("", true)), RelationName: immutable.Some("user_user"), }, }, @@ -149,13 +150,13 @@ func TestSchemaOneMany_SelfReferenceManyFieldLexographicallyFirst(t *testing.T) { Name: "a", ID: 1, - Kind: immutable.Some[client.FieldKind](client.ObjectArrayKind("User")), + Kind: immutable.Some[client.FieldKind](client.NewSelfKind("", true)), RelationName: immutable.Some("user_user"), }, { Name: "b", ID: 2, - Kind: immutable.Some[client.FieldKind](client.ObjectKind("User")), + Kind: immutable.Some[client.FieldKind](client.NewSelfKind("", false)), RelationName: immutable.Some("user_user"), }, { @@ -173,3 +174,122 @@ func TestSchemaOneMany_SelfReferenceManyFieldLexographicallyFirst(t *testing.T) testUtils.ExecuteTestCase(t, test) } + +func TestSchemaOneMany_SelfUsingActualName(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + // Note: The @primary directive is required due to + // 
https://github.com/sourcenetwork/defradb/issues/2620 + // it should be removed when that ticket is closed. + Schema: ` + type User { + boss: User @primary + minions: [User] + } + `, + }, + testUtils.GetCollections{ + ExpectedResults: []client.CollectionDescription{ + { + Name: immutable.Some("User"), + Fields: []client.CollectionFieldDescription{ + { + Name: request.DocIDFieldName, + }, + { + Name: "boss", + ID: 1, + Kind: immutable.Some[client.FieldKind](client.NewSelfKind("", false)), + RelationName: immutable.Some("user_user"), + }, + { + Name: "boss_id", + ID: 2, + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some("user_user"), + }, + { + Name: "minions", + ID: 3, + Kind: immutable.Some[client.FieldKind](client.NewSelfKind("", true)), + RelationName: immutable.Some("user_user"), + }, + }, + }, + }, + }, + testUtils.GetSchema{ + ExpectedResults: []client.SchemaDescription{ + { + Name: "User", + Root: "bafkreifchjktkdtha7vkcqt6itzsw6lnzfyp7ufws4s32e7vigu7akn2q4", + VersionID: "bafkreifchjktkdtha7vkcqt6itzsw6lnzfyp7ufws4s32e7vigu7akn2q4", + Fields: []client.SchemaFieldDescription{ + { + Name: request.DocIDFieldName, + Kind: client.FieldKind_DocID, + }, + { + Name: "boss", + Kind: client.NewSelfKind("", false), + Typ: client.LWW_REGISTER, + }, + { + Name: "boss_id", + Kind: client.FieldKind_DocID, + Typ: client.LWW_REGISTER, + }, + }, + }, + }, + }, + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "User") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "User", + "fields": append(DefaultFields, + Field{ + "name": "boss", + "type": map[string]any{ + "kind": "OBJECT", + "name": "User", + }, + }, + Field{ + "name": "boss_id", + "type": map[string]any{ + "kind": "SCALAR", + "name": "ID", + }, + }, + Field{ + "name": "minions", + "type": map[string]any{ + "kind": "LIST", + "name": nil, + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/one_one_test.go b/tests/integration/schema/one_one_test.go index 8bc1e5a1fe..425d950804 100644 --- a/tests/integration/schema/one_one_test.go +++ b/tests/integration/schema/one_one_test.go @@ -13,6 +13,10 @@ package schema import ( "testing" + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/client/request" testUtils "github.com/sourcenetwork/defradb/tests/integration" ) @@ -61,3 +65,132 @@ func TestSchemaOneOne_TwoPrimaries_Errors(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestSchemaOneOne_SelfUsingActualName(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + boss: User @primary + minion: User + } + `, + }, + testUtils.GetCollections{ + ExpectedResults: []client.CollectionDescription{ + { + Name: immutable.Some("User"), + Fields: []client.CollectionFieldDescription{ + { + Name: request.DocIDFieldName, + }, + { + Name: "boss", + ID: 1, + Kind: immutable.Some[client.FieldKind](client.NewSelfKind("", false)), + RelationName: immutable.Some("user_user"), + }, + { + Name: "boss_id", + ID: 2, + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some("user_user"), + }, + { + Name: "minion", + ID: 3, + Kind: immutable.Some[client.FieldKind](client.NewSelfKind("", false)), + RelationName: immutable.Some("user_user"), + }, + { + Name: "minion_id", + 
ID: 4, + Kind: immutable.Some[client.FieldKind](client.FieldKind_DocID), + RelationName: immutable.Some("user_user"), + }, + }, + }, + }, + }, + testUtils.GetSchema{ + ExpectedResults: []client.SchemaDescription{ + { + Name: "User", + Root: "bafkreifchjktkdtha7vkcqt6itzsw6lnzfyp7ufws4s32e7vigu7akn2q4", + VersionID: "bafkreifchjktkdtha7vkcqt6itzsw6lnzfyp7ufws4s32e7vigu7akn2q4", + Fields: []client.SchemaFieldDescription{ + { + Name: request.DocIDFieldName, + Kind: client.FieldKind_DocID, + }, + { + Name: "boss", + Kind: client.NewSelfKind("", false), + Typ: client.LWW_REGISTER, + }, + { + Name: "boss_id", + Kind: client.FieldKind_DocID, + Typ: client.LWW_REGISTER, + }, + }, + }, + }, + }, + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "User") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "User", + "fields": append(DefaultFields, + Field{ + "name": "boss", + "type": map[string]any{ + "kind": "OBJECT", + "name": "User", + }, + }, + Field{ + "name": "boss_id", + "type": map[string]any{ + "kind": "SCALAR", + "name": "ID", + }, + }, + Field{ + "name": "minion", + "type": map[string]any{ + "kind": "OBJECT", + "name": "User", + }, + }, + Field{ + "name": "minion_id", + "type": map[string]any{ + "kind": "SCALAR", + "name": "ID", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/relations_test.go b/tests/integration/schema/relations_test.go deleted file mode 100644 index 892c6e67ac..0000000000 --- a/tests/integration/schema/relations_test.go +++ /dev/null @@ -1,144 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package schema - -import ( - "testing" - - testUtils "github.com/sourcenetwork/defradb/tests/integration" -) - -func TestSchemaRelationOneToOne(t *testing.T) { - test := testUtils.TestCase{ - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Dog { - name: String - user: User @primary - } - type User { - dog: Dog - } - `, - }, - testUtils.IntrospectionRequest{ - Request: ` - query { - __type (name: "User") { - name - fields { - name - type { - name - kind - } - } - } - } - `, - ExpectedData: map[string]any{ - "__type": map[string]any{ - "name": "User", - "fields": append(DefaultFields, - Field{ - "name": "dog", - "type": map[string]any{ - "kind": "OBJECT", - "name": "Dog", - }, - }, - Field{ - "name": "dog_id", - "type": map[string]any{ - "kind": "SCALAR", - "name": "ID", - }, - }, - ).Tidy(), - }, - }, - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaRelationManyToOne(t *testing.T) { - test := testUtils.TestCase{ - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Dog { - name: String - user: User - } - type User { - dogs: [Dog] - } - `, - }, - testUtils.IntrospectionRequest{ - Request: ` - query { - __type (name: "User") { - name - fields { - name - type { - name - kind - } - } - } - } - `, - ExpectedData: map[string]any{ - "__type": map[string]any{ - "name": "User", - "fields": append(DefaultFields, - Field{ - "name": "dogs", - "type": map[string]any{ - "kind": "LIST", - "name": nil, - }, - }, - ).Tidy(), - }, - }, - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} - -func TestSchemaRelationErrorsGivenOneSidedManyRelationField(t *testing.T) { - test := testUtils.TestCase{ - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Dog { - name: String - } - type User { - dogs: [Dog] - } - `, - ExpectedError: "relation missing field. Object: Dog, RelationName: dog_user", - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} diff --git a/tests/integration/schema/self_ref_test.go b/tests/integration/schema/self_ref_test.go new file mode 100644 index 0000000000..888b9db280 --- /dev/null +++ b/tests/integration/schema/self_ref_test.go @@ -0,0 +1,737 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package schema + +import ( + "testing" + + "github.com/sourcenetwork/defradb/client" + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestSchemaSelfReferenceSimple_SchemaHasSimpleSchemaID(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + boss: User + } + `, + }, + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "User") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "User", + "fields": DefaultFields.Append( + Field{ + "name": "boss_id", + "type": map[string]any{ + "kind": "SCALAR", + "name": "ID", + }, + }, + ).Append( + Field{ + "name": "boss", + "type": map[string]any{ + "kind": "OBJECT", + "name": "User", + }, + }, + ).Tidy(), + }, + }, + }, + testUtils.GetSchema{ + ExpectedResults: []client.SchemaDescription{ + { + Name: "User", + Root: "bafkreifchjktkdtha7vkcqt6itzsw6lnzfyp7ufws4s32e7vigu7akn2q4", + VersionID: "bafkreifchjktkdtha7vkcqt6itzsw6lnzfyp7ufws4s32e7vigu7akn2q4", + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Typ: client.NONE_CRDT, + Kind: client.FieldKind_DocID, + }, + { + Name: "boss", + Typ: client.LWW_REGISTER, + // Simple self kinds do not contain a base ID, as there is only one possible value + // that they could hold + Kind: client.NewSelfKind("", false), + }, + { + Name: "boss_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaSelfReferenceTwoTypes_SchemaHasComplexSchemaID(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + // The two primary relations form a circular two-collection self reference + Schema: ` + type User { + hosts: Dog @primary @relation(name:"hosts") + walks: Dog @relation(name:"walkies") + } + type Dog { + host: User @relation(name:"hosts") + walker: User @primary @relation(name:"walkies") + } + `, + }, + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "User") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "User", + "fields": DefaultFields.Append( + Field{ + "name": "hosts_id", + "type": map[string]any{ + "kind": "SCALAR", + "name": "ID", + }, + }, + ).Append( + Field{ + "name": "hosts", + "type": map[string]any{ + "kind": "OBJECT", + "name": "Dog", + }, + }, + ).Append( + Field{ + "name": "walks_id", + "type": map[string]any{ + "kind": "SCALAR", + "name": "ID", + }, + }, + ).Append( + Field{ + "name": "walks", + "type": map[string]any{ + "kind": "OBJECT", + "name": "Dog", + }, + }, + ).Tidy(), + }, + }, + }, + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Dog") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Dog", + "fields": DefaultFields.Append( + Field{ + "name": "host_id", + "type": map[string]any{ + "kind": "SCALAR", + "name": "ID", + }, + }, + ).Append( + Field{ + "name": "host", + "type": map[string]any{ + "kind": "OBJECT", + "name": "User", + }, + }, + ).Append( + Field{ + "name": "walker_id", + "type": map[string]any{ + "kind": "SCALAR", + "name": "ID", + }, + }, + ).Append( + Field{ + "name": "walker", + "type": map[string]any{ + "kind": "OBJECT", + "name": "User", + }, + }, + ).Tidy(), + }, + }, + }, + testUtils.GetSchema{ + 
ExpectedResults: []client.SchemaDescription{ + { + Name: "Dog", + // Note how Dog and User share the same base ID, but with a different index suffixed on + // the end. + Root: "bafkreichlth4ajgalengyv3hnmqnxa4vhnv5f34a3gzwh2jaajqb2yxd4i-0", + VersionID: "bafkreichlth4ajgalengyv3hnmqnxa4vhnv5f34a3gzwh2jaajqb2yxd4i-0", + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Typ: client.NONE_CRDT, + Kind: client.FieldKind_DocID, + }, + { + Name: "walker", + Typ: client.LWW_REGISTER, + // Because Dog and User form a circular dependency tree, the relation is declared + // as a SelfKind, with the index identifier of User being held in the relation kind. + Kind: client.NewSelfKind("1", false), + }, + { + Name: "walker_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + }, + }, + { + Name: "User", + // Note how Dog and User share the same base ID, but with a different index suffixed on + // the end. + Root: "bafkreichlth4ajgalengyv3hnmqnxa4vhnv5f34a3gzwh2jaajqb2yxd4i-1", + VersionID: "bafkreichlth4ajgalengyv3hnmqnxa4vhnv5f34a3gzwh2jaajqb2yxd4i-1", + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Typ: client.NONE_CRDT, + Kind: client.FieldKind_DocID, + }, + { + Name: "hosts", + Typ: client.LWW_REGISTER, + // Because Dog and User form a circular dependency tree, the relation is declared + // as a SelfKind, with the index identifier of Dog being held in the relation kind. + Kind: client.NewSelfKind("0", false), + }, + { + Name: "hosts_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaSelfReferenceTwoPairsOfTwoTypes_SchemasHaveDifferentComplexSchemaID(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + // - User and Dog form a circular dependency. + // - Cat and Mouse form another circular dependency. + // - There is a relationship from Cat to User; this does not form a circular dependency + // between the two (User/Dog and Cat/Mouse) circles. It is included to ensure that + // the code does not incorrectly merge the User/Dog and Cat/Mouse circles into a single + // circle. + Schema: ` + type User { + hosts: Dog @primary @relation(name:"hosts") + walks: Dog @relation(name:"walkies") + toleratedBy: Cat @relation(name:"tolerates") + } + type Dog { + host: User @relation(name:"hosts") + walker: User @primary @relation(name:"walkies") + } + type Cat { + loves: Mouse @primary @relation(name:"loves") + hatedBy: Mouse @relation(name:"hates") + tolerates: User @primary @relation(name:"tolerates") + } + type Mouse { + lovedBy: Cat @relation(name:"loves") + hates: Cat @primary @relation(name:"hates") + } + `, + }, + testUtils.GetSchema{ + ExpectedResults: []client.SchemaDescription{ + { + Name: "Cat", + // Cat and Mouse share the same base ID, but with a different index suffixed on + // the end. This base must be different to the Dog/User base ID. 
+ Root: "bafkreiacf7kjwlw32eiizyy6awdnfrnn7edaptp2chhfc5xktgxvrccqsa-0", + VersionID: "bafkreiacf7kjwlw32eiizyy6awdnfrnn7edaptp2chhfc5xktgxvrccqsa-0", + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Typ: client.NONE_CRDT, + Kind: client.FieldKind_DocID, + }, + { + Name: "loves", + Typ: client.LWW_REGISTER, + Kind: client.NewSelfKind("1", false), + }, + { + Name: "loves_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + { + Name: "tolerates", + Typ: client.LWW_REGISTER, + // This relationship reaches out of the Cat/Mouse circle, and thus must be of type SchemaKind, + // specified with the full User ID (including the `-1` index suffix). + Kind: client.NewSchemaKind("bafkreichlth4ajgalengyv3hnmqnxa4vhnv5f34a3gzwh2jaajqb2yxd4i-1", false), + }, + { + Name: "tolerates_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + }, + }, + { + Name: "Mouse", + // Cat and Mouse share the same base ID, but with a different index suffixed on + // the end. This base must be different to the Dog/User base ID. + Root: "bafkreiacf7kjwlw32eiizyy6awdnfrnn7edaptp2chhfc5xktgxvrccqsa-1", + VersionID: "bafkreiacf7kjwlw32eiizyy6awdnfrnn7edaptp2chhfc5xktgxvrccqsa-1", + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Typ: client.NONE_CRDT, + Kind: client.FieldKind_DocID, + }, + { + Name: "hates", + Typ: client.LWW_REGISTER, + Kind: client.NewSelfKind("0", false), + }, + { + Name: "hates_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + }, + }, + { + Name: "Dog", + // Dog and User share the same base ID, but with a different index suffixed on + // the end. This base must be different to the Cat/Mouse base ID. + Root: "bafkreichlth4ajgalengyv3hnmqnxa4vhnv5f34a3gzwh2jaajqb2yxd4i-0", + VersionID: "bafkreichlth4ajgalengyv3hnmqnxa4vhnv5f34a3gzwh2jaajqb2yxd4i-0", + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Typ: client.NONE_CRDT, + Kind: client.FieldKind_DocID, + }, + { + Name: "walker", + Typ: client.LWW_REGISTER, + Kind: client.NewSelfKind("1", false), + }, + { + Name: "walker_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + }, + }, + { + Name: "User", + // Dog and User share the same base ID, but with a different index suffixed on + // the end. This base must be different to the Cat/Mouse base ID. + Root: "bafkreichlth4ajgalengyv3hnmqnxa4vhnv5f34a3gzwh2jaajqb2yxd4i-1", + VersionID: "bafkreichlth4ajgalengyv3hnmqnxa4vhnv5f34a3gzwh2jaajqb2yxd4i-1", + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Typ: client.NONE_CRDT, + Kind: client.FieldKind_DocID, + }, + { + Name: "hosts", + Typ: client.LWW_REGISTER, + Kind: client.NewSelfKind("0", false), + }, + { + Name: "hosts_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaSelfReferenceTwoPairsOfTwoTypesJoinedByThirdCircle_SchemasAllHaveSameBaseSchemaID(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + // - User and Dog form a circular dependency. + // - Cat and Mouse form another circular dependency. + // - User and Cat form a circular dependency - this circle overlaps with the two otherwise + // independent User/Dog and Cat/Mouse circles, causing the 4 types to be locked together in + // a larger circle (a relationship DAG cannot be formed) - all 4 types must thus share the + // same base ID. 
+ Schema: ` + type User { + hosts: Dog @primary @relation(name:"hosts") + walks: Dog @relation(name:"walkies") + toleratedBy: Cat @relation(name:"tolerates") + feeds: Cat @primary @relation(name:"feeds") + } + type Dog { + host: User @relation(name:"hosts") + walker: User @primary @relation(name:"walkies") + } + type Cat { + loves: Mouse @primary @relation(name:"loves") + hatedBy: Mouse @relation(name:"hates") + tolerates: User @primary @relation(name:"tolerates") + fedBy: User @relation(name:"feeds") + } + type Mouse { + lovedBy: Cat @relation(name:"loves") + hates: Cat @primary @relation(name:"hates") + } + `, + }, + testUtils.GetSchema{ + ExpectedResults: []client.SchemaDescription{ + { + Name: "Cat", + Root: "bafkreibykyk7nm7hbh44rnyqc6glt7d73dpnn3ttwmichwdqydiajjh3ea-0", + VersionID: "bafkreibykyk7nm7hbh44rnyqc6glt7d73dpnn3ttwmichwdqydiajjh3ea-0", + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Typ: client.NONE_CRDT, + Kind: client.FieldKind_DocID, + }, + { + Name: "loves", + Typ: client.LWW_REGISTER, + Kind: client.NewSelfKind("2", false), + }, + { + Name: "loves_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + { + Name: "tolerates", + Typ: client.LWW_REGISTER, + Kind: client.NewSelfKind("3", false), + }, + { + Name: "tolerates_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + }, + }, + { + Name: "Dog", + Root: "bafkreibykyk7nm7hbh44rnyqc6glt7d73dpnn3ttwmichwdqydiajjh3ea-1", + VersionID: "bafkreibykyk7nm7hbh44rnyqc6glt7d73dpnn3ttwmichwdqydiajjh3ea-1", + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Typ: client.NONE_CRDT, + Kind: client.FieldKind_DocID, + }, + { + Name: "walker", + Typ: client.LWW_REGISTER, + Kind: client.NewSelfKind("3", false), + }, + { + Name: "walker_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + }, + }, + { + Name: "Mouse", + Root: "bafkreibykyk7nm7hbh44rnyqc6glt7d73dpnn3ttwmichwdqydiajjh3ea-2", + VersionID: "bafkreibykyk7nm7hbh44rnyqc6glt7d73dpnn3ttwmichwdqydiajjh3ea-2", + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Typ: client.NONE_CRDT, + Kind: client.FieldKind_DocID, + }, + { + Name: "hates", + Typ: client.LWW_REGISTER, + Kind: client.NewSelfKind("0", false), + }, + { + Name: "hates_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + }, + }, + { + Name: "User", + Root: "bafkreibykyk7nm7hbh44rnyqc6glt7d73dpnn3ttwmichwdqydiajjh3ea-3", + VersionID: "bafkreibykyk7nm7hbh44rnyqc6glt7d73dpnn3ttwmichwdqydiajjh3ea-3", + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Typ: client.NONE_CRDT, + Kind: client.FieldKind_DocID, + }, + { + Name: "feeds", + Typ: client.LWW_REGISTER, + Kind: client.NewSelfKind("0", false), + }, + { + Name: "feeds_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + { + Name: "hosts", + Typ: client.LWW_REGISTER, + Kind: client.NewSelfKind("1", false), + }, + { + Name: "hosts_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaSelfReferenceTwoPairsOfTwoTypesJoinedByThirdCircleAcrossAll_SchemasAllHaveSameBaseSchemaID(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + // - User and Dog form a circular dependency. + // - Cat and Mouse form another circular dependency. 
+ // - A larger circle is formed by bridging the two (User/Dog and Cat/Mouse) circles + // at different points in the same direction - this circle forms from + // User=>Dog=>Mouse=>Cat=>User=>etc. This test ensures that the two independent circles do not + // confuse the code into ignoring the larger circle. + Schema: ` + type User { + hosts: Dog @primary @relation(name:"hosts") + walks: Dog @relation(name:"walkies") + toleratedBy: Cat @relation(name:"tolerates") + } + type Dog { + host: User @relation(name:"hosts") + walker: User @primary @relation(name:"walkies") + licks: Mouse @primary @relation(name:"licks") + } + type Cat { + loves: Mouse @primary @relation(name:"loves") + hatedBy: Mouse @relation(name:"hates") + tolerates: User @primary @relation(name:"tolerates") + } + type Mouse { + lovedBy: Cat @relation(name:"loves") + hates: Cat @primary @relation(name:"hates") + lickedBy: Dog @relation(name:"licks") + } + `, + }, + testUtils.GetSchema{ + ExpectedResults: []client.SchemaDescription{ + { + Name: "Cat", + Root: "bafkreidetmki4jtod5jfmromvcz2vd75j6t6g3vnw3aenlv7znludye4ru-0", + VersionID: "bafkreidetmki4jtod5jfmromvcz2vd75j6t6g3vnw3aenlv7znludye4ru-0", + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Typ: client.NONE_CRDT, + Kind: client.FieldKind_DocID, + }, + { + Name: "loves", + Typ: client.LWW_REGISTER, + Kind: client.NewSelfKind("2", false), + }, + { + Name: "loves_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + { + Name: "tolerates", + Typ: client.LWW_REGISTER, + Kind: client.NewSelfKind("3", false), + }, + { + Name: "tolerates_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + }, + }, + { + Name: "Dog", + Root: "bafkreidetmki4jtod5jfmromvcz2vd75j6t6g3vnw3aenlv7znludye4ru-1", + VersionID: "bafkreidetmki4jtod5jfmromvcz2vd75j6t6g3vnw3aenlv7znludye4ru-1", + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Typ: client.NONE_CRDT, + Kind: client.FieldKind_DocID, + }, + { + Name: "licks", + Typ: client.LWW_REGISTER, + Kind: client.NewSelfKind("2", false), + }, + { + Name: "licks_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + { + Name: "walker", + Typ: client.LWW_REGISTER, + Kind: client.NewSelfKind("3", false), + }, + { + Name: "walker_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + }, + }, + { + Name: "Mouse", + Root: "bafkreidetmki4jtod5jfmromvcz2vd75j6t6g3vnw3aenlv7znludye4ru-2", + VersionID: "bafkreidetmki4jtod5jfmromvcz2vd75j6t6g3vnw3aenlv7znludye4ru-2", + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Typ: client.NONE_CRDT, + Kind: client.FieldKind_DocID, + }, + { + Name: "hates", + Typ: client.LWW_REGISTER, + Kind: client.NewSelfKind("0", false), + }, + { + Name: "hates_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + }, + }, + { + Name: "User", + Root: "bafkreidetmki4jtod5jfmromvcz2vd75j6t6g3vnw3aenlv7znludye4ru-3", + VersionID: "bafkreidetmki4jtod5jfmromvcz2vd75j6t6g3vnw3aenlv7znludye4ru-3", + Fields: []client.SchemaFieldDescription{ + { + Name: "_docID", + Typ: client.NONE_CRDT, + Kind: client.FieldKind_DocID, + }, + { + Name: "hosts", + Typ: client.LWW_REGISTER, + Kind: client.NewSelfKind("1", false), + }, + { + Name: "hosts_id", + Typ: client.LWW_REGISTER, + Kind: client.FieldKind_DocID, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go 
b/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go index abeff648fd..c0330e2ecf 100644 --- a/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go +++ b/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go @@ -35,7 +35,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_UnknownSchema(t *testing.T) }} ] `, - ExpectedError: "no type found for given name. Field: foo, Kind: Unknown", + ExpectedError: "no type found for given name. Field: foo, Kind: [Unknown]", }, }, } diff --git a/tests/integration/schema/updates/remove/simple_test.go b/tests/integration/schema/updates/remove/simple_test.go index 6628ccde86..44d331c98b 100644 --- a/tests/integration/schema/updates/remove/simple_test.go +++ b/tests/integration/schema/updates/remove/simple_test.go @@ -34,7 +34,7 @@ func TestSchemaUpdatesRemoveCollectionNameErrors(t *testing.T) { { "op": "remove", "path": "/Users/Name" } ] `, - ExpectedError: "SchemaRoot does not match existing. Name: ", + ExpectedError: "schema name can't be empty", }, }, } @@ -120,7 +120,7 @@ func TestSchemaUpdatesRemoveSchemaNameErrors(t *testing.T) { { "op": "remove", "path": "/Users/Name" } ] `, - ExpectedError: "SchemaRoot does not match existing. Name: ", + ExpectedError: "schema name can't be empty", }, }, } diff --git a/tests/integration/schema/updates/replace/simple_test.go b/tests/integration/schema/updates/replace/simple_test.go index 7e403f7d03..32482a5c44 100644 --- a/tests/integration/schema/updates/replace/simple_test.go +++ b/tests/integration/schema/updates/replace/simple_test.go @@ -50,61 +50,3 @@ func TestSchemaUpdatesReplaceCollectionErrors(t *testing.T) { } testUtils.ExecuteTestCase(t, test) } - -/* WIP -func TestSchemaUpdatesReplaceCollectionNameWithExistingDoesNotChangeVersionID(t *testing.T) { - schemaVersionID := "bafkreicg3xcpjlt3ecguykpcjrdx5ogi4n7cq2fultyr6vippqdxnrny3u" - - test := testUtils.TestCase{ - Description: "Test schema update, replacing collection name with self does not change version ID", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John" - }`, - }, - testUtils.SchemaPatch{ - // This patch essentially does nothing, replacing the current value with the current value - Patch: ` - [ - { "op": "replace", "path": "/Users/Name", "value": "Users" } - ] - `, - }, - testUtils.UpdateDoc{ - CollectionID: 0, - DocID: 0, - Doc: `{ - "name": "Johnnn" - }`, - }, - testUtils.Request{ - Request: `query { - commits (field: "C") { - schemaVersionId - } - }`, - Results: []map[string]any{ - { - // Update commit - "schemaVersionId": schemaVersionID, - }, - { - // Create commit - "schemaVersionId": schemaVersionID, - }, - }, - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} -*/ diff --git a/tests/integration/state.go b/tests/integration/state.go index 0ea720b09f..e594285318 100644 --- a/tests/integration/state.go +++ b/tests/integration/state.go @@ -159,9 +159,14 @@ type state struct { collections [][]client.Collection // The names of the collections active in this test. - // Indexes matches that of collections. + // Indexes match those of the initial collections. collectionNames []string + // A map of the collection indexes by their Root. This allows easier + // identification of collections in a natural, human-readable order, + // even when they are renamed. + collectionIndexesByRoot map[uint32]int + // Document IDs by index, by collection index. 
// // Each index is assumed to be global, and may be expected across multiple docIDs [][]client.DocID @@ -207,6 +212,7 @@ func newState( dbPaths: []string{}, collections: [][]client.Collection{}, collectionNames: collectionNames, + collectionIndexesByRoot: map[uint32]int{}, docIDs: [][]client.DocID{}, indexes: [][][]client.IndexDescription{}, isBench: false, diff --git a/tests/integration/utils.go b/tests/integration/utils.go index 2576f30762..4e5cf09a06 100644 --- a/tests/integration/utils.go +++ b/tests/integration/utils.go @@ -747,11 +747,23 @@ func refreshCollections( for i, collectionName := range s.collectionNames { for _, collection := range allCollections { if collection.Name().Value() == collectionName { - s.collections[nodeID][i] = collection + if _, ok := s.collectionIndexesByRoot[collection.Description().RootID]; !ok { + // If the root is not found here this is likely the first refreshCollections + // call of the test; we map it by root in case the collection is renamed - + // we still wish to preserve the original index so test maintainers can reference + // them in a convenient manner. + s.collectionIndexesByRoot[collection.Description().RootID] = i + } break } } } + + for _, collection := range allCollections { + if index, ok := s.collectionIndexesByRoot[collection.Description().RootID]; ok { + s.collections[nodeID][index] = collection + } + } } } diff --git a/tests/predefined/gen_predefined.go b/tests/predefined/gen_predefined.go index 647d878f82..99c1862d48 100644 --- a/tests/predefined/gen_predefined.go +++ b/tests/predefined/gen_predefined.go @@ -11,31 +11,13 @@ package predefined import ( - "context" "strings" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" - "github.com/sourcenetwork/defradb/internal/request/graphql" "github.com/sourcenetwork/defradb/tests/gen" ) -func parseSDL(gqlSDL string) (map[string]client.CollectionDefinition, error) { - parser, err := graphql.NewParser() - if err != nil { - return nil, err - } - cols, err := parser.ParseSDL(context.Background(), gqlSDL) - if err != nil { - return nil, err - } - result := make(map[string]client.CollectionDefinition) - for _, col := range cols { - result[col.Description.Name.Value()] = col - } - return result, nil -} - // CreateFromSDL generates documents for GraphQL SDL from a predefined list // of docs that might include nested docs. // The SDL is parsed to get the list of fields, and the docs @@ -44,11 +26,21 @@ func parseSDL(gqlSDL string) (map[string]client.CollectionDefinition, error) { // fields, and create SDLs with different fields from it. 
func CreateFromSDL(gqlSDL string, docsList DocsList) ([]gen.GeneratedDoc, error) { resultDocs := make([]gen.GeneratedDoc, 0, len(docsList.Docs)) - typeDefs, err := parseSDL(gqlSDL) + typeDefsByName, err := gen.ParseSDL(gqlSDL) if err != nil { return nil, err } - generator := docGenerator{types: typeDefs} + + defs := make([]client.CollectionDefinition, 0, len(typeDefsByName)) + for _, def := range typeDefsByName { + defs = append(defs, def) + } + + generator := docGenerator{ + types: typeDefsByName, + definitionCache: client.NewDefinitionCache(defs), + } + for _, doc := range docsList.Docs { docs, err := generator.generateRelatedDocs(doc, docsList.ColName) if err != nil { @@ -87,7 +79,12 @@ func Create(defs []client.CollectionDefinition, docsList DocsList) ([]gen.Genera for _, col := range defs { typeDefs[col.Description.Name.Value()] = col } - generator := docGenerator{types: typeDefs} + + generator := docGenerator{ + types: typeDefs, + definitionCache: client.NewDefinitionCache(defs), + } + for _, doc := range docsList.Docs { docs, err := generator.generateRelatedDocs(doc, docsList.ColName) if err != nil { @@ -99,7 +96,8 @@ func Create(defs []client.CollectionDefinition, docsList DocsList) ([]gen.Genera } type docGenerator struct { - types map[string]client.CollectionDefinition + types map[string]client.CollectionDefinition + definitionCache client.DefinitionCache } // toRequestedDoc removes the fields that are not in the schema of the collection. @@ -132,31 +130,31 @@ func (this *docGenerator) generatePrimary( result := []gen.GeneratedDoc{} requestedSecondary := toRequestedDoc(secDocMap, secType) for _, secDocField := range secType.GetFields() { - if secDocField.IsRelation() { + if secDocField.IsRelation() && secDocField.IsPrimaryRelation { if secDocMapField, hasField := secDocMap[secDocField.Name]; hasField { - if secDocField.IsPrimaryRelation { - primType := this.types[secDocField.Kind.Underlying()] - primDocMap, subResult, err := this.generatePrimary( - secDocMap[secDocField.Name].(map[string]any), &primType) - if err != nil { - return nil, nil, NewErrFailedToGenerateDoc(err) - } - primDoc, err := client.NewDocFromMap(primDocMap, primType) - if err != nil { - return nil, nil, NewErrFailedToGenerateDoc(err) - } - docID := primDoc.ID().String() - requestedSecondary[secDocField.Name+request.RelatedObjectID] = docID - subResult = append(subResult, gen.GeneratedDoc{Col: &primType, Doc: primDoc}) - result = append(result, subResult...) + primaryDef, _ := client.GetDefinition(this.definitionCache, *secType, secDocField.Kind) + primType := this.types[primaryDef.GetName()] - secondaryDocs, err := this.generateSecondaryDocs( - secDocMapField.(map[string]any), docID, &primType, secType.Description.Name.Value()) - if err != nil { - return nil, nil, err - } - result = append(result, secondaryDocs...) + primDocMap, subResult, err := this.generatePrimary( + secDocMap[secDocField.Name].(map[string]any), &primType) + if err != nil { + return nil, nil, NewErrFailedToGenerateDoc(err) + } + primDoc, err := client.NewDocFromMap(primDocMap, primType) + if err != nil { + return nil, nil, NewErrFailedToGenerateDoc(err) } + docID := primDoc.ID().String() + requestedSecondary[secDocField.Name+request.RelatedObjectID] = docID + subResult = append(subResult, gen.GeneratedDoc{Col: &primType, Doc: primDoc}) + result = append(result, subResult...) 
+ + secondaryDocs, err := this.generateSecondaryDocs( + secDocMapField.(map[string]any), docID, &primType, secType.Description.Name.Value()) + if err != nil { + return nil, nil, err + } + result = append(result, secondaryDocs...) } } } @@ -186,6 +184,7 @@ func (this *docGenerator) generateRelatedDocs(docMap map[string]any, typeName st return nil, err } result = append(result, secondaryDocs...) + return result, nil } @@ -197,15 +196,16 @@ func (this *docGenerator) generateSecondaryDocs( ) ([]gen.GeneratedDoc, error) { result := []gen.GeneratedDoc{} for _, field := range primaryType.GetFields() { - if field.IsRelation() { + if field.IsRelation() && !field.IsPrimaryRelation { if _, hasProp := primaryDocMap[field.Name]; hasProp { - if !field.IsPrimaryRelation && - (parentTypeName == "" || parentTypeName != field.Kind.Underlying()) { + otherDef, _ := client.GetDefinition(this.definitionCache, *primaryType, field.Kind) + if parentTypeName == "" || parentTypeName != otherDef.GetName() { docs, err := this.generateSecondaryDocsForField( - primaryDocMap, primaryType.Description.Name.Value(), &field, docID) + primaryDocMap, *primaryType, &field, docID) if err != nil { return nil, err } + result = append(result, docs...) } } @@ -217,15 +217,19 @@ func (this *docGenerator) generateSecondaryDocs( // generateSecondaryDocsForField generates secondary docs for the given field of a primary doc. func (this *docGenerator) generateSecondaryDocsForField( primaryDoc map[string]any, - primaryTypeName string, + primaryType client.CollectionDefinition, relField *client.FieldDefinition, primaryDocID string, ) ([]gen.GeneratedDoc, error) { result := []gen.GeneratedDoc{} - relTypeDef := this.types[relField.Kind.Underlying()] + + relTypeDef, _ := client.GetDefinition(this.definitionCache, primaryType, relField.Kind) + primaryPropName := "" for _, relDocField := range relTypeDef.GetFields() { - if relDocField.Kind.Underlying() == primaryTypeName && relDocField.IsPrimaryRelation { + relDocDef, _ := client.GetDefinition(this.definitionCache, relTypeDef, relDocField.Kind) + + if relDocDef.GetName() == primaryType.GetName() && relDocField.IsPrimaryRelation { primaryPropName = relDocField.Name + request.RelatedObjectID switch relVal := primaryDoc[relField.Name].(type) { case []map[string]any: diff --git a/tests/predefined/gen_predefined_test.go b/tests/predefined/gen_predefined_test.go index 30cd446697..a32c261ce7 100644 --- a/tests/predefined/gen_predefined_test.go +++ b/tests/predefined/gen_predefined_test.go @@ -17,6 +17,7 @@ import ( "github.com/stretchr/testify/require" "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/tests/gen" ) func TestGeneratePredefinedFromSchema_Simple(t *testing.T) { @@ -36,7 +37,7 @@ func TestGeneratePredefinedFromSchema_Simple(t *testing.T) { docs, err := CreateFromSDL(schema, docsList) assert.NoError(t, err) - colDefMap, err := parseSDL(schema) + colDefMap, err := gen.ParseSDL(schema) require.NoError(t, err) errorMsg := assertDocs(mustAddDocIDsToDocs(docsList.Docs, colDefMap["User"]), docs) @@ -60,7 +61,7 @@ func TestGeneratePredefinedFromSchema_StripExcessiveFields(t *testing.T) { }) assert.NoError(t, err) - colDefMap, err := parseSDL(schema) + colDefMap, err := gen.ParseSDL(schema) require.NoError(t, err) errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ @@ -102,7 +103,7 @@ func TestGeneratePredefinedFromSchema_OneToOne(t *testing.T) { }) assert.NoError(t, err) - colDefMap, err := parseSDL(schema) + colDefMap, err := gen.ParseSDL(schema) 
require.NoError(t, err) userDocs := mustAddDocIDsToDocs([]map[string]any{ @@ -157,7 +158,7 @@ func TestGeneratePredefinedFromSchema_OneToOnePrimary(t *testing.T) { }) assert.NoError(t, err) - colDefMap, err := parseSDL(schema) + colDefMap, err := gen.ParseSDL(schema) require.NoError(t, err) userDocs := mustAddDocIDsToDocs([]map[string]any{ @@ -213,7 +214,7 @@ func TestGeneratePredefinedFromSchema_OneToOneToOnePrimary(t *testing.T) { }) assert.NoError(t, err) - colDefMap, err := parseSDL(schema) + colDefMap, err := gen.ParseSDL(schema) require.NoError(t, err) specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}, colDefMap["Specs"]) @@ -264,7 +265,7 @@ func TestGeneratePredefinedFromSchema_TwoPrimaryToOneMiddle(t *testing.T) { }) assert.NoError(t, err) - colDefMap, err := parseSDL(schema) + colDefMap, err := gen.ParseSDL(schema) require.NoError(t, err) specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}, colDefMap["Specs"]) @@ -313,7 +314,7 @@ func TestGeneratePredefinedFromSchema_OneToTwoPrimary(t *testing.T) { }) assert.NoError(t, err) - colDefMap, err := parseSDL(schema) + colDefMap, err := gen.ParseSDL(schema) require.NoError(t, err) deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone"}, colDefMap["Device"]) @@ -364,7 +365,7 @@ func TestGeneratePredefinedFromSchema_TwoPrimaryToOneRoot(t *testing.T) { }) assert.NoError(t, err) - colDefMap, err := parseSDL(schema) + colDefMap, err := gen.ParseSDL(schema) require.NoError(t, err) deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone"}, colDefMap["Device"]) From 44ee5cd9d1f6f9447ca1296d8046612cc3e2154d Mon Sep 17 00:00:00 2001 From: ONLYUSEmePHONE Date: Mon, 9 Sep 2024 13:23:54 -0400 Subject: [PATCH 09/71] docs: Rename _key to _docID in docs (#2989) ## Relevant issue(s) Resolves #2977 ## Description This PR renames occurrences of _key to _docID when using schemas in the project's markdown documentation. ## Tasks - [ ] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [ ] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? 
GitHub markdown preview Specify the platform(s) on which this was tested: - Debian Linux --- docs/website/getting-started.md | 12 ++++++------ docs/website/guides/explain-systems.md | 4 ++-- docs/website/guides/schema-relationship.md | 12 ++++++------ .../references/query-specification/database-api.md | 2 +- .../query-specification/mutation-block.md | 14 +++++++------- 5 files changed, 22 insertions(+), 22 deletions(-) diff --git a/docs/website/getting-started.md b/docs/website/getting-started.md index ad1230fea6..5964e97e8c 100644 --- a/docs/website/getting-started.md +++ b/docs/website/getting-started.md @@ -71,7 +71,7 @@ Submit a `mutation` request to create a document of the `User` type: defradb client query ' mutation { create_User(input: {age: 31, verified: true, points: 90, name: "Bob"}) { - _key + _docID } } ' @@ -83,13 +83,13 @@ Expected response: { "data": [ { - "_key": "bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab", + "_docID": "bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab", } ] } ``` -`_key` is the document's key, a unique identifier of the document, determined by its schema and initial data. +`_docID` is the document's unique identifier, determined by its schema and initial data. ## Query documents Once you have populated your node with data, you can query it: defradb client query ' query { User { - _key + _docID age name points } } ' ``` -This query obtains *all* users and returns their fields `_key, age, name, points`. GraphQL queries only return the exact fields requested. +This query obtains *all* users and returns their fields `_docID, age, name, points`. GraphQL queries only return the exact fields requested. You can further filter results with the `filter` argument. @@ -116,7 +116,7 @@
defradb client query ' query { User(filter: {points: {_ge: 50}}) { - _key + _docID age name points diff --git a/docs/website/guides/explain-systems.md b/docs/website/guides/explain-systems.md index c185afd003..b52eeeec28 100644 --- a/docs/website/guides/explain-systems.md +++ b/docs/website/guides/explain-systems.md @@ -13,7 +13,7 @@ The DefraDB Explain System is a powerful tool designed to introspect requests, e ```graphql query { Author { - _key + _docID name age } @@ -25,7 +25,7 @@ query { ```graphql query @explain { Author { - _key + _docID name age } diff --git a/docs/website/guides/schema-relationship.md b/docs/website/guides/schema-relationship.md index 073932970f..7470ad7e0a 100644 --- a/docs/website/guides/schema-relationship.md +++ b/docs/website/guides/schema-relationship.md @@ -74,7 +74,7 @@ type Address { ```graphql mutation { create_Address(input: {streetNumber: "123", streetName: "Test road", country: "Canada"}) { - _key + _docID } } ``` @@ -82,7 +82,7 @@ mutation { ```graphql mutation { create_User(input: {name: "Alice", username: "awesomealice", age: 35, address_id: "bae-be6d8024-4953-5a92-84b4-f042d25230c6"}) { - _key + _docID } } ``` @@ -178,7 +178,7 @@ defradb client schema add -f schema.graphql ```graphql mutation { create_Author(input: {name: "Saadi", dateOfBirth: "1210-07-23T03:46:56.647Z"}) { - _key + _docID } } ``` @@ -187,7 +187,7 @@ mutation { ```graphql mutation { create_Book(input: {name: "Gulistan", genre: "Poetry", author_id: "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4"}) { - _key + _docID } } ``` @@ -196,7 +196,7 @@ mutation { ```graphql mutation { update_Author(id: "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", input: {name: "Saadi Shirazi"}) { - _key + _docID } } ``` @@ -205,7 +205,7 @@ mutation { ```graphql mutation { update_Book(filter: {name: {_eq: "Gulistan"}}, input: {description: "Persian poetry of ideas"}) { - _key + _docID } } ``` diff --git a/docs/website/references/query-specification/database-api.md b/docs/website/references/query-specification/database-api.md index 8a40583f89..8797b6894d 100644 --- a/docs/website/references/query-specification/database-api.md +++ b/docs/website/references/query-specification/database-api.md @@ -86,7 +86,7 @@ In addition to using `Commits` specific queries, include commit version sub-fiel ```graphql query { User { - _key + _docID name age diff --git a/docs/website/references/query-specification/mutation-block.md b/docs/website/references/query-specification/mutation-block.md index 255c3b40b1..f358813dd8 100644 --- a/docs/website/references/query-specification/mutation-block.md +++ b/docs/website/references/query-specification/mutation-block.md @@ -89,28 +89,28 @@ A basic example is provided below: ```graphql mutation { update_Book(dockey: '123', input: {name: "John"}) { - _key + _docID name } } ``` -Here, we can see that after applying the mutation, we return the `_key` and `name` fields. We can return any field from the document (not just the updated ones). We can even return and filter on related types. +Here, we can see that after applying the mutation, we return the `_docID` and `name` fields. We can return any field from the document (not just the updated ones). We can even return and filter on related types. Beyond updating by an ID or IDs, we can use a query filter to select which fields to apply our update to. This filter works the same as the queries. 
```graphql mutation { update_Book(filter: {rating: {_le: 1.0}}, input: {rating: 1.5}) { - _key + _docID rating name } } ``` -Here, we select all documents with a rating less than or equal to 1.0, update the rating value to 1.5, and return all the affected documents `_key`, `rating`, and `name` fields. +Here, we select all documents with a rating less than or equal to 1.0, update the rating value to 1.5, and return all the affected documents' `_docID`, `rating`, and `name` fields. For additional filter details, see the above `Query Block` section. @@ -132,13 +132,13 @@ Here, we can delete a document with ID '123': ```graphql mutation { delete_User(dockey: '123') { - _key + _docID name } } -This will delete the specific document, and return the `_key` and `name` for the deleted document. +This will delete the specific document, and return the `_docID` and `name` for the deleted document. DefraDB currently uses a Hard Delete system, which means that when a document is deleted, it is completely removed from the database. @@ -147,7 +147,7 @@ Similar to the Update system, you can use a filter to select which documents to ```graphql mutation { delete_User(filter: {rating: {_gt: 3}}) { - _key + _docID name } } From efb1f3a305b427f30d34350798b9df956ac24ce2 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 9 Sep 2024 15:14:28 -0400 Subject: [PATCH 10/71] bot: Update dependencies (bulk dependabot PRs) 09-09-2024 (#2990) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by combining the following PRs: #2988 bot: Bump vite from 5.4.2 to 5.4.3 in /playground #2987 bot: Bump eslint from 9.9.1 to 9.10.0 in /playground #2986 bot: Bump golang.org/x/term from 0.23.0 to 0.24.0 #2985 bot: Bump github.com/ipfs/boxo from 0.22.0 to 0.23.0 #2984 bot: Bump github.com/bits-and-blooms/bitset from 1.14.2 to 1.14.3 #2976 bot: Bump @typescript-eslint/parser from 8.3.0 to 8.4.0 in /playground --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 10 +- go.sum | 22 ++-- playground/package-lock.json | 231 +++++++++++++++++------------------ playground/package.json | 6 +- 4 files changed, 133 insertions(+), 136 deletions(-) diff --git a/go.mod b/go.mod index 55d5e4f047..19a200a01c 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,7 @@ module github.com/sourcenetwork/defradb go 1.22 require ( - github.com/bits-and-blooms/bitset v1.14.2 + github.com/bits-and-blooms/bitset v1.14.3 github.com/bxcodec/faker v2.0.1+incompatible github.com/cosmos/cosmos-sdk v0.50.9 github.com/cosmos/gogoproto v1.7.0 @@ -17,7 +17,7 @@ require ( github.com/go-errors/errors v1.5.1 github.com/gofrs/uuid/v5 v5.3.0 github.com/iancoleman/strcase v0.3.0 - github.com/ipfs/boxo v0.22.0 + github.com/ipfs/boxo v0.23.0 github.com/ipfs/go-block-format v0.2.0 github.com/ipfs/go-cid v0.4.1 github.com/ipfs/go-datastore v0.6.0 @@ -29,7 +29,7 @@ require ( github.com/jbenet/goprocess v0.1.4 github.com/lens-vm/lens/host-go v0.0.0-20231127204031-8d858ed2926c github.com/lestrrat-go/jwx/v2 v2.1.1 - github.com/libp2p/go-libp2p v0.36.2 + github.com/libp2p/go-libp2p v0.36.3 github.com/libp2p/go-libp2p-gostream v0.6.0 github.com/libp2p/go-libp2p-kad-dht v0.26.1 github.com/libp2p/go-libp2p-pubsub v0.12.0 @@ -60,7 +60,7 @@ require ( go.opentelemetry.io/otel/sdk/metric v1.29.0 go.uber.org/zap v1.27.0 golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa -
golang.org/x/term v0.23.0 + golang.org/x/term v0.24.0 google.golang.org/grpc v1.66.0 google.golang.org/protobuf v1.34.2 ) @@ -361,7 +361,7 @@ require ( golang.org/x/net v0.28.0 // indirect golang.org/x/oauth2 v0.21.0 // indirect golang.org/x/sync v0.8.0 // indirect - golang.org/x/sys v0.24.0 // indirect + golang.org/x/sys v0.25.0 // indirect golang.org/x/text v0.17.0 // indirect golang.org/x/time v0.5.0 // indirect golang.org/x/tools v0.24.0 // indirect diff --git a/go.sum b/go.sum index d826c43cdf..b3193ca332 100644 --- a/go.sum +++ b/go.sum @@ -294,8 +294,8 @@ github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 h1:41iFGWnSlI2gVpmOtVTJZNodLdLQLn/KsJqFvXwnd/s= github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= -github.com/bits-and-blooms/bitset v1.14.2 h1:YXVoyPndbdvcEVcseEovVfp0qjJp7S+i5+xgp/Nfbdc= -github.com/bits-and-blooms/bitset v1.14.2/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= +github.com/bits-and-blooms/bitset v1.14.3 h1:Gd2c8lSNf9pKXom5JtD7AaKO8o7fGQ2LtFj1436qilA= +github.com/bits-and-blooms/bitset v1.14.3/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM= github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ= github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g= @@ -852,8 +852,8 @@ github.com/invopop/yaml v0.3.1 h1:f0+ZpmhfBSS4MhG+4HYseMdJhoeeopbSKbq5Rpeelso= github.com/invopop/yaml v0.3.1/go.mod h1:PMOp3nn4/12yEZUFfmOuNHJsZToEEOwoWsT+D81KkeA= github.com/ipfs/bbloom v0.0.4 h1:Gi+8EGJ2y5qiD5FbsbpX/TMNcJw8gSqr7eyjHa4Fhvs= github.com/ipfs/bbloom v0.0.4/go.mod h1:cS9YprKXpoZ9lT0n/Mw/a6/aFV6DTjTLYHeA+gyqMG0= -github.com/ipfs/boxo v0.22.0 h1:QTC+P5uhsBNq6HzX728nsLyFW6rYDeR/5hggf9YZX78= -github.com/ipfs/boxo v0.22.0/go.mod h1:yp1loimX0BDYOR0cyjtcXHv15muEh5V1FqO2QLlzykw= +github.com/ipfs/boxo v0.23.0 h1:dY1PpcvPJ//VuUQ1TUd5TZvmaGuzxJ8dOP6mXaw+ke8= +github.com/ipfs/boxo v0.23.0/go.mod h1:ulu5I6avTmgGmvjuCaBRKwsaOOKjBfQw1EiOOQp8M6E= github.com/ipfs/go-bitfield v1.1.0 h1:fh7FIo8bSwaJEh6DdTWbCeZ1eqOaOkKFI74SCnsWbGA= github.com/ipfs/go-bitfield v1.1.0/go.mod h1:paqf1wjq/D2BBmzfTVFlJQ9IlFOZpg422HL0HqsGWHU= github.com/ipfs/go-block-format v0.2.0 h1:ZqrkxBA2ICbDRbK8KJs/u0O3dlp6gmAuuXUJNiW1Ycs= @@ -866,8 +866,6 @@ github.com/ipfs/go-datastore v0.6.0 h1:JKyz+Gvz1QEZw0LsX1IBn+JFCJQH4SJVFtM4uWU0M github.com/ipfs/go-datastore v0.6.0/go.mod h1:rt5M3nNbSO/8q1t4LNkLyUwRs8HupMeN/8O4Vn9YAT8= github.com/ipfs/go-detect-race v0.0.1 h1:qX/xay2W3E4Q1U7d9lNs1sU9nvguX0a7319XbyQ6cOk= github.com/ipfs/go-detect-race v0.0.1/go.mod h1:8BNT7shDZPo99Q74BpGMK+4D8Mn4j46UU0LZ723meps= -github.com/ipfs/go-ipfs-blocksutil v0.0.1 h1:Eh/H4pc1hsvhzsQoMEP3Bke/aW5P5rVM1IWFJMcGIPQ= -github.com/ipfs/go-ipfs-blocksutil v0.0.1/go.mod h1:Yq4M86uIOmxmGPUHv/uI7uKqZNtLb449gwKqXjIsnRk= github.com/ipfs/go-ipfs-delay v0.0.1 h1:r/UXYyRcddO6thwOnhiznIAiSvxMECGgtv35Xs1IeRQ= github.com/ipfs/go-ipfs-delay v0.0.1/go.mod h1:8SP1YXK1M1kXuc4KJZINY3TQQ03J2rwBG9QfXmbRPrw= github.com/ipfs/go-ipfs-pq v0.0.3 h1:YpoHVJB+jzK15mr/xsWC574tyDLkezVrDNeaalQBsTE= @@ -991,8 +989,8 @@ github.com/libp2p/go-cidranger v1.1.0 h1:ewPN8EZ0dd1LSnrtuwd4709PXVcITVeuwbag38y github.com/libp2p/go-cidranger v1.1.0/go.mod 
h1:KWZTfSr+r9qEo9OkI9/SIEeAtw+NNoU0dXIXt15Okic= github.com/libp2p/go-flow-metrics v0.1.0 h1:0iPhMI8PskQwzh57jB9WxIuIOQ0r+15PChFGkx3Q3WM= github.com/libp2p/go-flow-metrics v0.1.0/go.mod h1:4Xi8MX8wj5aWNDAZttg6UPmc0ZrnFNsMtpsYUClFtro= -github.com/libp2p/go-libp2p v0.36.2 h1:BbqRkDaGC3/5xfaJakLV/BrpjlAuYqSB0lRvtzL3B/U= -github.com/libp2p/go-libp2p v0.36.2/go.mod h1:XO3joasRE4Eup8yCTTP/+kX+g92mOgRaadk46LmPhHY= +github.com/libp2p/go-libp2p v0.36.3 h1:NHz30+G7D8Y8YmznrVZZla0ofVANrvBl2c+oARfMeDQ= +github.com/libp2p/go-libp2p v0.36.3/go.mod h1:4Y5vFyCUiJuluEPmpnKYf6WFx5ViKPUYs/ixe9ANFZ8= github.com/libp2p/go-libp2p-asn-util v0.4.1 h1:xqL7++IKD9TBFMgnLPZR6/6iYhawHKHl950SO9L6n94= github.com/libp2p/go-libp2p-asn-util v0.4.1/go.mod h1:d/NI6XZ9qxw67b4e+NgpQexCIiFYJjErASrYW4PFDN8= github.com/libp2p/go-libp2p-gostream v0.6.0 h1:QfAiWeQRce6pqnYfmIVWJFXNdDyfiR/qkCnjyaZUPYU= @@ -1870,8 +1868,8 @@ golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.24.0 h1:Twjiwq9dn6R1fQcyiK+wQyHWfaz/BJB+YIpzU/Cv3Xg= -golang.org/x/sys v0.24.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34= +golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -1880,8 +1878,8 @@ golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= -golang.org/x/term v0.23.0 h1:F6D4vR+EHoL9/sWAWgAR1H2DcHr4PareCbAaCo1RpuU= -golang.org/x/term v0.23.0/go.mod h1:DgV24QBUrK6jhZXl+20l6UWznPlwAHm1Q1mGHtydmSk= +golang.org/x/term v0.24.0 h1:Mh5cbb+Zk2hqqXNO7S1iTjEphVL+jb8ZWaqh/g+JWkM= +golang.org/x/term v0.24.0/go.mod h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= diff --git a/playground/package-lock.json b/playground/package-lock.json index 884f3c8bb5..081abe23df 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -19,13 +19,13 @@ "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^8.3.0", - "@typescript-eslint/parser": "^8.3.0", + "@typescript-eslint/parser": "^8.4.0", "@vitejs/plugin-react-swc": "^3.7.0", - "eslint": "^9.9.1", + "eslint": "^9.10.0", "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-react-refresh": "^0.4.9", "typescript": "^5.5.4", - "vite": "^5.4.2" + "vite": "^5.4.3" } }, "node_modules/@babel/runtime": { @@ -59,6 +59,37 @@ "integrity": 
"sha512-NVf/1YycDMs6+FxS0Tb/W8MjJRDQdXF+tBfDtZ5UZeiRUkTmwKc4vmYCKZTyymfJk1gnMsauvZSX/HiV9jOABw==", "license": "MIT" }, + "node_modules/@codemirror/language": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.0.0.tgz", + "integrity": "sha512-rtjk5ifyMzOna1c7PBu7J1VCt0PvA5wy3o8eMVnxMKb7z8KA7JFecvD04dSn14vj/bBaAbqRsGed5OjtofEnLA==", + "peer": true, + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/common": "^1.0.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0", + "style-mod": "^4.0.0" + } + }, + "node_modules/@codemirror/state": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.4.1.tgz", + "integrity": "sha512-QkEyUiLhsJoZkbumGZlswmAhA7CBU02Wrz7zvH4SrcifbsqwlXShVXg65f3v/ts57W3dqyamEriMhij1Z3Zz4A==", + "peer": true + }, + "node_modules/@codemirror/view": { + "version": "6.33.0", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.33.0.tgz", + "integrity": "sha512-AroaR3BvnjRW8fiZBalAaK+ZzB5usGgI014YKElYZvQdNH5ZIidHlO+cyf/2rWzyBFRkvG6VhiXeAEbC53P2YQ==", + "peer": true, + "dependencies": { + "@codemirror/state": "^6.4.0", + "style-mod": "^4.1.0", + "w3c-keyname": "^2.2.4" + } + }, "node_modules/@emotion/is-prop-valid": { "version": "0.8.8", "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz", @@ -576,9 +607,9 @@ } }, "node_modules/@eslint/js": { - "version": "9.9.1", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.9.1.tgz", - "integrity": "sha512-xIDQRsfg5hNBqHz04H1R3scSVwmI+KUbqjsQKHKQ1DAUSaUjYPReZZmS/5PNiKu1fUvzDd6H7DEDKACSEhu+TQ==", + "version": "9.10.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.10.0.tgz", + "integrity": "sha512-fuXtbiP5GWIn8Fz+LWoOMVf/Jxm+aajZYkhi6CuEm4SxymFM+eUWzbO9qXT+L0iCkL5+KGYMCSGxo686H19S1g==", "dev": true, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -593,6 +624,18 @@ "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, + "node_modules/@eslint/plugin-kit": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.1.0.tgz", + "integrity": "sha512-autAXT203ixhqei9xt+qkYOvY8l6LAFIdT2UXc/RPNeUVfqRF1BV94GTJyVPFKT8nFM6MyVJhjLj9E8JWvf5zQ==", + "dev": true, + "dependencies": { + "levn": "^0.4.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, "node_modules/@floating-ui/core": { "version": "1.6.7", "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.7.tgz", @@ -717,6 +760,30 @@ "url": "https://github.com/sponsors/nzakas" } }, + "node_modules/@lezer/common": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.1.tgz", + "integrity": "sha512-yemX0ZD2xS/73llMZIK6KplkjIjf2EvAHcinDi/TfJ9hS25G0388+ClHt6/3but0oOxinTcQHJLDXh6w1crzFQ==", + "peer": true + }, + "node_modules/@lezer/highlight": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.1.tgz", + "integrity": "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA==", + "peer": true, + "dependencies": { + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@lezer/lr": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.2.tgz", + "integrity": "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA==", + "peer": true, + "dependencies": { + "@lezer/common": "^1.0.0" + } + }, "node_modules/@motionone/animation": { 
"version": "10.18.0", "resolved": "https://registry.npmjs.org/@motionone/animation/-/animation-10.18.0.tgz", @@ -2388,7 +2455,7 @@ "version": "15.7.12", "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.12.tgz", "integrity": "sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q==", - "dev": true, + "devOptional": true, "license": "MIT" }, "node_modules/@types/ramda": { @@ -2404,7 +2471,7 @@ "version": "18.3.5", "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.5.tgz", "integrity": "sha512-WeqMfGJLGuLCqHGYRGHxnKrXcTitc6L/nBUWfWPcTarG3t9PsquqUMuVeXZeca+mglY4Vo5GZjCi0A3Or2lnxA==", - "dev": true, + "devOptional": true, "dependencies": { "@types/prop-types": "*", "csstype": "^3.0.2" @@ -2414,7 +2481,7 @@ "version": "18.3.0", "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.0.tgz", "integrity": "sha512-EhwApuTmMBmXuFOikhQLIBUn6uFg81SwLMOAUgodJF14SOBOCMdU04gDoYi0WOJJHD144TL32z4yDqCW3dnkQg==", - "dev": true, + "devOptional": true, "license": "MIT", "dependencies": { "@types/react": "*" @@ -2485,15 +2552,15 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.3.0.tgz", - "integrity": "sha512-h53RhVyLu6AtpUzVCYLPhZGL5jzTD9fZL+SYf/+hYOx2bDkyQXztXSc4tbvKYHzfMXExMLiL9CWqJmVz6+78IQ==", + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.4.0.tgz", + "integrity": "sha512-NHgWmKSgJk5K9N16GIhQ4jSobBoJwrmURaLErad0qlLjrpP5bECYg+wxVTGlGZmJbU03jj/dfnb6V9bw+5icsA==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "8.3.0", - "@typescript-eslint/types": "8.3.0", - "@typescript-eslint/typescript-estree": "8.3.0", - "@typescript-eslint/visitor-keys": "8.3.0", + "@typescript-eslint/scope-manager": "8.4.0", + "@typescript-eslint/types": "8.4.0", + "@typescript-eslint/typescript-estree": "8.4.0", + "@typescript-eslint/visitor-keys": "8.4.0", "debug": "^4.3.4" }, "engines": { @@ -2512,81 +2579,6 @@ } } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.3.0.tgz", - "integrity": "sha512-mz2X8WcN2nVu5Hodku+IR8GgCOl4C0G/Z1ruaWN4dgec64kDBabuXyPAr+/RgJtumv8EEkqIzf3X2U5DUKB2eg==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.3.0", - "@typescript-eslint/visitor-keys": "8.3.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.3.0.tgz", - "integrity": "sha512-y6sSEeK+facMaAyixM36dQ5NVXTnKWunfD1Ft4xraYqxP0lC0POJmIaL/mw72CUMqjY9qfyVfXafMeaUj0noWw==", - "dev": true, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.3.0.tgz", - "integrity": "sha512-Mq7FTHl0R36EmWlCJWojIC1qn/ZWo2YiWYc1XVtasJ7FIgjo0MVv9rZWXEE7IK2CGrtwe1dVOxWwqXUdNgfRCA==", - 
"dev": true, - "dependencies": { - "@typescript-eslint/types": "8.3.0", - "@typescript-eslint/visitor-keys": "8.3.0", - "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^1.3.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.3.0.tgz", - "integrity": "sha512-RmZwrTbQ9QveF15m/Cl28n0LXD6ea2CjkhH5rQ55ewz3H24w+AMCJHPVYaZ8/0HoG8Z3cLLFFycRXxeO2tz9FA==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.3.0", - "eslint-visitor-keys": "^3.4.3" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/scope-manager": { "version": "8.4.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.4.0.tgz", @@ -3140,7 +3132,7 @@ "version": "3.1.3", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", - "dev": true, + "devOptional": true, "license": "MIT" }, "node_modules/debounce-promise": { @@ -3320,16 +3312,17 @@ } }, "node_modules/eslint": { - "version": "9.9.1", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.9.1.tgz", - "integrity": "sha512-dHvhrbfr4xFQ9/dq+jcVneZMyRYLjggWjk6RVsIiHsP8Rz6yZ8LvZ//iU4TrZF+SXWG+JkNF2OyiZRvzgRDqMg==", + "version": "9.10.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.10.0.tgz", + "integrity": "sha512-Y4D0IgtBZfOcOUAIQTSXBKoNGfY0REGqHJG6+Q81vNippW5YlKjHFj4soMxamKK1NXHUWuBZTLdU3Km+L/pcHw==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.11.0", "@eslint/config-array": "^0.18.0", "@eslint/eslintrc": "^3.1.0", - "@eslint/js": "9.9.1", + "@eslint/js": "9.10.0", + "@eslint/plugin-kit": "^0.1.0", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.3.0", "@nodelib/fs.walk": "^1.2.8", @@ -3352,7 +3345,6 @@ "is-glob": "^4.0.0", "is-path-inside": "^3.0.3", "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.4.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", @@ -4451,7 +4443,6 @@ "url": "https://github.com/sponsors/ai" } ], - "license": "MIT", "bin": { "nanoid": "bin/nanoid.cjs" }, @@ -4687,11 +4678,10 @@ } }, "node_modules/picocolors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", - "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", - "dev": true, - "license": "ISC" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz", + "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==", + "dev": true }, "node_modules/picomatch": { "version": "2.3.1", @@ -4718,9 +4708,9 @@ } }, "node_modules/postcss": { - "version": "8.4.41", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.41.tgz", - "integrity": 
"sha512-TesUflQ0WKZqAvg52PWL6kHgLKP6xB6heTOdoYM0Wt2UHyxNa4K25EZZMgKns3BH1RLVbZCREPpLY0rhnNoHVQ==", + "version": "8.4.45", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.45.tgz", + "integrity": "sha512-7KTLTdzdZZYscUc65XmjFiB73vBhBfbPztCYdUNvlaso9PrzjzcmjqBPR0lNGkcVlcO4BjiO5rK/qNz+XAen1Q==", "dev": true, "funding": [ { @@ -4736,7 +4726,6 @@ "url": "https://github.com/sponsors/ai" } ], - "license": "MIT", "dependencies": { "nanoid": "^3.3.7", "picocolors": "^1.0.1", @@ -5520,11 +5509,10 @@ } }, "node_modules/source-map-js": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", - "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", "dev": true, - "license": "BSD-3-Clause", "engines": { "node": ">=0.10.0" } @@ -5581,6 +5569,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/style-mod": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.2.tgz", + "integrity": "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw==", + "peer": true + }, "node_modules/style-value-types": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/style-value-types/-/style-value-types-5.0.0.tgz", @@ -5943,14 +5937,13 @@ "optional": true }, "node_modules/vite": { - "version": "5.4.2", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.2.tgz", - "integrity": "sha512-dDrQTRHp5C1fTFzcSaMxjk6vdpKvT+2/mIdE07Gw2ykehT49O0z/VHS3zZ8iV/Gh8BJJKHWOe5RjaNrW5xf/GA==", + "version": "5.4.3", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.3.tgz", + "integrity": "sha512-IH+nl64eq9lJjFqU+/yrRnrHPVTlgy42/+IzbOdaFDVlyLgI/wDlf+FCobXLX1cT0X5+7LMyH1mIy2xJdLfo8Q==", "dev": true, - "license": "MIT", "dependencies": { "esbuild": "^0.21.3", - "postcss": "^8.4.41", + "postcss": "^8.4.43", "rollup": "^4.20.0" }, "bin": { @@ -6007,6 +6000,12 @@ "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz", "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==" }, + "node_modules/w3c-keyname": { + "version": "2.2.8", + "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz", + "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==", + "peer": true + }, "node_modules/web-streams-polyfill": { "version": "3.3.3", "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", diff --git a/playground/package.json b/playground/package.json index cf14266447..3172ef07ce 100644 --- a/playground/package.json +++ b/playground/package.json @@ -21,12 +21,12 @@ "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^8.3.0", - "@typescript-eslint/parser": "^8.3.0", + "@typescript-eslint/parser": "^8.4.0", "@vitejs/plugin-react-swc": "^3.7.0", - "eslint": "^9.9.1", + "eslint": "^9.10.0", "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-react-refresh": "^0.4.9", "typescript": "^5.5.4", - "vite": "^5.4.2" + "vite": "^5.4.3" } } From 379af34af75d9c82051c2eb15bbd3281e51c0239 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Sep 2024 14:10:28 -0400 Subject: [PATCH 11/71] bot: Bump @typescript-eslint/parser from 8.4.0 to 8.5.0 in /playground (#2991) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [@typescript-eslint/parser](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/parser) from 8.4.0 to 8.5.0.
Release notes (sourced from @typescript-eslint/parser's releases):

v8.5.0 (2024-09-09)

Fixes:

- deps: update dependency prism-react-renderer to v2.4.0 (#9943)
- eslint-plugin: [no-unnecessary-type-assertion] fix TSNonNullExpression fixer (#9898)
- eslint-plugin: [no-misused-promises] handle static method (#9951)
- eslint-plugin: [no-unnecessary-type-parameters] fix AST quick path scope analysis (#9900)
- eslint-plugin: [consistent-type-assertions] access parser services lazily (#9921)

Changelog (sourced from @typescript-eslint/parser's changelog):

8.5.0 (2024-09-09) was a version bump only for parser, to align it with other projects; there were no code changes.
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 91 ++++++++++++++++++++++++++++++++---- playground/package.json | 2 +- 2 files changed, 84 insertions(+), 9 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 081abe23df..71dcf11ed2 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -19,7 +19,7 @@ "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^8.3.0", - "@typescript-eslint/parser": "^8.4.0", + "@typescript-eslint/parser": "^8.5.0", "@vitejs/plugin-react-swc": "^3.7.0", "eslint": "^9.10.0", "eslint-plugin-react-hooks": "^4.6.2", @@ -2552,15 +2552,15 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "8.4.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.4.0.tgz", - "integrity": "sha512-NHgWmKSgJk5K9N16GIhQ4jSobBoJwrmURaLErad0qlLjrpP5bECYg+wxVTGlGZmJbU03jj/dfnb6V9bw+5icsA==", + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.5.0.tgz", + "integrity": "sha512-gF77eNv0Xz2UJg/NbpWJ0kqAm35UMsvZf1GHj8D9MRFTj/V3tAciIWXfmPLsAAF/vUlpWPvUDyH1jjsr0cMVWw==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "8.4.0", - "@typescript-eslint/types": "8.4.0", - "@typescript-eslint/typescript-estree": "8.4.0", - "@typescript-eslint/visitor-keys": "8.4.0", + "@typescript-eslint/scope-manager": "8.5.0", + "@typescript-eslint/types": "8.5.0", + "@typescript-eslint/typescript-estree": "8.5.0", + "@typescript-eslint/visitor-keys": "8.5.0", "debug": "^4.3.4" }, "engines": { @@ -2579,6 +2579,81 @@ } } }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.5.0.tgz", + "integrity": "sha512-06JOQ9Qgj33yvBEx6tpC8ecP9o860rsR22hWMEd12WcTRrfaFgHr2RB/CA/B+7BMhHkXT4chg2MyboGdFGawYg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.5.0", + "@typescript-eslint/visitor-keys": "8.5.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.5.0.tgz", + "integrity": "sha512-qjkormnQS5wF9pjSi6q60bKUHH44j2APxfh9TQRXK8wbYVeDYYdYJGIROL87LGZZ2gz3Rbmjc736qyL8deVtdw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.5.0.tgz", + "integrity": "sha512-vEG2Sf9P8BPQ+d0pxdfndw3xIXaoSjliG0/Ejk7UggByZPKXmJmw3GW5jV2gHNQNawBUyfahoSiCFVov0Ruf7Q==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.5.0", + "@typescript-eslint/visitor-keys": "8.5.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + 
"funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.5.0.tgz", + "integrity": "sha512-yTPqMnbAZJNy2Xq2XU8AdtOW9tJIr+UQb64aXB9f3B1498Zx9JorVgFJcZpEc9UBuCCrdzKID2RGAMkYcDtZOw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.5.0", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/scope-manager": { "version": "8.4.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.4.0.tgz", diff --git a/playground/package.json b/playground/package.json index 3172ef07ce..640cfe3a3e 100644 --- a/playground/package.json +++ b/playground/package.json @@ -21,7 +21,7 @@ "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^8.3.0", - "@typescript-eslint/parser": "^8.4.0", + "@typescript-eslint/parser": "^8.5.0", "@vitejs/plugin-react-swc": "^3.7.0", "eslint": "^9.10.0", "eslint-plugin-react-hooks": "^4.6.2", From 36fb5b5ff22e167992a36448463d6bcf4f065994 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 12 Sep 2024 14:30:38 -0700 Subject: [PATCH 12/71] fix(i): Log graphql server address (#2996) ## Relevant issue(s) Resolves #2795 ## Description This PR adds a log showing the graphql server address. It also updates the API log to show if the playground is enabled. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? Manually tested by running `defradb start` Specify the platform(s) on which this was tested: - MacOS --- http/handler_playground.go | 1 + http/server.go | 4 ++++ node/node.go | 4 +++- 3 files changed, 8 insertions(+), 1 deletion(-) diff --git a/http/handler_playground.go b/http/handler_playground.go index 0a69e312b2..ae0bda70e1 100644 --- a/http/handler_playground.go +++ b/http/handler_playground.go @@ -25,4 +25,5 @@ func init() { panic(err) } playgroundHandler = http.FileServer(http.FS(sub)) + PlaygroundEnabled = true } diff --git a/http/server.go b/http/server.go index f975e200ad..58eca65bd1 100644 --- a/http/server.go +++ b/http/server.go @@ -21,6 +21,10 @@ import ( "github.com/go-chi/chi/v5/middleware" ) +// PlaygroundEnabled is used to detect if the playground is enabled +// on the current http server instance. 
+var PlaygroundEnabled = false + // We only allow cipher suites that are marked secure // by ssllabs var tlsCipherSuites = []uint16{ diff --git a/node/node.go b/node/node.go index 5eea424956..ffc4abd0ff 100644 --- a/node/node.go +++ b/node/node.go @@ -165,7 +165,9 @@ func (n *Node) Start(ctx context.Context) error { if err != nil { return err } - log.InfoContext(ctx, fmt.Sprintf("Providing HTTP API at %s.", n.Server.Address())) + log.InfoContext(ctx, + fmt.Sprintf("Providing HTTP API at %s PlaygroundEnabled=%t", n.Server.Address(), http.PlaygroundEnabled)) + log.InfoContext(ctx, fmt.Sprintf("Providing GraphQL endpoint at %s/v0/graphql", n.Server.Address())) go func() { if err := n.Server.Serve(); err != nil && !errors.Is(err, gohttp.ErrServerClosed) { log.ErrorContextE(ctx, "HTTP server stopped", err) From ccf4d937138a4f7b8435551266fda5bafeb2a476 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 13 Sep 2024 15:22:51 -0700 Subject: [PATCH 13/71] feat: GQL variables and operation name (#2993) ## Relevant issue(s) Resolves #1441 Resolves #1395 ## Description This PR adds support for GraphQL variables and operation name. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? Added tests Specify the platform(s) on which this was tested: - MacOS --- cli/request.go | 22 +- client/db.go | 27 +- client/mocks/db.go | 35 ++- .../references/cli/defradb_client_query.md | 6 +- docs/website/references/http/openapi.json | 7 + go.mod | 4 +- go.sum | 4 +- http/client.go | 13 +- http/handler_store.go | 13 +- internal/core/parser.go | 2 +- internal/db/request.go | 4 +- internal/db/store.go | 9 +- internal/db/view.go | 2 +- internal/request/graphql/parser.go | 4 +- internal/request/graphql/parser/commit.go | 70 ++--- internal/request/graphql/parser/filter.go | 91 ++----- internal/request/graphql/parser/mutation.go | 114 ++------ internal/request/graphql/parser/query.go | 250 +++++++----------- internal/request/graphql/parser/request.go | 117 ++++---- .../request/graphql/parser/subscription.go | 31 +-- internal/request/graphql/schema/generate.go | 2 + internal/request/graphql/schema/manager.go | 19 +- tests/bench/query/planner/utils.go | 5 +- tests/clients/cli/wrapper.go | 17 ++ tests/clients/http/wrapper.go | 3 +- .../mutation/create/with_variables_test.go | 86 ++++++ .../one_to_one/with_clashing_id_field_test.go | 8 + .../query/simple/with_operation_name_test.go | 122 +++++++++ .../query/simple/with_variables_test.go | 170 ++++++++++++ tests/integration/test_case.go | 6 + tests/integration/utils.go | 9 +- 31 files changed, 794 insertions(+), 478 deletions(-) create mode 100644 tests/integration/mutation/create/with_variables_test.go create mode 100644 tests/integration/query/simple/with_operation_name_test.go create mode 100644 tests/integration/query/simple/with_variables_test.go diff --git a/cli/request.go b/cli/request.go index b6ec8e05ce..ae794dc2a1 100644 --- a/cli/request.go +++ b/cli/request.go @@ -11,11 +11,13 @@ package cli import ( + "encoding/json" "io" "os" 
"github.com/spf13/cobra" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/errors" ) @@ -26,6 +28,8 @@ const ( func MakeRequestCommand() *cobra.Command { var filePath string + var operationName string + var variablesJSON string var cmd = &cobra.Command{ Use: "query [-i --identity] [request]", Short: "Send a DefraDB GraphQL query request", @@ -70,8 +74,21 @@ To learn more about the DefraDB GraphQL Query Language, refer to https://docs.so return errors.New("request cannot be empty") } + var options []client.RequestOption + if variablesJSON != "" { + var variables map[string]any + err := json.Unmarshal([]byte(variablesJSON), &variables) + if err != nil { + return err + } + options = append(options, client.WithVariables(variables)) + } + if operationName != "" { + options = append(options, client.WithOperationName(operationName)) + } + store := mustGetContextStore(cmd) - result := store.ExecRequest(cmd.Context(), request) + result := store.ExecRequest(cmd.Context(), request, options...) var errors []string for _, err := range result.GQL.Errors { @@ -88,7 +105,8 @@ To learn more about the DefraDB GraphQL Query Language, refer to https://docs.so return nil }, } - + cmd.Flags().StringVarP(&operationName, "operation", "o", "", "Name of the operation to execute in the query") + cmd.Flags().StringVarP(&variablesJSON, "variables", "v", "", "JSON encoded variables to use in the query") cmd.Flags().StringVarP(&filePath, "file", "f", "", "File containing the query request") return cmd } diff --git a/client/db.go b/client/db.go index e77dd6cb87..ccaa296018 100644 --- a/client/db.go +++ b/client/db.go @@ -246,7 +246,32 @@ type Store interface { GetAllIndexes(context.Context) (map[CollectionName][]IndexDescription, error) // ExecRequest executes the given GQL request against the [Store]. - ExecRequest(ctx context.Context, request string) *RequestResult + ExecRequest(ctx context.Context, request string, opts ...RequestOption) *RequestResult +} + +// GQLOptions contains optional arguments for GQL requests. +type GQLOptions struct { + // OperationName is the name of the operation to exec. + OperationName string + // Variables is a map of names to varible values. + Variables map[string]any +} + +// RequestOption sets an optional request setting. +type RequestOption func(*GQLOptions) + +// WithOperationName sets the operation name for a GQL request. +func WithOperationName(operationName string) RequestOption { + return func(o *GQLOptions) { + o.OperationName = operationName + } +} + +// WithVariables sets the variables for a GQL request. +func WithVariables(variables map[string]any) RequestOption { + return func(o *GQLOptions) { + o.Variables = variables + } } // GQLResult represents the immediate results of a GQL request. 
diff --git a/client/mocks/db.go b/client/mocks/db.go index 089e41c159..eeb83da291 100644 --- a/client/mocks/db.go +++ b/client/mocks/db.go @@ -526,17 +526,24 @@ func (_c *DB_Events_Call) RunAndReturn(run func() *event.Bus) *DB_Events_Call { return _c } -// ExecRequest provides a mock function with given fields: ctx, request -func (_m *DB) ExecRequest(ctx context.Context, request string) *client.RequestResult { - ret := _m.Called(ctx, request) +// ExecRequest provides a mock function with given fields: ctx, request, opts +func (_m *DB) ExecRequest(ctx context.Context, request string, opts ...client.RequestOption) *client.RequestResult { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, request) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) if len(ret) == 0 { panic("no return value specified for ExecRequest") } var r0 *client.RequestResult - if rf, ok := ret.Get(0).(func(context.Context, string) *client.RequestResult); ok { - r0 = rf(ctx, request) + if rf, ok := ret.Get(0).(func(context.Context, string, ...client.RequestOption) *client.RequestResult); ok { + r0 = rf(ctx, request, opts...) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.RequestResult) @@ -554,13 +561,21 @@ type DB_ExecRequest_Call struct { // ExecRequest is a helper method to define mock.On call // - ctx context.Context // - request string -func (_e *DB_Expecter) ExecRequest(ctx interface{}, request interface{}) *DB_ExecRequest_Call { - return &DB_ExecRequest_Call{Call: _e.mock.On("ExecRequest", ctx, request)} +// - opts ...client.RequestOption +func (_e *DB_Expecter) ExecRequest(ctx interface{}, request interface{}, opts ...interface{}) *DB_ExecRequest_Call { + return &DB_ExecRequest_Call{Call: _e.mock.On("ExecRequest", + append([]interface{}{ctx, request}, opts...)...)} } -func (_c *DB_ExecRequest_Call) Run(run func(ctx context.Context, request string)) *DB_ExecRequest_Call { +func (_c *DB_ExecRequest_Call) Run(run func(ctx context.Context, request string, opts ...client.RequestOption)) *DB_ExecRequest_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(string)) + variadicArgs := make([]client.RequestOption, len(args)-2) + for i, a := range args[2:] { + if a != nil { + variadicArgs[i] = a.(client.RequestOption) + } + } + run(args[0].(context.Context), args[1].(string), variadicArgs...) 
}) return _c } @@ -570,7 +585,7 @@ func (_c *DB_ExecRequest_Call) Return(_a0 *client.RequestResult) *DB_ExecRequest return _c } -func (_c *DB_ExecRequest_Call) RunAndReturn(run func(context.Context, string) *client.RequestResult) *DB_ExecRequest_Call { +func (_c *DB_ExecRequest_Call) RunAndReturn(run func(context.Context, string, ...client.RequestOption) *client.RequestResult) *DB_ExecRequest_Call { _c.Call.Return(run) return _c } diff --git a/docs/website/references/cli/defradb_client_query.md b/docs/website/references/cli/defradb_client_query.md index 3a07ba7078..7519128bbd 100644 --- a/docs/website/references/cli/defradb_client_query.md +++ b/docs/website/references/cli/defradb_client_query.md @@ -30,8 +30,10 @@ defradb client query [-i --identity] [request] [flags] ### Options ``` - -f, --file string File containing the query request - -h, --help help for query + -f, --file string File containing the query request + -h, --help help for query + -o, --operation string Name of the operation to execute in the query + -v, --variables string JSON encoded variables to use in the query ``` ### Options inherited from parent commands diff --git a/docs/website/references/http/openapi.json b/docs/website/references/http/openapi.json index a8c02e72bf..0b511d7e5d 100644 --- a/docs/website/references/http/openapi.json +++ b/docs/website/references/http/openapi.json @@ -316,8 +316,15 @@ }, "graphql_request": { "properties": { + "operationName": { + "type": "string" + }, "query": { "type": "string" + }, + "variables": { + "additionalProperties": {}, + "type": "object" } }, "type": "object" diff --git a/go.mod b/go.mod index 19a200a01c..4a033fa5eb 100644 --- a/go.mod +++ b/go.mod @@ -2,6 +2,8 @@ module github.com/sourcenetwork/defradb go 1.22 +toolchain go1.22.0 + require ( github.com/bits-and-blooms/bitset v1.14.3 github.com/bxcodec/faker v2.0.1+incompatible @@ -45,7 +47,7 @@ require ( github.com/sourcenetwork/badger/v4 v4.2.1-0.20231113215945-a63444ca5276 github.com/sourcenetwork/corelog v0.0.8 github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.14 - github.com/sourcenetwork/graphql-go v0.7.10-0.20231113214537-a9560c1898dd + github.com/sourcenetwork/graphql-go v0.7.10-0.20240910200541-550a51c57c7d github.com/sourcenetwork/immutable v0.3.0 github.com/sourcenetwork/sourcehub v0.2.1-0.20240704194128-f43f5e427274 github.com/spf13/cobra v1.8.1 diff --git a/go.sum b/go.sum index b3193ca332..67e38ff226 100644 --- a/go.sum +++ b/go.sum @@ -1394,8 +1394,8 @@ github.com/sourcenetwork/corelog v0.0.8 h1:jCo0mFBpWrfhUCGzzN3uUtPGyQv3jnITdPO1s github.com/sourcenetwork/corelog v0.0.8/go.mod h1:cMabHgs3kARgYTQeQYSOmaGGP8XMU6sZrHd8LFrL3zA= github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.14 h1:620zKV4rOn7U5j/WsPkk4SFj0z9/pVV4bBx0BpZQgro= github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.14/go.mod h1:jUoQv592uUX1u7QBjAY4C+l24X9ArhPfifOqXpDHz4U= -github.com/sourcenetwork/graphql-go v0.7.10-0.20231113214537-a9560c1898dd h1:lmpW39/8wPJ0khWRhOcj7Bj0HYKbSmQ8rXMJw1cMB8U= -github.com/sourcenetwork/graphql-go v0.7.10-0.20231113214537-a9560c1898dd/go.mod h1:rkahXkgRH/3vZErN1Bx+qt1+w+CV5fgaJyKKWgISe4U= +github.com/sourcenetwork/graphql-go v0.7.10-0.20240910200541-550a51c57c7d h1:gpBJx/mKmpelxZfHT4AYhPYFgSy8DKp/Ca+bBzIIy2A= +github.com/sourcenetwork/graphql-go v0.7.10-0.20240910200541-550a51c57c7d/go.mod h1:rkahXkgRH/3vZErN1Bx+qt1+w+CV5fgaJyKKWgISe4U= github.com/sourcenetwork/immutable v0.3.0 h1:gHPtGvLrTBTK5YpDAhMU+u+S8v1F6iYmc3nbZLryMdc= github.com/sourcenetwork/immutable v0.3.0/go.mod 
h1:GD7ceuh/HD7z6cdIwzKK2ctzgZ1qqYFJpsFp+8qYnbI= github.com/sourcenetwork/raccoondb v0.2.1-0.20240606193653-1e91e9be9234 h1:8dA9bVC1A0ChJygtsUfNsek3oR0GnwpLoYpmEo4t2mk= diff --git a/http/client.go b/http/client.go index 6e5cc21276..239b087cd8 100644 --- a/http/client.go +++ b/http/client.go @@ -340,11 +340,22 @@ func (c *Client) GetAllIndexes(ctx context.Context) (map[client.CollectionName][ func (c *Client) ExecRequest( ctx context.Context, query string, + opts ...client.RequestOption, ) *client.RequestResult { methodURL := c.http.baseURL.JoinPath("graphql") result := &client.RequestResult{} - body, err := json.Marshal(&GraphQLRequest{query}) + gqlOptions := &client.GQLOptions{} + for _, o := range opts { + o(gqlOptions) + } + gqlRequest := &GraphQLRequest{ + Query: query, + OperationName: gqlOptions.OperationName, + Variables: gqlOptions.Variables, + } + + body, err := json.Marshal(gqlRequest) if err != nil { result.GQL.Errors = []error{err} return result diff --git a/http/handler_store.go b/http/handler_store.go index f9438d443f..6e28a96b31 100644 --- a/http/handler_store.go +++ b/http/handler_store.go @@ -242,7 +242,9 @@ func (s *storeHandler) PrintDump(rw http.ResponseWriter, req *http.Request) { } type GraphQLRequest struct { - Query string `json:"query"` + Query string `json:"query"` + OperationName string `json:"operationName"` + Variables map[string]any `json:"variables"` } type GraphQLResponse struct { @@ -299,7 +301,14 @@ func (s *storeHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) { return } - result := store.ExecRequest(req.Context(), request.Query) + var options []client.RequestOption + if request.OperationName != "" { + options = append(options, client.WithOperationName(request.OperationName)) + } + if len(request.Variables) > 0 { + options = append(options, client.WithVariables(request.Variables)) + } + result := store.ExecRequest(req.Context(), request.Query, options...) if result.Subscription == nil { responseJSON(rw, http.StatusOK, GraphQLResponse{result.GQL.Data, result.GQL.Errors}) diff --git a/internal/core/parser.go b/internal/core/parser.go index 619f3fd1c2..8955314e26 100644 --- a/internal/core/parser.go +++ b/internal/core/parser.go @@ -45,7 +45,7 @@ type Parser interface { ExecuteIntrospection(request string) *client.RequestResult // Parses the given request, returning a strongly typed model of that request. - Parse(*ast.Document) (*request.Request, []error) + Parse(*ast.Document, *client.GQLOptions) (*request.Request, []error) // NewFilterFromString creates a new filter from a string. NewFilterFromString(collectionType string, body string) (immutable.Option[request.Filter], error) diff --git a/internal/db/request.go b/internal/db/request.go index b7d77185fb..8063ca3f51 100644 --- a/internal/db/request.go +++ b/internal/db/request.go @@ -18,7 +18,7 @@ import ( ) // execRequest executes a request against the database. 
-func (db *db) execRequest(ctx context.Context, request string) *client.RequestResult { +func (db *db) execRequest(ctx context.Context, request string, options *client.GQLOptions) *client.RequestResult { res := &client.RequestResult{} ast, err := db.parser.BuildRequestAST(request) if err != nil { @@ -29,7 +29,7 @@ func (db *db) execRequest(ctx context.Context, request string) *client.RequestRe return db.parser.ExecuteIntrospection(request) } - parsedRequest, errors := db.parser.Parse(ast) + parsedRequest, errors := db.parser.Parse(ast, options) if len(errors) > 0 { res.GQL.Errors = errors return res diff --git a/internal/db/store.go b/internal/db/store.go index 1686b9af3e..5ebdde0605 100644 --- a/internal/db/store.go +++ b/internal/db/store.go @@ -21,7 +21,7 @@ import ( ) // ExecRequest executes a request against the database. -func (db *db) ExecRequest(ctx context.Context, request string) *client.RequestResult { +func (db *db) ExecRequest(ctx context.Context, request string, opts ...client.RequestOption) *client.RequestResult { ctx, txn, err := ensureContextTxn(ctx, db, false) if err != nil { res := &client.RequestResult{} @@ -30,7 +30,12 @@ func (db *db) ExecRequest(ctx context.Context, request string) *client.RequestRe } defer txn.Discard(ctx) - res := db.execRequest(ctx, request) + options := &client.GQLOptions{} + for _, o := range opts { + o(options) + } + + res := db.execRequest(ctx, request, options) if len(res.GQL.Errors) > 0 { return res } diff --git a/internal/db/view.go b/internal/db/view.go index 2664dd4a57..5ca702f26f 100644 --- a/internal/db/view.go +++ b/internal/db/view.go @@ -43,7 +43,7 @@ func (db *db) addView( return nil, err } - req, errs := db.parser.Parse(ast) + req, errs := db.parser.Parse(ast, &client.GQLOptions{}) if len(errs) > 0 { return nil, errors.Join(errs...) 
} diff --git a/internal/request/graphql/parser.go b/internal/request/graphql/parser.go index ab995e660c..f2a86430e0 100644 --- a/internal/request/graphql/parser.go +++ b/internal/request/graphql/parser.go @@ -84,7 +84,7 @@ func (p *parser) ExecuteIntrospection(request string) *client.RequestResult { return res } -func (p *parser) Parse(ast *ast.Document) (*request.Request, []error) { +func (p *parser) Parse(ast *ast.Document, options *client.GQLOptions) (*request.Request, []error) { schema := p.schemaManager.Schema() validationResult := gql.ValidateDocument(schema, ast, nil) if !validationResult.IsValid { @@ -95,7 +95,7 @@ func (p *parser) Parse(ast *ast.Document) (*request.Request, []error) { return nil, errors } - query, parsingErrors := defrap.ParseRequest(*schema, ast) + query, parsingErrors := defrap.ParseRequest(*schema, ast, options) if len(parsingErrors) > 0 { return nil, parsingErrors } diff --git a/internal/request/graphql/parser/commit.go b/internal/request/graphql/parser/commit.go index 8dc4db5aa3..a8074d031e 100644 --- a/internal/request/graphql/parser/commit.go +++ b/internal/request/graphql/parser/commit.go @@ -11,8 +11,6 @@ package parser import ( - "strconv" - gql "github.com/sourcenetwork/graphql-go" "github.com/sourcenetwork/graphql-go/language/ast" "github.com/sourcenetwork/immutable" @@ -21,7 +19,11 @@ import ( "github.com/sourcenetwork/defradb/internal/core" ) -func parseCommitSelect(schema gql.Schema, parent *gql.Object, field *ast.Field) (*request.CommitSelect, error) { +func parseCommitSelect( + exe *gql.ExecutionContext, + parent *gql.Object, + field *ast.Field, +) (*request.CommitSelect, error) { commit := &request.CommitSelect{ Field: request.Field{ Name: field.Name.Value, @@ -29,61 +31,39 @@ func parseCommitSelect(schema gql.Schema, parent *gql.Object, field *ast.Field) }, } + fieldDef := gql.GetFieldDef(exe.Schema, parent, field.Name.Value) + arguments := gql.GetArgumentValues(fieldDef.Args, field.Arguments, exe.VariableValues) + for _, argument := range field.Arguments { prop := argument.Name.Value if prop == request.DocIDArgName { - raw := argument.Value.(*ast.StringValue) - commit.DocID = immutable.Some(raw.Value) + commit.DocID = immutable.Some(arguments[prop].(string)) } else if prop == request.Cid { - raw := argument.Value.(*ast.StringValue) - commit.CID = immutable.Some(raw.Value) + commit.CID = immutable.Some(arguments[prop].(string)) } else if prop == request.FieldIDName { - raw := argument.Value.(*ast.StringValue) - commit.FieldID = immutable.Some(raw.Value) + commit.FieldID = immutable.Some(arguments[prop].(string)) } else if prop == request.OrderClause { - obj := argument.Value.(*ast.ObjectValue) - cond, err := ParseConditionsInOrder(obj) + conditions, err := ParseConditionsInOrder(argument.Value.(*ast.ObjectValue), arguments[prop].(map[string]any)) if err != nil { return nil, err } - commit.OrderBy = immutable.Some( - request.OrderBy{ - Conditions: cond, - }, - ) + commit.OrderBy = immutable.Some(request.OrderBy{ + Conditions: conditions, + }) } else if prop == request.LimitClause { - val := argument.Value.(*ast.IntValue) - limit, err := strconv.ParseUint(val.Value, 10, 64) - if err != nil { - return nil, err - } - commit.Limit = immutable.Some(limit) + commit.Limit = immutable.Some(uint64(arguments[prop].(int32))) } else if prop == request.OffsetClause { - val := argument.Value.(*ast.IntValue) - offset, err := strconv.ParseUint(val.Value, 10, 64) - if err != nil { - return nil, err - } - commit.Offset = immutable.Some(offset) + commit.Offset = 
immutable.Some(uint64(arguments[prop].(int32))) } else if prop == request.DepthClause { - raw := argument.Value.(*ast.IntValue) - depth, err := strconv.ParseUint(raw.Value, 10, 64) - if err != nil { - return nil, err - } - commit.Depth = immutable.Some(depth) + commit.Depth = immutable.Some(uint64(arguments[prop].(int32))) } else if prop == request.GroupByClause { - obj := argument.Value.(*ast.ListValue) fields := []string{} - for _, v := range obj.Values { - fields = append(fields, v.GetValue().(string)) + for _, v := range arguments[prop].([]any) { + fields = append(fields, v.(string)) } - - commit.GroupBy = immutable.Some( - request.GroupBy{ - Fields: fields, - }, - ) + commit.GroupBy = immutable.Some(request.GroupBy{ + Fields: fields, + }) } } @@ -105,14 +85,12 @@ func parseCommitSelect(schema gql.Schema, parent *gql.Object, field *ast.Field) return commit, nil } - fieldDef := gql.GetFieldDef(schema, parent, field.Name.Value) - fieldObject, err := typeFromFieldDef(fieldDef) if err != nil { return nil, err } - commit.Fields, err = parseSelectFields(schema, fieldObject, field.SelectionSet) + commit.Fields, err = parseSelectFields(exe, fieldObject, field.SelectionSet) return commit, err } diff --git a/internal/request/graphql/parser/filter.go b/internal/request/graphql/parser/filter.go index d7e7f44d40..a8ed1ae85f 100644 --- a/internal/request/graphql/parser/filter.go +++ b/internal/request/graphql/parser/filter.go @@ -11,7 +11,6 @@ package parser import ( - "strconv" "strings" gql "github.com/sourcenetwork/graphql-go" @@ -65,62 +64,53 @@ func NewFilterFromString( return NewFilter(obj, filterType) } -type parseFn func(*ast.ObjectValue) (any, error) - // ParseConditionsInOrder is similar to ParseConditions, except instead // of returning a map[string]any, we return a []any. This // is to maintain the ordering info of the statements within the ObjectValue. // This function is mostly used by the Order parser, which needs to parse // conditions in the same way as the Filter object, however the order // of the arguments is important. 
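// Illustrative sketch of the new inputs (values invented): arguments are now
// resolved up front via gql.GetArgumentValues, so an order statement such as
// {age: ASC} arrives here alongside args = map[string]any{"age": 0}, where the
// enum ints 0 and 1 map to request.ASC and request.DESC below.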
-func ParseConditionsInOrder(stmt *ast.ObjectValue) ([]request.OrderCondition, error) { - cond, err := parseConditionsInOrder(stmt) - if err != nil { - return nil, err - } - - if v, ok := cond.([]request.OrderCondition); ok { - return v, nil - } - return nil, client.NewErrUnexpectedType[[]request.OrderCondition]("condition", cond) -} - -func parseConditionsInOrder(stmt *ast.ObjectValue) (any, error) { +func ParseConditionsInOrder(stmt *ast.ObjectValue, args map[string]any) ([]request.OrderCondition, error) { conditions := make([]request.OrderCondition, 0) if stmt == nil { return conditions, nil } for _, field := range stmt.Fields { - name := field.Name.Value - val, err := parseVal(field.Value, parseConditionsInOrder) - if err != nil { - return nil, err - } + switch v := args[field.Name.Value].(type) { + case int: // base direction parsed (hopefully, check NameToOrderDirection) + var dir request.OrderDirection + switch v { + case 0: + dir = request.ASC + + case 1: + dir = request.DESC - switch v := val.(type) { - case string: // base direction parsed (hopefully, check NameToOrderDirection) - dir, ok := request.NameToOrderDirection[v] - if !ok { + default: return nil, ErrInvalidOrderDirection } conditions = append(conditions, request.OrderCondition{ - Fields: []string{name}, + Fields: []string{field.Name.Value}, Direction: dir, }) - case []request.OrderCondition: // flatten and incorporate the parsed slice into our current one - for _, cond := range v { + case map[string]any: // flatten and incorporate the parsed slice into our current one + sub, err := ParseConditionsInOrder(field.Value.(*ast.ObjectValue), v) + if err != nil { + return nil, err + } + for _, cond := range sub { // prepend the current field name, to the parsed condition from the slice // Eg. order: {author: {name: ASC, birthday: DESC}} // This results in an array of [name, birthday] converted to // [author.name, author.birthday]. // etc. - cond.Fields = append([]string{name}, cond.Fields...) + cond.Fields = append([]string{field.Name.Value}, cond.Fields...) conditions = append(conditions, cond) } default: - return nil, client.NewErrUnhandledType("parseConditionInOrder", val) + return nil, client.NewErrUnhandledType("parseConditionInOrder", v) } } @@ -149,47 +139,6 @@ func parseConditions(stmt *ast.ObjectValue, inputArg gql.Input) (any, error) { return val, nil } -// parseVal handles all the various input types, and extracts their -// values, with the correct types, into an any. 
-// recurses on ListValue or ObjectValue
-func parseVal(val ast.Value, recurseFn parseFn) (any, error) {
- switch val.GetKind() {
- case "IntValue":
- return strconv.ParseInt(val.GetValue().(string), 10, 64)
- case "FloatValue":
- return strconv.ParseFloat(val.GetValue().(string), 64)
- case "StringValue":
- return val.GetValue().(string), nil
- case "EnumValue":
- return val.GetValue().(string), nil
- case "BooleanValue":
- return val.GetValue().(bool), nil
-
- case "NullValue":
- return nil, nil
-
- case "ListValue":
- list := make([]any, 0)
- for _, item := range val.GetValue().([]ast.Value) {
- v, err := parseVal(item, recurseFn)
- if err != nil {
- return nil, err
- }
- list = append(list, v)
- }
- return list, nil
- case "ObjectValue":
- // check recurseFn, its either ParseConditions, or ParseConditionsInOrder
- conditions, err := recurseFn(val.(*ast.ObjectValue))
- if err != nil {
- return nil, err
- }
- return conditions, nil
- }
-
- return nil, ErrFailedToParseConditionValue
-}
-
// ParseFilterFieldsForDescription parses the fields that are defined in the SchemaDescription
// from the filter conditions.
func ParseFilterFieldsForDescription(
diff --git a/internal/request/graphql/parser/mutation.go b/internal/request/graphql/parser/mutation.go
index b329eeec88..95785b78c9 100644
--- a/internal/request/graphql/parser/mutation.go
+++ b/internal/request/graphql/parser/mutation.go
@@ -17,7 +17,6 @@ import (
"github.com/sourcenetwork/graphql-go/language/ast"
"github.com/sourcenetwork/immutable"
- "github.com/sourcenetwork/defradb/client"
"github.com/sourcenetwork/defradb/client/request"
)
@@ -32,7 +31,7 @@ var (
// parseMutationOperationDefinition parses the individual GraphQL
// 'mutation' operations, which there may be multiple of.
func parseMutationOperationDefinition(
- schema gql.Schema,
+ exe *gql.ExecutionContext,
def *ast.OperationDefinition,
) (*request.OperationDefinition, error) {
qdef := &request.OperationDefinition{
@@ -42,7 +41,7 @@
for i, selection := range def.SelectionSet.Selections {
switch node := selection.(type) {
case *ast.Field:
- mut, err := parseMutation(schema, schema.MutationType(), node)
+ mut, err := parseMutation(exe, exe.Schema.MutationType(), node)
if err != nil {
return nil, err
}
@@ -60,7 +59,7 @@
// parseMutation parses a typed mutation field
// which includes sub fields, and may include
// filters, IDs, payloads, etc.
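// For example (a sketch mirroring the integration tests added in this patch),
// a request such as
//
//	mutation { create_Users(input: {name: "Bob"}) { name } }
//
// is parsed into an ObjectMutation whose Input is map[string]any{"name": "Bob"}.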
-func parseMutation(schema gql.Schema, parent *gql.Object, field *ast.Field) (*request.ObjectMutation, error) {
+func parseMutation(exe *gql.ExecutionContext, parent *gql.Object, field *ast.Field) (*request.ObjectMutation, error) {
mut := &request.ObjectMutation{
Field: request.Field{
Name: field.Name.Value,
@@ -68,7 +67,8 @@
},
}
- fieldDef := gql.GetFieldDef(schema, parent, mut.Name)
+ fieldDef := gql.GetFieldDef(exe.Schema, parent, mut.Name)
+ arguments := gql.GetArgumentValues(fieldDef.Args, field.Arguments, exe.VariableValues)
// parse the mutation type
// mutation names are either generated from a type
@@ -100,55 +100,36 @@
prop := argument.Name.Value
// parse each individual arg type separately
if prop == request.Input { // parse input
- raw := argument.Value.(*ast.ObjectValue)
- mut.Input = parseMutationInputObject(raw)
+ mut.Input = arguments[prop].(map[string]any)
} else if prop == request.Inputs {
- raw := argument.Value.(*ast.ListValue)
-
- mut.Inputs = make([]map[string]any, len(raw.Values))
-
- for i, val := range raw.Values {
- doc, ok := val.(*ast.ObjectValue)
- if !ok {
- return nil, client.NewErrUnexpectedType[*ast.ObjectValue]("doc array element", val)
- }
- mut.Inputs[i] = parseMutationInputObject(doc)
+ inputsValue := arguments[prop].([]any)
+ inputs := make([]map[string]any, len(inputsValue))
+ for i, v := range inputsValue {
+ inputs[i] = v.(map[string]any)
}
+ mut.Inputs = inputs
} else if prop == request.FilterClause { // parse filter
- obj := argument.Value.(*ast.ObjectValue)
- filterType, ok := getArgumentType(fieldDef, request.FilterClause)
- if !ok {
- return nil, ErrFilterMissingArgumentType
- }
- filter, err := NewFilter(obj, filterType)
- if err != nil {
- return nil, err
- }
-
- mut.Filter = filter
+ mut.Filter = immutable.Some(request.Filter{
+ Conditions: arguments[prop].(map[string]any),
+ })
} else if prop == request.DocIDArgName {
- raw := argument.Value.(*ast.StringValue)
- mut.DocIDs = immutable.Some([]string{raw.Value})
+ mut.DocIDs = immutable.Some([]string{arguments[prop].(string)})
} else if prop == request.DocIDsArgName {
- raw := argument.Value.(*ast.ListValue)
- ids := make([]string, len(raw.Values))
- for i, val := range raw.Values {
- id, ok := val.(*ast.StringValue)
- if !ok {
- return nil, client.NewErrUnexpectedType[*ast.StringValue]("ids argument", val)
- }
- ids[i] = id.Value
+ docIDsValue := arguments[prop].([]any)
+ docIDs := make([]string, len(docIDsValue))
+ for i, v := range docIDsValue {
+ docIDs[i] = v.(string)
}
- mut.DocIDs = immutable.Some(ids)
+ mut.DocIDs = immutable.Some(docIDs)
} else if prop == request.EncryptDocArgName {
- mut.Encrypt = argument.Value.(*ast.BooleanValue).Value
+ mut.Encrypt = arguments[prop].(bool)
} else if prop == request.EncryptFieldsArgName {
- raw := argument.Value.(*ast.ListValue)
- fieldNames := make([]string, len(raw.Values))
- for i, val := range raw.Values {
- fieldNames[i] = val.GetValue().(string)
+ fieldsValue := arguments[prop].([]any)
+ fields := make([]string, len(fieldsValue))
+ for i, v := range fieldsValue {
+ fields[i] = v.(string)
}
- mut.EncryptFields = fieldNames
+ mut.EncryptFields = fields
}
}
@@ -162,47 +143,6 @@ func parseMutation(schema gql.Schema, parent *gql.Object, field *ast.Field) (*re
return nil, err
}
- mut.Fields, err = parseSelectFields(schema, fieldObject, field.SelectionSet)
+ mut.Fields, err =
parseSelectFields(exe, fieldObject, field.SelectionSet)
return mut, err
}
-
-// parseMutationInput parses the correct underlying
-// value type of the given ast.Value
-func parseMutationInput(val ast.Value) any {
- switch t := val.(type) {
- case *ast.IntValue:
- return gql.Int.ParseLiteral(val)
- case *ast.FloatValue:
- return gql.Float.ParseLiteral(val)
- case *ast.BooleanValue:
- return t.Value
- case *ast.StringValue:
- return t.Value
- case *ast.ObjectValue:
- return parseMutationInputObject(t)
- case *ast.ListValue:
- return parseMutationInputList(t)
- default:
- return val.GetValue()
- }
-}
-
-// parseMutationInputList parses the correct underlying
-// value type for all of the values in the ast.ListValue
-func parseMutationInputList(val *ast.ListValue) []any {
- list := make([]any, 0)
- for _, val := range val.Values {
- list = append(list, parseMutationInput(val))
- }
- return list
-}
-
-// parseMutationInputObject parses the correct underlying
-// value type for all of the fields in the ast.ObjectValue
-func parseMutationInputObject(val *ast.ObjectValue) map[string]any {
- obj := make(map[string]any)
- for _, field := range val.Fields {
- obj[field.Name.Value] = parseMutationInput(field.Value)
- }
- return obj
-}
diff --git a/internal/request/graphql/parser/query.go b/internal/request/graphql/parser/query.go
index 48fde3db1f..1284463ac0 100644
--- a/internal/request/graphql/parser/query.go
+++ b/internal/request/graphql/parser/query.go
@@ -11,21 +11,17 @@ package parser
import (
- "strconv"
-
gql "github.com/sourcenetwork/graphql-go"
"github.com/sourcenetwork/graphql-go/language/ast"
"github.com/sourcenetwork/immutable"
- "github.com/sourcenetwork/defradb/client"
-
"github.com/sourcenetwork/defradb/client/request"
)
// parseQueryOperationDefinition parses the individual GraphQL
// 'query' operations, which there may be multiple of.
func parseQueryOperationDefinition(
- schema gql.Schema,
+ exe *gql.ExecutionContext,
def *ast.OperationDefinition,
) (*request.OperationDefinition, []error) {
qdef := &request.OperationDefinition{
@@ -37,14 +33,14 @@
switch node := selection.(type) {
case *ast.Field:
if _, isCommitQuery := request.CommitQueries[node.Name.Value]; isCommitQuery {
- parsed, err := parseCommitSelect(schema, schema.QueryType(), node)
+ parsed, err := parseCommitSelect(exe, exe.Schema.QueryType(), node)
if err != nil {
return nil, []error{err}
}
parsedSelection = parsed
} else if _, isAggregate := request.Aggregates[node.Name.Value]; isAggregate {
- parsed, err := parseAggregate(schema, schema.QueryType(), node, i)
+ parsed, err := parseAggregate(exe, exe.Schema.QueryType(), node)
if err != nil {
return nil, []error{err}
}
@@ -64,7 +60,7 @@
} else {
// the query doesn't match a reserved name
// so it's probably a generated query
- parsed, err := parseSelect(schema, schema.QueryType(), node, i)
+ parsed, err := parseSelect(exe, exe.Schema.QueryType(), node)
if err != nil {
return nil, []error{err}
}
@@ -91,10 +87,9 @@
// which includes sub fields, and may include
// filters, limits, orders, etc.
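// For example (a sketch; field names borrowed from the integration tests in
// this patch), a selection such as
//
//	Users(filter: {Age: {_lt: $age}}, order: {Age: ASC}, limit: 10)
//
// has $age resolved through exe.VariableValues and yields a Select with
// Filter, OrderBy, and Limit populated.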
func parseSelect( - schema gql.Schema, + exe *gql.ExecutionContext, parent *gql.Object, field *ast.Field, - index int, ) (*request.Select, error) { slct := &request.Select{ Field: request.Field{ @@ -103,80 +98,56 @@ func parseSelect( }, } - fieldDef := gql.GetFieldDef(schema, parent, slct.Name) + fieldDef := gql.GetFieldDef(exe.Schema, parent, field.Name.Value) + arguments := gql.GetArgumentValues(fieldDef.Args, field.Arguments, exe.VariableValues) // parse arguments for _, argument := range field.Arguments { - prop := argument.Name.Value - astValue := argument.Value + name := argument.Name.Value + value := arguments[name] // parse filter - switch prop { + switch name { case request.FilterClause: - obj := astValue.(*ast.ObjectValue) - filterType, ok := getArgumentType(fieldDef, request.FilterClause) - if !ok { - return nil, ErrFilterMissingArgumentType - } - filter, err := NewFilter(obj, filterType) - if err != nil { - return slct, err - } - - slct.Filter = filter + slct.Filter = immutable.Some(request.Filter{ + Conditions: value.(map[string]any), + }) case request.DocIDArgName: // parse single DocID field - docIDValue := astValue.(*ast.StringValue) - slct.DocIDs = immutable.Some([]string{docIDValue.Value}) + slct.DocIDs = immutable.Some([]string{value.(string)}) case request.DocIDsArgName: - docIDValues := astValue.(*ast.ListValue).Values + docIDValues := value.([]any) docIDs := make([]string, len(docIDValues)) for i, value := range docIDValues { - docIDs[i] = value.(*ast.StringValue).Value + docIDs[i] = value.(string) } slct.DocIDs = immutable.Some(docIDs) case request.Cid: // parse single CID query field - val := astValue.(*ast.StringValue) - slct.CID = immutable.Some(val.Value) + slct.CID = immutable.Some(value.(string)) case request.LimitClause: // parse limit/offset - val := astValue.(*ast.IntValue) - limit, err := strconv.ParseUint(val.Value, 10, 64) - if err != nil { - return nil, err - } - slct.Limit = immutable.Some(limit) + slct.Limit = immutable.Some(uint64(value.(int32))) case request.OffsetClause: // parse limit/offset - val := astValue.(*ast.IntValue) - offset, err := strconv.ParseUint(val.Value, 10, 64) - if err != nil { - return nil, err - } - slct.Offset = immutable.Some(offset) + slct.Offset = immutable.Some(uint64(value.(int32))) case request.OrderClause: // parse order by - obj := astValue.(*ast.ObjectValue) - cond, err := ParseConditionsInOrder(obj) + conditionsAST := argument.Value.(*ast.ObjectValue) + conditionsValue := value.(map[string]any) + conditions, err := ParseConditionsInOrder(conditionsAST, conditionsValue) if err != nil { return nil, err } - slct.OrderBy = immutable.Some( - request.OrderBy{ - Conditions: cond, - }, - ) + slct.OrderBy = immutable.Some(request.OrderBy{ + Conditions: conditions, + }) case request.GroupByClause: - obj := astValue.(*ast.ListValue) - fields := make([]string, 0) - for _, v := range obj.Values { - fields = append(fields, v.GetValue().(string)) + fieldsValue := value.([]any) + fields := make([]string, len(fieldsValue)) + for i, v := range fieldsValue { + fields[i] = v.(string) } - - slct.GroupBy = immutable.Some( - request.GroupBy{ - Fields: fields, - }, - ) + slct.GroupBy = immutable.Some(request.GroupBy{ + Fields: fields, + }) case request.ShowDeleted: - val := astValue.(*ast.BooleanValue) - slct.ShowDeleted = val.Value + slct.ShowDeleted = value.(bool) } } @@ -191,7 +162,7 @@ func parseSelect( return nil, err } - slct.Fields, err = parseSelectFields(schema, fieldObject, field.SelectionSet) + slct.Fields, err = 
parseSelectFields(exe, fieldObject, field.SelectionSet) if err != nil { return nil, err } @@ -199,112 +170,87 @@ func parseSelect( return slct, err } -func parseAggregate(schema gql.Schema, parent *gql.Object, field *ast.Field, index int) (*request.Aggregate, error) { +func parseAggregate( + exe *gql.ExecutionContext, + parent *gql.Object, + field *ast.Field, +) (*request.Aggregate, error) { targets := make([]*request.AggregateTarget, len(field.Arguments)) + fieldDef := gql.GetFieldDef(exe.Schema, parent, field.Name.Value) + arguments := gql.GetArgumentValues(fieldDef.Args, field.Arguments, exe.VariableValues) + for i, argument := range field.Arguments { - switch argumentValue := argument.Value.GetValue().(type) { + name := argument.Name.Value + value := arguments[name] + + switch v := value.(type) { case string: targets[i] = &request.AggregateTarget{ - HostName: argumentValue, + HostName: v, } - case []*ast.ObjectField: - hostName := argument.Name.Value + case map[string]any: var childName string var filter immutable.Option[request.Filter] var limit immutable.Option[uint64] var offset immutable.Option[uint64] var order immutable.Option[request.OrderBy] - fieldArg, hasFieldArg := tryGet(argumentValue, request.FieldName) - if hasFieldArg { - if innerPathStringValue, isString := fieldArg.Value.GetValue().(string); isString { - childName = innerPathStringValue - } - } - - filterArg, hasFilterArg := tryGet(argumentValue, request.FilterClause) - if hasFilterArg { - fieldDef := gql.GetFieldDef(schema, parent, field.Name.Value) - argType, ok := getArgumentType(fieldDef, hostName) - if !ok { - return nil, ErrFilterMissingArgumentType - } - argTypeObject, ok := argType.(*gql.InputObject) - if !ok { - return nil, client.NewErrUnexpectedType[*gql.InputObject]("arg type", argType) - } - filterType, ok := getArgumentTypeFromInput(argTypeObject, request.FilterClause) - if !ok { - return nil, ErrFilterMissingArgumentType - } - filterObjVal, ok := filterArg.Value.(*ast.ObjectValue) - if !ok { - return nil, client.NewErrUnexpectedType[*gql.InputObject]("filter arg", filterArg.Value) - } - filterValue, err := NewFilter(filterObjVal, filterType) - if err != nil { - return nil, err - } - filter = filterValue - } - - limitArg, hasLimitArg := tryGet(argumentValue, request.LimitClause) - if hasLimitArg { - limitValue, err := strconv.ParseUint(limitArg.Value.(*ast.IntValue).Value, 10, 64) - if err != nil { - return nil, err - } - limit = immutable.Some(limitValue) - } - - offsetArg, hasOffsetArg := tryGet(argumentValue, request.OffsetClause) - if hasOffsetArg { - offsetValue, err := strconv.ParseUint(offsetArg.Value.(*ast.IntValue).Value, 10, 64) - if err != nil { - return nil, err - } - offset = immutable.Some(offsetValue) - } - - orderArg, hasOrderArg := tryGet(argumentValue, request.OrderClause) - if hasOrderArg { - switch orderArgValue := orderArg.Value.(type) { - case *ast.EnumValue: - // For inline arrays the order arg will be a simple enum declaring the order direction - orderDirectionString := orderArgValue.Value - orderDirection := request.OrderDirection(orderDirectionString) - - order = immutable.Some( - request.OrderBy{ - Conditions: []request.OrderCondition{ - { - Direction: orderDirection, - }, - }, - }, - ) - - case *ast.ObjectValue: - // For relations the order arg will be the complex order object as used by the host object - // for non-aggregate ordering - - // We use the parser package parsing for convienience here - orderConditions, err := ParseConditionsInOrder(orderArgValue) - if err != nil { 
- return nil, err + for _, f := range argument.Value.(*ast.ObjectValue).Fields { + switch f.Name.Value { + case request.FieldName: + childName = v[request.FieldName].(string) + + case request.FilterClause: + filter = immutable.Some(request.Filter{ + Conditions: v[request.FilterClause].(map[string]any), + }) + + case request.LimitClause: + limit = immutable.Some(uint64(v[request.LimitClause].(int32))) + + case request.OffsetClause: + offset = immutable.Some(uint64(v[request.OffsetClause].(int32))) + + case request.OrderClause: + switch conditionsAST := f.Value.(type) { + case *ast.EnumValue: + // For inline arrays the order arg will be a simple enum declaring the order direction + var orderDirection request.OrderDirection + switch v[request.OrderClause].(int) { + case 0: + orderDirection = request.ASC + + case 1: + orderDirection = request.DESC + + default: + return nil, ErrInvalidOrderDirection + } + + order = immutable.Some(request.OrderBy{ + Conditions: []request.OrderCondition{{ + Direction: orderDirection, + }}, + }) + + case *ast.ObjectValue: + // For relations the order arg will be the complex order object as used by the host object + // for non-aggregate ordering + conditionsValue := v[request.OrderClause].(map[string]any) + conditions, err := ParseConditionsInOrder(conditionsAST, conditionsValue) + if err != nil { + return nil, err + } + order = immutable.Some(request.OrderBy{ + Conditions: conditions, + }) } - - order = immutable.Some( - request.OrderBy{ - Conditions: orderConditions, - }, - ) } } targets[i] = &request.AggregateTarget{ - HostName: hostName, + HostName: name, ChildName: immutable.Some(childName), Filterable: request.Filterable{ Filter: filter, diff --git a/internal/request/graphql/parser/request.go b/internal/request/graphql/parser/request.go index eda5e53169..bc5a8f9510 100644 --- a/internal/request/graphql/parser/request.go +++ b/internal/request/graphql/parser/request.go @@ -23,10 +23,19 @@ import ( // ParseRequest parses a root ast.Document, and returns a formatted Request object. // Requires a non-nil doc, will error otherwise. 
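// A sketch of a typical call site (assumes a built schema and a parsed
// document; the option values are invented for illustration):
//
//	req, errs := ParseRequest(schema, doc, &client.GQLOptions{
//		OperationName: "UsersByName",
//		Variables:     map[string]any{"age": 50},
//	})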
-func ParseRequest(schema gql.Schema, doc *ast.Document) (*request.Request, []error) { +func ParseRequest(schema gql.Schema, doc *ast.Document, options *client.GQLOptions) (*request.Request, []error) { if doc == nil { return nil, []error{client.NewErrUninitializeProperty("ParseRequest", "doc")} } + exe, err := gql.BuildExecutionContext(gql.BuildExecutionCtxParams{ + Schema: schema, + AST: doc, + OperationName: options.OperationName, + Args: options.Variables, + }) + if err != nil { + return nil, []error{err} + } r := &request.Request{ Queries: make([]*request.OperationDefinition, 0), @@ -34,58 +43,51 @@ func ParseRequest(schema gql.Schema, doc *ast.Document) (*request.Request, []err Subscription: make([]*request.OperationDefinition, 0), } - for _, def := range doc.Definitions { - astOpDef, isOpDef := def.(*ast.OperationDefinition) - if !isOpDef { - continue + astOpDef := exe.Operation.(*ast.OperationDefinition) + switch exe.Operation.GetOperation() { + case ast.OperationTypeQuery: + parsedQueryOpDef, errs := parseQueryOperationDefinition(exe, exe.Operation.(*ast.OperationDefinition)) + if errs != nil { + return nil, errs } + parsedDirectives, err := parseDirectives(astOpDef.Directives) + if err != nil { + return nil, []error{err} + } + parsedQueryOpDef.Directives = parsedDirectives - switch astOpDef.Operation { - case ast.OperationTypeQuery: - parsedQueryOpDef, errs := parseQueryOperationDefinition(schema, astOpDef) - if errs != nil { - return nil, errs - } - - parsedDirectives, err := parseDirectives(astOpDef.Directives) - if err != nil { - return nil, []error{err} - } - parsedQueryOpDef.Directives = parsedDirectives - - r.Queries = append(r.Queries, parsedQueryOpDef) + r.Queries = append(r.Queries, parsedQueryOpDef) - case ast.OperationTypeMutation: - parsedMutationOpDef, err := parseMutationOperationDefinition(schema, astOpDef) - if err != nil { - return nil, []error{err} - } + case ast.OperationTypeMutation: + parsedMutationOpDef, err := parseMutationOperationDefinition(exe, astOpDef) + if err != nil { + return nil, []error{err} + } - parsedDirectives, err := parseDirectives(astOpDef.Directives) - if err != nil { - return nil, []error{err} - } - parsedMutationOpDef.Directives = parsedDirectives + parsedDirectives, err := parseDirectives(astOpDef.Directives) + if err != nil { + return nil, []error{err} + } + parsedMutationOpDef.Directives = parsedDirectives - r.Mutations = append(r.Mutations, parsedMutationOpDef) + r.Mutations = append(r.Mutations, parsedMutationOpDef) - case ast.OperationTypeSubscription: - parsedSubscriptionOpDef, err := parseSubscriptionOperationDefinition(schema, astOpDef) - if err != nil { - return nil, []error{err} - } + case ast.OperationTypeSubscription: + parsedSubscriptionOpDef, err := parseSubscriptionOperationDefinition(exe, astOpDef) + if err != nil { + return nil, []error{err} + } - parsedDirectives, err := parseDirectives(astOpDef.Directives) - if err != nil { - return nil, []error{err} - } - parsedSubscriptionOpDef.Directives = parsedDirectives + parsedDirectives, err := parseDirectives(astOpDef.Directives) + if err != nil { + return nil, []error{err} + } + parsedSubscriptionOpDef.Directives = parsedDirectives - r.Subscription = append(r.Subscription, parsedSubscriptionOpDef) + r.Subscription = append(r.Subscription, parsedSubscriptionOpDef) - default: - return nil, []error{ErrUnknownGQLOperation} - } + default: + return nil, []error{ErrUnknownGQLOperation} } return r, nil @@ -161,16 +163,17 @@ func getFieldAlias(field *ast.Field) 
immutable.Option[string] {
}
func parseSelectFields(
- schema gql.Schema,
+ exe *gql.ExecutionContext,
parent *gql.Object,
- fields *ast.SelectionSet) ([]request.Selection, error) {
+ fields *ast.SelectionSet,
+) ([]request.Selection, error) {
selections := make([]request.Selection, len(fields.Selections))
// parse field selections
for i, selection := range fields.Selections {
switch node := selection.(type) {
case *ast.Field:
if _, isAggregate := request.Aggregates[node.Name.Value]; isAggregate {
- s, err := parseAggregate(schema, parent, node, i)
+ s, err := parseAggregate(exe, parent, node)
if err != nil {
return nil, err
}
@@ -178,7 +181,7 @@
} else if node.SelectionSet == nil { // regular field
selections[i] = parseField(node)
} else { // sub type with extra fields
- s, err := parseSelect(schema, parent, node, i)
+ s, err := parseSelect(exe, parent, node)
if err != nil {
return nil, err
}
@@ -199,15 +202,6 @@ func parseField(field *ast.Field) *request.Field {
}
}
-func tryGet(fields []*ast.ObjectField, name string) (*ast.ObjectField, bool) {
- for _, field := range fields {
- if field.Name.Value == name {
- return field, true
- }
- }
- return nil, false
-}
-
func getArgumentType(field *gql.FieldDefinition, name string) (gql.Input, bool) {
for _, arg := range field.Args {
if arg.Name() == name {
@@ -217,15 +211,6 @@
return nil, false
}
-func getArgumentTypeFromInput(input *gql.InputObject, name string) (gql.Input, bool) {
- for fname, ftype := range input.Fields() {
- if fname == name {
- return ftype.Type, true
- }
- }
- return nil, false
-}
-
// typeFromFieldDef will return the output gql.Object type from the given field.
// The return type may be a gql.Object or a gql.List, if it is a List type, we
// need to get the concrete "OfType".
diff --git a/internal/request/graphql/parser/subscription.go b/internal/request/graphql/parser/subscription.go
index 0e6042f931..4c6f5e3f5f 100644
--- a/internal/request/graphql/parser/subscription.go
+++ b/internal/request/graphql/parser/subscription.go
@@ -13,6 +13,7 @@ package parser
import (
gql "github.com/sourcenetwork/graphql-go"
"github.com/sourcenetwork/graphql-go/language/ast"
+ "github.com/sourcenetwork/immutable"
"github.com/sourcenetwork/defradb/client/request"
)
@@ -20,7 +21,7 @@ import (
// parseSubscriptionOperationDefinition parses the individual GraphQL
// 'subscription' operations, which there may be multiple of.
func parseSubscriptionOperationDefinition(
- schema gql.Schema,
+ exe *gql.ExecutionContext,
def *ast.OperationDefinition,
) (*request.OperationDefinition, error) {
sdef := &request.OperationDefinition{
@@ -30,7 +31,7 @@
for i, selection := range def.SelectionSet.Selections {
switch node := selection.(type) {
case *ast.Field:
- sub, err := parseSubscription(schema, node)
+ sub, err := parseSubscription(exe, node)
if err != nil {
return nil, err
}
@@ -44,7 +45,7 @@
// parseSubscription parses a typed subscription field
// which includes sub fields, and may include
// filters, IDs, etc.
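// For example (illustrative; the filter value is invented), a request such as
//
//	subscription { Users(filter: {Name: {_eq: "Bob"}}) { Name } }
//
// becomes an ObjectSubscription whose Filter conditions are taken from the
// resolved arguments.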
-func parseSubscription(schema gql.Schema, field *ast.Field) (*request.ObjectSubscription, error) { +func parseSubscription(exe *gql.ExecutionContext, field *ast.Field) (*request.ObjectSubscription, error) { sub := &request.ObjectSubscription{ Field: request.Field{ Name: field.Name.Value, @@ -54,23 +55,13 @@ func parseSubscription(schema gql.Schema, field *ast.Field) (*request.ObjectSubs sub.Collection = sub.Name - fieldDef := gql.GetFieldDef(schema, schema.QueryType(), field.Name.Value) + fieldDef := gql.GetFieldDef(exe.Schema, exe.Schema.QueryType(), field.Name.Value) + arguments := gql.GetArgumentValues(fieldDef.Args, field.Arguments, exe.VariableValues) - for _, argument := range field.Arguments { - prop := argument.Name.Value - if prop == request.FilterClause { - filterType, ok := getArgumentType(fieldDef, request.FilterClause) - if !ok { - return nil, ErrFilterMissingArgumentType - } - obj := argument.Value.(*ast.ObjectValue) - filter, err := NewFilter(obj, filterType) - if err != nil { - return nil, err - } - - sub.Filter = filter - } + if v, ok := arguments[request.FilterClause]; ok { + sub.Filter = immutable.Some(request.Filter{ + Conditions: v.(map[string]any), + }) } // parse field selections @@ -79,6 +70,6 @@ func parseSubscription(schema gql.Schema, field *ast.Field) (*request.ObjectSubs return nil, err } - sub.Fields, err = parseSelectFields(schema, fieldObject, field.SelectionSet) + sub.Fields, err = parseSelectFields(exe, fieldObject, field.SelectionSet) return sub, err } diff --git a/internal/request/graphql/schema/generate.go b/internal/request/graphql/schema/generate.go index b851261c01..c198296ffb 100644 --- a/internal/request/graphql/schema/generate.go +++ b/internal/request/graphql/schema/generate.go @@ -109,6 +109,7 @@ func (g *Generator) generate(ctx context.Context, collections []client.Collectio // for each built type generate query inputs queryType := g.manager.schema.QueryType() + subscriptionType := g.manager.schema.SubscriptionType() generatedQueryFields := make([]*gql.Field, 0) for _, t := range g.typeDefs { f, err := g.GenerateQueryInputForGQLType(ctx, t) @@ -133,6 +134,7 @@ func (g *Generator) generate(ctx context.Context, collections []client.Collectio } queryType.AddFieldConfig(f.Name, f) + subscriptionType.AddFieldConfig(f.Name, f) } // resolve types diff --git a/internal/request/graphql/schema/manager.go b/internal/request/graphql/schema/manager.go index d409fe96b5..5c9a6c73c8 100644 --- a/internal/request/graphql/schema/manager.go +++ b/internal/request/graphql/schema/manager.go @@ -45,9 +45,10 @@ func NewSchemaManager() (*SchemaManager, error) { crdtEnum, explainEnum, ), - Query: defaultQueryType(commitObject, commitsOrderArg), - Mutation: defaultMutationType(), - Directives: defaultDirectivesType(crdtEnum, explainEnum, orderEnum), + Query: defaultQueryType(commitObject, commitsOrderArg), + Mutation: defaultMutationType(), + Subscription: defaultSubscriptionType(), + Directives: defaultDirectivesType(crdtEnum, explainEnum, orderEnum), }) if err != nil { return sm, err @@ -127,6 +128,18 @@ func defaultMutationType() *gql.Object { }) } +func defaultSubscriptionType() *gql.Object { + return gql.NewObject(gql.ObjectConfig{ + Name: "Subscription", + Fields: gql.Fields{ + "_": &gql.Field{ + Name: "_", + Type: gql.Boolean, + }, + }, + }) +} + // default directives type. 
func defaultDirectivesType( crdtEnum *gql.Enum, diff --git a/tests/bench/query/planner/utils.go b/tests/bench/query/planner/utils.go index b2a6e3c0d6..43669fa53f 100644 --- a/tests/bench/query/planner/utils.go +++ b/tests/bench/query/planner/utils.go @@ -17,6 +17,7 @@ import ( "github.com/sourcenetwork/defradb/acp" acpIdentity "github.com/sourcenetwork/defradb/acp/identity" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/internal/core" @@ -41,7 +42,7 @@ func runQueryParserBench( b.ResetTimer() for i := 0; i < b.N; i++ { ast, _ := parser.BuildRequestAST(query) - _, errs := parser.Parse(ast) + _, errs := parser.Parse(ast, &client.GQLOptions{}) if errs != nil { return errors.Wrap("failed to parse query string", errors.New(fmt.Sprintf("%v", errs))) } @@ -69,7 +70,7 @@ func runMakePlanBench( } ast, _ := parser.BuildRequestAST(query) - q, errs := parser.Parse(ast) + q, errs := parser.Parse(ast, &client.GQLOptions{}) if len(errs) > 0 { return errors.Wrap("failed to parse query string", errors.New(fmt.Sprintf("%v", errs))) } diff --git a/tests/clients/cli/wrapper.go b/tests/clients/cli/wrapper.go index 8600448968..76b99ed69f 100644 --- a/tests/clients/cli/wrapper.go +++ b/tests/clients/cli/wrapper.go @@ -396,11 +396,28 @@ func (w *Wrapper) GetAllIndexes(ctx context.Context) (map[client.CollectionName] func (w *Wrapper) ExecRequest( ctx context.Context, query string, + opts ...client.RequestOption, ) *client.RequestResult { args := []string{"client", "query"} args = append(args, query) + options := &client.GQLOptions{} + for _, o := range opts { + o(options) + } + result := &client.RequestResult{} + if options.OperationName != "" { + args = append(args, "--operation", options.OperationName) + } + if len(options.Variables) > 0 { + enc, err := json.Marshal(options.Variables) + if err != nil { + result.GQL.Errors = []error{err} + return result + } + args = append(args, "--variables", string(enc)) + } stdOut, stdErr, err := w.cmd.executeStream(ctx, args) if err != nil { diff --git a/tests/clients/http/wrapper.go b/tests/clients/http/wrapper.go index 5a813c9265..734b7f2b28 100644 --- a/tests/clients/http/wrapper.go +++ b/tests/clients/http/wrapper.go @@ -171,8 +171,9 @@ func (w *Wrapper) GetAllIndexes(ctx context.Context) (map[client.CollectionName] func (w *Wrapper) ExecRequest( ctx context.Context, query string, + opts ...client.RequestOption, ) *client.RequestResult { - return w.client.ExecRequest(ctx, query) + return w.client.ExecRequest(ctx, query, opts...) } func (w *Wrapper) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { diff --git a/tests/integration/mutation/create/with_variables_test.go b/tests/integration/mutation/create/with_variables_test.go new file mode 100644 index 0000000000..a77e788efb --- /dev/null +++ b/tests/integration/mutation/create/with_variables_test.go @@ -0,0 +1,86 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package create + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + + "github.com/sourcenetwork/immutable" +) + +func TestMutationCreateWithNonNullVariable(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple create mutation with non null variable input.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.Request{ + Variables: immutable.Some(map[string]any{ + "user": map[string]any{ + "name": "Bob", + }, + }), + Request: `mutation($user: UsersMutationInputArg!) { + create_Users(input: $user) { + name + } + }`, + Results: map[string]any{ + "create_Users": []map[string]any{ + { + "name": "Bob", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationCreateWithDefaultVariable(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple create mutation with default variable input.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.Request{ + Request: `mutation($user: UsersMutationInputArg = {name: "Bob"}) { + create_Users(input: $user) { + name + } + }`, + Results: map[string]any{ + "create_Users": []map[string]any{ + { + "name": "Bob", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/one_to_one/with_clashing_id_field_test.go b/tests/integration/query/one_to_one/with_clashing_id_field_test.go index 9097b80b4b..ed106d35c1 100644 --- a/tests/integration/query/one_to_one/with_clashing_id_field_test.go +++ b/tests/integration/query/one_to_one/with_clashing_id_field_test.go @@ -14,12 +14,20 @@ import ( "testing" testUtils "github.com/sourcenetwork/defradb/tests/integration" + + "github.com/sourcenetwork/immutable" ) // This documents unwanted behaviour, see https://github.com/sourcenetwork/defradb/issues/1520 func TestQueryOneToOneWithClashingIdFieldOnSecondary(t *testing.T) { test := testUtils.TestCase{ Description: "One-to-one relation secondary direction, id field with name clash on secondary side", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL will parse the input type as ID and + // will return an unexpected type error + testUtils.CollectionSaveMutationType, + testUtils.CollectionNamedMutationType, + }), Actions: []any{ testUtils.SchemaUpdate{ Schema: ` diff --git a/tests/integration/query/simple/with_operation_name_test.go b/tests/integration/query/simple/with_operation_name_test.go new file mode 100644 index 0000000000..5986cede0c --- /dev/null +++ b/tests/integration/query/simple/with_operation_name_test.go @@ -0,0 +1,122 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + + "github.com/sourcenetwork/immutable" +) + +func TestQuerySimpleMultipleOperationsWithOperationName(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query multiple operations with operation name", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 40 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 21 + }`, + }, + testUtils.Request{ + OperationName: immutable.Some("UsersByName"), + Request: `query UsersByName { + Users { + Name + } + } + query UsersByAge { + Users { + Age + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "Bob", + }, + { + "Name": "Alice", + }, + }, + }, + }, + testUtils.Request{ + OperationName: immutable.Some("UsersByAge"), + Request: `query UsersByName { + Users { + Name + } + } + query UsersByAge { + Users { + Age + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Age": int64(21), + }, + { + "Age": int64(40), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimpleMultipleOperationsWithNoOperationName_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query multiple operations with no operation name", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 40 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 21 + }`, + }, + testUtils.Request{ + Request: `query UsersByName { + Users { + Name + } + } + query UsersByAge { + Users { + Age + } + }`, + ExpectedError: "Must provide operation name if query contains multiple operations.", + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_variables_test.go b/tests/integration/query/simple/with_variables_test.go new file mode 100644 index 0000000000..5192c00ce4 --- /dev/null +++ b/tests/integration/query/simple/with_variables_test.go @@ -0,0 +1,170 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + + "github.com/sourcenetwork/immutable" +) + +func TestQuerySimpleWithNonNullVariable(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with non null variable", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 40 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 21 + }`, + }, + testUtils.Request{ + Variables: immutable.Some(map[string]any{ + "age": 50, + "ord": "ASC", + }), + Request: `query($age: Int!, $ord: Ordering!) 
{ + Users(filter: {Age: {_lt: $age}}, order: {Age: $ord}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "Bob", + }, + { + "Name": "Alice", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimpleWithVariableDefaultValue(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with variable default value", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 40 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 21 + }`, + }, + testUtils.Request{ + Request: `query($age: Int = 50, $ord: Ordering = ASC) { + Users(filter: {Age: {_lt: $age}}, order: {Age: $ord}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "Bob", + }, + { + "Name": "Alice", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimpleWithNonNullVariable_ReturnsErrorWhenNull(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with non null variable returns error when null", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 40 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 21 + }`, + }, + testUtils.Request{ + Request: `query($age: Int!) { + Users(filter: {Age: {_lt: $age}}) { + Name + } + }`, + ExpectedError: "Variable \"$age\" of required type \"Int!\" was not provided.", + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimpleWithVariableDefaultValueOverride(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with variable default value override", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 40 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 21 + }`, + }, + testUtils.Request{ + Variables: immutable.Some(map[string]any{ + "age": int64(30), + }), + Request: `query($age: Int = 50) { + Users(filter: {Age: {_lt: $age}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "Bob", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go index 689b8cb815..147ef5899b 100644 --- a/tests/integration/test_case.go +++ b/tests/integration/test_case.go @@ -532,6 +532,12 @@ type Request struct { // Used to identify the transaction for this to run against. Optional. TransactionID immutable.Option[int] + // OperationName sets the operation name option for the request. + OperationName immutable.Option[string] + + // Variables sets the variables option for the request. + Variables immutable.Option[map[string]any] + // The request to execute. Request string diff --git a/tests/integration/utils.go b/tests/integration/utils.go index 4e5cf09a06..b63692365b 100644 --- a/tests/integration/utils.go +++ b/tests/integration/utils.go @@ -1704,7 +1704,14 @@ func executeRequest( identity := getIdentity(s, nodeID, action.Identity) ctx = db.SetContextIdentity(ctx, identity) - result := node.ExecRequest(ctx, action.Request) + var options []client.RequestOption + if action.OperationName.HasValue() { + options = append(options, client.WithOperationName(action.OperationName.Value())) + } + if action.Variables.HasValue() { + options = append(options, client.WithVariables(action.Variables.Value())) + } + result := node.ExecRequest(ctx, action.Request, options...) 
anyOfByFieldKey := map[docFieldKey][]any{} expectedErrorRaised = assertRequestResults( From f5567f52f9cd711cb8731a2e8059a9aa9f6d29f6 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 16 Sep 2024 09:03:26 -0700 Subject: [PATCH 14/71] refactor: Index field directive (#2994) ## Relevant issue(s) Resolves #2926 ## Description This PR fixes an issue where the index directive was defined twice. ~The `@index` field level directive has been renamed to `@indexField`.~ The `fields` arg has been renamed to `includes` and is now a list of objects of type: ``` type IndexFieldInput { name: String direction: Ordering } ``` The `direction` argument now sets the default direction of all fields in the `includes` list. When the index is used on a field definition and the field is not in the `includes` list it will be implicitly added. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? Covered by existing tests. Specify the platform(s) on which this was tested: - MacOS --- internal/request/graphql/schema/collection.go | 172 ++++++++------ .../graphql/schema/index_parse_test.go | 132 +++++++---- internal/request/graphql/schema/manager.go | 12 +- .../request/graphql/schema/types/types.go | 60 +++-- .../index/create_composite_test.go | 222 ++++++++++++++++++ tests/integration/index/create_get_test.go | 2 +- .../index/create_unique_composite_test.go | 2 +- ...y_with_composite_index_field_order_test.go | 20 +- ...y_with_composite_index_only_filter_test.go | 38 +-- ...y_with_composite_inxed_on_relation_test.go | 2 +- .../index/query_with_relation_filter_test.go | 2 +- ...with_unique_composite_index_filter_test.go | 48 ++-- .../index/update_unique_composite_test.go | 2 +- 13 files changed, 502 insertions(+), 212 deletions(-) diff --git a/internal/request/graphql/schema/collection.go b/internal/request/graphql/schema/collection.go index 5f8d121b62..76835fd7c2 100644 --- a/internal/request/graphql/schema/collection.go +++ b/internal/request/graphql/schema/collection.go @@ -133,7 +133,7 @@ func collectionFromAstDefinition( for _, directive := range field.Directives { if directive.Name.Value == types.IndexDirectiveLabel { - index, err := fieldIndexFromAST(field, directive) + index, err := indexFromAST(directive, field) if err != nil { return client.CollectionDefinition{}, err } @@ -164,7 +164,7 @@ func collectionFromAstDefinition( for _, directive := range def.Directives { if directive.Name.Value == types.IndexDirectiveLabel { - index, err := indexFromAST(directive) + index, err := indexFromAST(directive, nil) if err != nil { return client.CollectionDefinition{}, err } @@ -239,14 +239,13 @@ func IsValidIndexName(name string) bool { return true } -func fieldIndexFromAST(field *ast.FieldDefinition, directive *ast.Directive) (client.IndexDescription, error) { - desc := client.IndexDescription{ - Fields: []client.IndexedFieldDescription{ - { - Name: field.Name.Value, - }, - }, - } +func indexFromAST(directive *ast.Directive, fieldDef *ast.FieldDefinition) (client.IndexDescription, error) { + var name 
string + var unique bool + + var direction *ast.EnumValue + var includes *ast.ListValue + for _, arg := range directive.Arguments { switch arg.Name.Value { case types.IndexDirectivePropName: @@ -254,95 +253,120 @@ func fieldIndexFromAST(field *ast.FieldDefinition, directive *ast.Directive) (cl if !ok { return client.IndexDescription{}, ErrIndexWithInvalidArg } - desc.Name = nameVal.Value - if !IsValidIndexName(desc.Name) { - return client.IndexDescription{}, NewErrIndexWithInvalidName(desc.Name) + name = nameVal.Value + if !IsValidIndexName(name) { + return client.IndexDescription{}, NewErrIndexWithInvalidName(name) } - case types.IndexDirectivePropUnique: - boolVal, ok := arg.Value.(*ast.BooleanValue) + + case types.IndexDirectivePropIncludes: + includesVal, ok := arg.Value.(*ast.ListValue) if !ok { return client.IndexDescription{}, ErrIndexWithInvalidArg } - desc.Unique = boolVal.Value + includes = includesVal + case types.IndexDirectivePropDirection: - dirVal, ok := arg.Value.(*ast.EnumValue) + directionVal, ok := arg.Value.(*ast.EnumValue) if !ok { return client.IndexDescription{}, ErrIndexWithInvalidArg } - if dirVal.Value == types.FieldOrderDESC { - desc.Fields[0].Descending = true + direction = directionVal + + case types.IndexDirectivePropUnique: + uniqueVal, ok := arg.Value.(*ast.BooleanValue) + if !ok { + return client.IndexDescription{}, ErrIndexWithInvalidArg } + unique = uniqueVal.Value + default: return client.IndexDescription{}, ErrIndexWithUnknownArg } } - return desc, nil -} -func indexFromAST(directive *ast.Directive) (client.IndexDescription, error) { - desc := client.IndexDescription{} - var directions *ast.ListValue - for _, arg := range directive.Arguments { - switch arg.Name.Value { - case types.IndexDirectivePropName: - nameVal, ok := arg.Value.(*ast.StringValue) - if !ok { - return client.IndexDescription{}, ErrIndexWithInvalidArg - } - desc.Name = nameVal.Value - if !IsValidIndexName(desc.Name) { - return client.IndexDescription{}, ErrIndexWithInvalidArg - } - case types.IndexDirectivePropFields: - fieldsVal, ok := arg.Value.(*ast.ListValue) - if !ok { - return client.IndexDescription{}, ErrIndexWithInvalidArg - } - for _, field := range fieldsVal.Values { - fieldVal, ok := field.(*ast.StringValue) - if !ok { - return client.IndexDescription{}, ErrIndexWithInvalidArg - } - desc.Fields = append(desc.Fields, client.IndexedFieldDescription{ - Name: fieldVal.Value, - }) - } - case types.IndexDirectivePropDirections: - var ok bool - directions, ok = arg.Value.(*ast.ListValue) - if !ok { - return client.IndexDescription{}, ErrIndexWithInvalidArg + var containsField bool + var fields []client.IndexedFieldDescription + + if includes != nil { + for _, include := range includes.Values { + field, err := indexFieldFromAST(include, direction) + if err != nil { + return client.IndexDescription{}, err } - case types.IndexDirectivePropUnique: - boolVal, ok := arg.Value.(*ast.BooleanValue) - if !ok { - return client.IndexDescription{}, ErrIndexWithInvalidArg + if fieldDef != nil && fieldDef.Name.Value == field.Name { + containsField = true } - desc.Unique = boolVal.Value - default: - return client.IndexDescription{}, ErrIndexWithUnknownArg + fields = append(fields, field) } } - if len(desc.Fields) == 0 { + + // if the directive is applied to a field and + // the field is not in the includes list + // implicitly add it as the first entry + if !containsField && fieldDef != nil { + field := client.IndexedFieldDescription{ + Name: fieldDef.Name.Value, + } + if direction != nil { + 
field.Descending = direction.Value == types.FieldOrderDESC + } + fields = append([]client.IndexedFieldDescription{field}, fields...) + } + + if len(fields) == 0 { return client.IndexDescription{}, ErrIndexMissingFields } - if directions != nil { - if len(directions.Values) != len(desc.Fields) { - return client.IndexDescription{}, ErrIndexWithInvalidArg - } - for i := range desc.Fields { - dirVal, ok := directions.Values[i].(*ast.EnumValue) + + return client.IndexDescription{ + Name: name, + Fields: fields, + Unique: unique, + }, nil +} + +func indexFieldFromAST(value ast.Value, defaultDirection *ast.EnumValue) (client.IndexedFieldDescription, error) { + argTypeObject, ok := value.(*ast.ObjectValue) + if !ok { + return client.IndexedFieldDescription{}, ErrIndexWithInvalidArg + } + + var name string + var direction *ast.EnumValue + + for _, field := range argTypeObject.Fields { + switch field.Name.Value { + case types.IndexFieldInputName: + nameVal, ok := field.Value.(*ast.StringValue) if !ok { - return client.IndexDescription{}, ErrIndexWithInvalidArg + return client.IndexedFieldDescription{}, ErrIndexWithInvalidArg } - if dirVal.Value == types.FieldOrderASC { - desc.Fields[i].Descending = false - } else if dirVal.Value == types.FieldOrderDESC { - desc.Fields[i].Descending = true + name = nameVal.Value + + case types.IndexFieldInputDirection: + directionVal, ok := field.Value.(*ast.EnumValue) + if !ok { + return client.IndexedFieldDescription{}, ErrIndexWithInvalidArg } + direction = directionVal + + default: + return client.IndexedFieldDescription{}, ErrIndexWithUnknownArg } } - return desc, nil + + var descending bool + // if the direction is explicitly set use that value, otherwise + // if the default direction was set on the index use that value + if direction != nil { + descending = direction.Value == types.FieldOrderDESC + } else if defaultDirection != nil { + descending = defaultDirection.Value == types.FieldOrderDESC + } + + return client.IndexedFieldDescription{ + Name: name, + Descending: descending, + }, nil } func fieldsFromAST( diff --git a/internal/request/graphql/schema/index_parse_test.go b/internal/request/graphql/schema/index_parse_test.go index 8204c2d0ec..0c8413ec85 100644 --- a/internal/request/graphql/schema/index_parse_test.go +++ b/internal/request/graphql/schema/index_parse_test.go @@ -15,6 +15,7 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "github.com/sourcenetwork/defradb/client" ) @@ -23,7 +24,7 @@ func TestParseIndexOnStruct(t *testing.T) { cases := []indexTestCase{ { description: "Index with a single field", - sdl: `type user @index(fields: ["name"]) {}`, + sdl: `type user @index(includes: [{name: "name"}]) {}`, targetDescriptions: []client.IndexDescription{ { Name: "", @@ -36,7 +37,7 @@ func TestParseIndexOnStruct(t *testing.T) { }, { description: "Index with a name", - sdl: `type user @index(name: "userIndex", fields: ["name"]) {}`, + sdl: `type user @index(name: "userIndex", includes: [{name: "name"}]) {}`, targetDescriptions: []client.IndexDescription{ { Name: "userIndex", @@ -48,7 +49,7 @@ func TestParseIndexOnStruct(t *testing.T) { }, { description: "Unique index", - sdl: `type user @index(fields: ["name"], unique: true) {}`, + sdl: `type user @index(includes: [{name: "name"}], unique: true) {}`, targetDescriptions: []client.IndexDescription{ { Fields: []client.IndexedFieldDescription{ @@ -60,7 +61,7 @@ func TestParseIndexOnStruct(t *testing.T) { }, { description: "Index explicitly not unique", - sdl: 
`type user @index(fields: ["name"], unique: false) {}`, + sdl: `type user @index(includes: [{name: "name"}], unique: false) {}`, targetDescriptions: []client.IndexDescription{ { Fields: []client.IndexedFieldDescription{ @@ -72,7 +73,7 @@ func TestParseIndexOnStruct(t *testing.T) { }, { description: "Index with explicit ascending field", - sdl: `type user @index(fields: ["name"], directions: [ASC]) {}`, + sdl: `type user @index(includes: [{name: "name", direction: ASC}]) {}`, targetDescriptions: []client.IndexDescription{ { Fields: []client.IndexedFieldDescription{ @@ -82,7 +83,7 @@ func TestParseIndexOnStruct(t *testing.T) { }, { description: "Index with descending field", - sdl: `type user @index(fields: ["name"], directions: [DESC]) {}`, + sdl: `type user @index(includes: [{name: "name", direction: DESC}]) {}`, targetDescriptions: []client.IndexDescription{ { Fields: []client.IndexedFieldDescription{ @@ -92,7 +93,7 @@ func TestParseIndexOnStruct(t *testing.T) { }, { description: "Index with 2 fields", - sdl: `type user @index(fields: ["name", "age"]) {}`, + sdl: `type user @index(includes: [{name: "name"}, {name: "age"}]) {}`, targetDescriptions: []client.IndexDescription{ { Fields: []client.IndexedFieldDescription{ @@ -104,7 +105,7 @@ func TestParseIndexOnStruct(t *testing.T) { }, { description: "Index with 2 fields and 2 directions", - sdl: `type user @index(fields: ["name", "age"], directions: [ASC, DESC]) {}`, + sdl: `type user @index(includes: [{name: "name", direction: ASC}, {name: "age", direction: DESC}]) {}`, targetDescriptions: []client.IndexDescription{ { Fields: []client.IndexedFieldDescription{ @@ -124,78 +125,53 @@ func TestParseIndexOnStruct(t *testing.T) { func TestParseInvalidIndexOnStruct(t *testing.T) { cases := []invalidIndexTestCase{ { - description: "missing 'fields' argument", + description: "missing 'includes' argument", sdl: `type user @index(name: "userIndex", unique: true) {}`, expectedErr: errIndexMissingFields, }, { description: "unknown argument", - sdl: `type user @index(unknown: "something", fields: ["name"]) {}`, + sdl: `type user @index(unknown: "something", includes: [{name: "name"}]) {}`, expectedErr: errIndexUnknownArgument, }, { description: "invalid index name type", - sdl: `type user @index(name: 1, fields: ["name"]) {}`, + sdl: `type user @index(name: 1, includes: [{name: "name"}]) {}`, expectedErr: errIndexInvalidArgument, }, { description: "index name starts with a number", - sdl: `type user @index(name: "1_user_name", fields: ["name"]) {}`, - expectedErr: errIndexInvalidArgument, + sdl: `type user @index(name: "1_user_name", includes: [{name: "name"}]) {}`, + expectedErr: errIndexInvalidName, }, { description: "index with empty name", - sdl: `type user @index(name: "", fields: ["name"]) {}`, - expectedErr: errIndexInvalidArgument, + sdl: `type user @index(name: "", includes: [{name: "name"}]) {}`, + expectedErr: errIndexInvalidName, }, { description: "index name with spaces", - sdl: `type user @index(name: "user name", fields: ["name"]) {}`, - expectedErr: errIndexInvalidArgument, + sdl: `type user @index(name: "user name", includes: [{name: "name"}]) {}`, + expectedErr: errIndexInvalidName, }, { description: "index name with special symbols", - sdl: `type user @index(name: "user!name", fields: ["name"]) {}`, - expectedErr: errIndexInvalidArgument, + sdl: `type user @index(name: "user!name", includes: [{name: "name"}]) {}`, + expectedErr: errIndexInvalidName, }, { description: "invalid 'unique' value type", - sdl: `type user @index(fields: 
["name"], unique: "true") {}`, - expectedErr: errIndexInvalidArgument, - }, - { - description: "invalid 'fields' value type (not a list)", - sdl: `type user @index(fields: "name") {}`, - expectedErr: errIndexInvalidArgument, - }, - { - description: "invalid 'fields' value type (not a string list)", - sdl: `type user @index(fields: [1]) {}`, - expectedErr: errIndexInvalidArgument, - }, - { - description: "invalid 'directions' value type (not a list)", - sdl: `type user @index(fields: ["name"], directions: "ASC") {}`, - expectedErr: errIndexInvalidArgument, - }, - { - description: "invalid 'directions' value type (not a string list)", - sdl: `type user @index(fields: ["name"], directions: [1]) {}`, + sdl: `type user @index(includes: [{name: "name"}], unique: "true") {}`, expectedErr: errIndexInvalidArgument, }, { - description: "invalid 'directions' value type (invalid element value)", - sdl: `type user @index(fields: ["name"], directions: ["direction"]) {}`, + description: "invalid 'includes' value type (not a list)", + sdl: `type user @index(includes: "name") {}`, expectedErr: errIndexInvalidArgument, }, { - description: "fewer directions than fields", - sdl: `type user @index(fields: ["name", "age"], directions: [ASC]) {}`, - expectedErr: errIndexInvalidArgument, - }, - { - description: "more directions than fields", - sdl: `type user @index(fields: ["name"], directions: [ASC, DESC]) {}`, + description: "invalid 'includes' value type (not an object list)", + sdl: `type user @index(includes: [1]) {}`, expectedErr: errIndexInvalidArgument, }, } @@ -293,6 +269,57 @@ func TestParseIndexOnField(t *testing.T) { }, }, }, + { + description: "composite field index with implicit include and implicit ordering", + sdl: `type user { + name: String @index(direction: DESC, includes: [{name: "age"}]) + age: Int + }`, + targetDescriptions: []client.IndexDescription{ + { + Name: "", + Fields: []client.IndexedFieldDescription{ + {Name: "name", Descending: true}, + {Name: "age", Descending: true}, + }, + Unique: false, + }, + }, + }, + { + description: "composite field index with implicit include and explicit ordering", + sdl: `type user { + name: String @index(direction: DESC, includes: [{name: "age", direction: ASC}]) + age: Int + }`, + targetDescriptions: []client.IndexDescription{ + { + Name: "", + Fields: []client.IndexedFieldDescription{ + {Name: "name", Descending: true}, + {Name: "age", Descending: false}, + }, + Unique: false, + }, + }, + }, + { + description: "composite field index with explicit includes", + sdl: `type user { + name: String @index(includes: [{name: "age"}, {name: "name"}]) + age: Int + }`, + targetDescriptions: []client.IndexDescription{ + { + Name: "", + Fields: []client.IndexedFieldDescription{ + {Name: "age", Descending: false}, + {Name: "name", Descending: false}, + }, + Unique: false, + }, + }, + }, } for _, test := range cases { @@ -362,9 +389,10 @@ func parseIndexAndTest(t *testing.T, testCase indexTestCase) { ctx := context.Background() cols, err := FromString(ctx, testCase.sdl) - assert.NoError(t, err, testCase.description) - assert.Equal(t, len(cols), 1, testCase.description) - assert.Equal(t, len(cols[0].Description.Indexes), len(testCase.targetDescriptions), testCase.description) + require.NoError(t, err, testCase.description) + + require.Equal(t, len(cols), 1, testCase.description) + require.Equal(t, len(cols[0].Description.Indexes), len(testCase.targetDescriptions), testCase.description) for i, d := range cols[0].Description.Indexes { assert.Equal(t, 
testCase.targetDescriptions[i], d, testCase.description) diff --git a/internal/request/graphql/schema/manager.go b/internal/request/graphql/schema/manager.go index 5c9a6c73c8..de2aa52ca3 100644 --- a/internal/request/graphql/schema/manager.go +++ b/internal/request/graphql/schema/manager.go @@ -36,6 +36,8 @@ func NewSchemaManager() (*SchemaManager, error) { commitObject := schemaTypes.CommitObject(commitLinkObject) commitsOrderArg := schemaTypes.CommitsOrderArg(orderEnum) + indexFieldInput := schemaTypes.IndexFieldInputObject(orderEnum) + schema, err := gql.NewSchema(gql.SchemaConfig{ Types: defaultTypes( commitObject, @@ -44,11 +46,12 @@ func NewSchemaManager() (*SchemaManager, error) { orderEnum, crdtEnum, explainEnum, + indexFieldInput, ), Query: defaultQueryType(commitObject, commitsOrderArg), Mutation: defaultMutationType(), + Directives: defaultDirectivesType(crdtEnum, explainEnum, orderEnum, indexFieldInput), Subscription: defaultSubscriptionType(), - Directives: defaultDirectivesType(crdtEnum, explainEnum, orderEnum), }) if err != nil { return sm, err @@ -145,13 +148,13 @@ func defaultDirectivesType( crdtEnum *gql.Enum, explainEnum *gql.Enum, orderEnum *gql.Enum, + indexFieldInput *gql.InputObject, ) []*gql.Directive { return []*gql.Directive{ schemaTypes.CRDTFieldDirective(crdtEnum), schemaTypes.ExplainDirective(explainEnum), schemaTypes.PolicyDirective(), - schemaTypes.IndexDirective(orderEnum), - schemaTypes.IndexFieldDirective(orderEnum), + schemaTypes.IndexDirective(orderEnum, indexFieldInput), schemaTypes.PrimaryDirective(), schemaTypes.RelationDirective(), } @@ -178,6 +181,7 @@ func defaultTypes( orderEnum *gql.Enum, crdtEnum *gql.Enum, explainEnum *gql.Enum, + indexFieldInput *gql.InputObject, ) []gql.Type { blobScalarType := schemaTypes.BlobScalarType() jsonScalarType := schemaTypes.JSONScalarType() @@ -222,5 +226,7 @@ func defaultTypes( crdtEnum, explainEnum, + + indexFieldInput, } } diff --git a/internal/request/graphql/schema/types/types.go b/internal/request/graphql/schema/types/types.go index ae027312ba..121bd4a3a4 100644 --- a/internal/request/graphql/schema/types/types.go +++ b/internal/request/graphql/schema/types/types.go @@ -33,12 +33,14 @@ const ( PolicySchemaDirectivePropID = "id" PolicySchemaDirectivePropResource = "resource" - IndexDirectiveLabel = "index" - IndexDirectivePropName = "name" - IndexDirectivePropUnique = "unique" - IndexDirectivePropFields = "fields" - IndexDirectivePropDirection = "direction" - IndexDirectivePropDirections = "directions" + IndexDirectiveLabel = "index" + IndexDirectivePropName = "name" + IndexDirectivePropUnique = "unique" + IndexDirectivePropDirection = "direction" + IndexDirectivePropIncludes = "includes" + + IndexFieldInputName = "name" + IndexFieldInputDirection = "direction" FieldOrderASC = "ASC" FieldOrderDESC = "DESC" @@ -121,44 +123,52 @@ func PolicyDirective() *gql.Directive { }) } -func IndexDirective(orderingEnum *gql.Enum) *gql.Directive { - return gql.NewDirective(gql.DirectiveConfig{ - Name: IndexDirectiveLabel, - Description: "@index is a directive that can be used to create an index on a type.", - Args: gql.FieldConfigArgument{ - IndexDirectivePropName: &gql.ArgumentConfig{ +func IndexFieldInputObject(orderingEnum *gql.Enum) *gql.InputObject { + return gql.NewInputObject(gql.InputObjectConfig{ + Name: "IndexField", + Description: "Used to create an index from a field.", + Fields: gql.InputObjectConfigFieldMap{ + IndexFieldInputName: &gql.InputObjectFieldConfig{ Type: gql.String, }, - IndexDirectivePropFields: 
&gql.ArgumentConfig{ - Type: gql.NewList(gql.String), - }, - IndexDirectivePropDirections: &gql.ArgumentConfig{ - Type: gql.NewList(orderingEnum), + IndexFieldInputDirection: &gql.InputObjectFieldConfig{ + Type: orderingEnum, }, }, - Locations: []string{ - gql.DirectiveLocationObject, - }, }) } -func IndexFieldDirective(orderingEnum *gql.Enum) *gql.Directive { +func IndexDirective(orderingEnum *gql.Enum, indexFieldInputObject *gql.InputObject) *gql.Directive { return gql.NewDirective(gql.DirectiveConfig{ Name: IndexDirectiveLabel, - Description: "@index is a directive that can be used to create an index on a field.", + Description: "@index is a directive that can be used to create an index on a type or a field.", Args: gql.FieldConfigArgument{ IndexDirectivePropName: &gql.ArgumentConfig{ - Type: gql.String, + Description: "Sets the index name.", + Type: gql.String, }, IndexDirectivePropUnique: &gql.ArgumentConfig{ - Type: gql.Boolean, + Description: "Makes the index unique.", + Type: gql.Boolean, }, IndexDirectivePropDirection: &gql.ArgumentConfig{ + Description: `Sets the default index ordering for all fields. + + If a field in the includes list does not specify a direction + the default ordering from this value will be used instead.`, Type: orderingEnum, }, + IndexDirectivePropIncludes: &gql.ArgumentConfig{ + Description: `Sets the fields the index is created on. + + When used on a field definition and the field is not in the includes list + it will be implicitly added as the first entry.`, + Type: gql.NewList(indexFieldInputObject), + }, }, Locations: []string{ - gql.DirectiveLocationField, + gql.DirectiveLocationObject, + gql.DirectiveLocationFieldDefinition, }, }) } diff --git a/tests/integration/index/create_composite_test.go b/tests/integration/index/create_composite_test.go index e9a83f1d15..6c1fe6c058 100644 --- a/tests/integration/index/create_composite_test.go +++ b/tests/integration/index/create_composite_test.go @@ -72,3 +72,225 @@ func TestCompositeIndexCreate_WhenCreated_CanRetrieve(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestCompositeIndexCreate_UsingObjectDirective_SetsDefaultDirection(t *testing.T) { + test := testUtils.TestCase{ + Description: "create composite index using object directive sets default direction", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(direction: DESC, includes: [{name: "name"}, {name: "age"}]) { + name: String + age: Int + } + `, + }, + testUtils.GetIndexes{ + CollectionID: 0, + ExpectedIndexes: []client.IndexDescription{ + { + // this should be User_name_DESC + Name: "User_name_ASC", + ID: 1, + Fields: []client.IndexedFieldDescription{ + { + Name: "name", + Descending: true, + }, + { + Name: "age", + Descending: true, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestCompositeIndexCreate_UsingObjectDirective_OverridesDefaultDirection(t *testing.T) { + test := testUtils.TestCase{ + Description: "create composite object using field directive overrides default direction", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(direction: DESC, includes: [{name: "name"}, {name: "age", direction: ASC}]) { + name: String + age: Int + } + `, + }, + testUtils.GetIndexes{ + CollectionID: 0, + ExpectedIndexes: []client.IndexDescription{ + { + // this should be User_name_DESC + Name: "User_name_ASC", + ID: 1, + Fields: []client.IndexedFieldDescription{ + { + Name: "name", + Descending: true, + }, + { + Name: "age", + Descending: false, + }, + }, + }, 
+ }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestCompositeIndexCreate_UsingFieldDirective_ImplicitlyAddsField(t *testing.T) { + test := testUtils.TestCase{ + Description: "create composite index using field directive implicitly adds field", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index(includes: [{name: "age"}]) + age: Int + } + `, + }, + testUtils.GetIndexes{ + CollectionID: 0, + ExpectedIndexes: []client.IndexDescription{ + { + Name: "User_name_ASC", + ID: 1, + Fields: []client.IndexedFieldDescription{ + { + Name: "name", + }, + { + Name: "age", + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestCompositeIndexCreate_UsingFieldDirective_SetsDefaultDirection(t *testing.T) { + test := testUtils.TestCase{ + Description: "create composite index using field directive sets default direction", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index(direction: DESC, includes: [{name: "age"}]) + age: Int + } + `, + }, + testUtils.GetIndexes{ + CollectionID: 0, + ExpectedIndexes: []client.IndexDescription{ + { + // this should be User_name_DESC + Name: "User_name_ASC", + ID: 1, + Fields: []client.IndexedFieldDescription{ + { + Name: "name", + Descending: true, + }, + { + Name: "age", + Descending: true, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestCompositeIndexCreate_UsingFieldDirective_OverridesDefaultDirection(t *testing.T) { + test := testUtils.TestCase{ + Description: "create composite index using field directive overrides default direction", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index(direction: DESC, includes: [{name: "age", direction: ASC}]) + age: Int + } + `, + }, + testUtils.GetIndexes{ + CollectionID: 0, + ExpectedIndexes: []client.IndexDescription{ + { + // this should be User_name_DESC + Name: "User_name_ASC", + ID: 1, + Fields: []client.IndexedFieldDescription{ + { + Name: "name", + Descending: true, + }, + { + Name: "age", + Descending: false, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestCompositeIndexCreate_UsingFieldDirective_WithExplicitIncludes_RespectsOrder(t *testing.T) { + test := testUtils.TestCase{ + Description: "create composite index using field directive with explicit includes respects order", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index(includes: [{name: "age"}, {name: "name"}]) + age: Int + } + `, + }, + testUtils.GetIndexes{ + CollectionID: 0, + ExpectedIndexes: []client.IndexDescription{ + { + Name: "User_age_ASC", + ID: 1, + Fields: []client.IndexedFieldDescription{ + { + Name: "age", + }, + { + Name: "name", + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/index/create_get_test.go b/tests/integration/index/create_get_test.go index 3ba27cfa9e..76b63980c4 100644 --- a/tests/integration/index/create_get_test.go +++ b/tests/integration/index/create_get_test.go @@ -23,7 +23,7 @@ func TestIndexGet_ShouldReturnListOfExistingIndexes(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(name: "age_index", fields: ["age"]) { + type User @index(name: "age_index", includes: [{name: "age"}]) { name: String @index(name: "name_index") age: Int } diff --git a/tests/integration/index/create_unique_composite_test.go b/tests/integration/index/create_unique_composite_test.go 
index 9adb6d2e67..44123eaefe 100644 --- a/tests/integration/index/create_unique_composite_test.go +++ b/tests/integration/index/create_unique_composite_test.go @@ -75,7 +75,7 @@ func TestUniqueCompositeIndexCreate_UponAddingDocWithExistingFieldValue_ReturnEr Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "age"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String diff --git a/tests/integration/index/query_with_composite_index_field_order_test.go b/tests/integration/index/query_with_composite_index_field_order_test.go index 7b38163c1c..f53fbc3312 100644 --- a/tests/integration/index/query_with_composite_index_field_order_test.go +++ b/tests/integration/index/query_with_composite_index_field_order_test.go @@ -22,7 +22,7 @@ func TestQueryWithCompositeIndex_WithDefaultOrder_ShouldFetchInDefaultOrder(t *t Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"]) { + type User @index(includes: [{name: "name"}, {name: "age"}]) { name: String age: Int }`, @@ -100,7 +100,7 @@ func TestQueryWithCompositeIndex_WithDefaultOrderCaseInsensitive_ShouldFetchInDe Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"]) { + type User @index(includes: [{name: "name"}, {name: "age"}]) { name: String age: Int }`, @@ -178,7 +178,7 @@ func TestQueryWithCompositeIndex_WithRevertedOrderOnFirstField_ShouldFetchInReve Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"], directions: [DESC, ASC]) { + type User @index(includes: [{name: "name", direction: DESC}, {name: "age", direction: ASC}]) { name: String age: Int }`, @@ -268,7 +268,7 @@ func TestQueryWithCompositeIndex_WithRevertedOrderOnFirstFieldCaseInsensitive_Sh Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"], directions: [DESC, ASC]) { + type User @index(includes: [{name: "name", direction: DESC}, {name: "age", direction: ASC}]) { name: String age: Int }`, @@ -358,7 +358,7 @@ func TestQueryWithCompositeIndex_WithRevertedOrderOnSecondField_ShouldFetchInRev Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"], directions: [ASC, DESC]) { + type User @index(includes: [{name: "name", direction: ASC}, {name: "age", direction: DESC}]) { name: String age: Int }`, @@ -438,7 +438,7 @@ func TestQueryWithCompositeIndex_WithRevertedOrderOnSecondFieldCaseInsensitive_S Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"], directions: [ASC, DESC]) { + type User @index(includes: [{name: "name", direction: ASC}, {name: "age", direction: DESC}]) { name: String age: Int }`, @@ -516,7 +516,7 @@ func TestQueryWithCompositeIndex_IfExactMatchWithRevertedOrderOnFirstField_Shoul Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"], directions: [DESC, ASC]) { + type User @index(includes: [{name: "name", direction: DESC}, {name: "age", direction: ASC}]) { name: String age: Int }`, @@ -574,7 +574,7 @@ func TestQueryWithCompositeIndex_IfExactMatchWithRevertedOrderOnSecondField_Shou Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"], directions: [ASC, DESC]) { + type User @index(includes: [{name: "name", direction: ASC}, {name: "age", direction: DESC}]) { name: String age: Int }`, @@ -632,7 +632,7 @@ func 
TestQueryWithCompositeIndex_WithInFilterOnFirstFieldWithRevertedOrder_Shoul Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"], directions: [DESC, ASC]) { + type User @index(includes: [{name: "name", direction: DESC}, {name: "age", direction: ASC}]) { name: String age: Int email: String @@ -667,7 +667,7 @@ func TestQueryWithCompositeIndex_WithInFilterOnSecondFieldWithRevertedOrder_Shou Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"], directions: [ASC, DESC]) { + type User @index(includes: [{name: "name", direction: ASC}, {name: "age", direction: DESC}]) { name: String age: Int email: String diff --git a/tests/integration/index/query_with_composite_index_only_filter_test.go b/tests/integration/index/query_with_composite_index_only_filter_test.go index 94e6a54727..adaef0d481 100644 --- a/tests/integration/index/query_with_composite_index_only_filter_test.go +++ b/tests/integration/index/query_with_composite_index_only_filter_test.go @@ -40,7 +40,7 @@ func TestQueryWithCompositeIndex_WithEqualFilter_ShouldFetch(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"]) { + type User @index(includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -96,7 +96,7 @@ func TestQueryWithCompositeIndex_WithGreaterThanFilterOnFirstField_ShouldFetch(t Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["age", "name"]) { + type User @index(includes: [{name: "age"}, {name: "name"}]) { name: String age: Int email: String @@ -134,7 +134,7 @@ func TestQueryWithCompositeIndex_WithGreaterThanFilterOnSecondField_ShouldFetch( Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"]) { + type User @index(includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -172,7 +172,7 @@ func TestQueryWithCompositeIndex_WithGreaterOrEqualFilterOnFirstField_ShouldFetc Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["age", "name"]) { + type User @index(includes: [{name: "age"}, {name: "name"}]) { name: String age: Int email: String @@ -211,7 +211,7 @@ func TestQueryWithCompositeIndex_WithGreaterOrEqualFilterOnSecondField_ShouldFet Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"]) { + type User @index(includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -250,7 +250,7 @@ func TestQueryWithCompositeIndex_WithLessThanFilterOnFirstField_ShouldFetch(t *t Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["age", "name"]) { + type User @index(includes: [{name: "age"}, {name: "name"}]) { name: String age: Int email: String @@ -288,7 +288,7 @@ func TestQueryWithCompositeIndex_WithLessThanFilterOnSecondField_ShouldFetch(t * Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"]) { + type User @index(includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -326,7 +326,7 @@ func TestQueryWithCompositeIndex_WithLessOrEqualFilterOnFirstField_ShouldFetch(t Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["age", "name"]) { + type User @index(includes: [{name: "age"}, {name: "name"}]) { name: String age: Int email: String @@ -365,7 +365,7 @@ func TestQueryWithCompositeIndex_WithLessOrEqualFilterOnSecondField_ShouldFetch( Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - 
type User @index(fields: ["name", "age"]) { + type User @index(includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -404,7 +404,7 @@ func TestQueryWithCompositeIndex_WithNotEqualFilter_ShouldFetch(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"]) { + type User @index(includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -449,7 +449,7 @@ func TestQueryWithCompositeIndex_WithInFilter_ShouldFetch(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"]) { + type User @index(includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -488,7 +488,7 @@ func TestQueryWithCompositeIndex_WithNotInFilter_ShouldFetch(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"]) { + type User @index(includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -558,7 +558,7 @@ func TestQueryWithCompositeIndex_WithLikeFilter_ShouldFetch(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "email"]) { + type User @index(includes: [{name: "name"}, {name: "email"}]) { name: String email: String }`, @@ -655,7 +655,7 @@ func TestQueryWithCompositeIndex_WithNotLikeFilter_ShouldFetch(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "email"]) { + type User @index(includes: [{name: "name"}, {name: "email"}]) { name: String email: String }`, @@ -690,7 +690,7 @@ func TestQueryWithCompositeIndex_IfFirstFieldIsNotInFilter_ShouldNotUseIndex(t * Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"]) { + type User @index(includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -719,7 +719,7 @@ func TestQueryWithCompositeIndex_WithEqualFilterOnNilValueOnFirst_ShouldFetch(t Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"]) { + type User @index(includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -766,7 +766,7 @@ func TestQueryWithCompositeIndex_WithEqualFilterOnNilValueOnSecond_ShouldFetch(t Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age"]) { + type User @index(includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -823,7 +823,7 @@ func TestQueryWithCompositeIndex_IfMiddleFieldIsNotInFilter_ShouldIgnoreValue(t Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "email", "age"]) { + type User @index(includes: [{name: "name"}, {name: "email"}, {name: "age"}]) { name: String email: String age: Int @@ -898,7 +898,7 @@ func TestQueryWithCompositeIndex_IfConsecutiveEqOps_ShouldUseAllToOptimizeQuery( Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(fields: ["name", "age", "numChildren"]) { + type User @index(includes: [{name: "name"}, {name: "age"}, {name: "numChildren"}]) { name: String age: Int numChildren: Int diff --git a/tests/integration/index/query_with_composite_inxed_on_relation_test.go b/tests/integration/index/query_with_composite_inxed_on_relation_test.go index 40d736ba3c..aab19f2d07 100644 --- a/tests/integration/index/query_with_composite_inxed_on_relation_test.go +++ b/tests/integration/index/query_with_composite_inxed_on_relation_test.go @@ -31,7 +31,7 @@ func 
TestQueryWithCompositeIndexOnManyToOne_WithMultipleIndexedChildNodes_Should devices: [Device] } - type Device @index(fields: ["owner_id", "manufacturer_id"]) { + type Device @index(includes: [{name: "owner_id"}, {name: "manufacturer_id"}]) { model: String owner: User manufacturer: Manufacturer diff --git a/tests/integration/index/query_with_relation_filter_test.go b/tests/integration/index/query_with_relation_filter_test.go index 9428626810..e31685a0cf 100644 --- a/tests/integration/index/query_with_relation_filter_test.go +++ b/tests/integration/index/query_with_relation_filter_test.go @@ -182,7 +182,7 @@ func TestQueryWithIndexOnOneToOnesSecondaryRelation_IfFilterOnIndexedRelation_Sh type Address { user: User @primary - city: String @index + city: String @index }`, }, testUtils.CreatePredefinedDocs{ diff --git a/tests/integration/index/query_with_unique_composite_index_filter_test.go b/tests/integration/index/query_with_unique_composite_index_filter_test.go index 0df9b349ca..190bfca53a 100644 --- a/tests/integration/index/query_with_unique_composite_index_filter_test.go +++ b/tests/integration/index/query_with_unique_composite_index_filter_test.go @@ -40,7 +40,7 @@ func TestQueryWithUniqueCompositeIndex_WithEqualFilter_ShouldFetch(t *testing.T) Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "age"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -114,7 +114,7 @@ func TestQueryWithUniqueCompositeIndex_WithGreaterThanFilterOnFirstField_ShouldF Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["age", "name"]) { + type User @index(unique: true, includes: [{name: "age"}, {name: "name"}]) { name: String age: Int email: String @@ -152,7 +152,7 @@ func TestQueryWithUniqueCompositeIndex_WithGreaterThanFilterOnSecondField_Should Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "age"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -190,7 +190,7 @@ func TestQueryWithUniqueCompositeIndex_WithGreaterOrEqualFilterOnFirstField_Shou Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["age", "name"]) { + type User @index(unique: true, includes: [{name: "age"}, {name: "name"}]) { name: String age: Int email: String @@ -229,7 +229,7 @@ func TestQueryWithUniqueCompositeIndex_WithGreaterOrEqualFilterOnSecondField_Sho Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "age"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -268,7 +268,7 @@ func TestQueryWithUniqueCompositeIndex_WithLessThanFilterOnFirstField_ShouldFetc Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["age", "name"]) { + type User @index(unique: true, includes: [{name: "age"}, {name: "name"}]) { name: String age: Int email: String @@ -306,7 +306,7 @@ func TestQueryWithUniqueCompositeIndex_WithLessThanFilterOnSecondField_ShouldFet Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "age"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -344,7 +344,7 @@ func TestQueryWithUniqueCompositeIndex_WithLessOrEqualFilterOnFirstField_ShouldF Actions: 
[]any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["age", "name"]) { + type User @index(unique: true, includes: [{name: "age"}, {name: "name"}]) { name: String age: Int email: String @@ -383,7 +383,7 @@ func TestQueryWithUniqueCompositeIndex_WithLessOrEqualFilterOnSecondField_Should Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "age"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -422,7 +422,7 @@ func TestQueryWithUniqueCompositeIndex_WithNotEqualFilter_ShouldFetch(t *testing Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "age"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -468,7 +468,7 @@ func TestQueryWithUniqueCompositeIndex_WithInForFirstAndEqForRest_ShouldFetchEff Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "age"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -544,7 +544,7 @@ func TestQueryWithUniqueCompositeIndex_WithInFilter_ShouldFetch(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "age"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -599,7 +599,7 @@ func TestQueryWithUniqueCompositeIndex_WithNotInFilter_ShouldFetch(t *testing.T) Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "age"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -669,7 +669,7 @@ func TestQueryWithUniqueCompositeIndex_WithLikeFilter_ShouldFetch(t *testing.T) Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "email"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "email"}]) { name: String email: String }`, @@ -766,7 +766,7 @@ func TestQueryWithUniqueCompositeIndex_WithNotLikeFilter_ShouldFetch(t *testing. 
Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "email"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "email"}]) { name: String email: String }`, @@ -806,7 +806,7 @@ func TestQueryWithUniqueCompositeIndex_WithNotCaseInsensitiveLikeFilter_ShouldFe Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "email"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "email"}]) { name: String email: String }`, @@ -842,7 +842,7 @@ func TestQueryWithUniqueCompositeIndex_IfFirstFieldIsNotInFilter_ShouldNotUseInd Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "age"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -871,7 +871,7 @@ func TestQueryWithUniqueCompositeIndex_WithEqualFilterOnNilValueOnFirst_ShouldFe Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "age"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -925,7 +925,7 @@ func TestQueryWithUniqueCompositeIndex_WithMultipleNilOnFirstFieldAndNilFilter_S Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "age"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -981,7 +981,7 @@ func TestQueryWithUniqueCompositeIndex_WithEqualFilterOnNilValueOnSecond_ShouldF Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "age"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { name: String age: Int about: String @@ -1048,7 +1048,7 @@ func TestQueryWithUniqueCompositeIndex_WithMultipleNilOnSecondFieldsAndNilFilter Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "age"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { name: String age: Int email: String @@ -1113,7 +1113,7 @@ func TestQueryWithUniqueCompositeIndex_WithMultipleNilOnBothFieldsAndNilFilter_S Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "age"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { name: String age: Int about: String @@ -1214,7 +1214,7 @@ func TestQueryWithUniqueCompositeIndex_AfterUpdateOnNilFields_ShouldFetch(t *tes Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "age"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { name: String age: Int about: String @@ -1355,7 +1355,7 @@ func TestQueryWithUniqueCompositeIndex_IfMiddleFieldIsNotInFilter_ShouldIgnoreVa Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, fields: ["name", "email", "age"]) { + type User @index(unique: true, includes: [{name: "name"}, {name: "email"}, {name: "age"}]) { name: String email: String age: Int diff --git a/tests/integration/index/update_unique_composite_test.go b/tests/integration/index/update_unique_composite_test.go index 4621e79283..17a831369c 100644 --- a/tests/integration/index/update_unique_composite_test.go +++ b/tests/integration/index/update_unique_composite_test.go @@ -22,7 +22,7 @@ func 
TestUniqueCompositeIndexUpdate_UponUpdatingDocWithExistingFieldValue_Should
 		Actions: []any{
 			testUtils.SchemaUpdate{
 				Schema: `
-					type User @index(unique: true, fields: ["name", "age"]) {
+					type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) {
 						name: String
 						age: Int
 						email: String

From ea3a74f2f28f969eba760bd93d7a8b32e0d3a3f1 Mon Sep 17 00:00:00 2001
From: Keenan Nemetz
Date: Mon, 16 Sep 2024 09:58:17 -0700
Subject: [PATCH 15/71] feat: Default scalar field values (#2997)

## Relevant issue(s)

Resolves #2952

## Description

This PR adds support for default field values using a new `@default` directive.
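For example, a field of each supported scalar type can be given a default, with the argument name matching the field type (this schema mirrors the integration tests added below):

```
type Users {
  active: Boolean @default(bool: true)
  created: DateTime @default(dateTime: "2000-07-23T03:00:00-00:00")
  name: String @default(string: "Bob")
  age: Int @default(int: 40)
  points: Float @default(float: 10)
  metadata: JSON @default(json: "{\"one\":1}")
  image: Blob @default(blob: "ff0099")
}
```

A document created without a value for one of these fields receives the default; an explicitly provided value, including `null`, takes precedence, and defaults have no effect on views.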
## Tasks

- [x] I made sure the code is well commented, particularly hard-to-understand areas.
- [x] I made sure the repository-held documentation is changed accordingly.
- [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
- [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ...

## How has this been tested?

Added integration tests.

Specify the platform(s) on which this was tested:
- MacOS
---
 client/collection_field_description.go        |   7 +
 client/definitions.go                         |   5 +
 client/document.go                            |  48 +++-
 docs/website/references/http/openapi.json     |   2 +
 http/client_collection.go                     |   5 +-
 internal/db/fetcher/encoded_doc.go            |   5 +-
 internal/request/graphql/schema/collection.go |  81 +++++-
 internal/request/graphql/schema/errors.go     |  19 ++
 internal/request/graphql/schema/generate.go   |   3 +-
 internal/request/graphql/schema/manager.go    |   1 +
 .../request/graphql/schema/types/types.go     |  44 +++
 tests/clients/cli/wrapper_collection.go       |   5 +-
 .../updates/replace/fields_test.go            |  24 ++
 .../with_default_fields_test.go               | 158 +++++++++++
 .../create/with_default_values_test.go        | 264 ++++++++++++++++++
 .../view/simple/with_default_value_test.go    |  70 +++++
 16 files changed, 715 insertions(+), 26 deletions(-)
 create mode 100644 tests/integration/collection_description/with_default_fields_test.go
 create mode 100644 tests/integration/mutation/create/with_default_values_test.go
 create mode 100644 tests/integration/view/simple/with_default_value_test.go

diff --git a/client/collection_field_description.go b/client/collection_field_description.go
index 98b012d641..bc066d52fc 100644
--- a/client/collection_field_description.go
+++ b/client/collection_field_description.go
@@ -38,6 +38,11 @@ type CollectionFieldDescription struct {
 	//
 	// Otherwise will be [None].
 	RelationName immutable.Option[string]
+
+	// DefaultValue contains the default value for this field.
+	//
+	// This value has no effect on views.
+	DefaultValue any
 }
 
 func (f FieldID) String() string {
@@ -50,6 +55,7 @@ type collectionFieldDescription struct {
 	Name         string
 	ID           FieldID
 	RelationName immutable.Option[string]
+	DefaultValue any
 
 	// Properties below this line are unmarshalled using custom logic in [UnmarshalJSON]
 	Kind json.RawMessage
@@ -64,6 +70,7 @@ func (f *CollectionFieldDescription) UnmarshalJSON(bytes []byte) error {
 	f.Name = descMap.Name
 	f.ID = descMap.ID
+	f.DefaultValue = descMap.DefaultValue
 	f.RelationName = descMap.RelationName
 	kind, err := parseFieldKind(descMap.Kind)
 	if err != nil {
diff --git a/client/definitions.go b/client/definitions.go
index af571d5983..269a703ac9 100644
--- a/client/definitions.go
+++ b/client/definitions.go
@@ -145,6 +145,9 @@ type FieldDefinition struct {
 
 	// If true, this is the primary half of a relation, otherwise is false.
 	IsPrimaryRelation bool
+
+	// DefaultValue contains the default value for this field.
+	DefaultValue any
 }
 
 // NewFieldDefinition returns a new [FieldDefinition], combining the given local and global elements
@@ -164,6 +167,7 @@ func NewFieldDefinition(local CollectionFieldDescription, global SchemaFieldDesc
 		RelationName:      local.RelationName.Value(),
 		Typ:               global.Typ,
 		IsPrimaryRelation: kind.IsObject() && !kind.IsArray(),
+		DefaultValue:      local.DefaultValue,
 	}
 }
 
@@ -174,6 +178,7 @@ func NewLocalFieldDefinition(local CollectionFieldDescription) FieldDefinition {
 		ID:           local.ID,
 		Kind:         local.Kind.Value(),
 		RelationName: local.RelationName.Value(),
+		DefaultValue: local.DefaultValue,
 	}
 }
diff --git a/client/document.go b/client/document.go
index 82f0a8bb36..fa4c842343 100644
--- a/client/document.go
+++ b/client/document.go
@@ -84,25 +84,34 @@ type Document struct {
 	collectionDefinition CollectionDefinition
 }
 
-func newEmptyDoc(collectionDefinition CollectionDefinition) *Document {
-	return &Document{
+func newEmptyDoc(collectionDefinition CollectionDefinition) (*Document, error) {
+	doc := &Document{
 		fields:               make(map[string]Field),
 		values:               make(map[Field]*FieldValue),
 		collectionDefinition: collectionDefinition,
 	}
+	if err := doc.setDefaultValues(); err != nil {
+		return nil, err
+	}
+	return doc, nil
 }
 
 // NewDocWithID creates a new Document with a specified key.
-func NewDocWithID(docID DocID, collectionDefinition CollectionDefinition) *Document {
-	doc := newEmptyDoc(collectionDefinition)
+func NewDocWithID(docID DocID, collectionDefinition CollectionDefinition) (*Document, error) {
+	doc, err := newEmptyDoc(collectionDefinition)
+	if err != nil {
+		return nil, err
+	}
 	doc.id = docID
-	return doc
+	return doc, nil
 }
 
 // NewDocFromMap creates a new Document from a data map.
 func NewDocFromMap(data map[string]any, collectionDefinition CollectionDefinition) (*Document, error) {
-	var err error
-	doc := newEmptyDoc(collectionDefinition)
+	doc, err := newEmptyDoc(collectionDefinition)
+	if err != nil {
+		return nil, err
+	}
 
 	// check if document contains special _docID field
 	k, hasDocID := data[request.DocIDFieldName]
@@ -142,8 +151,11 @@ func IsJSONArray(obj []byte) bool {
 
 // NewFromJSON creates a new instance of a Document from a raw JSON object byte array.
func NewDocFromJSON(obj []byte, collectionDefinition CollectionDefinition) (*Document, error) { - doc := newEmptyDoc(collectionDefinition) - err := doc.SetWithJSON(obj) + doc, err := newEmptyDoc(collectionDefinition) + if err != nil { + return nil, err + } + err = doc.SetWithJSON(obj) if err != nil { return nil, err } @@ -172,7 +184,10 @@ func NewDocsFromJSON(obj []byte, collectionDefinition CollectionDefinition) ([]* if err != nil { return nil, err } - doc := newEmptyDoc(collectionDefinition) + doc, err := newEmptyDoc(collectionDefinition) + if err != nil { + return nil, err + } err = doc.setWithFastJSONObject(o) if err != nil { return nil, err @@ -653,6 +668,19 @@ func (doc *Document) setAndParseObjectType(value map[string]any) error { return nil } +func (doc *Document) setDefaultValues() error { + for _, field := range doc.collectionDefinition.GetFields() { + if field.DefaultValue == nil { + continue // no default value to set + } + err := doc.Set(field.Name, field.DefaultValue) + if err != nil { + return err + } + } + return nil +} + // Fields gets the document fields as a map. func (doc *Document) Fields() map[string]Field { doc.mu.RLock() diff --git a/docs/website/references/http/openapi.json b/docs/website/references/http/openapi.json index 0b511d7e5d..4462c3d9f3 100644 --- a/docs/website/references/http/openapi.json +++ b/docs/website/references/http/openapi.json @@ -83,6 +83,7 @@ "Fields": { "items": { "properties": { + "DefaultValue": {}, "ID": { "maximum": 4294967295, "minimum": 0, @@ -160,6 +161,7 @@ "Fields": { "items": { "properties": { + "DefaultValue": {}, "ID": { "maximum": 4294967295, "minimum": 0, diff --git a/http/client_collection.go b/http/client_collection.go index c13e4c68e9..54167de222 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -307,7 +307,10 @@ func (c *Collection) Get( if err != nil { return nil, err } - doc := client.NewDocWithID(docID, c.def) + doc, err := client.NewDocWithID(docID, c.def) + if err != nil { + return nil, err + } err = doc.SetWithJSON(data) if err != nil { return nil, err diff --git a/internal/db/fetcher/encoded_doc.go b/internal/db/fetcher/encoded_doc.go index 9bb3c6261c..1401626e29 100644 --- a/internal/db/fetcher/encoded_doc.go +++ b/internal/db/fetcher/encoded_doc.go @@ -112,7 +112,10 @@ func Decode(encdoc EncodedDocument, collectionDefinition client.CollectionDefini return nil, err } - doc := client.NewDocWithID(docID, collectionDefinition) + doc, err := client.NewDocWithID(docID, collectionDefinition) + if err != nil { + return nil, err + } properties, err := encdoc.Properties(false) if err != nil { return nil, err diff --git a/internal/request/graphql/schema/collection.go b/internal/request/graphql/schema/collection.go index 76835fd7c2..58536db3af 100644 --- a/internal/request/graphql/schema/collection.go +++ b/internal/request/graphql/schema/collection.go @@ -16,6 +16,7 @@ import ( "sort" "strings" + gql "github.com/sourcenetwork/graphql-go" "github.com/sourcenetwork/graphql-go/language/ast" gqlp "github.com/sourcenetwork/graphql-go/language/parser" "github.com/sourcenetwork/graphql-go/language/source" @@ -26,6 +27,29 @@ import ( "github.com/sourcenetwork/defradb/internal/request/graphql/schema/types" ) +const ( + typeID string = "ID" + typeBoolean string = "Boolean" + typeInt string = "Int" + typeFloat string = "Float" + typeDateTime string = "DateTime" + typeString string = "String" + typeBlob string = "Blob" + typeJSON string = "JSON" +) + +// this mapping is used to check that the default prop value +// 
matches the field type +var TypeToDefaultPropName = map[string]string{ + typeString: types.DefaultDirectivePropString, + typeBoolean: types.DefaultDirectivePropBool, + typeInt: types.DefaultDirectivePropInt, + typeFloat: types.DefaultDirectivePropFloat, + typeDateTime: types.DefaultDirectivePropDateTime, + typeJSON: types.DefaultDirectivePropJSON, + typeBlob: types.DefaultDirectivePropBlob, +} + // FromString parses a GQL SDL string into a set of collection descriptions. func FromString(ctx context.Context, schemaString string) ( []client.CollectionDefinition, @@ -369,6 +393,39 @@ func indexFieldFromAST(value ast.Value, defaultDirection *ast.EnumValue) (client }, nil } +func defaultFromAST( + field *ast.FieldDefinition, + directive *ast.Directive, +) (any, error) { + astNamed, ok := field.Type.(*ast.Named) + if !ok { + return nil, NewErrDefaultValueNotAllowed(field.Name.Value, field.Type.String()) + } + propName, ok := TypeToDefaultPropName[astNamed.Name.Value] + if !ok { + return nil, NewErrDefaultValueNotAllowed(field.Name.Value, astNamed.Name.Value) + } + var value any + for _, arg := range directive.Arguments { + if propName != arg.Name.Value { + return nil, NewErrDefaultValueInvalid(field.Name.Value, propName, arg.Name.Value) + } + switch t := arg.Value.(type) { + case *ast.IntValue: + value = gql.Int.ParseLiteral(arg.Value) + case *ast.FloatValue: + value = gql.Float.ParseLiteral(arg.Value) + case *ast.BooleanValue: + value = t.Value + case *ast.StringValue: + value = t.Value + default: + value = arg.Value.GetValue() + } + } + return value, nil +} + func fieldsFromAST( field *ast.FieldDefinition, hostObjectName string, @@ -392,6 +449,16 @@ func fieldsFromAST( } hostMap[field.Name.Value] = cType + var defaultValue any + for _, directive := range field.Directives { + if directive.Name.Value == types.DefaultDirectiveLabel { + defaultValue, err = defaultFromAST(field, directive) + if err != nil { + return nil, nil, err + } + } + } + schemaFieldDescriptions := []client.SchemaFieldDescription{} collectionFieldDescriptions := []client.CollectionFieldDescription{} @@ -467,7 +534,8 @@ func fieldsFromAST( collectionFieldDescriptions = append( collectionFieldDescriptions, client.CollectionFieldDescription{ - Name: field.Name.Value, + Name: field.Name.Value, + DefaultValue: defaultValue, }, ) } @@ -529,17 +597,6 @@ func setCRDTType(field *ast.FieldDefinition, kind client.FieldKind) (client.CTyp } func astTypeToKind(t ast.Type) (client.FieldKind, error) { - const ( - typeID string = "ID" - typeBoolean string = "Boolean" - typeInt string = "Int" - typeFloat string = "Float" - typeDateTime string = "DateTime" - typeString string = "String" - typeBlob string = "Blob" - typeJSON string = "JSON" - ) - switch astTypeVal := t.(type) { case *ast.List: switch innerAstTypeVal := astTypeVal.Type.(type) { diff --git a/internal/request/graphql/schema/errors.go b/internal/request/graphql/schema/errors.go index 304df792e6..a5150e291b 100644 --- a/internal/request/graphql/schema/errors.go +++ b/internal/request/graphql/schema/errors.go @@ -30,6 +30,8 @@ const ( errPolicyUnknownArgument string = "policy with unknown argument" errPolicyInvalidIDProp string = "policy directive with invalid id property" errPolicyInvalidResourceProp string = "policy directive with invalid resource property" + errDefaultValueInvalid string = "default value type must match field type" + errDefaultValueNotAllowed string = "default value is not allowed for this field type" ) var ( @@ -136,3 +138,20 @@ func 
NewErrRelationNotFound(relationName string) error { errors.NewKV("RelationName", relationName), ) } + +func NewErrDefaultValueInvalid(name string, expected string, actual string) error { + return errors.New( + errDefaultValueInvalid, + errors.NewKV("Name", name), + errors.NewKV("Expected", expected), + errors.NewKV("Actual", actual), + ) +} + +func NewErrDefaultValueNotAllowed(fieldName, fieldType string) error { + return errors.New( + errDefaultValueNotAllowed, + errors.NewKV("Name", fieldName), + errors.NewKV("Type", fieldType), + ) +} diff --git a/internal/request/graphql/schema/generate.go b/internal/request/graphql/schema/generate.go index c198296ffb..8ae36d230f 100644 --- a/internal/request/graphql/schema/generate.go +++ b/internal/request/graphql/schema/generate.go @@ -583,7 +583,8 @@ func (g *Generator) buildMutationInputTypes(collections []client.CollectionDefin } fields[field.Name] = &gql.InputObjectFieldConfig{ - Type: ttype, + Type: ttype, + DefaultValue: field.DefaultValue, } } diff --git a/internal/request/graphql/schema/manager.go b/internal/request/graphql/schema/manager.go index de2aa52ca3..66f1eb54c2 100644 --- a/internal/request/graphql/schema/manager.go +++ b/internal/request/graphql/schema/manager.go @@ -152,6 +152,7 @@ func defaultDirectivesType( ) []*gql.Directive { return []*gql.Directive{ schemaTypes.CRDTFieldDirective(crdtEnum), + schemaTypes.DefaultDirective(), schemaTypes.ExplainDirective(explainEnum), schemaTypes.PolicyDirective(), schemaTypes.IndexDirective(orderEnum, indexFieldInput), diff --git a/internal/request/graphql/schema/types/types.go b/internal/request/graphql/schema/types/types.go index 121bd4a3a4..393f9fe62a 100644 --- a/internal/request/graphql/schema/types/types.go +++ b/internal/request/graphql/schema/types/types.go @@ -42,6 +42,15 @@ const ( IndexFieldInputName = "name" IndexFieldInputDirection = "direction" + DefaultDirectiveLabel = "default" + DefaultDirectivePropString = "string" + DefaultDirectivePropBool = "bool" + DefaultDirectivePropInt = "int" + DefaultDirectivePropFloat = "float" + DefaultDirectivePropDateTime = "dateTime" + DefaultDirectivePropJSON = "json" + DefaultDirectivePropBlob = "blob" + FieldOrderASC = "ASC" FieldOrderDESC = "DESC" ) @@ -86,6 +95,41 @@ func ExplainEnum() *gql.Enum { }) } +func DefaultDirective() *gql.Directive { + return gql.NewDirective(gql.DirectiveConfig{ + Name: DefaultDirectiveLabel, + Description: `@default is a directive that can be used to set a default field value. 
+ + Setting a default value on a field within a view has no effect.`, + Args: gql.FieldConfigArgument{ + DefaultDirectivePropString: &gql.ArgumentConfig{ + Type: gql.String, + }, + DefaultDirectivePropBool: &gql.ArgumentConfig{ + Type: gql.Boolean, + }, + DefaultDirectivePropInt: &gql.ArgumentConfig{ + Type: gql.Int, + }, + DefaultDirectivePropFloat: &gql.ArgumentConfig{ + Type: gql.Float, + }, + DefaultDirectivePropDateTime: &gql.ArgumentConfig{ + Type: gql.DateTime, + }, + DefaultDirectivePropJSON: &gql.ArgumentConfig{ + Type: JSONScalarType(), + }, + DefaultDirectivePropBlob: &gql.ArgumentConfig{ + Type: BlobScalarType(), + }, + }, + Locations: []string{ + gql.DirectiveLocationFieldDefinition, + }, + }) +} + func ExplainDirective(explainEnum *gql.Enum) *gql.Directive { return gql.NewDirective(gql.DirectiveConfig{ Name: ExplainLabel, diff --git a/tests/clients/cli/wrapper_collection.go b/tests/clients/cli/wrapper_collection.go index 9ef71e8ce7..d03c23532f 100644 --- a/tests/clients/cli/wrapper_collection.go +++ b/tests/clients/cli/wrapper_collection.go @@ -276,7 +276,10 @@ func (c *Collection) Get( if err != nil { return nil, err } - doc := client.NewDocWithID(docID, c.Definition()) + doc, err := client.NewDocWithID(docID, c.Definition()) + if err != nil { + return nil, err + } err = doc.SetWithJSON(data) if err != nil { return nil, err diff --git a/tests/integration/collection_description/updates/replace/fields_test.go b/tests/integration/collection_description/updates/replace/fields_test.go index 03aa8cdb1e..f984aa175d 100644 --- a/tests/integration/collection_description/updates/replace/fields_test.go +++ b/tests/integration/collection_description/updates/replace/fields_test.go @@ -37,3 +37,27 @@ func TestColDescrUpdateReplaceFields_Errors(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestColDescrUpdateReplaceDefaultValue_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String @default(string: "Bob") + } + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/Fields/1/DefaultValue", "value": "Alice" } + ] + `, + ExpectedError: "collection fields cannot be mutated. CollectionID: 1", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/with_default_fields_test.go b/tests/integration/collection_description/with_default_fields_test.go new file mode 100644 index 0000000000..3821fd6359 --- /dev/null +++ b/tests/integration/collection_description/with_default_fields_test.go @@ -0,0 +1,158 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package collection_description + +import ( + "testing" + + "github.com/sourcenetwork/defradb/client" + testUtils "github.com/sourcenetwork/defradb/tests/integration" + + "github.com/sourcenetwork/immutable" +) + +func TestCollectionDescription_WithDefaultFieldValues(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + active: Boolean @default(bool: true) + created: DateTime @default(dateTime: "2000-07-23T03:00:00-00:00") + name: String @default(string: "Bob") + age: Int @default(int: 10) + points: Float @default(float: 30) + metadata: JSON @default(json: "{\"value\":1}") + image: Blob @default(blob: "ff0099") + } + `, + }, + testUtils.GetCollections{ + ExpectedResults: []client.CollectionDescription{ + { + Name: immutable.Some("Users"), + Fields: []client.CollectionFieldDescription{ + { + ID: 0, + Name: "_docID", + }, + { + ID: 1, + Name: "active", + DefaultValue: true, + }, + { + ID: 2, + Name: "age", + DefaultValue: float64(10), + }, + { + ID: 3, + Name: "created", + DefaultValue: "2000-07-23T03:00:00-00:00", + }, + { + ID: 4, + Name: "image", + DefaultValue: "ff0099", + }, + { + ID: 5, + Name: "metadata", + DefaultValue: "{\"value\":1}", + }, + { + ID: 6, + Name: "name", + DefaultValue: "Bob", + }, + { + ID: 7, + Name: "points", + DefaultValue: float64(30), + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestCollectionDescription_WithIncorrectDefaultFieldValueType_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + active: Boolean @default(int: 10) + } + `, + ExpectedError: "default value type must match field type. Name: active, Expected: bool, Actual: int", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestCollectionDescription_WithMultipleDefaultFieldValueTypes_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String @default(string: "Bob", int: 10, bool: true, float: 10) + } + `, + ExpectedError: "default value type must match field type. Name: name, Expected: string, Actual: int", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestCollectionDescription_WithDefaultFieldValueOnRelation_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + friend: User @default(string: "Bob") + } + `, + ExpectedError: "default value is not allowed for this field type. Name: friend, Type: User", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestCollectionDescription_WithDefaultFieldValueOnList_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + names: [String] @default(string: "Bob") + } + `, + ExpectedError: "default value is not allowed for this field type. Name: names, Type: List", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/create/with_default_values_test.go b/tests/integration/mutation/create/with_default_values_test.go new file mode 100644 index 0000000000..aeca8a1f0f --- /dev/null +++ b/tests/integration/mutation/create/with_default_values_test.go @@ -0,0 +1,264 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package create + +import ( + "testing" + "time" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + + "github.com/sourcenetwork/immutable" +) + +func TestMutationCreate_WithDefaultValues_NoValuesProvided_SetsDefaultValue(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple create mutation, with default values and no values provided", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + active: Boolean @default(bool: true) + created: DateTime @default(dateTime: "2000-07-23T03:00:00-00:00") + name: String @default(string: "Bob") + age: Int @default(int: 40) + points: Float @default(float: 10) + metadata: JSON @default(json: "{\"one\":1}") + image: Blob @default(blob: "ff0099") + } + `, + }, + testUtils.CreateDoc{ + // left empty to test default values + DocMap: map[string]any{}, + }, + testUtils.Request{ + Request: `query { + Users { + age + active + name + points + created + metadata + image + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "age": int64(40), + "active": true, + "name": "Bob", + "points": float64(10), + "created": time.Time(time.Date(2000, time.July, 23, 3, 0, 0, 0, time.UTC)), + "metadata": "{\"one\":1}", + "image": "ff0099", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationCreate_WithDefaultValues_NilValuesProvided_SetsNilValue(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple create mutation, with default values and null values provided", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + active: Boolean @default(bool: true) + created: DateTime @default(dateTime: "2000-07-23T03:00:00-00:00") + name: String @default(string: "Bob") + age: Int @default(int: 40) + points: Float @default(float: 10) + metadata: JSON @default(json: "{\"one\":1}") + image: Blob @default(blob: "ff0099") + } + `, + }, + testUtils.CreateDoc{ + DocMap: map[string]any{ + "age": nil, + "active": nil, + "name": nil, + "points": nil, + "created": nil, + "metadata": nil, + "image": nil, + }, + }, + testUtils.Request{ + Request: `query { + Users { + age + active + name + points + created + metadata + image + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "age": nil, + "active": nil, + "name": nil, + "points": nil, + "created": nil, + "metadata": nil, + "image": nil, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationCreate_WithDefaultValues_ValuesProvided_SetsValue(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple create mutation, with default values and values provided", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + active: Boolean @default(bool: true) + created: DateTime @default(dateTime: "2000-07-23T03:00:00-00:00") + name: String @default(string: "Bob") + age: Int @default(int: 40) + points: Float @default(float: 10) + metadata: JSON @default(json: "{\"one\":1}") + image: Blob @default(blob: "ff0099") + } + `, + }, + testUtils.CreateDoc{ + DocMap: map[string]any{ + "age": int64(50), + "active": false, + "name": "Alice", + "points": float64(5), + "created": time.Time(time.Date(2024, time.June, 18, 1, 0, 0, 0, time.UTC)), + "metadata": "{\"two\":2}", + "image": "aabb33", + }, + }, + testUtils.Request{ + Request: 
`query { + Users { + age + active + name + points + created + metadata + image + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "age": int64(50), + "active": false, + "name": "Alice", + "points": float64(5), + "created": time.Time(time.Date(2024, time.June, 18, 1, 0, 0, 0, time.UTC)), + "metadata": "{\"two\":2}", + "image": "aabb33", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationCreate_WithDefaultValue_NoValueProvided_CreatedTwice_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple create mutation, with default value, no value provided, and created twice", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // This test will fail if using the collection save + // method because it does not create two unique docs + // and instead calls update on the second doc with + // matching fields + testUtils.CollectionNamedMutationType, + testUtils.GQLRequestMutationType, + }), + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String @default(string: "Bob") + age: Int @default(int: 40) + } + `, + }, + testUtils.CreateDoc{ + // left empty to test default values + DocMap: map[string]any{}, + }, + testUtils.CreateDoc{ + // left empty to test default values + DocMap: map[string]any{}, + ExpectedError: "a document with the given ID already exists", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationCreate_WithDefaultValue_NoValueProvided_CreatedTwice_UniqueIndex_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple create mutation, with default value, no value provided, created twice, and unique index", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // This test will fail if using the collection save + // method because it does not create two unique docs + // and instead calls update on the second doc with + // matching fields + testUtils.CollectionNamedMutationType, + testUtils.GQLRequestMutationType, + }), + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String @default(string: "Bob") @index(unique: true) + age: Int @default(int: 40) + } + `, + }, + testUtils.CreateDoc{ + // left empty to test default values + DocMap: map[string]any{}, + }, + testUtils.CreateDoc{ + DocMap: map[string]any{ + "age": int64(50), + }, + ExpectedError: "can not index a doc's field(s) that violates unique index", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/view/simple/with_default_value_test.go b/tests/integration/view/simple/with_default_value_test.go new file mode 100644 index 0000000000..a7f5aa660c --- /dev/null +++ b/tests/integration/view/simple/with_default_value_test.go @@ -0,0 +1,70 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestView_SimpleWithDefaultValue_DoesNotSetFieldValue(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple view with default value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateView{ + Query: ` + User { + name + } + `, + SDL: ` + type UserView { + name: String + age: Int @default(int: 40) + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Alice" + }`, + }, + testUtils.Request{ + Request: ` + query { + UserView { + name + age + } + } + `, + Results: map[string]any{ + "UserView": []map[string]any{ + { + "name": "Alice", + "age": nil, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From 49899011a75999543b246d9a852526aa96f60e5c Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Mon, 16 Sep 2024 19:08:37 -0400 Subject: [PATCH 16/71] feat: Add materialized views (#3000) ## Relevant issue(s) Resolves #2951 ## Description Adds materialized views. Also makes materialized views the default (see discord discussion). The caching behaviour of views in tests is now selected via an environment variable, meaning (with the exception of a few specific examples) a test with a view will test both cacheless and materialized variants - in the CI this adds a new dimension to the matrix, although materialized views are only executed using the simple settings (in-mem store, go client, etc) for now. --- .../workflows/test-and-upload-coverage.yml | 19 ++ cli/cli.go | 1 + cli/view_refresh.go | 75 ++++++ client/collection_description.go | 11 + client/db.go | 8 + client/mocks/db.go | 47 ++++ .../i2951-no-change-tests-updated.md | 3 + .../references/cli/defradb_client_view.md | 1 + .../cli/defradb_client_view_refresh.md | 65 ++++++ docs/website/references/http/openapi.json | 60 +++++ http/client.go | 26 +++ http/handler_store.go | 45 ++++ http/http_client.go | 2 +- internal/core/key.go | 70 ++++++ internal/core/view_item.go | 129 ++++++++++ internal/db/collection_define.go | 12 + internal/db/definition_validation.go | 38 +++ internal/db/errors.go | 18 ++ internal/db/lens.go | 2 + internal/db/store.go | 20 ++ internal/db/view.go | 220 ++++++++++++++++++ internal/planner/lens.go | 28 ++- internal/planner/view.go | 193 ++++++++++++--- internal/request/graphql/schema/collection.go | 36 ++- internal/request/graphql/schema/manager.go | 1 + .../request/graphql/schema/types/types.go | 20 ++ tests/clients/cli/wrapper.go | 19 ++ tests/clients/http/wrapper.go | 4 + .../collection_description/simple_test.go | 5 +- .../updates/replace/materialized_test.go | 121 ++++++++++ .../updates/replace/name_test.go | 13 +- .../replace/query_source_query_test.go | 4 +- .../replace/query_source_transform_test.go | 2 +- .../updates/replace/view_policy_test.go | 95 ++++++++ .../with_default_fields_test.go | 3 +- .../explain/debug/with_view_test.go | 8 +- .../explain/debug/with_view_transform_test.go | 6 +- tests/integration/index/create_unique_test.go | 1 - tests/integration/results.go | 3 +- .../schema/migrations/simple_test.go | 9 + .../schema/migrations/with_txn_test.go | 2 + tests/integration/schema/one_many_test.go | 15 +- tests/integration/schema/one_one_test.go | 3 +- tests/integration/schema/simple_test.go | 3 +- .../schema/updates/with_schema_branch_test.go | 15 ++ tests/integration/test_case.go | 24 ++ tests/integration/utils.go | 70 ++++++ .../view/one_to_many/simple_test.go | 18 +- 
.../view/one_to_many/with_alias_test.go | 4 +- .../view/one_to_many/with_count_test.go | 6 +- .../one_to_many/with_introspection_test.go | 2 +- .../view/one_to_many/with_transform_test.go | 14 +- .../view/one_to_one/identical_schema_test.go | 4 +- .../view/one_to_one/simple_test.go | 4 +- .../view/one_to_one/with_restart_test.go | 2 +- .../view/one_to_one/with_transform_test.go | 2 +- .../view/simple/materialized_test.go | 129 ++++++++++ tests/integration/view/simple/simple_test.go | 14 +- .../view/simple/with_alias_test.go | 2 +- .../view/simple/with_default_value_test.go | 2 +- .../view/simple/with_filter_test.go | 4 +- .../view/simple/with_introspection_test.go | 2 +- .../view/simple/with_transform_test.go | 8 +- 63 files changed, 1673 insertions(+), 119 deletions(-) create mode 100644 cli/view_refresh.go create mode 100644 docs/data_format_changes/i2951-no-change-tests-updated.md create mode 100644 docs/website/references/cli/defradb_client_view_refresh.md create mode 100644 internal/core/view_item.go create mode 100644 tests/integration/collection_description/updates/replace/materialized_test.go create mode 100644 tests/integration/collection_description/updates/replace/view_policy_test.go create mode 100644 tests/integration/view/simple/materialized_test.go diff --git a/.github/workflows/test-and-upload-coverage.yml b/.github/workflows/test-and-upload-coverage.yml index b94ae0686a..c002f2ec08 100644 --- a/.github/workflows/test-and-upload-coverage.yml +++ b/.github/workflows/test-and-upload-coverage.yml @@ -37,6 +37,7 @@ jobs: lens-type: [wasm-time] acp-type: [local] database-encryption: [false] + view-type: [cacheless] include: - os: ubuntu-latest client-type: go @@ -45,6 +46,7 @@ jobs: lens-type: wasm-time acp-type: local database-encryption: true + view-type: cacheless - os: ubuntu-latest client-type: go database-type: badger-memory @@ -52,6 +54,7 @@ jobs: lens-type: wazero acp-type: local database-encryption: false + view-type: cacheless - os: ubuntu-latest client-type: go database-type: badger-memory @@ -59,6 +62,7 @@ jobs: lens-type: wasmer acp-type: local database-encryption: false + view-type: cacheless - os: ubuntu-latest client-type: go database-type: badger-memory @@ -66,6 +70,7 @@ jobs: lens-type: wasm-time acp-type: source-hub database-encryption: false + view-type: cacheless - os: ubuntu-latest client-type: http database-type: badger-memory @@ -73,6 +78,7 @@ jobs: lens-type: wasm-time acp-type: source-hub database-encryption: false + view-type: cacheless - os: ubuntu-latest client-type: cli database-type: badger-memory @@ -80,6 +86,15 @@ jobs: lens-type: wasm-time acp-type: source-hub database-encryption: false + view-type: cacheless + - os: ubuntu-latest + client-type: go + database-type: badger-memory + mutation-type: collection-save + lens-type: wasm-time + acp-type: local + database-encryption: false + view-type: materialized - os: macos-latest client-type: go database-type: badger-memory @@ -87,6 +102,7 @@ jobs: lens-type: wasm-time acp-type: local database-encryption: false + view-type: cacheless ## TODO: https://github.com/sourcenetwork/defradb/issues/2080 ## Uncomment the lines below to Re-enable the windows build once this todo is resolved. 
## - os: windows-latest
@@ -96,6 +112,7 @@
 ##           lens-type: wasm-time
 ##           acp-type: local
 ##           database-encryption: false
+##           view-type: cacheless
 
     runs-on: ${{ matrix.os }}
 
@@ -115,6 +132,7 @@
       DEFRA_MUTATION_TYPE: ${{ matrix.mutation-type }}
       DEFRA_LENS_TYPE: ${{ matrix.lens-type }}
      DEFRA_ACP_TYPE: ${{ matrix.acp-type }}
+      DEFRA_VIEW_TYPE: ${{ matrix.view-type }}
 
     steps:
       - name: Checkout code into the directory
@@ -204,6 +222,7 @@
             _${{ matrix.mutation-type }}\
             _${{ matrix.lens-type }}\
             _${{ matrix.acp-type }}\
+            _${{ matrix.view-type }}\
             _${{ matrix.database-encryption }}\
             "
           path: coverage.txt
diff --git a/cli/cli.go b/cli/cli.go
index e87ea8dec9..0875e325fe 100644
--- a/cli/cli.go
+++ b/cli/cli.go
@@ -75,6 +75,7 @@ func NewDefraCommand() *cobra.Command {
 	view := MakeViewCommand()
 	view.AddCommand(
 		MakeViewAddCommand(),
+		MakeViewRefreshCommand(),
 	)
 
 	index := MakeIndexCommand()
diff --git a/cli/view_refresh.go b/cli/view_refresh.go
new file mode 100644
index 0000000000..b9b8a64329
--- /dev/null
+++ b/cli/view_refresh.go
@@ -0,0 +1,75 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package cli
+
+import (
+	"github.com/sourcenetwork/immutable"
+	"github.com/spf13/cobra"
+
+	"github.com/sourcenetwork/defradb/client"
+)
+
+func MakeViewRefreshCommand() *cobra.Command {
+	var name string
+	var schemaRoot string
+	var versionID string
+	var getInactive bool
+	var cmd = &cobra.Command{
+		Use:   "refresh",
+		Short: "Refresh views.",
+		Long: `Refresh views, executing the underlying query and LensVm transforms and
+persisting the results.
+
+View is refreshed as the current user, meaning the cached items will reflect that user's
+permissions. Subsequent query requests to the view, regardless of user, will receive
+items from that cache.
+
+Example: refresh all views
+  defradb client view refresh
+
+Example: refresh views by name
+  defradb client view refresh --name UserView
+
+Example: refresh views by schema root id
+  defradb client view refresh --schema bae123
+
+Example: refresh views by version id.
This will also return inactive views
+  defradb client view refresh --version bae123
+	`,
+		RunE: func(cmd *cobra.Command, args []string) error {
+			store := mustGetContextStore(cmd)
+
+			options := client.CollectionFetchOptions{}
+			if versionID != "" {
+				options.SchemaVersionID = immutable.Some(versionID)
+			}
+			if schemaRoot != "" {
+				options.SchemaRoot = immutable.Some(schemaRoot)
+			}
+			if name != "" {
+				options.Name = immutable.Some(name)
+			}
+			if getInactive {
+				options.IncludeInactive = immutable.Some(getInactive)
+			}
+
+			return store.RefreshViews(
+				cmd.Context(),
+				options,
+			)
+		},
+	}
+	cmd.Flags().StringVar(&name, "name", "", "View name")
+	cmd.Flags().StringVar(&schemaRoot, "schema", "", "View schema Root")
+	cmd.Flags().StringVar(&versionID, "version", "", "View version ID")
+	cmd.Flags().BoolVar(&getInactive, "get-inactive", false, "Get inactive views as well as active")
+	return cmd
+}
diff --git a/client/collection_description.go b/client/collection_description.go
index aa22bf7121..d86a252644 100644
--- a/client/collection_description.go
+++ b/client/collection_description.go
@@ -79,6 +79,15 @@ type CollectionDescription struct {
 	// parsing is done, to avoid storing an invalid policyID or policy resource
 	// that may not even exist on acp.
 	Policy immutable.Option[PolicyDescription]
+
+	// IsMaterialized defines whether the items in this collection are cached or not.
+	//
+	// If true, the items are cached; if false, the data returned on query will be calculated
+	// at query-time from source.
+	//
+	// At the moment this can only be set to `false` if this collection sources its data from
+	// another collection/query (is a View).
+	IsMaterialized bool
 }
 
 // QuerySource represents a collection data source from a query.
@@ -179,6 +188,7 @@ type collectionDescription struct {
 	ID              uint32
 	RootID          uint32
 	SchemaVersionID string
+	IsMaterialized  bool
 	Policy          immutable.Option[PolicyDescription]
 	Indexes         []IndexDescription
 	Fields          []CollectionFieldDescription
@@ -198,6 +208,7 @@ func (c *CollectionDescription) UnmarshalJSON(bytes []byte) error {
 	c.ID = descMap.ID
 	c.RootID = descMap.RootID
 	c.SchemaVersionID = descMap.SchemaVersionID
+	c.IsMaterialized = descMap.IsMaterialized
 	c.Indexes = descMap.Indexes
 	c.Fields = descMap.Fields
 	c.Sources = make([]any, len(descMap.Sources))
diff --git a/client/db.go b/client/db.go
index ccaa296018..ed8ba05b7c 100644
--- a/client/db.go
+++ b/client/db.go
@@ -196,6 +196,14 @@ type Store interface {
 		transform immutable.Option[model.Lens],
 	) ([]CollectionDefinition, error)
 
+	// RefreshViews refreshes the caches of all views matching the given options. If no options are set, all views
+	// will be refreshed.
+	//
+	// The cached result is dependent on the ACP settings of the source data and the permissions of the user making
+	// the call. At the moment only one cache can be active at a time, so please pay attention to access rights
+	// when making this call.
+	RefreshViews(context.Context, CollectionFetchOptions) error
+
 	// SetMigration sets the migration for all collections using the given source-destination schema version IDs.
 	//
 	// There may only be one migration per collection version.
If another migration was registered it will be diff --git a/client/mocks/db.go b/client/mocks/db.go index eeb83da291..b14aec5d05 100644 --- a/client/mocks/db.go +++ b/client/mocks/db.go @@ -1489,6 +1489,53 @@ func (_c *DB_PrintDump_Call) RunAndReturn(run func(context.Context) error) *DB_P return _c } +// RefreshViews provides a mock function with given fields: _a0, _a1 +func (_m *DB) RefreshViews(_a0 context.Context, _a1 client.CollectionFetchOptions) error { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for RefreshViews") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, client.CollectionFetchOptions) error); ok { + r0 = rf(_a0, _a1) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DB_RefreshViews_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RefreshViews' +type DB_RefreshViews_Call struct { + *mock.Call +} + +// RefreshViews is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 client.CollectionFetchOptions +func (_e *DB_Expecter) RefreshViews(_a0 interface{}, _a1 interface{}) *DB_RefreshViews_Call { + return &DB_RefreshViews_Call{Call: _e.mock.On("RefreshViews", _a0, _a1)} +} + +func (_c *DB_RefreshViews_Call) Run(run func(_a0 context.Context, _a1 client.CollectionFetchOptions)) *DB_RefreshViews_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(client.CollectionFetchOptions)) + }) + return _c +} + +func (_c *DB_RefreshViews_Call) Return(_a0 error) *DB_RefreshViews_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *DB_RefreshViews_Call) RunAndReturn(run func(context.Context, client.CollectionFetchOptions) error) *DB_RefreshViews_Call { + _c.Call.Return(run) + return _c +} + // RemoveP2PCollections provides a mock function with given fields: ctx, collectionIDs func (_m *DB) RemoveP2PCollections(ctx context.Context, collectionIDs []string) error { ret := _m.Called(ctx, collectionIDs) diff --git a/docs/data_format_changes/i2951-no-change-tests-updated.md b/docs/data_format_changes/i2951-no-change-tests-updated.md new file mode 100644 index 0000000000..6e9c0c82fc --- /dev/null +++ b/docs/data_format_changes/i2951-no-change-tests-updated.md @@ -0,0 +1,3 @@ +# Materialized views + +Views have been made materialized by default, this caused the tests to change slightly. diff --git a/docs/website/references/cli/defradb_client_view.md b/docs/website/references/cli/defradb_client_view.md index 81a50abf60..faee845c64 100644 --- a/docs/website/references/cli/defradb_client_view.md +++ b/docs/website/references/cli/defradb_client_view.md @@ -37,4 +37,5 @@ Manage (add) views withing a running DefraDB instance * [defradb client](defradb_client.md) - Interact with a DefraDB node * [defradb client view add](defradb_client_view_add.md) - Add new view +* [defradb client view refresh](defradb_client_view_refresh.md) - Refresh views. diff --git a/docs/website/references/cli/defradb_client_view_refresh.md b/docs/website/references/cli/defradb_client_view_refresh.md new file mode 100644 index 0000000000..0a91aec604 --- /dev/null +++ b/docs/website/references/cli/defradb_client_view_refresh.md @@ -0,0 +1,65 @@ +## defradb client view refresh + +Refresh views. + +### Synopsis + +Refresh views, executing the underlying query and LensVm transforms and +persisting the results. + +View is refreshed as the current user, meaning the cached items will reflect that user's +permissions. 
Subsequent query requests to the view, regardless of user, will receive +items from that cache. + +Example: refresh all views + defradb client view refresh + +Example: refresh views by name + defradb client view refresh --name UserView + +Example: refresh views by schema root id + defradb client view refresh --schema bae123 + +Example: refresh views by version id. This will also return inactive views + defradb client view refresh --version bae123 + + +``` +defradb client view refresh [flags] +``` + +### Options + +``` + --get-inactive Get inactive views as well as active + -h, --help help for refresh + --name string View name + --schema string View schema Root + --version string View version ID +``` + +### Options inherited from parent commands + +``` + -i, --identity string Hex formatted private key used to authenticate with ACP + --keyring-backend string Keyring backend to use. Options are file or system (default "file") + --keyring-namespace string Service name to use when using the system backend (default "defradb") + --keyring-path string Path to store encrypted keys when using the file backend (default "keys") + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format ,=,...;,... + --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs + --no-keyring Disable the keyring and generate ephemeral keys + --no-log-color Disable colored log output + --rootdir string Directory for persistent data (default: $HOME/.defradb) + --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor + --tx uint Transaction ID + --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") +``` + +### SEE ALSO + +* [defradb client view](defradb_client_view.md) - Manage views within a running DefraDB instance + diff --git a/docs/website/references/http/openapi.json b/docs/website/references/http/openapi.json index 4462c3d9f3..470359097e 100644 --- a/docs/website/references/http/openapi.json +++ b/docs/website/references/http/openapi.json @@ -137,6 +137,9 @@ }, "type": "array" }, + "IsMaterialized": { + "type": "boolean" + }, "Name": {}, "Policy": {}, "RootID": { @@ -215,6 +218,9 @@ }, "type": "array" }, + "IsMaterialized": { + "type": "boolean" + }, "Name": {}, "Policy": {}, "RootID": { @@ -2043,6 +2049,60 @@ "view" ] } + }, + "/view/refresh": { + "post": { + "description": "Refresh view(s) by name, schema id, or version id.", + "operationId": "view_refresh", + "parameters": [ + { + "description": "Collection name", + "in": "query", + "name": "name", + "schema": { + "type": "string" + } + }, + { + "description": "Collection schema root", + "in": "query", + "name": "schema_root", + "schema": { + "type": "string" + } + }, + { + "description": "Collection schema version id", + "in": "query", + "name": "version_id", + "schema": { + "type": "string" + } + }, + { + "description": "If true, inactive collections will be returned in addition to active ones", + "in": "query", + "name": "get_inactive", + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "$ref": "#/components/responses/success" + }, + "400": { + "$ref": "#/components/responses/error" + }, + "default": { + "description": "" + } + 
}, + "tags": [ + "view" + ] + } } }, "servers": [ diff --git a/http/client.go b/http/client.go index 239b087cd8..0c59e23757 100644 --- a/http/client.go +++ b/http/client.go @@ -219,6 +219,32 @@ func (c *Client) AddView( return descriptions, nil } +func (c *Client) RefreshViews(ctx context.Context, options client.CollectionFetchOptions) error { + methodURL := c.http.baseURL.JoinPath("view", "refresh") + params := url.Values{} + if options.Name.HasValue() { + params.Add("name", options.Name.Value()) + } + if options.SchemaVersionID.HasValue() { + params.Add("version_id", options.SchemaVersionID.Value()) + } + if options.SchemaRoot.HasValue() { + params.Add("schema_root", options.SchemaRoot.Value()) + } + if options.IncludeInactive.HasValue() { + params.Add("get_inactive", strconv.FormatBool(options.IncludeInactive.Value())) + } + methodURL.RawQuery = params.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) + if err != nil { + return err + } + + _, err = c.http.request(req) + return err +} + func (c *Client) SetMigration(ctx context.Context, config client.LensConfig) error { methodURL := c.http.baseURL.JoinPath("lens") diff --git a/http/handler_store.go b/http/handler_store.go index 6e28a96b31..3f4c52c800 100644 --- a/http/handler_store.go +++ b/http/handler_store.go @@ -220,6 +220,38 @@ func (s *storeHandler) GetSchema(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusOK, schema) } +func (s *storeHandler) RefreshViews(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(dbContextKey).(client.Store) + + options := client.CollectionFetchOptions{} + if req.URL.Query().Has("name") { + options.Name = immutable.Some(req.URL.Query().Get("name")) + } + if req.URL.Query().Has("version_id") { + options.SchemaVersionID = immutable.Some(req.URL.Query().Get("version_id")) + } + if req.URL.Query().Has("schema_root") { + options.SchemaRoot = immutable.Some(req.URL.Query().Get("schema_root")) + } + if req.URL.Query().Has("get_inactive") { + getInactiveStr := req.URL.Query().Get("get_inactive") + var err error + getInactive, err := strconv.ParseBool(getInactiveStr) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + options.IncludeInactive = immutable.Some(getInactive) + } + + err := store.RefreshViews(req.Context(), options) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + rw.WriteHeader(http.StatusOK) +} + func (s *storeHandler) GetAllIndexes(rw http.ResponseWriter, req *http.Request) { store := req.Context().Value(dbContextKey).(client.Store) @@ -491,6 +523,18 @@ func (h *storeHandler) bindRoutes(router *Router) { collectionDescribe.AddResponse(200, collectionsResponse) collectionDescribe.Responses.Set("400", errorResponse) + viewRefresh := openapi3.NewOperation() + viewRefresh.OperationID = "view_refresh" + viewRefresh.Description = "Refresh view(s) by name, schema id, or version id." 
+ viewRefresh.Tags = []string{"view"} + viewRefresh.AddParameter(collectionNameQueryParam) + viewRefresh.AddParameter(collectionSchemaRootQueryParam) + viewRefresh.AddParameter(collectionVersionIdQueryParam) + viewRefresh.AddParameter(collectionGetInactiveQueryParam) + viewRefresh.Responses = openapi3.NewResponses() + viewRefresh.Responses.Set("200", successResponse) + viewRefresh.Responses.Set("400", errorResponse) + patchCollection := openapi3.NewOperation() patchCollection.OperationID = "patch_collection" patchCollection.Description = "Update collection definitions" @@ -618,6 +662,7 @@ func (h *storeHandler) bindRoutes(router *Router) { router.AddRoute("/collections", http.MethodGet, collectionDescribe, h.GetCollection) router.AddRoute("/collections", http.MethodPatch, patchCollection, h.PatchCollection) router.AddRoute("/view", http.MethodPost, views, h.AddView) + router.AddRoute("/view/refresh", http.MethodPost, viewRefresh, h.RefreshViews) router.AddRoute("/graphql", http.MethodGet, graphQLGet, h.ExecRequest) router.AddRoute("/graphql", http.MethodPost, graphQLPost, h.ExecRequest) router.AddRoute("/debug/dump", http.MethodGet, debugDump, h.PrintDump) diff --git a/http/http_client.go b/http/http_client.go index e13b40dd11..5b7b75577d 100644 --- a/http/http_client.go +++ b/http/http_client.go @@ -81,7 +81,7 @@ func (c *httpClient) request(req *http.Request) ([]byte, error) { // attempt to parse json error var errRes errorResponse if err := json.Unmarshal(data, &errRes); err != nil { - return nil, fmt.Errorf("%s", data) + return nil, fmt.Errorf("%v: %s", res.StatusCode, data) } return nil, errRes.Error } diff --git a/internal/core/key.go b/internal/core/key.go index ecbe3fd0d7..b913a75f54 100644 --- a/internal/core/key.go +++ b/internal/core/key.go @@ -21,6 +21,7 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/errors" + "github.com/sourcenetwork/defradb/internal/encoding" ) var ( @@ -49,6 +50,7 @@ const ( COLLECTION_SCHEMA_VERSION = "/collection/version" COLLECTION_ROOT = "/collection/root" COLLECTION_INDEX = "/collection/index" + COLLECTION_VIEW_ITEMS = "/collection/vi" SCHEMA_VERSION = "/schema/version/v" SCHEMA_VERSION_ROOT = "/schema/version/r" COLLECTION_SEQ = "/seq/collection" @@ -77,6 +79,24 @@ type DataStoreKey struct { var _ Key = (*DataStoreKey)(nil) +// ViewCacheKey is a trimmed down [DataStoreKey] used for caching the results +// of View items. +// +// It is stored in the format `/collection/vi/[CollectionRootID]/[ItemID]`. It points to the +// full serialized View item. +type ViewCacheKey struct { + // CollectionRootID is the Root of the Collection that this item belongs to. + CollectionRootID uint32 + + // ItemID is the unique (to this CollectionRootID) ID of the View item. + // + // For now this is essentially just the index of the item in the result-set, however + // that is likely to change in the near future. + ItemID uint +} + +var _ Key = (*ViewCacheKey)(nil) + // IndexedField contains information necessary for storing a single // value of a field in an index. 
type IndexedField struct { @@ -527,6 +547,56 @@ func (k DataStoreKey) ToPrimaryDataStoreKey() PrimaryDataStoreKey { } } +func NewViewCacheColPrefix(rootID uint32) ViewCacheKey { + return ViewCacheKey{ + CollectionRootID: rootID, + } +} + +func NewViewCacheKey(rootID uint32, itemID uint) ViewCacheKey { + return ViewCacheKey{ + CollectionRootID: rootID, + ItemID: itemID, + } +} + +func (k ViewCacheKey) ToString() string { + return string(k.Bytes()) +} + +func (k ViewCacheKey) Bytes() []byte { + result := []byte(COLLECTION_VIEW_ITEMS) + + if k.CollectionRootID != 0 { + result = append(result, '/') + result = encoding.EncodeUvarintAscending(result, uint64(k.CollectionRootID)) + } + + if k.ItemID != 0 { + result = append(result, '/') + result = encoding.EncodeUvarintAscending(result, uint64(k.ItemID)) + } + + return result +} + +func (k ViewCacheKey) ToDS() ds.Key { + return ds.NewKey(k.ToString()) +} + +func (k ViewCacheKey) PrettyPrint() string { + result := COLLECTION_VIEW_ITEMS + + if k.CollectionRootID != 0 { + result = result + "/" + strconv.Itoa(int(k.CollectionRootID)) + } + if k.ItemID != 0 { + result = result + "/" + strconv.Itoa(int(k.ItemID)) + } + + return result +} + // NewIndexDataStoreKey creates a new IndexDataStoreKey from a collection ID, index ID and fields. // It also validates values of the fields. func NewIndexDataStoreKey(collectionID, indexID uint32, fields []IndexedField) IndexDataStoreKey { diff --git a/internal/core/view_item.go b/internal/core/view_item.go new file mode 100644 index 0000000000..440bb6f918 --- /dev/null +++ b/internal/core/view_item.go @@ -0,0 +1,129 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package core + +import ( + "encoding/json" + + "github.com/sourcenetwork/defradb/client" +) + +// MarshalViewItem marshals the given doc ready for storage. +// +// It first trims the doc leaving only an array of field values (including +// relations), and then marshals that into json. +// +// Note: MarshalViewItem and UnmarshalViewItem rely on the Doc (and DocumentMapping) +// being consistent at write and read time. +func MarshalViewItem(doc Doc) ([]byte, error) { + trimmedDoc := trimDoc(doc) + return json.Marshal(trimmedDoc) +} + +func trimDoc(doc Doc) []any { + fields := make([]any, 0, len(doc.Fields)) + for _, field := range doc.Fields { + switch typedField := field.(type) { + case []Doc: + trimmedField := make([]any, 0, len(typedField)) + for _, innerDoc := range typedField { + trimmedField = append( + trimmedField, + trimDoc(innerDoc), + ) + } + fields = append(fields, trimmedField) + + case Doc: + fields = append( + fields, + trimDoc(typedField), + ) + + default: + fields = append(fields, typedField) + } + } + + return fields +} + +// UnmarshalViewItem unmarshals the given byte array into a [Doc] using the given +// mapping. +// +// It assumes that `bytes` is in the appropriate format (see MarshalViewItem) and +// will only error if the json unmarshalling fails. +// +// Note: MarshalViewItem and UnmarshalViewItem rely on the Doc (and DocumentMapping) +// being consistent at write and read time. 
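+//
+// A minimal round-trip sketch (hypothetical `mapping` and `doc` values, assumed
+// to come from the same planner run):
+//
+//	bytes, _ := MarshalViewItem(doc)
+//	doc2, _ := UnmarshalViewItem(mapping, bytes)
+//
+// `doc2` mirrors `doc` up to the trimming described above.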
+func UnmarshalViewItem(documentMap *DocumentMapping, bytes []byte) (Doc, error) {
+	var trimmedDoc []any
+	err := json.Unmarshal(bytes, &trimmedDoc)
+	if err != nil {
+		return Doc{}, err
+	}
+
+	return expandViewItem(documentMap, trimmedDoc), nil
+}
+
+func expandViewItem(documentMap *DocumentMapping, trimmed []any) Doc {
+	fields := make(DocFields, len(trimmed))
+
+	for _, indexes := range documentMap.IndexesByName {
+		for _, index := range indexes {
+			fieldValue := trimmed[index]
+			var childMapping *DocumentMapping
+			if index < len(documentMap.ChildMappings) {
+				childMapping = documentMap.ChildMappings[index]
+			}
+
+			if childMapping == nil {
+				// If the childMapping is nil, this property must not be a relation and we can
+				// set the value and continue.
+				fields[index] = fieldValue
+				continue
+			}
+
+			if untypedArray, ok := fieldValue.([]any); ok {
+				isArrayOfDocs := true
+				for _, inner := range untypedArray {
+					if _, ok := inner.([]any); !ok {
+						// To know if this is an array of documents we need to check the inner values to see if
+						// this is essentially an `[][]any`
+						isArrayOfDocs = false
+						break
+					}
+				}
+
+				if isArrayOfDocs {
+					innerDocs := make([]Doc, 0, len(untypedArray))
+					for _, inner := range untypedArray {
+						innerDocs = append(innerDocs, expandViewItem(childMapping, inner.([]any)))
+					}
+					fields[index] = innerDocs
+				} else {
+					fields[index] = expandViewItem(childMapping, untypedArray)
+				}
+			}
+			// else: no-op
+			//
+			// The relation must be either an empty array (many side of one-many), or
+			// nil (one side of either a one-many or one-one). Either way the value is nil/default
+			// and we can continue
+		}
+	}
+
+	return Doc{
+		Hidden: false,
+		Fields: fields,
+		Status: client.Active,
+	}
+}
diff --git a/internal/db/collection_define.go b/internal/db/collection_define.go
index 357ab07d61..6597a54f1c 100644
--- a/internal/db/collection_define.go
+++ b/internal/db/collection_define.go
@@ -157,6 +157,18 @@ func (db *db) patchCollection(
 
 		existingCol, ok := existingColsByID[col.ID]
 		if ok {
+			if existingCol.IsMaterialized && !col.IsMaterialized {
+				// If the collection is being de-materialized - delete any cached values.
+				// Leaving them around will not break anything, but it would be a waste of
+				// storage space.
+				err := db.clearViewCache(ctx, client.CollectionDefinition{
+					Description: col,
+				})
+				if err != nil {
+					return err
+				}
+			}
+
 			// Clear any existing migrations in the registry, using this semi-hacky way
 			// to avoid adding more functions to a public interface that we wish to remove.
diff --git a/internal/db/definition_validation.go b/internal/db/definition_validation.go
index f58e75fa7e..47c222bf9c 100644
--- a/internal/db/definition_validation.go
+++ b/internal/db/definition_validation.go
@@ -164,6 +164,8 @@ var globalValidators = []definitionValidator{
 	validateTypeAndKindCompatible,
 	validateFieldNotDuplicated,
 	validateSelfReferences,
+	validateCollectionMaterialized,
+	validateMaterializedHasNoPolicy,
 }
 
 var createValidators = append(
@@ -978,3 +980,39 @@
 
 	return nil
 }
+
+// validateCollectionMaterialized verifies that a non-view collection is materialized.
+//
+// Long term we wish to support this; however, for now we block it off.
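+//
+// For example, a regular (non-view) collection declared with
+// `@materialized(if: false)` in its SDL would be rejected by this validator
+// with ErrColNotMaterialized.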
+func validateCollectionMaterialized( + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, +) error { + for _, col := range newState.collections { + if len(col.QuerySources()) == 0 && !col.IsMaterialized { + return NewErrColNotMaterialized(col.Name.Value()) + } + } + + return nil +} + +// validateMaterializedHasNoPolicy verifies that a materialized view has no ACP policy. +// +// Long term we wish to support this, however for now we block it off. +func validateMaterializedHasNoPolicy( + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, +) error { + for _, col := range newState.collections { + if col.IsMaterialized && len(col.QuerySources()) != 0 && col.Policy.HasValue() { + return NewErrMaterializedViewAndACPNotSupported(col.Name.Value()) + } + } + + return nil +} diff --git a/internal/db/errors.go b/internal/db/errors.go index 71f7978a1b..2da8c9c734 100644 --- a/internal/db/errors.go +++ b/internal/db/errors.go @@ -102,6 +102,8 @@ const ( errReplicatorNotFound string = "replicator not found" errCanNotEncryptBuiltinField string = "can not encrypt build-in field" errSelfReferenceWithoutSelf string = "must specify 'Self' kind for self referencing relations" + errColNotMaterialized string = "non-materialized collections are not supported" + errMaterializedViewAndACPNotSupported string = "materialized views do not support ACP" ) var ( @@ -143,6 +145,8 @@ var ( ErrReplicatorNotFound = errors.New(errReplicatorNotFound) ErrCanNotEncryptBuiltinField = errors.New(errCanNotEncryptBuiltinField) ErrSelfReferenceWithoutSelf = errors.New(errSelfReferenceWithoutSelf) + ErrColNotMaterialized = errors.New(errColNotMaterialized) + ErrMaterializedViewAndACPNotSupported = errors.New(errMaterializedViewAndACPNotSupported) ) // NewErrFailedToGetHeads returns a new error indicating that the heads of a document @@ -659,3 +663,17 @@ func NewErrSelfReferenceWithoutSelf(fieldName string) error { errors.NewKV("Field", fieldName), ) } + +func NewErrColNotMaterialized(collection string) error { + return errors.New( + errColNotMaterialized, + errors.NewKV("Collection", collection), + ) +} + +func NewErrMaterializedViewAndACPNotSupported(collection string) error { + return errors.New( + errMaterializedViewAndACPNotSupported, + errors.NewKV("Collection", collection), + ) +} diff --git a/internal/db/lens.go b/internal/db/lens.go index 581ad41f58..bf0c9ce03a 100644 --- a/internal/db/lens.go +++ b/internal/db/lens.go @@ -52,6 +52,7 @@ func (db *db) setMigration(ctx context.Context, cfg client.LensConfig) error { ID: uint32(colID), RootID: client.OrphanRootID, SchemaVersionID: cfg.SourceSchemaVersionID, + IsMaterialized: true, } col, err := description.SaveCollection(ctx, txn, desc) @@ -96,6 +97,7 @@ func (db *db) setMigration(ctx context.Context, cfg client.LensConfig) error { ID: uint32(colID), RootID: sourceCol.RootID, SchemaVersionID: cfg.DestinationSchemaVersionID, + IsMaterialized: true, Sources: []any{ &client.CollectionSource{ SourceCollectionID: sourceCol.ID, diff --git a/internal/db/store.go b/internal/db/store.go index 5ebdde0605..89471b6ac6 100644 --- a/internal/db/store.go +++ b/internal/db/store.go @@ -242,6 +242,26 @@ func (db *db) AddView( return defs, nil } +func (db *db) RefreshViews(ctx context.Context, opts client.CollectionFetchOptions) error { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + return err + } + defer txn.Discard(ctx) + + err = db.refreshViews(ctx, opts) + if err != nil { + return err + } + + 
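+	// Commit only after every matching view has been rebuilt, so a failed
+	// refresh leaves the previously cached items untouched.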
err = txn.Commit(ctx)
+	if err != nil {
+		return err
+	}
+
+	return nil
+}
+
 // BasicImport imports a json dataset.
 // filepath must be accessible to the node.
 func (db *db) BasicImport(ctx context.Context, filepath string) error {
diff --git a/internal/db/view.go b/internal/db/view.go
index 5ca702f26f..23bb3cad42 100644
--- a/internal/db/view.go
+++ b/internal/db/view.go
@@ -15,11 +15,16 @@ import (
 	"errors"
 	"fmt"
 
+	ds "github.com/ipfs/go-datastore"
+	"github.com/ipfs/go-datastore/query"
 	"github.com/lens-vm/lens/host-go/config/model"
 	"github.com/sourcenetwork/immutable"
 
 	"github.com/sourcenetwork/defradb/client"
 	"github.com/sourcenetwork/defradb/client/request"
+	"github.com/sourcenetwork/defradb/internal/core"
+	"github.com/sourcenetwork/defradb/internal/db/description"
+	"github.com/sourcenetwork/defradb/internal/planner"
 )
 
 func (db *db) addView(
@@ -88,3 +93,218 @@
 
 	return returnDescriptions, nil
 }
+
+func (db *db) refreshViews(ctx context.Context, opts client.CollectionFetchOptions) error {
+	// For now, we only support user-cache management of views, not all collections
+	cols, err := db.getViews(ctx, opts)
+	if err != nil {
+		return err
+	}
+
+	for _, col := range cols {
+		if !col.Description.IsMaterialized {
+			// We only care about materialized views here, so skip any that aren't
+			continue
+		}
+
+		// Clearing and then constructing is a bit inefficient, but it should do for now.
+		// Long term we probably want to update inline as much as possible to avoid unnecessarily
+		// moving/adding/deleting keys in storage
+		err := db.clearViewCache(ctx, col)
+		if err != nil {
+			return err
+		}
+
+		err = db.buildViewCache(ctx, col)
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+func (db *db) getViews(ctx context.Context, opts client.CollectionFetchOptions) ([]client.CollectionDefinition, error) {
+	cols, err := db.getCollections(ctx, opts)
+	if err != nil {
+		return nil, err
+	}
+
+	var views []client.CollectionDefinition
+	for _, col := range cols {
+		if querySrcs := col.Description().QuerySources(); len(querySrcs) == 0 {
+			continue
+		}
+
+		views = append(views, col.Definition())
+	}
+
+	return views, nil
+}
+
+func (db *db) buildViewCache(ctx context.Context, col client.CollectionDefinition) (err error) {
+	txn := mustGetContextTxn(ctx)
+	identity := GetContextIdentity(ctx)
+
+	p := planner.New(ctx, identity, db.acp, db, txn)
+
+	// temporarily disable the cache in order to query without using it
+	col.Description.IsMaterialized = false
+	col.Description, err = description.SaveCollection(ctx, txn, col.Description)
+	if err != nil {
+		return err
+	}
+	defer func() {
+		var defErr error
+		col.Description.IsMaterialized = true
+		col.Description, defErr = description.SaveCollection(ctx, txn, col.Description)
+		if err == nil {
+			// Do not overwrite the original error if there is one, defErr is probably an artifact of the original
+			// failure and can be discarded.
+			err = defErr
+		}
+	}()
+
+	request, err := db.generateMaximalSelectFromCollection(ctx, col, immutable.None[string](), map[string]struct{}{})
+	if err != nil {
+		return err
+	}
+
+	source, err := p.MakeSelectionPlan(request)
+	if err != nil {
+		return err
+	}
+
+	err = source.Init()
+	if err != nil {
+		return err
+	}
+	defer func() {
+		defErr := source.Close()
+		if err == nil {
+			// Do not overwrite the original error if there is one, defErr is probably an artifact of the original
+			// failure and can be discarded.
+ err = defErr + } + }() + + err = source.Start() + if err != nil { + return err + } + + hasValue, err := source.Next() + if err != nil { + return err + } + + // View items are currently keyed by their index, starting at 1. + // The order in which results are returned must be consistent with the results of the + // underlying query/transform. + var itemID uint + for itemID = 1; hasValue; itemID++ { + doc := source.Value() + + serializedItem, err := core.MarshalViewItem(doc) + if err != nil { + return err + } + + itemKey := core.NewViewCacheKey(col.Description.RootID, itemID) + err = txn.Datastore().Put(ctx, itemKey.ToDS(), serializedItem) + if err != nil { + return err + } + + hasValue, err = source.Next() + if err != nil { + return err + } + } + + return nil +} + +func (db *db) clearViewCache(ctx context.Context, col client.CollectionDefinition) error { + txn := mustGetContextTxn(ctx) + prefix := core.NewViewCacheColPrefix(col.Description.RootID) + + q, err := txn.Datastore().Query(ctx, query.Query{ + Prefix: prefix.ToString(), + KeysOnly: true, + }) + if err != nil { + return err + } + + for res := range q.Next() { + if res.Error != nil { + return errors.Join(res.Error, q.Close()) + } + + err = txn.Datastore().Delete(ctx, ds.NewKey(res.Key)) + if err != nil { + return errors.Join(err, q.Close()) + } + } + + return q.Close() +} + +func (db *db) generateMaximalSelectFromCollection( + ctx context.Context, + col client.CollectionDefinition, + fieldName immutable.Option[string], + typesHit map[string]struct{}, +) (*request.Select, error) { + // `__-` is an impossible field name prefix, so we can safely concat using it as a separator without risk + // of collision. + identifier := col.GetName() + "__-" + fieldName.Value() + if _, ok := typesHit[identifier]; ok { + // If this identifier is already in the set, the schema must be circular and we should return + return nil, nil + } + typesHit[identifier] = struct{}{} + + childRequests := []request.Selection{} + for _, field := range col.GetFields() { + if field.IsRelation() && field.Kind.IsObject() { + relatedCol, _, err := client.GetDefinitionFromStore(ctx, db, col, field.Kind) + if err != nil { + return nil, err + } + + innerSelect, err := db.generateMaximalSelectFromCollection( + ctx, + relatedCol, + immutable.Some(field.Name), + typesHit, + ) + if err != nil { + return nil, err + } + + if innerSelect != nil { + // innerSelect may be nil if a circular relationship is defined in the schema and we have already + // added this field + childRequests = append(childRequests, innerSelect) + } + } + } + + var name string + if fieldName.HasValue() { + name = fieldName.Value() + } else { + name = col.GetName() + } + + return &request.Select{ + Field: request.Field{ + Name: name, + }, + ChildSelect: request.ChildSelect{ + Fields: childRequests, + }, + }, nil +} diff --git a/internal/planner/lens.go b/internal/planner/lens.go index 5b18a60138..816e973b09 100644 --- a/internal/planner/lens.go +++ b/internal/planner/lens.go @@ -77,12 +77,7 @@ func (n *lensNode) Next() (bool, error) { return false, err } - nextValue, err := n.toDoc(lensDoc) - if err != nil { - return false, err - } - - n.currentValue = nextValue + n.currentValue = n.toDoc(n.documentMapping, lensDoc) return true, nil } @@ -106,7 +101,7 @@ func (n *lensNode) Next() (bool, error) { return n.Next() } -func (n *lensNode) toDoc(mapDoc map[string]any) (core.Doc, error) { +func (n *lensNode) toDoc(mapping *core.DocumentMapping, mapDoc map[string]any) core.Doc { status := client.Active properties := 
make([]any, len(mapDoc)) @@ -125,7 +120,7 @@ func (n *lensNode) toDoc(mapDoc map[string]any) (core.Doc, error) { continue } - indexes := n.documentMapping.IndexesByName[fieldName] + indexes := mapping.IndexesByName[fieldName] if len(indexes) == 0 { // Note: This can happen if a migration returns a field that // we do not know about. In which case we have to skip it. @@ -136,6 +131,21 @@ func (n *lensNode) toDoc(mapDoc map[string]any) (core.Doc, error) { // into a lens transform. fieldIndex := indexes[len(indexes)-1] + if fieldIndex < len(mapping.ChildMappings) && mapping.ChildMappings[fieldIndex] != nil { + switch typedValue := fieldValue.(type) { + case map[string]any: + fieldValue = n.toDoc(mapping.ChildMappings[fieldIndex], typedValue) + + case []any: + values := make([]core.Doc, 0, len(typedValue)) + for _, val := range typedValue { + innerDoc := n.toDoc(mapping.ChildMappings[fieldIndex], val.(map[string]any)) + values = append(values, innerDoc) + } + fieldValue = values + } + } + if len(properties) <= fieldIndex { // Because the document is sourced from another mapping, we may still need to grow // the resultant field set. We cannot use [append] because the index of each field @@ -151,7 +161,7 @@ func (n *lensNode) toDoc(mapDoc map[string]any) (core.Doc, error) { Fields: properties, SchemaVersionID: n.collection.SchemaVersionID, Status: status, - }, nil + } } func (n *lensNode) Source() planNode { diff --git a/internal/planner/view.go b/internal/planner/view.go index d014bbe108..0226a2c9c6 100644 --- a/internal/planner/view.go +++ b/internal/planner/view.go @@ -11,6 +11,8 @@ package planner import ( + "github.com/ipfs/go-datastore/query" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/internal/core" "github.com/sourcenetwork/defradb/internal/planner/mapper" @@ -33,18 +35,23 @@ func (p *Planner) View(query *mapper.Select, col client.Collection) (planNode, e querySource := (col.Description().Sources[0].(*client.QuerySource)) hasTransform := querySource.Transform.HasValue() - m, err := mapper.ToSelect(p.ctx, p.db, mapper.ObjectSelection, &querySource.Query) - if err != nil { - return nil, err - } + var source planNode + if col.Description().IsMaterialized { + source = p.newCachedViewFetcher(col.Definition(), query.DocumentMapping) + } else { + m, err := mapper.ToSelect(p.ctx, p.db, mapper.ObjectSelection, &querySource.Query) + if err != nil { + return nil, err + } - source, err := p.Select(m) - if err != nil { - return nil, err - } + source, err = p.Select(m) + if err != nil { + return nil, err + } - if hasTransform { - source = p.Lens(source, query.DocumentMapping, col) + if hasTransform { + source = p.Lens(source, query.DocumentMapping, col) + } } viewNode := &viewNode{ @@ -75,28 +82,11 @@ func (n *viewNode) Next() (bool, error) { } func (n *viewNode) Value() core.Doc { - sourceValue := n.source.Value() - if n.hasTransform { - // If this view has a transform the source document will already have been - // converted to the new document mapping. - return sourceValue - } - - sourceMap := n.source.DocumentMap().ToMap(sourceValue) - - // We must convert the document from the source mapping (which was constructed using the - // view's base query) to a document using the output mapping (which was constructed using - // the current query and the output schemas). We do this by source output name, which - // will take into account any aliases defined in the base query. 
- doc := n.docMapper.documentMapping.NewDoc() - for fieldName, fieldValue := range sourceMap { - // If the field does not exist, ignore it an continue. It likely means that - // the field was declared in the query but not the SDL, and if it is not in the - // SDL it cannot be requested/rendered by the user and would be dropped later anyway. - _ = n.docMapper.documentMapping.TrySetFirstOfName(&doc, fieldName, fieldValue) - } - - return doc + // The source mapping will differ from this node's (request) mapping if either a Lens transform is + // involved, if the the view is materialized, or if any kind of operation is performed on the result + // of the query (such as a filter or aggregate in the user-request), so we must convert the returned + // documents to the request mapping + return convertBetweenMaps(n.source.DocumentMap(), n.documentMapping, n.source.Value()) } func (n *viewNode) Source() planNode { @@ -117,3 +107,142 @@ func (n *viewNode) Close() error { return nil } + +func convertBetweenMaps(srcMap *core.DocumentMapping, dstMap *core.DocumentMapping, src core.Doc) core.Doc { + dst := dstMap.NewDoc() + + srcRenderKeysByIndex := map[int]string{} + for _, renderKey := range srcMap.RenderKeys { + srcRenderKeysByIndex[renderKey.Index] = renderKey.Key + } + + for underlyingName, srcIndexes := range srcMap.IndexesByName { + for _, srcIndex := range srcIndexes { + if srcIndex >= len(src.Fields) { + // Several system fields are not included in schema only types, and there is a mismatch somewhere + // that means we have to handle them here with a continue + continue + } + + var dstName string + if key, ok := srcRenderKeysByIndex[srcIndex]; ok { + dstName = key + } else { + dstName = underlyingName + } + + dstIndexes, dstHasField := dstMap.IndexesByName[dstName] + if !dstHasField { + continue + } + + for _, dstIndex := range dstIndexes { + var srcValue any + if srcIndex < len(srcMap.ChildMappings) && srcMap.ChildMappings[srcIndex] != nil { + if dstIndex >= len(dstMap.ChildMappings) || dstMap.ChildMappings[dstIndex] == nil { + continue + } + + switch inner := src.Fields[srcIndex].(type) { + case core.Doc: + srcValue = convertBetweenMaps(srcMap.ChildMappings[srcIndex], dstMap.ChildMappings[dstIndex], inner) + + case []core.Doc: + dstInners := make([]core.Doc, len(inner)) + for i, srcInnerDoc := range inner { + dstInners[i] = convertBetweenMaps(srcMap.ChildMappings[srcIndex], dstMap.ChildMappings[dstIndex], srcInnerDoc) + } + srcValue = dstInners + } + } else { + srcValue = src.Fields[srcIndex] + } + + dst.Fields[dstIndex] = srcValue + } + } + } + + return dst +} + +// cachedViewFetcher is a planner node that fetches view items from a materialized cache. 
+type cachedViewFetcher struct { + docMapper + documentIterator + + def client.CollectionDefinition + p *Planner + + queryResults query.Results +} + +var _ planNode = (*cachedViewFetcher)(nil) + +func (p *Planner) newCachedViewFetcher( + def client.CollectionDefinition, + mapping *core.DocumentMapping, +) *cachedViewFetcher { + return &cachedViewFetcher{ + def: def, + p: p, + docMapper: docMapper{mapping}, + } +} + +func (n *cachedViewFetcher) Init() error { + if n.queryResults != nil { + err := n.queryResults.Close() + if err != nil { + return err + } + n.queryResults = nil + } + + prefix := core.NewViewCacheColPrefix(n.def.Description.RootID) + + var err error + n.queryResults, err = n.p.txn.Datastore().Query(n.p.ctx, query.Query{ + Prefix: prefix.ToString(), + }) + if err != nil { + return err + } + + return nil +} + +func (n *cachedViewFetcher) Start() error { + return nil +} + +func (n *cachedViewFetcher) Spans(spans core.Spans) { + // no-op +} + +func (n *cachedViewFetcher) Next() (bool, error) { + result, hasNext := n.queryResults.NextSync() + if !hasNext || result.Error != nil { + return false, result.Error + } + + var err error + n.currentValue, err = core.UnmarshalViewItem(n.documentMapping, result.Value) + if err != nil { + return false, err + } + + return true, nil +} + +func (n *cachedViewFetcher) Source() planNode { + return nil +} + +func (n *cachedViewFetcher) Kind() string { + return "cachedViewFetcher" +} + +func (n *cachedViewFetcher) Close() error { + return n.queryResults.Close() +} diff --git a/internal/request/graphql/schema/collection.go b/internal/request/graphql/schema/collection.go index 58536db3af..15c572921a 100644 --- a/internal/request/graphql/schema/collection.go +++ b/internal/request/graphql/schema/collection.go @@ -186,29 +186,51 @@ func collectionFromAstDefinition( return collectionFieldDescriptions[i].Name < collectionFieldDescriptions[j].Name }) + isMaterialized := immutable.None[bool]() for _, directive := range def.Directives { - if directive.Name.Value == types.IndexDirectiveLabel { + switch directive.Name.Value { + case types.IndexDirectiveLabel: index, err := indexFromAST(directive, nil) if err != nil { return client.CollectionDefinition{}, err } indexDescriptions = append(indexDescriptions, index) - } - if directive.Name.Value == types.PolicySchemaDirectiveLabel { + + case types.PolicySchemaDirectiveLabel: policy, err := policyFromAST(directive) if err != nil { return client.CollectionDefinition{}, err } policyDescription = immutable.Some(policy) + + case types.MaterializedDirectiveLabel: + if isMaterialized.Value() { + continue + } + + explicitIsMaterialized := immutable.None[bool]() + for _, arg := range directive.Arguments { + if arg.Name.Value == types.MaterializedDirectivePropIf { + explicitIsMaterialized = immutable.Some(arg.Value.GetValue().(bool)) + break + } + } + + if explicitIsMaterialized.HasValue() { + isMaterialized = immutable.Some(isMaterialized.Value() || explicitIsMaterialized.Value()) + } else { + isMaterialized = immutable.Some(true) + } } } return client.CollectionDefinition{ Description: client.CollectionDescription{ - Name: immutable.Some(def.Name.Value), - Indexes: indexDescriptions, - Policy: policyDescription, - Fields: collectionFieldDescriptions, + Name: immutable.Some(def.Name.Value), + Indexes: indexDescriptions, + Policy: policyDescription, + Fields: collectionFieldDescriptions, + IsMaterialized: !isMaterialized.HasValue() || isMaterialized.Value(), }, Schema: client.SchemaDescription{ Name: def.Name.Value, diff 
--git a/internal/request/graphql/schema/manager.go b/internal/request/graphql/schema/manager.go index 66f1eb54c2..0385c50ac9 100644 --- a/internal/request/graphql/schema/manager.go +++ b/internal/request/graphql/schema/manager.go @@ -158,6 +158,7 @@ func defaultDirectivesType( schemaTypes.IndexDirective(orderEnum, indexFieldInput), schemaTypes.PrimaryDirective(), schemaTypes.RelationDirective(), + schemaTypes.MaterializedDirective(), } } diff --git a/internal/request/graphql/schema/types/types.go b/internal/request/graphql/schema/types/types.go index 393f9fe62a..e5ab3c5277 100644 --- a/internal/request/graphql/schema/types/types.go +++ b/internal/request/graphql/schema/types/types.go @@ -51,6 +51,9 @@ const ( DefaultDirectivePropJSON = "json" DefaultDirectivePropBlob = "blob" + MaterializedDirectiveLabel = "materialized" + MaterializedDirectivePropIf = "if" + FieldOrderASC = "ASC" FieldOrderDESC = "DESC" ) @@ -217,6 +220,23 @@ func IndexDirective(orderingEnum *gql.Enum, indexFieldInputObject *gql.InputObje }) } +func MaterializedDirective() *gql.Directive { + return gql.NewDirective(gql.DirectiveConfig{ + Name: MaterializedDirectiveLabel, + Description: `@materialized is a directive that specifies whether a collection is cached or not. + It will default to true if omitted. If multiple @materialized directives are provided, they will be aggregated + with OR logic (if any are true, the collection will be cached).`, + Args: gql.FieldConfigArgument{ + MaterializedDirectivePropIf: &gql.ArgumentConfig{ + Type: gql.Boolean, + }, + }, + Locations: []string{ + gql.DirectiveLocationSchema, + }, + }) +} + func CRDTEnum() *gql.Enum { return gql.NewEnum(gql.EnumConfig{ Name: "CRDTType", diff --git a/tests/clients/cli/wrapper.go b/tests/clients/cli/wrapper.go index 76b99ed69f..143a1e3534 100644 --- a/tests/clients/cli/wrapper.go +++ b/tests/clients/cli/wrapper.go @@ -281,6 +281,25 @@ func (w *Wrapper) AddView( return defs, nil } +func (w *Wrapper) RefreshViews(ctx context.Context, options client.CollectionFetchOptions) error { + args := []string{"client", "view", "refresh"} + if options.Name.HasValue() { + args = append(args, "--name", options.Name.Value()) + } + if options.SchemaVersionID.HasValue() { + args = append(args, "--version", options.SchemaVersionID.Value()) + } + if options.SchemaRoot.HasValue() { + args = append(args, "--schema", options.SchemaRoot.Value()) + } + if options.IncludeInactive.HasValue() { + args = append(args, "--get-inactive", strconv.FormatBool(options.IncludeInactive.Value())) + } + + _, err := w.cmd.execute(ctx, args) + return err +} + func (w *Wrapper) SetMigration(ctx context.Context, config client.LensConfig) error { args := []string{"client", "schema", "migration", "set"} diff --git a/tests/clients/http/wrapper.go b/tests/clients/http/wrapper.go index 734b7f2b28..f931732f09 100644 --- a/tests/clients/http/wrapper.go +++ b/tests/clients/http/wrapper.go @@ -134,6 +134,10 @@ func (w *Wrapper) AddView( return w.client.AddView(ctx, query, sdl, transform) } +func (w *Wrapper) RefreshViews(ctx context.Context, opts client.CollectionFetchOptions) error { + return w.client.RefreshViews(ctx, opts) +} + func (w *Wrapper) SetMigration(ctx context.Context, config client.LensConfig) error { return w.client.SetMigration(ctx, config) } diff --git a/tests/integration/collection_description/simple_test.go b/tests/integration/collection_description/simple_test.go index 1070e8cd99..ab65d5d647 100644 --- a/tests/integration/collection_description/simple_test.go +++
b/tests/integration/collection_description/simple_test.go @@ -30,8 +30,9 @@ func TestColDescrSimpleCreatesColGivenEmptyType(t *testing.T) { testUtils.GetCollections{ ExpectedResults: []client.CollectionDescription{ { - ID: 1, - Name: immutable.Some("Users"), + ID: 1, + Name: immutable.Some("Users"), + IsMaterialized: true, }, }, }, diff --git a/tests/integration/collection_description/updates/replace/materialized_test.go b/tests/integration/collection_description/updates/replace/materialized_test.go new file mode 100644 index 0000000000..b8fe87a4e4 --- /dev/null +++ b/tests/integration/collection_description/updates/replace/materialized_test.go @@ -0,0 +1,121 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package replace + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/client" + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestColDescrUpdateReplaceIsMaterialized_GivenFalseAndCollection_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/1/IsMaterialized", "value": false } + ] + `, + ExpectedError: "non-materialized collections are not supported. Collection: User", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestColDescrUpdateReplaceIsMaterialized_GivenFalseAndView(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateView{ + Query: ` + User { + name + } + `, + SDL: ` + type UserView { + name: String + } + `, + }, + testUtils.CreateDoc{ + // Create John when the view is materialized + DocMap: map[string]any{ + "name": "John", + }, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/2/IsMaterialized", "value": false } + ] + `, + }, + testUtils.CreateDoc{ + // Create Fred when the view is not materialized, noting that there is no `RefreshView` + // call after this action, meaning that if the view was still materialized Fred would not + // be returned by the query. 
+ DocMap: map[string]any{ + "name": "Fred", + }, + }, + testUtils.GetCollections{ + FilterOptions: client.CollectionFetchOptions{ + Name: immutable.Some("UserView"), + }, + ExpectedResults: []client.CollectionDescription{ + { + Name: immutable.Some("UserView"), + IsMaterialized: false, + }, + }, + }, + testUtils.Request{ + Request: `query { + User { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + { + "name": "John", + }, + { + "name": "Fred", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/updates/replace/name_test.go b/tests/integration/collection_description/updates/replace/name_test.go index e2fe9886d1..0993b850a1 100644 --- a/tests/integration/collection_description/updates/replace/name_test.go +++ b/tests/integration/collection_description/updates/replace/name_test.go @@ -44,8 +44,9 @@ func TestColDescrUpdateReplaceName_GivenExistingName(t *testing.T) { testUtils.GetCollections{ ExpectedResults: []client.CollectionDescription{ { - ID: 1, - Name: immutable.Some("Actors"), + ID: 1, + Name: immutable.Some("Actors"), + IsMaterialized: true, }, }, }, @@ -180,11 +181,13 @@ func TestColDescrUpdateReplaceName_RemoveExistingName(t *testing.T) { }, ExpectedResults: []client.CollectionDescription{ { - ID: 1, + ID: 1, + IsMaterialized: true, }, { - ID: 2, - Name: immutable.Some("Actors"), + ID: 2, + Name: immutable.Some("Actors"), + IsMaterialized: true, Sources: []any{ &client.CollectionSource{ SourceCollectionID: 1, diff --git a/tests/integration/collection_description/updates/replace/query_source_query_test.go b/tests/integration/collection_description/updates/replace/query_source_query_test.go index 1891923b46..cf93d9bc80 100644 --- a/tests/integration/collection_description/updates/replace/query_source_query_test.go +++ b/tests/integration/collection_description/updates/replace/query_source_query_test.go @@ -41,7 +41,7 @@ func TestColDescrUpdateReplaceQuerySourceQuery(t *testing.T) { } `, SDL: ` - type View { + type View @materialized(if: false) { name: String } `, @@ -105,7 +105,7 @@ func TestColDescrUpdateReplaceQuerySourceQueryName(t *testing.T) { } `, SDL: ` - type View { + type View @materialized(if: false) { name: String } `, diff --git a/tests/integration/collection_description/updates/replace/query_source_transform_test.go b/tests/integration/collection_description/updates/replace/query_source_transform_test.go index a66e3bc3b5..b2058c6286 100644 --- a/tests/integration/collection_description/updates/replace/query_source_transform_test.go +++ b/tests/integration/collection_description/updates/replace/query_source_transform_test.go @@ -58,7 +58,7 @@ func TestColDescrUpdateReplaceQuerySourceTransform(t *testing.T) { } `, SDL: ` - type UserView { + type UserView @materialized(if: false) { fullName: String } `, diff --git a/tests/integration/collection_description/updates/replace/view_policy_test.go b/tests/integration/collection_description/updates/replace/view_policy_test.go new file mode 100644 index 0000000000..db24ff2148 --- /dev/null +++ b/tests/integration/collection_description/updates/replace/view_policy_test.go @@ -0,0 +1,95 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package replace + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +// todo: The inverse of this test is not currently possible, make sure it is also tested when + // resolving https://github.com/sourcenetwork/defradb/issues/2983 +func TestColDescrUpdateReplaceIsMaterialized_GivenPolicyOnNonMaterializedView_Errors(t *testing.T) { + test := testUtils.TestCase{ + SupportedViewTypes: immutable.Some([]testUtils.ViewType{ + testUtils.CachelessViewType, + }), + Actions: []any{ + testUtils.AddPolicy{ + Identity: immutable.Some(1), + Policy: ` + name: test + description: a test policy which marks a collection in a database as a resource + + actor: + name: actor + + resources: + userView: + permissions: + read: + expr: owner + reader + write: + expr: owner + + relations: + owner: + types: + - actor + reader: + types: + - actor + admin: + manages: + - reader + types: + - actor + `, + ExpectedPolicyID: "7a698a9c5fe74a5854c2e1e8d00c606926c64ad883a157db2f345749e8609fcb", + }, + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateView{ + Query: ` + User { + name + } + `, + SDL: ` + type UserView @policy( + id: "7a698a9c5fe74a5854c2e1e8d00c606926c64ad883a157db2f345749e8609fcb", + resource: "userView" + ) @materialized(if: false) { + name: String + } + `, + }, + testUtils.PatchCollection{ + Patch: ` + [ + { "op": "replace", "path": "/2/IsMaterialized", "value": true } + ] + `, + ExpectedError: "materialized views do not support ACP.
Collection: UserView", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/collection_description/with_default_fields_test.go b/tests/integration/collection_description/with_default_fields_test.go index 3821fd6359..4a0f86af77 100644 --- a/tests/integration/collection_description/with_default_fields_test.go +++ b/tests/integration/collection_description/with_default_fields_test.go @@ -38,7 +38,8 @@ func TestCollectionDescription_WithDefaultFieldValues(t *testing.T) { testUtils.GetCollections{ ExpectedResults: []client.CollectionDescription{ { - Name: immutable.Some("Users"), + Name: immutable.Some("Users"), + IsMaterialized: true, Fields: []client.CollectionFieldDescription{ { ID: 0, diff --git a/tests/integration/explain/debug/with_view_test.go b/tests/integration/explain/debug/with_view_test.go index 43d83a4dee..e04beb6e70 100644 --- a/tests/integration/explain/debug/with_view_test.go +++ b/tests/integration/explain/debug/with_view_test.go @@ -13,6 +13,8 @@ package test_explain_debug import ( "testing" + "github.com/sourcenetwork/immutable" + testUtils "github.com/sourcenetwork/defradb/tests/integration" ) @@ -38,8 +40,8 @@ var viewPattern = dataMap{ func TestDebugExplainRequestWithView(t *testing.T) { test := testUtils.TestCase{ - - Description: "Explain (debug) request with view", + SupportedViewTypes: immutable.Some([]testUtils.ViewType{testUtils.CachelessViewType}), + Description: "Explain (debug) request with view", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -55,7 +57,7 @@ func TestDebugExplainRequestWithView(t *testing.T) { } `, SDL: ` - type UserView { + type UserView @materialized(if: false) { name: String } `, diff --git a/tests/integration/explain/debug/with_view_transform_test.go b/tests/integration/explain/debug/with_view_transform_test.go index b2da390e8a..098500d0a4 100644 --- a/tests/integration/explain/debug/with_view_transform_test.go +++ b/tests/integration/explain/debug/with_view_transform_test.go @@ -44,8 +44,8 @@ var transformViewPattern = dataMap{ func TestDebugExplainRequestWithViewWithTransform(t *testing.T) { test := testUtils.TestCase{ - - Description: "Explain (debug) request with view with transform", + SupportedViewTypes: immutable.Some([]testUtils.ViewType{testUtils.CachelessViewType}), + Description: "Explain (debug) request with view with transform", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -61,7 +61,7 @@ func TestDebugExplainRequestWithViewWithTransform(t *testing.T) { } `, SDL: ` - type UserView { + type UserView @materialized(if: false) { fullName: String } `, diff --git a/tests/integration/index/create_unique_test.go b/tests/integration/index/create_unique_test.go index 9d7ffb8471..d44f46e844 100644 --- a/tests/integration/index/create_unique_test.go +++ b/tests/integration/index/create_unique_test.go @@ -339,7 +339,6 @@ func TestUniqueQueryWithIndex_UponAddingDocWithSameDateTime_Error(t *testing.T) }`, ExpectedError: db.NewErrCanNotIndexNonUniqueFields( "bae-7e20b26e-5d93-572a-9724-d8f862efbe63", - errors.NewKV("birthday", testUtils.MustParseTime("2000-07-23T03:00:00-00:00")), ).Error(), }, }, diff --git a/tests/integration/results.go b/tests/integration/results.go index e246aa5aa0..cc54565fe8 100644 --- a/tests/integration/results.go +++ b/tests/integration/results.go @@ -208,6 +208,7 @@ func assertCollectionDescriptions( } require.Equal(s.t, expected.Name, actual.Name) + require.Equal(s.t, expected.IsMaterialized, actual.IsMaterialized) if expected.Indexes != nil || len(actual.Indexes) != 0 { 
// Dont bother asserting this if the expected is nil and the actual is nil/empty. @@ -215,7 +216,7 @@ func assertCollectionDescriptions( require.Equal(s.t, expected.Indexes, actual.Indexes) } - if expected.Sources != nil || len(actual.Sources) != 0 { + if expected.Sources != nil { // Dont bother asserting this if the expected is nil and the actual is nil/empty. // This is to save each test action from having to bother declaring an empty slice (if there are no sources) require.Equal(s.t, expected.Sources, actual.Sources) diff --git a/tests/integration/schema/migrations/simple_test.go b/tests/integration/schema/migrations/simple_test.go index e36c9ec836..6427d053c1 100644 --- a/tests/integration/schema/migrations/simple_test.go +++ b/tests/integration/schema/migrations/simple_test.go @@ -52,10 +52,12 @@ func TestSchemaMigrationDoesNotErrorGivenUnknownSchemaRoots(t *testing.T) { { ID: 1, SchemaVersionID: "does not exist", + IsMaterialized: true, }, { ID: 2, SchemaVersionID: "also does not exist", + IsMaterialized: true, Sources: []any{ &client.CollectionSource{ SourceCollectionID: 1, @@ -129,10 +131,12 @@ func TestSchemaMigrationGetMigrationsReturnsMultiple(t *testing.T) { { ID: 1, SchemaVersionID: "does not exist", + IsMaterialized: true, }, { ID: 2, SchemaVersionID: "also does not exist", + IsMaterialized: true, Sources: []any{ &client.CollectionSource{ SourceCollectionID: 1, @@ -154,10 +158,12 @@ func TestSchemaMigrationGetMigrationsReturnsMultiple(t *testing.T) { }, { ID: 3, + IsMaterialized: true, SchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", }, { ID: 4, + IsMaterialized: true, SchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Sources: []any{ &client.CollectionSource{ @@ -233,10 +239,12 @@ func TestSchemaMigrationReplacesExistingMigationBasedOnSourceID(t *testing.T) { { ID: 1, SchemaVersionID: "a", + IsMaterialized: true, }, { ID: 2, SchemaVersionID: "b", + IsMaterialized: true, Sources: []any{ &client.CollectionSource{ SourceCollectionID: 1, @@ -259,6 +267,7 @@ func TestSchemaMigrationReplacesExistingMigationBasedOnSourceID(t *testing.T) { { ID: 3, SchemaVersionID: "c", + IsMaterialized: true, Sources: []any{ &client.CollectionSource{ SourceCollectionID: 1, diff --git a/tests/integration/schema/migrations/with_txn_test.go b/tests/integration/schema/migrations/with_txn_test.go index 7fe80263a7..c9e93be52c 100644 --- a/tests/integration/schema/migrations/with_txn_test.go +++ b/tests/integration/schema/migrations/with_txn_test.go @@ -52,10 +52,12 @@ func TestSchemaMigrationGetMigrationsWithTxn(t *testing.T) { { ID: 1, SchemaVersionID: "does not exist", + IsMaterialized: true, }, { ID: 2, SchemaVersionID: "also does not exist", + IsMaterialized: true, Sources: []any{ &client.CollectionSource{ SourceCollectionID: 1, diff --git a/tests/integration/schema/one_many_test.go b/tests/integration/schema/one_many_test.go index f29d0bf34d..1ce7848a51 100644 --- a/tests/integration/schema/one_many_test.go +++ b/tests/integration/schema/one_many_test.go @@ -36,7 +36,8 @@ func TestSchemaOneMany_Primary(t *testing.T) { `, ExpectedResults: []client.CollectionDescription{ { - Name: immutable.Some("User"), + Name: immutable.Some("User"), + IsMaterialized: true, Fields: []client.CollectionFieldDescription{ { Name: "_docID", @@ -54,7 +55,8 @@ func TestSchemaOneMany_Primary(t *testing.T) { }, }, { - Name: immutable.Some("Dog"), + Name: immutable.Some("Dog"), + IsMaterialized: true, Fields: []client.CollectionFieldDescription{ { Name: "_docID", @@ 
-97,7 +99,8 @@ func TestSchemaOneMany_SelfReferenceOneFieldLexographicallyFirst(t *testing.T) { `, ExpectedResults: []client.CollectionDescription{ { - Name: immutable.Some("User"), + Name: immutable.Some("User"), + IsMaterialized: true, Fields: []client.CollectionFieldDescription{ { Name: "_docID", @@ -142,7 +145,8 @@ func TestSchemaOneMany_SelfReferenceManyFieldLexographicallyFirst(t *testing.T) `, ExpectedResults: []client.CollectionDescription{ { - Name: immutable.Some("User"), + Name: immutable.Some("User"), + IsMaterialized: true, Fields: []client.CollectionFieldDescription{ { Name: "_docID", @@ -192,7 +196,8 @@ func TestSchemaOneMany_SelfUsingActualName(t *testing.T) { testUtils.GetCollections{ ExpectedResults: []client.CollectionDescription{ { - Name: immutable.Some("User"), + Name: immutable.Some("User"), + IsMaterialized: true, Fields: []client.CollectionFieldDescription{ { Name: request.DocIDFieldName, diff --git a/tests/integration/schema/one_one_test.go b/tests/integration/schema/one_one_test.go index 425d950804..957b329e1e 100644 --- a/tests/integration/schema/one_one_test.go +++ b/tests/integration/schema/one_one_test.go @@ -80,7 +80,8 @@ func TestSchemaOneOne_SelfUsingActualName(t *testing.T) { testUtils.GetCollections{ ExpectedResults: []client.CollectionDescription{ { - Name: immutable.Some("User"), + Name: immutable.Some("User"), + IsMaterialized: true, Fields: []client.CollectionFieldDescription{ { Name: request.DocIDFieldName, diff --git a/tests/integration/schema/simple_test.go b/tests/integration/schema/simple_test.go index dd7e8ce2cd..c3603b4b11 100644 --- a/tests/integration/schema/simple_test.go +++ b/tests/integration/schema/simple_test.go @@ -31,7 +31,8 @@ func TestSchemaSimpleCreatesSchemaGivenEmptyType(t *testing.T) { `, ExpectedResults: []client.CollectionDescription{ { - Name: immutable.Some("Users"), + Name: immutable.Some("Users"), + IsMaterialized: true, Fields: []client.CollectionFieldDescription{ { Name: request.DocIDFieldName, diff --git a/tests/integration/schema/updates/with_schema_branch_test.go b/tests/integration/schema/updates/with_schema_branch_test.go index 312cb34d2a..34417f709f 100644 --- a/tests/integration/schema/updates/with_schema_branch_test.go +++ b/tests/integration/schema/updates/with_schema_branch_test.go @@ -139,12 +139,14 @@ func TestSchemaUpdates_WithBranchingSchema(t *testing.T) { // The original collection version is present, it has no source and is inactive (has no name). ID: 1, SchemaVersionID: schemaVersion1ID, + IsMaterialized: true, }, { // The collection version for schema version 2 is present, it has the first collection as a source // and is inactive. ID: 2, SchemaVersionID: schemaVersion2ID, + IsMaterialized: true, Sources: []any{ &client.CollectionSource{ SourceCollectionID: 1, @@ -157,6 +159,7 @@ func TestSchemaUpdates_WithBranchingSchema(t *testing.T) { ID: 3, Name: immutable.Some("Users"), SchemaVersionID: schemaVersion3ID, + IsMaterialized: true, Sources: []any{ &client.CollectionSource{ SourceCollectionID: 1, @@ -268,12 +271,14 @@ func TestSchemaUpdates_WithPatchOnBranchedSchema(t *testing.T) { // The original collection version is present, it has no source and is inactive (has no name). ID: 1, SchemaVersionID: schemaVersion1ID, + IsMaterialized: true, }, { // The collection version for schema version 2 is present, it has the first collection as a source // and is inactive. 
ID: 2, SchemaVersionID: schemaVersion2ID, + IsMaterialized: true, Sources: []any{ &client.CollectionSource{ SourceCollectionID: 1, @@ -285,6 +290,7 @@ func TestSchemaUpdates_WithPatchOnBranchedSchema(t *testing.T) { // as source. ID: 3, SchemaVersionID: schemaVersion3ID, + IsMaterialized: true, Sources: []any{ &client.CollectionSource{ SourceCollectionID: 1, @@ -297,6 +303,7 @@ func TestSchemaUpdates_WithPatchOnBranchedSchema(t *testing.T) { ID: 4, Name: immutable.Some("Users"), SchemaVersionID: schemaVersion4ID, + IsMaterialized: true, Sources: []any{ &client.CollectionSource{ SourceCollectionID: 3, @@ -378,12 +385,14 @@ func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranch(t *tes // The original collection version is present, it has no source and is inactive (has no name). ID: 1, SchemaVersionID: schemaVersion1ID, + IsMaterialized: true, }, { // The collection version for schema version 2 is present and is active, it has the first collection as a source ID: 2, Name: immutable.Some("Users"), SchemaVersionID: schemaVersion2ID, + IsMaterialized: true, Sources: []any{ &client.CollectionSource{ SourceCollectionID: 1, @@ -395,6 +404,7 @@ func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranch(t *tes // as source. ID: 3, SchemaVersionID: schemaVersion3ID, + IsMaterialized: true, Sources: []any{ &client.CollectionSource{ SourceCollectionID: 1, @@ -510,12 +520,14 @@ func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranchThenPat // The original collection version is present, it has no source and is inactive (has no name). ID: 1, SchemaVersionID: schemaVersion1ID, + IsMaterialized: true, }, { // The collection version for schema version 2 is present, it has the first collection as a source // and is inactive. ID: 2, SchemaVersionID: schemaVersion2ID, + IsMaterialized: true, Sources: []any{ &client.CollectionSource{ SourceCollectionID: 1, @@ -527,6 +539,7 @@ func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranchThenPat // as source. ID: 3, SchemaVersionID: schemaVersion3ID, + IsMaterialized: true, Sources: []any{ &client.CollectionSource{ SourceCollectionID: 1, @@ -539,6 +552,7 @@ func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranchThenPat ID: 4, Name: immutable.Some("Users"), SchemaVersionID: schemaVersion4ID, + IsMaterialized: true, Sources: []any{ &client.CollectionSource{ SourceCollectionID: 2, @@ -584,6 +598,7 @@ collection at a specific version`, // The original collection version is present, it has no source and is inactive (has no name). ID: 1, SchemaVersionID: schemaVersion1ID, + IsMaterialized: true, }, }, }, diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go index 147ef5899b..389de2af35 100644 --- a/tests/integration/test_case.go +++ b/tests/integration/test_case.go @@ -52,6 +52,13 @@ type TestCase struct { // This is to only be used in the very rare cases where we really do want behavioural // differences between acp types, or we need to temporarily document a bug. SupportedACPTypes immutable.Option[[]ACPType] + + // If provided a value, SupportedViewTypes will cause this test to be skipped + // if the active view type is not within the given set. + // + // This is to only be used in the very rare cases where we really do want behavioural + // differences between view types, or we need to temporarily document a bug.
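+ // + // For example, a test that only makes sense without the materialized cache can set: + // SupportedViewTypes: immutable.Some([]ViewType{CachelessViewType})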
+ SupportedViewTypes immutable.Option[[]ViewType] } // SetupComplete is a flag to explicitly notify the change detector at which point @@ -220,6 +227,23 @@ type CreateView struct { ExpectedError string } +// RefreshViews action will execute a call to `store.RefreshViews` using the provided options. +type RefreshViews struct { + // NodeID may hold the ID (index) of a node to refresh the views on. + // + // If a value is not provided the views will be refreshed on all nodes. + NodeID immutable.Option[int] + + // The set of fetch options for the views. + FilterOptions client.CollectionFetchOptions + + // Any error expected from the action. Optional. + // + // String can be a partial, and the test will pass if an error is returned that + // contains this string. + ExpectedError string +} + // CreateDoc will attempt to create the given document in the given collection // using the set [MutationType]. type CreateDoc struct { diff --git a/tests/integration/utils.go b/tests/integration/utils.go index b63692365b..fd6758929d 100644 --- a/tests/integration/utils.go +++ b/tests/integration/utils.go @@ -36,6 +36,7 @@ import ( "github.com/sourcenetwork/defradb/internal/db" "github.com/sourcenetwork/defradb/internal/encryption" "github.com/sourcenetwork/defradb/internal/request/graphql" + "github.com/sourcenetwork/defradb/internal/request/graphql/schema/types" "github.com/sourcenetwork/defradb/net" changeDetector "github.com/sourcenetwork/defradb/tests/change_detector" "github.com/sourcenetwork/defradb/tests/clients" @@ -45,6 +46,7 @@ import ( const ( mutationTypeEnvName = "DEFRA_MUTATION_TYPE" + viewTypeEnvName = "DEFRA_VIEW_TYPE" skipNetworkTestsEnvName = "DEFRA_SKIP_NETWORK_TESTS" ) @@ -75,9 +77,17 @@ const ( GQLRequestMutationType MutationType = "gql" ) +type ViewType string + +const ( + CachelessViewType ViewType = "cacheless" + MaterializedViewType ViewType = "materialized" +) + var ( log = corelog.NewLogger("tests.integration") mutationType MutationType + viewType ViewType // skipNetworkTests will skip any tests that involve network actions skipNetworkTests = false ) @@ -103,6 +113,13 @@ func init() { // mutation type.
mutationType = CollectionSaveMutationType } + + if value, ok := os.LookupEnv(viewTypeEnvName); ok { + viewType = ViewType(value) + } else { + viewType = CachelessViewType + } + if value, ok := os.LookupEnv(skipNetworkTestsEnvName); ok { skipNetworkTests, _ = strconv.ParseBool(value) } @@ -144,6 +161,7 @@ func ExecuteTestCase( skipIfMutationTypeUnsupported(t, testCase.SupportedMutationTypes) skipIfACPTypeUnsupported(t, testCase.SupportedACPTypes) skipIfNetworkTest(t, testCase.Actions) + skipIfViewCacheTypeUnsupported(t, testCase.SupportedViewTypes) var clients []ClientType if httpClient { @@ -293,6 +311,9 @@ func performAction( case CreateView: createView(s, action) + case RefreshViews: + refreshViews(s, action) + case ConfigureMigration: configureMigration(s, action) @@ -1108,6 +1129,19 @@ func createView( s *state, action CreateView, ) { + if viewType == MaterializedViewType { + typeIndex := strings.Index(action.SDL, "\ttype ") + subStrSquigglyIndex := strings.Index(action.SDL[typeIndex:], "{") + squigglyIndex := typeIndex + subStrSquigglyIndex + action.SDL = strings.Join([]string{ + action.SDL[:squigglyIndex], + "@", + types.MaterializedDirectiveLabel, + action.SDL[squigglyIndex:], + "", + }, "") + } + for _, node := range getNodes(action.NodeID, s.nodes) { _, err := node.AddView(s.ctx, action.Query, action.SDL, action.Transform) expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError) @@ -1116,6 +1150,17 @@ func createView( } } +func refreshViews( + s *state, + action RefreshViews, +) { + for _, node := range getNodes(action.NodeID, s.nodes) { + err := node.RefreshViews(s.ctx, action.FilterOptions) + expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) + } +} + // createDoc creates a document using the chosen [mutationType] and caches it in the // test state object. func createDoc( @@ -1711,6 +1756,15 @@ func executeRequest( if action.Variables.HasValue() { options = append(options, client.WithVariables(action.Variables.Value())) } + + if !expectedErrorRaised && viewType == MaterializedViewType { + err := node.RefreshViews(s.ctx, client.CollectionFetchOptions{}) + expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + if expectedErrorRaised { + continue + } + } + result := node.ExecRequest(ctx, action.Request, options...) anyOfByFieldKey := map[docFieldKey][]any{} @@ -2132,6 +2186,22 @@ func skipIfMutationTypeUnsupported(t testing.TB, supportedMutationTypes immutabl } } +func skipIfViewCacheTypeUnsupported(t testing.TB, supportedViewTypes immutable.Option[[]ViewType]) { + if supportedViewTypes.HasValue() { + var isTypeSupported bool + for _, supportedViewType := range supportedViewTypes.Value() { + if supportedViewType == viewType { + isTypeSupported = true + break + } + } + + if !isTypeSupported { + t.Skipf("test does not support given view cache type. Type: %s", viewType) + } + } +} + // skipIfClientTypeUnsupported returns a new set of client types that match the given supported set. // // If supportedClientTypes is none no filtering will take place and the input client set will be returned. 
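For illustration, a minimal sketch of how a test opts into the materialized view matrix added above. The package and test name here are hypothetical; runs are switched between modes via the `DEFRA_VIEW_TYPE` environment variable (e.g. `DEFRA_VIEW_TYPE=materialized make test`).

package example_test

import (
	"testing"

	"github.com/sourcenetwork/immutable"

	testUtils "github.com/sourcenetwork/defradb/tests/integration"
)

func TestView_ExampleGatedToMaterialized(t *testing.T) {
	test := testUtils.TestCase{
		// Skipped by skipIfViewCacheTypeUnsupported unless the run's view type is materialized.
		SupportedViewTypes: immutable.Some([]testUtils.ViewType{
			testUtils.MaterializedViewType,
		}),
		Actions: []any{
			// Explicitly repopulate any materialized views before assertions.
			testUtils.RefreshViews{},
		},
	}
	testUtils.ExecuteTestCase(t, test)
}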
diff --git a/tests/integration/view/one_to_many/simple_test.go b/tests/integration/view/one_to_many/simple_test.go index 2f66c6ee50..47c3784c9d 100644 --- a/tests/integration/view/one_to_many/simple_test.go +++ b/tests/integration/view/one_to_many/simple_test.go @@ -42,7 +42,7 @@ func TestView_OneToMany(t *testing.T) { } `, SDL: ` - type AuthorView { + type AuthorView @materialized(if: false) { name: String books: [BookView] } @@ -118,7 +118,7 @@ func TestView_OneToManyWithMixedSDL_Errors(t *testing.T) { } `, SDL: ` - type AuthorView { + type AuthorView @materialized(if: false) { name: String books: [Book] } @@ -157,7 +157,7 @@ func TestView_OneToManyFromInnerSide_Errors(t *testing.T) { } `, SDL: ` - type AuthorView { + type AuthorView @materialized(if: false) { name: String books: [BookView] } @@ -212,7 +212,7 @@ func TestView_OneToManyOuterToInnerToOuter_Errors(t *testing.T) { } `, SDL: ` - type AuthorView { + type AuthorView @materialized(if: false) { name: String books: [BookView] } @@ -268,7 +268,7 @@ func TestView_OneToManyWithRelationInQueryButNotInSDL(t *testing.T) { } `, SDL: ` - type AuthorView { + type AuthorView @materialized(if: false) { name: String } `, @@ -333,7 +333,7 @@ func TestView_OneToManyMultipleViewsWithEmbeddedSchema(t *testing.T) { } `, SDL: ` - type BookView { + type BookView @materialized(if: false) { name: String author: AuthorView } @@ -352,7 +352,7 @@ func TestView_OneToManyMultipleViewsWithEmbeddedSchema(t *testing.T) { } `, SDL: ` - type BookView2 { + type BookView2 @materialized(if: false) { name: String author: AuthorView2 } @@ -393,7 +393,7 @@ func TestView_OneToManyWithDoubleSidedRelation_Errors(t *testing.T) { } `, SDL: ` - type AuthorView { + type AuthorView @materialized(if: false) { name: String books: [BookView] } @@ -412,7 +412,7 @@ func TestView_OneToManyWithDoubleSidedRelation_Errors(t *testing.T) { } `, SDL: ` - type AuthorViewView { + type AuthorViewView @materialized(if: false) { name: String books: [BookViewView] } diff --git a/tests/integration/view/one_to_many/with_alias_test.go b/tests/integration/view/one_to_many/with_alias_test.go index 29101fbc63..8be5d6a72c 100644 --- a/tests/integration/view/one_to_many/with_alias_test.go +++ b/tests/integration/view/one_to_many/with_alias_test.go @@ -42,7 +42,7 @@ func TestView_OneToManyWithAliasOnOuter(t *testing.T) { } `, SDL: ` - type AuthorView { + type AuthorView @materialized(if: false) { fullName: String books: [BookView] } @@ -118,7 +118,7 @@ func TestView_OneToManyWithAliasOnInner(t *testing.T) { } `, SDL: ` - type AuthorView { + type AuthorView @materialized(if: false) { name: String books: [BookView] } diff --git a/tests/integration/view/one_to_many/with_count_test.go b/tests/integration/view/one_to_many/with_count_test.go index a679c53f6b..eddd6e8bb1 100644 --- a/tests/integration/view/one_to_many/with_count_test.go +++ b/tests/integration/view/one_to_many/with_count_test.go @@ -42,7 +42,7 @@ func TestView_OneToManyWithCount_Errors(t *testing.T) { } `, SDL: ` - type AuthorView { + type AuthorView @materialized(if: false) { name: String _count: Int } @@ -108,7 +108,7 @@ func TestView_OneToManyWithAliasedCount(t *testing.T) { } `, SDL: ` - type AuthorView { + type AuthorView @materialized(if: false) { name: String numberOfBooks: Int } @@ -182,7 +182,7 @@ func TestView_OneToManyWithCountInQueryButNotSDL(t *testing.T) { } `, SDL: ` - type AuthorView { + type AuthorView @materialized(if: false) { name: String } `, diff --git a/tests/integration/view/one_to_many/with_introspection_test.go 
b/tests/integration/view/one_to_many/with_introspection_test.go index 8a8eeeb12d..b7aeb4a7bc 100644 --- a/tests/integration/view/one_to_many/with_introspection_test.go +++ b/tests/integration/view/one_to_many/with_introspection_test.go @@ -43,7 +43,7 @@ func TestView_OneToMany_GQLIntrospectionTest(t *testing.T) { } `, SDL: ` - type AuthorView { + type AuthorView @materialized(if: false) { name: String books: [BookView] } diff --git a/tests/integration/view/one_to_many/with_transform_test.go b/tests/integration/view/one_to_many/with_transform_test.go index f0ceeee197..4d9f9addf3 100644 --- a/tests/integration/view/one_to_many/with_transform_test.go +++ b/tests/integration/view/one_to_many/with_transform_test.go @@ -46,7 +46,7 @@ func TestView_OneToManyWithTransformOnOuter(t *testing.T) { } `, SDL: ` - type AuthorView { + type AuthorView @materialized(if: false) { fullName: String books: [BookView] } @@ -96,8 +96,8 @@ func TestView_OneToManyWithTransformOnOuter(t *testing.T) { "AuthorView": []map[string]any{ { "fullName": "Ferdowsi", - "books": []any{ - map[string]any{ + "books": []map[string]any{ + { "name": "Shahnameh", }, }, @@ -129,7 +129,7 @@ func TestView_OneToManyWithTransformAddingInnerDocs(t *testing.T) { } `, SDL: ` - type AuthorView { + type AuthorView @materialized(if: false) { name: String books: [BookView] } @@ -177,11 +177,11 @@ func TestView_OneToManyWithTransformAddingInnerDocs(t *testing.T) { "AuthorView": []map[string]any{ { "name": "Ferdowsi", - "books": []any{ - map[string]any{ + "books": []map[string]any{ + { "name": "The Tragedy of Sohrab and Rostam", }, - map[string]any{ + { "name": "The Legend of Seyavash", }, }, diff --git a/tests/integration/view/one_to_one/identical_schema_test.go b/tests/integration/view/one_to_one/identical_schema_test.go index aab5cedbbf..4e2bcf0a8f 100644 --- a/tests/integration/view/one_to_one/identical_schema_test.go +++ b/tests/integration/view/one_to_one/identical_schema_test.go @@ -46,7 +46,7 @@ func TestView_OneToOneSameSchema(t *testing.T) { // todo - such a setup appears to work, yet prevents the querying of `RightHand`s as the primary return object // thought - although, perhaps if the view is defined as such, Left and right hands *could* be merged by us into a single table SDL: ` - type HandView { + type HandView @materialized(if: false) { name: String holding: HandView @primary heldBy: HandView @@ -120,7 +120,7 @@ func TestView_OneToOneEmbeddedSchemaIsNotLostOnNextUpdate(t *testing.T) { } `, SDL: ` - type AuthorView { + type AuthorView @materialized(if: false) { name: String books: [BookView] } diff --git a/tests/integration/view/one_to_one/simple_test.go b/tests/integration/view/one_to_one/simple_test.go index 96967acf07..3045ee01f7 100644 --- a/tests/integration/view/one_to_one/simple_test.go +++ b/tests/integration/view/one_to_one/simple_test.go @@ -42,7 +42,7 @@ func TestView_OneToOneDuplicateEmbeddedSchema_Errors(t *testing.T) { } `, SDL: ` - type AuthorView { + type AuthorView @materialized(if: false) { name: String books: [BookView] } @@ -63,7 +63,7 @@ func TestView_OneToOneDuplicateEmbeddedSchema_Errors(t *testing.T) { } `, SDL: ` - type AuthorAliasView { + type AuthorAliasView @materialized(if: false) { authorName: String books: [BookView] } diff --git a/tests/integration/view/one_to_one/with_restart_test.go b/tests/integration/view/one_to_one/with_restart_test.go index a17886867f..c32134e222 100644 --- a/tests/integration/view/one_to_one/with_restart_test.go +++ b/tests/integration/view/one_to_one/with_restart_test.go @@ 
-42,7 +42,7 @@ func TestView_OneToOneEmbeddedSchemaIsNotLostORestart(t *testing.T) { } `, SDL: ` - type AuthorView { + type AuthorView @materialized(if: false) { name: String books: [BookView] } diff --git a/tests/integration/view/one_to_one/with_transform_test.go b/tests/integration/view/one_to_one/with_transform_test.go index 8fcee174f3..672bea8021 100644 --- a/tests/integration/view/one_to_one/with_transform_test.go +++ b/tests/integration/view/one_to_one/with_transform_test.go @@ -46,7 +46,7 @@ func TestView_OneToOneWithTransformOnOuter(t *testing.T) { } `, SDL: ` - type AuthorView { + type AuthorView @materialized(if: false) { fullName: String book: BookView } diff --git a/tests/integration/view/simple/materialized_test.go b/tests/integration/view/simple/materialized_test.go new file mode 100644 index 0000000000..52d91c7fa5 --- /dev/null +++ b/tests/integration/view/simple/materialized_test.go @@ -0,0 +1,129 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package simple + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestView_SimpleMaterialized_DoesNotAutoUpdateOnViewCreate(t *testing.T) { + test := testUtils.TestCase{ + SupportedViewTypes: immutable.Some([]testUtils.ViewType{ + // As the MaterializedViewType will auto refresh views immediately prior + // to executing requests, this test of materialized views actually only + // supports running with the CachelessViewType flag. + testUtils.CachelessViewType, + }), + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.CreateView{ + Query: ` + User { + name + } + `, + SDL: ` + type UserView { + name: String + } + `, + }, + testUtils.Request{ + Request: `query { + UserView { + name + } + }`, + Results: map[string]any{ + // Even though UserView was created after the document was created, the results are + // empty because the view will not populate until RefreshView is called. + "UserView": []map[string]any{}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestView_SimpleMaterialized_DoesNotAutoUpdate(t *testing.T) { + test := testUtils.TestCase{ + SupportedViewTypes: immutable.Some([]testUtils.ViewType{ + // As the MaterializedViewType will auto refresh views immediately prior + // to executing requests, this test of materialized views actually only + // supports running with the CachelessViewType flag. 
+ testUtils.CachelessViewType, + }), + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.CreateView{ + Query: ` + User { + name + } + `, + SDL: ` + type UserView { + name: String + } + `, + }, + testUtils.RefreshViews{}, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred" + }`, + }, + testUtils.Request{ + Request: `query { + UserView { + name + } + }`, + Results: map[string]any{ + "UserView": []map[string]any{ + { + "name": "John", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/view/simple/simple_test.go b/tests/integration/view/simple/simple_test.go index df811a528f..4bad62c5ee 100644 --- a/tests/integration/view/simple/simple_test.go +++ b/tests/integration/view/simple/simple_test.go @@ -34,7 +34,7 @@ func TestView_Simple(t *testing.T) { } `, SDL: ` - type UserView { + type UserView @materialized(if: false) { name: String } `, @@ -82,7 +82,7 @@ func TestView_SimpleMultipleDocs(t *testing.T) { } `, SDL: ` - type UserView { + type UserView @materialized(if: false) { name: String } `, @@ -139,7 +139,7 @@ func TestView_SimpleWithFieldSubset_ErrorsSelectingExcludedField(t *testing.T) { } `, SDL: ` - type UserView { + type UserView @materialized(if: false) { name: String } `, @@ -185,7 +185,7 @@ func TestView_SimpleWithExtraFieldInViewSDL(t *testing.T) { `, // `age` is present in SDL but not the query SDL: ` - type UserView { + type UserView @materialized(if: false) { name: String age: Int } @@ -237,7 +237,7 @@ func TestView_SimpleWithExtraFieldInViewQuery(t *testing.T) { } `, SDL: ` - type UserView { + type UserView @materialized(if: false) { name: String } `, @@ -287,7 +287,7 @@ func TestView_SimpleViewOfView(t *testing.T) { } `, SDL: ` - type UserView { + type UserView @materialized(if: false) { name: String } `, @@ -299,7 +299,7 @@ func TestView_SimpleViewOfView(t *testing.T) { } `, SDL: ` - type UserViewView { + type UserViewView @materialized(if: false) { name: String } `, diff --git a/tests/integration/view/simple/with_alias_test.go b/tests/integration/view/simple/with_alias_test.go index b7818e25be..9e24dca38f 100644 --- a/tests/integration/view/simple/with_alias_test.go +++ b/tests/integration/view/simple/with_alias_test.go @@ -34,7 +34,7 @@ func TestView_SimpleWithAlias(t *testing.T) { } `, SDL: ` - type UserView { + type UserView @materialized(if: false) { fullname: String } `, diff --git a/tests/integration/view/simple/with_default_value_test.go b/tests/integration/view/simple/with_default_value_test.go index a7f5aa660c..d1d545dfea 100644 --- a/tests/integration/view/simple/with_default_value_test.go +++ b/tests/integration/view/simple/with_default_value_test.go @@ -34,7 +34,7 @@ func TestView_SimpleWithDefaultValue_DoesNotSetFieldValue(t *testing.T) { } `, SDL: ` - type UserView { + type UserView @materialized(if: false) { name: String age: Int @default(int: 40) } diff --git a/tests/integration/view/simple/with_filter_test.go b/tests/integration/view/simple/with_filter_test.go index 58dfadc926..1d3194f1c0 100644 --- a/tests/integration/view/simple/with_filter_test.go +++ b/tests/integration/view/simple/with_filter_test.go @@ -34,7 +34,7 @@ func TestView_SimpleWithFilter(t *testing.T) { } `, SDL: ` - type UserView { + type UserView @materialized(if: false) { name: String } `, @@ -91,7 +91,7 @@ func TestView_SimpleWithFilterOnViewAndQuery(t *testing.T) { } `, SDL: ` - type UserView { + type UserView 
@materialized(if: false) { name: String age: Int } diff --git a/tests/integration/view/simple/with_introspection_test.go b/tests/integration/view/simple/with_introspection_test.go index c8c45b9e8a..0ce928b6f4 100644 --- a/tests/integration/view/simple/with_introspection_test.go +++ b/tests/integration/view/simple/with_introspection_test.go @@ -35,7 +35,7 @@ func TestView_Simple_GQLIntrospectionTest(t *testing.T) { } `, SDL: ` - type UserView { + type UserView @materialized(if: false) { name: String } `, diff --git a/tests/integration/view/simple/with_transform_test.go b/tests/integration/view/simple/with_transform_test.go index 118d0b3991..5a29f73afc 100644 --- a/tests/integration/view/simple/with_transform_test.go +++ b/tests/integration/view/simple/with_transform_test.go @@ -38,7 +38,7 @@ func TestView_SimpleWithTransform(t *testing.T) { } `, SDL: ` - type UserView { + type UserView @materialized(if: false) { fullName: String } `, @@ -111,7 +111,7 @@ func TestView_SimpleWithMultipleTransforms(t *testing.T) { } `, SDL: ` - type UserView { + type UserView @materialized(if: false) { fullName: String age: Int } @@ -196,7 +196,7 @@ func TestView_SimpleWithTransformReturningMoreDocsThanInput(t *testing.T) { } `, SDL: ` - type UserView { + type UserView @materialized(if: false) { name: String } `, @@ -271,7 +271,7 @@ func TestView_SimpleWithTransformReturningFewerDocsThanInput(t *testing.T) { } `, SDL: ` - type UserView { + type UserView @materialized(if: false) { name: String } `, From e33140bfba6bd5c7a47acd1ac602aba749632686 Mon Sep 17 00:00:00 2001 From: Shahzad Lone Date: Tue, 17 Sep 2024 03:28:31 -0400 Subject: [PATCH 17/71] ci(i): Fix view-type typo to avoid coverage file name clash (#3015) ## Relevant issue(s) Resolves #3014 ## Description A run failed due to the code-cov upload file names clashing: https://github.com/sourcenetwork/defradb/actions/runs/10893445627/job/30228379591 Happened in merge commit: [#4989901](https://github.com/sourcenetwork/defradb/commit/49899011a75999543b246d9a852526aa96f60e5c) Just needed to fix the typo introduced in [`15f244d` (#3000)](https://github.com/sourcenetwork/defradb/pull/3000/commits/15f244d313e33abfa3704cc966560db6cb36276b) It should be `matrix.view-type`, not `matrix.matrix.view-type`; my bad, I missed it even while re-reviewing. --- .github/workflows/test-and-upload-coverage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test-and-upload-coverage.yml b/.github/workflows/test-and-upload-coverage.yml index c002f2ec08..d1fca2a943 100644 --- a/.github/workflows/test-and-upload-coverage.yml +++ b/.github/workflows/test-and-upload-coverage.yml @@ -222,7 +222,7 @@ jobs: _${{ matrix.mutation-type }}\ _${{ matrix.lens-type }}\ _${{ matrix.acp-type }}\ - _${{ matrix.matrix.view-type }}\ + _${{ matrix.view-type }}\ _${{ matrix.database-encryption }}\ " path: coverage.txt From dfee791d68d1a25b1c31233e66077186fdc93c25 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 17 Sep 2024 04:01:59 -0400 Subject: [PATCH 18/71] bot: Update dependencies (bulk dependabot PRs) 16-09-2024 (#3011) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In addition to the PRs below, an audit fix was done to resolve a vulnerability.
✅ This PR was created by combining the following PRs: #3009 bot: Bump github.com/cometbft/cometbft from 0.38.10 to 0.38.12 #3008 bot: Bump go.opentelemetry.io/otel/metric from 1.29.0 to 1.30.0 #3007 bot: Bump google.golang.org/grpc from 1.66.0 to 1.66.2 #3005 bot: Bump @typescript-eslint/eslint-plugin from 8.4.0 to 8.5.0 in /playground #3004 bot: Bump vite from 5.4.3 to 5.4.5 in /playground #3003 bot: Bump @types/react from 18.3.5 to 18.3.6 in /playground #3002 bot: Bump eslint-plugin-react-refresh from 0.4.11 to 0.4.12 in /playground ⚠️ The following PRs were resolved manually due to merge conflicts: #3006 bot: Bump go.opentelemetry.io/otel/sdk/metric from 1.29.0 to 1.30.0 #3001 bot: Bump typescript from 5.5.4 to 5.6.2 in /playground --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- go.mod | 29 ++-- go.sum | 62 +++++---- playground/package-lock.json | 262 ++++++++--------------------------- playground/package.json | 10 +- 4 files changed, 107 insertions(+), 256 deletions(-) diff --git a/go.mod b/go.mod index 4a033fa5eb..a23a90d299 100644 --- a/go.mod +++ b/go.mod @@ -58,12 +58,12 @@ require ( github.com/valyala/fastjson v1.6.4 github.com/vito/go-sse v1.1.2 github.com/zalando/go-keyring v0.2.5 - go.opentelemetry.io/otel/metric v1.29.0 - go.opentelemetry.io/otel/sdk/metric v1.29.0 + go.opentelemetry.io/otel/metric v1.30.0 + go.opentelemetry.io/otel/sdk/metric v1.30.0 go.uber.org/zap v1.27.0 golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa golang.org/x/term v0.24.0 - google.golang.org/grpc v1.66.0 + google.golang.org/grpc v1.66.2 google.golang.org/protobuf v1.34.2 ) @@ -102,7 +102,7 @@ require ( github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect github.com/bgentry/speakeasy v0.1.1-0.20220910012023-760eaf8b6816 // indirect github.com/blang/semver/v4 v4.0.0 // indirect - github.com/btcsuite/btcd/btcec/v2 v2.3.2 // indirect + github.com/btcsuite/btcd/btcec/v2 v2.3.4 // indirect github.com/btcsuite/btcd/chaincfg/chainhash v1.0.2 // indirect github.com/bytecodealliance/wasmtime-go/v15 v15.0.0 // indirect github.com/cenkalti/backoff v2.2.1+incompatible // indirect @@ -112,13 +112,14 @@ require ( github.com/chzyer/readline v1.5.1 // indirect github.com/cloudflare/circl v1.3.7 // indirect github.com/cockroachdb/apd/v2 v2.0.2 // indirect - github.com/cockroachdb/errors v1.11.1 // indirect + github.com/cockroachdb/errors v1.11.3 // indirect + github.com/cockroachdb/fifo v0.0.0-20240606204812-0bbfbd93a7ce // indirect github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b // indirect - github.com/cockroachdb/pebble v1.1.0 // indirect + github.com/cockroachdb/pebble v1.1.1 // indirect github.com/cockroachdb/redact v1.1.5 // indirect github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 // indirect - github.com/cometbft/cometbft v0.38.10 // indirect - github.com/cometbft/cometbft-db v0.9.1 // indirect + github.com/cometbft/cometbft v0.38.12 // indirect + github.com/cometbft/cometbft-db v0.11.0 // indirect github.com/containerd/cgroups v1.1.0 // indirect github.com/coreos/go-systemd/v22 v22.5.0 // indirect github.com/cosmos/btcutil v1.0.5 // indirect @@ -309,7 +310,7 @@ require ( github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/polydawn/refmt v0.89.0 // indirect github.com/pquerna/cachecontrol v0.1.0 // indirect - github.com/prometheus/client_golang v1.20.0 // indirect + github.com/prometheus/client_golang 
v1.20.1 // indirect github.com/prometheus/client_model v0.6.1 // indirect github.com/prometheus/common v0.55.0 // indirect github.com/prometheus/procfs v0.15.1 // indirect @@ -319,7 +320,7 @@ require ( github.com/raulk/go-watchdog v1.3.0 // indirect github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect github.com/rogpeppe/go-internal v1.12.0 // indirect - github.com/rs/cors v1.11.0 // indirect + github.com/rs/cors v1.11.1 // indirect github.com/rs/zerolog v1.32.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/sagikazarmark/locafero v0.4.0 // indirect @@ -347,13 +348,13 @@ require ( github.com/x448/float16 v0.8.4 // indirect github.com/zondax/hid v0.9.2 // indirect github.com/zondax/ledger-go v0.14.3 // indirect - go.etcd.io/bbolt v1.3.8 // indirect + go.etcd.io/bbolt v1.3.10 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.52.0 // indirect - go.opentelemetry.io/otel v1.29.0 // indirect - go.opentelemetry.io/otel/sdk v1.29.0 // indirect - go.opentelemetry.io/otel/trace v1.29.0 // indirect + go.opentelemetry.io/otel v1.30.0 // indirect + go.opentelemetry.io/otel/sdk v1.30.0 // indirect + go.opentelemetry.io/otel/trace v1.30.0 // indirect go.uber.org/dig v1.18.0 // indirect go.uber.org/fx v1.22.2 // indirect go.uber.org/mock v0.4.0 // indirect diff --git a/go.sum b/go.sum index 67e38ff226..cdd939b06e 100644 --- a/go.sum +++ b/go.sum @@ -300,10 +300,10 @@ github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ= github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g= github.com/btcsuite/btcd v0.22.0-beta h1:LTDpDKUM5EeOFBPM8IXpinEcmZ6FWfNZbE3lfrfdnWo= -github.com/btcsuite/btcd/btcec/v2 v2.3.2 h1:5n0X6hX0Zk+6omWcihdYvdAlGf2DfasC0GMf7DClJ3U= -github.com/btcsuite/btcd/btcec/v2 v2.3.2/go.mod h1:zYzJ8etWJQIv1Ogk7OzpWjowwOdXY1W/17j2MW85J04= -github.com/btcsuite/btcd/btcutil v1.1.3 h1:xfbtw8lwpp0G6NwSHb+UE67ryTFHJAiNuipusjXSohQ= -github.com/btcsuite/btcd/btcutil v1.1.3/go.mod h1:UR7dsSJzJUfMmFiiLlIrMq1lS9jh9EdCV7FStZSnpi0= +github.com/btcsuite/btcd/btcec/v2 v2.3.4 h1:3EJjcN70HCu/mwqlUsGK8GcNVyLVxFDlWurTXGPFfiQ= +github.com/btcsuite/btcd/btcec/v2 v2.3.4/go.mod h1:zYzJ8etWJQIv1Ogk7OzpWjowwOdXY1W/17j2MW85J04= +github.com/btcsuite/btcd/btcutil v1.1.6 h1:zFL2+c3Lb9gEgqKNzowKUPQNb8jV7v5Oaodi/AYFd6c= +github.com/btcsuite/btcd/btcutil v1.1.6/go.mod h1:9dFymx8HpuLqBnsPELrImQeTQfKBQqzqGbbV3jK55aE= github.com/btcsuite/btcd/chaincfg/chainhash v1.0.2 h1:KdUfX2zKommPRa+PD0sWZUyXe9w277ABlgELO7H04IM= github.com/btcsuite/btcd/chaincfg/chainhash v1.0.2/go.mod h1:7SFka0XMvUgj3hfZtydOrQY2mwhPclbT2snogU7SQQc= github.com/btcsuite/btcutil v1.0.3-0.20201208143702-a53e38424cce h1:YtWJF7RHm2pYCvA5t0RPmAaLUhREsKuKd+SLhxFbFeQ= @@ -358,21 +358,23 @@ github.com/cockroachdb/apd/v2 v2.0.2/go.mod h1:DDxRlzC2lo3/vSlmSoS7JkqbbrARPuFOG github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= github.com/cockroachdb/datadriven v1.0.3-0.20230413201302-be42291fc80f h1:otljaYPt5hWxV3MUfO5dFPFiOXg9CyG5/kCfayTqsJ4= github.com/cockroachdb/datadriven v1.0.3-0.20230413201302-be42291fc80f/go.mod h1:a9RdTaap04u637JoCzcUoIcDmvwSUtcUFtT/C3kJlTU= -github.com/cockroachdb/errors v1.11.1 
h1:xSEW75zKaKCWzR3OfxXUxgrk/NtT4G1MiOv5lWZazG8= -github.com/cockroachdb/errors v1.11.1/go.mod h1:8MUxA3Gi6b25tYlFEBGLf+D8aISL+M4MIpiWMSNRfxw= +github.com/cockroachdb/errors v1.11.3 h1:5bA+k2Y6r+oz/6Z/RFlNeVCesGARKuC6YymtcDrbC/I= +github.com/cockroachdb/errors v1.11.3/go.mod h1:m4UIW4CDjx+R5cybPsNrRbreomiFqt8o1h1wUVazSd8= +github.com/cockroachdb/fifo v0.0.0-20240606204812-0bbfbd93a7ce h1:giXvy4KSc/6g/esnpM7Geqxka4WSqI1SZc7sMJFd3y4= +github.com/cockroachdb/fifo v0.0.0-20240606204812-0bbfbd93a7ce/go.mod h1:9/y3cnZ5GKakj/H4y9r9GTjCvAFta7KLgSHPJJYc52M= github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b h1:r6VH0faHjZeQy818SGhaone5OnYfxFR/+AzdY3sf5aE= github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b/go.mod h1:Vz9DsVWQQhf3vs21MhPMZpMGSht7O/2vFW2xusFUVOs= -github.com/cockroachdb/pebble v1.1.0 h1:pcFh8CdCIt2kmEpK0OIatq67Ln9uGDYY3d5XnE0LJG4= -github.com/cockroachdb/pebble v1.1.0/go.mod h1:sEHm5NOXxyiAoKWhoFxT8xMgd/f3RA6qUqQ1BXKrh2E= +github.com/cockroachdb/pebble v1.1.1 h1:XnKU22oiCLy2Xn8vp1re67cXg4SAasg/WDt1NtcRFaw= +github.com/cockroachdb/pebble v1.1.1/go.mod h1:4exszw1r40423ZsmkG/09AFEG83I0uDgfujJdbL6kYU= github.com/cockroachdb/redact v1.1.5 h1:u1PMllDkdFfPWaNGMyLD1+so+aq3uUItthCFqzwPJ30= github.com/cockroachdb/redact v1.1.5/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg= github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 h1:zuQyyAKVxetITBuuhv3BI9cMrmStnpT18zmgmTxunpo= github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06/go.mod h1:7nc4anLGjupUW/PeY5qiNYsdNXj7zopG+eqsS7To5IQ= github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= -github.com/cometbft/cometbft v0.38.10 h1:2ePuglchT+j0Iao+cfmt/nw5U7K2lnGDzXSUPGVdXaU= -github.com/cometbft/cometbft v0.38.10/go.mod h1:jHPx9vQpWzPHEAiYI/7EDKaB1NXhK6o3SArrrY8ExKc= -github.com/cometbft/cometbft-db v0.9.1 h1:MIhVX5ja5bXNHF8EYrThkG9F7r9kSfv8BX4LWaxWJ4M= -github.com/cometbft/cometbft-db v0.9.1/go.mod h1:iliyWaoV0mRwBJoizElCwwRA9Tf7jZJOURcRZF9m60U= +github.com/cometbft/cometbft v0.38.12 h1:OWsLZN2KcSSFe8bet9xCn07VwhBnavPea3VyPnNq1bg= +github.com/cometbft/cometbft v0.38.12/go.mod h1:GPHp3/pehPqgX1930HmK1BpBLZPxB75v/dZg8Viwy+o= +github.com/cometbft/cometbft-db v0.11.0 h1:M3Lscmpogx5NTbb1EGyGDaFRdsoLWrUWimFEyf7jej8= +github.com/cometbft/cometbft-db v0.11.0/go.mod h1:GDPJAC/iFHNjmZZPN8V8C1yr/eyityhi2W1hz2MGKSc= github.com/containerd/cgroups v0.0.0-20201119153540-4cbc285b3327/go.mod h1:ZJeTFisyysqgcCdecO57Dj79RfL0LNeGiFUqLYQRYLE= github.com/containerd/cgroups v1.1.0 h1:v8rEWFl6EoqHB+swVNjVoCJE8o3jX7e8nqBGPLaDFBM= github.com/containerd/cgroups v1.1.0/go.mod h1:6ppBcbh/NOOUU+dMKrykgaBnK9lCIBxHqJDGwsa1mIw= @@ -1276,8 +1278,8 @@ github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5Fsn github.com/prometheus/client_golang v1.3.0/go.mod h1:hJaj2vgQTGQmVCsAACORcieXFeDPbaTKGT+JTgUa3og= github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= -github.com/prometheus/client_golang v1.20.0 h1:jBzTZ7B099Rg24tny+qngoynol8LtVYlA2bqx3vEloI= -github.com/prometheus/client_golang v1.20.0/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE= +github.com/prometheus/client_golang v1.20.1 h1:IMJXHOD6eARkQpxo8KkhgEVFlBNm+nkrFUyGlIu7Na8= +github.com/prometheus/client_golang v1.20.1/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE= github.com/prometheus/client_model 
v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= @@ -1322,8 +1324,8 @@ github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/f github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= -github.com/rs/cors v1.11.0 h1:0B9GE/r9Bc2UxRMMtymBkHTenPkHDv0CW4Y98GBY+po= -github.com/rs/cors v1.11.0/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= +github.com/rs/cors v1.11.1 h1:eU3gRzXLRK57F5rKMGMZURNdIG4EoAmX8k94r9wXWHA= +github.com/rs/cors v1.11.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/rs/zerolog v1.32.0 h1:keLypqrlIjaFsbmJOBdB/qvyF8KEtCWHwobLp5l/mQ0= github.com/rs/zerolog v1.32.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= @@ -1516,8 +1518,8 @@ github.com/zondax/hid v0.9.2/go.mod h1:l5wttcP0jwtdLjqjMMWFVEE7d1zO0jvSPA9OPZxWp github.com/zondax/ledger-go v0.14.3 h1:wEpJt2CEcBJ428md/5MgSLsXLBos98sBOyxNmCjfUCw= github.com/zondax/ledger-go v0.14.3/go.mod h1:IKKaoxupuB43g4NxeQmbLXv7T9AlQyie1UpHb342ycI= go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= -go.etcd.io/bbolt v1.3.8 h1:xs88BrvEv273UsB79e0hcVrlUWmS0a8upikMFhSyAtA= -go.etcd.io/bbolt v1.3.8/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw= +go.etcd.io/bbolt v1.3.10 h1:+BqfJTcCzTItrop8mq/lbzL8wSGtj94UO/3U31shqG0= +go.etcd.io/bbolt v1.3.10/go.mod h1:bK3UQLPJZly7IlNmV7uVHJDxfe5aK9Ll93e/74Y9oEQ= go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg= go.opencensus.io v0.18.0/go.mod h1:vKdFvxhtzZ9onBp9VKHK8z/sRpBMnKAsufL7wlDrCOA= go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= @@ -1535,16 +1537,16 @@ go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.4 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.52.0 h1:9l89oX4ba9kHbBol3Xin3leYJ+252h0zszDtBwyKe2A= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.52.0/go.mod h1:XLZfZboOJWHNKUv7eH0inh0E9VV6eWDFB/9yJyTLPp0= -go.opentelemetry.io/otel v1.29.0 h1:PdomN/Al4q/lN6iBJEN3AwPvUiHPMlt93c8bqTG5Llw= -go.opentelemetry.io/otel v1.29.0/go.mod h1:N/WtXPs1CNCUEx+Agz5uouwCba+i+bJGFicT8SR4NP8= -go.opentelemetry.io/otel/metric v1.29.0 h1:vPf/HFWTNkPu1aYeIsc98l4ktOQaL6LeSoeV2g+8YLc= -go.opentelemetry.io/otel/metric v1.29.0/go.mod h1:auu/QWieFVWx+DmQOUMgj0F8LHWdgalxXqvp7BII/W8= -go.opentelemetry.io/otel/sdk v1.29.0 h1:vkqKjk7gwhS8VaWb0POZKmIEDimRCMsopNYnriHyryo= -go.opentelemetry.io/otel/sdk v1.29.0/go.mod h1:pM8Dx5WKnvxLCb+8lG1PRNIDxu9g9b9g59Qr7hfAAok= -go.opentelemetry.io/otel/sdk/metric v1.29.0 h1:K2CfmJohnRgvZ9UAj2/FhIf/okdWcNdBwe1m8xFXiSY= -go.opentelemetry.io/otel/sdk/metric v1.29.0/go.mod h1:6zZLdCl2fkauYoZIOn/soQIDSWFmNSRcICarHfuhNJQ= -go.opentelemetry.io/otel/trace v1.29.0 h1:J/8ZNK4XgR7a21DZUAsbF8pZ5Jcw1VhACmnYt39JTi4= -go.opentelemetry.io/otel/trace v1.29.0/go.mod 
h1:eHl3w0sp3paPkYstJOmAimxhiFXPg+MMTlEh3nsQgWQ= +go.opentelemetry.io/otel v1.30.0 h1:F2t8sK4qf1fAmY9ua4ohFS/K+FUuOPemHUIXHtktrts= +go.opentelemetry.io/otel v1.30.0/go.mod h1:tFw4Br9b7fOS+uEao81PJjVMjW/5fvNCbpsDIXqP0pc= +go.opentelemetry.io/otel/metric v1.30.0 h1:4xNulvn9gjzo4hjg+wzIKG7iNFEaBMX00Qd4QIZs7+w= +go.opentelemetry.io/otel/metric v1.30.0/go.mod h1:aXTfST94tswhWEb+5QjlSqG+cZlmyXy/u8jFpor3WqQ= +go.opentelemetry.io/otel/sdk v1.30.0 h1:cHdik6irO49R5IysVhdn8oaiR9m8XluDaJAs4DfOrYE= +go.opentelemetry.io/otel/sdk v1.30.0/go.mod h1:p14X4Ok8S+sygzblytT1nqG98QG2KYKv++HE0LY/mhg= +go.opentelemetry.io/otel/sdk/metric v1.30.0 h1:QJLT8Pe11jyHBHfSAgYH7kEmT24eX792jZO1bo4BXkM= +go.opentelemetry.io/otel/sdk/metric v1.30.0/go.mod h1:waS6P3YqFNzeP01kuo/MBBYqaoBJl7efRQHOaydhy1Y= +go.opentelemetry.io/otel/trace v1.30.0 h1:7UBkkYzeg3C7kQX8VAidWh2biiQbtAKjyIML8dQ9wmc= +go.opentelemetry.io/otel/trace v1.30.0/go.mod h1:5EyKqTzzmyqB9bwtCCq6pDLktPK6fmGf/Dph+8VI02o= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= @@ -2202,8 +2204,8 @@ google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACu google.golang.org/grpc v1.49.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= google.golang.org/grpc v1.50.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= google.golang.org/grpc v1.50.1/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= -google.golang.org/grpc v1.66.0 h1:DibZuoBznOxbDQxRINckZcUvnCEvrW9pcWIE2yF9r1c= -google.golang.org/grpc v1.66.0/go.mod h1:s3/l6xSSCURdVfAnL+TqCNMyTDAGN6+lZeVxnZR128Y= +google.golang.org/grpc v1.66.2 h1:3QdXkuq3Bkh7w+ywLdLvM56cmGvQHUMZpiCzt6Rqaoo= +google.golang.org/grpc v1.66.2/go.mod h1:s3/l6xSSCURdVfAnL+TqCNMyTDAGN6+lZeVxnZR128Y= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= diff --git a/playground/package-lock.json b/playground/package-lock.json index 71dcf11ed2..4ed925cac5 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -15,17 +15,17 @@ "swagger-ui-react": "^5.17.14" }, "devDependencies": { - "@types/react": "^18.3.5", + "@types/react": "^18.3.6", "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^8.3.0", + "@typescript-eslint/eslint-plugin": "^8.5.0", "@typescript-eslint/parser": "^8.5.0", "@vitejs/plugin-react-swc": "^3.7.0", "eslint": "^9.10.0", "eslint-plugin-react-hooks": "^4.6.2", - "eslint-plugin-react-refresh": "^0.4.9", - "typescript": "^5.5.4", - "vite": "^5.4.3" + "eslint-plugin-react-refresh": "^0.4.12", + "typescript": "^5.6.2", + "vite": "^5.4.5" } }, "node_modules/@babel/runtime": { @@ -59,37 +59,6 @@ "integrity": "sha512-NVf/1YycDMs6+FxS0Tb/W8MjJRDQdXF+tBfDtZ5UZeiRUkTmwKc4vmYCKZTyymfJk1gnMsauvZSX/HiV9jOABw==", "license": "MIT" }, - "node_modules/@codemirror/language": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.0.0.tgz", - "integrity": "sha512-rtjk5ifyMzOna1c7PBu7J1VCt0PvA5wy3o8eMVnxMKb7z8KA7JFecvD04dSn14vj/bBaAbqRsGed5OjtofEnLA==", - "peer": true, - "dependencies": { - "@codemirror/state": 
"^6.0.0", - "@codemirror/view": "^6.0.0", - "@lezer/common": "^1.0.0", - "@lezer/highlight": "^1.0.0", - "@lezer/lr": "^1.0.0", - "style-mod": "^4.0.0" - } - }, - "node_modules/@codemirror/state": { - "version": "6.4.1", - "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.4.1.tgz", - "integrity": "sha512-QkEyUiLhsJoZkbumGZlswmAhA7CBU02Wrz7zvH4SrcifbsqwlXShVXg65f3v/ts57W3dqyamEriMhij1Z3Zz4A==", - "peer": true - }, - "node_modules/@codemirror/view": { - "version": "6.33.0", - "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.33.0.tgz", - "integrity": "sha512-AroaR3BvnjRW8fiZBalAaK+ZzB5usGgI014YKElYZvQdNH5ZIidHlO+cyf/2rWzyBFRkvG6VhiXeAEbC53P2YQ==", - "peer": true, - "dependencies": { - "@codemirror/state": "^6.4.0", - "style-mod": "^4.1.0", - "w3c-keyname": "^2.2.4" - } - }, "node_modules/@emotion/is-prop-valid": { "version": "0.8.8", "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz", @@ -760,30 +729,6 @@ "url": "https://github.com/sponsors/nzakas" } }, - "node_modules/@lezer/common": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.1.tgz", - "integrity": "sha512-yemX0ZD2xS/73llMZIK6KplkjIjf2EvAHcinDi/TfJ9hS25G0388+ClHt6/3but0oOxinTcQHJLDXh6w1crzFQ==", - "peer": true - }, - "node_modules/@lezer/highlight": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.1.tgz", - "integrity": "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA==", - "peer": true, - "dependencies": { - "@lezer/common": "^1.0.0" - } - }, - "node_modules/@lezer/lr": { - "version": "1.4.2", - "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.2.tgz", - "integrity": "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA==", - "peer": true, - "dependencies": { - "@lezer/common": "^1.0.0" - } - }, "node_modules/@motionone/animation": { "version": "10.18.0", "resolved": "https://registry.npmjs.org/@motionone/animation/-/animation-10.18.0.tgz", @@ -2455,7 +2400,7 @@ "version": "15.7.12", "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.12.tgz", "integrity": "sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q==", - "devOptional": true, + "dev": true, "license": "MIT" }, "node_modules/@types/ramda": { @@ -2468,10 +2413,10 @@ } }, "node_modules/@types/react": { - "version": "18.3.5", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.5.tgz", - "integrity": "sha512-WeqMfGJLGuLCqHGYRGHxnKrXcTitc6L/nBUWfWPcTarG3t9PsquqUMuVeXZeca+mglY4Vo5GZjCi0A3Or2lnxA==", - "devOptional": true, + "version": "18.3.6", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.6.tgz", + "integrity": "sha512-CnGaRYNu2iZlkGXGrOYtdg5mLK8neySj0woZ4e2wF/eli2E6Sazmq5X+Nrj6OBrrFVQfJWTUFeqAzoRhWQXYvg==", + "dev": true, "dependencies": { "@types/prop-types": "*", "csstype": "^3.0.2" @@ -2481,7 +2426,7 @@ "version": "18.3.0", "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.0.tgz", "integrity": "sha512-EhwApuTmMBmXuFOikhQLIBUn6uFg81SwLMOAUgodJF14SOBOCMdU04gDoYi0WOJJHD144TL32z4yDqCW3dnkQg==", - "devOptional": true, + "dev": true, "license": "MIT", "dependencies": { "@types/react": "*" @@ -2518,17 +2463,16 @@ "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.4.0", - "resolved": 
"https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.4.0.tgz", - "integrity": "sha512-rg8LGdv7ri3oAlenMACk9e+AR4wUV0yrrG+XKsGKOK0EVgeEDqurkXMPILG2836fW4ibokTB5v4b6Z9+GYQDEw==", + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.5.0.tgz", + "integrity": "sha512-lHS5hvz33iUFQKuPFGheAB84LwcJ60G8vKnEhnfcK1l8kGVLro2SFYW6K0/tj8FUhRJ0VHyg1oAfg50QGbPPHw==", "dev": true, - "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.4.0", - "@typescript-eslint/type-utils": "8.4.0", - "@typescript-eslint/utils": "8.4.0", - "@typescript-eslint/visitor-keys": "8.4.0", + "@typescript-eslint/scope-manager": "8.5.0", + "@typescript-eslint/type-utils": "8.5.0", + "@typescript-eslint/utils": "8.5.0", + "@typescript-eslint/visitor-keys": "8.5.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", @@ -2579,7 +2523,7 @@ } } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { + "node_modules/@typescript-eslint/scope-manager": { "version": "8.5.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.5.0.tgz", "integrity": "sha512-06JOQ9Qgj33yvBEx6tpC8ecP9o860rsR22hWMEd12WcTRrfaFgHr2RB/CA/B+7BMhHkXT4chg2MyboGdFGawYg==", @@ -2596,91 +2540,14 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.5.0.tgz", - "integrity": "sha512-qjkormnQS5wF9pjSi6q60bKUHH44j2APxfh9TQRXK8wbYVeDYYdYJGIROL87LGZZ2gz3Rbmjc736qyL8deVtdw==", - "dev": true, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.5.0.tgz", - "integrity": "sha512-vEG2Sf9P8BPQ+d0pxdfndw3xIXaoSjliG0/Ejk7UggByZPKXmJmw3GW5jV2gHNQNawBUyfahoSiCFVov0Ruf7Q==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.5.0", - "@typescript-eslint/visitor-keys": "8.5.0", - "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^1.3.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.5.0.tgz", - "integrity": "sha512-yTPqMnbAZJNy2Xq2XU8AdtOW9tJIr+UQb64aXB9f3B1498Zx9JorVgFJcZpEc9UBuCCrdzKID2RGAMkYcDtZOw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.5.0", - "eslint-visitor-keys": "^3.4.3" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/scope-manager": { - "version": "8.4.0", - "resolved": 
"https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.4.0.tgz", - "integrity": "sha512-n2jFxLeY0JmKfUqy3P70rs6vdoPjHK8P/w+zJcV3fk0b0BwRXC/zxRTEnAsgYT7MwdQDt/ZEbtdzdVC+hcpF0A==", - "dev": true, - "license": "MIT", - "dependencies": { - "@typescript-eslint/types": "8.4.0", - "@typescript-eslint/visitor-keys": "8.4.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.4.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.4.0.tgz", - "integrity": "sha512-pu2PAmNrl9KX6TtirVOrbLPLwDmASpZhK/XU7WvoKoCUkdtq9zF7qQ7gna0GBZFN0hci0vHaSusiL2WpsQk37A==", + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.5.0.tgz", + "integrity": "sha512-N1K8Ix+lUM+cIDhL2uekVn/ZD7TZW+9/rwz8DclQpcQ9rk4sIL5CAlBC0CugWKREmDjBzI/kQqU4wkg46jWLYA==", "dev": true, - "license": "MIT", "dependencies": { - "@typescript-eslint/typescript-estree": "8.4.0", - "@typescript-eslint/utils": "8.4.0", + "@typescript-eslint/typescript-estree": "8.5.0", + "@typescript-eslint/utils": "8.5.0", "debug": "^4.3.4", "ts-api-utils": "^1.3.0" }, @@ -2698,11 +2565,10 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "8.4.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.4.0.tgz", - "integrity": "sha512-T1RB3KQdskh9t3v/qv7niK6P8yvn7ja1mS7QK7XfRVL6wtZ8/mFs/FHf4fKvTA0rKnqnYxl/uHFNbnEt0phgbw==", + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.5.0.tgz", + "integrity": "sha512-qjkormnQS5wF9pjSi6q60bKUHH44j2APxfh9TQRXK8wbYVeDYYdYJGIROL87LGZZ2gz3Rbmjc736qyL8deVtdw==", "dev": true, - "license": "MIT", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, @@ -2712,14 +2578,13 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.4.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.4.0.tgz", - "integrity": "sha512-kJ2OIP4dQw5gdI4uXsaxUZHRwWAGpREJ9Zq6D5L0BweyOrWsL6Sz0YcAZGWhvKnH7fm1J5YFE1JrQL0c9dd53A==", + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.5.0.tgz", + "integrity": "sha512-vEG2Sf9P8BPQ+d0pxdfndw3xIXaoSjliG0/Ejk7UggByZPKXmJmw3GW5jV2gHNQNawBUyfahoSiCFVov0Ruf7Q==", "dev": true, - "license": "BSD-2-Clause", "dependencies": { - "@typescript-eslint/types": "8.4.0", - "@typescript-eslint/visitor-keys": "8.4.0", + "@typescript-eslint/types": "8.5.0", + "@typescript-eslint/visitor-keys": "8.5.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", @@ -2741,16 +2606,15 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.4.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.4.0.tgz", - "integrity": "sha512-swULW8n1IKLjRAgciCkTCafyTHHfwVQFt8DovmaF69sKbOxTSFMmIZaSHjqO9i/RV0wIblaawhzvtva8Nmm7lQ==", + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.5.0.tgz", + "integrity": "sha512-6yyGYVL0e+VzGYp60wvkBHiqDWOpT63pdMV2CVG4LVDd5uR6q1qQN/7LafBZtAtNIn/mqXjsSeS5ggv/P0iECw==", "dev": true, - "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", - "@typescript-eslint/scope-manager": "8.4.0", - "@typescript-eslint/types": "8.4.0", - "@typescript-eslint/typescript-estree": "8.4.0" + 
"@typescript-eslint/scope-manager": "8.5.0", + "@typescript-eslint/types": "8.5.0", + "@typescript-eslint/typescript-estree": "8.5.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2764,13 +2628,12 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.4.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.4.0.tgz", - "integrity": "sha512-zTQD6WLNTre1hj5wp09nBIDiOc2U5r/qmzo7wxPn4ZgAjHql09EofqhF9WF+fZHzL5aCyaIpPcT2hyxl73kr9A==", + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.5.0.tgz", + "integrity": "sha512-yTPqMnbAZJNy2Xq2XU8AdtOW9tJIr+UQb64aXB9f3B1498Zx9JorVgFJcZpEc9UBuCCrdzKID2RGAMkYcDtZOw==", "dev": true, - "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.4.0", + "@typescript-eslint/types": "8.5.0", "eslint-visitor-keys": "^3.4.3" }, "engines": { @@ -3207,7 +3070,7 @@ "version": "3.1.3", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", - "devOptional": true, + "dev": true, "license": "MIT" }, "node_modules/debounce-promise": { @@ -3459,11 +3322,10 @@ } }, "node_modules/eslint-plugin-react-refresh": { - "version": "0.4.11", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.11.tgz", - "integrity": "sha512-wrAKxMbVr8qhXTtIKfXqAn5SAtRZt0aXxe5P23Fh4pUAdC6XEsybGLB8P0PI4j1yYqOgUEUlzKAGDfo7rJOjcw==", + "version": "0.4.12", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.12.tgz", + "integrity": "sha512-9neVjoGv20FwYtCP6CB1dzR1vr57ZDNOXst21wd2xJ/cTlM2xLq0GWVlSNTdMn/4BtP6cHYBMCSp1wFBJ9jBsg==", "dev": true, - "license": "MIT", "peerDependencies": { "eslint": ">=7" } @@ -4401,11 +4263,10 @@ } }, "node_modules/micromatch": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz", - "integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dev": true, - "license": "MIT", "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" @@ -5644,12 +5505,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/style-mod": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.2.tgz", - "integrity": "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw==", - "peer": true - }, "node_modules/style-value-types": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/style-value-types/-/style-value-types-5.0.0.tgz", @@ -5910,11 +5765,10 @@ } }, "node_modules/typescript": { - "version": "5.5.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz", - "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==", + "version": "5.6.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.2.tgz", + "integrity": "sha512-NW8ByodCSNCwZeghjN3o+JX5OFH0Ojg6sadjEKY4huZ52TqbJTJnDo5+Tw98lSy63NZvi4n+ez5m2u5d4PkZyw==", "dev": true, - "license": "Apache-2.0", "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -6012,9 +5866,9 @@ 
"optional": true }, "node_modules/vite": { - "version": "5.4.3", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.3.tgz", - "integrity": "sha512-IH+nl64eq9lJjFqU+/yrRnrHPVTlgy42/+IzbOdaFDVlyLgI/wDlf+FCobXLX1cT0X5+7LMyH1mIy2xJdLfo8Q==", + "version": "5.4.5", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.5.tgz", + "integrity": "sha512-pXqR0qtb2bTwLkev4SE3r4abCNioP3GkjvIDLlzziPpXtHgiJIjuKl+1GN6ESOT3wMjG3JTeARopj2SwYaHTOA==", "dev": true, "dependencies": { "esbuild": "^0.21.3", @@ -6075,12 +5929,6 @@ "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz", "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==" }, - "node_modules/w3c-keyname": { - "version": "2.2.8", - "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz", - "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==", - "peer": true - }, "node_modules/web-streams-polyfill": { "version": "3.3.3", "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", diff --git a/playground/package.json b/playground/package.json index 640cfe3a3e..62c8494eb2 100644 --- a/playground/package.json +++ b/playground/package.json @@ -17,16 +17,16 @@ "swagger-ui-react": "^5.17.14" }, "devDependencies": { - "@types/react": "^18.3.5", + "@types/react": "^18.3.6", "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^8.3.0", + "@typescript-eslint/eslint-plugin": "^8.5.0", "@typescript-eslint/parser": "^8.5.0", "@vitejs/plugin-react-swc": "^3.7.0", "eslint": "^9.10.0", "eslint-plugin-react-hooks": "^4.6.2", - "eslint-plugin-react-refresh": "^0.4.9", - "typescript": "^5.5.4", - "vite": "^5.4.3" + "eslint-plugin-react-refresh": "^0.4.12", + "typescript": "^5.6.2", + "vite": "^5.4.5" } } From ea668a45f7d9658c46e505004e0e614e5d056702 Mon Sep 17 00:00:00 2001 From: Islam Aliev Date: Tue, 17 Sep 2024 15:28:06 +0200 Subject: [PATCH 19/71] fix: Panic with filter on unique composite index on relation (#3020) ## Relevant issue(s) Resolves #3016 ## Description Fix panic with filter on unique composite index on relation. 
--- internal/db/fetcher/indexer_iterators.go | 3 + .../index/query_with_relation_filter_test.go | 22 +++--- ...th_unique_index_on_relation_filter_test.go | 72 +++++++++++++++++++ 3 files changed, 86 insertions(+), 11 deletions(-) create mode 100644 tests/integration/index/query_with_unique_index_on_relation_filter_test.go diff --git a/internal/db/fetcher/indexer_iterators.go b/internal/db/fetcher/indexer_iterators.go index 5d9da70c9f..e4e69c99c3 100644 --- a/internal/db/fetcher/indexer_iterators.go +++ b/internal/db/fetcher/indexer_iterators.go @@ -141,6 +141,9 @@ func (iter *indexPrefixIterator) Next() (indexIterResult, error) { } func (iter *indexPrefixIterator) Close() error { + if iter.resultIter == nil { + return nil + } return iter.resultIter.Close() } diff --git a/tests/integration/index/query_with_relation_filter_test.go b/tests/integration/index/query_with_relation_filter_test.go index e31685a0cf..deba6946e2 100644 --- a/tests/integration/index/query_with_relation_filter_test.go +++ b/tests/integration/index/query_with_relation_filter_test.go @@ -1018,36 +1018,36 @@ func TestQueryWithIndexOnManyToOne_MultipleViaOneToMany(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryWithIndex_UniqueIndexOnChildWithEmptyParentCollection(t *testing.T) { +func TestQueryWithUniqueIndex_WithFilterOnChildIndexedField_ShouldFetch(t *testing.T) { test := testUtils.TestCase{ Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type Action { - key: String @index(unique: true) - playerActions: [PlayerAction] + type User { + name: String @index(unique: true) + devices: [Device] } - type PlayerAction { - deleted: Boolean - action: Action + type Device { + trusted: Boolean + owner: User } `, }, testUtils.CreateDoc{ CollectionID: 0, DocMap: map[string]any{ - "key": "ACTION_KEY", + "name": "John", }, }, testUtils.Request{ Request: `query { - PlayerAction(filter: {action: {key: {_eq: "ACTION_KEY"}}}) { - deleted + Device(filter: {owner: {name: {_eq: "John"}}}) { + trusted } }`, Results: map[string]any{ - "PlayerAction": []map[string]any{}, + "Device": []map[string]any{}, }, }, }, diff --git a/tests/integration/index/query_with_unique_index_on_relation_filter_test.go b/tests/integration/index/query_with_unique_index_on_relation_filter_test.go new file mode 100644 index 0000000000..05c4b05395 --- /dev/null +++ b/tests/integration/index/query_with_unique_index_on_relation_filter_test.go @@ -0,0 +1,72 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package index + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryWithUniqueCompositeIndex_WithFilterOnIndexedRelation_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + devices: [Device] + } + + type Device { + manufacturer: String + owner: User @index(unique: true, includes: [{name: "manufacturer"}]) + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + DocMap: map[string]any{ + "name": "John", + }, + }, + testUtils.CreateDoc{ + CollectionID: 1, + DocMap: map[string]any{ + "manufacturer": "Apple", + "owner_id": testUtils.NewDocIndex(0, 0), + }, + }, + testUtils.Request{ + Request: `query { + User { + name + devices(filter: {owner_id: {_eq: "bae-1ef746f8-821e-586f-99b2-4cb1fb9b782f"}}) { + manufacturer + } + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + { + "name": "John", + "devices": []map[string]any{ + {"manufacturer": "Apple"}, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From b4773131c503228cfd152f9d97965c27ac923f4f Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Tue, 17 Sep 2024 14:36:56 -0400 Subject: [PATCH 20/71] fix: Handle missing type in an SDL (#3023) ## Relevant issue(s) Resolves #3022 ## Description Handles missing type in an SDL instead of panicking. --- internal/request/graphql/schema/collection.go | 14 +++++--- internal/request/graphql/schema/errors.go | 10 ++++++ tests/integration/schema/nil_type_test.go | 34 +++++++++++++++++++ 3 files changed, 54 insertions(+), 4 deletions(-) create mode 100644 tests/integration/schema/nil_type_test.go diff --git a/internal/request/graphql/schema/collection.go b/internal/request/graphql/schema/collection.go index 15c572921a..5c196f1f0f 100644 --- a/internal/request/graphql/schema/collection.go +++ b/internal/request/graphql/schema/collection.go @@ -454,7 +454,7 @@ func fieldsFromAST( cTypeByFieldNameByObjName map[string]map[string]client.CType, schemaOnly bool, ) ([]client.SchemaFieldDescription, []client.CollectionFieldDescription, error) { - kind, err := astTypeToKind(field.Type) + kind, err := astTypeToKind(hostObjectName, field) if err != nil { return nil, nil, err } @@ -618,8 +618,11 @@ func setCRDTType(field *ast.FieldDefinition, kind client.FieldKind) (client.CTyp return defaultCRDTForFieldKind[kind], nil } -func astTypeToKind(t ast.Type) (client.FieldKind, error) { - switch astTypeVal := t.(type) { +func astTypeToKind( + hostObjectName string, + field *ast.FieldDefinition, +) (client.FieldKind, error) { + switch astTypeVal := field.Type.(type) { case *ast.List: switch innerAstTypeVal := astTypeVal.Type.(type) { case *ast.NonNull: @@ -677,7 +680,10 @@ func astTypeToKind(t ast.Type) (client.FieldKind, error) { return client.FieldKind_None, ErrNonNullNotSupported default: - return client.FieldKind_None, NewErrTypeNotFound(t.String()) + if field.Type == nil { + return client.FieldKind_None, NewErrFieldTypeNotSpecified(hostObjectName, field.Name.Value) + } + return client.FieldKind_None, NewErrTypeNotFound(field.Type.String()) } } diff --git a/internal/request/graphql/schema/errors.go b/internal/request/graphql/schema/errors.go index a5150e291b..41f17bf373 100644 --- a/internal/request/graphql/schema/errors.go +++ b/internal/request/graphql/schema/errors.go @@ -32,6 +32,7 @@ const ( errPolicyInvalidResourceProp string = "policy directive with invalid resource property" errDefaultValueInvalid string = "default value type
must match field type" errDefaultValueNotAllowed string = "default value is not allowed for this field type" + errFieldTypeNotSpecified string = "field type not specified" ) var ( @@ -58,6 +59,7 @@ var ( ErrPolicyWithUnknownArg = errors.New(errPolicyUnknownArgument) ErrPolicyInvalidIDProp = errors.New(errPolicyInvalidIDProp) ErrPolicyInvalidResourceProp = errors.New(errPolicyInvalidResourceProp) + ErrFieldTypeNotSpecified = errors.New(errFieldTypeNotSpecified) ) func NewErrDuplicateField(objectName, fieldName string) error { @@ -155,3 +157,11 @@ func NewErrDefaultValueNotAllowed(fieldName, fieldType string) error { errors.NewKV("Type", fieldType), ) } + +func NewErrFieldTypeNotSpecified(objectName, fieldName string) error { + return errors.New( + errFieldTypeNotSpecified, + errors.NewKV("Object", objectName), + errors.NewKV("Field", fieldName), + ) +} diff --git a/tests/integration/schema/nil_type_test.go b/tests/integration/schema/nil_type_test.go new file mode 100644 index 0000000000..c00dc204b1 --- /dev/null +++ b/tests/integration/schema/nil_type_test.go @@ -0,0 +1,34 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package schema + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestSchema_WithMissingType_Errors(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: + } + `, + ExpectedError: "field type not specified. 
Object: User, Field: name", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From f5b67e4a93f2f9354c4e2e27eae5a4ec146be26f Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 17 Sep 2024 22:56:11 -0400 Subject: [PATCH 21/71] bot: Update dependencies (bulk dependabot PRs) 17-09-2024 (#3019) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by combining the following PRs: #3018 bot: Bump vite from 5.4.5 to 5.4.6 in /playground #3017 bot: Bump @typescript-eslint/eslint-plugin from 8.5.0 to 8.6.0 in /playground --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 301 +++++++++++++++++++++++++++++++---- playground/package.json | 4 +- 2 files changed, 276 insertions(+), 29 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 4ed925cac5..da6e09b004 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -18,14 +18,14 @@ "@types/react": "^18.3.6", "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^8.5.0", + "@typescript-eslint/eslint-plugin": "^8.6.0", "@typescript-eslint/parser": "^8.5.0", "@vitejs/plugin-react-swc": "^3.7.0", "eslint": "^9.10.0", "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-react-refresh": "^0.4.12", "typescript": "^5.6.2", - "vite": "^5.4.5" + "vite": "^5.4.6" } }, "node_modules/@babel/runtime": { @@ -59,6 +59,37 @@ "integrity": "sha512-NVf/1YycDMs6+FxS0Tb/W8MjJRDQdXF+tBfDtZ5UZeiRUkTmwKc4vmYCKZTyymfJk1gnMsauvZSX/HiV9jOABw==", "license": "MIT" }, + "node_modules/@codemirror/language": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.0.0.tgz", + "integrity": "sha512-rtjk5ifyMzOna1c7PBu7J1VCt0PvA5wy3o8eMVnxMKb7z8KA7JFecvD04dSn14vj/bBaAbqRsGed5OjtofEnLA==", + "peer": true, + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/common": "^1.0.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0", + "style-mod": "^4.0.0" + } + }, + "node_modules/@codemirror/state": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.4.1.tgz", + "integrity": "sha512-QkEyUiLhsJoZkbumGZlswmAhA7CBU02Wrz7zvH4SrcifbsqwlXShVXg65f3v/ts57W3dqyamEriMhij1Z3Zz4A==", + "peer": true + }, + "node_modules/@codemirror/view": { + "version": "6.33.0", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.33.0.tgz", + "integrity": "sha512-AroaR3BvnjRW8fiZBalAaK+ZzB5usGgI014YKElYZvQdNH5ZIidHlO+cyf/2rWzyBFRkvG6VhiXeAEbC53P2YQ==", + "peer": true, + "dependencies": { + "@codemirror/state": "^6.4.0", + "style-mod": "^4.1.0", + "w3c-keyname": "^2.2.4" + } + }, "node_modules/@emotion/is-prop-valid": { "version": "0.8.8", "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz", @@ -729,6 +760,30 @@ "url": "https://github.com/sponsors/nzakas" } }, + "node_modules/@lezer/common": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.1.tgz", + "integrity": "sha512-yemX0ZD2xS/73llMZIK6KplkjIjf2EvAHcinDi/TfJ9hS25G0388+ClHt6/3but0oOxinTcQHJLDXh6w1crzFQ==", + "peer": true + }, + "node_modules/@lezer/highlight": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.1.tgz", + "integrity": 
"sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA==", + "peer": true, + "dependencies": { + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@lezer/lr": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.2.tgz", + "integrity": "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA==", + "peer": true, + "dependencies": { + "@lezer/common": "^1.0.0" + } + }, "node_modules/@motionone/animation": { "version": "10.18.0", "resolved": "https://registry.npmjs.org/@motionone/animation/-/animation-10.18.0.tgz", @@ -2400,7 +2455,7 @@ "version": "15.7.12", "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.12.tgz", "integrity": "sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q==", - "dev": true, + "devOptional": true, "license": "MIT" }, "node_modules/@types/ramda": { @@ -2416,7 +2471,7 @@ "version": "18.3.6", "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.6.tgz", "integrity": "sha512-CnGaRYNu2iZlkGXGrOYtdg5mLK8neySj0woZ4e2wF/eli2E6Sazmq5X+Nrj6OBrrFVQfJWTUFeqAzoRhWQXYvg==", - "dev": true, + "devOptional": true, "dependencies": { "@types/prop-types": "*", "csstype": "^3.0.2" @@ -2426,7 +2481,7 @@ "version": "18.3.0", "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.0.tgz", "integrity": "sha512-EhwApuTmMBmXuFOikhQLIBUn6uFg81SwLMOAUgodJF14SOBOCMdU04gDoYi0WOJJHD144TL32z4yDqCW3dnkQg==", - "dev": true, + "devOptional": true, "license": "MIT", "dependencies": { "@types/react": "*" @@ -2463,16 +2518,16 @@ "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.5.0.tgz", - "integrity": "sha512-lHS5hvz33iUFQKuPFGheAB84LwcJ60G8vKnEhnfcK1l8kGVLro2SFYW6K0/tj8FUhRJ0VHyg1oAfg50QGbPPHw==", + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.6.0.tgz", + "integrity": "sha512-UOaz/wFowmoh2G6Mr9gw60B1mm0MzUtm6Ic8G2yM1Le6gyj5Loi/N+O5mocugRGY+8OeeKmkMmbxNqUCq3B4Sg==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.5.0", - "@typescript-eslint/type-utils": "8.5.0", - "@typescript-eslint/utils": "8.5.0", - "@typescript-eslint/visitor-keys": "8.5.0", + "@typescript-eslint/scope-manager": "8.6.0", + "@typescript-eslint/type-utils": "8.6.0", + "@typescript-eslint/utils": "8.6.0", + "@typescript-eslint/visitor-keys": "8.6.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", @@ -2495,6 +2550,53 @@ } } }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": { + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.6.0.tgz", + "integrity": "sha512-ZuoutoS5y9UOxKvpc/GkvF4cuEmpokda4wRg64JEia27wX+PysIE9q+lzDtlHHgblwUWwo5/Qn+/WyTUvDwBHw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.6.0", + "@typescript-eslint/visitor-keys": "8.6.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": { + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.6.0.tgz", + 
"integrity": "sha512-rojqFZGd4MQxw33SrOy09qIDS8WEldM8JWtKQLAjf/X5mGSeEFh5ixQlxssMNyPslVIk9yzWqXCsV2eFhYrYUw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": { + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.6.0.tgz", + "integrity": "sha512-wapVFfZg9H0qOYh4grNVQiMklJGluQrOUiOhYRrQWhx7BY/+I1IYb8BczWNbbUpO+pqy0rDciv3lQH5E1bCLrg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.6.0", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/parser": { "version": "8.5.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.5.0.tgz", @@ -2541,13 +2643,13 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.5.0.tgz", - "integrity": "sha512-N1K8Ix+lUM+cIDhL2uekVn/ZD7TZW+9/rwz8DclQpcQ9rk4sIL5CAlBC0CugWKREmDjBzI/kQqU4wkg46jWLYA==", + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.6.0.tgz", + "integrity": "sha512-dtePl4gsuenXVwC7dVNlb4mGDcKjDT/Ropsk4za/ouMBPplCLyznIaR+W65mvCvsyS97dymoBRrioEXI7k0XIg==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "8.5.0", - "@typescript-eslint/utils": "8.5.0", + "@typescript-eslint/typescript-estree": "8.6.0", + "@typescript-eslint/utils": "8.6.0", "debug": "^4.3.4", "ts-api-utils": "^1.3.0" }, @@ -2564,6 +2666,64 @@ } } }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": { + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.6.0.tgz", + "integrity": "sha512-rojqFZGd4MQxw33SrOy09qIDS8WEldM8JWtKQLAjf/X5mGSeEFh5ixQlxssMNyPslVIk9yzWqXCsV2eFhYrYUw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": { + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.6.0.tgz", + "integrity": "sha512-MOVAzsKJIPIlLK239l5s06YXjNqpKTVhBVDnqUumQJja5+Y94V3+4VUFRA0G60y2jNnTVwRCkhyGQpavfsbq/g==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.6.0", + "@typescript-eslint/visitor-keys": "8.6.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": { + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.6.0.tgz", + "integrity": 
"sha512-wapVFfZg9H0qOYh4grNVQiMklJGluQrOUiOhYRrQWhx7BY/+I1IYb8BczWNbbUpO+pqy0rDciv3lQH5E1bCLrg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.6.0", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/types": { "version": "8.5.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.5.0.tgz", @@ -2606,15 +2766,15 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.5.0.tgz", - "integrity": "sha512-6yyGYVL0e+VzGYp60wvkBHiqDWOpT63pdMV2CVG4LVDd5uR6q1qQN/7LafBZtAtNIn/mqXjsSeS5ggv/P0iECw==", + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.6.0.tgz", + "integrity": "sha512-eNp9cWnYf36NaOVjkEUznf6fEgVy1TWpE0o52e4wtojjBx7D1UV2WAWGzR+8Y5lVFtpMLPwNbC67T83DWSph4A==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", - "@typescript-eslint/scope-manager": "8.5.0", - "@typescript-eslint/types": "8.5.0", - "@typescript-eslint/typescript-estree": "8.5.0" + "@typescript-eslint/scope-manager": "8.6.0", + "@typescript-eslint/types": "8.6.0", + "@typescript-eslint/typescript-estree": "8.6.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2627,6 +2787,81 @@ "eslint": "^8.57.0 || ^9.0.0" } }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": { + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.6.0.tgz", + "integrity": "sha512-ZuoutoS5y9UOxKvpc/GkvF4cuEmpokda4wRg64JEia27wX+PysIE9q+lzDtlHHgblwUWwo5/Qn+/WyTUvDwBHw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.6.0", + "@typescript-eslint/visitor-keys": "8.6.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": { + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.6.0.tgz", + "integrity": "sha512-rojqFZGd4MQxw33SrOy09qIDS8WEldM8JWtKQLAjf/X5mGSeEFh5ixQlxssMNyPslVIk9yzWqXCsV2eFhYrYUw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": { + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.6.0.tgz", + "integrity": "sha512-MOVAzsKJIPIlLK239l5s06YXjNqpKTVhBVDnqUumQJja5+Y94V3+4VUFRA0G60y2jNnTVwRCkhyGQpavfsbq/g==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.6.0", + "@typescript-eslint/visitor-keys": "8.6.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + 
"node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": { + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.6.0.tgz", + "integrity": "sha512-wapVFfZg9H0qOYh4grNVQiMklJGluQrOUiOhYRrQWhx7BY/+I1IYb8BczWNbbUpO+pqy0rDciv3lQH5E1bCLrg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.6.0", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/visitor-keys": { "version": "8.5.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.5.0.tgz", @@ -3070,7 +3305,7 @@ "version": "3.1.3", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", - "dev": true, + "devOptional": true, "license": "MIT" }, "node_modules/debounce-promise": { @@ -5505,6 +5740,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/style-mod": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.2.tgz", + "integrity": "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw==", + "peer": true + }, "node_modules/style-value-types": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/style-value-types/-/style-value-types-5.0.0.tgz", @@ -5866,9 +6107,9 @@ "optional": true }, "node_modules/vite": { - "version": "5.4.5", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.5.tgz", - "integrity": "sha512-pXqR0qtb2bTwLkev4SE3r4abCNioP3GkjvIDLlzziPpXtHgiJIjuKl+1GN6ESOT3wMjG3JTeARopj2SwYaHTOA==", + "version": "5.4.6", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.6.tgz", + "integrity": "sha512-IeL5f8OO5nylsgzd9tq4qD2QqI0k2CQLGrWD0rCN0EQJZpBK5vJAx0I+GDkMOXxQX/OfFHMuLIx6ddAxGX/k+Q==", "dev": true, "dependencies": { "esbuild": "^0.21.3", @@ -5929,6 +6170,12 @@ "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz", "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==" }, + "node_modules/w3c-keyname": { + "version": "2.2.8", + "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz", + "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==", + "peer": true + }, "node_modules/web-streams-polyfill": { "version": "3.3.3", "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", diff --git a/playground/package.json b/playground/package.json index 62c8494eb2..2ce39d3073 100644 --- a/playground/package.json +++ b/playground/package.json @@ -20,13 +20,13 @@ "@types/react": "^18.3.6", "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^8.5.0", + "@typescript-eslint/eslint-plugin": "^8.6.0", "@typescript-eslint/parser": "^8.5.0", "@vitejs/plugin-react-swc": "^3.7.0", "eslint": "^9.10.0", "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-react-refresh": "^0.4.12", "typescript": "^5.6.2", - "vite": "^5.4.5" + "vite": "^5.4.6" } } From 75adf5088dd29254c74828f8065554f0c2cb0323 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Wed, 18 Sep 2024 09:11:05 -0400 Subject: [PATCH 22/71] feat: Add 
support for one sided relations (#3021) ## Relevant issue(s) Resolves #2830 ## Description Adds support for one sided relations. --- internal/planner/planner.go | 9 ++- internal/planner/type_join.go | 67 ++++++++++-------- .../query/one_to_many/one_sided_test.go | 70 +++++++++++++++++++ 3 files changed, 115 insertions(+), 31 deletions(-) create mode 100644 tests/integration/query/one_to_many/one_sided_test.go diff --git a/internal/planner/planner.go b/internal/planner/planner.go index e6f67668ea..4423183d75 100644 --- a/internal/planner/planner.go +++ b/internal/planner/planner.go @@ -286,6 +286,11 @@ func findFilteredByRelationFields( } func (p *Planner) tryOptimizeJoinDirection(node *invertibleTypeJoin, parentPlan *selectTopNode) error { + if !node.childSide.relFieldDef.HasValue() { + // If the relation is one sided we cannot invert the join, so return early + return nil + } + filteredSubFields := findFilteredByRelationFields( parentPlan.selectNode.filter.Conditions, node.documentMapping, @@ -295,8 +300,8 @@ func (p *Planner) tryOptimizeJoinDirection(node *invertibleTypeJoin, parentPlan for subFieldName, subFieldInd := range filteredSubFields { indexes := desc.GetIndexesOnField(subFieldName) if len(indexes) > 0 && !filter.IsComplex(parentPlan.selectNode.filter) { - subInd := node.documentMapping.FirstIndexOfName(node.parentSide.relFieldDef.Name) - relatedField := mapper.Field{Name: node.parentSide.relFieldDef.Name, Index: subInd} + subInd := node.documentMapping.FirstIndexOfName(node.parentSide.relFieldDef.Value().Name) + relatedField := mapper.Field{Name: node.parentSide.relFieldDef.Value().Name, Index: subInd} fieldFilter := filter.UnwrapRelation(filter.CopyField( parentPlan.selectNode.filter, relatedField, diff --git a/internal/planner/type_join.go b/internal/planner/type_join.go index 8b781b94b6..2102c74479 100644 --- a/internal/planner/type_join.go +++ b/internal/planner/type_join.go @@ -151,8 +151,10 @@ func (n *typeIndexJoin) simpleExplain() (map[string]any, error) { addExplainData := func(j *invertibleTypeJoin) error { // Add the attribute(s). 
- simpleExplainMap[joinRootLabel] = immutable.Some(j.childSide.relFieldDef.Name) - simpleExplainMap[joinSubTypeNameLabel] = j.parentSide.relFieldDef.Name + if j.childSide.relFieldDef.HasValue() { + simpleExplainMap[joinRootLabel] = immutable.Some(j.childSide.relFieldDef.Value().Name) + } + simpleExplainMap[joinSubTypeNameLabel] = j.parentSide.relFieldDef.Value().Name subTypeExplainGraph, err := buildSimpleExplainGraph(j.childSide.plan) if err != nil { @@ -327,23 +329,30 @@ func (p *Planner) newInvertableTypeJoin( return invertibleTypeJoin{}, err } + var childsRelFieldDef immutable.Option[client.FieldDefinition] + var childSideRelIDFieldMapIndex immutable.Option[int] childsRelFieldDesc, ok := subCol.Description().GetFieldByRelation( parentsRelFieldDef.RelationName, parent.collection.Name().Value(), parentsRelFieldDef.Name, ) - if !ok { - return invertibleTypeJoin{}, client.NewErrFieldNotExist(parentsRelFieldDef.Name) - } + if ok { + def, ok := subCol.Definition().GetFieldByName(childsRelFieldDesc.Name) + if !ok { + return invertibleTypeJoin{}, client.NewErrFieldNotExist(subSelect.Name) + } - childsRelFieldDef, ok := subCol.Definition().GetFieldByName(childsRelFieldDesc.Name) - if !ok { - return invertibleTypeJoin{}, client.NewErrFieldNotExist(subSelect.Name) + ind := subSelectPlan.DocumentMap().IndexesByName[def.Name+request.RelatedObjectID] + if len(ind) > 0 { + childSideRelIDFieldMapIndex = immutable.Some(ind[0]) + } + + childsRelFieldDef = immutable.Some(def) } parentSide := joinSide{ plan: sourcePlan, - relFieldDef: parentsRelFieldDef, + relFieldDef: immutable.Some(parentsRelFieldDef), relFieldMapIndex: immutable.Some(subSelect.Index), col: parent.collection, isFirst: true, @@ -356,16 +365,12 @@ func (p *Planner) newInvertableTypeJoin( } childSide := joinSide{ - plan: subSelectPlan, - relFieldDef: childsRelFieldDef, - col: subCol, - isFirst: false, - isParent: false, - } - - ind = subSelectPlan.DocumentMap().IndexesByName[childsRelFieldDef.Name+request.RelatedObjectID] - if len(ind) > 0 { - childSide.relIDFieldMapIndex = immutable.Some(ind[0]) + plan: subSelectPlan, + relFieldDef: childsRelFieldDef, + relIDFieldMapIndex: childSideRelIDFieldMapIndex, + col: subCol, + isFirst: false, + isParent: false, } return invertibleTypeJoin{ @@ -377,8 +382,12 @@ func (p *Planner) newInvertableTypeJoin( } type joinSide struct { - plan planNode - relFieldDef client.FieldDefinition + plan planNode + // The field definition of the relation-object field on this side of the relation. + // + // This will always have a value on the primary side, but it may not have a value on + // the secondary side, as the secondary half of the relation is optional. 
+ relFieldDef immutable.Option[client.FieldDefinition] relFieldMapIndex immutable.Option[int] relIDFieldMapIndex immutable.Option[int] col client.Collection @@ -387,7 +396,7 @@ type joinSide struct { } func (s *joinSide) isPrimary() bool { - return s.relFieldDef.IsPrimaryRelation + return s.relFieldDef.HasValue() && s.relFieldDef.Value().IsPrimaryRelation } func (join *invertibleTypeJoin) getFirstSide() *joinSide { @@ -524,9 +533,9 @@ func newPrimaryObjectsRetriever( func (j *primaryObjectsRetriever) retrievePrimaryDocsReferencingSecondaryDoc() error { relIDFieldDef, ok := j.primarySide.col.Definition().GetFieldByName( - j.primarySide.relFieldDef.Name + request.RelatedObjectID) + j.primarySide.relFieldDef.Value().Name + request.RelatedObjectID) if !ok { - return client.NewErrFieldNotExist(j.primarySide.relFieldDef.Name + request.RelatedObjectID) + return client.NewErrFieldNotExist(j.primarySide.relFieldDef.Value().Name + request.RelatedObjectID) } j.primaryScan = getScanNode(j.primarySide.plan) @@ -620,7 +629,7 @@ func joinPrimaryDocs(primaryDocs []core.Doc, secondarySide, primarySide *joinSid secondaryDoc := secondarySide.plan.Value() if secondarySide.relFieldMapIndex.HasValue() { - if secondarySide.relFieldDef.Kind.IsArray() { + if !secondarySide.relFieldDef.HasValue() || secondarySide.relFieldDef.Value().Kind.IsArray() { secondaryDoc.Fields[secondarySide.relFieldMapIndex.Value()] = primaryDocs } else if len(primaryDocs) > 0 { secondaryDoc.Fields[secondarySide.relFieldMapIndex.Value()] = primaryDocs[0] @@ -628,7 +637,7 @@ func joinPrimaryDocs(primaryDocs []core.Doc, secondarySide, primarySide *joinSid } if secondarySide.relIDFieldMapIndex.HasValue() { - if secondarySide.relFieldDef.Kind.IsArray() { + if !secondarySide.relFieldDef.HasValue() || secondarySide.relFieldDef.Value().Kind.IsArray() { secondaryDoc.Fields[secondarySide.relIDFieldMapIndex.Value()] = docsToDocIDs(primaryDocs) } else if len(primaryDocs) > 0 { secondaryDoc.Fields[secondarySide.relIDFieldMapIndex.Value()] = primaryDocs[0].GetID() @@ -703,7 +712,7 @@ func (join *invertibleTypeJoin) nextJoinedSecondaryDoc() (bool, error) { firstSide := join.getFirstSide() secondSide := join.getSecondSide() - secondaryDocID := getForeignKey(firstSide.plan, firstSide.relFieldDef.Name) + secondaryDocID := getForeignKey(firstSide.plan, firstSide.relFieldDef.Value().Name) if secondaryDocID == "" { if firstSide.isParent { join.docsToYield = append(join.docsToYield, firstSide.plan.Value()) @@ -734,7 +743,7 @@ func (join *invertibleTypeJoin) nextJoinedSecondaryDoc() (bool, error) { return join.Next() } - if join.parentSide.relFieldDef.Kind.IsArray() { + if join.parentSide.relFieldDef.Value().Kind.IsArray() { var primaryDocs []core.Doc var secondaryDoc core.Doc // if child is not requested as part of the response, we just add the existing one (fetched by the secondary index @@ -771,7 +780,7 @@ func (join *invertibleTypeJoin) invertJoinDirectionWithIndex( ) error { p := join.childSide.plan s := getScanNode(p) - s.tryAddField(join.childSide.relFieldDef.Name + request.RelatedObjectID) + s.tryAddField(join.childSide.relFieldDef.Value().Name + request.RelatedObjectID) s.filter = fieldFilter s.initFetcher(immutable.Option[string]{}, immutable.Some(index)) diff --git a/tests/integration/query/one_to_many/one_sided_test.go b/tests/integration/query/one_to_many/one_sided_test.go new file mode 100644 index 0000000000..5a25598fd3 --- /dev/null +++ b/tests/integration/query/one_to_many/one_sided_test.go @@ -0,0 +1,70 @@ +// Copyright 2024 Democratized Data 
Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package one_to_many + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryOneToMany_OneSided(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Author { + name: String + } + + type Book { + name: String + author: Author + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + DocMap: map[string]any{ + "name": "Painted House", + "author": testUtils.NewDocIndex(0, 0), + }, + }, + testUtils.Request{ + Request: `query { + Book { + name + author { + name + } + } + }`, + Results: map[string]any{ + "Book": []map[string]any{ + { + "name": "Painted House", + "author": map[string]any{ + "name": "John Grisham", + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From 2675b2e8c060d0bfa93bcbbd3fa5ef12cc39b776 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 18 Sep 2024 10:43:41 -0700 Subject: [PATCH 23/71] feat: CLI purge command (#2998) ## Relevant issue(s) Resolves #2953 ## Description ~This PR adds a purge command that clears the data directory of the defradb instance on the local machine.~ This PR adds a purge command that clears the configured datastore and restarts the defradb instance. The purge command requires a `--force` flag to be run, and the defradb instance must have the `development` flag or config value enabled. - [x] Add `dev` flag and config parameter that is required for destructive operations ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? 
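A minimal manual check looks like the following sketch (assuming a locally built binary; `--development` and `--force` are the flags added in this patch):

```
defradb start --development
defradb client purge --force
```
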
Added unit tests and tested manually Specify the platform(s) on which this was tested: - MacOS --- cli/cli.go | 1 + cli/config.go | 2 + cli/config_test.go | 2 + cli/errors.go | 1 + cli/purge.go | 36 +++++ cli/purge_test.go | 25 ++++ cli/start.go | 77 ++++++++--- docs/config.md | 4 + docs/website/references/cli/defradb_client.md | 1 + .../references/cli/defradb_client_purge.md | 45 ++++++ docs/website/references/cli/defradb_start.md | 1 + docs/website/references/http/openapi.json | 17 +++ event/event.go | 2 + http/client.go | 11 ++ http/handler.go | 2 + http/handler_extras.go | 47 +++++++ http/handler_extras_test.go | 42 ++++++ node/errors.go | 1 + node/node.go | 128 ++++++++++-------- node/node_test.go | 57 ++++++++ node/store.go | 18 +++ node/store_badger.go | 15 ++ node/store_memory.go | 5 + tests/gen/schema_parser.go | 2 +- tests/integration/db.go | 25 +++- 25 files changed, 484 insertions(+), 83 deletions(-) create mode 100644 cli/purge.go create mode 100644 cli/purge_test.go create mode 100644 docs/website/references/cli/defradb_client_purge.md create mode 100644 http/handler_extras.go create mode 100644 http/handler_extras_test.go diff --git a/cli/cli.go b/cli/cli.go index 0875e325fe..4453cbaafb 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -111,6 +111,7 @@ func NewDefraCommand() *cobra.Command { client := MakeClientCommand() client.AddCommand( + MakePurgeCommand(), MakeDumpCommand(), MakeRequestCommand(), schema, diff --git a/cli/config.go b/cli/config.go index 9a0290eb04..745d43e617 100644 --- a/cli/config.go +++ b/cli/config.go @@ -64,6 +64,7 @@ var configFlags = map[string]string{ "keyring-path": "keyring.path", "no-keyring": "keyring.disabled", "source-hub-address": "acp.sourceHub.address", + "development": "development", } // configDefaults contains default values for config entries. @@ -74,6 +75,7 @@ var configDefaults = map[string]any{ "datastore.maxtxnretries": 5, "datastore.store": "badger", "datastore.badger.valuelogfilesize": 1 << 30, + "development": false, "net.p2pdisabled": false, "net.p2paddresses": []string{"/ip4/127.0.0.1/tcp/9171"}, "net.peers": []string{}, diff --git a/cli/config_test.go b/cli/config_test.go index d3f6d954e3..36421bd42f 100644 --- a/cli/config_test.go +++ b/cli/config_test.go @@ -68,4 +68,6 @@ func TestLoadConfigNotExist(t *testing.T) { assert.Equal(t, false, cfg.GetBool("keyring.disabled")) assert.Equal(t, "defradb", cfg.GetString("keyring.namespace")) assert.Equal(t, "file", cfg.GetString("keyring.backend")) + + assert.Equal(t, false, cfg.GetBool("development")) } diff --git a/cli/errors.go b/cli/errors.go index 504cb9ca25..c22957ff51 100644 --- a/cli/errors.go +++ b/cli/errors.go @@ -40,6 +40,7 @@ var ( ErrSchemaVersionNotOfSchema = errors.New(errSchemaVersionNotOfSchema) ErrViewAddMissingArgs = errors.New("please provide a base query and output SDL for this view") ErrPolicyFileArgCanNotBeEmpty = errors.New("policy file argument can not be empty") + ErrPurgeForceFlagRequired = errors.New("run this command again with --force if you really want to purge all data") ) func NewErrRequiredFlagEmpty(longName string, shortName string) error { diff --git a/cli/purge.go b/cli/purge.go new file mode 100644 index 0000000000..5880e021b8 --- /dev/null +++ b/cli/purge.go @@ -0,0 +1,36 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/http" +) + +func MakePurgeCommand() *cobra.Command { + var force bool + var cmd = &cobra.Command{ + Use: "purge", + Short: "Delete all persisted data and restart", + Long: `Delete all persisted data and restart. +WARNING this operation cannot be reversed.`, + RunE: func(cmd *cobra.Command, args []string) error { + db := mustGetContextDB(cmd).(*http.Client) + if !force { + return ErrPurgeForceFlagRequired + } + return db.Purge(cmd.Context()) + }, + } + cmd.Flags().BoolVarP(&force, "force", "f", false, "Must be set for the operation to run") + return cmd +} diff --git a/cli/purge_test.go b/cli/purge_test.go new file mode 100644 index 0000000000..8a94d2eb21 --- /dev/null +++ b/cli/purge_test.go @@ -0,0 +1,25 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestPurgeCommandWithoutForceFlagReturnsError(t *testing.T) { + cmd := NewDefraCommand() + cmd.SetArgs([]string{"client", "purge"}) + + err := cmd.Execute() + require.ErrorIs(t, err, ErrPurgeForceFlagRequired) +} diff --git a/cli/start.go b/cli/start.go index 651360ab83..2b5b4fb734 100644 --- a/cli/start.go +++ b/cli/start.go @@ -19,6 +19,7 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/errors" + "github.com/sourcenetwork/defradb/event" "github.com/sourcenetwork/defradb/http" "github.com/sourcenetwork/defradb/internal/db" "github.com/sourcenetwork/defradb/keyring" @@ -26,6 +27,17 @@ import ( "github.com/sourcenetwork/defradb/node" ) +const devModeBanner = ` +****************************************** +** DEVELOPMENT MODE IS ENABLED ** +** ------------------------------------ ** +** if this is a production database ** +** disable development mode and restart ** +** or you may risk losing all data ** +****************************************** + +` + func MakeStartCommand() *cobra.Command { var cmd = &cobra.Command{ Use: "start", @@ -46,12 +58,15 @@ func MakeStartCommand() *cobra.Command { cfg := mustGetContextConfig(cmd) opts := []node.Option{ - node.WithStorePath(cfg.GetString("datastore.badger.path")), - node.WithBadgerInMemory(cfg.GetString("datastore.store") == configStoreMemory), node.WithDisableP2P(cfg.GetBool("net.p2pDisabled")), node.WithSourceHubChainID(cfg.GetString("acp.sourceHub.ChainID")), node.WithSourceHubGRPCAddress(cfg.GetString("acp.sourceHub.GRPCAddress")), node.WithSourceHubCometRPCAddress(cfg.GetString("acp.sourceHub.CometRPCAddress")), + node.WithLensRuntime(node.LensRuntimeType(cfg.GetString("lens.runtime"))), + node.WithEnableDevelopment(cfg.GetBool("development")), + // store options + node.WithStorePath(cfg.GetString("datastore.badger.path")), + node.WithBadgerInMemory(cfg.GetString("datastore.store") == configStoreMemory), // db options db.WithMaxRetries(cfg.GetInt("datastore.MaxTxnRetries")), // net node options @@ -64,7 +79,6 @@ func 
MakeStartCommand() *cobra.Command { http.WithAllowedOrigins(cfg.GetStringSlice("api.allowed-origins")...), http.WithTLSCertPath(cfg.GetString("api.pubKeyPath")), http.WithTLSKeyPath(cfg.GetString("api.privKeyPath")), - node.WithLensRuntime(node.LensRuntimeType(cfg.GetString("lens.runtime"))), } if cfg.GetString("datastore.store") != configStoreMemory { @@ -75,6 +89,11 @@ func MakeStartCommand() *cobra.Command { opts = append(opts, node.WithACPPath(rootDir)) } + acpType := cfg.GetString("acp.type") + if acpType != "" { + opts = append(opts, node.WithACPType(node.ACPType(acpType))) + } + if !cfg.GetBool("keyring.disabled") { kr, err := openKeyring(cmd) if err != nil { @@ -91,9 +110,8 @@ func MakeStartCommand() *cobra.Command { if err != nil && !errors.Is(err, keyring.ErrNotFound) { return err } - opts = append(opts, node.WithBadgerEncryptionKey(encryptionKey)) - + // setup the sourcehub transaction signer sourceHubKeyName := cfg.GetString("acp.sourceHub.KeyName") if sourceHubKeyName != "" { signer, err := keyring.NewTxSignerFromKeyringKey(kr, sourceHubKeyName) @@ -104,38 +122,54 @@ func MakeStartCommand() *cobra.Command { } } - acpType := cfg.GetString("acp.type") - if acpType != "" { - opts = append(opts, node.WithACPType(node.ACPType(acpType))) + isDevMode := cfg.GetBool("development") + if isDevMode { + cmd.Printf(devModeBanner) } - n, err := node.NewNode(cmd.Context(), opts...) + signalCh := make(chan os.Signal, 1) + signal.Notify(signalCh, os.Interrupt, syscall.SIGTERM) + + n, err := node.New(cmd.Context(), opts...) if err != nil { return err } - - defer func() { - if err := n.Close(cmd.Context()); err != nil { - log.ErrorContextE(cmd.Context(), "Stopping DefraDB", err) - } - }() - log.InfoContext(cmd.Context(), "Starting DefraDB") if err := n.Start(cmd.Context()); err != nil { return err } - signalCh := make(chan os.Signal, 1) - signal.Notify(signalCh, os.Interrupt, syscall.SIGTERM) + RESTART: + // after a restart we need to resubscribe + purgeSub, err := n.DB.Events().Subscribe(event.PurgeName) + if err != nil { + return err + } + SELECT: select { + case <-purgeSub.Message(): + log.InfoContext(cmd.Context(), "Received purge event; restarting...") + + err := n.PurgeAndRestart(cmd.Context()) + if err != nil { + log.ErrorContextE(cmd.Context(), "failed to purge", err) + } + if err == nil { + goto RESTART + } + if errors.Is(err, node.ErrPurgeWithDevModeDisabled) { + goto SELECT + } + case <-cmd.Context().Done(): log.InfoContext(cmd.Context(), "Received context cancellation; shutting down...") + case <-signalCh: log.InfoContext(cmd.Context(), "Received interrupt; shutting down...") } - return nil + return n.Close(cmd.Context()) }, } // set default flag values from config @@ -185,5 +219,10 @@ func MakeStartCommand() *cobra.Command { cfg.GetString(configFlags["privkeypath"]), "Path to the private key for tls", ) + cmd.PersistentFlags().Bool( + "development", + cfg.GetBool(configFlags["development"]), + "Enables a set of features that make development easier but should not be enabled in production", + ) return cmd } diff --git a/docs/config.md b/docs/config.md index 6b592059cb..0ac6e5dd52 100644 --- a/docs/config.md +++ b/docs/config.md @@ -4,6 +4,10 @@ The default DefraDB directory is `$HOME/.defradb`. It can be changed via the --r Relative paths are interpreted as being rooted in the DefraDB directory. +## `development` + +Enables a set of features that make development easier but should not be enabled in production. + ## `datastore.store` Store can be badger or memory. 
Defaults to `badger`. diff --git a/docs/website/references/cli/defradb_client.md b/docs/website/references/cli/defradb_client.md index dfd398f53a..27f840d7ae 100644 --- a/docs/website/references/cli/defradb_client.md +++ b/docs/website/references/cli/defradb_client.md @@ -43,6 +43,7 @@ Execute queries, add schema types, obtain node info, etc. * [defradb client dump](defradb_client_dump.md) - Dump the contents of DefraDB node-side * [defradb client index](defradb_client_index.md) - Manage collections' indexes of a running DefraDB instance * [defradb client p2p](defradb_client_p2p.md) - Interact with the DefraDB P2P system +* [defradb client purge](defradb_client_purge.md) - Delete all persisted data and restart * [defradb client query](defradb_client_query.md) - Send a DefraDB GraphQL query request * [defradb client schema](defradb_client_schema.md) - Interact with the schema system of a DefraDB node * [defradb client tx](defradb_client_tx.md) - Create, commit, and discard DefraDB transactions diff --git a/docs/website/references/cli/defradb_client_purge.md b/docs/website/references/cli/defradb_client_purge.md new file mode 100644 index 0000000000..3a1b4b2738 --- /dev/null +++ b/docs/website/references/cli/defradb_client_purge.md @@ -0,0 +1,45 @@ +## defradb client purge + +Delete all persisted data and restart + +### Synopsis + +Delete all persisted data and restart. +WARNING this operation cannot be reversed. + +``` +defradb client purge [flags] +``` + +### Options + +``` + -f, --force Must be set for the operation to run + -h, --help help for purge +``` + +### Options inherited from parent commands + +``` + -i, --identity string Hex formatted private key used to authenticate with ACP + --keyring-backend string Keyring backend to use. Options are file or system (default "file") + --keyring-namespace string Service name to use when using the system backend (default "defradb") + --keyring-path string Path to store encrypted keys when using the file backend (default "keys") + --log-format string Log format to use. Options are text or json (default "text") + --log-level string Log level to use. Options are debug, info, error, fatal (default "info") + --log-output string Log output path. Options are stderr or stdout. (default "stderr") + --log-overrides string Logger config overrides. Format <name>,<key>=<val>,...;<name>,...
+ --log-source Include source location in logs + --log-stacktrace Include stacktrace in error and fatal logs + --no-keyring Disable the keyring and generate ephemeral keys + --no-log-color Disable colored log output + --rootdir string Directory for persistent data (default: $HOME/.defradb) + --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor + --tx uint Transaction ID + --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") +``` + +### SEE ALSO + +* [defradb client](defradb_client.md) - Interact with a DefraDB node + diff --git a/docs/website/references/cli/defradb_start.md b/docs/website/references/cli/defradb_start.md index d71dfb14e4..9b1e5a8d74 100644 --- a/docs/website/references/cli/defradb_start.md +++ b/docs/website/references/cli/defradb_start.md @@ -14,6 +14,7 @@ defradb start [flags] ``` --allowed-origins stringArray List of origins to allow for CORS requests + --development Enables a set of features that make development easier but should not be enabled in production -h, --help help for start --max-txn-retries int Specify the maximum number of retries per transaction (default 5) --no-p2p Disable the peer-to-peer network synchronization system diff --git a/docs/website/references/http/openapi.json b/docs/website/references/http/openapi.json index 470359097e..9a7198495a 100644 --- a/docs/website/references/http/openapi.json +++ b/docs/website/references/http/openapi.json @@ -1711,6 +1711,23 @@ ] } }, + "/purge": { + "post": { + "description": "Purge all persisted data and restart", + "operationId": "purge", + "responses": { + "200": { + "$ref": "#/components/responses/success" + }, + "400": { + "$ref": "#/components/responses/error" + }, + "default": { + "description": "" + } + } + } + }, "/schema": { "get": { "description": "Introspect schema(s) by name, schema root, or version id.", diff --git a/event/event.go b/event/event.go index 9d24a89c10..698cb8dc90 100644 --- a/event/event.go +++ b/event/event.go @@ -39,6 +39,8 @@ const ( P2PTopicCompletedName = Name("p2p-topic-completed") // ReplicatorCompletedName is the name of the replicator completed event. ReplicatorCompletedName = Name("replicator-completed") + // PurgeName is the name of the purge event. 
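+ // It signals that all persisted data should be purged and the node restarted.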
+ PurgeName = Name("purge") ) // PubSub is an event that is published when diff --git a/http/client.go b/http/client.go index 0c59e23757..ba272572b8 100644 --- a/http/client.go +++ b/http/client.go @@ -472,6 +472,17 @@ func (c *Client) PrintDump(ctx context.Context) error { return err } +func (c *Client) Purge(ctx context.Context) error { + methodURL := c.http.baseURL.JoinPath("purge") + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) + if err != nil { + return err + } + _, err = c.http.request(req) + return err +} + func (c *Client) Close() { // do nothing } diff --git a/http/handler.go b/http/handler.go index 3ec33d9b2a..cdb09767c6 100644 --- a/http/handler.go +++ b/http/handler.go @@ -36,6 +36,7 @@ func NewApiRouter() (*Router, error) { p2p_handler := &p2pHandler{} lens_handler := &lensHandler{} ccip_handler := &ccipHandler{} + extras_handler := &extrasHandler{} router, err := NewRouter() if err != nil { @@ -47,6 +48,7 @@ func NewApiRouter() (*Router, error) { acp_handler.bindRoutes(router) p2p_handler.bindRoutes(router) ccip_handler.bindRoutes(router) + extras_handler.bindRoutes(router) router.AddRouteGroup(func(r *Router) { r.AddMiddleware(CollectionMiddleware) diff --git a/http/handler_extras.go b/http/handler_extras.go new file mode 100644 index 0000000000..c891e9befc --- /dev/null +++ b/http/handler_extras.go @@ -0,0 +1,47 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "net/http" + + "github.com/getkin/kin-openapi/openapi3" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/event" +) + +// extrasHandler contains additional http handlers not found in client interfaces. +type extrasHandler struct{} + +func (s *extrasHandler) Purge(rw http.ResponseWriter, req *http.Request) { + db := req.Context().Value(dbContextKey).(client.DB) + rw.WriteHeader(http.StatusOK) // write the response before we restart to purge + db.Events().Publish(event.NewMessage(event.PurgeName, nil)) +} + +func (h *extrasHandler) bindRoutes(router *Router) { + errorResponse := &openapi3.ResponseRef{ + Ref: "#/components/responses/error", + } + successResponse := &openapi3.ResponseRef{ + Ref: "#/components/responses/success", + } + + purge := openapi3.NewOperation() + purge.Description = "Purge all persisted data and restart" + purge.OperationID = "purge" + purge.Responses = openapi3.NewResponses() + purge.Responses.Set("200", successResponse) + purge.Responses.Set("400", errorResponse) + + router.AddRoute("/purge", http.MethodPost, purge, h.Purge) +} diff --git a/http/handler_extras_test.go b/http/handler_extras_test.go new file mode 100644 index 0000000000..d7d1398e90 --- /dev/null +++ b/http/handler_extras_test.go @@ -0,0 +1,42 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package http + +import ( + "net/http" + "net/http/httptest" + "testing" + + "github.com/sourcenetwork/defradb/event" + + "github.com/stretchr/testify/require" +) + +func TestPurge(t *testing.T) { + cdb := setupDatabase(t) + url := "http://localhost:9181/api/v0/purge" + + req := httptest.NewRequest(http.MethodPost, url, nil) + rec := httptest.NewRecorder() + + purgeSub, err := cdb.Events().Subscribe(event.PurgeName) + require.NoError(t, err) + + handler, err := NewHandler(cdb) + require.NoError(t, err) + handler.ServeHTTP(rec, req) + + res := rec.Result() + require.Equal(t, 200, res.StatusCode) + + // test will timeout if purge never received + <-purgeSub.Message() +} diff --git a/node/errors.go b/node/errors.go index 84f0cdb006..2504f6ccb9 100644 --- a/node/errors.go +++ b/node/errors.go @@ -23,6 +23,7 @@ var ( ErrSignerMissingForSourceHubACP = errors.New("a txn signer must be provided for SourceHub ACP") ErrLensRuntimeNotSupported = errors.New(errLensRuntimeNotSupported) ErrStoreTypeNotSupported = errors.New(errStoreTypeNotSupported) + ErrPurgeWithDevModeDisabled = errors.New("cannot purge database when development mode is disabled") ) func NewErrLensRuntimeNotSupported(lens LensRuntimeType) error { diff --git a/node/node.go b/node/node.go index ffc4abd0ff..2603762684 100644 --- a/node/node.go +++ b/node/node.go @@ -39,8 +39,9 @@ type Option any // Options contains start configuration values. type Options struct { - disableP2P bool - disableAPI bool + disableP2P bool + disableAPI bool + enableDevelopment bool } // DefaultOptions returns options with default settings. @@ -65,103 +66,98 @@ func WithDisableAPI(disable bool) NodeOpt { } } +// WithEnableDevelopment sets the enable development mode flag. +func WithEnableDevelopment(enable bool) NodeOpt { + return func(o *Options) { + o.enableDevelopment = enable + } +} + // Node is a DefraDB instance with optional sub-systems. type Node struct { DB client.DB Peer *net.Peer Server *http.Server + + options *Options + dbOpts []db.Option + acpOpts []ACPOpt + netOpts []net.NodeOpt + storeOpts []StoreOpt + serverOpts []http.ServerOpt + lensOpts []LenOpt } -// NewNode returns a new node instance configured with the given options. -func NewNode(ctx context.Context, opts ...Option) (*Node, error) { - var ( - dbOpts []db.Option - acpOpts []ACPOpt - netOpts []net.NodeOpt - storeOpts []StoreOpt - serverOpts []http.ServerOpt - lensOpts []LenOpt - ) - - options := DefaultOptions() +// New returns a new node instance configured with the given options. +func New(ctx context.Context, opts ...Option) (*Node, error) { + n := Node{ + options: DefaultOptions(), + } for _, opt := range opts { switch t := opt.(type) { - case ACPOpt: - acpOpts = append(acpOpts, t) - case NodeOpt: - t(options) + t(n.options) + + case ACPOpt: + n.acpOpts = append(n.acpOpts, t) case StoreOpt: - storeOpts = append(storeOpts, t) + n.storeOpts = append(n.storeOpts, t) case db.Option: - dbOpts = append(dbOpts, t) + n.dbOpts = append(n.dbOpts, t) case http.ServerOpt: - serverOpts = append(serverOpts, t) + n.serverOpts = append(n.serverOpts, t) case net.NodeOpt: - netOpts = append(netOpts, t) + n.netOpts = append(n.netOpts, t) case LenOpt: - lensOpts = append(lensOpts, t) + n.lensOpts = append(n.lensOpts, t) } } + return &n, nil +} - rootstore, err := NewStore(ctx, storeOpts...) +// Start starts the node sub-systems. +func (n *Node) Start(ctx context.Context) error { + rootstore, err := NewStore(ctx, n.storeOpts...) 
if err != nil { - return nil, err + return err } - - acp, err := NewACP(ctx, acpOpts...) + acp, err := NewACP(ctx, n.acpOpts...) if err != nil { - return nil, err + return err } - - lens, err := NewLens(ctx, lensOpts...) + lens, err := NewLens(ctx, n.lensOpts...) if err != nil { - return nil, err + return err } - - db, err := db.NewDB(ctx, rootstore, acp, lens, dbOpts...) + n.DB, err = db.NewDB(ctx, rootstore, acp, lens, n.dbOpts...) if err != nil { - return nil, err + return err } - var peer *net.Peer - if !options.disableP2P { + if !n.options.disableP2P { // setup net node - peer, err = net.NewPeer(ctx, db.Blockstore(), db.Events(), netOpts...) + n.Peer, err = net.NewPeer(ctx, n.DB.Blockstore(), n.DB.Events(), n.netOpts...) if err != nil { - return nil, err + return err } } - var server *http.Server - if !options.disableAPI { + if !n.options.disableAPI { // setup http server - handler, err := http.NewHandler(db) + handler, err := http.NewHandler(n.DB) if err != nil { - return nil, err + return err } - server, err = http.NewServer(handler, serverOpts...) + n.Server, err = http.NewServer(handler, n.serverOpts...) if err != nil { - return nil, err + return err } - } - - return &Node{ - DB: db, - Peer: peer, - Server: server, - }, nil -} - -// Start starts the node sub-systems. -func (n *Node) Start(ctx context.Context) error { - if n.Server != nil { - err := n.Server.SetListener() + err = n.Server.SetListener() if err != nil { return err } @@ -174,6 +170,7 @@ func (n *Node) Start(ctx context.Context) error { } }() } + return nil } @@ -191,3 +188,20 @@ func (n *Node) Close(ctx context.Context) error { } return err } + +// PurgeAndRestart causes the node to shutdown, purge all data from +// its datastore, and restart. +func (n *Node) PurgeAndRestart(ctx context.Context) error { + if !n.options.enableDevelopment { + return ErrPurgeWithDevModeDisabled + } + err := n.Close(ctx) + if err != nil { + return err + } + err = purgeStore(ctx, n.storeOpts...) + if err != nil { + return err + } + return n.Start(ctx) +} diff --git a/node/node_test.go b/node/node_test.go index 1aa1dac92a..010f810da2 100644 --- a/node/node_test.go +++ b/node/node_test.go @@ -11,9 +11,13 @@ package node import ( + "context" "testing" + "github.com/sourcenetwork/defradb/client" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestWithDisableP2P(t *testing.T) { @@ -27,3 +31,56 @@ func TestWithDisableAPI(t *testing.T) { WithDisableAPI(true)(options) assert.Equal(t, true, options.disableAPI) } + +func TestWithEnableDevelopment(t *testing.T) { + options := &Options{} + WithEnableDevelopment(true)(options) + assert.Equal(t, true, options.enableDevelopment) +} + +func TestPurgeAndRestartWithDevModeDisabled(t *testing.T) { + ctx := context.Background() + + opts := []Option{ + WithDisableAPI(true), + WithDisableP2P(true), + WithStorePath(t.TempDir()), + } + + n, err := New(ctx, opts...) + require.NoError(t, err) + + err = n.Start(ctx) + require.NoError(t, err) + + err = n.PurgeAndRestart(ctx) + require.ErrorIs(t, err, ErrPurgeWithDevModeDisabled) +} + +func TestPurgeAndRestartWithDevModeEnabled(t *testing.T) { + ctx := context.Background() + + opts := []Option{ + WithDisableAPI(true), + WithDisableP2P(true), + WithStorePath(t.TempDir()), + WithEnableDevelopment(true), + } + + n, err := New(ctx, opts...) 
+ require.NoError(t, err) + + err = n.Start(ctx) + require.NoError(t, err) + + _, err = n.DB.AddSchema(ctx, "type User { name: String }") + require.NoError(t, err) + + err = n.PurgeAndRestart(ctx) + require.NoError(t, err) + + schemas, err := n.DB.GetSchemas(ctx, client.SchemaFetchOptions{}) + require.NoError(t, err) + + assert.Len(t, schemas, 0) +} diff --git a/node/store.go b/node/store.go index 373610b0e1..4a0e8d93da 100644 --- a/node/store.go +++ b/node/store.go @@ -31,6 +31,12 @@ const ( // allows its population to be managed by build flags. var storeConstructors = map[StoreType]func(ctx context.Context, options *StoreOptions) (datastore.Rootstore, error){} +// storePurgeFuncs is a map of [StoreType]s to store purge functions. +// +// It is populated by the `init` functions in the runtime-specific files - this +// allows its population to be managed by build flags. +var storePurgeFuncs = map[StoreType]func(ctx context.Context, options *StoreOptions) error{} + // StoreOptions contains store configuration values. type StoreOptions struct { store StoreType @@ -77,3 +83,15 @@ func NewStore(ctx context.Context, opts ...StoreOpt) (datastore.Rootstore, error } return nil, NewErrStoreTypeNotSupported(options.store) } + +func purgeStore(ctx context.Context, opts ...StoreOpt) error { + options := DefaultStoreOptions() + for _, opt := range opts { + opt(options) + } + purgeFunc, ok := storePurgeFuncs[options.store] + if ok { + return purgeFunc(ctx, options) + } + return NewErrStoreTypeNotSupported(options.store) +} diff --git a/node/store_badger.go b/node/store_badger.go index 5c252d3607..94ee2c3dbf 100644 --- a/node/store_badger.go +++ b/node/store_badger.go @@ -38,8 +38,23 @@ func init() { return badger.NewDatastore(options.path, &badgerOpts) } + purge := func(ctx context.Context, options *StoreOptions) error { + store, err := constructor(ctx, options) + if err != nil { + return err + } + err = store.(*badger.Datastore).DB.DropAll() + if err != nil { + return err + } + return store.Close() + } + storeConstructors[BadgerStore] = constructor + storePurgeFuncs[BadgerStore] = purge + storeConstructors[DefaultStore] = constructor + storePurgeFuncs[DefaultStore] = purge } // WithBadgerInMemory sets the badger in memory option. diff --git a/node/store_memory.go b/node/store_memory.go index 352381fa9d..84911d5da8 100644 --- a/node/store_memory.go +++ b/node/store_memory.go @@ -24,9 +24,14 @@ func init() { constructor := func(ctx context.Context, options *StoreOptions) (datastore.Rootstore, error) { return memory.NewDatastore(ctx), nil } + purge := func(ctx context.Context, options *StoreOptions) error { + return nil + } // don't override the default constructor if previously set if _, ok := storeConstructors[DefaultStore]; !ok { storeConstructors[DefaultStore] = constructor + storePurgeFuncs[DefaultStore] = purge } storeConstructors[MemoryStore] = constructor + storePurgeFuncs[MemoryStore] = purge } diff --git a/tests/gen/schema_parser.go b/tests/gen/schema_parser.go index 3e08212b5c..ebf3a813ea 100644 --- a/tests/gen/schema_parser.go +++ b/tests/gen/schema_parser.go @@ -26,7 +26,7 @@ func ParseSDL(gqlSDL string) (map[string]client.CollectionDefinition, error) { // Spinning up a temporary in-memory node with all extras disabled is the // most reliable and cheapest maintenance-cost-wise way to fully parse // the SDL and correctly link all relations.
- node, err := node.NewNode( + node, err := node.New( ctx, node.WithBadgerInMemory(true), node.WithDisableAPI(true), diff --git a/tests/integration/db.go b/tests/integration/db.go index 54175784c0..06737318d7 100644 --- a/tests/integration/db.go +++ b/tests/integration/db.go @@ -73,14 +73,19 @@ func init() { func NewBadgerMemoryDB(ctx context.Context) (client.DB, error) { opts := []node.Option{ + node.WithDisableP2P(true), + node.WithDisableAPI(true), node.WithBadgerInMemory(true), } - node, err := node.NewNode(ctx, opts...) + node, err := node.New(ctx, opts...) + if err != nil { + return nil, err + } + err = node.Start(ctx) if err != nil { return nil, err } - return node.DB, err } @@ -88,14 +93,19 @@ func NewBadgerFileDB(ctx context.Context, t testing.TB) (client.DB, error) { path := t.TempDir() opts := []node.Option{ + node.WithDisableP2P(true), + node.WithDisableAPI(true), node.WithStorePath(path), } - node, err := node.NewNode(ctx, opts...) + node, err := node.New(ctx, opts...) + if err != nil { + return nil, err + } + err = node.Start(ctx) if err != nil { return nil, err } - return node.DB, err } @@ -175,10 +185,13 @@ func setupNode(s *state) (*node.Node, string, error) { return nil, "", fmt.Errorf("invalid database type: %v", s.dbt) } - node, err := node.NewNode(s.ctx, opts...) + node, err := node.New(s.ctx, opts...) + if err != nil { + return nil, "", err + } + err = node.Start(s.ctx) if err != nil { return nil, "", err } - return node, path, nil } From 36eacbe064c4da99937a304b92f582a4a9d59a3f Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 18 Sep 2024 11:29:10 -0700 Subject: [PATCH 24/71] fix: GraphQL null argument parsing (#3013) ## Relevant issue(s) Resolves #3012 ## Description This PR fixes an issue where explicitly null GQL inputs would cause a panic. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? 
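A quick manual reproduction of the original panic is a request with an explicitly null argument, e.g. (assuming a node with a `Users` collection, mirroring the added tests):

```
defradb client query 'query { Users(filter: null) { name } }'
```
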
Added tests Specify the platform(s) on which this was tested: - MacOS --- internal/planner/mapper/targetable.go | 16 +- internal/request/graphql/parser/commit.go | 71 +++- internal/request/graphql/parser/filter.go | 29 +- internal/request/graphql/parser/mutation.go | 76 ++-- internal/request/graphql/parser/query.go | 245 +++++++------ .../request/graphql/parser/subscription.go | 9 +- .../mutation/create/with_null_input_test.go | 141 ++++++++ .../mutation/delete/with_null_input_test.go | 125 +++++++ .../mutation/update/with_null_input_test.go | 125 +++++++ .../query/commits/with_null_input_test.go | 321 +++++++++++++++++ .../query/simple/with_null_input_test.go | 336 ++++++++++++++++++ 11 files changed, 1330 insertions(+), 164 deletions(-) create mode 100644 tests/integration/mutation/create/with_null_input_test.go create mode 100644 tests/integration/mutation/delete/with_null_input_test.go create mode 100644 tests/integration/mutation/update/with_null_input_test.go create mode 100644 tests/integration/query/commits/with_null_input_test.go create mode 100644 tests/integration/query/simple/with_null_input_test.go diff --git a/internal/planner/mapper/targetable.go b/internal/planner/mapper/targetable.go index 68f7f993ef..a45e99a516 100644 --- a/internal/planner/mapper/targetable.go +++ b/internal/planner/mapper/targetable.go @@ -110,13 +110,17 @@ func filterObjectToMap(mapping *core.DocumentMapping, obj map[connor.FilterKey]a for k, v := range obj { switch keyType := k.(type) { case *PropertyIndex: - subObj := v.(map[connor.FilterKey]any) outkey, _ := mapping.TryToFindNameFromIndex(keyType.Index) - childMapping, ok := tryGetChildMapping(mapping, keyType.Index) - if ok { - outmap[outkey] = filterObjectToMap(childMapping, subObj) - } else { - outmap[outkey] = filterObjectToMap(mapping, subObj) + switch subObj := v.(type) { + case map[connor.FilterKey]any: + childMapping, ok := tryGetChildMapping(mapping, keyType.Index) + if ok { + outmap[outkey] = filterObjectToMap(childMapping, subObj) + } else { + outmap[outkey] = filterObjectToMap(mapping, subObj) + } + case nil: + outmap[outkey] = nil } case *Operator: diff --git a/internal/request/graphql/parser/commit.go b/internal/request/graphql/parser/commit.go index a8074d031e..b08d80fc69 100644 --- a/internal/request/graphql/parser/commit.go +++ b/internal/request/graphql/parser/commit.go @@ -35,31 +35,61 @@ func parseCommitSelect( arguments := gql.GetArgumentValues(fieldDef.Args, field.Arguments, exe.VariableValues) for _, argument := range field.Arguments { - prop := argument.Name.Value - if prop == request.DocIDArgName { - commit.DocID = immutable.Some(arguments[prop].(string)) - } else if prop == request.Cid { - commit.CID = immutable.Some(arguments[prop].(string)) - } else if prop == request.FieldIDName { - commit.FieldID = immutable.Some(arguments[prop].(string)) - } else if prop == request.OrderClause { - conditions, err := ParseConditionsInOrder(argument.Value.(*ast.ObjectValue), arguments[prop].(map[string]any)) + name := argument.Name.Value + value := arguments[name] + + switch name { + case request.DocIDArgName: + if v, ok := value.(string); ok { + commit.DocID = immutable.Some(v) + } + + case request.Cid: + if v, ok := value.(string); ok { + commit.CID = immutable.Some(v) + } + + case request.FieldIDName: + if v, ok := value.(string); ok { + commit.FieldID = immutable.Some(v) + } + + case request.OrderClause: + v, ok := value.(map[string]any) + if !ok { + continue // value is nil + } + conditions, err := 
ParseConditionsInOrder(argument.Value.(*ast.ObjectValue), v) if err != nil { return nil, err } commit.OrderBy = immutable.Some(request.OrderBy{ Conditions: conditions, }) - } else if prop == request.LimitClause { - commit.Limit = immutable.Some(uint64(arguments[prop].(int32))) - } else if prop == request.OffsetClause { - commit.Offset = immutable.Some(uint64(arguments[prop].(int32))) - } else if prop == request.DepthClause { - commit.Depth = immutable.Some(uint64(arguments[prop].(int32))) - } else if prop == request.GroupByClause { - fields := []string{} - for _, v := range arguments[prop].([]any) { - fields = append(fields, v.(string)) + + case request.LimitClause: + if v, ok := value.(int32); ok { + commit.Limit = immutable.Some(uint64(v)) + } + + case request.OffsetClause: + if v, ok := value.(int32); ok { + commit.Offset = immutable.Some(uint64(v)) + } + + case request.DepthClause: + if v, ok := value.(int32); ok { + commit.Depth = immutable.Some(uint64(v)) + } + + case request.GroupByClause: + v, ok := value.([]any) + if !ok { + continue // value is nil + } + fields := make([]string, len(v)) + for i, c := range v { + fields[i] = c.(string) } commit.GroupBy = immutable.Some(request.GroupBy{ Fields: fields, @@ -91,6 +121,9 @@ func parseCommitSelect( } commit.Fields, err = parseSelectFields(exe, fieldObject, field.SelectionSet) + if err != nil { + return nil, err + } return commit, err } diff --git a/internal/request/graphql/parser/filter.go b/internal/request/graphql/parser/filter.go index a8ed1ae85f..aa65f77dd2 100644 --- a/internal/request/graphql/parser/filter.go +++ b/internal/request/graphql/parser/filter.go @@ -78,16 +78,9 @@ func ParseConditionsInOrder(stmt *ast.ObjectValue, args map[string]any) ([]reque for _, field := range stmt.Fields { switch v := args[field.Name.Value].(type) { case int: // base direction parsed (hopefully, check NameToOrderDirection) - var dir request.OrderDirection - switch v { - case 0: - dir = request.ASC - - case 1: - dir = request.DESC - - default: - return nil, ErrInvalidOrderDirection + dir, err := parseOrderDirection(v) + if err != nil { + return nil, err } conditions = append(conditions, request.OrderCondition{ Fields: []string{field.Name.Value}, @@ -109,6 +102,9 @@ func ParseConditionsInOrder(stmt *ast.ObjectValue, args map[string]any) ([]reque conditions = append(conditions, cond) } + case nil: + continue // ignore nil filter input + default: return nil, client.NewErrUnhandledType("parseConditionInOrder", v) } @@ -199,3 +195,16 @@ func parseFilterFieldsForDescriptionSlice( } return fields, nil } + +func parseOrderDirection(v int) (request.OrderDirection, error) { + switch v { + case 0: + return request.ASC, nil + + case 1: + return request.DESC, nil + + default: + return request.ASC, ErrInvalidOrderDirection + } +} diff --git a/internal/request/graphql/parser/mutation.go b/internal/request/graphql/parser/mutation.go index 95785b78c9..3fa10195f4 100644 --- a/internal/request/graphql/parser/mutation.go +++ b/internal/request/graphql/parser/mutation.go @@ -95,38 +95,60 @@ func parseMutation(exe *gql.ExecutionContext, parent *gql.Object, field *ast.Fie mut.Collection = strings.Join(mutNameParts[1:], "_") } - // parse arguments for _, argument := range field.Arguments { - prop := argument.Name.Value - // parse each individual arg type seperately - if prop == request.Input { // parse input - mut.Input = arguments[prop].(map[string]any) - } else if prop == request.Inputs { - inputsValue := arguments[prop].([]any) - inputs := make([]map[string]any, 
len(inputsValue)) - for i, v := range inputsValue { + name := argument.Name.Value + value := arguments[name] + + switch name { + case request.Input: + if v, ok := value.(map[string]any); ok { + mut.Input = v + } + + case request.Inputs: + v, ok := value.([]any) + if !ok { + continue // value is nil + } + inputs := make([]map[string]any, len(v)) + for i, v := range v { inputs[i] = v.(map[string]any) } mut.Inputs = inputs - } else if prop == request.FilterClause { // parse filter - mut.Filter = immutable.Some(request.Filter{ - Conditions: arguments[prop].(map[string]any), - }) - } else if prop == request.DocIDArgName { - mut.DocIDs = immutable.Some([]string{arguments[prop].(string)}) - } else if prop == request.DocIDsArgName { - docIDsValue := arguments[prop].([]any) - docIDs := make([]string, len(docIDsValue)) - for i, v := range docIDsValue { + + case request.FilterClause: + if v, ok := value.(map[string]any); ok { + mut.Filter = immutable.Some(request.Filter{Conditions: v}) + } + + case request.DocIDArgName: + if v, ok := value.(string); ok { + mut.DocIDs = immutable.Some([]string{v}) + } + + case request.DocIDsArgName: + v, ok := value.([]any) + if !ok { + continue // value is nil + } + docIDs := make([]string, len(v)) + for i, v := range v { docIDs[i] = v.(string) } mut.DocIDs = immutable.Some(docIDs) - } else if prop == request.EncryptDocArgName { - mut.Encrypt = arguments[prop].(bool) - } else if prop == request.EncryptFieldsArgName { - fieldsValue := arguments[prop].([]any) - fields := make([]string, len(fieldsValue)) - for i, v := range fieldsValue { + + case request.EncryptDocArgName: + if v, ok := value.(bool); ok { + mut.Encrypt = v + } + + case request.EncryptFieldsArgName: + v, ok := value.([]any) + if !ok { + continue // value is nil + } + fields := make([]string, len(v)) + for i, v := range v { fields[i] = v.(string) } mut.EncryptFields = fields @@ -144,5 +166,9 @@ func parseMutation(exe *gql.ExecutionContext, parent *gql.Object, field *ast.Fie } mut.Fields, err = parseSelectFields(exe, fieldObject, field.SelectionSet) + if err != nil { + return nil, err + } + return mut, err } diff --git a/internal/request/graphql/parser/query.go b/internal/request/graphql/parser/query.go index 1284463ac0..47d6a70493 100644 --- a/internal/request/graphql/parser/query.go +++ b/internal/request/graphql/parser/query.go @@ -101,53 +101,77 @@ func parseSelect( fieldDef := gql.GetFieldDef(exe.Schema, parent, field.Name.Value) arguments := gql.GetArgumentValues(fieldDef.Args, field.Arguments, exe.VariableValues) - // parse arguments for _, argument := range field.Arguments { name := argument.Name.Value value := arguments[name] - // parse filter switch name { case request.FilterClause: - slct.Filter = immutable.Some(request.Filter{ - Conditions: value.(map[string]any), - }) + if v, ok := value.(map[string]any); ok { + slct.Filter = immutable.Some(request.Filter{Conditions: v}) + } + case request.DocIDArgName: // parse single DocID field - slct.DocIDs = immutable.Some([]string{value.(string)}) + if v, ok := value.(string); ok { + slct.DocIDs = immutable.Some([]string{v}) + } + case request.DocIDsArgName: - docIDValues := value.([]any) - docIDs := make([]string, len(docIDValues)) - for i, value := range docIDValues { + v, ok := value.([]any) + if !ok { + continue // value is nil + } + docIDs := make([]string, len(v)) + for i, value := range v { docIDs[i] = value.(string) } slct.DocIDs = immutable.Some(docIDs) + case request.Cid: // parse single CID query field - slct.CID = 
immutable.Some(value.(string)) + if v, ok := value.(string); ok { + slct.CID = immutable.Some(v) + } + case request.LimitClause: // parse limit/offset - slct.Limit = immutable.Some(uint64(value.(int32))) + if v, ok := value.(int32); ok { + slct.Limit = immutable.Some(uint64(v)) + } + case request.OffsetClause: // parse limit/offset - slct.Offset = immutable.Some(uint64(value.(int32))) + if v, ok := value.(int32); ok { + slct.Offset = immutable.Some(uint64(v)) + } + case request.OrderClause: // parse order by - conditionsAST := argument.Value.(*ast.ObjectValue) - conditionsValue := value.(map[string]any) - conditions, err := ParseConditionsInOrder(conditionsAST, conditionsValue) + v, ok := value.(map[string]any) + if !ok { + continue // value is nil + } + conditions, err := ParseConditionsInOrder(argument.Value.(*ast.ObjectValue), v) if err != nil { return nil, err } slct.OrderBy = immutable.Some(request.OrderBy{ Conditions: conditions, }) + case request.GroupByClause: - fieldsValue := value.([]any) - fields := make([]string, len(fieldsValue)) - for i, v := range fieldsValue { - fields[i] = v.(string) + v, ok := value.([]any) + if !ok { + continue // value is nil + } + fields := make([]string, len(v)) + for i, c := range v { + fields[i] = c.(string) } slct.GroupBy = immutable.Some(request.GroupBy{ Fields: fields, }) + case request.ShowDeleted: - slct.ShowDeleted = value.(bool) + if v, ok := value.(bool); ok { + slct.ShowDeleted = v + } } } @@ -175,96 +199,29 @@ func parseAggregate( parent *gql.Object, field *ast.Field, ) (*request.Aggregate, error) { - targets := make([]*request.AggregateTarget, len(field.Arguments)) - fieldDef := gql.GetFieldDef(exe.Schema, parent, field.Name.Value) arguments := gql.GetArgumentValues(fieldDef.Args, field.Arguments, exe.VariableValues) - for i, argument := range field.Arguments { + var targets []*request.AggregateTarget + for _, argument := range field.Arguments { name := argument.Name.Value - value := arguments[name] - switch v := value.(type) { + switch v := arguments[name].(type) { case string: - targets[i] = &request.AggregateTarget{ + targets = append(targets, &request.AggregateTarget{ HostName: v, - } + }) + case map[string]any: - var childName string - var filter immutable.Option[request.Filter] - var limit immutable.Option[uint64] - var offset immutable.Option[uint64] - var order immutable.Option[request.OrderBy] - - for _, f := range argument.Value.(*ast.ObjectValue).Fields { - switch f.Name.Value { - case request.FieldName: - childName = v[request.FieldName].(string) - - case request.FilterClause: - filter = immutable.Some(request.Filter{ - Conditions: v[request.FilterClause].(map[string]any), - }) - - case request.LimitClause: - limit = immutable.Some(uint64(v[request.LimitClause].(int32))) - - case request.OffsetClause: - offset = immutable.Some(uint64(v[request.OffsetClause].(int32))) - - case request.OrderClause: - switch conditionsAST := f.Value.(type) { - case *ast.EnumValue: - // For inline arrays the order arg will be a simple enum declaring the order direction - var orderDirection request.OrderDirection - switch v[request.OrderClause].(int) { - case 0: - orderDirection = request.ASC - - case 1: - orderDirection = request.DESC - - default: - return nil, ErrInvalidOrderDirection - } - - order = immutable.Some(request.OrderBy{ - Conditions: []request.OrderCondition{{ - Direction: orderDirection, - }}, - }) - - case *ast.ObjectValue: - // For relations the order arg will be the complex order object as used by the host object - // for 
non-aggregate ordering - conditionsValue := v[request.OrderClause].(map[string]any) - conditions, err := ParseConditionsInOrder(conditionsAST, conditionsValue) - if err != nil { - return nil, err - } - order = immutable.Some(request.OrderBy{ - Conditions: conditions, - }) - } - } + value, ok := argument.Value.(*ast.ObjectValue) + if !ok { + continue // value is nil } - - targets[i] = &request.AggregateTarget{ - HostName: name, - ChildName: immutable.Some(childName), - Filterable: request.Filterable{ - Filter: filter, - }, - Limitable: request.Limitable{ - Limit: limit, - }, - Offsetable: request.Offsetable{ - Offset: offset, - }, - Orderable: request.Orderable{ - OrderBy: order, - }, + target, err := parseAggregateTarget(name, value, v) + if err != nil { + return nil, err } + targets = append(targets, target) } } @@ -276,3 +233,91 @@ func parseAggregate( Targets: targets, }, nil } + +func parseAggregateTarget( + hostName string, + value *ast.ObjectValue, + arguments map[string]any, +) (*request.AggregateTarget, error) { + var childName string + var filter immutable.Option[request.Filter] + var limit immutable.Option[uint64] + var offset immutable.Option[uint64] + var order immutable.Option[request.OrderBy] + + for _, f := range value.Fields { + name := f.Name.Value + value := arguments[name] + + switch name { + case request.FieldName: + if v, ok := value.(string); ok { + childName = v + } + + case request.FilterClause: + if v, ok := value.(map[string]any); ok { + filter = immutable.Some(request.Filter{Conditions: v}) + } + + case request.LimitClause: + if v, ok := value.(int32); ok { + limit = immutable.Some(uint64(v)) + } + + case request.OffsetClause: + if v, ok := value.(int32); ok { + offset = immutable.Some(uint64(v)) + } + + case request.OrderClause: + switch conditionsAST := f.Value.(type) { + case *ast.EnumValue: + // For inline arrays the order arg will be a simple enum declaring the order direction + v, ok := value.(int) + if !ok { + continue // value is nil + } + dir, err := parseOrderDirection(v) + if err != nil { + return nil, err + } + order = immutable.Some(request.OrderBy{ + Conditions: []request.OrderCondition{{Direction: dir}}, + }) + + case *ast.ObjectValue: + // For relations the order arg will be the complex order object as used by the host object + // for non-aggregate ordering + v, ok := value.(map[string]any) + if !ok { + continue // value is nil + } + conditions, err := ParseConditionsInOrder(conditionsAST, v) + if err != nil { + return nil, err + } + order = immutable.Some(request.OrderBy{ + Conditions: conditions, + }) + } + } + } + + return &request.AggregateTarget{ + HostName: hostName, + ChildName: immutable.Some(childName), + Filterable: request.Filterable{ + Filter: filter, + }, + Limitable: request.Limitable{ + Limit: limit, + }, + Offsetable: request.Offsetable{ + Offset: offset, + }, + Orderable: request.Orderable{ + OrderBy: order, + }, + }, nil +} diff --git a/internal/request/graphql/parser/subscription.go b/internal/request/graphql/parser/subscription.go index 4c6f5e3f5f..82aca83302 100644 --- a/internal/request/graphql/parser/subscription.go +++ b/internal/request/graphql/parser/subscription.go @@ -58,10 +58,8 @@ func parseSubscription(exe *gql.ExecutionContext, field *ast.Field) (*request.Ob fieldDef := gql.GetFieldDef(exe.Schema, exe.Schema.QueryType(), field.Name.Value) arguments := gql.GetArgumentValues(fieldDef.Args, field.Arguments, exe.VariableValues) - if v, ok := arguments[request.FilterClause]; ok { - sub.Filter = 
immutable.Some(request.Filter{ - Conditions: v.(map[string]any), - }) + if v, ok := arguments[request.FilterClause].(map[string]any); ok { + sub.Filter = immutable.Some(request.Filter{Conditions: v}) } // parse field selections @@ -71,5 +69,8 @@ func parseSubscription(exe *gql.ExecutionContext, field *ast.Field) (*request.Ob } sub.Fields, err = parseSelectFields(exe, fieldObject, field.SelectionSet) + if err != nil { + return nil, err + } return sub, err } diff --git a/tests/integration/mutation/create/with_null_input_test.go b/tests/integration/mutation/create/with_null_input_test.go new file mode 100644 index 0000000000..72cec7a7a0 --- /dev/null +++ b/tests/integration/mutation/create/with_null_input_test.go @@ -0,0 +1,141 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package create + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationCreate_WithNullEncrypt_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple create mutation, with null encrypt", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.Request{ + Request: `mutation { + create_Users(encrypt: null, input: {name: "Bob"}) { + name + } + }`, + Results: map[string]any{ + "create_Users": []map[string]any{ + { + "name": "Bob", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationCreate_WithNullInput_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple create mutation, with null input", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.Request{ + Request: `mutation { + create_Users(input: null, inputs: [{name: "Bob"}]) { + name + } + }`, + Results: map[string]any{ + "create_Users": []map[string]any{ + { + "name": "Bob", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationCreate_WithNullInputs_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple create mutation, with null inputs", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.Request{ + Request: `mutation { + create_Users(inputs: null, input: {name: "Bob"}) { + name + } + }`, + Results: map[string]any{ + "create_Users": []map[string]any{ + { + "name": "Bob", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationCreate_WithNullEncryptFields_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple create mutation, with null encryptFields", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.Request{ + Request: `mutation { + create_Users(encryptFields: null, input: {name: "Bob"}) { + name + } + }`, + Results: map[string]any{ + "create_Users": []map[string]any{ + { + "name": "Bob", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/delete/with_null_input_test.go b/tests/integration/mutation/delete/with_null_input_test.go new file mode 100644 index 0000000000..1619adc64f --- 
/dev/null +++ b/tests/integration/mutation/delete/with_null_input_test.go @@ -0,0 +1,125 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package delete + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationDelete_WithNullFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple delete mutation, with null filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.CreateDoc{ + DocMap: map[string]any{ + "name": "Bob", + }, + }, + testUtils.Request{ + Request: `mutation { + delete_Users(filter: null) { + name + } + }`, + Results: map[string]any{ + "delete_Users": []map[string]any{ + { + "name": "Bob", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationDelete_WithNullDocID_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple delete mutation, with null docID", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.CreateDoc{ + DocMap: map[string]any{ + "name": "Bob", + }, + }, + testUtils.Request{ + Request: `mutation { + delete_Users(docID: null) { + name + } + }`, + Results: map[string]any{ + "delete_Users": []map[string]any{ + { + "name": "Bob", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationDelete_WithNullDocIDs_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple delete mutation, with null docIDs", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.CreateDoc{ + DocMap: map[string]any{ + "name": "Bob", + }, + }, + testUtils.Request{ + Request: `mutation { + delete_Users(docIDs: null) { + name + } + }`, + Results: map[string]any{ + "delete_Users": []map[string]any{ + { + "name": "Bob", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/update/with_null_input_test.go b/tests/integration/mutation/update/with_null_input_test.go new file mode 100644 index 0000000000..6c26db63a5 --- /dev/null +++ b/tests/integration/mutation/update/with_null_input_test.go @@ -0,0 +1,125 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package update + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpdate_WithNullFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation, with null filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.CreateDoc{ + DocMap: map[string]any{ + "name": "Bob", + }, + }, + testUtils.Request{ + Request: `mutation { + update_Users(filter: null, input: {name: "Alice"}) { + name + } + }`, + Results: map[string]any{ + "update_Users": []map[string]any{ + { + "name": "Alice", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithNullDocID_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation, with null docID", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.CreateDoc{ + DocMap: map[string]any{ + "name": "Bob", + }, + }, + testUtils.Request{ + Request: `mutation { + update_Users(docID: null, input: {name: "Alice"}) { + name + } + }`, + Results: map[string]any{ + "update_Users": []map[string]any{ + { + "name": "Alice", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpdate_WithNullDocIDs_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation, with null docIDs", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.CreateDoc{ + DocMap: map[string]any{ + "name": "Bob", + }, + }, + testUtils.Request{ + Request: `mutation { + update_Users(docIDs: null, input: {name: "Alice"}) { + name + } + }`, + Results: map[string]any{ + "update_Users": []map[string]any{ + { + "name": "Alice", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/commits/with_null_input_test.go b/tests/integration/query/commits/with_null_input_test.go new file mode 100644 index 0000000000..84a257b332 --- /dev/null +++ b/tests/integration/query/commits/with_null_input_test.go @@ -0,0 +1,321 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package commits + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryCommitsWithNullDepth(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple all commits query with null depth", + Actions: []any{ + updateUserCollectionSchema(), + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John", + "age": 21 + }`, + }, + testUtils.Request{ + Request: `query { + commits(depth: null) { + cid + } + }`, + Results: map[string]any{ + "commits": []map[string]any{ + { + "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + }, + { + "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + }, + { + "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryCommitsWithNullCID(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple all commits query with null cid", + Actions: []any{ + updateUserCollectionSchema(), + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John", + "age": 21 + }`, + }, + testUtils.Request{ + Request: `query { + commits(cid: null) { + cid + } + }`, + Results: map[string]any{ + "commits": []map[string]any{ + { + "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + }, + { + "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + }, + { + "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryCommitsWithNullFieldID(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple all commits query with null fieldId", + Actions: []any{ + updateUserCollectionSchema(), + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John", + "age": 21 + }`, + }, + testUtils.Request{ + Request: `query { + commits(fieldId: null) { + cid + } + }`, + Results: map[string]any{ + "commits": []map[string]any{ + { + "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + }, + { + "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + }, + { + "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryCommitsWithNullOrder(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple all commits query with null order", + Actions: []any{ + updateUserCollectionSchema(), + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John", + "age": 21 + }`, + }, + testUtils.Request{ + Request: `query { + commits(order: null) { + cid + } + }`, + Results: map[string]any{ + "commits": []map[string]any{ + { + "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + }, + { + "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + }, + { + "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryCommitsWithNullOrderField(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple all commits query with null order field", + Actions: []any{ + updateUserCollectionSchema(), + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John", + "age": 21 + }`, + }, + testUtils.Request{ + Request: `query { + commits(docID: null) { + cid + } + }`, + Results: map[string]any{ + "commits": []map[string]any{ + { + "cid": 
"bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + }, + { + "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + }, + { + "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryCommitsWithNullLimit(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple all commits query with null limit", + Actions: []any{ + updateUserCollectionSchema(), + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John", + "age": 21 + }`, + }, + testUtils.Request{ + Request: `query { + commits(limit: null) { + cid + } + }`, + Results: map[string]any{ + "commits": []map[string]any{ + { + "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + }, + { + "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + }, + { + "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryCommitsWithNullOffset(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple all commits query with null offset", + Actions: []any{ + updateUserCollectionSchema(), + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John", + "age": 21 + }`, + }, + testUtils.Request{ + Request: `query { + commits(offset: null) { + cid + } + }`, + Results: map[string]any{ + "commits": []map[string]any{ + { + "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + }, + { + "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + }, + { + "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryCommitsWithNullGroupBy(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple all commits query with null groupBy", + Actions: []any{ + updateUserCollectionSchema(), + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John", + "age": 21 + }`, + }, + testUtils.Request{ + Request: `query { + commits(groupBy: null) { + cid + } + }`, + Results: map[string]any{ + "commits": []map[string]any{ + { + "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + }, + { + "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + }, + { + "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_null_input_test.go b/tests/integration/query/simple/with_null_input_test.go new file mode 100644 index 0000000000..ceba642887 --- /dev/null +++ b/tests/integration/query/simple/with_null_input_test.go @@ -0,0 +1,336 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQuerySimple_WithNullFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with null filter", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: null) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithNullFilterFields_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with null filter fields", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": null + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Name: null}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": nil, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithNullOrder_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with null order", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + Users(order: null) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithNullOrderFields_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with null order fields", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + Users(order: {Name: null}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithNullLimit_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with null limit", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + Users(limit: null) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithNullOffset_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with null offset", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + Users(offset: null) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithNullDocID_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with null docID", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + Users(docID: null) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithNullDocIDs_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with null docIDs", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + Users(docIDs: null) { + Name + } + }`, + Results: 
map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithNullCID_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with null cid", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + Users(cid: null) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithNullGroupBy_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with null groupBy", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: null) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithNullShowDeleted_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with null showDeleted", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + Users(showDeleted: null) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} From 0e91a4912f41e5e487f1d718e0fb1d419cafd836 Mon Sep 17 00:00:00 2001 From: Shahzad Lone Date: Wed, 18 Sep 2024 17:02:33 -0400 Subject: [PATCH 25/71] ci(i): Use composites & split test matrix into smaller jobs (#3027) ## Relevant issue(s) Resolves #3025 ## Description ### Problem More about the problem in the linked issue #3025 ### Changes - Reusable composite actions are introduced under `.github/composites` folder. - Still the same number of jobs as before. - `badger-memory` name is now shown as `memory` - `badger-file` name is now shown as `file` - Split the basic 18 tests into a separate matric job. - All `includes` tests we had before are now split in their own jobs. - If a job fails, have more granular control now to re-run the job without waiting for all to finish. - The job names won't clash with github ui with `...` anymore. - The job names will remain static now. - Upon new tests being introduced we don't have to change previous required branch protection rules, just add new ones. - Upon new tests being introduced the author doesn't need to worry about making it required in the same PR. - Easy to install sourcehub locally for any ci action now - Easy to setup defradb with caching optimizations for any action now - Change detector also now utilizes caching logic now, due to ease of use of composites. - Updated the workflow name to be shorter as it looked odd in the readme before. ### New test names and action flow: ## How has this been tested? 
- Example run: https://github.com/sourcenetwork/defradb/actions/runs/10914739872?pr=3027 --- .../composites/install-sourcehub/action.yml | 36 +++ .github/composites/setup-defradb/action.yml | 70 +++++ .../test-coverage-with-artifact/action.yml | 44 +++ .../workflows/check-data-format-changes.yml | 13 +- .../workflows/test-and-upload-coverage.yml | 265 ----------------- .github/workflows/test-coverage.yml | 280 ++++++++++++++++++ README.md | 2 +- 7 files changed, 433 insertions(+), 277 deletions(-) create mode 100644 .github/composites/install-sourcehub/action.yml create mode 100644 .github/composites/setup-defradb/action.yml create mode 100644 .github/composites/test-coverage-with-artifact/action.yml delete mode 100644 .github/workflows/test-and-upload-coverage.yml create mode 100644 .github/workflows/test-coverage.yml diff --git a/.github/composites/install-sourcehub/action.yml b/.github/composites/install-sourcehub/action.yml new file mode 100644 index 0000000000..78c3235d14 --- /dev/null +++ b/.github/composites/install-sourcehub/action.yml @@ -0,0 +1,36 @@ +# Copyright 2024 Democratized Data Foundation +# +# Use of this software is governed by the Business Source License +# included in the file licenses/BSL.txt. +# +# As of the Change Date specified in that file, in accordance with +# the Business Source License, use of this software will be governed +# by the Apache License, Version 2.0, included in the file +# licenses/APL.txt. + +name: 'Install SourceHub' + +description: 'Composite action to install sourcehub' + +inputs: + ref: + description: 'The branch, tag or SHA to install' + required: true + default: 'coverage_default' + +runs: + # This is a composite action, setting this is required. + using: "composite" + + steps: + - name: Checkout sourcehub code into the directory + uses: actions/checkout@v4 + with: + repository: sourcenetwork/sourcehub + path: _sourceHub + ref: ${{ inputs.ref }} + + - name: Install SourceHub CLI + working-directory: _sourceHub + shell: bash # It's required for run step to specify shell in a composite action. + run: make install diff --git a/.github/composites/setup-defradb/action.yml b/.github/composites/setup-defradb/action.yml new file mode 100644 index 0000000000..23e9c831f4 --- /dev/null +++ b/.github/composites/setup-defradb/action.yml @@ -0,0 +1,70 @@ +# Copyright 2024 Democratized Data Foundation +# +# Use of this software is governed by the Business Source License +# included in the file licenses/BSL.txt. +# +# As of the Change Date specified in that file, in accordance with +# the Business Source License, use of this software will be governed +# by the Apache License, Version 2.0, included in the file +# licenses/APL.txt. + +name: 'Setup DefraDB Composite' + +description: 'Composite action to setup defradb' + +runs: + # This is a composite action, setting this is required. + using: "composite" + + steps: + - name: Setup Go environment explicitly + uses: actions/setup-go@v5 + with: + go-version-file: 'go.mod' + check-latest: true + cache: false + + - name: Set cache paths + id: cache-paths + shell: bash # It's required for run step to specify shell in a composite action. 
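+      # Query `go env` here so the cache steps below key on the directories this toolchain actually uses.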
+ run: | + echo "GO_CACHE=$(go env GOCACHE)" >> "${GITHUB_OUTPUT}" + echo "GO_MODCACHE=$(go env GOMODCACHE)" >> "${GITHUB_OUTPUT}" + echo "CARGO_CACHE=~/.cargo" >> "${GITHUB_OUTPUT}" + + - name: Go cache/restore + uses: actions/cache@v4 + with: + key: ${{ runner.os }}-go-${{ hashFiles('**/go.mod') }} + path: | + ${{ steps.cache-paths.outputs.GO_CACHE }} + ${{ steps.cache-paths.outputs.GO_MODCACHE }} + + - name: Cargo cache/restore + # A very cool post: https://blog.arriven.wtf/posts/rust-ci-cache + uses: actions/cache@v4 + with: + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.toml') }} + # Here are some directories we shouldn't forget about: + # ~/.cargo/.* + # ~/.cargo/bin/ + # ~/.cargo/git/db/ + # ~/.cargo/registry/cache/ + # ~/.cargo/registry/index/ + # **/target/*/*.d + # **/target/*/*.rlib + # **/target/*/.fingerprint + # **/target/*/build + # **/target/*/deps + path: | + ${{ steps.cache-paths.outputs.CARGO_CACHE }} + **/target/ + + - name: Restore modified time + uses: chetan/git-restore-mtime-action@v2 + + - name: Build dependencies + shell: bash # It's required for run step to specify shell in a composite action. + run: | + make deps:modules + make deps:test diff --git a/.github/composites/test-coverage-with-artifact/action.yml b/.github/composites/test-coverage-with-artifact/action.yml new file mode 100644 index 0000000000..9d8983a005 --- /dev/null +++ b/.github/composites/test-coverage-with-artifact/action.yml @@ -0,0 +1,44 @@ +# Copyright 2024 Democratized Data Foundation +# +# Use of this software is governed by the Business Source License +# included in the file licenses/BSL.txt. +# +# As of the Change Date specified in that file, in accordance with +# the Business Source License, use of this software will be governed +# by the Apache License, Version 2.0, included in the file +# licenses/APL.txt. + +name: 'Test Coverage And Save Artifact' + +description: 'Composite action to run the test with coverage and save the report as artifact' + +inputs: + coverage-artifact-name: + description: 'Name of the artifact that will save coverage report' + required: true + default: 'coverage_default' + + coverage-path: + description: 'Path to coverage file' + required: false + default: 'coverage.txt' + +runs: + # This is a composite action, setting this is required. + using: "composite" + + steps: + - name: Run integration tests + shell: bash # It's required for run step to specify shell in a composite action. + run: make test:coverage + + - name: Upload coverage artifact + uses: actions/upload-artifact@v4 + with: + # Make sure the name is always unique per job as artifacts are now immutable. 
+ # Note Issue: https://github.com/actions/upload-artifact/issues/478 + # Solve: https://github.com/actions/upload-artifact/issues/478#issuecomment-1885470013 + name: ${{ inputs.coverage-artifact-name }} + path: ${{ inputs.coverage-path }} + if-no-files-found: error + retention-days: 7 diff --git a/.github/workflows/check-data-format-changes.yml b/.github/workflows/check-data-format-changes.yml index b9b2406c89..fa2318204c 100644 --- a/.github/workflows/check-data-format-changes.yml +++ b/.github/workflows/check-data-format-changes.yml @@ -34,17 +34,8 @@ jobs: - name: Checkout code into the directory uses: actions/checkout@v4 - - name: Setup Go environment explicitly - uses: actions/setup-go@v5 - with: - go-version-file: 'go.mod' - check-latest: true - cache: false - - - name: Build dependencies - run: | - make deps:modules - make deps:test + - name: Setup defradb + uses: ./.github/composites/setup-defradb - name: Run data format change detection tests run: make test:changes diff --git a/.github/workflows/test-and-upload-coverage.yml b/.github/workflows/test-and-upload-coverage.yml deleted file mode 100644 index d1fca2a943..0000000000 --- a/.github/workflows/test-and-upload-coverage.yml +++ /dev/null @@ -1,265 +0,0 @@ -# Copyright 2022 Democratized Data Foundation -# -# Use of this software is governed by the Business Source License -# included in the file licenses/BSL.txt. -# -# As of the Change Date specified in that file, in accordance with -# the Business Source License, use of this software will be governed -# by the Apache License, Version 2.0, included in the file -# licenses/APL.txt. - -name: Test And Upload Coverage Workflow - -on: - pull_request: - branches: - - master - - develop - - push: - tags: - - 'v[0-9]+.[0-9]+.[0-9]+' - branches: - - master - - develop - -jobs: - run-tests: - name: Run tests matrix job - - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest] - client-type: [go, http, cli] - database-type: [badger-file, badger-memory] - mutation-type: [gql, collection-named, collection-save] - lens-type: [wasm-time] - acp-type: [local] - database-encryption: [false] - view-type: [cacheless] - include: - - os: ubuntu-latest - client-type: go - database-type: badger-memory - mutation-type: collection-save - lens-type: wasm-time - acp-type: local - database-encryption: true - view-type: cacheless - - os: ubuntu-latest - client-type: go - database-type: badger-memory - mutation-type: collection-save - lens-type: wazero - acp-type: local - database-encryption: false - view-type: cacheless - - os: ubuntu-latest - client-type: go - database-type: badger-memory - mutation-type: collection-save - lens-type: wasmer - acp-type: local - database-encryption: false - view-type: cacheless - - os: ubuntu-latest - client-type: go - database-type: badger-memory - mutation-type: collection-save - lens-type: wasm-time - acp-type: source-hub - database-encryption: false - view-type: cacheless - - os: ubuntu-latest - client-type: http - database-type: badger-memory - mutation-type: collection-save - lens-type: wasm-time - acp-type: source-hub - database-encryption: false - view-type: cacheless - - os: ubuntu-latest - client-type: cli - database-type: badger-memory - mutation-type: collection-save - lens-type: wasm-time - acp-type: source-hub - database-encryption: false - view-type: cacheless - - os: ubuntu-latest - client-type: go - database-type: badger-memory - mutation-type: collection-save - lens-type: wasm-time - acp-type: local - database-encryption: false - view-type: 
materialized - - os: macos-latest - client-type: go - database-type: badger-memory - mutation-type: collection-save - lens-type: wasm-time - acp-type: local - database-encryption: false - view-type: cacheless -## TODO: https://github.com/sourcenetwork/defradb/issues/2080 -## Uncomment the lines below to Re-enable the windows build once this todo is resolved. -## - os: windows-latest -## client-type: go -## database-type: badger-memory -## mutation-type: collection-save -## lens-type: wasm-time -## acp-type: local -## database-encryption: false -## view-type: cacheless - - runs-on: ${{ matrix.os }} - - # We run all runners via the bash shell to provide us with a consistent set of env variables and commands - defaults: - run: - shell: bash - - env: - CGO_ENABLED: 1 - DEFRA_CLIENT_GO: ${{ matrix.client-type == 'go' }} - DEFRA_CLIENT_HTTP: ${{ matrix.client-type == 'http' }} - DEFRA_CLIENT_CLI: ${{ matrix.client-type == 'cli' }} - DEFRA_BADGER_MEMORY: ${{ matrix.database-type == 'badger-memory' }} - DEFRA_BADGER_FILE: ${{ matrix.database-type == 'badger-file' }} - DEFRA_BADGER_ENCRYPTION: ${{ matrix.database-encryption }} - DEFRA_MUTATION_TYPE: ${{ matrix.mutation-type }} - DEFRA_LENS_TYPE: ${{ matrix.lens-type }} - DEFRA_ACP_TYPE: ${{ matrix.acp-type }} - DEFRA_VIEW_TYPE: ${{ matrix.view-type }} - - steps: - - name: Checkout code into the directory - uses: actions/checkout@v4 - - - name: Setup Go environment explicitly - uses: actions/setup-go@v5 - with: - go-version-file: 'go.mod' - check-latest: true - cache: false - - - name: Set cache paths - id: cache-paths - shell: bash - run: | - echo "GO_CACHE=$(go env GOCACHE)" >> "${GITHUB_OUTPUT}" - echo "GO_MODCACHE=$(go env GOMODCACHE)" >> "${GITHUB_OUTPUT}" - echo "CARGO_CACHE=~/.cargo" >> "${GITHUB_OUTPUT}" - - - name: Go cache/restore - uses: actions/cache@v4 - with: - key: ${{ runner.os }}-go-${{ hashFiles('**/go.mod') }} - path: | - ${{ steps.cache-paths.outputs.GO_CACHE }} - ${{ steps.cache-paths.outputs.GO_MODCACHE }} - - - name: Cargo cache/restore - # A very cool post: https://blog.arriven.wtf/posts/rust-ci-cache - uses: actions/cache@v4 - with: - key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.toml') }} - # Here are some directories we shouldn't forget about: - # ~/.cargo/.* - # ~/.cargo/bin/ - # ~/.cargo/git/db/ - # ~/.cargo/registry/cache/ - # ~/.cargo/registry/index/ - # **/target/*/*.d - # **/target/*/*.rlib - # **/target/*/.fingerprint - # **/target/*/build - # **/target/*/deps - path: | - ${{ steps.cache-paths.outputs.CARGO_CACHE }} - **/target/ - - - name: Restore modified time - uses: chetan/git-restore-mtime-action@v2 - - - name: Build dependencies - run: | - make deps:modules - make deps:test - - # We have to checkout the source-hub repo and install it ourselves because it - # contains replace commands in its go.mod file. 
- - name: Checkout sourcehub code into the directory - if: ${{ matrix.acp-type == 'source-hub' }} - uses: actions/checkout@v4 - with: - repository: sourcenetwork/sourcehub - path: _sourceHub - # Lock the sourcehub version until the dev branch is stable - # remove this when closed https://github.com/sourcenetwork/defradb/issues/2865 - ref: c232133c35c96924509a4d955a7b450eb3624a15 - - - name: Install SourceHub CLI - if: ${{ matrix.acp-type == 'source-hub' }} - working-directory: _sourceHub - run: make install - - - name: Run integration tests - run: make test:coverage - - - name: Upload coverage artifact - uses: actions/upload-artifact@v4 - with: - # Make sure the name is always unique per job as artifacts are now immutable. - # Note Issue: https://github.com/actions/upload-artifact/issues/478 - # Solve: https://github.com/actions/upload-artifact/issues/478#issuecomment-1885470013 - name: "coverage\ - _${{ matrix.os }}\ - _${{ matrix.client-type }}\ - _${{ matrix.database-type }}\ - _${{ matrix.mutation-type }}\ - _${{ matrix.lens-type }}\ - _${{ matrix.acp-type }}\ - _${{ matrix.view-type }}\ - _${{ matrix.database-encryption }}\ - " - path: coverage.txt - if-no-files-found: error - retention-days: 7 - - upload-coverage: - name: Upload test code coverage job - - needs: run-tests - - # Important to know: - # - We didn't use `if: always()` here, so this job doesn't run if we manually canceled. - # - `if: success()` is always implied unless `always()` or `failure()` is specified. - if: success() || failure() - - runs-on: ubuntu-latest - - steps: - - name: Checkout code into the directory - uses: actions/checkout@v4 - - - name: Download coverage reports - uses: actions/download-artifact@v4 - with: - pattern: coverage_* - # Note: https://github.com/actions/download-artifact/blob/main/docs/MIGRATION.md - merge-multiple: false - path: coverage_reports - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v4 - with: - token: ${{ secrets.CODECOV_TOKEN }} - name: defradb-codecov - files: coverage_reports/**/*.txt - flags: all-tests - os: 'linux' - fail_ci_if_error: true - verbose: true diff --git a/.github/workflows/test-coverage.yml b/.github/workflows/test-coverage.yml new file mode 100644 index 0000000000..9b8be23ef2 --- /dev/null +++ b/.github/workflows/test-coverage.yml @@ -0,0 +1,280 @@ +# Copyright 2024 Democratized Data Foundation +# +# Use of this software is governed by the Business Source License +# included in the file licenses/BSL.txt. +# +# As of the Change Date specified in that file, in accordance with +# the Business Source License, use of this software will be governed +# by the Apache License, Version 2.0, included in the file +# licenses/APL.txt. + +name: Test Coverage Workflow + +on: + pull_request: + branches: + - master + - develop + + push: + tags: + - 'v[0-9]+.[0-9]+.[0-9]+' + branches: + - master + - develop + + +# Default environment configuration settings. 
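+# Individual jobs selectively override these values with their own `env` blocks below.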
+env: + CGO_ENABLED: 1 + + DEFRA_CLIENT_GO: true + DEFRA_CLIENT_HTTP: false + DEFRA_CLIENT_CLI: false + + DEFRA_BADGER_MEMORY: true + DEFRA_BADGER_FILE: false + + DEFRA_BADGER_ENCRYPTION: false + + DEFRA_MUTATION_TYPE: collection-save + DEFRA_LENS_TYPE: wasm-time + DEFRA_ACP_TYPE: local + DEFRA_VIEW_TYPE: cacheless + + +# We run all runners via the bash shell to provide us with a consistent set of env variables and commands +defaults: + run: + shell: bash + + +jobs: + # The basic matrix job tests the combination of client, database and mutation types using + # the default config settings for other options, all running on linux. + test-basic: + name: Test job + + strategy: + fail-fast: false + matrix: + client-type: [go, http, cli] + database-type: [file, memory] + mutation-type: [gql, collection-named, collection-save] + + runs-on: ubuntu-latest + + # Overwrite the defaults based on the basic matrix + env: + DEFRA_CLIENT_GO: ${{ matrix.client-type == 'go' }} + DEFRA_CLIENT_HTTP: ${{ matrix.client-type == 'http' }} + DEFRA_CLIENT_CLI: ${{ matrix.client-type == 'cli' }} + DEFRA_BADGER_MEMORY: ${{ matrix.database-type == 'memory' }} + DEFRA_BADGER_FILE: ${{ matrix.database-type == 'file' }} + DEFRA_MUTATION_TYPE: ${{ matrix.mutation-type }} + + steps: + - name: Checkout code into the directory + uses: actions/checkout@v4 + + - name: Setup defradb + uses: ./.github/composites/setup-defradb + + - name: Test coverage & save coverage report in an artifact + uses: ./.github/composites/test-coverage-with-artifact + with: + coverage-artifact-name: "coverage_basic\ + _${{ matrix.client-type }}\ + _${{ matrix.database-type }}\ + _${{ matrix.mutation-type }}\ + " + coverage-path: coverage.txt + + + # This job runs the tests on other operating systems using default configurations. + test-os: + name: Test os job + + strategy: + fail-fast: false + matrix: + os: + - macos-latest + # TODO: https://github.com/sourcenetwork/defradb/issues/2080 + # Uncomment the line below to re-enable the windows build once this todo is resolved. + # - windows-latest + + runs-on: ${{ matrix.os }} + + steps: + - name: Checkout code into the directory + uses: actions/checkout@v4 + + - name: Setup defradb + uses: ./.github/composites/setup-defradb + + - name: Test coverage & save coverage report in an artifact + uses: ./.github/composites/test-coverage-with-artifact + with: + coverage-artifact-name: "coverage_os_${{ matrix.os }}" + coverage-path: coverage.txt + + + # The acp matrix job tests the combinations of source-hub acp and client types on linux. + test-acp: + name: Test acp job + + strategy: + fail-fast: false + matrix: + client-type: [go, http, cli] + acp-type: [source-hub] + + runs-on: ubuntu-latest + + env: + DEFRA_ACP_TYPE: ${{ matrix.acp-type }} + DEFRA_CLIENT_GO: ${{ matrix.client-type == 'go' }} + DEFRA_CLIENT_HTTP: ${{ matrix.client-type == 'http' }} + DEFRA_CLIENT_CLI: ${{ matrix.client-type == 'cli' }} + + steps: + - name: Checkout code into the directory + uses: actions/checkout@v4 + + - name: Setup defradb + uses: ./.github/composites/setup-defradb + + # We have to install it ourselves because it contains replace commands in its go.mod file. 
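+      # The install-sourcehub composite checks out sourcenetwork/sourcehub at the given ref and runs `make install`.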
+ - name: Install sourcehub + uses: ./.github/composites/install-sourcehub + with: + # Lock the sourcehub version until the dev branch is stable + # remove this when closed https://github.com/sourcenetwork/defradb/issues/2865 + ref: c232133c35c96924509a4d955a7b450eb3624a15 + + - name: Test coverage & save coverage report in an artifact + uses: ./.github/composites/test-coverage-with-artifact + with: + coverage-artifact-name: "coverage_acp\ + _${{ matrix.acp-type }}\ + _${{ matrix.client-type }}\ + " + coverage-path: coverage.txt + + + # The lens matrix job tests the wazero and wasmer lens on linux. + test-lens: + name: Test lens job + + strategy: + fail-fast: false + matrix: + lens-type: [wazero, wasmer] + + runs-on: ubuntu-latest + + env: + DEFRA_LENS_TYPE: ${{ matrix.lens-type }} + + steps: + - name: Checkout code into the directory + uses: actions/checkout@v4 + + - name: Setup defradb + uses: ./.github/composites/setup-defradb + + - name: Test coverage & save coverage report in an artifact + uses: ./.github/composites/test-coverage-with-artifact + with: + coverage-artifact-name: "coverage_lens_${{ matrix.lens-type }}" + coverage-path: coverage.txt + + + # This job runs the materialized view tests using default configuration, on linux. + test-view: + name: Test view job + + runs-on: ubuntu-latest + + env: + DEFRA_VIEW_TYPE: materialized + + steps: + - name: Checkout code into the directory + uses: actions/checkout@v4 + + - name: Setup defradb + uses: ./.github/composites/setup-defradb + + - name: Test coverage & save coverage report in an artifact + uses: ./.github/composites/test-coverage-with-artifact + with: + coverage-artifact-name: "coverage_view_materialized" + coverage-path: coverage.txt + + + # This job runs the database with encryption tests using default configuration, on linux. + test-encryption: + name: Test encryption job + + runs-on: ubuntu-latest + + env: + DEFRA_BADGER_ENCRYPTION: true + + steps: + - name: Checkout code into the directory + uses: actions/checkout@v4 + + - name: Setup defradb + uses: ./.github/composites/setup-defradb + + - name: Test coverage & save coverage report in an artifact + uses: ./.github/composites/test-coverage-with-artifact + with: + coverage-artifact-name: "coverage_encryption" + coverage-path: coverage.txt + + + ## This job gathers all the coverage reports and uploads them to code-cov + upload-coverage: + name: Upload test code coverage job + + needs: + - test-basic # 18 test(s) + - test-os # 1 test(s) [excluding windows] + - test-acp # 3 test(s) + - test-lens # 2 test(s) + - test-view # 1 test(s) + - test-encryption # 1 test(s) + + # Important to know: + # - We didn't use `if: always()` here, so this job doesn't run if we manually canceled. + # - `if: success()` is always implied unless `always()` or `failure()` is specified. 
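+    # - Using `success() || failure()` means coverage is still uploaded even when some of the test jobs fail.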
+ if: success() || failure() + + runs-on: ubuntu-latest + + steps: + - name: Checkout code into the directory + uses: actions/checkout@v4 + + - name: Download coverage reports + uses: actions/download-artifact@v4 + with: + pattern: coverage_* + # Note: https://github.com/actions/download-artifact/blob/main/docs/MIGRATION.md + merge-multiple: false + path: coverage_reports + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} + name: defradb-codecov + files: coverage_reports/**/*.txt + flags: all-tests + os: 'linux' + fail_ci_if_error: true + verbose: true diff --git a/README.md b/README.md index f61ec5d34d..57c350d8e1 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -![Tests Workflow](https://github.com/sourcenetwork/defradb/actions/workflows/test-and-upload-coverage.yml/badge.svg) +![Test Coverage Workflow](https://github.com/sourcenetwork/defradb/actions/workflows/test-coverage.yml/badge.svg) [![Go Report Card](https://goreportcard.com/badge/github.com/sourcenetwork/defradb)](https://goreportcard.com/report/github.com/sourcenetwork/defradb) [![codecov](https://codecov.io/gh/sourcenetwork/defradb/branch/develop/graph/badge.svg?token=RHAORX13PA)](https://codecov.io/gh/sourcenetwork/defradb) [![Discord](https://img.shields.io/discord/427944769851752448.svg?color=768AD4&label=discord&logo=https%3A%2F%2Fdiscordapp.com%2Fassets%2F8c9701b98ad4372b58f13fd9f65f966e.svg)](https://discord.gg/w7jYQVJ) From 5b58c19b4b1bac988cd54d827f11b304ca6db331 Mon Sep 17 00:00:00 2001 From: Fred Carle Date: Wed, 18 Sep 2024 18:01:40 -0400 Subject: [PATCH 26/71] chore: Make keyring non-interactive (#3026) ## Relevant issue(s) Resolves #2995 ## Description This PR makes the keyring interactions non-interactive by requiring the keyring password to be set as an environment variable secret. It also adds support for that secret to be stored in a `.env` file in the working directory or in a file at a path defined by the `--secret-file` flag. Making the keyring non-interactive is necessary to support automated deployments. 
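As a minimal sketch of the resulting non-interactive flow (the secret value below is only a placeholder):

```
# Provide the keyring secret via the environment,
# or put the same DEFRA_KEYRING_SECRET=... line in a .env file:
export DEFRA_KEYRING_SECRET=my-keyring-password

defradb keyring generate
defradb start
```

The same secret can instead be read from a file passed via `--secret-file`.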
--- .gitignore | 3 ++ README.md | 6 ++- cli/config.go | 10 +++++ cli/errors.go | 13 +------ cli/keyring_export.go | 4 ++ cli/keyring_export_test.go | 7 ++-- cli/keyring_generate.go | 8 +++- cli/keyring_generate_test.go | 25 ++++++------ cli/keyring_import.go | 4 ++ cli/keyring_import_test.go | 7 ++-- cli/root.go | 4 ++ cli/start.go | 36 +++++++++++++++--- cli/utils.go | 19 +++------- docs/config.md | 8 ++++ docs/website/references/cli/defradb.md | 1 + docs/website/references/cli/defradb_client.md | 1 + .../references/cli/defradb_client_acp.md | 1 + .../cli/defradb_client_acp_policy.md | 1 + .../cli/defradb_client_acp_policy_add.md | 1 + .../references/cli/defradb_client_backup.md | 1 + .../cli/defradb_client_backup_export.md | 1 + .../cli/defradb_client_backup_import.md | 1 + .../cli/defradb_client_collection.md | 1 + .../cli/defradb_client_collection_create.md | 1 + .../cli/defradb_client_collection_delete.md | 1 + .../cli/defradb_client_collection_describe.md | 1 + .../cli/defradb_client_collection_docIDs.md | 1 + .../cli/defradb_client_collection_get.md | 1 + .../cli/defradb_client_collection_patch.md | 1 + .../cli/defradb_client_collection_update.md | 1 + .../references/cli/defradb_client_dump.md | 1 + .../references/cli/defradb_client_index.md | 1 + .../cli/defradb_client_index_create.md | 1 + .../cli/defradb_client_index_drop.md | 1 + .../cli/defradb_client_index_list.md | 1 + .../references/cli/defradb_client_p2p.md | 1 + .../cli/defradb_client_p2p_collection.md | 1 + .../cli/defradb_client_p2p_collection_add.md | 1 + .../defradb_client_p2p_collection_getall.md | 1 + .../defradb_client_p2p_collection_remove.md | 1 + .../references/cli/defradb_client_p2p_info.md | 1 + .../cli/defradb_client_p2p_replicator.md | 1 + .../defradb_client_p2p_replicator_delete.md | 1 + .../defradb_client_p2p_replicator_getall.md | 1 + .../cli/defradb_client_p2p_replicator_set.md | 1 + .../references/cli/defradb_client_purge.md | 1 + .../references/cli/defradb_client_query.md | 1 + .../references/cli/defradb_client_schema.md | 1 + .../cli/defradb_client_schema_add.md | 1 + .../cli/defradb_client_schema_describe.md | 1 + .../cli/defradb_client_schema_migration.md | 1 + .../defradb_client_schema_migration_down.md | 1 + .../defradb_client_schema_migration_reload.md | 1 + ...db_client_schema_migration_set-registry.md | 1 + .../defradb_client_schema_migration_set.md | 1 + .../cli/defradb_client_schema_migration_up.md | 1 + .../cli/defradb_client_schema_patch.md | 1 + .../cli/defradb_client_schema_set-active.md | 1 + .../references/cli/defradb_client_tx.md | 1 + .../cli/defradb_client_tx_commit.md | 1 + .../cli/defradb_client_tx_create.md | 1 + .../cli/defradb_client_tx_discard.md | 1 + .../references/cli/defradb_client_view.md | 1 + .../references/cli/defradb_client_view_add.md | 1 + .../cli/defradb_client_view_refresh.md | 1 + .../references/cli/defradb_identity.md | 1 + .../references/cli/defradb_identity_new.md | 1 + .../website/references/cli/defradb_keyring.md | 1 + .../references/cli/defradb_keyring_export.md | 5 +++ .../cli/defradb_keyring_generate.md | 13 +++++-- .../references/cli/defradb_keyring_import.md | 5 +++ .../references/cli/defradb_server-dump.md | 1 + docs/website/references/cli/defradb_start.md | 2 + .../website/references/cli/defradb_version.md | 1 + go.mod | 3 +- go.sum | 2 + keyring/file.go | 38 +++---------------- keyring/file_test.go | 6 +-- tests/integration/acp.go | 4 +- 79 files changed, 186 insertions(+), 102 deletions(-) diff --git a/.gitignore b/.gitignore index 
40eac1780c..fdbdc6e5f3 100644 --- a/.gitignore +++ b/.gitignore @@ -58,3 +58,6 @@ crash.log # Ignore .zip files, such as Lambda Function code slugs. **.zip + +# Ignore .env files containing sensitive information. +.env diff --git a/README.md b/README.md index 57c350d8e1..9d94ce30ac 100644 --- a/README.md +++ b/README.md @@ -68,7 +68,11 @@ The following keys are loaded from the keyring on start: - `peer-key` Ed25519 private key (required) - `encryption-key` AES-128, AES-192, or AES-256 key (optional) -To randomly generate the required keys, run the following command: +A secret to unlock the keyring is required on start and must be provided via the `DEFRADB_KEYRING_SECRET` environment variable. If a `.env` file is available in the working directory, the secret can be stored there or via a file at a path defined by the `--keyring-secret-file` flag. + +The keys will be randomly generated on the inital start of the node if they are not found. + +Alternatively, to randomly generate the required keys, run the following command: ``` defradb keyring generate diff --git a/cli/config.go b/cli/config.go index 745d43e617..f17b3e5ead 100644 --- a/cli/config.go +++ b/cli/config.go @@ -16,6 +16,7 @@ import ( "path/filepath" "strings" + "github.com/joho/godotenv" "github.com/sourcenetwork/corelog" "github.com/spf13/pflag" "github.com/spf13/viper" @@ -52,6 +53,7 @@ var configFlags = map[string]string{ "url": "api.address", "max-txn-retries": "datastore.maxtxnretries", "store": "datastore.store", + "no-encryption": "datastore.noencryption", "valuelogfilesize": "datastore.badger.valuelogfilesize", "peers": "net.peers", "p2paddr": "net.p2paddresses", @@ -65,6 +67,7 @@ var configFlags = map[string]string{ "no-keyring": "keyring.disabled", "source-hub-address": "acp.sourceHub.address", "development": "development", + "secret-file": "secretfile", } // configDefaults contains default values for config entries. @@ -92,6 +95,7 @@ var configDefaults = map[string]any{ "log.output": "stderr", "log.source": false, "log.stacktrace": false, + "secretfile": ".env", } // defaultConfig returns a new config with default values. @@ -159,6 +163,12 @@ func loadConfig(rootdir string, flags *pflag.FlagSet) (*viper.Viper, error) { } } + // load environment variables from .env file if one exists + err = godotenv.Load(cfg.GetString("secretfile")) + if err != nil && !errors.Is(err, os.ErrNotExist) { + return nil, err + } + // set logging config corelog.SetConfig(corelog.Config{ Level: cfg.GetString("log.level"), diff --git a/cli/errors.go b/cli/errors.go index c22957ff51..f084ed21b0 100644 --- a/cli/errors.go +++ b/cli/errors.go @@ -16,14 +16,6 @@ import ( "github.com/sourcenetwork/defradb/errors" ) -const errKeyringHelp = `%w - -Did you forget to initialize the keyring? 
- -Use the following command to generate the required keys: - defradb keyring generate -` - const ( errInvalidLensConfig string = "invalid lens configuration" errSchemaVersionNotOfSchema string = "the given schema version is from a different schema" @@ -41,6 +33,7 @@ var ( ErrViewAddMissingArgs = errors.New("please provide a base query and output SDL for this view") ErrPolicyFileArgCanNotBeEmpty = errors.New("policy file argument can not be empty") ErrPurgeForceFlagRequired = errors.New("run this command again with --force if you really want to purge all data") + ErrMissingKeyringSecret = errors.New("missing keyring secret") ) func NewErrRequiredFlagEmpty(longName string, shortName string) error { @@ -62,7 +55,3 @@ func NewErrSchemaVersionNotOfSchema(schemaRoot string, schemaVersionID string) e errors.NewKV("SchemaVersionID", schemaVersionID), ) } - -func NewErrKeyringHelp(inner error) error { - return fmt.Errorf(errKeyringHelp, inner) -} diff --git a/cli/keyring_export.go b/cli/keyring_export.go index 775672fc8a..a13f1d0f0f 100644 --- a/cli/keyring_export.go +++ b/cli/keyring_export.go @@ -21,6 +21,10 @@ func MakeKeyringExportCommand() *cobra.Command { Long: `Export a private key. Prints the hexadecimal representation of a private key. +The DEFRA_KEYRING_SECRET environment variable must be set to unlock the keyring. +This can also be done with a .env file in the working directory or at a path +defined with the --keyring-secret-file flag. + Example: defradb keyring export encryption-key`, Args: cobra.ExactArgs(1), diff --git a/cli/keyring_export_test.go b/cli/keyring_export_test.go index 8631ff70ab..15d1ebd5bd 100644 --- a/cli/keyring_export_test.go +++ b/cli/keyring_export_test.go @@ -13,21 +13,20 @@ package cli import ( "bytes" "encoding/hex" + "os" "strings" "testing" "github.com/sourcenetwork/defradb/crypto" - "github.com/spf13/cobra" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestKeyringExport(t *testing.T) { rootdir := t.TempDir() - readPassword = func(_ *cobra.Command, _ string) ([]byte, error) { - return []byte("secret"), nil - } + err := os.Setenv("DEFRA_KEYRING_SECRET", "password") + require.NoError(t, err) keyBytes, err := crypto.GenerateAES256() require.NoError(t, err) diff --git a/cli/keyring_generate.go b/cli/keyring_generate.go index 34209671a5..d8864ab811 100644 --- a/cli/keyring_generate.go +++ b/cli/keyring_generate.go @@ -26,13 +26,17 @@ func MakeKeyringGenerateCommand() *cobra.Command { Randomly generate and store private keys in the keyring. By default peer and encryption keys will be generated. +The DEFRA_KEYRING_SECRET environment variable must be set to unlock the keyring. +This can also be done with a .env file in the working directory or at a path +defined with the --keyring-secret-file flag. + WARNING: This will overwrite existing keys in the keyring. Example: defradb keyring generate Example: with no encryption key - defradb keyring generate --no-encryption-key + defradb keyring generate --no-encryption Example: with no peer key defradb keyring generate --no-peer-key @@ -69,7 +73,7 @@ Example: with system keyring return nil }, } - cmd.Flags().BoolVar(&noEncryptionKey, "no-encryption-key", false, + cmd.Flags().BoolVar(&noEncryptionKey, "no-encryption", false, "Skip generating an encryption key. 
Encryption at rest will be disabled") cmd.Flags().BoolVar(&noPeerKey, "no-peer-key", false, "Skip generating a peer key.") diff --git a/cli/keyring_generate_test.go b/cli/keyring_generate_test.go index b29446bd15..82b14ce055 100644 --- a/cli/keyring_generate_test.go +++ b/cli/keyring_generate_test.go @@ -11,24 +11,23 @@ package cli import ( + "os" "path/filepath" "testing" - "github.com/spf13/cobra" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestKeyringGenerate(t *testing.T) { rootdir := t.TempDir() - readPassword = func(_ *cobra.Command, _ string) ([]byte, error) { - return []byte("secret"), nil - } + err := os.Setenv("DEFRA_KEYRING_SECRET", "password") + require.NoError(t, err) cmd := NewDefraCommand() cmd.SetArgs([]string{"keyring", "generate", "--rootdir", rootdir}) - err := cmd.Execute() + err = cmd.Execute() require.NoError(t, err) assert.FileExists(t, filepath.Join(rootdir, "keys", encryptionKeyName)) @@ -37,14 +36,13 @@ func TestKeyringGenerate(t *testing.T) { func TestKeyringGenerateNoEncryptionKey(t *testing.T) { rootdir := t.TempDir() - readPassword = func(_ *cobra.Command, _ string) ([]byte, error) { - return []byte("secret"), nil - } + err := os.Setenv("DEFRA_KEYRING_SECRET", "password") + require.NoError(t, err) cmd := NewDefraCommand() - cmd.SetArgs([]string{"keyring", "generate", "--no-encryption-key", "--rootdir", rootdir}) + cmd.SetArgs([]string{"keyring", "generate", "--no-encryption", "--rootdir", rootdir}) - err := cmd.Execute() + err = cmd.Execute() require.NoError(t, err) assert.NoFileExists(t, filepath.Join(rootdir, "keys", encryptionKeyName)) @@ -53,14 +51,13 @@ func TestKeyringGenerateNoEncryptionKey(t *testing.T) { func TestKeyringGenerateNoPeerKey(t *testing.T) { rootdir := t.TempDir() - readPassword = func(_ *cobra.Command, _ string) ([]byte, error) { - return []byte("secret"), nil - } + err := os.Setenv("DEFRA_KEYRING_SECRET", "password") + require.NoError(t, err) cmd := NewDefraCommand() cmd.SetArgs([]string{"keyring", "generate", "--no-peer-key", "--rootdir", rootdir}) - err := cmd.Execute() + err = cmd.Execute() require.NoError(t, err) assert.FileExists(t, filepath.Join(rootdir, "keys", encryptionKeyName)) diff --git a/cli/keyring_import.go b/cli/keyring_import.go index 61f80f12a1..8f9d691193 100644 --- a/cli/keyring_import.go +++ b/cli/keyring_import.go @@ -23,6 +23,10 @@ func MakeKeyringImportCommand() *cobra.Command { Long: `Import a private key. Store an externally generated key in the keyring. +The DEFRA_KEYRING_SECRET environment variable must be set to unlock the keyring. +This can also be done with a .env file in the working directory or at a path +defined with the --keyring-secret-file flag. 
+ Example: defradb keyring import encryption-key 0000000000000000`, Args: cobra.ExactArgs(2), diff --git a/cli/keyring_import_test.go b/cli/keyring_import_test.go index dac907e000..d0d51a2ce8 100644 --- a/cli/keyring_import_test.go +++ b/cli/keyring_import_test.go @@ -12,21 +12,20 @@ package cli import ( "encoding/hex" + "os" "path/filepath" "testing" "github.com/sourcenetwork/defradb/crypto" - "github.com/spf13/cobra" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestKeyringImport(t *testing.T) { rootdir := t.TempDir() - readPassword = func(_ *cobra.Command, _ string) ([]byte, error) { - return []byte("secret"), nil - } + err := os.Setenv("DEFRA_KEYRING_SECRET", "password") + require.NoError(t, err) keyBytes, err := crypto.GenerateAES256() require.NoError(t, err) diff --git a/cli/root.go b/cli/root.go index 2a94202657..f9d5c811d6 100644 --- a/cli/root.go +++ b/cli/root.go @@ -102,5 +102,9 @@ Start a DefraDB node, interact with a local or remote node, and much more. cfg.GetString(configFlags["source-hub-address"]), "The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor", ) + cmd.PersistentFlags().String( + "secret-file", + cfg.GetString(configFlags["secret-file"]), + "Path to the file containing secrets") return cmd } diff --git a/cli/start.go b/cli/start.go index 2b5b4fb734..168e2a525c 100644 --- a/cli/start.go +++ b/cli/start.go @@ -18,6 +18,7 @@ import ( "github.com/sourcenetwork/immutable" "github.com/spf13/cobra" + "github.com/sourcenetwork/defradb/crypto" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/event" "github.com/sourcenetwork/defradb/http" @@ -97,17 +98,38 @@ func MakeStartCommand() *cobra.Command { if !cfg.GetBool("keyring.disabled") { kr, err := openKeyring(cmd) if err != nil { - return NewErrKeyringHelp(err) + return err } - // load the required peer key + // load the required peer key or generate one if it doesn't exist peerKey, err := kr.Get(peerKeyName) - if err != nil { - return NewErrKeyringHelp(err) + if err != nil && errors.Is(err, keyring.ErrNotFound) { + peerKey, err = crypto.GenerateEd25519() + if err != nil { + return err + } + err = kr.Set(peerKeyName, peerKey) + if err != nil { + return err + } + log.Info("generated peer key") + } else if err != nil { + return err } opts = append(opts, net.WithPrivateKey(peerKey)) + // load the optional encryption key encryptionKey, err := kr.Get(encryptionKeyName) - if err != nil && !errors.Is(err, keyring.ErrNotFound) { + if err != nil && errors.Is(err, keyring.ErrNotFound) && !cfg.GetBool("datastore.noencryption") { + encryptionKey, err = crypto.GenerateAES256() + if err != nil { + return err + } + err = kr.Set(encryptionKeyName, encryptionKey) + if err != nil { + return err + } + log.Info("generated encryption key") + } else if err != nil && !errors.Is(err, keyring.ErrNotFound) { return err } opts = append(opts, node.WithBadgerEncryptionKey(encryptionKey)) @@ -224,5 +246,9 @@ func MakeStartCommand() *cobra.Command { cfg.GetBool(configFlags["development"]), "Enables a set of features that make development easier but should not be enabled in production", ) + cmd.Flags().Bool( + "no-encryption", + cfg.GetBool(configFlags["no-encryption"]), + "Skip generating an encryption key. Encryption at rest will be disabled. 
WARNING: This cannot be undone.") return cmd } diff --git a/cli/utils.go b/cli/utils.go index f0bd6a8098..845cea671b 100644 --- a/cli/utils.go +++ b/cli/utils.go @@ -16,14 +16,12 @@ import ( "encoding/json" "os" "path/filepath" - "syscall" "time" "github.com/decred/dcrd/dcrec/secp256k1/v4" "github.com/sourcenetwork/immutable" "github.com/spf13/cobra" "github.com/spf13/viper" - "golang.org/x/term" acpIdentity "github.com/sourcenetwork/defradb/acp/identity" "github.com/sourcenetwork/defradb/client" @@ -58,14 +56,6 @@ const ( authTokenExpiration = time.Minute * 15 ) -// readPassword reads a user input password without echoing it to the terminal. -var readPassword = func(cmd *cobra.Command, msg string) ([]byte, error) { - cmd.Print(msg) - pass, err := term.ReadPassword(int(syscall.Stdin)) - cmd.Println("") - return pass, err -} - // mustGetContextDB returns the db for the current command context. // // If a db is not set in the current context this function panics. @@ -214,10 +204,11 @@ func openKeyring(cmd *cobra.Command) (keyring.Keyring, error) { if err := os.MkdirAll(path, 0755); err != nil { return nil, err } - prompt := keyring.PromptFunc(func(s string) ([]byte, error) { - return readPassword(cmd, s) - }) - return keyring.OpenFileKeyring(path, prompt) + secret := []byte(cfg.GetString("keyring.secret")) + if len(secret) == 0 { + return nil, ErrMissingKeyringSecret + } + return keyring.OpenFileKeyring(path, secret) } func writeJSON(cmd *cobra.Command, out any) error { diff --git a/docs/config.md b/docs/config.md index 0ac6e5dd52..b6df8e03a8 100644 --- a/docs/config.md +++ b/docs/config.md @@ -21,6 +21,10 @@ The number of retries to make in the event of a transaction conflict. Defaults t Currently this is only used within the P2P system and will not affect operations initiated by users. +## `datastore.noencryption` + +Skip generating an encryption key. Encryption at rest will be disabled. **WARNING**: This cannot be undone. + ## `datastore.badger.path` The path to the database data file(s). Defaults to `data`. @@ -156,3 +160,7 @@ transactions created by the node is stored. Required when using `acp.type`:`sour The SourceHub address of the actor that client-side actions should permit to make SourceHub actions on their behalf. This is a client-side only config param. It is required if the client wishes to make SourceHub ACP requests in order to create protected data. + +## `secretfile` + +Path to the file containing secrets. Defaults to `.env`. diff --git a/docs/website/references/cli/defradb.md b/docs/website/references/cli/defradb.md index 93dce2adef..9def574440 100644 --- a/docs/website/references/cli/defradb.md +++ b/docs/website/references/cli/defradb.md @@ -25,6 +25,7 @@ Start a DefraDB node, interact with a local or remote node, and much more. 
--no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") ``` diff --git a/docs/website/references/cli/defradb_client.md b/docs/website/references/cli/defradb_client.md index 27f840d7ae..adbbb4eeaa 100644 --- a/docs/website/references/cli/defradb_client.md +++ b/docs/website/references/cli/defradb_client.md @@ -30,6 +30,7 @@ Execute queries, add schema types, obtain node info, etc. --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") ``` diff --git a/docs/website/references/cli/defradb_client_acp.md b/docs/website/references/cli/defradb_client_acp.md index d1dc39e36e..5a9c9aef80 100644 --- a/docs/website/references/cli/defradb_client_acp.md +++ b/docs/website/references/cli/defradb_client_acp.md @@ -32,6 +32,7 @@ Learn more about [ACP](/acp/README.md) --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_acp_policy.md b/docs/website/references/cli/defradb_client_acp_policy.md index 720c072eae..c0c8d6ec12 100644 --- a/docs/website/references/cli/defradb_client_acp_policy.md +++ b/docs/website/references/cli/defradb_client_acp_policy.md @@ -28,6 +28,7 @@ Interact with the acp policy features of DefraDB instance --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_acp_policy_add.md b/docs/website/references/cli/defradb_client_acp_policy_add.md index 596d7eb52f..bef375015d 100644 --- a/docs/website/references/cli/defradb_client_acp_policy_add.md +++ b/docs/website/references/cli/defradb_client_acp_policy_add.md @@ -79,6 +79,7 @@ defradb client acp policy add [-i --identity] [policy] [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file 
containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_backup.md b/docs/website/references/cli/defradb_client_backup.md index ba6028b287..cf7b2d058e 100644 --- a/docs/website/references/cli/defradb_client_backup.md +++ b/docs/website/references/cli/defradb_client_backup.md @@ -29,6 +29,7 @@ Currently only supports JSON format. --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_backup_export.md b/docs/website/references/cli/defradb_client_backup_export.md index e48f848b27..fe0887f2f7 100644 --- a/docs/website/references/cli/defradb_client_backup_export.md +++ b/docs/website/references/cli/defradb_client_backup_export.md @@ -43,6 +43,7 @@ defradb client backup export [-c --collections | -p --pretty | -f --format] [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_collection.md b/docs/website/references/cli/defradb_client_collection.md index ad6322d382..d4ea8c32a9 100644 --- a/docs/website/references/cli/defradb_client_collection.md +++ b/docs/website/references/cli/defradb_client_collection.md @@ -33,6 +33,7 @@ Create, read, update, and delete documents within a collection. 
--no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") ``` diff --git a/docs/website/references/cli/defradb_client_collection_create.md b/docs/website/references/cli/defradb_client_collection_create.md index 92ec4a549b..8c8804140e 100644 --- a/docs/website/references/cli/defradb_client_collection_create.md +++ b/docs/website/references/cli/defradb_client_collection_create.md @@ -70,6 +70,7 @@ defradb client collection create [-i --identity] [-e --encrypt] [--encrypt-field --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) --schema string Collection schema Root + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_collection_delete.md b/docs/website/references/cli/defradb_client_collection_delete.md index 68b98d13c5..d23c4804fd 100644 --- a/docs/website/references/cli/defradb_client_collection_delete.md +++ b/docs/website/references/cli/defradb_client_collection_delete.md @@ -48,6 +48,7 @@ defradb client collection delete [-i --identity] [--filter --docID [flags] --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) --schema string Collection schema Root + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_collection_patch.md b/docs/website/references/cli/defradb_client_collection_patch.md index 689e897af0..ea2e581a04 100644 --- a/docs/website/references/cli/defradb_client_collection_patch.md +++ b/docs/website/references/cli/defradb_client_collection_patch.md @@ -49,6 +49,7 @@ defradb client collection patch [patch] [flags] --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) --schema string Collection schema Root + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_collection_update.md b/docs/website/references/cli/defradb_client_collection_update.md index d00f44b07c..f21bba3437 100644 --- a/docs/website/references/cli/defradb_client_collection_update.md +++ b/docs/website/references/cli/defradb_client_collection_update.md @@ -54,6 +54,7 @@ defradb client collection update [-i --identity] [--filter --docID --fields [-n - --no-keyring Disable the keyring 
and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_index_drop.md b/docs/website/references/cli/defradb_client_index_drop.md index df2cac7fc3..081f5e2328 100644 --- a/docs/website/references/cli/defradb_client_index_drop.md +++ b/docs/website/references/cli/defradb_client_index_drop.md @@ -37,6 +37,7 @@ defradb client index drop -c --collection -n --name [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_index_list.md b/docs/website/references/cli/defradb_client_index_list.md index 360fa7fbd9..c5cf21155d 100644 --- a/docs/website/references/cli/defradb_client_index_list.md +++ b/docs/website/references/cli/defradb_client_index_list.md @@ -39,6 +39,7 @@ defradb client index list [-c --collection ] [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_p2p.md b/docs/website/references/cli/defradb_client_p2p.md index 77104e9c4f..d998508fcd 100644 --- a/docs/website/references/cli/defradb_client_p2p.md +++ b/docs/website/references/cli/defradb_client_p2p.md @@ -28,6 +28,7 @@ Interact with the DefraDB P2P system --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_p2p_collection.md b/docs/website/references/cli/defradb_client_p2p_collection.md index 9cef79c61e..bc1f8c616f 100644 --- a/docs/website/references/cli/defradb_client_p2p_collection.md +++ b/docs/website/references/cli/defradb_client_p2p_collection.md @@ -29,6 +29,7 @@ The selected collections synchronize their events on the pubsub network. 
--no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_p2p_collection_add.md b/docs/website/references/cli/defradb_client_p2p_collection_add.md index 0faef4908b..836c06616d 100644 --- a/docs/website/references/cli/defradb_client_p2p_collection_add.md +++ b/docs/website/references/cli/defradb_client_p2p_collection_add.md @@ -40,6 +40,7 @@ defradb client p2p collection add [collectionIDs] [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_p2p_collection_getall.md b/docs/website/references/cli/defradb_client_p2p_collection_getall.md index 2154256fc1..3df290b831 100644 --- a/docs/website/references/cli/defradb_client_p2p_collection_getall.md +++ b/docs/website/references/cli/defradb_client_p2p_collection_getall.md @@ -33,6 +33,7 @@ defradb client p2p collection getall [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_p2p_collection_remove.md b/docs/website/references/cli/defradb_client_p2p_collection_remove.md index 24cbee423e..c3739dfb2a 100644 --- a/docs/website/references/cli/defradb_client_p2p_collection_remove.md +++ b/docs/website/references/cli/defradb_client_p2p_collection_remove.md @@ -40,6 +40,7 @@ defradb client p2p collection remove [collectionIDs] [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_p2p_info.md b/docs/website/references/cli/defradb_client_p2p_info.md index 9a23b68535..cf78285cbc 100644 --- a/docs/website/references/cli/defradb_client_p2p_info.md +++ b/docs/website/references/cli/defradb_client_p2p_info.md @@ -32,6 +32,7 @@ defradb client p2p info [flags] --no-keyring Disable the keyring 
and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_p2p_replicator.md b/docs/website/references/cli/defradb_client_p2p_replicator.md index 9976113c65..3c1efbc894 100644 --- a/docs/website/references/cli/defradb_client_p2p_replicator.md +++ b/docs/website/references/cli/defradb_client_p2p_replicator.md @@ -29,6 +29,7 @@ A replicator replicates one or all collection(s) from one node to another. --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_p2p_replicator_delete.md b/docs/website/references/cli/defradb_client_p2p_replicator_delete.md index cd153c081b..626aafe95f 100644 --- a/docs/website/references/cli/defradb_client_p2p_replicator_delete.md +++ b/docs/website/references/cli/defradb_client_p2p_replicator_delete.md @@ -38,6 +38,7 @@ defradb client p2p replicator delete [-c, --collection] [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_p2p_replicator_getall.md b/docs/website/references/cli/defradb_client_p2p_replicator_getall.md index 063c70286c..c9f91420a6 100644 --- a/docs/website/references/cli/defradb_client_p2p_replicator_getall.md +++ b/docs/website/references/cli/defradb_client_p2p_replicator_getall.md @@ -37,6 +37,7 @@ defradb client p2p replicator getall [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_p2p_replicator_set.md b/docs/website/references/cli/defradb_client_p2p_replicator_set.md index 6bf37ab3f4..75d6efe696 100644 --- a/docs/website/references/cli/defradb_client_p2p_replicator_set.md +++ b/docs/website/references/cli/defradb_client_p2p_replicator_set.md @@ -38,6 +38,7 @@ defradb client p2p replicator set [-c, --collection] 
[flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_purge.md b/docs/website/references/cli/defradb_client_purge.md index 3a1b4b2738..82adc92337 100644 --- a/docs/website/references/cli/defradb_client_purge.md +++ b/docs/website/references/cli/defradb_client_purge.md @@ -34,6 +34,7 @@ defradb client purge [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_query.md b/docs/website/references/cli/defradb_client_query.md index 7519128bbd..abaea09428 100644 --- a/docs/website/references/cli/defradb_client_query.md +++ b/docs/website/references/cli/defradb_client_query.md @@ -52,6 +52,7 @@ defradb client query [-i --identity] [request] [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_schema.md b/docs/website/references/cli/defradb_client_schema.md index 10f7a2237a..51e3ae0946 100644 --- a/docs/website/references/cli/defradb_client_schema.md +++ b/docs/website/references/cli/defradb_client_schema.md @@ -28,6 +28,7 @@ Make changes, updates, or look for existing schema types. 
--no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_schema_add.md b/docs/website/references/cli/defradb_client_schema_add.md index 3c20388acd..ecff04212c 100644 --- a/docs/website/references/cli/defradb_client_schema_add.md +++ b/docs/website/references/cli/defradb_client_schema_add.md @@ -49,6 +49,7 @@ defradb client schema add [schema] [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_schema_describe.md b/docs/website/references/cli/defradb_client_schema_describe.md index 80e2e1b053..a3dd96f072 100644 --- a/docs/website/references/cli/defradb_client_schema_describe.md +++ b/docs/website/references/cli/defradb_client_schema_describe.md @@ -48,6 +48,7 @@ defradb client schema describe [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_schema_migration.md b/docs/website/references/cli/defradb_client_schema_migration.md index dc30e11ebc..76be24e58e 100644 --- a/docs/website/references/cli/defradb_client_schema_migration.md +++ b/docs/website/references/cli/defradb_client_schema_migration.md @@ -28,6 +28,7 @@ Make set or look for existing schema migrations on a DefraDB node. 
--no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_schema_migration_down.md b/docs/website/references/cli/defradb_client_schema_migration_down.md index 69dca887ca..2ec8a2631f 100644 --- a/docs/website/references/cli/defradb_client_schema_migration_down.md +++ b/docs/website/references/cli/defradb_client_schema_migration_down.md @@ -45,6 +45,7 @@ defradb client schema migration down --collection [fl --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_schema_migration_reload.md b/docs/website/references/cli/defradb_client_schema_migration_reload.md index d5a3ae3e82..07011ebb1a 100644 --- a/docs/website/references/cli/defradb_client_schema_migration_reload.md +++ b/docs/website/references/cli/defradb_client_schema_migration_reload.md @@ -32,6 +32,7 @@ defradb client schema migration reload [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_schema_migration_set-registry.md b/docs/website/references/cli/defradb_client_schema_migration_set-registry.md index d49213920c..a62a1a5e31 100644 --- a/docs/website/references/cli/defradb_client_schema_migration_set-registry.md +++ b/docs/website/references/cli/defradb_client_schema_migration_set-registry.md @@ -38,6 +38,7 @@ defradb client schema migration set-registry [collectionID] [cfg] [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_schema_migration_set.md b/docs/website/references/cli/defradb_client_schema_migration_set.md index b5063e2033..954313665f 100644 --- a/docs/website/references/cli/defradb_client_schema_migration_set.md +++ 
b/docs/website/references/cli/defradb_client_schema_migration_set.md @@ -45,6 +45,7 @@ defradb client schema migration set [src] [dst] [cfg] [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_schema_migration_up.md b/docs/website/references/cli/defradb_client_schema_migration_up.md index 83ccd49e04..1b59edb56a 100644 --- a/docs/website/references/cli/defradb_client_schema_migration_up.md +++ b/docs/website/references/cli/defradb_client_schema_migration_up.md @@ -45,6 +45,7 @@ defradb client schema migration up --collection [flag --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_schema_patch.md b/docs/website/references/cli/defradb_client_schema_patch.md index 7d18010a26..e2b58fedbc 100644 --- a/docs/website/references/cli/defradb_client_schema_patch.md +++ b/docs/website/references/cli/defradb_client_schema_patch.md @@ -48,6 +48,7 @@ defradb client schema patch [schema] [migration] [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_schema_set-active.md b/docs/website/references/cli/defradb_client_schema_set-active.md index fc57c21006..7b637da7b9 100644 --- a/docs/website/references/cli/defradb_client_schema_set-active.md +++ b/docs/website/references/cli/defradb_client_schema_set-active.md @@ -33,6 +33,7 @@ defradb client schema set-active [versionID] [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_tx.md b/docs/website/references/cli/defradb_client_tx.md index b341aa699e..01353b850c 100644 --- a/docs/website/references/cli/defradb_client_tx.md +++ b/docs/website/references/cli/defradb_client_tx.md @@ 
-28,6 +28,7 @@ Create, commit, and discard DefraDB transactions --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_tx_commit.md b/docs/website/references/cli/defradb_client_tx_commit.md index 6c449593e2..557b9f061a 100644 --- a/docs/website/references/cli/defradb_client_tx_commit.md +++ b/docs/website/references/cli/defradb_client_tx_commit.md @@ -32,6 +32,7 @@ defradb client tx commit [id] [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_tx_create.md b/docs/website/references/cli/defradb_client_tx_create.md index 0b860c7ace..f174a09504 100644 --- a/docs/website/references/cli/defradb_client_tx_create.md +++ b/docs/website/references/cli/defradb_client_tx_create.md @@ -34,6 +34,7 @@ defradb client tx create [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_tx_discard.md b/docs/website/references/cli/defradb_client_tx_discard.md index d3e378e02b..671d4f2c66 100644 --- a/docs/website/references/cli/defradb_client_tx_discard.md +++ b/docs/website/references/cli/defradb_client_tx_discard.md @@ -32,6 +32,7 @@ defradb client tx discard [id] [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_view.md b/docs/website/references/cli/defradb_client_view.md index faee845c64..bf21e0321e 100644 --- a/docs/website/references/cli/defradb_client_view.md +++ b/docs/website/references/cli/defradb_client_view.md @@ -28,6 +28,7 @@ Manage (add) views withing a running DefraDB instance --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent 
data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_view_add.md b/docs/website/references/cli/defradb_client_view_add.md index 41d64d202f..c5073d709b 100644 --- a/docs/website/references/cli/defradb_client_view_add.md +++ b/docs/website/references/cli/defradb_client_view_add.md @@ -38,6 +38,7 @@ defradb client view add [query] [sdl] [transform] [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_client_view_refresh.md b/docs/website/references/cli/defradb_client_view_refresh.md index 0a91aec604..28e915190d 100644 --- a/docs/website/references/cli/defradb_client_view_refresh.md +++ b/docs/website/references/cli/defradb_client_view_refresh.md @@ -54,6 +54,7 @@ defradb client view refresh [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") diff --git a/docs/website/references/cli/defradb_identity.md b/docs/website/references/cli/defradb_identity.md index 016561b39c..cb0d0a6c37 100644 --- a/docs/website/references/cli/defradb_identity.md +++ b/docs/website/references/cli/defradb_identity.md @@ -27,6 +27,7 @@ Interact with identity features of DefraDB instance --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") ``` diff --git a/docs/website/references/cli/defradb_identity_new.md b/docs/website/references/cli/defradb_identity_new.md index 05dcc6fe0a..0fc3a73c69 100644 --- a/docs/website/references/cli/defradb_identity_new.md +++ b/docs/website/references/cli/defradb_identity_new.md @@ -42,6 +42,7 @@ defradb identity new [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --url 
string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") ``` diff --git a/docs/website/references/cli/defradb_keyring.md b/docs/website/references/cli/defradb_keyring.md index c01c69bd1b..8dad08b542 100644 --- a/docs/website/references/cli/defradb_keyring.md +++ b/docs/website/references/cli/defradb_keyring.md @@ -42,6 +42,7 @@ To learn more about the available options: --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") ``` diff --git a/docs/website/references/cli/defradb_keyring_export.md b/docs/website/references/cli/defradb_keyring_export.md index 15029b484a..5ba8c64cc9 100644 --- a/docs/website/references/cli/defradb_keyring_export.md +++ b/docs/website/references/cli/defradb_keyring_export.md @@ -7,6 +7,10 @@ Export a private key Export a private key. Prints the hexadecimal representation of a private key. +The DEFRA_KEYRING_SECRET environment variable must be set to unlock the keyring. +This can also be done with a .env file in the working directory or at a path +defined with the --keyring-secret-file flag. + Example: defradb keyring export encryption-key @@ -35,6 +39,7 @@ defradb keyring export [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") ``` diff --git a/docs/website/references/cli/defradb_keyring_generate.md b/docs/website/references/cli/defradb_keyring_generate.md index 3651479823..747c6046fc 100644 --- a/docs/website/references/cli/defradb_keyring_generate.md +++ b/docs/website/references/cli/defradb_keyring_generate.md @@ -8,13 +8,17 @@ Generate private keys. Randomly generate and store private keys in the keyring. By default peer and encryption keys will be generated. +The DEFRA_KEYRING_SECRET environment variable must be set to unlock the keyring. +This can also be done with a .env file in the working directory or at a path +defined with the --keyring-secret-file flag. + WARNING: This will overwrite existing keys in the keyring. Example: defradb keyring generate Example: with no encryption key - defradb keyring generate --no-encryption-key + defradb keyring generate --no-encryption Example: with no peer key defradb keyring generate --no-peer-key @@ -29,9 +33,9 @@ defradb keyring generate [flags] ### Options ``` - -h, --help help for generate - --no-encryption-key Skip generating an encryption key. Encryption at rest will be disabled - --no-peer-key Skip generating a peer key. + -h, --help help for generate + --no-encryption Skip generating an encryption key. Encryption at rest will be disabled + --no-peer-key Skip generating a peer key. 
``` ### Options inherited from parent commands @@ -49,6 +53,7 @@ defradb keyring generate [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") ``` diff --git a/docs/website/references/cli/defradb_keyring_import.md b/docs/website/references/cli/defradb_keyring_import.md index fe5df3f4ff..c76f2ec34c 100644 --- a/docs/website/references/cli/defradb_keyring_import.md +++ b/docs/website/references/cli/defradb_keyring_import.md @@ -7,6 +7,10 @@ Import a private key Import a private key. Store an externally generated key in the keyring. +The DEFRA_KEYRING_SECRET environment variable must be set to unlock the keyring. +This can also be done with a .env file in the working directory or at a path +defined with the --keyring-secret-file flag. + Example: defradb keyring import encryption-key 0000000000000000 @@ -35,6 +39,7 @@ defradb keyring import [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") ``` diff --git a/docs/website/references/cli/defradb_server-dump.md b/docs/website/references/cli/defradb_server-dump.md index 75dd56094e..3aafdcfca8 100644 --- a/docs/website/references/cli/defradb_server-dump.md +++ b/docs/website/references/cli/defradb_server-dump.md @@ -27,6 +27,7 @@ defradb server-dump [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") ``` diff --git a/docs/website/references/cli/defradb_start.md b/docs/website/references/cli/defradb_start.md index 9b1e5a8d74..79560ac62b 100644 --- a/docs/website/references/cli/defradb_start.md +++ b/docs/website/references/cli/defradb_start.md @@ -17,6 +17,7 @@ defradb start [flags] --development Enables a set of features that make development easier but should not be enabled in production -h, --help help for start --max-txn-retries int Specify the maximum number of retries per transaction (default 5) + --no-encryption Skip generating an encryption key. Encryption at rest will be disabled. WARNING: This cannot be undone. 
--no-p2p Disable the peer-to-peer network synchronization system --p2paddr strings Listen addresses for the p2p network (formatted as a libp2p MultiAddr) (default [/ip4/127.0.0.1/tcp/9171]) --peers stringArray List of peers to connect to @@ -41,6 +42,7 @@ defradb start [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") ``` diff --git a/docs/website/references/cli/defradb_version.md b/docs/website/references/cli/defradb_version.md index c752a6936e..fdd5010d25 100644 --- a/docs/website/references/cli/defradb_version.md +++ b/docs/website/references/cli/defradb_version.md @@ -29,6 +29,7 @@ defradb version [flags] --no-keyring Disable the keyring and generate ephemeral keys --no-log-color Disable colored log output --rootdir string Directory for persistent data (default: $HOME/.defradb) + --secret-file string Path to the file containing secrets (default ".env") --source-hub-address string The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor --url string URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181") ``` diff --git a/go.mod b/go.mod index a23a90d299..4be484b96e 100644 --- a/go.mod +++ b/go.mod @@ -29,6 +29,7 @@ require ( github.com/ipld/go-ipld-prime/storage/bsadapter v0.0.0-20240322071758-198d7dba8fb8 github.com/ipld/go-ipld-prime/storage/bsrvadapter v0.0.0-20240322071758-198d7dba8fb8 github.com/jbenet/goprocess v0.1.4 + github.com/joho/godotenv v1.5.1 github.com/lens-vm/lens/host-go v0.0.0-20231127204031-8d858ed2926c github.com/lestrrat-go/jwx/v2 v2.1.1 github.com/libp2p/go-libp2p v0.36.3 @@ -62,7 +63,6 @@ require ( go.opentelemetry.io/otel/sdk/metric v1.30.0 go.uber.org/zap v1.27.0 golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa - golang.org/x/term v0.24.0 google.golang.org/grpc v1.66.2 google.golang.org/protobuf v1.34.2 ) @@ -365,6 +365,7 @@ require ( golang.org/x/oauth2 v0.21.0 // indirect golang.org/x/sync v0.8.0 // indirect golang.org/x/sys v0.25.0 // indirect + golang.org/x/term v0.24.0 // indirect golang.org/x/text v0.17.0 // indirect golang.org/x/time v0.5.0 // indirect golang.org/x/tools v0.24.0 // indirect diff --git a/go.sum b/go.sum index cdd939b06e..eaf23755d5 100644 --- a/go.sum +++ b/go.sum @@ -917,6 +917,8 @@ github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGw github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/jmhodges/levigo v1.0.0 h1:q5EC36kV79HWeTBWsod3mG11EgStG3qArTKcvlksN1U= github.com/jmhodges/levigo v1.0.0/go.mod h1:Q6Qx+uH3RAqyK4rFQroq9RL7mdkABMcfhEI+nNuzMJQ= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= github.com/jorrizza/ed2curve25519 v0.1.0 h1:P58ZEiVKW4vknYuGyOXuskMm82rTJyGhgRGrMRcCE8E= github.com/jorrizza/ed2curve25519 v0.1.0/go.mod h1:27VPNk2FnNqLQNvvVymiX41VE/nokPyn5HHP7gtfYlo= diff --git a/keyring/file.go b/keyring/file.go index 61191b3285..a8f7532274 
100644 --- a/keyring/file.go +++ b/keyring/file.go @@ -28,30 +28,21 @@ type fileKeyring struct { dir string // password is the user defined password used to generate encryption keys password []byte - // prompt func is used to retrieve the user password - prompt PromptFunc } -// PromptFunc is a callback used to retrieve the user's password. -type PromptFunc func(s string) ([]byte, error) - // OpenFileKeyring opens the keyring in the given directory. -func OpenFileKeyring(dir string, prompt PromptFunc) (*fileKeyring, error) { +func OpenFileKeyring(dir string, password []byte) (*fileKeyring, error) { if err := os.MkdirAll(dir, 0755); err != nil { return nil, err } return &fileKeyring{ - dir: dir, - prompt: prompt, + dir: dir, + password: password, }, nil } func (f *fileKeyring) Set(name string, key []byte) error { - password, err := f.promptPassword() - if err != nil { - return err - } - cipher, err := jwe.Encrypt(key, jwe.WithKey(keyEncryptionAlgorithm, password)) + cipher, err := jwe.Encrypt(key, jwe.WithKey(keyEncryptionAlgorithm, f.password)) if err != nil { return err } @@ -63,11 +54,7 @@ func (f *fileKeyring) Get(name string) ([]byte, error) { if os.IsNotExist(err) { return nil, ErrNotFound } - password, err := f.promptPassword() - if err != nil { - return nil, err - } - return jwe.Decrypt(cipher, jwe.WithKey(keyEncryptionAlgorithm, password)) + return jwe.Decrypt(cipher, jwe.WithKey(keyEncryptionAlgorithm, f.password)) } func (f *fileKeyring) Delete(user string) error { @@ -77,18 +64,3 @@ func (f *fileKeyring) Delete(user string) error { } return err } - -// promptPassword returns the password from the user. - // - // If the password has been previously prompted it will be remembered. -func (f *fileKeyring) promptPassword() ([]byte, error) { - if len(f.password) > 0 { - return f.password, nil - } - password, err := f.prompt("Enter keystore password:") - if err != nil { - return nil, err - } - f.password = password - return password, nil -} diff --git a/keyring/file_test.go index f3aa3529b1..5be5663cef 100644 --- a/keyring/file_test.go +++ b/keyring/file_test.go @@ -19,11 +19,7 @@ import ( ) func TestFileKeyring(t *testing.T) { - prompt := PromptFunc(func(s string) ([]byte, error) { - return []byte("secret"), nil - }) - - kr, err := OpenFileKeyring(t.TempDir(), prompt) + kr, err := OpenFileKeyring(t.TempDir(), []byte("secret")) require.NoError(t, err) err = kr.Set("peer_key", []byte("abc")) diff --git a/tests/integration/acp.go index 9242a266fc..44ac023bce 100644 --- a/tests/integration/acp.go +++ b/tests/integration/acp.go @@ -141,9 +141,7 @@ func setupSourceHub(s *state) ([]node.ACPOpt, error) { kr, err := keyring.OpenFileKeyring( directory, - keyring.PromptFunc(func(s string) ([]byte, error) { - return []byte("secret"), nil - }), + []byte("secret"), ) if err != nil { return nil, err From 09b49c64a2fc03ea91fe9a808e1a8b039e77c8e6 Mon Sep 17 00:00:00 2001 From: Fred Carle Date: Wed, 18 Sep 2024 20:21:01 -0400 Subject: [PATCH 27/71] docs(i): Fix incorrect flag and env variable (#3034) ## Relevant issue(s) Resolves #3033 ## Description The flag and environment variable were kept unchanged from an initial commit in the non-interactive keyring PR. This fixes the mistake.
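As context for the two keyring patches above, here is a minimal Go sketch of the resulting non-interactive unlock flow, combining the refactored `OpenFileKeyring(dir, password)` API with the corrected `DEFRA_KEYRING_SECRET` name; the `.env` handling uses `joho/godotenv` (the dependency added in `go.mod` earlier in this series), but the exact wiring shown here is illustrative rather than the committed code:

```go
package main

import (
	"log"
	"os"

	"github.com/joho/godotenv"

	"github.com/sourcenetwork/defradb/keyring"
)

func main() {
	// Best effort: populate the environment from a .env file in the
	// working directory. Variables already set in the environment win.
	_ = godotenv.Load()

	secret := os.Getenv("DEFRA_KEYRING_SECRET")
	if secret == "" {
		log.Fatal("a keyring secret is required to unlock the keyring")
	}

	// The refactored API takes the password up front instead of
	// prompting for it on first use.
	kr, err := keyring.OpenFileKeyring("keys", []byte(secret))
	if err != nil {
		log.Fatal(err)
	}

	// Keys can now be read and written without any interactive prompt.
	if err := kr.Set("peer_key", []byte("example")); err != nil {
		log.Fatal(err)
	}
}
```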
--- README.md | 2 +- cli/keyring_export.go | 2 +- cli/keyring_generate.go | 2 +- cli/keyring_import.go | 2 +- docs/website/references/cli/defradb_keyring_export.md | 2 +- docs/website/references/cli/defradb_keyring_generate.md | 2 +- docs/website/references/cli/defradb_keyring_import.md | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 9d94ce30ac..fd93db1d6f 100644 --- a/README.md +++ b/README.md @@ -68,7 +68,7 @@ The following keys are loaded from the keyring on start: - `peer-key` Ed25519 private key (required) - `encryption-key` AES-128, AES-192, or AES-256 key (optional) -A secret to unlock the keyring is required on start and must be provided via the `DEFRADB_KEYRING_SECRET` environment variable. If a `.env` file is available in the working directory, the secret can be stored there or via a file at a path defined by the `--keyring-secret-file` flag. +A secret to unlock the keyring is required on start and must be provided via the `DEFRA_KEYRING_SECRET` environment variable. If a `.env` file is available in the working directory, the secret can be stored there or via a file at a path defined by the `--secret-file` flag. The keys will be randomly generated on the inital start of the node if they are not found. diff --git a/cli/keyring_export.go b/cli/keyring_export.go index a13f1d0f0f..30aa679e28 100644 --- a/cli/keyring_export.go +++ b/cli/keyring_export.go @@ -23,7 +23,7 @@ Prints the hexadecimal representation of a private key. The DEFRA_KEYRING_SECRET environment variable must be set to unlock the keyring. This can also be done with a .env file in the working directory or at a path -defined with the --keyring-secret-file flag. +defined with the --secret-file flag. Example: defradb keyring export encryption-key`, diff --git a/cli/keyring_generate.go b/cli/keyring_generate.go index d8864ab811..09e2ed9b5f 100644 --- a/cli/keyring_generate.go +++ b/cli/keyring_generate.go @@ -28,7 +28,7 @@ By default peer and encryption keys will be generated. The DEFRA_KEYRING_SECRET environment variable must be set to unlock the keyring. This can also be done with a .env file in the working directory or at a path -defined with the --keyring-secret-file flag. +defined with the --secret-file flag. WARNING: This will overwrite existing keys in the keyring. diff --git a/cli/keyring_import.go b/cli/keyring_import.go index 8f9d691193..e39fa0121d 100644 --- a/cli/keyring_import.go +++ b/cli/keyring_import.go @@ -25,7 +25,7 @@ Store an externally generated key in the keyring. The DEFRA_KEYRING_SECRET environment variable must be set to unlock the keyring. This can also be done with a .env file in the working directory or at a path -defined with the --keyring-secret-file flag. +defined with the --secret-file flag. Example: defradb keyring import encryption-key 0000000000000000`, diff --git a/docs/website/references/cli/defradb_keyring_export.md b/docs/website/references/cli/defradb_keyring_export.md index 5ba8c64cc9..083654becc 100644 --- a/docs/website/references/cli/defradb_keyring_export.md +++ b/docs/website/references/cli/defradb_keyring_export.md @@ -9,7 +9,7 @@ Prints the hexadecimal representation of a private key. The DEFRA_KEYRING_SECRET environment variable must be set to unlock the keyring. This can also be done with a .env file in the working directory or at a path -defined with the --keyring-secret-file flag. +defined with the --secret-file flag. 
Example: defradb keyring export encryption-key diff --git a/docs/website/references/cli/defradb_keyring_generate.md b/docs/website/references/cli/defradb_keyring_generate.md index 747c6046fc..9c7b99bb9f 100644 --- a/docs/website/references/cli/defradb_keyring_generate.md +++ b/docs/website/references/cli/defradb_keyring_generate.md @@ -10,7 +10,7 @@ By default peer and encryption keys will be generated. The DEFRA_KEYRING_SECRET environment variable must be set to unlock the keyring. This can also be done with a .env file in the working directory or at a path -defined with the --keyring-secret-file flag. +defined with the --secret-file flag. WARNING: This will overwrite existing keys in the keyring. diff --git a/docs/website/references/cli/defradb_keyring_import.md b/docs/website/references/cli/defradb_keyring_import.md index c76f2ec34c..d6a275d9b8 100644 --- a/docs/website/references/cli/defradb_keyring_import.md +++ b/docs/website/references/cli/defradb_keyring_import.md @@ -9,7 +9,7 @@ Store an externally generated key in the keyring. The DEFRA_KEYRING_SECRET environment variable must be set to unlock the keyring. This can also be done with a .env file in the working directory or at a path -defined with the --keyring-secret-file flag. +defined with the --secret-file flag. Example: defradb keyring import encryption-key 0000000000000000 From 926c334b9bca77767f64aaa6ae3a0f385c881fe8 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Thu, 19 Sep 2024 12:42:43 -0400 Subject: [PATCH 28/71] fix: Log GQL endpoint correctly on node start (#3037) ## Relevant issue(s) Resolves #3036 ## Description Logs the GQL endpoint correctly on node start. --- node/node.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node/node.go b/node/node.go index 2603762684..5660d0d77c 100644 --- a/node/node.go +++ b/node/node.go @@ -163,7 +163,7 @@ func (n *Node) Start(ctx context.Context) error { } log.InfoContext(ctx, fmt.Sprintf("Providing HTTP API at %s PlaygroundEnabled=%t", n.Server.Address(), http.PlaygroundEnabled)) - log.InfoContext(ctx, fmt.Sprintf("Providing GraphQL endpoint at %s/v0/graphql", n.Server.Address())) + log.InfoContext(ctx, fmt.Sprintf("Providing GraphQL endpoint at %s/api/v0/graphql", n.Server.Address())) go func() { if err := n.Server.Serve(); err != nil && !errors.Is(err, gohttp.ErrServerClosed) { log.ErrorContextE(ctx, "HTTP server stopped", err) From 5754d7d65724d9a2e39cef4473002fd1fbe843b0 Mon Sep 17 00:00:00 2001 From: Islam Aliev Date: Thu, 19 Sep 2024 20:59:22 +0200 Subject: [PATCH 29/71] fix: Queries with filter on 2 rel fields of composite index (#3035) ## Relevant issue(s) Resolves #3032 #2928 ## Description Make the parallel node perform a real merge instead of copying the fields of a fetched doc, which resulted in every subsequent doc overwriting the previous one's fields.
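In short, each child of the parallel node now only fills in the fields that the merged result is still missing. A minimal sketch of the merge semantics (field slices simplified to []any; mergeFields is an illustrative name, not the planner's):

  // mergeFields copies a field from next into current only when current
  // is still missing it, so a later child cannot clobber a field that an
  // earlier child already fetched. It assumes both slices come from the
  // same document mapping and therefore have the same length.
  func mergeFields(current, next []any) {
      for i := range next {
          if current[i] == nil {
              current[i] = next[i]
          }
      }
  }

For example, merging a current value of [model, nil] with a child result of [nil, owner] now yields [model, owner], where previously the second child's fields simply replaced the first child's.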
--- internal/planner/multi.go | 11 ++- internal/planner/scan.go | 29 +++++-- internal/planner/type_join.go | 58 ++++++------- .../query_with_index_only_filter_test.go | 86 +++++++++++++++++++ 4 files changed, 142 insertions(+), 42 deletions(-) diff --git a/internal/planner/multi.go b/internal/planner/multi.go index ac564c4ed1..4b82826118 100644 --- a/internal/planner/multi.go +++ b/internal/planner/multi.go @@ -136,8 +136,15 @@ func (p *parallelNode) nextMerge(_ int, plan planNode) (bool, error) { return false, err } - doc := plan.Value() - copy(p.currentValue.Fields, doc.Fields) + // A field-by-field check is necessary because parallelNode can have multiple children, and + // each child can return the same doc, but with different related fields available + // depending on what is requested. + newFields := plan.Value().Fields + for i := range newFields { + if p.currentValue.Fields[i] == nil { + p.currentValue.Fields[i] = newFields[i] + } + } return true, nil } diff --git a/internal/planner/scan.go b/internal/planner/scan.go index 151705a698..019cd1dee2 100644 --- a/internal/planner/scan.go +++ b/internal/planner/scan.go @@ -92,10 +92,10 @@ func (n *scanNode) initFields(fields []mapper.Requestable) error { switch requestable := r.(type) { // field is simple as its just a base level field case *mapper.Field: - n.tryAddField(requestable.GetName()) + n.tryAddFieldWithName(requestable.GetName()) // select might have its own select fields and filters fields case *mapper.Select: - n.tryAddField(requestable.Field.Name + request.RelatedObjectID) // foreign key for type joins + n.tryAddFieldWithName(requestable.Field.Name + request.RelatedObjectID) // foreign key for type joins err := n.initFields(requestable.Fields) if err != nil { return err } @@ -112,13 +112,13 @@ func (n *scanNode) initFields(fields []mapper.Requestable) error { return err } for _, fd := range fieldDescs { - n.tryAddField(fd.Name) + n.tryAddFieldWithName(fd.Name) } } if target.ChildTarget.HasValue { - n.tryAddField(target.ChildTarget.Name) + n.tryAddFieldWithName(target.ChildTarget.Name) } else { - n.tryAddField(target.Field.Name) + n.tryAddFieldWithName(target.Field.Name) } } } @@ -126,7 +126,7 @@ func (n *scanNode) initFields(fields []mapper.Requestable) error { return nil } -func (n *scanNode) tryAddField(fieldName string) bool { +func (n *scanNode) tryAddFieldWithName(fieldName string) bool { fd, ok := n.col.Definition().GetFieldByName(fieldName) if !ok { // skip fields that are not part of the // is only responsible for basic fields return false } - n.fields = append(n.fields, fd) + n.addField(fd) return true } +// addField adds a field to the list of fields to be fetched. +// It will not add the field if it is already in the list.
+func (n *scanNode) addField(field client.FieldDefinition) { + found := false + for i := range n.fields { + if n.fields[i].Name == field.Name { + found = true + break + } + } + if !found { + n.fields = append(n.fields, field) + } +} + func (scan *scanNode) initFetcher( cid immutable.Option[string], index immutable.Option[client.IndexDescription], diff --git a/internal/planner/type_join.go b/internal/planner/type_join.go index 2102c74479..fc5eb9bbaf 100644 --- a/internal/planner/type_join.go +++ b/internal/planner/type_join.go @@ -531,43 +531,35 @@ func newPrimaryObjectsRetriever( return j } -func (j *primaryObjectsRetriever) retrievePrimaryDocsReferencingSecondaryDoc() error { - relIDFieldDef, ok := j.primarySide.col.Definition().GetFieldByName( - j.primarySide.relFieldDef.Value().Name + request.RelatedObjectID) +func (r *primaryObjectsRetriever) retrievePrimaryDocsReferencingSecondaryDoc() error { + relIDFieldDef, ok := r.primarySide.col.Definition().GetFieldByName( + r.primarySide.relFieldDef.Value().Name + request.RelatedObjectID) if !ok { - return client.NewErrFieldNotExist(j.primarySide.relFieldDef.Value().Name + request.RelatedObjectID) + return client.NewErrFieldNotExist(r.primarySide.relFieldDef.Value().Name + request.RelatedObjectID) } - j.primaryScan = getScanNode(j.primarySide.plan) + r.primaryScan = getScanNode(r.primarySide.plan) - j.relIDFieldDef = relIDFieldDef + r.relIDFieldDef = relIDFieldDef - primaryDocs, err := j.retrievePrimaryDocs() + primaryDocs, err := r.retrievePrimaryDocs() if err != nil { return err } - j.resultPrimaryDocs, j.resultSecondaryDoc = joinPrimaryDocs(primaryDocs, j.secondarySide, j.primarySide) + r.resultPrimaryDocs, r.resultSecondaryDoc = joinPrimaryDocs(primaryDocs, r.secondarySide, r.primarySide) return nil } -func (j *primaryObjectsRetriever) addIDFieldToScanner() { - found := false - for i := range j.primaryScan.fields { - if j.primaryScan.fields[i].Name == j.relIDFieldDef.Name { - found = true - break - } - } - if !found { - j.primaryScan.fields = append(j.primaryScan.fields, j.relIDFieldDef) +func (r *primaryObjectsRetriever) collectDocs(numDocs int) ([]core.Doc, error) { + p := r.primarySide.plan + // If the primary side is a multiScanNode, we need to get the source node, as we are the only + // consumer (one, not multiple) of it. 
+ if multiScan, ok := p.(*multiScanNode); ok { + p = multiScan.Source() + } if err := p.Init(); err != nil { return nil, NewErrSubTypeInit(err) } @@ -591,28 +583,28 @@ func (j *primaryObjectsRetriever) collectDocs(numDocs int) ([]core.Doc, error) { return docs, nil } -func (j *primaryObjectsRetriever) retrievePrimaryDocs() ([]core.Doc, error) { - j.addIDFieldToScanner() +func (r *primaryObjectsRetriever) retrievePrimaryDocs() ([]core.Doc, error) { + r.primaryScan.addField(r.relIDFieldDef) - secondaryDoc := j.secondarySide.plan.Value() - addFilterOnIDField(j.primaryScan, j.primarySide.relIDFieldMapIndex.Value(), secondaryDoc.GetID()) + secondaryDoc := r.secondarySide.plan.Value() + addFilterOnIDField(r.primaryScan, r.primarySide.relIDFieldMapIndex.Value(), secondaryDoc.GetID()) - oldFetcher := j.primaryScan.fetcher + oldFetcher := r.primaryScan.fetcher - indexOnRelation := findIndexByFieldName(j.primaryScan.col, j.relIDFieldDef.Name) - j.primaryScan.initFetcher(immutable.None[string](), indexOnRelation) + indexOnRelation := findIndexByFieldName(r.primaryScan.col, r.relIDFieldDef.Name) + r.primaryScan.initFetcher(immutable.None[string](), indexOnRelation) - docs, err := j.collectDocs(0) + docs, err := r.collectDocs(0) if err != nil { return nil, err } - err = j.primaryScan.fetcher.Close() + err = r.primaryScan.fetcher.Close() if err != nil { return nil, err } - j.primaryScan.fetcher = oldFetcher + r.primaryScan.fetcher = oldFetcher return docs, nil } @@ -780,7 +772,7 @@ func (join *invertibleTypeJoin) invertJoinDirectionWithIndex( ) error { p := join.childSide.plan s := getScanNode(p) - s.tryAddField(join.childSide.relFieldDef.Value().Name + request.RelatedObjectID) + s.tryAddFieldWithName(join.childSide.relFieldDef.Value().Name + request.RelatedObjectID) s.filter = fieldFilter s.initFetcher(immutable.Option[string]{}, immutable.Some(index)) diff --git a/tests/integration/index/query_with_index_only_filter_test.go b/tests/integration/index/query_with_index_only_filter_test.go index 5de362ec81..f0aab40546 100644 --- a/tests/integration/index/query_with_index_only_filter_test.go +++ b/tests/integration/index/query_with_index_only_filter_test.go @@ -718,3 +718,89 @@ func TestQueryWithIndex_EmptyFilterOnIndexedField_ShouldSucceed(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +// This test checks that a query with a filter on 2 relations (one of which is indexed) works. +// Because of the 2 relations in the query a parallelNode will be used, with each child focusing +// on fetching one of the relations. This test makes sure the result of the second child +// (say Device with manufacturer) doesn't overwrite the result of the first child (say Device with owner). +// Also, as the fetching is inverted (because of the index), we first fetch the secondary doc (the +// User) and then fetch all primary docs (Device) that reference that User. For fetching the primary +// docs we use the same planNode, which in this case happens to be a multiScanNode (the source of the parallelNode). +// Every second call to the multiScanNode returns the result of the first call, but here we have +// only one consumer, so we take the source of the multiScanNode and use it to fetch the primary docs, +// to avoid having every doc doubled.
+func TestQueryWithIndex_WithFilterOn2Relations_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index + devices: [Device] + } + + type Manufacturer { + name: String + devices: [Device] + } + + type Device { + owner: User + manufacturer: Manufacturer + model: String + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + DocMap: map[string]any{ + "name": "John", + }, + }, + testUtils.CreateDoc{ + CollectionID: 1, + DocMap: map[string]any{ + "name": "Apple", + }, + }, + testUtils.CreateDoc{ + CollectionID: 2, + DocMap: map[string]any{ + "model": "iPhone", + "owner_id": testUtils.NewDocIndex(0, 0), + "manufacturer_id": testUtils.NewDocIndex(1, 0), + }, + }, + testUtils.CreateDoc{ + CollectionID: 2, + DocMap: map[string]any{ + "model": "MacBook", + "owner_id": testUtils.NewDocIndex(0, 0), + "manufacturer_id": testUtils.NewDocIndex(1, 0), + }, + }, + testUtils.Request{ + Request: `query { + Device (filter: { + manufacturer: {name: {_eq: "Apple"}}, + owner: {name: {_eq: "John"}} + }) { + model + } + }`, + Results: map[string]any{ + "Device": []map[string]any{ + { + "model": "iPhone", + }, + { + "model": "MacBook", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From cddc6d626a5da65029cd4982aad4ea19f89f2b58 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 19 Sep 2024 15:36:06 -0700 Subject: [PATCH 30/71] fix(i): Default field value overwrite on update (#3030) ## Relevant issue(s) Resolves #3029 ## Description This PR fixes an issue where default field values would overwrite any missing input fields. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? Added integration test Specify the platform(s) on which this was tested: - MacOS --- internal/request/graphql/schema/generate.go | 3 +- .../update/with_default_values_test.go | 62 +++++++++++++++++++ 2 files changed, 63 insertions(+), 2 deletions(-) create mode 100644 tests/integration/mutation/update/with_default_values_test.go diff --git a/internal/request/graphql/schema/generate.go b/internal/request/graphql/schema/generate.go index 8ae36d230f..c198296ffb 100644 --- a/internal/request/graphql/schema/generate.go +++ b/internal/request/graphql/schema/generate.go @@ -583,8 +583,7 @@ func (g *Generator) buildMutationInputTypes(collections []client.CollectionDefin } fields[field.Name] = &gql.InputObjectFieldConfig{ - Type: ttype, - DefaultValue: field.DefaultValue, + Type: ttype, } } diff --git a/tests/integration/mutation/update/with_default_values_test.go b/tests/integration/mutation/update/with_default_values_test.go new file mode 100644 index 0000000000..78e8b7fb12 --- /dev/null +++ b/tests/integration/mutation/update/with_default_values_test.go @@ -0,0 +1,62 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package update + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpdate_WithDefaultValues_DoesNotOverwrite(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update mutation with default value does not overwrite", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + score: Int @default(int: 100) + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "score": 0 + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "name": "Fred" + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + score + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Fred", + "score": int64(0), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From 55e56a598ed9d087660cf9bdd2bf600fd8684566 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 20 Sep 2024 13:12:40 -0700 Subject: [PATCH 31/71] feat: Inline array filters (#3028) ## Relevant issue(s) Resolves #2857 ## Description This PR adds three new filter types for inline array types: `_all`, `_any`, and `_none`. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? 
Added integration tests Specify the platform(s) on which this was tested: - MacOS --- internal/connor/all.go | 53 +++ internal/connor/and.go | 2 +- internal/connor/any.go | 53 +++ internal/connor/connor.go | 6 + internal/connor/eq.go | 69 ++-- internal/connor/none.go | 12 + internal/planner/mapper/mapper.go | 8 +- internal/request/graphql/schema/generate.go | 14 +- internal/request/graphql/schema/manager.go | 58 +++- internal/request/graphql/schema/types/base.go | 184 ++++++++++- .../graphql/schema/types/descriptions.go | 10 + .../inline_array/with_filter_all_test.go | 305 ++++++++++++++++++ .../inline_array/with_filter_any_test.go | 305 ++++++++++++++++++ .../inline_array/with_filter_none_test.go | 305 ++++++++++++++++++ .../query/simple/with_filter/with_and_test.go | 45 +++ .../query/simple/with_filter/with_or_test.go | 48 +++ .../schema/aggregates/inline_array_test.go | 108 ++++--- 17 files changed, 1467 insertions(+), 118 deletions(-) create mode 100644 internal/connor/all.go create mode 100644 internal/connor/any.go create mode 100644 internal/connor/none.go create mode 100644 tests/integration/query/inline_array/with_filter_all_test.go create mode 100644 tests/integration/query/inline_array/with_filter_any_test.go create mode 100644 tests/integration/query/inline_array/with_filter_none_test.go diff --git a/internal/connor/all.go b/internal/connor/all.go new file mode 100644 index 0000000000..0b9800de89 --- /dev/null +++ b/internal/connor/all.go @@ -0,0 +1,53 @@ +package connor + +import ( + "github.com/sourcenetwork/defradb/client" + + "github.com/sourcenetwork/immutable" +) + +// all is an operator which allows the evaluation of +// a number of conditions over a list of values +// matching if all of them match. +func all(condition, data any) (bool, error) { + switch t := data.(type) { + case []string: + return allSlice(condition, t) + + case []immutable.Option[string]: + return allSlice(condition, t) + + case []int64: + return allSlice(condition, t) + + case []immutable.Option[int64]: + return allSlice(condition, t) + + case []bool: + return allSlice(condition, t) + + case []immutable.Option[bool]: + return allSlice(condition, t) + + case []float64: + return allSlice(condition, t) + + case []immutable.Option[float64]: + return allSlice(condition, t) + + default: + return false, client.NewErrUnhandledType("data", data) + } +} + +func allSlice[T any](condition any, data []T) (bool, error) { + for _, c := range data { + m, err := eq(condition, c) + if err != nil { + return false, err + } else if !m { + return false, nil + } + } + return true, nil +} diff --git a/internal/connor/and.go b/internal/connor/and.go index 054718ee6a..be2e097309 100644 --- a/internal/connor/and.go +++ b/internal/connor/and.go @@ -14,8 +14,8 @@ func and(condition, data any) (bool, error) { return false, nil } } - return true, nil + default: return false, client.NewErrUnhandledType("condition", cn) } diff --git a/internal/connor/any.go b/internal/connor/any.go new file mode 100644 index 0000000000..a9c02b1369 --- /dev/null +++ b/internal/connor/any.go @@ -0,0 +1,53 @@ +package connor + +import ( + "github.com/sourcenetwork/defradb/client" + + "github.com/sourcenetwork/immutable" +) + +// anyOp is an operator which allows the evaluation of +// a number of conditions over a list of values +// matching if any of them match. 
+func anyOp(condition, data any) (bool, error) { + switch t := data.(type) { + case []string: + return anySlice(condition, t) + + case []immutable.Option[string]: + return anySlice(condition, t) + + case []int64: + return anySlice(condition, t) + + case []immutable.Option[int64]: + return anySlice(condition, t) + + case []bool: + return anySlice(condition, t) + + case []immutable.Option[bool]: + return anySlice(condition, t) + + case []float64: + return anySlice(condition, t) + + case []immutable.Option[float64]: + return anySlice(condition, t) + + default: + return false, client.NewErrUnhandledType("data", data) + } +} + +func anySlice[T any](condition any, data []T) (bool, error) { + for _, c := range data { + m, err := eq(condition, c) + if err != nil { + return false, err + } else if m { + return true, nil + } + } + return false, nil +} diff --git a/internal/connor/connor.go b/internal/connor/connor.go index 927b1dfffd..086ba0cd49 100644 --- a/internal/connor/connor.go +++ b/internal/connor/connor.go @@ -18,6 +18,10 @@ func matchWith(op string, conditions, data any) (bool, error) { switch op { case "_and": return and(conditions, data) + case "_any": + return anyOp(conditions, data) + case "_all": + return all(conditions, data) case "_eq": return eq(conditions, data) case "_ge": @@ -44,6 +48,8 @@ func matchWith(op string, conditions, data any) (bool, error) { return ilike(conditions, data) case "_nilike": return nilike(conditions, data) + case "_none": + return none(conditions, data) case "_not": return not(conditions, data) default: diff --git a/internal/connor/eq.go b/internal/connor/eq.go index 86888eef37..3f849348b8 100644 --- a/internal/connor/eq.go +++ b/internal/connor/eq.go @@ -16,74 +16,59 @@ import ( func eq(condition, data any) (bool, error) { switch arr := data.(type) { case []core.Doc: - for _, item := range arr { - m, err := eq(condition, item) - if err != nil { - return false, err - } - - if m { - return true, nil - } - } - return false, nil + return anySlice(condition, arr) case immutable.Option[bool]: - if !arr.HasValue() { - return condition == nil, nil - } - data = arr.Value() + data = immutableValueOrNil(arr) case immutable.Option[int64]: - if !arr.HasValue() { - return condition == nil, nil - } - data = arr.Value() + data = immutableValueOrNil(arr) case immutable.Option[float64]: - if !arr.HasValue() { - return condition == nil, nil - } - data = arr.Value() + data = immutableValueOrNil(arr) case immutable.Option[string]: - if !arr.HasValue() { - return condition == nil, nil - } - data = arr.Value() + data = immutableValueOrNil(arr) } switch cn := condition.(type) { + case map[FilterKey]any: + for prop, cond := range cn { + m, err := matchWith(prop.GetOperatorOrDefault("_eq"), cond, prop.GetProp(data)) + if err != nil { + return false, err + } else if !m { + return false, nil + } + } + return true, nil + case string: if d, ok := data.(string); ok { return d == cn, nil } return false, nil + case int64: return numbers.Equal(cn, data), nil + case int32: return numbers.Equal(cn, data), nil + case float64: return numbers.Equal(cn, data), nil - case map[FilterKey]any: - m := true - for prop, cond := range cn { - var err error - m, err = matchWith(prop.GetOperatorOrDefault("_eq"), cond, prop.GetProp(data)) - if err != nil { - return false, err - } - - if !m { - // No need to evaluate after we fail - break - } - } - return m, nil case time.Time: return ctime.Equal(cn, data), nil + default: return reflect.DeepEqual(condition, data), nil } } + +func immutableValueOrNil[T 
any](data immutable.Option[T]) any { + if data.HasValue() { + return data.Value() + } + return nil +} diff --git a/internal/connor/none.go b/internal/connor/none.go new file mode 100644 index 0000000000..16613b3e46 --- /dev/null +++ b/internal/connor/none.go @@ -0,0 +1,12 @@ +package connor + +// none is an operator which allows the evaluation of +// a number of conditions over a list of values +// matching if all of them do not match. +func none(condition, data any) (bool, error) { + m, err := anyOp(condition, data) + if err != nil { + return false, err + } + return !m, nil +} diff --git a/internal/planner/mapper/mapper.go b/internal/planner/mapper/mapper.go index 706f9235de..ac6bb80c78 100644 --- a/internal/planner/mapper/mapper.go +++ b/internal/planner/mapper/mapper.go @@ -1348,13 +1348,15 @@ func toFilterMap( returnClause := map[connor.FilterKey]any{} for innerSourceKey, innerSourceValue := range typedClause { var innerMapping *core.DocumentMapping - switch innerSourceValue.(type) { - case map[string]any: + // innerSourceValue may refer to a child mapping or + // an inline array if we don't have a child mapping + _, ok := innerSourceValue.(map[string]any) + if ok && index < len(mapping.ChildMappings) { // If the innerSourceValue is also a map, then we should parse the nested clause // using the child mapping, as this key must refer to a host property in a join // and deeper keys must refer to properties on the child items. innerMapping = mapping.ChildMappings[index] - default: + } else { innerMapping = mapping } rKey, rValue := toFilterMap(innerSourceKey, innerSourceValue, innerMapping) diff --git a/internal/request/graphql/schema/generate.go b/internal/request/graphql/schema/generate.go index c198296ffb..f5a2f4c624 100644 --- a/internal/request/graphql/schema/generate.go +++ b/internal/request/graphql/schema/generate.go @@ -1150,11 +1150,17 @@ func (g *Generator) genTypeFilterArgInput(obj *gql.Object) *gql.InputObject { } // scalars (leafs) if gql.IsLeafType(field.Type) { - if _, isList := field.Type.(*gql.List); isList { - // Filtering by inline array value is currently not supported - continue + var operatorName string + if list, isList := field.Type.(*gql.List); isList { + if notNull, isNotNull := list.OfType.(*gql.NonNull); isNotNull { + operatorName = "NotNull" + notNull.OfType.Name() + "ListOperatorBlock" + } else { + operatorName = list.OfType.Name() + "ListOperatorBlock" + } + } else { + operatorName = field.Type.Name() + "OperatorBlock" } - operatorType, isFilterable := g.manager.schema.TypeMap()[field.Type.Name()+"OperatorBlock"] + operatorType, isFilterable := g.manager.schema.TypeMap()[operatorName] if !isFilterable { continue } diff --git a/internal/request/graphql/schema/manager.go b/internal/request/graphql/schema/manager.go index 0385c50ac9..792535fda0 100644 --- a/internal/request/graphql/schema/manager.go +++ b/internal/request/graphql/schema/manager.go @@ -188,6 +188,22 @@ func defaultTypes( blobScalarType := schemaTypes.BlobScalarType() jsonScalarType := schemaTypes.JSONScalarType() + idOpBlock := schemaTypes.IDOperatorBlock() + intOpBlock := schemaTypes.IntOperatorBlock() + floatOpBlock := schemaTypes.FloatOperatorBlock() + booleanOpBlock := schemaTypes.BooleanOperatorBlock() + stringOpBlock := schemaTypes.StringOperatorBlock() + jsonOpBlock := schemaTypes.JSONOperatorBlock(jsonScalarType) + blobOpBlock := schemaTypes.BlobOperatorBlock(blobScalarType) + dateTimeOpBlock := schemaTypes.DateTimeOperatorBlock() + + notNullIntOpBlock := 
schemaTypes.NotNullIntOperatorBlock() + notNullFloatOpBlock := schemaTypes.NotNullFloatOperatorBlock() + notNullBooleanOpBlock := schemaTypes.NotNullBooleanOperatorBlock() + notNullStringOpBlock := schemaTypes.NotNullStringOperatorBlock() + notNullJSONOpBlock := schemaTypes.NotNullJSONOperatorBlock(jsonScalarType) + notNullBlobOpBlock := schemaTypes.NotNullBlobOperatorBlock(blobScalarType) + return []gql.Type{ // Base Scalar types gql.Boolean, @@ -207,20 +223,34 @@ func defaultTypes( orderEnum, // Filter scalar blocks - schemaTypes.BooleanOperatorBlock(), - schemaTypes.NotNullBooleanOperatorBlock(), - schemaTypes.DateTimeOperatorBlock(), - schemaTypes.FloatOperatorBlock(), - schemaTypes.NotNullFloatOperatorBlock(), - schemaTypes.IdOperatorBlock(), - schemaTypes.IntOperatorBlock(), - schemaTypes.NotNullIntOperatorBlock(), - schemaTypes.StringOperatorBlock(), - schemaTypes.NotNullstringOperatorBlock(), - schemaTypes.JSONOperatorBlock(jsonScalarType), - schemaTypes.NotNullJSONOperatorBlock(jsonScalarType), - schemaTypes.BlobOperatorBlock(blobScalarType), - schemaTypes.NotNullBlobOperatorBlock(blobScalarType), + idOpBlock, + intOpBlock, + floatOpBlock, + booleanOpBlock, + stringOpBlock, + jsonOpBlock, + blobOpBlock, + dateTimeOpBlock, + + // Filter non null scalar blocks + notNullIntOpBlock, + notNullFloatOpBlock, + notNullBooleanOpBlock, + notNullStringOpBlock, + notNullJSONOpBlock, + notNullBlobOpBlock, + + // Filter scalar list blocks + schemaTypes.IntListOperatorBlock(intOpBlock), + schemaTypes.FloatListOperatorBlock(floatOpBlock), + schemaTypes.BooleanListOperatorBlock(booleanOpBlock), + schemaTypes.StringListOperatorBlock(stringOpBlock), + + // Filter non null scalar list blocks + schemaTypes.NotNullIntListOperatorBlock(notNullIntOpBlock), + schemaTypes.NotNullFloatListOperatorBlock(notNullFloatOpBlock), + schemaTypes.NotNullBooleanListOperatorBlock(notNullBooleanOpBlock), + schemaTypes.NotNullStringListOperatorBlock(notNullStringOpBlock), commitsOrderArg, commitLinkObject, diff --git a/internal/request/graphql/schema/types/base.go b/internal/request/graphql/schema/types/base.go index fd49fbb45a..4675169989 100644 --- a/internal/request/graphql/schema/types/base.go +++ b/internal/request/graphql/schema/types/base.go @@ -40,6 +40,28 @@ func BooleanOperatorBlock() *gql.InputObject { }) } +// BooleanListOperatorBlock filter block for [Boolean] types. +func BooleanListOperatorBlock(op *gql.InputObject) *gql.InputObject { + return gql.NewInputObject(gql.InputObjectConfig{ + Name: "BooleanListOperatorBlock", + Description: "These are the set of filter operators available for use when filtering on [Boolean] values.", + Fields: gql.InputObjectConfigFieldMap{ + "_any": &gql.InputObjectFieldConfig{ + Description: anyOperatorDescription, + Type: op, + }, + "_all": &gql.InputObjectFieldConfig{ + Description: allOperatorDescription, + Type: op, + }, + "_none": &gql.InputObjectFieldConfig{ + Description: noneOperatorDescription, + Type: op, + }, + }, + }) +} + // NotNullBooleanOperatorBlock filter block for boolean! types. func NotNullBooleanOperatorBlock() *gql.InputObject { return gql.NewInputObject(gql.InputObjectConfig{ @@ -66,6 +88,28 @@ func NotNullBooleanOperatorBlock() *gql.InputObject { }) } +// NotNullBooleanListOperatorBlock filter block for [Boolean!] types. 
+func NotNullBooleanListOperatorBlock(op *gql.InputObject) *gql.InputObject { + return gql.NewInputObject(gql.InputObjectConfig{ + Name: "NotNullBooleanListOperatorBlock", + Description: "These are the set of filter operators available for use when filtering on [Boolean!] values.", + Fields: gql.InputObjectConfigFieldMap{ + "_any": &gql.InputObjectFieldConfig{ + Description: anyOperatorDescription, + Type: op, + }, + "_all": &gql.InputObjectFieldConfig{ + Description: allOperatorDescription, + Type: op, + }, + "_none": &gql.InputObjectFieldConfig{ + Description: noneOperatorDescription, + Type: op, + }, + }, + }) +} + // DateTimeOperatorBlock filter block for DateTime types. func DateTimeOperatorBlock() *gql.InputObject { return gql.NewInputObject(gql.InputObjectConfig{ @@ -150,6 +194,28 @@ func FloatOperatorBlock() *gql.InputObject { }) } +// FloatListOperatorBlock filter block for [Float] types. +func FloatListOperatorBlock(op *gql.InputObject) *gql.InputObject { + return gql.NewInputObject(gql.InputObjectConfig{ + Name: "FloatListOperatorBlock", + Description: "These are the set of filter operators available for use when filtering on [Float] values.", + Fields: gql.InputObjectConfigFieldMap{ + "_any": &gql.InputObjectFieldConfig{ + Description: anyOperatorDescription, + Type: op, + }, + "_all": &gql.InputObjectFieldConfig{ + Description: allOperatorDescription, + Type: op, + }, + "_none": &gql.InputObjectFieldConfig{ + Description: noneOperatorDescription, + Type: op, + }, + }, + }) +} + // NotNullFloatOperatorBlock filter block for Float! types. func NotNullFloatOperatorBlock() *gql.InputObject { return gql.NewInputObject(gql.InputObjectConfig{ @@ -192,6 +258,28 @@ func NotNullFloatOperatorBlock() *gql.InputObject { }) } +// NotNullFloatListOperatorBlock filter block for [NotNullFloat] types. +func NotNullFloatListOperatorBlock(op *gql.InputObject) *gql.InputObject { + return gql.NewInputObject(gql.InputObjectConfig{ + Name: "NotNullFloatListOperatorBlock", + Description: "These are the set of filter operators available for use when filtering on [Float!] values.", + Fields: gql.InputObjectConfigFieldMap{ + "_any": &gql.InputObjectFieldConfig{ + Description: anyOperatorDescription, + Type: op, + }, + "_all": &gql.InputObjectFieldConfig{ + Description: allOperatorDescription, + Type: op, + }, + "_none": &gql.InputObjectFieldConfig{ + Description: noneOperatorDescription, + Type: op, + }, + }, + }) +} + // IntOperatorBlock filter block for Int types. func IntOperatorBlock() *gql.InputObject { return gql.NewInputObject(gql.InputObjectConfig{ @@ -234,6 +322,28 @@ func IntOperatorBlock() *gql.InputObject { }) } +// IntListOperatorBlock filter block for [Int] types. +func IntListOperatorBlock(op *gql.InputObject) *gql.InputObject { + return gql.NewInputObject(gql.InputObjectConfig{ + Name: "IntListOperatorBlock", + Description: "These are the set of filter operators available for use when filtering on [Int] values.", + Fields: gql.InputObjectConfigFieldMap{ + "_any": &gql.InputObjectFieldConfig{ + Description: anyOperatorDescription, + Type: op, + }, + "_all": &gql.InputObjectFieldConfig{ + Description: allOperatorDescription, + Type: op, + }, + "_none": &gql.InputObjectFieldConfig{ + Description: noneOperatorDescription, + Type: op, + }, + }, + }) +} + // NotNullIntOperatorBlock filter block for Int! types. 
func NotNullIntOperatorBlock() *gql.InputObject { return gql.NewInputObject(gql.InputObjectConfig{ @@ -276,6 +386,28 @@ func NotNullIntOperatorBlock() *gql.InputObject { }) } +// NotNullIntListOperatorBlock filter block for [NotNullInt] types. +func NotNullIntListOperatorBlock(op *gql.InputObject) *gql.InputObject { + return gql.NewInputObject(gql.InputObjectConfig{ + Name: "NotNullIntListOperatorBlock", + Description: "These are the set of filter operators available for use when filtering on [Int!] values.", + Fields: gql.InputObjectConfigFieldMap{ + "_any": &gql.InputObjectFieldConfig{ + Description: anyOperatorDescription, + Type: op, + }, + "_all": &gql.InputObjectFieldConfig{ + Description: allOperatorDescription, + Type: op, + }, + "_none": &gql.InputObjectFieldConfig{ + Description: noneOperatorDescription, + Type: op, + }, + }, + }) +} + // StringOperatorBlock filter block for string types. func StringOperatorBlock() *gql.InputObject { return gql.NewInputObject(gql.InputObjectConfig{ @@ -318,8 +450,30 @@ func StringOperatorBlock() *gql.InputObject { }) } -// NotNullstringOperatorBlock filter block for string! types. -func NotNullstringOperatorBlock() *gql.InputObject { +// StringListOperatorBlock filter block for [String] types. +func StringListOperatorBlock(op *gql.InputObject) *gql.InputObject { + return gql.NewInputObject(gql.InputObjectConfig{ + Name: "StringListOperatorBlock", + Description: "These are the set of filter operators available for use when filtering on [String] values.", + Fields: gql.InputObjectConfigFieldMap{ + "_any": &gql.InputObjectFieldConfig{ + Description: anyOperatorDescription, + Type: op, + }, + "_all": &gql.InputObjectFieldConfig{ + Description: allOperatorDescription, + Type: op, + }, + "_none": &gql.InputObjectFieldConfig{ + Description: noneOperatorDescription, + Type: op, + }, + }, + }) +} + +// NotNullStringOperatorBlock filter block for string! types. +func NotNullStringOperatorBlock() *gql.InputObject { return gql.NewInputObject(gql.InputObjectConfig{ Name: "NotNullStringOperatorBlock", Description: notNullStringOperatorBlockDescription, @@ -360,6 +514,28 @@ func NotNullstringOperatorBlock() *gql.InputObject { }) } +// NotNullStringListOperatorBlock filter block for [String!] types. +func NotNullStringListOperatorBlock(op *gql.InputObject) *gql.InputObject { + return gql.NewInputObject(gql.InputObjectConfig{ + Name: "NotNullStringListOperatorBlock", + Description: "These are the set of filter operators available for use when filtering on [String!] values.", + Fields: gql.InputObjectConfigFieldMap{ + "_any": &gql.InputObjectFieldConfig{ + Description: anyOperatorDescription, + Type: op, + }, + "_all": &gql.InputObjectFieldConfig{ + Description: allOperatorDescription, + Type: op, + }, + "_none": &gql.InputObjectFieldConfig{ + Description: noneOperatorDescription, + Type: op, + }, + }, + }) +} + // JSONOperatorBlock filter block for string types. func JSONOperatorBlock(jsonScalarType *gql.Scalar) *gql.InputObject { return gql.NewInputObject(gql.InputObjectConfig{ @@ -527,8 +703,8 @@ func NotNullBlobOperatorBlock(blobScalarType *gql.Scalar) *gql.InputObject { }) } -// IdOperatorBlock filter block for ID types. -func IdOperatorBlock() *gql.InputObject { +// IDOperatorBlock filter block for ID types. 
+func IDOperatorBlock() *gql.InputObject { return gql.NewInputObject(gql.InputObjectConfig{ Name: "IDOperatorBlock", Description: idOperatorBlockDescription, diff --git a/internal/request/graphql/schema/types/descriptions.go b/internal/request/graphql/schema/types/descriptions.go index 27cd3a6f74..213266d891 100644 --- a/internal/request/graphql/schema/types/descriptions.go +++ b/internal/request/graphql/schema/types/descriptions.go @@ -222,6 +222,16 @@ The or operator - only one check within this clause must pass in order for this NotOperatorDescription string = ` The negative operator - this check will only pass if all checks within it fail. ` + anyOperatorDescription string = ` +The any operator - the check must pass on at least one item in order for this check to pass. +` + allOperatorDescription string = ` +The all operator - the check must pass on every item in order for this check to pass. +` + noneOperatorDescription string = ` +The none operator - the check must fail on every item in order for this check to pass. +` + ascOrderDescription string = ` Sort the results in ascending order, e.g. null,1,2,3,a,b,c. ` diff --git a/tests/integration/query/inline_array/with_filter_all_test.go b/tests/integration/query/inline_array/with_filter_all_test.go new file mode 100644 index 0000000000..1661c54731 --- /dev/null +++ b/tests/integration/query/inline_array/with_filter_all_test.go @@ -0,0 +1,305 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt.
+ +package inline_array + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryInlineStringArray_WithAllFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered all of string array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "pageHeaders": ["first", "second"] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "pageHeaders": [null, "second"] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {pageHeaders: {_all: {_ne: null}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNotNullStringArray_WithAllFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered all of non null string array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "preferredStrings": ["first", "second"] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "preferredStrings": ["", "second"] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {preferredStrings: {_all: {_ne: ""}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineIntArray_WithAllFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered all of int array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "testScores": [50, 80] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "testScores": [null, 60] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {testScores: {_all: {_ne: null}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNotNullIntArray_WithAllFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered all of non null int array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "testScores": [50, 80] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "testScores": [0, 60] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {testScores: {_all: {_lt: 70}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Fred", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineFloatArray_WithAllFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered all of float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "pageRatings": [50, 80] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "pageRatings": [null, 60] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {pageRatings: {_all: {_ne: null}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNotNullFloatArray_WithAllFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered all of non null float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + 
"pageRatings": [50, 80] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "pageRatings": [0, 60] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {pageRatings: {_all: {_lt: 70}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Fred", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineBooleanArray_WithAllFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered all of boolean array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "indexLikesDislikes": [false, false] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "indexLikesDislikes": [null, true] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {indexLikesDislikes: {_all: {_ne: null}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNotNullBooleanArray_WithAllFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered all of non null boolean array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "likedIndexes": [false, false] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "likedIndexes": [true, true] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {likedIndexes: {_all: {_eq: true}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Fred", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/inline_array/with_filter_any_test.go b/tests/integration/query/inline_array/with_filter_any_test.go new file mode 100644 index 0000000000..0dfc815595 --- /dev/null +++ b/tests/integration/query/inline_array/with_filter_any_test.go @@ -0,0 +1,305 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package inline_array + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryInlineStringArray_WithAnyFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered any of string array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "pageHeaders": ["first", "second"] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "pageHeaders": [null, "second"] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {pageHeaders: {_any: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Fred", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNotNullStringArray_WithAnyFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered any of non null string array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "preferredStrings": ["first", "second"] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "preferredStrings": ["", "second"] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {preferredStrings: {_any: {_eq: ""}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Fred", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineIntArray_WithAnyFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered any of int array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "testScores": [50, 80] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "testScores": [null, 60] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {testScores: {_any: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Fred", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNotNullIntArray_WithAnyFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered any of non null int array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "testScores": [50, 80] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "testScores": [0, 60] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {testScores: {_any: {_gt: 70}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineFloatArray_WithAnyFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered any of float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "pageRatings": [50, 80] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "pageRatings": [null, 60] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {pageRatings: {_any: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Fred", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNotNullFloatArray_WithAnyFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered any of non null float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + 
"pageRatings": [50, 80] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "pageRatings": [0, 60] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {pageRatings: {_any: {_gt: 70}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineBooleanArray_WithAnyFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered any of boolean array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "indexLikesDislikes": [false, false] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "indexLikesDislikes": [null, true] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {indexLikesDislikes: {_any: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Fred", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNotNullBooleanArray_WithAnyFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered any of non null boolean array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "likedIndexes": [false, false] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "likedIndexes": [true, true] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {likedIndexes: {_any: {_eq: true}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Fred", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/inline_array/with_filter_none_test.go b/tests/integration/query/inline_array/with_filter_none_test.go new file mode 100644 index 0000000000..0dcb45b4f7 --- /dev/null +++ b/tests/integration/query/inline_array/with_filter_none_test.go @@ -0,0 +1,305 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package inline_array + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryInlineStringArrayWithNoneFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered none of string array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "pageHeaders": ["first", "second"] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "pageHeaders": [null, "second"] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {pageHeaders: {_none: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNonNullStringArrayWithNoneFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered none of non null string array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "preferredStrings": ["first", "second"] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "preferredStrings": ["", "second"] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {preferredStrings: {_none: {_eq: ""}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineIntArrayWithNoneFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered none of int array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "testScores": [50, 80] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "testScores": [null, 60] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {testScores: {_none: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNonNullIntArrayWithNoneFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered none of non null int array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "testScores": [50, 80] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "testScores": [0, 60] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {testScores: {_none: {_gt: 70}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Fred", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineFloatArrayWithNoneFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered none of float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "pageRatings": [50, 80] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "pageRatings": [null, 60] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {pageRatings: {_none: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNonNullFloatArrayWithNoneFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered none of non null float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "pageRatings": [50, 80] + }`, + }, + 
testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "pageRatings": [0, 60] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {pageRatings: {_none: {_gt: 70}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Fred", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineBooleanArrayWithNoneFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered none of boolean array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "indexLikesDislikes": [false, false] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "indexLikesDislikes": [null, true] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {indexLikesDislikes: {_none: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNonNullBooleanArrayWithNoneFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered none of non null boolean array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "likedIndexes": [false, false] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "likedIndexes": [true, true] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {likedIndexes: {_none: {_ne: true}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Fred", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_filter/with_and_test.go b/tests/integration/query/simple/with_filter/with_and_test.go index 81ccbeb35f..eb566fd751 100644 --- a/tests/integration/query/simple/with_filter/with_and_test.go +++ b/tests/integration/query/simple/with_filter/with_and_test.go @@ -69,3 +69,48 @@ func TestQuerySimpleWithIntGreaterThanAndIntLessThanFilter(t *testing.T) { executeTestCase(t, test) } + +func TestQuerySimple_WithInlineIntArray_GreaterThanAndLessThanFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with logical compound filter (and) on inline int array", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: `type Users { + Name: String + FavoriteNumbers: [Int!] 
+ }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "FavoriteNumbers": [0, 10, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "FavoriteNumbers": [30, 40, 50] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {_and: [ + {FavoriteNumbers: {_all: {_ge: 0}}}, + {FavoriteNumbers: {_all: {_lt: 30}}}, + ]}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "Bob", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_filter/with_or_test.go b/tests/integration/query/simple/with_filter/with_or_test.go index fe63d7cc62..e208693049 100644 --- a/tests/integration/query/simple/with_filter/with_or_test.go +++ b/tests/integration/query/simple/with_filter/with_or_test.go @@ -69,3 +69,51 @@ func TestQuerySimpleWithIntEqualToXOrYFilter(t *testing.T) { executeTestCase(t, test) } + +func TestQuerySimple_WithInlineIntArray_EqualToXOrYFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with logical compound filter (or) on inline int array", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: `type Users { + Name: String + FavoriteNumbers: [Int!] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "FavoriteNumbers": [10, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "FavoriteNumbers": [30, 40] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {_or: [ + {FavoriteNumbers: {_any: {_le: 100}}}, + {FavoriteNumbers: {_any: {_ge: 0}}}, + ]}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "Alice", + }, + { + "Name": "Bob", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/aggregates/inline_array_test.go b/tests/integration/schema/aggregates/inline_array_test.go index 1dfaa4a858..a7fa6518fa 100644 --- a/tests/integration/schema/aggregates/inline_array_test.go +++ b/tests/integration/schema/aggregates/inline_array_test.go @@ -388,59 +388,67 @@ func TestSchemaAggregateInlineArrayCreatesUsersAverage(t *testing.T) { } */ -var aggregateGroupArg = map[string]any{ - "name": "_group", - "type": map[string]any{ - "name": "Users__CountSelector", - "inputFields": []any{ - map[string]any{ - "name": "filter", - "type": map[string]any{ - "name": "UsersFilterArg", - "inputFields": []any{ - map[string]any{ - "name": "_and", - "type": map[string]any{ - "name": nil, +func aggregateGroupArg(fieldType string) map[string]any { + return map[string]any{ + "name": "_group", + "type": map[string]any{ + "name": "Users__CountSelector", + "inputFields": []any{ + map[string]any{ + "name": "filter", + "type": map[string]any{ + "name": "UsersFilterArg", + "inputFields": []any{ + map[string]any{ + "name": "Favourites", + "type": map[string]any{ + "name": fieldType + "ListOperatorBlock", + }, }, - }, - map[string]any{ - "name": "_docID", - "type": map[string]any{ - "name": "IDOperatorBlock", + map[string]any{ + "name": "_and", + "type": map[string]any{ + "name": nil, + }, }, - }, - map[string]any{ - "name": "_not", - "type": map[string]any{ - "name": "UsersFilterArg", + map[string]any{ + "name": "_docID", + "type": map[string]any{ + "name": "IDOperatorBlock", + }, }, - }, - map[string]any{ - "name": "_or", - "type": map[string]any{ - "name": nil, + map[string]any{ + "name": "_not", + "type": map[string]any{ + "name": "UsersFilterArg", + }, + }, + map[string]any{ + "name": "_or", + "type": map[string]any{ + 
"name": nil, + }, }, }, }, }, - }, - map[string]any{ - "name": "limit", - "type": map[string]any{ - "name": "Int", - "inputFields": nil, + map[string]any{ + "name": "limit", + "type": map[string]any{ + "name": "Int", + "inputFields": nil, + }, }, - }, - map[string]any{ - "name": "offset", - "type": map[string]any{ - "name": "Int", - "inputFields": nil, + map[string]any{ + "name": "offset", + "type": map[string]any{ + "name": "Int", + "inputFields": nil, + }, }, }, }, - }, + } } var aggregateVersionArg = map[string]any{ @@ -578,7 +586,7 @@ func TestSchemaAggregateInlineArrayCreatesUsersNillableBooleanCountFilter(t *tes }, }, }, - aggregateGroupArg, + aggregateGroupArg("Boolean"), aggregateVersionArg, }, }, @@ -704,7 +712,7 @@ func TestSchemaAggregateInlineArrayCreatesUsersBooleanCountFilter(t *testing.T) }, }, }, - aggregateGroupArg, + aggregateGroupArg("NotNullBoolean"), aggregateVersionArg, }, }, @@ -854,7 +862,7 @@ func TestSchemaAggregateInlineArrayCreatesUsersNillableIntegerCountFilter(t *tes }, }, }, - aggregateGroupArg, + aggregateGroupArg("Int"), aggregateVersionArg, }, }, @@ -1004,7 +1012,7 @@ func TestSchemaAggregateInlineArrayCreatesUsersIntegerCountFilter(t *testing.T) }, }, }, - aggregateGroupArg, + aggregateGroupArg("NotNullInt"), aggregateVersionArg, }, }, @@ -1154,7 +1162,7 @@ func TestSchemaAggregateInlineArrayCreatesUsersNillableFloatCountFilter(t *testi }, }, }, - aggregateGroupArg, + aggregateGroupArg("Float"), aggregateVersionArg, }, }, @@ -1304,7 +1312,7 @@ func TestSchemaAggregateInlineArrayCreatesUsersFloatCountFilter(t *testing.T) { }, }, }, - aggregateGroupArg, + aggregateGroupArg("NotNullFloat"), aggregateVersionArg, }, }, @@ -1454,7 +1462,7 @@ func TestSchemaAggregateInlineArrayCreatesUsersNillableStringCountFilter(t *test }, }, }, - aggregateGroupArg, + aggregateGroupArg("String"), aggregateVersionArg, }, }, @@ -1604,7 +1612,7 @@ func TestSchemaAggregateInlineArrayCreatesUsersStringCountFilter(t *testing.T) { }, }, }, - aggregateGroupArg, + aggregateGroupArg("NotNullString"), aggregateVersionArg, }, }, From d872a0c26d98163182217bb31f1e690ea5012513 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 20 Sep 2024 15:19:06 -0700 Subject: [PATCH 32/71] fix: Make GraphQL errors spec compliant (#3040) ## Relevant issue(s) Resolves #3039 Resolves #3041 ## Description This PR fixes an issue with response errors not being spec compliant. It also adds a `GQLError` type to the client package. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? 
Added tests here https://github.com/sourcenetwork/defradb-third-party-test Specify the platform(s) on which this was tested: - MacOS --- cli/request.go | 6 +- client/db.go | 44 +++++++++++ client/errors.go | 15 ++++ docs/website/references/http/openapi.json | 48 +++++++++--- http/client.go | 28 +++---- http/errors.go | 8 +- http/handler_store.go | 55 +++---------- http/handler_store_test.go | 95 +++++++++++++++++++++++ http/openapi.go | 1 - http/utils.go | 15 ---- internal/db/request.go | 8 +- internal/db/store.go | 4 +- internal/db/subscriptions.go | 2 +- internal/request/graphql/parser.go | 7 +- tests/clients/cli/wrapper.go | 31 +++----- 15 files changed, 243 insertions(+), 124 deletions(-) create mode 100644 http/handler_store_test.go diff --git a/cli/request.go b/cli/request.go index ae794dc2a1..aa7b1c5e6a 100644 --- a/cli/request.go +++ b/cli/request.go @@ -90,13 +90,9 @@ To learn more about the DefraDB GraphQL Query Language, refer to https://docs.so store := mustGetContextStore(cmd) result := store.ExecRequest(cmd.Context(), request, options...) - var errors []string - for _, err := range result.GQL.Errors { - errors = append(errors, err.Error()) - } if result.Subscription == nil { cmd.Print(REQ_RESULTS_HEADER) - return writeJSON(cmd, map[string]any{"data": result.GQL.Data, "errors": errors}) + return writeJSON(cmd, result.GQL) } cmd.Print(SUB_RESULTS_HEADER) for item := range result.Subscription { diff --git a/client/db.go b/client/db.go index ed8ba05b7c..50ee1f82dc 100644 --- a/client/db.go +++ b/client/db.go @@ -11,7 +11,9 @@ package client import ( + "bytes" "context" + "encoding/json" ds "github.com/ipfs/go-datastore" "github.com/lens-vm/lens/host-go/config/model" @@ -298,6 +300,48 @@ type GQLResult struct { Data any `json:"data"` } +// gqlError represents an error that was encountered during a GQL request. +// +// This is only used for marshalling to keep our responses spec compliant. +type gqlError struct { + // Message contains a description of the error. + Message string `json:"message"` +} + +// gqlResult is used to marshal and unmarshal GQLResults. +// +// The serialized data should always match the graphQL spec. +type gqlResult struct { + // Errors contains the formatted result errors + Errors []gqlError `json:"errors,omitempty"` + // Data contains the result data + Data any `json:"data"` +} + +func (res *GQLResult) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewBuffer(data)) + dec.UseNumber() + var out gqlResult + if err := dec.Decode(&out); err != nil { + return err + } + res.Data = out.Data + res.Errors = make([]error, len(out.Errors)) + for i, e := range out.Errors { + res.Errors[i] = ReviveError(e.Message) + } + return nil +} + +func (res GQLResult) MarshalJSON() ([]byte, error) { + out := gqlResult{Data: res.Data} + out.Errors = make([]gqlError, len(res.Errors)) + for i, e := range res.Errors { + out.Errors[i] = gqlError{Message: e.Error()} + } + return json.Marshal(out) +} + // RequestResult represents the results of a GQL request. type RequestResult struct { // GQL contains the immediate results of the GQL request. 
diff --git a/client/errors.go b/client/errors.go index 46b598b52c..866ad98ec4 100644 --- a/client/errors.go +++ b/client/errors.go @@ -13,6 +13,7 @@ package client import ( "fmt" + "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/errors" ) @@ -174,3 +175,17 @@ func NewErrFailedToParseKind(kind []byte) error { errors.NewKV("Kind", kind), ) } + +// ReviveError attempts to return a client specific error from +// the given message. If no matching error is found the message +// is wrapped in a new anonymous error type. +func ReviveError(message string) error { + switch message { + case ErrDocumentNotFoundOrNotAuthorized.Error(): + return ErrDocumentNotFoundOrNotAuthorized + case datastore.ErrTxnConflict.Error(): + return datastore.ErrTxnConflict + default: + return fmt.Errorf("%s", message) + } +} diff --git a/docs/website/references/http/openapi.json b/docs/website/references/http/openapi.json index 9a7198495a..6b7686c7c1 100644 --- a/docs/website/references/http/openapi.json +++ b/docs/website/references/http/openapi.json @@ -337,16 +337,6 @@ }, "type": "object" }, - "graphql_response": { - "properties": { - "data": {}, - "errors": { - "items": {}, - "type": "array" - } - }, - "type": "object" - }, "index": { "properties": { "Fields": { @@ -1295,7 +1285,24 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/graphql_response" + "properties": { + "data": { + "additionalProperties": true, + "type": "object" + }, + "errors": { + "items": { + "properties": { + "message": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + } + }, + "type": "object" } } }, @@ -1329,7 +1336,24 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/graphql_response" + "properties": { + "data": { + "additionalProperties": true, + "type": "object" + }, + "errors": { + "items": { + "properties": { + "message": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + } + }, + "type": "object" } } }, diff --git a/http/client.go b/http/client.go index ba272572b8..e98eac7d07 100644 --- a/http/client.go +++ b/http/client.go @@ -383,13 +383,13 @@ func (c *Client) ExecRequest( body, err := json.Marshal(gqlRequest) if err != nil { - result.GQL.Errors = []error{err} + result.GQL.Errors = append(result.GQL.Errors, err) return result } req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) if err != nil { - result.GQL.Errors = []error{err} + result.GQL.Errors = append(result.GQL.Errors, err) return result } err = c.http.setDefaultHeaders(req) @@ -397,13 +397,13 @@ func (c *Client) ExecRequest( setDocEncryptionFlagIfNeeded(ctx, req) if err != nil { - result.GQL.Errors = []error{err} + result.GQL.Errors = append(result.GQL.Errors, err) return result } res, err := c.http.client.Do(req) if err != nil { - result.GQL.Errors = []error{err} + result.GQL.Errors = append(result.GQL.Errors, err) return result } if res.Header.Get("Content-Type") == "text/event-stream" { @@ -417,16 +417,13 @@ func (c *Client) ExecRequest( data, err := io.ReadAll(res.Body) if err != nil { - result.GQL.Errors = []error{err} + result.GQL.Errors = append(result.GQL.Errors, err) return result } - var response GraphQLResponse - if err = json.Unmarshal(data, &response); err != nil { - result.GQL.Errors = []error{err} + if err = json.Unmarshal(data, &result.GQL); err != nil { + result.GQL.Errors = append(result.GQL.Errors, err) return result } - result.GQL.Data = response.Data - result.GQL.Errors 
= response.Errors return result } @@ -447,14 +444,11 @@ func (c *Client) execRequestSubscription(r io.ReadCloser) chan client.GQLResult if err != nil { return } - var response GraphQLResponse - if err := json.Unmarshal(evt.Data, &response); err != nil { - return - } - resCh <- client.GQLResult{ - Errors: response.Errors, - Data: response.Data, + var res client.GQLResult + if err := json.Unmarshal(evt.Data, &res); err != nil { + res.Errors = append(res.Errors, err) } + resCh <- res } }() diff --git a/http/errors.go b/http/errors.go index d4a72df516..aa6d6537ac 100644 --- a/http/errors.go +++ b/http/errors.go @@ -12,7 +12,9 @@ package http import ( "encoding/json" + "fmt" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/errors" ) @@ -53,7 +55,11 @@ func (e *errorResponse) UnmarshalJSON(data []byte) error { if err := json.Unmarshal(data, &out); err != nil { return err } - e.Error = parseError(out["error"]) + if msg, ok := out["error"].(string); ok { + e.Error = client.ReviveError(msg) + } else { + e.Error = fmt.Errorf("%s", out) + } return nil } diff --git a/http/handler_store.go b/http/handler_store.go index 3f4c52c800..2f98cda0ff 100644 --- a/http/handler_store.go +++ b/http/handler_store.go @@ -11,7 +11,6 @@ package http import ( - "bytes" "encoding/json" "fmt" "io" @@ -279,43 +278,6 @@ type GraphQLRequest struct { Variables map[string]any `json:"variables"` } -type GraphQLResponse struct { - Data any `json:"data"` - Errors []error `json:"errors,omitempty"` -} - -func (res GraphQLResponse) MarshalJSON() ([]byte, error) { - var errors []string - for _, err := range res.Errors { - errors = append(errors, err.Error()) - } - return json.Marshal(map[string]any{"data": res.Data, "errors": errors}) -} - -func (res *GraphQLResponse) UnmarshalJSON(data []byte) error { - // decode numbers to json.Number - dec := json.NewDecoder(bytes.NewBuffer(data)) - dec.UseNumber() - - var out map[string]any - if err := dec.Decode(&out); err != nil { - return err - } - res.Data = out["data"] - - // fix errors type to match tests - switch t := out["errors"].(type) { - case []any: - for _, v := range t { - res.Errors = append(res.Errors, parseError(v)) - } - default: - res.Errors = nil - } - - return nil -} - func (s *storeHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) { store := req.Context().Value(dbContextKey).(client.Store) @@ -343,7 +305,7 @@ func (s *storeHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) { result := store.ExecRequest(req.Context(), request.Query, options...) if result.Subscription == nil { - responseJSON(rw, http.StatusOK, GraphQLResponse{result.GQL.Data, result.GQL.Errors}) + responseJSON(rw, http.StatusOK, result.GQL) return } flusher, ok := rw.(http.Flusher) @@ -396,9 +358,6 @@ func (h *storeHandler) bindRoutes(router *Router) { graphQLRequestSchema := &openapi3.SchemaRef{ Ref: "#/components/schemas/graphql_request", } - graphQLResponseSchema := &openapi3.SchemaRef{ - Ref: "#/components/schemas/graphql_response", - } backupConfigSchema := &openapi3.SchemaRef{ Ref: "#/components/schemas/backup_config", } @@ -412,6 +371,16 @@ func (h *storeHandler) bindRoutes(router *Router) { Ref: "#/components/schemas/patch_schema_request", } + graphQLResponseSchema := openapi3.NewObjectSchema(). 
+ WithProperties(map[string]*openapi3.Schema{ + "errors": openapi3.NewArraySchema().WithItems( + openapi3.NewObjectSchema().WithProperties(map[string]*openapi3.Schema{ + "message": openapi3.NewStringSchema(), + }), + ), + "data": openapi3.NewObjectSchema().WithAnyAdditionalProperties(), + }) + collectionArraySchema := openapi3.NewArraySchema() collectionArraySchema.Items = collectionSchema @@ -626,7 +595,7 @@ func (h *storeHandler) bindRoutes(router *Router) { graphQLResponse := openapi3.NewResponse(). WithDescription("GraphQL response"). - WithContent(openapi3.NewContentWithJSONSchemaRef(graphQLResponseSchema)) + WithContent(openapi3.NewContentWithJSONSchema(graphQLResponseSchema)) graphQLPost := openapi3.NewOperation() graphQLPost.Description = "GraphQL POST endpoint" diff --git a/http/handler_store_test.go b/http/handler_store_test.go new file mode 100644 index 0000000000..dabf9648bd --- /dev/null +++ b/http/handler_store_test.go @@ -0,0 +1,95 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestExecRequest_WithValidQuery_OmitsErrors(t *testing.T) { + cdb := setupDatabase(t) + + body, err := json.Marshal(&GraphQLRequest{ + Query: `query { + User { + name + } + }`, + }) + require.NoError(t, err) + + req := httptest.NewRequest(http.MethodPost, "http://localhost:9181/api/v0/graphql", bytes.NewBuffer(body)) + rec := httptest.NewRecorder() + + handler, err := NewHandler(cdb) + require.NoError(t, err) + handler.ServeHTTP(rec, req) + + res := rec.Result() + require.NotNil(t, res.Body) + + resData, err := io.ReadAll(res.Body) + require.NoError(t, err) + + var gqlResponse map[string]any + err = json.Unmarshal(resData, &gqlResponse) + require.NoError(t, err) + + // errors should be omitted + _, ok := gqlResponse["errors"] + assert.False(t, ok) +} + +func TestExecRequest_WithInvalidQuery_HasSpecCompliantErrors(t *testing.T) { + cdb := setupDatabase(t) + + body, err := json.Marshal(&GraphQLRequest{ + Query: `query { + User { + invalid + } + }`, + }) + require.NoError(t, err) + + req := httptest.NewRequest(http.MethodPost, "http://localhost:9181/api/v0/graphql", bytes.NewBuffer(body)) + rec := httptest.NewRecorder() + + handler, err := NewHandler(cdb) + require.NoError(t, err) + handler.ServeHTTP(rec, req) + + res := rec.Result() + require.NotNil(t, res.Body) + + resData, err := io.ReadAll(res.Body) + require.NoError(t, err) + + var gqlResponse map[string]any + err = json.Unmarshal(resData, &gqlResponse) + require.NoError(t, err) + + errList, ok := gqlResponse["errors"] + require.True(t, ok) + + // errors should contain spec compliant error objects + assert.ElementsMatch(t, errList, []any{map[string]any{ + "message": "Cannot query field \"invalid\" on type \"User\".", + }}) +} diff --git a/http/openapi.go b/http/openapi.go index f6816376ae..0bb5f71743 100644 --- a/http/openapi.go +++ b/http/openapi.go @@ -26,7 +26,6 @@ var openApiSchemas = map[string]any{ "collection_delete": &CollectionDeleteRequest{}, "peer_info": &peer.AddrInfo{}, "graphql_request": 
&GraphQLRequest{}, - "graphql_response": &GraphQLResponse{}, "backup_config": &client.BackupConfig{}, "collection": &client.CollectionDescription{}, "schema": &client.SchemaDescription{}, diff --git a/http/utils.go b/http/utils.go index 835382987b..176fe3d035 100644 --- a/http/utils.go +++ b/http/utils.go @@ -12,12 +12,8 @@ package http import ( "encoding/json" - "fmt" "io" "net/http" - - "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" ) func requestJSON(req *http.Request, out any) error { @@ -39,14 +35,3 @@ func responseJSON(rw http.ResponseWriter, status int, data any) { log.ErrorE("failed to write response", err) } } - -func parseError(msg any) error { - switch msg { - case client.ErrDocumentNotFoundOrNotAuthorized.Error(): - return client.ErrDocumentNotFoundOrNotAuthorized - case datastore.ErrTxnConflict.Error(): - return datastore.ErrTxnConflict - default: - return fmt.Errorf("%s", msg) - } -} diff --git a/internal/db/request.go b/internal/db/request.go index 8063ca3f51..560e270d0b 100644 --- a/internal/db/request.go +++ b/internal/db/request.go @@ -22,7 +22,7 @@ func (db *db) execRequest(ctx context.Context, request string, options *client.G res := &client.RequestResult{} ast, err := db.parser.BuildRequestAST(request) if err != nil { - res.GQL.Errors = []error{err} + res.GQL.Errors = append(res.GQL.Errors, err) return res } if db.parser.IsIntrospection(ast) { @@ -31,13 +31,13 @@ func (db *db) execRequest(ctx context.Context, request string, options *client.G parsedRequest, errors := db.parser.Parse(ast, options) if len(errors) > 0 { - res.GQL.Errors = errors + res.GQL.Errors = append(res.GQL.Errors, errors...) return res } pub, err := db.handleSubscription(ctx, parsedRequest) if err != nil { - res.GQL.Errors = []error{err} + res.GQL.Errors = append(res.GQL.Errors, err) return res } @@ -52,7 +52,7 @@ func (db *db) execRequest(ctx context.Context, request string, options *client.G results, err := planner.RunRequest(ctx, parsedRequest) if err != nil { - res.GQL.Errors = []error{err} + res.GQL.Errors = append(res.GQL.Errors, err) } res.GQL.Data = results return res diff --git a/internal/db/store.go b/internal/db/store.go index 89471b6ac6..bd23e69d28 100644 --- a/internal/db/store.go +++ b/internal/db/store.go @@ -25,7 +25,7 @@ func (db *db) ExecRequest(ctx context.Context, request string, opts ...client.Re ctx, txn, err := ensureContextTxn(ctx, db, false) if err != nil { res := &client.RequestResult{} - res.GQL.Errors = []error{err} + res.GQL.Errors = append(res.GQL.Errors, err) return res } defer txn.Discard(ctx) @@ -41,7 +41,7 @@ func (db *db) ExecRequest(ctx context.Context, request string, opts ...client.Re } if err := txn.Commit(ctx); err != nil { - res.GQL.Errors = []error{err} + res.GQL.Errors = append(res.GQL.Errors, err) return res } diff --git a/internal/db/subscriptions.go b/internal/db/subscriptions.go index 03fe41cc25..b876d6c90c 100644 --- a/internal/db/subscriptions.go +++ b/internal/db/subscriptions.go @@ -77,7 +77,7 @@ func (db *db) handleSubscription(ctx context.Context, r *request.Request) (<-cha } res := client.GQLResult{} if err != nil { - res.Errors = []error{err} + res.Errors = append(res.Errors, err) } res.Data = result diff --git a/internal/request/graphql/parser.go b/internal/request/graphql/parser.go index f2a86430e0..26e09230ad 100644 --- a/internal/request/graphql/parser.go +++ b/internal/request/graphql/parser.go @@ -72,13 +72,12 @@ func (p *parser) ExecuteIntrospection(request string) *client.RequestResult { res := 
&client.RequestResult{ GQL: client.GQLResult{ - Data: r.Data, - Errors: make([]error, len(r.Errors)), + Data: r.Data, }, } - for i, err := range r.Errors { - res.GQL.Errors[i] = err + for _, err := range r.Errors { + res.GQL.Errors = append(res.GQL.Errors, err) } return res diff --git a/tests/clients/cli/wrapper.go b/tests/clients/cli/wrapper.go index 143a1e3534..fbfc0e5e6a 100644 --- a/tests/clients/cli/wrapper.go +++ b/tests/clients/cli/wrapper.go @@ -432,7 +432,7 @@ func (w *Wrapper) ExecRequest( if len(options.Variables) > 0 { enc, err := json.Marshal(options.Variables) if err != nil { - result.GQL.Errors = []error{err} + result.GQL.Errors = append(result.GQL.Errors, err) return result } args = append(args, "--variables", string(enc)) @@ -440,13 +440,13 @@ func (w *Wrapper) ExecRequest( stdOut, stdErr, err := w.cmd.executeStream(ctx, args) if err != nil { - result.GQL.Errors = []error{err} + result.GQL.Errors = append(result.GQL.Errors, err) return result } buffer := bufio.NewReader(stdOut) header, err := buffer.ReadString('\n') if err != nil { - result.GQL.Errors = []error{err} + result.GQL.Errors = append(result.GQL.Errors, err) return result } if header == cli.SUB_RESULTS_HEADER { @@ -455,26 +455,22 @@ func (w *Wrapper) ExecRequest( } data, err := io.ReadAll(buffer) if err != nil { - result.GQL.Errors = []error{err} + result.GQL.Errors = append(result.GQL.Errors, err) return result } errData, err := io.ReadAll(stdErr) if err != nil { - result.GQL.Errors = []error{err} + result.GQL.Errors = append(result.GQL.Errors, err) return result } if len(errData) > 0 { - result.GQL.Errors = []error{fmt.Errorf("%s", errData)} + result.GQL.Errors = append(result.GQL.Errors, fmt.Errorf("%s", errData)) return result } - var response http.GraphQLResponse - if err = json.Unmarshal(data, &response); err != nil { - result.GQL.Errors = []error{err} - return result + if err = json.Unmarshal(data, &result.GQL); err != nil { + result.GQL.Errors = append(result.GQL.Errors, err) } - result.GQL.Data = response.Data - result.GQL.Errors = response.Errors return result } @@ -485,14 +481,11 @@ func (w *Wrapper) execRequestSubscription(r io.Reader) chan client.GQLResult { defer close(resCh) for { - var response http.GraphQLResponse - if err := dec.Decode(&response); err != nil { - return - } - resCh <- client.GQLResult{ - Errors: response.Errors, - Data: response.Data, + var res client.GQLResult + if err := dec.Decode(&res); err != nil { + res.Errors = append(res.Errors, err) } + resCh <- res } }() return resCh From 701a7a5b9e3fdac9d5bd41066b021653fcf0c2dc Mon Sep 17 00:00:00 2001 From: Islam Aliev Date: Mon, 23 Sep 2024 10:29:42 +0200 Subject: [PATCH 33/71] feat: Secure document encryption key exchange (#2891) ## Relevant issue(s) Resolves #2856 #2909 ## Description This PR introduces a secure mechanism for exchanging document encryption keys in our decentralized network. It supports both whole-document and field-level encryption, enhancing data privacy without compromising the system's distributed nature. ### Data Flow and Key Exchange Process 1. Document Creation: * When a user creates a document with encryption enabled (whole document or specific fields), the system generates a new DAG block containing the encrypted delta. * A new `Encryption` IPLD block is created, containing the docID, optional fieldName (for field-level encryption), and the encryption key itself. * The DAG block references the `Encryption` block by storing its CID. 2. 
Encryption Detection: * Upon receiving an update event for a document, a node checks if the block is encrypted by examining the reference to the `Encryption` block. * If encrypted and the node lacks the corresponding key, it initiates a key fetch request. 3. Key Request and Retrieval: * The node sends an "enc-keys-request" event on the event bus, which is picked up by the Key Management Service (KMS). * The KMS handles the key exchange process over the "encryption" pubsub channel. 4. Key Reception and Storage: * Upon receiving the encryption keys, the KMS stores them in a dedicated IPLD storage. * It then notifies the merging process that is waiting for the results. 5. Decryption and Merging: * During the `executeMerge` process, the node collects CIDs of all `Encryption` blocks that are not available locally. * After the initial merge attempt, if any encryption keys are missing, the node requests these keys via the KMS. * Once the keys are received, the merge process is restarted to decrypt and apply the previously encrypted deltas. ### Technical Details * Encryption: ECIES with X25519, HKDF (SHA-256), and AES-256-GCM * Key Storage: Encryption keys are stored in separate IPLD blocks, referenced by the main DAG blocks * Key Management: Introduced a new KMS abstraction for handling key exchange and storage * Pubsub: All peers listen on the "encryption" topic for key exchange messages * Event Bus: New "enc-keys-request" event for initiating key requests ### Current Limitations and Future Work * This version assumes all documents are public. Future iterations will integrate with access control mechanisms. * The system currently trusts the first received response. Future enhancements will implement more robust validation. * Further work is needed to optimize the key exchange process for large-scale deployments and to handle network partitions or failed exchanges gracefully. * The current KMS implementation can be replaced in the future, e.g., with an Orbis KMS, for more advanced key management features. ### Testing Improvements Introduced an "assert stack" for integration tests, providing detailed failure contexts (e.g., 'path: commits[2].links[1].cid' instead of 'doc: 1'). These changes significantly enhance our system's security, enabling confidential data storage and transmission in a decentralized environment, while laying the groundwork for more advanced encryption and access control features. The introduction of the KMS abstraction provides flexibility for future improvements in key management. 
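### Usage Sketch

As a reviewing aid, here is a minimal, hypothetical sketch of how the new `crypto` package introduced in this PR can wrap and unwrap a document encryption key (the `docKey` and `docID` values are placeholders, and the KMS/pubsub wiring is omitted):

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/sourcenetwork/defradb/crypto"
)

func main() {
	// Placeholder 32-byte document encryption key and a docID used as AAD.
	docKey := make([]byte, 32)
	docID := []byte("bae-example-doc-id")

	// Recipient: generate an X25519 key pair. The public key is shared with peers.
	recipient, err := crypto.GenerateX25519()
	if err != nil {
		panic(err)
	}

	// Sender: wrap the document key for the recipient. By default the ephemeral
	// X25519 public key is prepended to the returned ciphertext.
	wrapped, err := crypto.EncryptECIES(docKey, recipient.PublicKey(), crypto.WithAAD(docID))
	if err != nil {
		panic(err)
	}

	// Recipient: unwrap the key. The same AAD must be supplied, otherwise
	// HMAC verification fails.
	unwrapped, err := crypto.DecryptECIES(wrapped, recipient, crypto.WithAAD(docID))
	if err != nil {
		panic(err)
	}

	fmt.Println(bytes.Equal(docKey, unwrapped)) // true
}
```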
--- cli/collection_create.go | 10 +- client/db.go | 5 + client/mocks/db.go | 47 + crypto/aes.go | 94 ++ crypto/aes_test.go | 175 ++++ crypto/ecies.go | 274 ++++++ crypto/ecies_test.go | 201 +++++ crypto/errors.go | 62 ++ {internal/encryption => crypto}/nonce.go | 11 +- datastore/mocks/blockstore.go | 493 +++++++++++ datastore/mocks/txn.go | 12 +- datastore/mocks/utils.go | 11 + datastore/multi.go | 9 +- datastore/store.go | 2 +- .../i2891-no-change-tests-updated.md | 3 + go.mod | 5 +- go.sum | 6 +- http/client.go | 4 + http/client_tx.go | 2 +- internal/core/block/block.go | 140 ++- internal/core/block/block_test.go | 89 +- internal/core/block/errors.go | 20 +- internal/core/crdt/composite.go | 6 +- internal/core/crdt/lwwreg_test.go | 22 +- internal/core/key.go | 49 +- internal/db/base/collection_keys.go | 4 +- internal/db/collection.go | 2 +- internal/db/collection_delete.go | 2 +- internal/db/config.go | 10 +- internal/db/config_test.go | 4 +- internal/db/context.go | 3 - internal/db/db.go | 12 +- internal/db/errors.go | 1 + internal/db/fetcher/dag.go | 2 +- internal/db/fetcher/versioned.go | 3 +- internal/db/indexed_docs_test.go | 4 +- internal/db/merge.go | 281 ++++-- internal/db/p2p_replicator.go | 2 +- internal/encryption/aes.go | 79 -- internal/encryption/context.go | 30 +- internal/encryption/encryptor.go | 198 ++--- internal/encryption/encryptor_test.go | 267 +++--- internal/encryption/errors.go | 6 +- internal/encryption/event.go | 75 ++ internal/kms/enc_store.go | 66 ++ internal/kms/errors.go | 27 + internal/kms/pubsub.go | 339 +++++++ internal/kms/service.go | 40 + internal/lens/fetcher.go | 4 +- internal/merkle/clock/clock.go | 133 ++- internal/merkle/clock/clock_test.go | 5 +- internal/merkle/clock/errors.go | 2 + internal/merkle/clock/heads_test.go | 2 +- internal/merkle/crdt/composite.go | 3 +- internal/merkle/crdt/counter.go | 2 +- internal/merkle/crdt/lwwreg.go | 2 +- internal/merkle/crdt/merklecrdt.go | 5 +- internal/merkle/crdt/merklecrdt_test.go | 2 +- internal/planner/commit.go | 12 +- net/client.go | 2 +- net/dialer_test.go | 6 + net/errors.go | 11 +- net/pb/Makefile | 17 +- net/pb/net.pb.go | 410 +++++---- net/pb/net.proto | 37 +- net/pb/net_grpc.pb.go | 27 +- net/pb/net_vtproto.pb.go | 835 ++++++++++-------- net/peer.go | 22 +- net/peer_test.go | 13 +- net/server.go | 40 +- net/server_test.go | 4 +- node/node.go | 33 +- tests/bench/query/planner/utils.go | 2 +- tests/clients/cli/wrapper.go | 4 + tests/clients/cli/wrapper_tx.go | 2 +- tests/clients/http/wrapper.go | 4 + tests/clients/http/wrapper_tx.go | 2 +- tests/integration/acp.go | 14 + tests/integration/assert_stack.go | 57 ++ tests/integration/db.go | 21 +- tests/integration/encryption/commit_test.go | 10 +- .../encryption/peer_sec_index_test.go | 161 ++++ .../integration/encryption/peer_share_test.go | 530 +++++++++++ tests/integration/encryption/peer_test.go | 98 +- tests/integration/encryption/utils.go | 4 +- tests/integration/events.go | 4 + tests/integration/state.go | 4 + tests/integration/test_case.go | 12 + tests/integration/utils.go | 101 ++- tools/configs/mockery.yaml | 1 + 90 files changed, 4601 insertions(+), 1268 deletions(-) create mode 100644 crypto/aes.go create mode 100644 crypto/aes_test.go create mode 100644 crypto/ecies.go create mode 100644 crypto/ecies_test.go create mode 100644 crypto/errors.go rename {internal/encryption => crypto}/nonce.go (86%) create mode 100644 datastore/mocks/blockstore.go create mode 100644 docs/data_format_changes/i2891-no-change-tests-updated.md delete mode 
100644 internal/encryption/aes.go
 create mode 100644 internal/encryption/event.go
 create mode 100644 internal/kms/enc_store.go
 create mode 100644 internal/kms/errors.go
 create mode 100644 internal/kms/pubsub.go
 create mode 100644 internal/kms/service.go
 create mode 100644 tests/integration/assert_stack.go
 create mode 100644 tests/integration/encryption/peer_sec_index_test.go
 create mode 100644 tests/integration/encryption/peer_share_test.go

diff --git a/cli/collection_create.go b/cli/collection_create.go
index eecdfef2d8..002847d6ec 100644
--- a/cli/collection_create.go
+++ b/cli/collection_create.go
@@ -17,8 +17,6 @@ import (
 	"github.com/spf13/cobra"

 	"github.com/sourcenetwork/defradb/client"
-	"github.com/sourcenetwork/defradb/datastore"
-	"github.com/sourcenetwork/defradb/internal/db"
 	"github.com/sourcenetwork/defradb/internal/encryption"
 )

@@ -89,8 +87,7 @@ Example: create from stdin:
 		return cmd.Usage()
 	}

-	txn, _ := db.TryGetContextTxn(cmd.Context())
-	setContextDocEncryption(cmd, shouldEncryptDoc, encryptedFields, txn)
+	setContextDocEncryption(cmd, shouldEncryptDoc, encryptedFields)

 	if client.IsJSONArray(docData) {
 		docs, err := client.NewDocsFromJSON(docData, col.Definition())
@@ -116,14 +113,11 @@ Example: create from stdin:
 }

 // setContextDocEncryption sets doc encryption for the current command context.
-func setContextDocEncryption(cmd *cobra.Command, shouldEncryptDoc bool, encryptFields []string, txn datastore.Txn) {
+func setContextDocEncryption(cmd *cobra.Command, shouldEncryptDoc bool, encryptFields []string) {
 	if !shouldEncryptDoc && len(encryptFields) == 0 {
 		return
 	}
 	ctx := cmd.Context()
-	if txn != nil {
-		ctx = encryption.ContextWithStore(ctx, txn)
-	}
 	ctx = encryption.SetContextConfigFromParams(ctx, shouldEncryptDoc, encryptFields)
 	cmd.SetContext(ctx)
 }

diff --git a/client/db.go b/client/db.go
index 50ee1f82dc..b8f5e91e35 100644
--- a/client/db.go
+++ b/client/db.go
@@ -52,6 +52,11 @@ type DB interface {
 	// It sits within the rootstore returned by [Root].
 	Blockstore() datastore.Blockstore

+	// Encstore returns the store that contains all known encryption keys for documents and their fields.
+	//
+	// It sits within the rootstore returned by [Root].
+	Encstore() datastore.Blockstore
+
 	// Peerstore returns the peerstore where known host information is stored.
 	//
 	// It sits within the rootstore returned by [Root].
diff --git a/client/mocks/db.go b/client/mocks/db.go index b14aec5d05..8923e63d78 100644 --- a/client/mocks/db.go +++ b/client/mocks/db.go @@ -479,6 +479,53 @@ func (_c *DB_DeleteReplicator_Call) RunAndReturn(run func(context.Context, clien return _c } +// Encstore provides a mock function with given fields: +func (_m *DB) Encstore() datastore.Blockstore { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Encstore") + } + + var r0 datastore.Blockstore + if rf, ok := ret.Get(0).(func() datastore.Blockstore); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(datastore.Blockstore) + } + } + + return r0 +} + +// DB_Encstore_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Encstore' +type DB_Encstore_Call struct { + *mock.Call +} + +// Encstore is a helper method to define mock.On call +func (_e *DB_Expecter) Encstore() *DB_Encstore_Call { + return &DB_Encstore_Call{Call: _e.mock.On("Encstore")} +} + +func (_c *DB_Encstore_Call) Run(run func()) *DB_Encstore_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *DB_Encstore_Call) Return(_a0 datastore.Blockstore) *DB_Encstore_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *DB_Encstore_Call) RunAndReturn(run func() datastore.Blockstore) *DB_Encstore_Call { + _c.Call.Return(run) + return _c +} + // Events provides a mock function with given fields: func (_m *DB) Events() *event.Bus { ret := _m.Called() diff --git a/crypto/aes.go b/crypto/aes.go new file mode 100644 index 0000000000..9fa2bd8deb --- /dev/null +++ b/crypto/aes.go @@ -0,0 +1,94 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package crypto + +import ( + "crypto/aes" + "crypto/cipher" +) + +// EncryptAES encrypts data using AES-GCM with a provided key and additional data. +// It generates a nonce internally and optionally prepends it to the cipherText. +// +// Parameters: +// - plainText: The data to be encrypted +// - key: The AES encryption key +// - additionalData: Additional authenticated data (AAD) to be used in the encryption +// - prependNonce: If true, the nonce is prepended to the returned cipherText +// +// Returns: +// - cipherText: The encrypted data, with the nonce prepended if prependNonce is true +// - nonce: The generated nonce +// - error: Any error encountered during the encryption process +func EncryptAES(plainText, key, additionalData []byte, prependNonce bool) ([]byte, []byte, error) { + block, err := aes.NewCipher(key) + if err != nil { + return nil, nil, err + } + + nonce, err := generateNonceFunc() + if err != nil { + return nil, nil, err + } + + aesGCM, err := cipher.NewGCM(block) + if err != nil { + return nil, nil, err + } + + var cipherText []byte + if prependNonce { + cipherText = aesGCM.Seal(nonce, nonce, plainText, additionalData) + } else { + cipherText = aesGCM.Seal(nil, nonce, plainText, additionalData) + } + + return cipherText, nonce, nil +} + +// DecryptAES decrypts AES-GCM encrypted data with a provided key and additional data. +// If no separate nonce is provided, it assumes the nonce is prepended to the cipherText. 
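+//
+// A minimal round-trip, for illustration only (key and aad are caller-provided;
+// the key must be a valid AES key size, e.g. 32 bytes for AES-256):
+//
+//	cipherText, _, err := EncryptAES(plainText, key, aad, true)
+//	plainText, err = DecryptAES(nil, cipherText, key, aad)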
+// +// Parameters: +// - nonce: The nonce used for decryption. If empty, it's assumed to be prepended to cipherText +// - cipherText: The data to be decrypted +// - key: The AES decryption key +// - additionalData: Additional authenticated data (AAD) used during encryption +// +// Returns: +// - plainText: The decrypted data +// - error: Any error encountered during the decryption process, including authentication failures +func DecryptAES(nonce, cipherText, key, additionalData []byte) ([]byte, error) { + if len(nonce) == 0 { + if len(cipherText) < AESNonceSize { + return nil, ErrCipherTextTooShort + } + nonce = cipherText[:AESNonceSize] + cipherText = cipherText[AESNonceSize:] + } + + block, err := aes.NewCipher(key) + if err != nil { + return nil, err + } + + aesGCM, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + + plainText, err := aesGCM.Open(nil, nonce, cipherText, additionalData) + if err != nil { + return nil, err + } + + return plainText, nil +} diff --git a/crypto/aes_test.go b/crypto/aes_test.go new file mode 100644 index 0000000000..7218ca24b2 --- /dev/null +++ b/crypto/aes_test.go @@ -0,0 +1,175 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package crypto + +import ( + "bytes" + "crypto/rand" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestEncryptAES(t *testing.T) { + validKey := make([]byte, 32) // AES-256 + _, err := rand.Read(validKey) + require.NoError(t, err) + validPlaintext := []byte("Hello, World!") + validAAD := []byte("Additional Authenticated Data") + + tests := []struct { + name string + plainText []byte + key []byte + additionalData []byte + prependNonce bool + expectError bool + errorContains string + }{ + { + name: "Valid encryption with prepended nonce", + plainText: validPlaintext, + key: validKey, + additionalData: validAAD, + prependNonce: true, + expectError: false, + }, + { + name: "Valid encryption without prepended nonce", + plainText: validPlaintext, + key: validKey, + additionalData: validAAD, + prependNonce: false, + expectError: false, + }, + { + name: "Invalid key size", + plainText: validPlaintext, + key: make([]byte, 31), // Invalid key size + additionalData: validAAD, + prependNonce: true, + expectError: true, + errorContains: "invalid key size", + }, + { + name: "Nil plaintext", + plainText: nil, + key: validKey, + additionalData: validAAD, + prependNonce: true, + expectError: false, // AES-GCM can encrypt nil/empty plaintext + }, + { + name: "Nil additional data", + plainText: validPlaintext, + key: validKey, + additionalData: nil, + prependNonce: true, + expectError: false, // Nil AAD is valid + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + cipherText, nonce, err := EncryptAES(tt.plainText, tt.key, tt.additionalData, tt.prependNonce) + + if tt.expectError { + require.Error(t, err) + require.Contains(t, err.Error(), tt.errorContains) + } else { + require.NoError(t, err) + if tt.prependNonce { + require.Greater(t, len(cipherText), len(nonce), "Ciphertext length not greater than nonce length") + } else { + require.Equal(t, AESNonceSize, len(nonce), "Nonce length != AESNonceSize") + } + } + }) + } +} + +func TestDecryptAES(t 
*testing.T) { + validKey := make([]byte, 32) // AES-256 + _, err := rand.Read(validKey) + require.NoError(t, err) + validPlaintext := []byte("Hello, World!") + validAAD := []byte("Additional Authenticated Data") + validCiphertext, validNonce, _ := EncryptAES(validPlaintext, validKey, validAAD, true) + + tests := []struct { + name string + nonce []byte + cipherText []byte + key []byte + additionalData []byte + expectError bool + errorContains string + }{ + { + name: "Valid decryption", + nonce: nil, // Should be extracted from cipherText + cipherText: validCiphertext, + key: validKey, + additionalData: validAAD, + expectError: false, + }, + { + name: "Invalid key size", + nonce: validNonce, + cipherText: validCiphertext[AESNonceSize:], + key: make([]byte, 31), // Invalid key size + additionalData: validAAD, + expectError: true, + errorContains: "invalid key size", + }, + { + name: "Ciphertext too short", + nonce: nil, + cipherText: make([]byte, AESNonceSize-1), // Too short to contain nonce + key: validKey, + additionalData: validAAD, + expectError: true, + errorContains: errCipherTextTooShort, + }, + { + name: "Invalid additional data", + nonce: validNonce, + cipherText: validCiphertext[AESNonceSize:], + key: validKey, + additionalData: []byte("Wrong AAD"), + expectError: true, + errorContains: "message authentication failed", + }, + { + name: "Tampered ciphertext", + nonce: validNonce, + cipherText: append([]byte{0}, validCiphertext[AESNonceSize+1:]...), + key: validKey, + additionalData: validAAD, + expectError: true, + errorContains: "message authentication failed", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + plainText, err := DecryptAES(tt.nonce, tt.cipherText, tt.key, tt.additionalData) + + if tt.expectError { + require.Error(t, err) + require.Contains(t, err.Error(), tt.errorContains) + } else { + require.NoError(t, err) + require.True(t, bytes.Equal(plainText, validPlaintext), "Decrypted plaintext does not match original") + } + }) + } +} diff --git a/crypto/ecies.go b/crypto/ecies.go new file mode 100644 index 0000000000..f025e87823 --- /dev/null +++ b/crypto/ecies.go @@ -0,0 +1,274 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package crypto + +import ( + "crypto/ecdh" + "crypto/hmac" + "crypto/rand" + "crypto/sha256" + + "golang.org/x/crypto/hkdf" +) + +const X25519PublicKeySize = 32 +const HMACSize = 32 +const AESKeySize = 32 + +const minCipherTextSize = 16 + +// GenerateX25519 generates a new X25519 private key. +func GenerateX25519() (*ecdh.PrivateKey, error) { + return ecdh.X25519().GenerateKey(rand.Reader) +} + +// X25519PublicKeyFromBytes creates a new X25519 public key from the given bytes. +func X25519PublicKeyFromBytes(publicKeyBytes []byte) (*ecdh.PublicKey, error) { + return ecdh.X25519().NewPublicKey(publicKeyBytes) +} + +type ECIESOption func(*eciesOptions) + +type eciesOptions struct { + associatedData []byte + privateKey *ecdh.PrivateKey + publicKeyBytes []byte + noPubKeyPrepended bool +} + +// WithAAD sets the associated data to use for authentication. 
+func WithAAD(aad []byte) ECIESOption {
+	return func(o *eciesOptions) {
+		o.associatedData = aad
+	}
+}
+
+// WithPrivKey sets the private key to use for encryption.
+//
+// If not set, a new ephemeral key will be generated.
+// This option has no effect on decryption.
+func WithPrivKey(privKey *ecdh.PrivateKey) ECIESOption {
+	return func(o *eciesOptions) {
+		o.privateKey = privKey
+	}
+}
+
+// WithPubKeyBytes sets the public key bytes to use for decryption.
+//
+// If not set, the cipherText is assumed to have the X25519 public key prepended.
+// This option has no effect on encryption.
+func WithPubKeyBytes(pubKeyBytes []byte) ECIESOption {
+	return func(o *eciesOptions) {
+		o.publicKeyBytes = pubKeyBytes
+	}
+}
+
+// WithPubKeyPrepended sets whether the public key is prepended to the cipherText.
+//
+// Upon encryption, if set to true (default value), the public key is prepended to the cipherText.
+// Otherwise it is not, and in that case a private key should be provided with the WithPrivKey option.
+//
+// Upon decryption, if set to true (default value), the public key is expected to be prepended to the cipherText.
+// Otherwise it is not, and in that case the public key bytes should be provided with the WithPubKeyBytes option.
+func WithPubKeyPrepended(prepended bool) ECIESOption {
+	return func(o *eciesOptions) {
+		o.noPubKeyPrepended = !prepended
+	}
+}
+
+// EncryptECIES encrypts plaintext using a custom Elliptic Curve Integrated Encryption Scheme (ECIES)
+// with X25519 for key agreement, HKDF for key derivation, AES for encryption, and HMAC for authentication.
+//
+// The function:
+// - Uses or generates an ephemeral X25519 key pair
+// - Performs ECDH with the provided public key
+// - Derives encryption and HMAC keys using HKDF
+// - Encrypts the plaintext using a custom AES encryption function
+// - Computes an HMAC over the ciphertext
+//
+// The default output format is: [ephemeral public key | encrypted data (including nonce) | HMAC]
+// This can be modified using options.
+// +// Parameters: +// - plainText: The message to encrypt +// - publicKey: The recipient's X25519 public key +// - opts: Optional ECIESOption functions to customize the encryption process +// +// Available options: +// - WithAAD(aad []byte): Sets the associated data for additional authentication +// - WithPrivKey(privKey *ecdh.PrivateKey): Uses the provided private key instead of generating a new one +// - WithPubKeyPrepended(prepended bool): Controls whether the public key is prepended to the ciphertext +// +// Returns: +// - Byte slice containing the encrypted message and necessary metadata for decryption +// - Error if any step of the encryption process fails +// +// Example usage: +// +// cipherText, err := EncryptECIES(plainText, recipientPublicKey, +// WithAAD(additionalData), +// WithPrivKey(senderPrivateKey), +// WithPubKeyPrepended(false)) +func EncryptECIES(plainText []byte, publicKey *ecdh.PublicKey, opts ...ECIESOption) ([]byte, error) { + options := &eciesOptions{} + for _, opt := range opts { + opt(options) + } + + ourPrivateKey := options.privateKey + if ourPrivateKey == nil { + if options.noPubKeyPrepended { + return nil, ErrNoPublicKeyForDecryption + } + var err error + ourPrivateKey, err = GenerateX25519() + if err != nil { + return nil, NewErrFailedToGenerateEphemeralKey(err) + } + } + ourPublicKey := ourPrivateKey.PublicKey() + + sharedSecret, err := ourPrivateKey.ECDH(publicKey) + if err != nil { + return nil, NewErrFailedECDHOperation(err) + } + + kdf := hkdf.New(sha256.New, sharedSecret, nil, nil) + aesKey := make([]byte, AESKeySize) + hmacKey := make([]byte, HMACSize) + if _, err := kdf.Read(aesKey); err != nil { + return nil, NewErrFailedKDFOperationForAESKey(err) + } + if _, err := kdf.Read(hmacKey); err != nil { + return nil, NewErrFailedKDFOperationForHMACKey(err) + } + + cipherText, _, err := EncryptAES(plainText, aesKey, makeAAD(ourPublicKey.Bytes(), options.associatedData), true) + if err != nil { + return nil, NewErrFailedToEncrypt(err) + } + + mac := hmac.New(sha256.New, hmacKey) + mac.Write(cipherText) + macSum := mac.Sum(nil) + + var result []byte + if options.noPubKeyPrepended { + result = cipherText + } else { + result = append(ourPublicKey.Bytes(), cipherText...) + } + result = append(result, macSum...) + + return result, nil +} + +// DecryptECIES decrypts ciphertext encrypted with EncryptECIES using the provided private key. +// +// The function: +// - Extracts or uses the provided ephemeral public key +// - Performs ECDH with the provided private key +// - Derives decryption and HMAC keys using HKDF +// - Verifies the HMAC +// - Decrypts the message using a custom AES decryption function +// +// The default expected input format is: [ephemeral public key | encrypted data (including nonce) | HMAC] +// This can be modified using options. 
+// +// Parameters: +// - cipherText: The encrypted message, including all necessary metadata +// - privateKey: The recipient's X25519 private key +// - opts: Optional ECIESOption functions to customize the decryption process +// +// Available options: +// - WithAAD(aad []byte): Sets the associated data used during encryption for additional authentication +// - WithPubKeyBytes(pubKeyBytes []byte): Provides the public key bytes if not prepended to the ciphertext +// - WithPubKeyPrepended(prepended bool): Indicates whether the public key is prepended to the ciphertext +// +// Returns: +// - Byte slice containing the decrypted plaintext +// - Error if any step of the decryption process fails, including authentication failure +// +// Example usage: +// +// plainText, err := DecryptECIES(cipherText, recipientPrivateKey, +// WithAAD(additionalData), +// WithPubKeyBytes(senderPublicKeyBytes), +// WithPubKeyPrepended(false)) +func DecryptECIES(cipherText []byte, ourPrivateKey *ecdh.PrivateKey, opts ...ECIESOption) ([]byte, error) { + options := &eciesOptions{} + for _, opt := range opts { + opt(options) + } + + minLength := X25519PublicKeySize + AESNonceSize + HMACSize + minCipherTextSize + if options.noPubKeyPrepended { + minLength -= X25519PublicKeySize + } + + if len(cipherText) < minLength { + return nil, ErrCipherTextTooShort + } + + publicKeyBytes := options.publicKeyBytes + if options.publicKeyBytes == nil { + if options.noPubKeyPrepended { + return nil, ErrNoPublicKeyForDecryption + } + publicKeyBytes = cipherText[:X25519PublicKeySize] + cipherText = cipherText[X25519PublicKeySize:] + } + publicKey, err := ecdh.X25519().NewPublicKey(publicKeyBytes) + if err != nil { + return nil, NewErrFailedToParseEphemeralPublicKey(err) + } + + sharedSecret, err := ourPrivateKey.ECDH(publicKey) + if err != nil { + return nil, NewErrFailedECDHOperation(err) + } + + kdf := hkdf.New(sha256.New, sharedSecret, nil, nil) + aesKey := make([]byte, AESKeySize) + hmacKey := make([]byte, HMACSize) + if _, err := kdf.Read(aesKey); err != nil { + return nil, NewErrFailedKDFOperationForAESKey(err) + } + if _, err := kdf.Read(hmacKey); err != nil { + return nil, NewErrFailedKDFOperationForHMACKey(err) + } + + macSum := cipherText[len(cipherText)-HMACSize:] + cipherTextWithNonce := cipherText[:len(cipherText)-HMACSize] + + mac := hmac.New(sha256.New, hmacKey) + mac.Write(cipherTextWithNonce) + expectedMAC := mac.Sum(nil) + if !hmac.Equal(macSum, expectedMAC) { + return nil, ErrVerificationWithHMACFailed + } + + plainText, err := DecryptAES(nil, cipherTextWithNonce, aesKey, makeAAD(publicKeyBytes, options.associatedData)) + if err != nil { + return nil, NewErrFailedToDecrypt(err) + } + + return plainText, nil +} + +// makeAAD concatenates the ephemeral public key and associated data for use as additional authenticated data. +func makeAAD(ephemeralPublicBytes, associatedData []byte) []byte { + l := len(ephemeralPublicBytes) + len(associatedData) + aad := make([]byte, l) + copy(aad, ephemeralPublicBytes) + copy(aad[len(ephemeralPublicBytes):], associatedData) + return aad +} diff --git a/crypto/ecies_test.go b/crypto/ecies_test.go new file mode 100644 index 0000000000..f4ed463c26 --- /dev/null +++ b/crypto/ecies_test.go @@ -0,0 +1,201 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package crypto + +import ( + "crypto/ecdh" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestEncryptECIES_Errors(t *testing.T) { + validAssociatedData := []byte("associated data") + validPrivateKey, _ := GenerateX25519() + + tests := []struct { + name string + plainText []byte + publicKey *ecdh.PublicKey + opts []ECIESOption + expectError string + }{ + { + name: "Invalid public key", + plainText: []byte("test data"), + publicKey: &ecdh.PublicKey{}, + opts: []ECIESOption{WithAAD(validAssociatedData)}, + expectError: errFailedECDHOperation, + }, + { + name: "No public key prepended and no private key provided", + plainText: []byte("test data"), + publicKey: validPrivateKey.PublicKey(), + opts: []ECIESOption{WithPubKeyPrepended(false)}, + expectError: errNoPublicKeyForDecryption, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := EncryptECIES(tt.plainText, tt.publicKey, tt.opts...) + if err == nil { + t.Errorf("Expected an error, but got nil") + } else if !strings.Contains(err.Error(), tt.expectError) { + t.Errorf("Expected error containing '%s', got '%v'", tt.expectError, err) + } + }) + } +} + +func TestDecryptECIES_Errors(t *testing.T) { + validPrivateKey, _ := GenerateX25519() + aad := []byte("associated data") + validCipherText, _ := EncryptECIES([]byte("test data test data"), validPrivateKey.PublicKey(), WithAAD(aad)) + + tests := []struct { + name string + cipherText []byte + privateKey *ecdh.PrivateKey + opts []ECIESOption + expectError string + }{ + { + name: "Ciphertext too short", + cipherText: []byte("short"), + privateKey: validPrivateKey, + opts: []ECIESOption{WithAAD(aad)}, + expectError: errCipherTextTooShort, + }, + { + name: "Invalid private key", + cipherText: validCipherText, + privateKey: &ecdh.PrivateKey{}, + opts: []ECIESOption{WithAAD(aad)}, + expectError: errFailedECDHOperation, + }, + { + name: "Tampered ciphertext", + cipherText: append(validCipherText, byte(0)), + privateKey: validPrivateKey, + opts: []ECIESOption{WithAAD(aad)}, + expectError: errVerificationWithHMACFailed, + }, + { + name: "Wrong associated data", + cipherText: validCipherText, + privateKey: validPrivateKey, + opts: []ECIESOption{WithAAD([]byte("wrong data"))}, + expectError: errFailedToDecrypt, + }, + { + name: "No public key prepended and no public key bytes provided", + cipherText: validCipherText[X25519PublicKeySize:], + privateKey: validPrivateKey, + opts: []ECIESOption{WithAAD(aad), WithPubKeyPrepended(false)}, + expectError: errNoPublicKeyForDecryption, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := DecryptECIES(tt.cipherText, tt.privateKey, tt.opts...) 
+ if err == nil || !strings.Contains(err.Error(), tt.expectError) { + t.Errorf("Expected error containing '%s', got %v", tt.expectError, err) + } + }) + } +} + +func TestEncryptDecryptECIES_DefaultOptions_Succeeds(t *testing.T) { + plainText := []byte("Hello, World!") + recipientPrivateKey := mustGenerateX25519(t) + + cipherText, err := EncryptECIES(plainText, recipientPrivateKey.PublicKey()) + require.NoError(t, err) + + decryptedText, err := DecryptECIES(cipherText, recipientPrivateKey) + require.NoError(t, err) + + assert.Equal(t, plainText, decryptedText) +} + +func TestEncryptDecryptECIES_WithAAD_Succeeds(t *testing.T) { + plainText := []byte("Secret message") + aad := []byte("extra authentication data") + recipientPrivateKey := mustGenerateX25519(t) + + cipherText, err := EncryptECIES(plainText, recipientPrivateKey.PublicKey(), WithAAD(aad)) + require.NoError(t, err) + + decryptedText, err := DecryptECIES(cipherText, recipientPrivateKey, WithAAD(aad)) + require.NoError(t, err) + + assert.Equal(t, plainText, decryptedText) +} + +func TestEncryptDecryptECIES_WithCustomPrivateKey_Succeeds(t *testing.T) { + plainText := []byte("Custom key message") + recipientPrivateKey := mustGenerateX25519(t) + senderPrivateKey := mustGenerateX25519(t) + + cipherText, err := EncryptECIES(plainText, recipientPrivateKey.PublicKey(), WithPrivKey(senderPrivateKey)) + require.NoError(t, err) + + require.Equal(t, senderPrivateKey.PublicKey().Bytes(), cipherText[:X25519PublicKeySize]) + + decryptedText, err := DecryptECIES(cipherText, recipientPrivateKey) + require.NoError(t, err) + + assert.Equal(t, plainText, decryptedText) +} + +func TestEncryptDecryptECIES_WithoutPublicKeyPrepended_Succeeds(t *testing.T) { + plainText := []byte("No prepended key") + recipientPrivateKey := mustGenerateX25519(t) + senderPrivateKey := mustGenerateX25519(t) + + cipherText, err := EncryptECIES(plainText, recipientPrivateKey.PublicKey(), + WithPubKeyPrepended(false), + WithPrivKey(senderPrivateKey)) + require.NoError(t, err) + + // In a real scenario, the public key would be transmitted separately + senderPublicKeyBytes := senderPrivateKey.PublicKey().Bytes() + + decryptedText, err := DecryptECIES(cipherText, recipientPrivateKey, + WithPubKeyPrepended(false), + WithPubKeyBytes(senderPublicKeyBytes)) + require.NoError(t, err) + + assert.Equal(t, plainText, decryptedText) +} + +func TestEncryptDecryptECIES_DifferentAAD_FailsToDecrypt(t *testing.T) { + plainText := []byte("AAD test message") + encryptAAD := []byte("encryption AAD") + decryptAAD := []byte("decryption AAD") + recipientPrivateKey := mustGenerateX25519(t) + + cipherText, err := EncryptECIES(plainText, recipientPrivateKey.PublicKey(), WithAAD(encryptAAD)) + require.NoError(t, err) + + _, err = DecryptECIES(cipherText, recipientPrivateKey, WithAAD(decryptAAD)) + assert.Error(t, err, "Decryption should fail with different AAD") +} + +func mustGenerateX25519(t *testing.T) *ecdh.PrivateKey { + key, err := GenerateX25519() + require.NoError(t, err) + return key +} diff --git a/crypto/errors.go b/crypto/errors.go new file mode 100644 index 0000000000..a6128f9860 --- /dev/null +++ b/crypto/errors.go @@ -0,0 +1,62 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package crypto + +import ( + "github.com/sourcenetwork/defradb/errors" +) + +const ( + errFailedToGenerateEphemeralKey string = "failed to generate ephemeral key" + errFailedECDHOperation string = "failed ECDH operation" + errFailedKDFOperationForAESKey string = "failed KDF operation for AES key" + errFailedKDFOperationForHMACKey string = "failed KDF operation for HMAC key" + errFailedToEncrypt string = "failed to encrypt" + errCipherTextTooShort string = "cipherText too short" + errFailedToParseEphemeralPublicKey string = "failed to parse ephemeral public key" + errVerificationWithHMACFailed string = "verification with HMAC failed" + errFailedToDecrypt string = "failed to decrypt" + errNoPublicKeyForDecryption string = "no public key provided for decryption" +) + +var ( + ErrCipherTextTooShort = errors.New(errCipherTextTooShort) + ErrVerificationWithHMACFailed = errors.New(errVerificationWithHMACFailed) + ErrNoPublicKeyForDecryption = errors.New(errNoPublicKeyForDecryption) +) + +func NewErrFailedToGenerateEphemeralKey(inner error) error { + return errors.Wrap(errFailedToGenerateEphemeralKey, inner) +} + +func NewErrFailedECDHOperation(inner error) error { + return errors.Wrap(errFailedECDHOperation, inner) +} + +func NewErrFailedKDFOperationForAESKey(inner error) error { + return errors.Wrap(errFailedKDFOperationForAESKey, inner) +} + +func NewErrFailedKDFOperationForHMACKey(inner error) error { + return errors.Wrap(errFailedKDFOperationForHMACKey, inner) +} + +func NewErrFailedToEncrypt(inner error) error { + return errors.Wrap(errFailedToEncrypt, inner) +} + +func NewErrFailedToParseEphemeralPublicKey(inner error) error { + return errors.Wrap(errFailedToParseEphemeralPublicKey, inner) +} + +func NewErrFailedToDecrypt(inner error) error { + return errors.Wrap(errFailedToDecrypt, inner) +} diff --git a/internal/encryption/nonce.go b/crypto/nonce.go similarity index 86% rename from internal/encryption/nonce.go rename to crypto/nonce.go index 67a5467a4e..9c8f00b31f 100644 --- a/internal/encryption/nonce.go +++ b/crypto/nonce.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. 
-package encryption +package crypto import ( "crypto/rand" @@ -18,12 +18,12 @@ import ( "strings" ) -const nonceLength = 12 +const AESNonceSize = 12 var generateNonceFunc = generateNonce func generateNonce() ([]byte, error) { - nonce := make([]byte, nonceLength) + nonce := make([]byte, AESNonceSize) if _, err := io.ReadFull(rand.Reader, nonce); err != nil { return nil, err } @@ -35,11 +35,11 @@ func generateNonce() ([]byte, error) { func generateTestNonce() ([]byte, error) { nonce := []byte("deterministic nonce for testing") - if len(nonce) < nonceLength { + if len(nonce) < AESNonceSize { return nil, errors.New("nonce length is longer than available deterministic nonce") } - return nonce[:nonceLength], nil + return nonce[:AESNonceSize], nil } func init() { @@ -48,6 +48,5 @@ func init() { // TODO: We should try to find a better way to detect this https://github.com/sourcenetwork/defradb/issues/2801 if strings.HasSuffix(arg, ".test") || strings.Contains(arg, "/defradb/tests/") { generateNonceFunc = generateTestNonce - generateEncryptionKeyFunc = generateTestEncryptionKey } } diff --git a/datastore/mocks/blockstore.go b/datastore/mocks/blockstore.go new file mode 100644 index 0000000000..6dab79de7c --- /dev/null +++ b/datastore/mocks/blockstore.go @@ -0,0 +1,493 @@ +// Code generated by mockery. DO NOT EDIT. + +package mocks + +import ( + blocks "github.com/ipfs/go-block-format" + cid "github.com/ipfs/go-cid" + + context "context" + + datastore "github.com/sourcenetwork/defradb/datastore" + + mock "github.com/stretchr/testify/mock" +) + +// Blockstore is an autogenerated mock type for the Blockstore type +type Blockstore struct { + mock.Mock +} + +type Blockstore_Expecter struct { + mock *mock.Mock +} + +func (_m *Blockstore) EXPECT() *Blockstore_Expecter { + return &Blockstore_Expecter{mock: &_m.Mock} +} + +// AllKeysChan provides a mock function with given fields: ctx +func (_m *Blockstore) AllKeysChan(ctx context.Context) (<-chan cid.Cid, error) { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for AllKeysChan") + } + + var r0 <-chan cid.Cid + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (<-chan cid.Cid, error)); ok { + return rf(ctx) + } + if rf, ok := ret.Get(0).(func(context.Context) <-chan cid.Cid); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(<-chan cid.Cid) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Blockstore_AllKeysChan_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AllKeysChan' +type Blockstore_AllKeysChan_Call struct { + *mock.Call +} + +// AllKeysChan is a helper method to define mock.On call +// - ctx context.Context +func (_e *Blockstore_Expecter) AllKeysChan(ctx interface{}) *Blockstore_AllKeysChan_Call { + return &Blockstore_AllKeysChan_Call{Call: _e.mock.On("AllKeysChan", ctx)} +} + +func (_c *Blockstore_AllKeysChan_Call) Run(run func(ctx context.Context)) *Blockstore_AllKeysChan_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *Blockstore_AllKeysChan_Call) Return(_a0 <-chan cid.Cid, _a1 error) *Blockstore_AllKeysChan_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Blockstore_AllKeysChan_Call) RunAndReturn(run func(context.Context) (<-chan cid.Cid, error)) *Blockstore_AllKeysChan_Call { + _c.Call.Return(run) + return _c +} + +// AsIPLDStorage provides a mock function 
with given fields: +func (_m *Blockstore) AsIPLDStorage() datastore.IPLDStorage { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for AsIPLDStorage") + } + + var r0 datastore.IPLDStorage + if rf, ok := ret.Get(0).(func() datastore.IPLDStorage); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(datastore.IPLDStorage) + } + } + + return r0 +} + +// Blockstore_AsIPLDStorage_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AsIPLDStorage' +type Blockstore_AsIPLDStorage_Call struct { + *mock.Call +} + +// AsIPLDStorage is a helper method to define mock.On call +func (_e *Blockstore_Expecter) AsIPLDStorage() *Blockstore_AsIPLDStorage_Call { + return &Blockstore_AsIPLDStorage_Call{Call: _e.mock.On("AsIPLDStorage")} +} + +func (_c *Blockstore_AsIPLDStorage_Call) Run(run func()) *Blockstore_AsIPLDStorage_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *Blockstore_AsIPLDStorage_Call) Return(_a0 datastore.IPLDStorage) *Blockstore_AsIPLDStorage_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Blockstore_AsIPLDStorage_Call) RunAndReturn(run func() datastore.IPLDStorage) *Blockstore_AsIPLDStorage_Call { + _c.Call.Return(run) + return _c +} + +// DeleteBlock provides a mock function with given fields: _a0, _a1 +func (_m *Blockstore) DeleteBlock(_a0 context.Context, _a1 cid.Cid) error { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for DeleteBlock") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, cid.Cid) error); ok { + r0 = rf(_a0, _a1) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Blockstore_DeleteBlock_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteBlock' +type Blockstore_DeleteBlock_Call struct { + *mock.Call +} + +// DeleteBlock is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 cid.Cid +func (_e *Blockstore_Expecter) DeleteBlock(_a0 interface{}, _a1 interface{}) *Blockstore_DeleteBlock_Call { + return &Blockstore_DeleteBlock_Call{Call: _e.mock.On("DeleteBlock", _a0, _a1)} +} + +func (_c *Blockstore_DeleteBlock_Call) Run(run func(_a0 context.Context, _a1 cid.Cid)) *Blockstore_DeleteBlock_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(cid.Cid)) + }) + return _c +} + +func (_c *Blockstore_DeleteBlock_Call) Return(_a0 error) *Blockstore_DeleteBlock_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Blockstore_DeleteBlock_Call) RunAndReturn(run func(context.Context, cid.Cid) error) *Blockstore_DeleteBlock_Call { + _c.Call.Return(run) + return _c +} + +// Get provides a mock function with given fields: _a0, _a1 +func (_m *Blockstore) Get(_a0 context.Context, _a1 cid.Cid) (blocks.Block, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for Get") + } + + var r0 blocks.Block + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, cid.Cid) (blocks.Block, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, cid.Cid) blocks.Block); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(blocks.Block) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, cid.Cid) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Blockstore_Get_Call is a *mock.Call that shadows Run/Return methods with type explicit version for 
method 'Get' +type Blockstore_Get_Call struct { + *mock.Call +} + +// Get is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 cid.Cid +func (_e *Blockstore_Expecter) Get(_a0 interface{}, _a1 interface{}) *Blockstore_Get_Call { + return &Blockstore_Get_Call{Call: _e.mock.On("Get", _a0, _a1)} +} + +func (_c *Blockstore_Get_Call) Run(run func(_a0 context.Context, _a1 cid.Cid)) *Blockstore_Get_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(cid.Cid)) + }) + return _c +} + +func (_c *Blockstore_Get_Call) Return(_a0 blocks.Block, _a1 error) *Blockstore_Get_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Blockstore_Get_Call) RunAndReturn(run func(context.Context, cid.Cid) (blocks.Block, error)) *Blockstore_Get_Call { + _c.Call.Return(run) + return _c +} + +// GetSize provides a mock function with given fields: _a0, _a1 +func (_m *Blockstore) GetSize(_a0 context.Context, _a1 cid.Cid) (int, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for GetSize") + } + + var r0 int + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, cid.Cid) (int, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, cid.Cid) int); ok { + r0 = rf(_a0, _a1) + } else { + r0 = ret.Get(0).(int) + } + + if rf, ok := ret.Get(1).(func(context.Context, cid.Cid) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Blockstore_GetSize_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSize' +type Blockstore_GetSize_Call struct { + *mock.Call +} + +// GetSize is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 cid.Cid +func (_e *Blockstore_Expecter) GetSize(_a0 interface{}, _a1 interface{}) *Blockstore_GetSize_Call { + return &Blockstore_GetSize_Call{Call: _e.mock.On("GetSize", _a0, _a1)} +} + +func (_c *Blockstore_GetSize_Call) Run(run func(_a0 context.Context, _a1 cid.Cid)) *Blockstore_GetSize_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(cid.Cid)) + }) + return _c +} + +func (_c *Blockstore_GetSize_Call) Return(_a0 int, _a1 error) *Blockstore_GetSize_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Blockstore_GetSize_Call) RunAndReturn(run func(context.Context, cid.Cid) (int, error)) *Blockstore_GetSize_Call { + _c.Call.Return(run) + return _c +} + +// Has provides a mock function with given fields: _a0, _a1 +func (_m *Blockstore) Has(_a0 context.Context, _a1 cid.Cid) (bool, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for Has") + } + + var r0 bool + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, cid.Cid) (bool, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, cid.Cid) bool); ok { + r0 = rf(_a0, _a1) + } else { + r0 = ret.Get(0).(bool) + } + + if rf, ok := ret.Get(1).(func(context.Context, cid.Cid) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Blockstore_Has_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Has' +type Blockstore_Has_Call struct { + *mock.Call +} + +// Has is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 cid.Cid +func (_e *Blockstore_Expecter) Has(_a0 interface{}, _a1 interface{}) *Blockstore_Has_Call { + return &Blockstore_Has_Call{Call: _e.mock.On("Has", 
_a0, _a1)} +} + +func (_c *Blockstore_Has_Call) Run(run func(_a0 context.Context, _a1 cid.Cid)) *Blockstore_Has_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(cid.Cid)) + }) + return _c +} + +func (_c *Blockstore_Has_Call) Return(_a0 bool, _a1 error) *Blockstore_Has_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *Blockstore_Has_Call) RunAndReturn(run func(context.Context, cid.Cid) (bool, error)) *Blockstore_Has_Call { + _c.Call.Return(run) + return _c +} + +// HashOnRead provides a mock function with given fields: enabled +func (_m *Blockstore) HashOnRead(enabled bool) { + _m.Called(enabled) +} + +// Blockstore_HashOnRead_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'HashOnRead' +type Blockstore_HashOnRead_Call struct { + *mock.Call +} + +// HashOnRead is a helper method to define mock.On call +// - enabled bool +func (_e *Blockstore_Expecter) HashOnRead(enabled interface{}) *Blockstore_HashOnRead_Call { + return &Blockstore_HashOnRead_Call{Call: _e.mock.On("HashOnRead", enabled)} +} + +func (_c *Blockstore_HashOnRead_Call) Run(run func(enabled bool)) *Blockstore_HashOnRead_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(bool)) + }) + return _c +} + +func (_c *Blockstore_HashOnRead_Call) Return() *Blockstore_HashOnRead_Call { + _c.Call.Return() + return _c +} + +func (_c *Blockstore_HashOnRead_Call) RunAndReturn(run func(bool)) *Blockstore_HashOnRead_Call { + _c.Call.Return(run) + return _c +} + +// Put provides a mock function with given fields: _a0, _a1 +func (_m *Blockstore) Put(_a0 context.Context, _a1 blocks.Block) error { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for Put") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, blocks.Block) error); ok { + r0 = rf(_a0, _a1) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Blockstore_Put_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Put' +type Blockstore_Put_Call struct { + *mock.Call +} + +// Put is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 blocks.Block +func (_e *Blockstore_Expecter) Put(_a0 interface{}, _a1 interface{}) *Blockstore_Put_Call { + return &Blockstore_Put_Call{Call: _e.mock.On("Put", _a0, _a1)} +} + +func (_c *Blockstore_Put_Call) Run(run func(_a0 context.Context, _a1 blocks.Block)) *Blockstore_Put_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(blocks.Block)) + }) + return _c +} + +func (_c *Blockstore_Put_Call) Return(_a0 error) *Blockstore_Put_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Blockstore_Put_Call) RunAndReturn(run func(context.Context, blocks.Block) error) *Blockstore_Put_Call { + _c.Call.Return(run) + return _c +} + +// PutMany provides a mock function with given fields: _a0, _a1 +func (_m *Blockstore) PutMany(_a0 context.Context, _a1 []blocks.Block) error { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for PutMany") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, []blocks.Block) error); ok { + r0 = rf(_a0, _a1) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Blockstore_PutMany_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PutMany' +type Blockstore_PutMany_Call struct { + *mock.Call +} + +// PutMany is a helper method to define mock.On call +// - _a0 context.Context +// - 
_a1 []blocks.Block +func (_e *Blockstore_Expecter) PutMany(_a0 interface{}, _a1 interface{}) *Blockstore_PutMany_Call { + return &Blockstore_PutMany_Call{Call: _e.mock.On("PutMany", _a0, _a1)} +} + +func (_c *Blockstore_PutMany_Call) Run(run func(_a0 context.Context, _a1 []blocks.Block)) *Blockstore_PutMany_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].([]blocks.Block)) + }) + return _c +} + +func (_c *Blockstore_PutMany_Call) Return(_a0 error) *Blockstore_PutMany_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Blockstore_PutMany_Call) RunAndReturn(run func(context.Context, []blocks.Block) error) *Blockstore_PutMany_Call { + _c.Call.Return(run) + return _c +} + +// NewBlockstore creates a new instance of Blockstore. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewBlockstore(t interface { + mock.TestingT + Cleanup(func()) +}) *Blockstore { + mock := &Blockstore{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/datastore/mocks/txn.go b/datastore/mocks/txn.go index 41606260ea..ea923d5de4 100644 --- a/datastore/mocks/txn.go +++ b/datastore/mocks/txn.go @@ -196,19 +196,19 @@ func (_c *Txn_Discard_Call) RunAndReturn(run func(context.Context)) *Txn_Discard } // Encstore provides a mock function with given fields: -func (_m *Txn) Encstore() datastore.DSReaderWriter { +func (_m *Txn) Encstore() datastore.Blockstore { ret := _m.Called() if len(ret) == 0 { panic("no return value specified for Encstore") } - var r0 datastore.DSReaderWriter - if rf, ok := ret.Get(0).(func() datastore.DSReaderWriter); ok { + var r0 datastore.Blockstore + if rf, ok := ret.Get(0).(func() datastore.Blockstore); ok { r0 = rf() } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(datastore.DSReaderWriter) + r0 = ret.Get(0).(datastore.Blockstore) } } @@ -232,12 +232,12 @@ func (_c *Txn_Encstore_Call) Run(run func()) *Txn_Encstore_Call { return _c } -func (_c *Txn_Encstore_Call) Return(_a0 datastore.DSReaderWriter) *Txn_Encstore_Call { +func (_c *Txn_Encstore_Call) Return(_a0 datastore.Blockstore) *Txn_Encstore_Call { _c.Call.Return(_a0) return _c } -func (_c *Txn_Encstore_Call) RunAndReturn(run func() datastore.DSReaderWriter) *Txn_Encstore_Call { +func (_c *Txn_Encstore_Call) RunAndReturn(run func() datastore.Blockstore) *Txn_Encstore_Call { _c.Call.Return(run) return _c } diff --git a/datastore/mocks/utils.go b/datastore/mocks/utils.go index af3c49fd0c..d6c69684be 100644 --- a/datastore/mocks/utils.go +++ b/datastore/mocks/utils.go @@ -24,6 +24,7 @@ type MultiStoreTxn struct { MockRootstore *DSReaderWriter MockDatastore *DSReaderWriter MockHeadstore *DSReaderWriter + MockEncstore *Blockstore MockDAGstore *DAGStore MockSystemstore *DSReaderWriter } @@ -36,6 +37,14 @@ func prepareDataStore(t *testing.T) *DSReaderWriter { return dataStore } +func prepareEncStore(t *testing.T) *Blockstore { + encStore := NewBlockstore(t) + encStore.EXPECT().Get(mock.Anything, mock.Anything).Return(nil, ds.ErrNotFound).Maybe() + encStore.EXPECT().Put(mock.Anything, mock.Anything).Return(nil).Maybe() + encStore.EXPECT().Has(mock.Anything, mock.Anything).Return(true, nil).Maybe() + return encStore +} + func prepareRootstore(t *testing.T) *DSReaderWriter { return NewDSReaderWriter(t) } @@ -75,6 +84,7 @@ func NewTxnWithMultistore(t *testing.T) *MultiStoreTxn { t: t, MockRootstore: prepareRootstore(t), MockDatastore: 
prepareDataStore(t),
+		MockEncstore:    prepareEncStore(t),
 		MockHeadstore:   prepareHeadStore(t),
 		MockDAGstore:    prepareDAGStore(t),
 		MockSystemstore: prepareSystemStore(t),
@@ -82,6 +92,7 @@ func NewTxnWithMultistore(t *testing.T) *MultiStoreTxn {
 
 	txn.EXPECT().Rootstore().Return(result.MockRootstore).Maybe()
 	txn.EXPECT().Datastore().Return(result.MockDatastore).Maybe()
+	txn.EXPECT().Encstore().Return(result.MockEncstore).Maybe()
 	txn.EXPECT().Headstore().Return(result.MockHeadstore).Maybe()
 	txn.EXPECT().Blockstore().Return(result.MockDAGstore).Maybe()
 	txn.EXPECT().Systemstore().Return(result.MockSystemstore).Maybe()
diff --git a/datastore/multi.go b/datastore/multi.go
index f863924d5d..cbbf80e23f 100644
--- a/datastore/multi.go
+++ b/datastore/multi.go
@@ -29,12 +29,11 @@ var (
 type multistore struct {
 	root   DSReaderWriter
 	data   DSReaderWriter
-	enc    DSReaderWriter
+	enc    Blockstore
 	head   DSReaderWriter
 	peer   DSBatching
 	system DSReaderWriter
-	// block DSReaderWriter
-	dag Blockstore
+	dag    Blockstore
 }
 
 var _ MultiStore = (*multistore)(nil)
@@ -45,7 +44,7 @@ func MultiStoreFrom(rootstore ds.Datastore) MultiStore {
 	ms := &multistore{
 		root:   rootRW,
 		data:   prefix(rootRW, dataStoreKey),
-		enc:    prefix(rootRW, encStoreKey),
+		enc:    newBlockstore(prefix(rootRW, encStoreKey)),
 		head:   prefix(rootRW, headStoreKey),
 		peer:   namespace.Wrap(rootstore, peerStoreKey),
 		system: prefix(rootRW, systemStoreKey),
@@ -61,7 +60,7 @@ func (ms multistore) Datastore() DSReaderWriter {
 }
 
 // Encstore implements MultiStore.
-func (ms multistore) Encstore() DSReaderWriter {
+func (ms multistore) Encstore() Blockstore {
 	return ms.enc
 }
diff --git a/docs/data_format_changes/i2891-no-change-tests-updated.md b/docs/data_format_changes/i2891-no-change-tests-updated.md
new file mode 100644
index 0000000000..8d22b94c15
--- /dev/null
+++ b/docs/data_format_changes/i2891-no-change-tests-updated.md
@@ -0,0 +1,24 @@
+# Doc encryption key exchange
+
+For the key exchange mechanism, we slightly changed the structure of the DAG block so it can hold additional information.
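+
+As a minimal sketch of the new layout (the variable names here are placeholders,
+not code from this patch), a block now references a separate encryption block by
+CID instead of carrying a plain `isEncrypted` flag. Using the `Block` and
+`Encryption` types from `internal/core/block`:
+
+```go
+fieldName := "name"
+enc := coreblock.Encryption{
+	DocID:     []byte("bae-..."), // document the key belongs to
+	FieldName: &fieldName,        // nil when the whole document is encrypted
+	Key:       symmetricKey,      // symmetric encryption key bytes
+}
+
+// The encryption block is stored separately (in the Encstore) and the main
+// block points to it through an optional CID link:
+block := coreblock.Block{
+	Delta:      delta,    // the CRDT delta, possibly encrypted
+	Links:      links,    // links to other blocks in the DAG
+	Encryption: &encLink, // cidlink.Link to the stored Encryption block
+}
+```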
diff --git a/go.mod b/go.mod index 4be484b96e..4788e9667c 100644 --- a/go.mod +++ b/go.mod @@ -44,6 +44,7 @@ require ( github.com/multiformats/go-multihash v0.2.3 github.com/pelletier/go-toml v1.9.5 github.com/pkg/errors v0.9.1 + github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 github.com/sourcenetwork/acp_core v0.0.0-20240607160510-47a5306b2ad2 github.com/sourcenetwork/badger/v4 v4.2.1-0.20231113215945-a63444ca5276 github.com/sourcenetwork/corelog v0.0.8 @@ -62,6 +63,7 @@ require ( go.opentelemetry.io/otel/metric v1.30.0 go.opentelemetry.io/otel/sdk/metric v1.30.0 go.uber.org/zap v1.27.0 + golang.org/x/crypto v0.26.0 golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa google.golang.org/grpc v1.66.2 google.golang.org/protobuf v1.34.2 @@ -86,7 +88,7 @@ require ( cosmossdk.io/x/feegrant v0.1.0 // indirect cosmossdk.io/x/tx v0.13.4 // indirect cosmossdk.io/x/upgrade v0.1.1 // indirect - filippo.io/edwards25519 v1.0.0 // indirect + filippo.io/edwards25519 v1.1.0 // indirect github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect github.com/99designs/keyring v1.2.2 // indirect github.com/DataDog/datadog-go v3.2.0+incompatible // indirect @@ -359,7 +361,6 @@ require ( go.uber.org/fx v1.22.2 // indirect go.uber.org/mock v0.4.0 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/crypto v0.26.0 // indirect golang.org/x/mod v0.20.0 // indirect golang.org/x/net v0.28.0 // indirect golang.org/x/oauth2 v0.21.0 // indirect diff --git a/go.sum b/go.sum index eaf23755d5..374d490ecf 100644 --- a/go.sum +++ b/go.sum @@ -219,8 +219,8 @@ dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7 dmitri.shuralyov.com/html/belt v0.0.0-20180602232347-f7d459c86be0/go.mod h1:JLBrvjyP0v+ecvNYvCpyZgu5/xkfAUhi6wJj28eUfSU= dmitri.shuralyov.com/service/change v0.0.0-20181023043359-a85b471d5412/go.mod h1:a1inKt/atXimZ4Mv927x+r7UpyzRUf4emIoiiSC2TN4= dmitri.shuralyov.com/state v0.0.0-20180228185332-28bcc343414c/go.mod h1:0PRwlb0D6DFvNNtx+9ybjezNCa8XF0xaYcETyp6rHWU= -filippo.io/edwards25519 v1.0.0 h1:0wAIcmJUqRdI8IJ/3eGi5/HwXZWPujYXXlkrQogz0Ek= -filippo.io/edwards25519 v1.0.0/go.mod h1:N1IkdkCkiLB6tki+MYJoSx2JTY9NUlxZE7eHn5EwJns= +filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= +filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg= github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMbk2FiG/kXiLl8BRyzTWDw7gX/Hz7Dd5eDMs= github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4= @@ -1265,6 +1265,8 @@ github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= 
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= diff --git a/http/client.go b/http/client.go index e98eac7d07..777cf4a733 100644 --- a/http/client.go +++ b/http/client.go @@ -489,6 +489,10 @@ func (c *Client) Blockstore() datastore.Blockstore { panic("client side database") } +func (c *Client) Encstore() datastore.Blockstore { + panic("client side database") +} + func (c *Client) Peerstore() datastore.DSBatching { panic("client side database") } diff --git a/http/client_tx.go b/http/client_tx.go index 5b99f5aaad..daacb4128e 100644 --- a/http/client_tx.go +++ b/http/client_tx.go @@ -91,7 +91,7 @@ func (c *Transaction) Datastore() datastore.DSReaderWriter { panic("client side transaction") } -func (c *Transaction) Encstore() datastore.DSReaderWriter { +func (c *Transaction) Encstore() datastore.Blockstore { panic("client side transaction") } diff --git a/internal/core/block/block.go b/internal/core/block/block.go index d2caa610f7..1ec62fe939 100644 --- a/internal/core/block/block.go +++ b/internal/core/block/block.go @@ -29,12 +29,15 @@ import ( // Schema is the IPLD schema type that represents a `Block`. var ( - Schema schema.Type - SchemaPrototype ipld.NodePrototype + Schema schema.Type + SchemaPrototype ipld.NodePrototype + EncryptionSchema schema.Type + EncryptionSchemaPrototype ipld.NodePrototype ) func init() { Schema, SchemaPrototype = mustSetSchema( + "Block", &Block{}, &DAGLink{}, &crdt.CRDT{}, @@ -42,6 +45,11 @@ func init() { &crdt.CompositeDAGDelta{}, &crdt.CounterDelta{}, ) + + EncryptionSchema, EncryptionSchemaPrototype = mustSetSchema( + "Encryption", + &Encryption{}, + ) } type schemaDefinition interface { @@ -49,7 +57,7 @@ type schemaDefinition interface { IPLDSchemaBytes() []byte } -func mustSetSchema(schemas ...schemaDefinition) (schema.Type, ipld.NodePrototype) { +func mustSetSchema(schemaName string, schemas ...schemaDefinition) (schema.Type, ipld.NodePrototype) { schemaBytes := make([][]byte, 0, len(schemas)) for _, s := range schemas { schemaBytes = append(schemaBytes, s.IPLDSchemaBytes()) @@ -59,12 +67,12 @@ func mustSetSchema(schemas ...schemaDefinition) (schema.Type, ipld.NodePrototype if err != nil { panic(err) } - blockSchemaType := ts.TypeByName("Block") + blockSchemaType := ts.TypeByName(schemaName) // Calling bindnode.Prototype here ensure that [Block] and all the types it contains // are compatible with the IPLD schema defined by blockSchemaType. // If [Block] and `blockSchematype` do not match, this will panic. - proto := bindnode.Prototype(&Block{}, blockSchemaType) + proto := bindnode.Prototype(schemas[0], blockSchemaType) return blockSchemaType, proto.Representation() } @@ -97,27 +105,78 @@ func NewDAGLink(name string, link cidlink.Link) DAGLink { } } +// Encryption contains the encryption information for the block's delta. +type Encryption struct { + // DocID is the ID of the document that is encrypted with the associated encryption key. + DocID []byte + // FieldName is the name of the field that is encrypted with the associated encryption key. + // It is set if encryption is applied to a field instead of the whole doc. + // It needs to be a pointer so that it can be translated from and to `optional` in the IPLD schema. + FieldName *string + // Encryption key. + Key []byte +} + // Block is a block that contains a CRDT delta and links to other blocks. type Block struct { // Delta is the CRDT delta that is stored in the block. 
	Delta crdt.CRDT
 	// Links are the links to other blocks in the DAG.
 	Links []DAGLink
-	// IsEncrypted is a flag that indicates if the block's delta is encrypted.
-	// It needs to be a pointer so that it can be translated from and to `optional Bool` in the IPLD schema.
-	IsEncrypted *bool
+	// Encryption contains the encryption information for the block's delta.
+	// It needs to be a pointer so that it can be translated from and to `optional` in the IPLD schema.
+	Encryption *cidlink.Link
+}
+
+// IsEncrypted returns true if the block is encrypted.
+func (block *Block) IsEncrypted() bool {
+	return block.Encryption != nil
+}
+
+// Clone returns a shallow copy of the block with cloned delta.
+func (block *Block) Clone() *Block {
+	return &Block{
+		Delta:      block.Delta.Clone(),
+		Links:      block.Links,
+		Encryption: block.Encryption,
+	}
+}
+
+// GetHeadLinks returns the CIDs of the previous blocks. There can be more than one when the document has multiple heads.
+func (block *Block) GetHeadLinks() []cid.Cid {
+	var heads []cid.Cid
+	for _, link := range block.Links {
+		if link.Name == core.HEAD {
+			heads = append(heads, link.Cid)
+		}
+	}
+	return heads
 }
 
 // IPLDSchemaBytes returns the IPLD schema representation for the block.
 //
 // This needs to match the [Block] struct or [mustSetSchema] will panic on init.
-func (b Block) IPLDSchemaBytes() []byte {
+func (block *Block) IPLDSchemaBytes() []byte {
 	return []byte(`
-	type Block struct {
-		delta       CRDT
-		links       [ DAGLink ]
-		isEncrypted optional Bool
-	}`)
+		type Block struct {
+			delta      CRDT
+			links      [DAGLink]
+			encryption optional Link
+		}
+	`)
+}
+
+// IPLDSchemaBytes returns the IPLD schema representation for the encryption block.
+//
+// This needs to match the [Encryption] struct or [mustSetSchema] will panic on init.
+func (enc *Encryption) IPLDSchemaBytes() []byte {
+	return []byte(`
+		type Encryption struct {
+			docID     Bytes
+			fieldName optional String
+			key       Bytes
+		}
+	`)
 }
 
 // New creates a new block with the given delta and links.
@@ -153,6 +212,16 @@ func New(delta core.Delta, links []DAGLink, heads ...cid.Cid) *Block {
 	}
 }
 
+// GetEncryptionBlockFromBytes returns an encryption block from encoded bytes.
+func GetEncryptionBlockFromBytes(b []byte) (*Encryption, error) {
+	enc := &Encryption{}
+	err := enc.Unmarshal(b)
+	if err != nil {
+		return nil, err
+	}
+	return enc, nil
+}
+
 // GetFromBytes returns a block from encoded bytes.
 func GetFromBytes(b []byte) (*Block, error) {
 	block := &Block{}
@@ -172,8 +241,17 @@ func GetFromNode(node ipld.Node) (*Block, error) {
 	return block, nil
 }
 
+// GetEncryptionBlockFromNode returns an encryption block from an IPLD node.
+func GetEncryptionBlockFromNode(node ipld.Node) (*Encryption, error) {
+	encBlock, ok := bindnode.Unwrap(node).(*Encryption)
+	if !ok {
+		return nil, NewErrNodeToBlock(node)
+	}
+	return encBlock, nil
+}
+
 // Marshal encodes the delta using CBOR encoding.
-func (block *Block) Marshal() (data []byte, err error) {
+func (block *Block) Marshal() ([]byte, error) {
 	b, err := ipld.Marshal(dagcbor.Encode, block, Schema)
 	if err != nil {
 		return nil, NewErrEncodingBlock(err)
@@ -183,12 +261,25 @@ func (block *Block) Marshal() (data []byte, err error) {
 
 // Unmarshal decodes the delta from CBOR encoding.
 func (block *Block) Unmarshal(b []byte) error {
-	_, err := ipld.Unmarshal(
-		b,
-		dagcbor.Decode,
-		block,
-		Schema,
-	)
+	_, err := ipld.Unmarshal(b, dagcbor.Decode, block, Schema)
+	if err != nil {
+		return NewErrUnmarshallingBlock(err)
+	}
+	return nil
+}
+
+// Marshal encodes the encryption block using CBOR encoding.
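+// The resulting bytes are the form in which encryption blocks travel between
+// peers during key exchange (tryFetchMissingBlocksAndMerge in internal/db/merge.go
+// decodes received blocks with the matching Unmarshal below).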
+func (enc *Encryption) Marshal() ([]byte, error) {
+	b, err := ipld.Marshal(dagcbor.Encode, enc, EncryptionSchema)
+	if err != nil {
+		return nil, NewErrEncodingBlock(err)
+	}
+	return b, nil
+}
+
+// Unmarshal decodes the encryption block from CBOR encoding.
+func (enc *Encryption) Unmarshal(b []byte) error {
+	_, err := ipld.Unmarshal(b, dagcbor.Decode, enc, EncryptionSchema)
 	if err != nil {
 		return NewErrUnmarshallingBlock(err)
 	}
@@ -196,10 +287,15 @@ func (block *Block) Unmarshal(b []byte) error {
 }
 
 // GenerateNode generates an IPLD node from the block in its representation form.
-func (block *Block) GenerateNode() (node ipld.Node) {
+func (block *Block) GenerateNode() ipld.Node {
 	return bindnode.Wrap(block, Schema).Representation()
 }
 
+// GenerateNode generates an IPLD node from the encryption block in its representation form.
+func (enc *Encryption) GenerateNode() ipld.Node {
+	return bindnode.Wrap(enc, EncryptionSchema).Representation()
+}
+
 // GetLinkByName returns the link by name. It will return false if the link does not exist.
 func (block *Block) GetLinkByName(name string) (cidlink.Link, bool) {
 	for _, link := range block.Links {
diff --git a/internal/core/block/block_test.go b/internal/core/block/block_test.go
index 5b68cf9067..d7fe2d1bf0 100644
--- a/internal/core/block/block_test.go
+++ b/internal/core/block/block_test.go
@@ -180,13 +180,23 @@ func TestBlockDeltaPriority(t *testing.T) {
 	require.Equal(t, uint64(2), block.Delta.GetPriority())
 }
 
-func TestBlockMarshal_IsEncryptedNotSet_ShouldNotContainIsEcryptedField(t *testing.T) {
+func TestBlockMarshal_WithEncryptionLink_ShouldLoadEncryptionBlock(t *testing.T) {
 	lsys := cidlink.DefaultLinkSystem()
 	store := memstore.Store{}
 	lsys.SetReadStorage(&store)
 	lsys.SetWriteStorage(&store)
 
-	fieldBlock := Block{
+	encBlock := Encryption{
+		DocID: []byte("docID"),
+		Key:   []byte("keyID"),
+	}
+
+	encBlockLink, err := lsys.Store(ipld.LinkContext{}, GetLinkPrototype(), encBlock.GenerateNode())
+	require.NoError(t, err)
+
+	link := encBlockLink.(cidlink.Link)
+
+	block := Block{
 		Delta: crdt.CRDT{
 			LWWRegDelta: &crdt.LWWRegDelta{
 				DocID:           []byte("docID"),
@@ -196,11 +206,27 @@ func TestBlockMarshal_IsEncryptedNotSet_ShouldNotContainIsEcryptedField(t *testi
 				Data:            []byte("John"),
 			},
 		},
+		Encryption: &link,
 	}
-	b, err := fieldBlock.Marshal()
+	blockLink, err := lsys.Store(ipld.LinkContext{}, GetLinkPrototype(), block.GenerateNode())
 	require.NoError(t, err)
-	require.NotContains(t, string(b), "isEncrypted")
+
+	nd, err := lsys.Load(ipld.LinkContext{}, blockLink, SchemaPrototype)
+	require.NoError(t, err)
+
+	loadedBlock, err := GetFromNode(nd)
+	require.NoError(t, err)
+
+	require.NotNil(t, loadedBlock.Encryption)
+
+	nd, err = lsys.Load(ipld.LinkContext{}, loadedBlock.Encryption, EncryptionSchemaPrototype)
+	require.NoError(t, err)
+
+	loadedEncBlock, err := GetEncryptionBlockFromNode(nd)
+	require.NoError(t, err)
+
+	require.Equal(t, encBlock, *loadedEncBlock)
 }
 
 func TestBlockMarshal_IsEncryptedNotSetWithLinkSystem_ShouldLoadWithNoError(t *testing.T) {
@@ -228,3 +254,58 @@ func TestBlockMarshal_IsEncryptedNotSetWithLinkSystem_ShouldLoadWithNoError(t *t
 	_, err = GetFromNode(nd)
 	require.NoError(t, err)
 }
+
+func TestBlockUnmarshal_ValidInput_Succeed(t *testing.T) {
+	validBlock := Block{
+		Delta: crdt.CRDT{
+			LWWRegDelta: &crdt.LWWRegDelta{
+				DocID:           []byte("docID"),
+				FieldName:       "name",
+				Priority:        1,
+				SchemaVersionID: "schemaVersionID",
+				Data:            []byte("John"),
+			},
+		},
+	}
+
+	marshaledData, err := validBlock.Marshal()
+	require.NoError(t, err)
+
+	
var unmarshaledBlock Block + err = unmarshaledBlock.Unmarshal(marshaledData) + require.NoError(t, err) + + require.Equal(t, validBlock, unmarshaledBlock) +} + +func TestBlockUnmarshal_InvalidCBOR_Error(t *testing.T) { + invalidData := []byte("invalid CBOR data") + var block Block + err := block.Unmarshal(invalidData) + require.Error(t, err) +} + +func TestEncryptionBlockUnmarshal_InvalidCBOR_Error(t *testing.T) { + invalidData := []byte("invalid CBOR data") + var encBlock Encryption + err := encBlock.Unmarshal(invalidData) + require.Error(t, err) +} + +func TestEncryptionBlockUnmarshal_ValidInput_Succeed(t *testing.T) { + fieldName := "fieldName" + encBlock := Encryption{ + DocID: []byte("docID"), + Key: []byte("keyID"), + FieldName: &fieldName, + } + + marshaledData, err := encBlock.Marshal() + require.NoError(t, err) + + var unmarshaledBlock Encryption + err = unmarshaledBlock.Unmarshal(marshaledData) + require.NoError(t, err) + + require.Equal(t, encBlock, unmarshaledBlock) +} diff --git a/internal/core/block/errors.go b/internal/core/block/errors.go index 9b6b0e8a95..ced4c4d6a1 100644 --- a/internal/core/block/errors.go +++ b/internal/core/block/errors.go @@ -17,10 +17,12 @@ import ( ) const ( - errNodeToBlock string = "failed to convert node to block" - errEncodingBlock string = "failed to encode block" - errUnmarshallingBlock string = "failed to unmarshal block" - errGeneratingLink string = "failed to generate link" + errNodeToBlock string = "failed to convert node to block" + errEncodingBlock string = "failed to encode block" + errUnmarshallingBlock string = "failed to unmarshal block" + errGeneratingLink string = "failed to generate link" + errInvalidBlockEncryptionType string = "invalid block encryption type" + errInvalidBlockEncryptionKeyID string = "invalid block encryption key id" ) // Errors returnable from this package. @@ -28,10 +30,12 @@ const ( // This list is incomplete and undefined errors may also be returned. // Errors returned from this package may be tested against these errors with errors.Is. var ( - ErrNodeToBlock = errors.New(errNodeToBlock) - ErrEncodingBlock = errors.New(errEncodingBlock) - ErrUnmarshallingBlock = errors.New(errUnmarshallingBlock) - ErrGeneratingLink = errors.New(errGeneratingLink) + ErrNodeToBlock = errors.New(errNodeToBlock) + ErrEncodingBlock = errors.New(errEncodingBlock) + ErrUnmarshallingBlock = errors.New(errUnmarshallingBlock) + ErrGeneratingLink = errors.New(errGeneratingLink) + ErrInvalidBlockEncryptionType = errors.New(errInvalidBlockEncryptionType) + ErrInvalidBlockEncryptionKeyID = errors.New(errInvalidBlockEncryptionKeyID) ) // NewErrFailedToGetPriority returns an error indicating that the priority could not be retrieved. diff --git a/internal/core/crdt/composite.go b/internal/core/crdt/composite.go index 58372cfb49..c730badcb6 100644 --- a/internal/core/crdt/composite.go +++ b/internal/core/crdt/composite.go @@ -106,13 +106,13 @@ func (c CompositeDAG) Merge(ctx context.Context, delta core.Delta) error { if err != nil { return err } - return c.deleteWithPrefix(ctx, c.key.WithValueFlag().WithFieldId("")) + return c.deleteWithPrefix(ctx, c.key.WithValueFlag().WithFieldID("")) } // We cannot rely on the dagDelta.Status here as it may have been deleted locally, this is not // reflected in `dagDelta.Status` if sourced via P2P. Updates synced via P2P should not undelete - // the local reperesentation of the document. 
- versionKey := c.key.WithValueFlag().WithFieldId(core.DATASTORE_DOC_VERSION_FIELD_ID) + // the local representation of the document. + versionKey := c.key.WithValueFlag().WithFieldID(core.DATASTORE_DOC_VERSION_FIELD_ID) objectMarker, err := c.store.Get(ctx, c.key.ToPrimaryDataStoreKey().ToDS()) hasObjectMarker := !errors.Is(err, ds.ErrNotFound) if err != nil && hasObjectMarker { diff --git a/internal/core/crdt/lwwreg_test.go b/internal/core/crdt/lwwreg_test.go index 136d5cd09d..5b56df7636 100644 --- a/internal/core/crdt/lwwreg_test.go +++ b/internal/core/crdt/lwwreg_test.go @@ -16,6 +16,7 @@ import ( "testing" ds "github.com/ipfs/go-datastore" + "github.com/stretchr/testify/require" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/internal/core" @@ -31,11 +32,12 @@ func setupLWWRegister() LWWRegister { return NewLWWRegister(store, core.CollectionSchemaVersionKey{}, key, "") } -func setupLoadedLWWRegister(ctx context.Context) LWWRegister { +func setupLoadedLWWRegister(t *testing.T, ctx context.Context) LWWRegister { lww := setupLWWRegister() addDelta := lww.Set([]byte("test")) addDelta.SetPriority(1) - lww.Merge(ctx, addDelta) + err := lww.Merge(ctx, addDelta) + require.NoError(t, err) return lww } @@ -71,12 +73,13 @@ func TestLWWRegisterInitialMerge(t *testing.T) { } } -func TestLWWReisterFollowupMerge(t *testing.T) { +func TestLWWRegisterFollowupMerge(t *testing.T) { ctx := context.Background() - lww := setupLoadedLWWRegister(ctx) + lww := setupLoadedLWWRegister(t, ctx) addDelta := lww.Set([]byte("test2")) addDelta.SetPriority(2) - lww.Merge(ctx, addDelta) + err := lww.Merge(ctx, addDelta) + require.NoError(t, err) val, err := lww.Value(ctx) if err != nil { @@ -90,10 +93,11 @@ func TestLWWReisterFollowupMerge(t *testing.T) { func TestLWWRegisterOldMerge(t *testing.T) { ctx := context.Background() - lww := setupLoadedLWWRegister(ctx) + lww := setupLoadedLWWRegister(t, ctx) addDelta := lww.Set([]byte("test-1")) addDelta.SetPriority(0) - lww.Merge(ctx, addDelta) + err := lww.Merge(ctx, addDelta) + require.NoError(t, err) val, err := lww.Value(ctx) if err != nil { @@ -106,9 +110,7 @@ func TestLWWRegisterOldMerge(t *testing.T) { } func TestLWWRegisterDeltaInit(t *testing.T) { - delta := &LWWRegDelta{ - Data: []byte("test"), - } + delta := &LWWRegDelta{} var _ core.Delta = delta // checks if LWWRegDelta implements core.Delta (also checked in the implementation code, but w.e) } diff --git a/internal/core/key.go b/internal/core/key.go index b913a75f54..0e7942411d 100644 --- a/internal/core/key.go +++ b/internal/core/key.go @@ -127,7 +127,7 @@ var _ Key = (*PrimaryDataStoreKey)(nil) type HeadStoreKey struct { DocID string - FieldId string //can be 'C' + FieldID string //can be 'C' Cid cid.Cid } @@ -285,7 +285,7 @@ func NewHeadStoreKey(key string) (HeadStoreKey, error) { return HeadStoreKey{ // elements[0] is empty (key has leading '/') DocID: elements[1], - FieldId: elements[2], + FieldID: elements[2], Cid: cid, }, nil } @@ -471,16 +471,16 @@ func (k DataStoreKey) WithInstanceInfo(key DataStoreKey) DataStoreKey { return newKey } -func (k DataStoreKey) WithFieldId(fieldId string) DataStoreKey { +func (k DataStoreKey) WithFieldID(fieldID string) DataStoreKey { newKey := k - newKey.FieldID = fieldId + newKey.FieldID = fieldID return newKey } func (k DataStoreKey) ToHeadStoreKey() HeadStoreKey { return HeadStoreKey{ DocID: k.DocID, - FieldId: k.FieldID, + FieldID: k.FieldID, } } @@ -496,9 +496,9 @@ func (k HeadStoreKey) WithCid(c cid.Cid) HeadStoreKey { return newKey } 
-func (k HeadStoreKey) WithFieldId(fieldId string) HeadStoreKey { +func (k HeadStoreKey) WithFieldID(fieldID string) HeadStoreKey { newKey := k - newKey.FieldId = fieldId + newKey.FieldID = fieldID return newKey } @@ -858,8 +858,8 @@ func (k HeadStoreKey) ToString() string { if k.DocID != "" { result = result + "/" + k.DocID } - if k.FieldId != "" { - result = result + "/" + k.FieldId + if k.FieldID != "" { + result = result + "/" + k.FieldID } if k.Cid.Defined() { result = result + "/" + k.Cid.String() @@ -927,34 +927,3 @@ func bytesPrefixEnd(b []byte) []byte { // maximal byte string (i.e. already \xff...). return b } - -// EncStoreDocKey is a key for the encryption store. -type EncStoreDocKey struct { - DocID string - FieldName string -} - -var _ Key = (*EncStoreDocKey)(nil) - -// NewEncStoreDocKey creates a new EncStoreDocKey from a docID and fieldID. -func NewEncStoreDocKey(docID string, fieldName string) EncStoreDocKey { - return EncStoreDocKey{ - DocID: docID, - FieldName: fieldName, - } -} - -func (k EncStoreDocKey) ToString() string { - if k.FieldName == "" { - return k.DocID - } - return fmt.Sprintf("%s/%s", k.DocID, k.FieldName) -} - -func (k EncStoreDocKey) Bytes() []byte { - return []byte(k.ToString()) -} - -func (k EncStoreDocKey) ToDS() ds.Key { - return ds.NewKey(k.ToString()) -} diff --git a/internal/db/base/collection_keys.go b/internal/db/base/collection_keys.go index e23707285c..8878d50b13 100644 --- a/internal/db/base/collection_keys.go +++ b/internal/db/base/collection_keys.go @@ -45,7 +45,7 @@ func MakePrimaryIndexKeyForCRDT( case client.COMPOSITE: return MakeDataStoreKeyWithCollectionDescription(c.Description). WithInstanceInfo(key). - WithFieldId(core.COMPOSITE_NAMESPACE), + WithFieldID(core.COMPOSITE_NAMESPACE), nil case client.LWW_REGISTER, client.PN_COUNTER, client.P_COUNTER: field, ok := c.GetFieldByName(fieldName) @@ -55,7 +55,7 @@ func MakePrimaryIndexKeyForCRDT( return MakeDataStoreKeyWithCollectionDescription(c.Description). WithInstanceInfo(key). - WithFieldId(fmt.Sprint(field.ID)), + WithFieldID(fmt.Sprint(field.ID)), nil } return core.DataStoreKey{}, ErrInvalidCrdtType diff --git a/internal/db/collection.go b/internal/db/collection.go index e6205fecd9..6165218f78 100644 --- a/internal/db/collection.go +++ b/internal/db/collection.go @@ -915,7 +915,7 @@ func (c *collection) saveCompositeToMerkleCRDT( status client.DocumentStatus, ) (cidlink.Link, []byte, error) { txn := mustGetContextTxn(ctx) - dsKey = dsKey.WithFieldId(core.COMPOSITE_NAMESPACE) + dsKey = dsKey.WithFieldID(core.COMPOSITE_NAMESPACE) merkleCRDT := merklecrdt.NewMerkleCompositeDAG( txn, core.NewCollectionSchemaVersionKey(c.Schema().VersionID, c.ID()), diff --git a/internal/db/collection_delete.go b/internal/db/collection_delete.go index 082a53caf2..9ccca92ed5 100644 --- a/internal/db/collection_delete.go +++ b/internal/db/collection_delete.go @@ -144,7 +144,7 @@ func (c *collection) applyDelete( dsKey := primaryKey.ToDataStoreKey() headset := clock.NewHeadSet( txn.Headstore(), - dsKey.WithFieldId(core.COMPOSITE_NAMESPACE).ToHeadStoreKey(), + dsKey.WithFieldID(core.COMPOSITE_NAMESPACE).ToHeadStoreKey(), ) cids, _, err := headset.List(ctx) if err != nil { diff --git a/internal/db/config.go b/internal/db/config.go index 8ce725ebd0..3d69e833c4 100644 --- a/internal/db/config.go +++ b/internal/db/config.go @@ -19,12 +19,16 @@ const ( updateEventBufferSize = 100 ) +type dbOptions struct { + maxTxnRetries immutable.Option[int] +} + // Option is a funtion that sets a config value on the db. 
-type Option func(*dbOptions)
 
 // WithMaxRetries sets the maximum number of retries per transaction.
 func WithMaxRetries(num int) Option {
-	return func(db *db) {
-		db.maxTxnRetries = immutable.Some(num)
+	return func(opts *dbOptions) {
+		opts.maxTxnRetries = immutable.Some(num)
 	}
 }
diff --git a/internal/db/config_test.go b/internal/db/config_test.go
index 405e192598..a52d494a21 100644
--- a/internal/db/config_test.go
+++ b/internal/db/config_test.go
@@ -17,8 +17,8 @@ import (
 )
 
 func TestWithMaxRetries(t *testing.T) {
-	d := &db{}
-	WithMaxRetries(10)(d)
+	d := dbOptions{}
+	WithMaxRetries(10)(&d)
 	assert.True(t, d.maxTxnRetries.HasValue())
 	assert.Equal(t, 10, d.maxTxnRetries.Value())
 }
diff --git a/internal/db/context.go b/internal/db/context.go
index 8ad51c86ce..a2fa50507f 100644
--- a/internal/db/context.go
+++ b/internal/db/context.go
@@ -17,7 +17,6 @@ import (
 
 	acpIdentity "github.com/sourcenetwork/defradb/acp/identity"
 	"github.com/sourcenetwork/defradb/datastore"
-	"github.com/sourcenetwork/defradb/internal/encryption"
 )
 
 // txnContextKey is the key type for transaction context values.
@@ -58,12 +57,10 @@ func ensureContextTxn(ctx context.Context, db transactionDB, readOnly bool) (con
 	if ok {
 		return SetContextTxn(ctx, &explicitTxn{txn}), &explicitTxn{txn}, nil
 	}
-	// implicit transaction
 	txn, err := db.NewTxn(ctx, readOnly)
 	if err != nil {
 		return nil, txn, err
 	}
-	ctx = encryption.ContextWithStore(ctx, txn)
 	return SetContextTxn(ctx, txn), txn, nil
 }
diff --git a/internal/db/db.go b/internal/db/db.go
index 81ec48e199..d88c5920bc 100644
--- a/internal/db/db.go
+++ b/internal/db/db.go
@@ -117,8 +117,13 @@ func newDB(
 	}
 
 	// apply options
+	var opts dbOptions
 	for _, opt := range options {
-		opt(db)
+		opt(&opts)
+	}
+
+	if opts.maxTxnRetries.HasValue() {
+		db.maxTxnRetries = opts.maxTxnRetries
 	}
 
 	if lens != nil {
@@ -161,6 +166,11 @@ func (db *db) Blockstore() datastore.Blockstore {
 	return db.multistore.Blockstore()
 }
 
+// Encstore returns the internal enc store which contains encryption keys for documents and their fields.
+func (db *db) Encstore() datastore.Blockstore {
+	return db.multistore.Encstore()
+}
+
 // Peerstore returns the internal DAG store which contains IPLD blocks.
func (db *db) Peerstore() datastore.DSBatching { return db.multistore.Peerstore() diff --git a/internal/db/errors.go b/internal/db/errors.go index 2da8c9c734..612d5ddb40 100644 --- a/internal/db/errors.go +++ b/internal/db/errors.go @@ -101,6 +101,7 @@ const ( errReplicatorCollections string = "failed to get collections for replicator" errReplicatorNotFound string = "replicator not found" errCanNotEncryptBuiltinField string = "can not encrypt build-in field" + errFailedToHandleEncKeysReceivedEvent string = "failed to handle encryption-keys-received event" errSelfReferenceWithoutSelf string = "must specify 'Self' kind for self referencing relations" errColNotMaterialized string = "non-materialized collections are not supported" errMaterializedViewAndACPNotSupported string = "materialized views do not support ACP" diff --git a/internal/db/fetcher/dag.go b/internal/db/fetcher/dag.go index cec1121827..3d3a6dd85e 100644 --- a/internal/db/fetcher/dag.go +++ b/internal/db/fetcher/dag.go @@ -92,7 +92,7 @@ func (hf *HeadFetcher) FetchNext() (*cid.Cid, error) { return nil, err } - if hf.fieldId.HasValue() && hf.fieldId.Value() != headStoreKey.FieldId { + if hf.fieldId.HasValue() && hf.fieldId.Value() != headStoreKey.FieldID { // FieldIds do not match, continue to next row return hf.FetchNext() } diff --git a/internal/db/fetcher/versioned.go b/internal/db/fetcher/versioned.go index 0ff58c4eeb..80b71cdd88 100644 --- a/internal/db/fetcher/versioned.go +++ b/internal/db/fetcher/versioned.go @@ -415,8 +415,7 @@ func (vf *VersionedFetcher) processBlock( vf.mCRDTs[crdtIndex] = mcrdt } - err = mcrdt.Clock().ProcessBlock(vf.ctx, block, blockLink, false) - return err + return mcrdt.Clock().ProcessBlock(vf.ctx, block, blockLink) } func (vf *VersionedFetcher) getDAGBlock(c cid.Cid) (*coreblock.Block, error) { diff --git a/internal/db/indexed_docs_test.go b/internal/db/indexed_docs_test.go index fad45aa11f..9f4ea3fe72 100644 --- a/internal/db/indexed_docs_test.go +++ b/internal/db/indexed_docs_test.go @@ -309,7 +309,7 @@ func TestNonUnique_IfDocWithDescendingOrderIsAdded_ShouldBeIndexed(t *testing.T) assert.Len(t, data, 0) } -func TestNonUnique_IfFailsToStoredIndexedDoc_Error(t *testing.T) { +func TestNonUnique_IfFailsToStoreIndexedDoc_Error(t *testing.T) { f := newIndexTestFixture(t) defer f.db.Close() f.createUserCollectionIndexOnName() @@ -698,7 +698,7 @@ func TestNonUniqueCreate_IfDatastoreFailsToStoreIndex_ReturnError(t *testing.T) fieldKeyString := core.DataStoreKey{ CollectionRootID: f.users.Description().RootID, }.WithDocID(doc.ID().String()). - WithFieldId("1"). + WithFieldID("1"). WithValueFlag(). 
ToString()
diff --git a/internal/db/merge.go b/internal/db/merge.go
index e588cb60a4..58c89cfc4e 100644
--- a/internal/db/merge.go
+++ b/internal/db/merge.go
@@ -16,6 +16,7 @@ import (
 	"sync"
 
 	"github.com/ipfs/go-cid"
+	ipld "github.com/ipfs/go-ipld-format"
 	"github.com/ipld/go-ipld-prime/linking"
 	cidlink "github.com/ipld/go-ipld-prime/linking/cid"
 
@@ -28,6 +29,7 @@ import (
 	"github.com/sourcenetwork/defradb/internal/core"
 	coreblock "github.com/sourcenetwork/defradb/internal/core/block"
 	"github.com/sourcenetwork/defradb/internal/db/base"
+	"github.com/sourcenetwork/defradb/internal/encryption"
 	"github.com/sourcenetwork/defradb/internal/merkle/clock"
 	merklecrdt "github.com/sourcenetwork/defradb/internal/merkle/crdt"
 )
@@ -44,21 +46,18 @@ func (db *db) executeMerge(ctx context.Context, dagMerge event.Merge) error {
 		return err
 	}
 
-	ls := cidlink.DefaultLinkSystem()
-	ls.SetReadStorage(txn.Blockstore().AsIPLDStorage())
-
 	docID, err := client.NewDocIDFromString(dagMerge.DocID)
 	if err != nil {
 		return err
 	}
 	dsKey := base.MakeDataStoreKeyWithCollectionAndDocID(col.Description(), docID.String())
 
-	mp, err := db.newMergeProcessor(txn, ls, col, dsKey)
+	mp, err := db.newMergeProcessor(txn, col, dsKey)
 	if err != nil {
 		return err
 	}
 
-	mt, err := getHeadsAsMergeTarget(ctx, txn, dsKey)
+	mt, err := getHeadsAsMergeTarget(ctx, txn, dsKey.WithFieldID(core.COMPOSITE_NAMESPACE))
 	if err != nil {
 		return err
 	}
@@ -130,26 +129,40 @@ func (m *mergeQueue) done(docID string) {
 
 type mergeProcessor struct {
 	txn        datastore.Txn
-	lsys       linking.LinkSystem
+	blockLS    linking.LinkSystem
+	encBlockLS linking.LinkSystem
 	mCRDTs     map[string]merklecrdt.MerkleCRDT
 	col        *collection
 	dsKey      core.DataStoreKey
+	// composites is a list of composites that need to be merged.
 	composites *list.List
+	// missingEncryptionBlocks is a map of links to encryption blocks that we failed to fetch.
+	missingEncryptionBlocks map[cidlink.Link]struct{}
+	// availableEncryptionBlocks is a map of encryption blocks that we have successfully fetched.
+	availableEncryptionBlocks map[cidlink.Link]*coreblock.Encryption
 }
 
 func (db *db) newMergeProcessor(
 	txn datastore.Txn,
-	lsys linking.LinkSystem,
 	col *collection,
 	dsKey core.DataStoreKey,
 ) (*mergeProcessor, error) {
+	blockLS := cidlink.DefaultLinkSystem()
+	blockLS.SetReadStorage(txn.Blockstore().AsIPLDStorage())
+
+	encBlockLS := cidlink.DefaultLinkSystem()
+	encBlockLS.SetReadStorage(txn.Encstore().AsIPLDStorage())
+
 	return &mergeProcessor{
-		txn:        txn,
-		lsys:       lsys,
-		mCRDTs:     make(map[string]merklecrdt.MerkleCRDT),
-		col:        col,
-		dsKey:      dsKey,
-		composites: list.New(),
+		txn:                       txn,
+		blockLS:                   blockLS,
+		encBlockLS:                encBlockLS,
+		mCRDTs:                    make(map[string]merklecrdt.MerkleCRDT),
+		col:                       col,
+		dsKey:                     dsKey,
+		composites:                list.New(),
+		missingEncryptionBlocks:   make(map[cidlink.Link]struct{}),
+		availableEncryptionBlocks: make(map[cidlink.Link]*coreblock.Encryption),
 	}, nil
 }
 
@@ -165,7 +178,7 @@ func newMergeTarget() mergeTarget {
 	}
 }
 
 // loadComposites retrieves and stores into the merge processor the composite blocks for the given
-// document until it reaches a block that has already been merged or until we reach the genesis block.
+// CID until it reaches a block that has already been merged or until we reach the genesis block.
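+// Each block is pushed to the front of the processor's composites list, so the
+// front-to-back pass in mergeComposites merges older blocks before newer ones.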
func (mp *mergeProcessor) loadComposites( ctx context.Context, blockCid cid.Cid, @@ -176,7 +189,7 @@ func (mp *mergeProcessor) loadComposites( return nil } - nd, err := mp.lsys.Load(linking.LinkContext{Ctx: ctx}, cidlink.Link{Cid: blockCid}, coreblock.SchemaPrototype) + nd, err := mp.blockLS.Load(linking.LinkContext{Ctx: ctx}, cidlink.Link{Cid: blockCid}, coreblock.SchemaPrototype) if err != nil { return err } @@ -191,12 +204,10 @@ func (mp *mergeProcessor) loadComposites( // In this case, we also need to walk back the merge target's DAG until we reach a common block. if block.Delta.GetPriority() >= mt.headHeight { mp.composites.PushFront(block) - for _, link := range block.Links { - if link.Name == core.HEAD { - err := mp.loadComposites(ctx, link.Cid, mt) - if err != nil { - return err - } + for _, prevCid := range block.GetHeadLinks() { + err := mp.loadComposites(ctx, prevCid, mt) + if err != nil { + return err } } } else { @@ -204,7 +215,7 @@ func (mp *mergeProcessor) loadComposites( for _, b := range mt.heads { for _, link := range b.Links { if link.Name == core.HEAD { - nd, err := mp.lsys.Load(linking.LinkContext{Ctx: ctx}, link.Link, coreblock.SchemaPrototype) + nd, err := mp.blockLS.Load(linking.LinkContext{Ctx: ctx}, link.Link, coreblock.SchemaPrototype) if err != nil { return err } @@ -227,15 +238,50 @@ func (mp *mergeProcessor) loadComposites( func (mp *mergeProcessor) mergeComposites(ctx context.Context) error { for e := mp.composites.Front(); e != nil; e = e.Next() { block := e.Value.(*coreblock.Block) - var onlyHeads bool - if block.IsEncrypted != nil && *block.IsEncrypted { - onlyHeads = true - } link, err := block.GenerateLink() if err != nil { return err } - err = mp.processBlock(ctx, block, link, onlyHeads) + err = mp.processBlock(ctx, block, link) + if err != nil { + return err + } + } + + return mp.tryFetchMissingBlocksAndMerge(ctx) +} + +func (mp *mergeProcessor) tryFetchMissingBlocksAndMerge(ctx context.Context) error { + for len(mp.missingEncryptionBlocks) > 0 { + links := make([]cidlink.Link, 0, len(mp.missingEncryptionBlocks)) + for link := range mp.missingEncryptionBlocks { + links = append(links, link) + } + msg, results := encryption.NewRequestKeysMessage(links) + mp.col.db.events.Publish(msg) + + res := <-results.Get() + if res.Error != nil { + return res.Error + } + + clear(mp.missingEncryptionBlocks) + + for i := range res.Items { + _, link, err := cid.CidFromBytes(res.Items[i].Link) + if err != nil { + return err + } + var encBlock coreblock.Encryption + err = encBlock.Unmarshal(res.Items[i].Block) + if err != nil { + return err + } + + mp.availableEncryptionBlocks[cidlink.Link{Cid: link}] = &encBlock + } + + err := mp.mergeComposites(ctx) if err != nil { return err } @@ -243,36 +289,109 @@ func (mp *mergeProcessor) mergeComposites(ctx context.Context) error { return nil } +func (mp *mergeProcessor) loadEncryptionBlock( + ctx context.Context, + encLink cidlink.Link, +) (*coreblock.Encryption, error) { + nd, err := mp.encBlockLS.Load(linking.LinkContext{Ctx: ctx}, encLink, coreblock.EncryptionSchemaPrototype) + if err != nil { + if errors.Is(err, ipld.ErrNotFound{}) { + mp.missingEncryptionBlocks[encLink] = struct{}{} + return nil, nil + } + return nil, err + } + + return coreblock.GetEncryptionBlockFromNode(nd) +} + +func (mp *mergeProcessor) tryGetEncryptionBlock( + ctx context.Context, + encLink cidlink.Link, +) (*coreblock.Encryption, error) { + if encBlock, ok := mp.availableEncryptionBlocks[encLink]; ok { + return encBlock, nil + } + if _, ok := 
mp.missingEncryptionBlocks[encLink]; ok {
+		return nil, nil
+	}
+
+	encBlock, err := mp.loadEncryptionBlock(ctx, encLink)
+	if err != nil {
+		return nil, err
+	}
+
+	if encBlock != nil {
+		mp.availableEncryptionBlocks[encLink] = encBlock
+	}
+
+	return encBlock, nil
+}
+
+// processEncryptedBlock decrypts the block if it is encrypted and returns the decrypted block.
+// If the block is encrypted and we were not able to decrypt it, it returns false as the second return value,
+// which indicates that we can't read the block.
+// If we were able to decrypt the block, we return the decrypted block and true as the second return value.
+func (mp *mergeProcessor) processEncryptedBlock(
+	ctx context.Context,
+	dagBlock *coreblock.Block,
+) (*coreblock.Block, bool, error) {
+	if dagBlock.IsEncrypted() {
+		encBlock, err := mp.tryGetEncryptionBlock(ctx, *dagBlock.Encryption)
+		if err != nil {
+			return nil, false, err
+		}
+
+		if encBlock == nil {
+			return dagBlock, false, nil
+		}
+
+		plainTextBlock, err := decryptBlock(ctx, dagBlock, encBlock)
+		if err != nil {
+			return nil, false, err
+		}
+		if plainTextBlock != nil {
+			return plainTextBlock, true, nil
+		}
+	}
+	return dagBlock, true, nil
+}
+
 // processBlock merges the block and its children to the datastore and sets the head accordingly.
-// If onlyHeads is true, it will skip merging and update only the heads.
 func (mp *mergeProcessor) processBlock(
 	ctx context.Context,
-	block *coreblock.Block,
+	dagBlock *coreblock.Block,
 	blockLink cidlink.Link,
-	onlyHeads bool,
 ) error {
-	crdt, err := mp.initCRDTForType(block.Delta.GetFieldName())
+	block, canRead, err := mp.processEncryptedBlock(ctx, dagBlock)
 	if err != nil {
 		return err
 	}
-	// If the CRDT is nil, it means the field is not part
-	// of the schema and we can safely ignore it.
-	if crdt == nil {
-		return nil
-	}
+	if canRead {
+		crdt, err := mp.initCRDTForType(dagBlock.Delta.GetFieldName())
+		if err != nil {
+			return err
+		}
 
-	err = crdt.Clock().ProcessBlock(ctx, block, blockLink, onlyHeads)
-	if err != nil {
-		return err
+		// If the CRDT is nil, it means the field is not part
+		// of the schema and we can safely ignore it.
+ if crdt == nil { + return nil + } + + err = crdt.Clock().ProcessBlock(ctx, block, blockLink) + if err != nil { + return err + } } - for _, link := range block.Links { + for _, link := range dagBlock.Links { if link.Name == core.HEAD { continue } - nd, err := mp.lsys.Load(linking.LinkContext{Ctx: ctx}, link.Link, coreblock.SchemaPrototype) + nd, err := mp.blockLS.Load(linking.LinkContext{Ctx: ctx}, link.Link, coreblock.SchemaPrototype) if err != nil { return err } @@ -282,7 +401,7 @@ func (mp *mergeProcessor) processBlock( return err } - if err := mp.processBlock(ctx, childBlock, link.Link, onlyHeads); err != nil { + if err := mp.processBlock(ctx, childBlock, link.Link); err != nil { return err } } @@ -290,9 +409,31 @@ func (mp *mergeProcessor) processBlock( return nil } -func (mp *mergeProcessor) initCRDTForType( - field string, -) (merklecrdt.MerkleCRDT, error) { +func decryptBlock( + ctx context.Context, + block *coreblock.Block, + encBlock *coreblock.Encryption, +) (*coreblock.Block, error) { + _, encryptor := encryption.EnsureContextWithEncryptor(ctx) + + if block.Delta.IsComposite() { + // for composite blocks there is nothing to decrypt + return block, nil + } + + bytes, err := encryptor.Decrypt(block.Delta.GetData(), encBlock.Key) + if err != nil { + return nil, err + } + if len(bytes) == 0 { + return nil, nil + } + newBlock := block.Clone() + newBlock.Delta.SetData(bytes) + return newBlock, nil +} + +func (mp *mergeProcessor) initCRDTForType(field string) (merklecrdt.MerkleCRDT, error) { mcrdt, exists := mp.mCRDTs[field] if exists { return mcrdt, nil @@ -307,7 +448,7 @@ func (mp *mergeProcessor) initCRDTForType( mcrdt = merklecrdt.NewMerkleCompositeDAG( mp.txn, schemaVersionKey, - mp.dsKey.WithFieldId(core.COMPOSITE_NAMESPACE), + mp.dsKey.WithFieldID(core.COMPOSITE_NAMESPACE), "", ) mp.mCRDTs[field] = mcrdt @@ -325,7 +466,7 @@ func (mp *mergeProcessor) initCRDTForType( schemaVersionKey, fd.Typ, fd.Kind, - mp.dsKey.WithFieldId(fd.ID.String()), + mp.dsKey.WithFieldID(fd.ID.String()), field, ) if err != nil { @@ -357,24 +498,15 @@ func getCollectionFromRootSchema(ctx context.Context, db *db, rootSchema string) // getHeadsAsMergeTarget retrieves the heads of the composite DAG for the given document // and returns them as a merge target. func getHeadsAsMergeTarget(ctx context.Context, txn datastore.Txn, dsKey core.DataStoreKey) (mergeTarget, error) { - headset := clock.NewHeadSet( - txn.Headstore(), - dsKey.WithFieldId(core.COMPOSITE_NAMESPACE).ToHeadStoreKey(), - ) + cids, err := getHeads(ctx, txn, dsKey) - cids, _, err := headset.List(ctx) if err != nil { return mergeTarget{}, err } mt := newMergeTarget() for _, cid := range cids { - b, err := txn.Blockstore().Get(ctx, cid) - if err != nil { - return mergeTarget{}, err - } - - block, err := coreblock.GetFromBytes(b.RawData()) + block, err := loadBlockFromBlockStore(ctx, txn, cid) if err != nil { return mergeTarget{}, err } @@ -386,6 +518,33 @@ func getHeadsAsMergeTarget(ctx context.Context, txn datastore.Txn, dsKey core.Da return mt, nil } +// getHeads retrieves the heads associated with the given datastore key. +func getHeads(ctx context.Context, txn datastore.Txn, dsKey core.DataStoreKey) ([]cid.Cid, error) { + headset := clock.NewHeadSet(txn.Headstore(), dsKey.ToHeadStoreKey()) + + cids, _, err := headset.List(ctx) + if err != nil { + return nil, err + } + + return cids, nil +} + +// loadBlockFromBlockStore loads a block from the blockstore. 
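The retry loop in `tryFetchMissingBlocksAndMerge` above hinges on the one-shot request/response channel pair that `encryption.NewRequestKeysMessage` returns: the requester blocks on the read side while the responder fulfils (and closes) the write side. A stripped-down, stdlib-only sketch of that shape; all names here are illustrative stand-ins for the defradb types:

```go
package main

import "fmt"

// result mirrors encryption.Result: items plus a single error.
type result struct {
	items []string
	err   error
}

// results mirrors encryption.Results: a one-shot, buffered response channel.
type results struct{ output chan result }

func (r *results) Get() <-chan result { return r.output }

// newResults mirrors encryption.NewResults: the consumer keeps the read side,
// while the write side travels inside the published event message.
func newResults() (*results, chan<- result) {
	ch := make(chan result, 1) // buffered so the responder never blocks
	return &results{output: ch}, ch
}

func main() {
	res, ch := newResults()

	// A responder (the KMS service, in this patch) fulfils the request.
	go func() { ch <- result{items: []string{"enc-block-1"}} }()

	// The merge processor blocks until keys arrive, then retries the merge.
	r := <-res.Get()
	fmt.Println(r.items, r.err)
}
```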
+func loadBlockFromBlockStore(ctx context.Context, txn datastore.Txn, cid cid.Cid) (*coreblock.Block, error) { + b, err := txn.Blockstore().Get(ctx, cid) + if err != nil { + return nil, err + } + + block, err := coreblock.GetFromBytes(b.RawData()) + if err != nil { + return nil, err + } + + return block, nil +} + func syncIndexedDoc( ctx context.Context, docID client.DocID, @@ -406,10 +565,10 @@ func syncIndexedDoc( return err } - if isDeletedDoc { - return col.deleteIndexedDoc(ctx, oldDoc) - } else if isNewDoc { + if isNewDoc { return col.indexNewDoc(ctx, doc) + } else if isDeletedDoc { + return col.deleteIndexedDoc(ctx, oldDoc) } else { return col.updateDocIndex(ctx, oldDoc, doc) } diff --git a/internal/db/p2p_replicator.go b/internal/db/p2p_replicator.go index 409419ea3f..b66ab4f2cf 100644 --- a/internal/db/p2p_replicator.go +++ b/internal/db/p2p_replicator.go @@ -161,7 +161,7 @@ func (db *db) getDocsHeads( docID := core.DataStoreKeyFromDocID(docIDResult.ID) headset := clock.NewHeadSet( txn.Headstore(), - docID.WithFieldId(core.COMPOSITE_NAMESPACE).ToHeadStoreKey(), + docID.WithFieldID(core.COMPOSITE_NAMESPACE).ToHeadStoreKey(), ) cids, _, err := headset.List(ctx) if err != nil { diff --git a/internal/encryption/aes.go b/internal/encryption/aes.go deleted file mode 100644 index e3a7feb563..0000000000 --- a/internal/encryption/aes.go +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright 2024 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package encryption - -import ( - "crypto/aes" - "crypto/cipher" - "encoding/base64" - "fmt" -) - -// EncryptAES encrypts data using AES-GCM with a provided key. -func EncryptAES(plainText, key []byte) ([]byte, error) { - block, err := aes.NewCipher(key) - if err != nil { - return nil, err - } - - nonce, err := generateNonceFunc() - if err != nil { - return nil, err - } - - aesGCM, err := cipher.NewGCM(block) - if err != nil { - return nil, err - } - - cipherText := aesGCM.Seal(nonce, nonce, plainText, nil) - - buf := make([]byte, base64.StdEncoding.EncodedLen(len(cipherText))) - base64.StdEncoding.Encode(buf, cipherText) - - return buf, nil -} - -// DecryptAES decrypts AES-GCM encrypted data with a provided key. 
-func DecryptAES(cipherTextBase64, key []byte) ([]byte, error) { - cipherText := make([]byte, base64.StdEncoding.DecodedLen(len(cipherTextBase64))) - n, err := base64.StdEncoding.Decode(cipherText, []byte(cipherTextBase64)) - - if err != nil { - return nil, err - } - - cipherText = cipherText[:n] - - block, err := aes.NewCipher(key) - if err != nil { - return nil, err - } - - if len(cipherText) < nonceLength { - return nil, fmt.Errorf("cipherText too short") - } - - nonce := cipherText[:nonceLength] - cipherText = cipherText[nonceLength:] - - aesGCM, err := cipher.NewGCM(block) - if err != nil { - return nil, err - } - - plainText, err := aesGCM.Open(nil, nonce, cipherText, nil) - if err != nil { - return nil, err - } - - return plainText, nil -} diff --git a/internal/encryption/context.go b/internal/encryption/context.go index 96e90a7e0c..422bd97697 100644 --- a/internal/encryption/context.go +++ b/internal/encryption/context.go @@ -14,8 +14,6 @@ import ( "context" "github.com/sourcenetwork/immutable" - - "github.com/sourcenetwork/defradb/datastore" ) // docEncContextKey is the key type for document encryption context values. @@ -24,37 +22,34 @@ type docEncContextKey struct{} // configContextKey is the key type for encryption context values. type configContextKey struct{} -// TryGetContextDocEnc returns a document encryption and a bool indicating if -// it was retrieved from the given context. -func TryGetContextEncryptor(ctx context.Context) (*DocEncryptor, bool) { +// GetEncryptorFromContext returns a document encryptor from the given context. +// It returns nil if no encryptor exists in the context. +func GetEncryptorFromContext(ctx context.Context) *DocEncryptor { enc, ok := ctx.Value(docEncContextKey{}).(*DocEncryptor) if ok { setConfig(ctx, enc) } - return enc, ok + return enc } func setConfig(ctx context.Context, enc *DocEncryptor) { enc.SetConfig(GetContextConfig(ctx)) + enc.ctx = ctx } -func ensureContextWithDocEnc(ctx context.Context) (context.Context, *DocEncryptor) { - enc, ok := TryGetContextEncryptor(ctx) - if !ok { +// EnsureContextWithEncryptor returns a context with a document encryptor and the +// document encryptor itself. If the context already has an encryptor, it +// returns the context and encryptor as is. Otherwise, it creates a new +// document encryptor and stores it in the context. +func EnsureContextWithEncryptor(ctx context.Context) (context.Context, *DocEncryptor) { + enc := GetEncryptorFromContext(ctx) + if enc == nil { enc = newDocEncryptor(ctx) ctx = context.WithValue(ctx, docEncContextKey{}, enc) } return ctx, enc } -// ContextWithStore sets the store on the doc encryptor in the context. -// If the doc encryptor is not present, it will be created. -func ContextWithStore(ctx context.Context, txn datastore.Txn) context.Context { - ctx, encryptor := ensureContextWithDocEnc(ctx) - encryptor.SetStore(txn.Encstore()) - return ctx -} - // GetContextConfig returns the doc encryption config from the given context. func GetContextConfig(ctx context.Context) immutable.Option[DocEncConfig] { encConfig, ok := ctx.Value(configContextKey{}).(DocEncConfig) @@ -66,6 +61,7 @@ func GetContextConfig(ctx context.Context) immutable.Option[DocEncConfig] { // SetContextConfig returns a new context with the doc encryption config set. 
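The rewritten context helpers (`GetEncryptorFromContext`, `EnsureContextWithEncryptor`, `SetContextConfig`) follow the standard context-value "ensure" idiom: reuse the value if present, otherwise attach a fresh one. A compact, runnable sketch of the pattern with hypothetical names:

```go
package main

import (
	"context"
	"fmt"
)

type encryptorKey struct{}

type docEncryptor struct{ keys map[string][]byte }

// ensureEncryptor mirrors EnsureContextWithEncryptor: reuse the encryptor if the
// context already carries one, otherwise attach a fresh one and return the new ctx.
func ensureEncryptor(ctx context.Context) (context.Context, *docEncryptor) {
	if enc, ok := ctx.Value(encryptorKey{}).(*docEncryptor); ok {
		return ctx, enc
	}
	enc := &docEncryptor{keys: map[string][]byte{}}
	return context.WithValue(ctx, encryptorKey{}, enc), enc
}

func main() {
	ctx, enc1 := ensureEncryptor(context.Background())
	_, enc2 := ensureEncryptor(ctx)
	fmt.Println(enc1 == enc2) // true: one encryptor per session context
}
```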
func SetContextConfig(ctx context.Context, encConfig DocEncConfig) context.Context {
+	ctx, _ = EnsureContextWithEncryptor(ctx)
 	return context.WithValue(ctx, configContextKey{}, encConfig)
 }
diff --git a/internal/encryption/encryptor.go b/internal/encryption/encryptor.go
index 9a6cb8f6f0..fdd2efec3d 100644
--- a/internal/encryption/encryptor.go
+++ b/internal/encryption/encryptor.go
@@ -13,15 +13,13 @@ package encryption
 import (
 	"context"
 	"crypto/rand"
-	"errors"
 	"io"
-
-	ds "github.com/ipfs/go-datastore"
+	"os"
+	"strings"
 
 	"github.com/sourcenetwork/immutable"
 
-	"github.com/sourcenetwork/defradb/datastore"
-	"github.com/sourcenetwork/defradb/internal/core"
+	"github.com/sourcenetwork/defradb/crypto"
 )
 
 var generateEncryptionKeyFunc = generateEncryptionKey
@@ -31,7 +29,7 @@ const keyLength = 32 // 32 bytes for AES-256
 const testEncryptionKey = "examplekey1234567890examplekey12"
 
 // generateEncryptionKey generates a random AES key.
-func generateEncryptionKey(_, _ string) ([]byte, error) {
+func generateEncryptionKey(_ string, _ immutable.Option[string]) ([]byte, error) {
 	key := make([]byte, keyLength)
 	if _, err := io.ReadFull(rand.Reader, key); err != nil {
 		return nil, err
@@ -42,21 +40,28 @@ func generateEncryptionKey(_, _ string) ([]byte, error) {
 // generateTestEncryptionKey generates a deterministic encryption key for testing.
 // While testing, we also want to make sure different keys are generated for different docs and fields
 // and that's why we use the docID and fieldName to generate the key.
-func generateTestEncryptionKey(docID, fieldName string) ([]byte, error) {
-	return []byte(fieldName + docID + testEncryptionKey)[0:keyLength], nil
+func generateTestEncryptionKey(docID string, fieldName immutable.Option[string]) ([]byte, error) {
+	return []byte(fieldName.Value() + docID + testEncryptionKey)[0:keyLength], nil
 }
 
 // DocEncryptor is a document encryptor that encrypts and decrypts individual document fields.
 // It acts based on the configuration [DocEncConfig] provided and data stored in the provided store.
-// It uses [core.EncStoreDocKey] to store and retrieve encryption keys.
+// DocEncryptor is session-bound, i.e. once a user requests to create (or update) a document, or a node
+// receives an update event on a document (or any other event), a new DocEncryptor is created and stored
+// in the context, so that the same DocEncryptor can be used by other objects down the call chain.
 type DocEncryptor struct {
-	conf  immutable.Option[DocEncConfig]
-	ctx   context.Context
-	store datastore.DSReaderWriter
+	conf          immutable.Option[DocEncConfig]
+	ctx           context.Context
+	generatedKeys map[genK][]byte
+}
+
+type genK struct {
+	docID     string
+	fieldName immutable.Option[string]
 }
 
 func newDocEncryptor(ctx context.Context) *DocEncryptor {
-	return &DocEncryptor{ctx: ctx}
+	return &DocEncryptor{ctx: ctx, generatedKeys: make(map[genK][]byte)}
 }
 
 // SetConfig sets the configuration for the document encryptor.
@@ -64,141 +69,124 @@ func (d *DocEncryptor) SetConfig(conf immutable.Option[DocEncConfig]) {
 	d.conf = conf
 }
 
-// SetStore sets the store for the document encryptor.
-func (d *DocEncryptor) SetStore(store datastore.DSReaderWriter) {
-	d.store = store
-}
-
-func shouldEncryptIndividualField(conf immutable.Option[DocEncConfig], fieldName string) bool {
-	if !conf.HasValue() || fieldName == "" {
+func shouldEncryptIndividualField(conf immutable.Option[DocEncConfig], fieldName immutable.Option[string]) bool {
+	if !conf.HasValue() || !fieldName.HasValue() {
 		return false
 	}
 	for _, field := range conf.Value().EncryptedFields {
-		if field == fieldName {
+		if field == fieldName.Value() {
 			return true
 		}
 	}
 	return false
 }
 
-func shouldEncryptField(conf immutable.Option[DocEncConfig], fieldName string) bool {
+func shouldEncryptDocField(conf immutable.Option[DocEncConfig], fieldName immutable.Option[string]) bool {
 	if !conf.HasValue() {
 		return false
 	}
 	if conf.Value().IsDocEncrypted {
 		return true
 	}
-	if fieldName == "" {
+	if !fieldName.HasValue() {
 		return false
 	}
 	for _, field := range conf.Value().EncryptedFields {
-		if field == fieldName {
+		if field == fieldName.Value() {
 			return true
 		}
 	}
 	return false
 }
 
-// Encrypt encrypts the given plainText that is associated with the given docID and fieldName.
-// If the current configuration is set to encrypt the given key individually, it will encrypt it with a new key.
-// Otherwise, it will use document-level encryption key.
-func (d *DocEncryptor) Encrypt(docID, fieldName string, plainText []byte) ([]byte, error) {
-	encryptionKey, err := d.fetchEncryptionKey(docID, fieldName)
-	if err != nil {
-		return nil, err
+// Encrypt encrypts the given plainText with the given encryption key.
+func (d *DocEncryptor) Encrypt(
+	plainText, encryptionKey []byte,
+) ([]byte, error) {
+	var cipherText []byte
+	var err error
+	if len(plainText) > 0 {
+		cipherText, _, err = crypto.EncryptAES(plainText, encryptionKey, nil, true)
 	}
-	if len(encryptionKey) == 0 {
-		if !shouldEncryptIndividualField(d.conf, fieldName) {
-			fieldName = ""
-		}
-
-		if !shouldEncryptField(d.conf, fieldName) {
-			return plainText, nil
-		}
-
-		encryptionKey, err = generateEncryptionKeyFunc(docID, fieldName)
-		if err != nil {
-			return nil, err
-		}
-
-		storeKey := core.NewEncStoreDocKey(docID, fieldName)
-		err = d.store.Put(d.ctx, storeKey.ToDS(), encryptionKey)
-		if err != nil {
-			return nil, err
-		}
-	}
-	return EncryptAES(plainText, encryptionKey)
+	return cipherText, err
 }
 
 // Decrypt decrypts the given cipherText with the given encryption key.
 // If the encryption key is empty, it returns nil.
-func (d *DocEncryptor) Decrypt(docID, fieldName string, cipherText []byte) ([]byte, error) {
-	encKey, err := d.fetchEncryptionKey(docID, fieldName)
-	if err != nil {
-		return nil, err
-	}
+func (d *DocEncryptor) Decrypt(
+	cipherText, encKey []byte,
+) ([]byte, error) {
 	if len(encKey) == 0 {
 		return nil, nil
 	}
-	return DecryptAES(cipherText, encKey)
+	return crypto.DecryptAES(nil, cipherText, encKey, nil)
+}
+
+// getGeneratedKeyFor returns the generated key for the given docID and fieldName.
+func (d *DocEncryptor) getGeneratedKeyFor(
+	docID string,
+	fieldName immutable.Option[string],
+) []byte {
+	return d.generatedKeys[genK{docID, fieldName}]
+}
+
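`GetOrGenerateEncryptionKey` below memoizes generated keys per `(docID, fieldName)` pair, with `immutable.None` standing for the document-level key. A self-contained sketch of that memoization using the same `sourcenetwork/immutable` option type; `getOrGenerate` and the package-level map are illustrative stand-ins:

```go
package main

import (
	"crypto/rand"
	"fmt"

	"github.com/sourcenetwork/immutable"
)

// genK mirrors the patch: a comparable map key over docID and an optional field
// name, so doc-level keys (None) and field-level keys (Some) share one map.
type genK struct {
	docID     string
	fieldName immutable.Option[string]
}

var generated = map[genK][]byte{}

func getOrGenerate(docID string, fieldName immutable.Option[string]) ([]byte, error) {
	k := genK{docID, fieldName}
	if key, ok := generated[k]; ok {
		return key, nil
	}
	key := make([]byte, 32)
	if _, err := rand.Read(key); err != nil {
		return nil, err
	}
	generated[k] = key
	return key, nil
}

func main() {
	docKey, _ := getOrGenerate("doc1", immutable.None[string]())
	fieldKey, _ := getOrGenerate("doc1", immutable.Some("field1"))
	again, _ := getOrGenerate("doc1", immutable.None[string]())
	fmt.Println(len(docKey), len(fieldKey), string(docKey) == string(again)) // 32 32 true
}
```

+// GetOrGenerateEncryptionKey returns the generated encryption key for the given docID, (optional) fieldName.
+// If the key has not been generated before, it generates a new key and stores it.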
+func (d *DocEncryptor) GetOrGenerateEncryptionKey( + docID string, + fieldName immutable.Option[string], +) ([]byte, error) { + encryptionKey := d.getGeneratedKeyFor(docID, fieldName) + if len(encryptionKey) > 0 { + return encryptionKey, nil + } + + return d.generateEncryptionKey(docID, fieldName) } -// fetchEncryptionKey fetches the encryption key for the given docID and fieldName. -// If the key is not found, it returns an empty key. -func (d *DocEncryptor) fetchEncryptionKey(docID string, fieldName string) ([]byte, error) { - if d.store == nil { - return nil, ErrNoStorageProvided +// generateEncryptionKey generates a new encryption key for the given docID and fieldName. +func (d *DocEncryptor) generateEncryptionKey( + docID string, + fieldName immutable.Option[string], +) ([]byte, error) { + if !shouldEncryptIndividualField(d.conf, fieldName) { + fieldName = immutable.None[string]() + } + + if !shouldEncryptDocField(d.conf, fieldName) { + return nil, nil } - // first we try to find field-level key - storeKey := core.NewEncStoreDocKey(docID, fieldName) - encryptionKey, err := d.store.Get(d.ctx, storeKey.ToDS()) - isNotFound := errors.Is(err, ds.ErrNotFound) + + encryptionKey, err := generateEncryptionKeyFunc(docID, fieldName) if err != nil { - if !isNotFound { - return nil, err - } - // if previous fetch was for doc-level, there is nothing else to look for - if fieldName == "" { - return nil, nil - } - if shouldEncryptIndividualField(d.conf, fieldName) { - return nil, nil - } - // try to find doc-level key - storeKey.FieldName = "" - encryptionKey, err = d.store.Get(d.ctx, storeKey.ToDS()) - isNotFound = errors.Is(err, ds.ErrNotFound) - if err != nil && !isNotFound { - return nil, err - } + return nil, err } + + d.generatedKeys[genK{docID, fieldName}] = encryptionKey + return encryptionKey, nil } -// EncryptDoc encrypts the given plainText that is associated with the given docID and fieldName with -// encryptor in the context. -// If the current configuration is set to encrypt the given key individually, it will encrypt it with a new key. -// Otherwise, it will use document-level encryption key. -func EncryptDoc(ctx context.Context, docID string, fieldName string, plainText []byte) ([]byte, error) { - enc, ok := TryGetContextEncryptor(ctx) - if !ok { - return nil, nil - } - return enc.Encrypt(docID, fieldName, plainText) +// ShouldEncryptDocField returns true if the given field should be encrypted based on the context config. +func ShouldEncryptDocField(ctx context.Context, fieldName immutable.Option[string]) bool { + return shouldEncryptDocField(GetContextConfig(ctx), fieldName) } -// DecryptDoc decrypts the given cipherText that is associated with the given docID and fieldName with -// encryptor in the context. -func DecryptDoc(ctx context.Context, docID string, fieldName string, cipherText []byte) ([]byte, error) { - enc, ok := TryGetContextEncryptor(ctx) - if !ok { - return nil, nil - } - return enc.Decrypt(docID, fieldName, cipherText) +// ShouldEncryptIndividualField returns true if the given field should be encrypted individually based on +// the context config. +func ShouldEncryptIndividualField(ctx context.Context, fieldName immutable.Option[string]) bool { + return shouldEncryptIndividualField(GetContextConfig(ctx), fieldName) } -// ShouldEncryptField returns true if the given field should be encrypted based on the context config. 
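The two predicates above encode the key-granularity rules: a field gets its own key only when explicitly listed in `EncryptedFields`, while doc-level encryption covers every field. A small stdlib sketch of the same truth table, with plain bools standing in for `immutable.Option`:

```go
package main

import "fmt"

type config struct {
	isDocEncrypted  bool
	encryptedFields []string
}

// individual: only explicitly listed fields get their own key.
func individual(c config, field string, hasField bool) bool {
	if !hasField {
		return false
	}
	for _, f := range c.encryptedFields {
		if f == field {
			return true
		}
	}
	return false
}

// docField: true when the whole doc is encrypted, or the field is listed.
func docField(c config, field string, hasField bool) bool {
	if c.isDocEncrypted {
		return true
	}
	return individual(c, field, hasField)
}

func main() {
	c := config{isDocEncrypted: true}
	fmt.Println(docField(c, "name", true), individual(c, "name", true)) // true false

	c = config{encryptedFields: []string{"name"}}
	fmt.Println(docField(c, "name", true), individual(c, "name", true)) // true true
	fmt.Println(docField(c, "age", true))                               // false
}
```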
-func ShouldEncryptField(ctx context.Context, fieldName string) bool {
-	return shouldEncryptField(GetContextConfig(ctx), fieldName)
+func init() {
+	arg := os.Args[0]
+	// If the binary is a test binary, use deterministic encryption keys.
+	// TODO: We should try to find a better way to detect this https://github.com/sourcenetwork/defradb/issues/2801
+	if strings.HasSuffix(arg, ".test") ||
+		strings.Contains(arg, "/defradb/tests/") ||
+		strings.Contains(arg, "/__debug_bin") {
+		generateEncryptionKeyFunc = generateTestEncryptionKey
+	}
 }
diff --git a/internal/encryption/encryptor_test.go b/internal/encryption/encryptor_test.go
index 76888ed4f1..3c34ed819d 100644
--- a/internal/encryption/encryptor_test.go
+++ b/internal/encryption/encryptor_test.go
@@ -12,211 +12,192 @@ package encryption
 
 import (
 	"context"
-	"errors"
 	"testing"
 
-	ds "github.com/ipfs/go-datastore"
 	"github.com/stretchr/testify/assert"
-	"github.com/stretchr/testify/mock"
 
 	"github.com/sourcenetwork/immutable"
-
-	"github.com/sourcenetwork/defradb/datastore/mocks"
-	"github.com/sourcenetwork/defradb/internal/core"
 )
 
-var testErr = errors.New("test error")
-
-const docID = "bae-c9fb0fa4-1195-589c-aa54-e68333fb90b3"
-
-const fieldName = "name"
-
-func getPlainText() []byte {
-	return []byte("test")
+func TestContext_NoEncryptor_ReturnsNil(t *testing.T) {
+	ctx := context.Background()
+	enc := GetEncryptorFromContext(ctx)
+	assert.Nil(t, enc)
 }
 
-func getEncKey(fieldName string) []byte {
-	key, _ := generateTestEncryptionKey(docID, fieldName)
-	return key
-}
+func TestContext_WithEncryptor_ReturnsEncryptor(t *testing.T) {
+	ctx := context.Background()
+	enc := newDocEncryptor(ctx)
+	ctx = context.WithValue(ctx, docEncContextKey{}, enc)
 
-func getCipherText(t *testing.T, fieldName string) []byte {
-	cipherText, err := EncryptAES(getPlainText(), getEncKey(fieldName))
-	assert.NoError(t, err)
-	return cipherText
+	retrievedEnc := GetEncryptorFromContext(ctx)
+	assert.NotNil(t, retrievedEnc)
+	assert.Equal(t, enc, retrievedEnc)
 }
 
-func newDefaultEncryptor(t *testing.T) (*DocEncryptor, *mocks.DSReaderWriter) {
-	return newEncryptorWithConfig(t, DocEncConfig{IsDocEncrypted: true})
-}
-
-func newEncryptorWithConfig(t *testing.T, conf DocEncConfig) (*DocEncryptor, *mocks.DSReaderWriter) {
-	enc := newDocEncryptor(context.Background())
-	st := mocks.NewDSReaderWriter(t)
-	enc.SetConfig(immutable.Some(conf))
-	enc.SetStore(st)
-	return enc, st
-}
-
-func TestEncryptorEncrypt_IfStorageReturnsError_Error(t *testing.T) {
-	enc, st := newDefaultEncryptor(t)
-
-	st.EXPECT().Get(mock.Anything, mock.Anything).Return(nil, testErr)
+func TestContext_EnsureEncryptor_CreatesNew(t *testing.T) {
+	ctx := context.Background()
+	newCtx, enc := EnsureContextWithEncryptor(ctx)
 
-	_, err := enc.Encrypt(docID, fieldName, []byte("test"))
+	assert.NotNil(t, enc)
+	assert.NotEqual(t, ctx, newCtx)
 
-	assert.ErrorIs(t, err, testErr)
+	retrievedEnc := GetEncryptorFromContext(newCtx)
+	assert.Equal(t, enc, retrievedEnc)
 }
 
-func TestEncryptorEncrypt_IfStorageReturnsErrorOnSecondCall_Error(t *testing.T) {
-	enc, st := newDefaultEncryptor(t)
+func TestContext_EnsureEncryptor_ReturnsExisting(t *testing.T) {
+	ctx := context.Background()
+	enc := newDocEncryptor(ctx)
+	ctx = context.WithValue(ctx, docEncContextKey{}, enc)
 
-	st.EXPECT().Get(mock.Anything, mock.Anything).Return(nil, ds.ErrNotFound).Once()
-	st.EXPECT().Get(mock.Anything, mock.Anything).Return(nil, testErr)
-
-	_, err := enc.Encrypt(docID, fieldName, []byte("test"))
-
-	assert.ErrorIs(t, err, testErr)
+	newCtx, 
retrievedEnc := EnsureContextWithEncryptor(ctx) + assert.Equal(t, ctx, newCtx) + assert.Equal(t, enc, retrievedEnc) } -func TestEncryptorEncrypt_WithEmptyFieldNameIfNoKeyFoundInStorage_ShouldGenerateKeyStoreItAndReturnCipherText(t *testing.T) { - enc, st := newDefaultEncryptor(t) - - storeKey := core.NewEncStoreDocKey(docID, "") - - st.EXPECT().Get(mock.Anything, storeKey.ToDS()).Return(nil, ds.ErrNotFound) - st.EXPECT().Put(mock.Anything, storeKey.ToDS(), getEncKey("")).Return(nil) +func TestConfig_GetFromContext_NoConfig(t *testing.T) { + ctx := context.Background() + config := GetContextConfig(ctx) + assert.False(t, config.HasValue()) +} - cipherText, err := enc.Encrypt(docID, "", getPlainText()) +func TestConfig_GetFromContext_ReturnCurrentConfig(t *testing.T) { + ctx := context.Background() + expectedConfig := DocEncConfig{IsDocEncrypted: true, EncryptedFields: []string{"field1", "field2"}} + ctx = context.WithValue(ctx, configContextKey{}, expectedConfig) - assert.NoError(t, err) - assert.Equal(t, getCipherText(t, ""), cipherText) + config := GetContextConfig(ctx) + assert.True(t, config.HasValue()) + assert.Equal(t, expectedConfig, config.Value()) } -func TestEncryptorEncrypt_IfNoFieldEncRequestedAndNoKeyInStorage_GenerateKeyStoreItAndReturnCipherText(t *testing.T) { - enc, st := newDefaultEncryptor(t) +func TestConfig_SetContextConfig_StoreConfig(t *testing.T) { + ctx := context.Background() + config := DocEncConfig{IsDocEncrypted: true, EncryptedFields: []string{"field1", "field2"}} - docStoreKey := core.NewEncStoreDocKey(docID, "").ToDS() - fieldStoreKey := core.NewEncStoreDocKey(docID, fieldName).ToDS() + newCtx := SetContextConfig(ctx, config) + retrievedConfig := GetContextConfig(newCtx) - st.EXPECT().Get(mock.Anything, fieldStoreKey).Return(nil, ds.ErrNotFound) - st.EXPECT().Get(mock.Anything, docStoreKey).Return(nil, ds.ErrNotFound) - st.EXPECT().Put(mock.Anything, docStoreKey, getEncKey("")).Return(nil) + assert.True(t, retrievedConfig.HasValue()) + assert.Equal(t, config, retrievedConfig.Value()) +} - cipherText, err := enc.Encrypt(docID, fieldName, getPlainText()) +func TestConfig_SetFromParamsWithDocEncryption_StoreConfig(t *testing.T) { + ctx := context.Background() + newCtx := SetContextConfigFromParams(ctx, true, []string{"field1", "field2"}) - assert.NoError(t, err) - assert.Equal(t, getCipherText(t, ""), cipherText) + config := GetContextConfig(newCtx) + assert.True(t, config.HasValue()) + assert.True(t, config.Value().IsDocEncrypted) + assert.Equal(t, []string{"field1", "field2"}, config.Value().EncryptedFields) } -func TestEncryptorEncrypt_IfNoKeyWithFieldFoundInStorage_ShouldGenerateKeyStoreItAndReturnCipherText(t *testing.T) { - enc, st := newEncryptorWithConfig(t, DocEncConfig{EncryptedFields: []string{fieldName}}) +func TestConfig_SetFromParamsWithFields_StoreConfig(t *testing.T) { + ctx := context.Background() + newCtx := SetContextConfigFromParams(ctx, false, []string{"field1", "field2"}) - storeKey := core.NewEncStoreDocKey(docID, fieldName) - - st.EXPECT().Get(mock.Anything, storeKey.ToDS()).Return(nil, ds.ErrNotFound) - st.EXPECT().Put(mock.Anything, storeKey.ToDS(), getEncKey(fieldName)).Return(nil) + config := GetContextConfig(newCtx) + assert.True(t, config.HasValue()) + assert.False(t, config.Value().IsDocEncrypted) + assert.Equal(t, []string{"field1", "field2"}, config.Value().EncryptedFields) +} - cipherText, err := enc.Encrypt(docID, fieldName, getPlainText()) +func TestConfig_SetFromParamsWithNoEncryptionSetting_NoConfig(t *testing.T) { + ctx := 
context.Background() + newCtx := SetContextConfigFromParams(ctx, false, nil) - assert.NoError(t, err) - assert.Equal(t, getCipherText(t, fieldName), cipherText) + config := GetContextConfig(newCtx) + assert.False(t, config.HasValue()) } -func TestEncryptorEncrypt_IfKeyWithFieldFoundInStorage_ShouldUseItToReturnCipherText(t *testing.T) { - enc, st := newEncryptorWithConfig(t, DocEncConfig{EncryptedFields: []string{fieldName}}) - - storeKey := core.NewEncStoreDocKey(docID, fieldName) - st.EXPECT().Get(mock.Anything, storeKey.ToDS()).Return(getEncKey(fieldName), nil) +func TestEncryptor_EncryptDecrypt_SuccessfulRoundTrip(t *testing.T) { + ctx := context.Background() + enc := newDocEncryptor(ctx) + enc.SetConfig(immutable.Some(DocEncConfig{EncryptedFields: []string{"field1"}})) - cipherText, err := enc.Encrypt(docID, fieldName, getPlainText()) + plainText := []byte("Hello, World!") + docID := "doc1" + fieldName := immutable.Some("field1") + key, err := enc.GetOrGenerateEncryptionKey(docID, fieldName) assert.NoError(t, err) - assert.Equal(t, getCipherText(t, fieldName), cipherText) -} - -func TestEncryptorEncrypt_IfKeyFoundInStorage_ShouldUseItToReturnCipherText(t *testing.T) { - enc, st := newDefaultEncryptor(t) - - st.EXPECT().Get(mock.Anything, mock.Anything).Return(getEncKey(""), nil) + assert.NotNil(t, key) - cipherText, err := enc.Encrypt(docID, "", getPlainText()) + cipherText, err := enc.Encrypt(plainText, key) + assert.NoError(t, err) + assert.NotEqual(t, plainText, cipherText) + decryptedText, err := enc.Decrypt(cipherText, key) assert.NoError(t, err) - assert.Equal(t, getCipherText(t, ""), cipherText) + assert.Equal(t, plainText, decryptedText) } -func TestEncryptorEncrypt_IfStorageFailsToStoreEncryptionKey_ReturnError(t *testing.T) { - enc, st := newDefaultEncryptor(t) - - st.EXPECT().Get(mock.Anything, mock.Anything).Return(nil, ds.ErrNotFound) +func TestEncryptor_GetOrGenerateKey_ReturnsExistingKey(t *testing.T) { + ctx := context.Background() + enc := newDocEncryptor(ctx) + enc.SetConfig(immutable.Some(DocEncConfig{EncryptedFields: []string{"field1"}})) - st.EXPECT().Put(mock.Anything, mock.Anything, mock.Anything).Return(testErr) + docID := "doc1" + fieldName := immutable.Some("field1") - _, err := enc.Encrypt(docID, fieldName, getPlainText()) + key1, err := enc.GetOrGenerateEncryptionKey(docID, fieldName) + assert.NoError(t, err) + assert.NotNil(t, key1) - assert.ErrorIs(t, err, testErr) + key2, err := enc.GetOrGenerateEncryptionKey(docID, fieldName) + assert.NoError(t, err) + assert.Equal(t, key1, key2) } -func TestEncryptorEncrypt_IfKeyGenerationIsNotEnabled_ShouldReturnPlainText(t *testing.T) { - enc, st := newDefaultEncryptor(t) - enc.SetConfig(immutable.None[DocEncConfig]()) - - st.EXPECT().Get(mock.Anything, mock.Anything).Return(nil, ds.ErrNotFound) +func TestEncryptor_GenerateKey_DifferentKeysForDifferentFields(t *testing.T) { + ctx := context.Background() + enc := newDocEncryptor(ctx) + enc.SetConfig(immutable.Some(DocEncConfig{EncryptedFields: []string{"field1", "field2"}})) - cipherText, err := enc.Encrypt(docID, fieldName, getPlainText()) + docID := "doc1" + fieldName1 := immutable.Some("field1") + fieldName2 := immutable.Some("field2") + key1, err := enc.GetOrGenerateEncryptionKey(docID, fieldName1) assert.NoError(t, err) - assert.Equal(t, getPlainText(), cipherText) -} - -func TestEncryptorEncrypt_IfNoStorageProvided_Error(t *testing.T) { - enc, _ := newDefaultEncryptor(t) - enc.SetStore(nil) + assert.NotNil(t, key1) - _, err := enc.Encrypt(docID, fieldName, 
getPlainText()) + key2, err := enc.GetOrGenerateEncryptionKey(docID, fieldName2) + assert.NoError(t, err) + assert.NotNil(t, key2) - assert.ErrorIs(t, err, ErrNoStorageProvided) + assert.NotEqual(t, key1, key2) } -func TestEncryptorDecrypt_IfNoStorageProvided_Error(t *testing.T) { - enc, _ := newDefaultEncryptor(t) - enc.SetStore(nil) +func TestShouldEncryptField_WithDocEncryption_True(t *testing.T) { + config := DocEncConfig{IsDocEncrypted: true} + ctx := SetContextConfig(context.Background(), config) - _, err := enc.Decrypt(docID, fieldName, getPlainText()) - - assert.ErrorIs(t, err, ErrNoStorageProvided) + assert.True(t, ShouldEncryptDocField(ctx, immutable.Some("field1"))) + assert.True(t, ShouldEncryptDocField(ctx, immutable.Some("field2"))) } -func TestEncryptorDecrypt_IfStorageReturnsError_Error(t *testing.T) { - enc, st := newDefaultEncryptor(t) - - st.EXPECT().Get(mock.Anything, mock.Anything).Return(nil, testErr) +func TestShouldEncryptField_WithFieldEncryption_TrueForMatchingField(t *testing.T) { + config := DocEncConfig{EncryptedFields: []string{"field1"}} + ctx := SetContextConfig(context.Background(), config) - _, err := enc.Decrypt(docID, fieldName, []byte("test")) - - assert.ErrorIs(t, err, testErr) + assert.True(t, ShouldEncryptDocField(ctx, immutable.Some("field1"))) + assert.False(t, ShouldEncryptDocField(ctx, immutable.Some("field2"))) } -func TestEncryptorDecrypt_IfKeyFoundInStorage_ShouldUseItToReturnPlainText(t *testing.T) { - enc, st := newDefaultEncryptor(t) - - st.EXPECT().Get(mock.Anything, mock.Anything).Return(getEncKey(""), nil) - - plainText, err := enc.Decrypt(docID, fieldName, getCipherText(t, "")) +func TestShouldEncryptIndividualField_WithDocEncryption_False(t *testing.T) { + config := DocEncConfig{IsDocEncrypted: true} + ctx := SetContextConfig(context.Background(), config) - assert.NoError(t, err) - assert.Equal(t, getPlainText(), plainText) + assert.False(t, ShouldEncryptIndividualField(ctx, immutable.Some("field1"))) + assert.False(t, ShouldEncryptIndividualField(ctx, immutable.Some("field2"))) } -func TestEncryptDoc_IfContextHasNoEncryptor_ReturnNil(t *testing.T) { - data, err := EncryptDoc(context.Background(), docID, fieldName, getPlainText()) - assert.Nil(t, data, "data should be nil") - assert.NoError(t, err, "error should be nil") -} +func TestShouldEncryptIndividualField_WithFieldEncryption_TrueForMatchingField(t *testing.T) { + config := DocEncConfig{EncryptedFields: []string{"field1"}} + ctx := SetContextConfig(context.Background(), config) -func TestDecryptDoc_IfContextHasNoEncryptor_ReturnNil(t *testing.T) { - data, err := DecryptDoc(context.Background(), docID, fieldName, getCipherText(t, fieldName)) - assert.Nil(t, data, "data should be nil") - assert.NoError(t, err, "error should be nil") + assert.True(t, ShouldEncryptIndividualField(ctx, immutable.Some("field1"))) + assert.False(t, ShouldEncryptIndividualField(ctx, immutable.Some("field2"))) } diff --git a/internal/encryption/errors.go b/internal/encryption/errors.go index 6a443ad834..a068c20fae 100644 --- a/internal/encryption/errors.go +++ b/internal/encryption/errors.go @@ -15,9 +15,11 @@ import ( ) const ( - errNoStorageProvided string = "no storage provided" + errNoStorageProvided string = "no storage provided" + errContextHasNoEncryptor string = "context has no encryptor" ) var ( - ErrNoStorageProvided = errors.New(errNoStorageProvided) + ErrNoStorageProvided = errors.New(errNoStorageProvided) + ErrContextHasNoEncryptor = errors.New(errContextHasNoEncryptor) ) diff --git 
a/internal/encryption/event.go b/internal/encryption/event.go
new file mode 100644
index 0000000000..16c1442b64
--- /dev/null
+++ b/internal/encryption/event.go
@@ -0,0 +1,75 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package encryption
+
+import (
+	cidlink "github.com/ipld/go-ipld-prime/linking/cid"
+
+	"github.com/sourcenetwork/defradb/event"
+)
+
+const RequestKeysEventName = event.Name("enc-keys-request")
+
+// RequestKeysEvent represents a request from a node to fetch an encryption key for a specific
+// docID/field.
+//
+// It must only contain public elements not protected by ACP.
+type RequestKeysEvent struct {
+	// Keys is a list of the keys that are being requested.
+	Keys []cidlink.Link
+
+	Resp chan<- Result
+}
+
+// RequestedKeyEventData represents the data that was retrieved for a specific key.
+type RequestedKeyEventData struct {
+	// Key is the encryption key that was retrieved.
+	Key []byte
+}
+
+// Item represents a single retrieved encryption block for a requested link.
+type Item struct {
+	Link  []byte
+	Block []byte
+}
+
+type Result struct {
+	Items []Item
+	Error error
+}
+
+type Results struct {
+	output chan Result
+}
+
+func (r *Results) Get() <-chan Result {
+	return r.output
+}
+
+// NewResults creates a new Results object and a channel that can be used to send results to it.
+// The Results object can be used to wait on the results, and the channel can be used to send results.
+func NewResults() (*Results, chan<- Result) {
+	ch := make(chan Result, 1)
+	return &Results{
+		output: ch,
+	}, ch
+}
+
+// NewRequestKeysMessage creates a new event message for a request from a node to fetch an encryption key
+// for a specific docID/field.
+// It returns the message and the results that can be waited on.
+func NewRequestKeysMessage(keys []cidlink.Link) (event.Message, *Results) {
+	res, ch := NewResults()
+	return event.NewMessage(RequestKeysEventName, RequestKeysEvent{
+		Keys: keys,
+		Resp: ch,
+	}), res
+}
diff --git a/internal/kms/enc_store.go b/internal/kms/enc_store.go
new file mode 100644
index 0000000000..bd60592f26
--- /dev/null
+++ b/internal/kms/enc_store.go
@@ -0,0 +1,66 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
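`Item.Link` in the new event.go above carries a CID in raw byte form, matching the `cid.CidFromBytes` decoding used by the merge processor. A quick roundtrip sketch; the hashed payload is arbitrary:

```go
package main

import (
	"fmt"

	"github.com/ipfs/go-cid"
	mh "github.com/multiformats/go-multihash"
)

func main() {
	// Build a CIDv1 the way links travel in RequestKeysEvent and the pb messages: raw bytes.
	sum, err := mh.Sum([]byte("an encryption block"), mh.SHA2_256, -1)
	if err != nil {
		panic(err)
	}
	c := cid.NewCidV1(cid.DagCBOR, sum)

	wire := c.Bytes() // what Item.Link / FetchEncryptionKeyRequest.Links carry

	n, parsed, err := cid.CidFromBytes(wire)
	if err != nil {
		panic(err)
	}
	fmt.Println(n, parsed.Equals(c)) // bytes consumed, true
}
```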
+ +package kms + +import ( + "context" + + "github.com/ipfs/go-cid" + "github.com/ipld/go-ipld-prime/linking" + cidlink "github.com/ipld/go-ipld-prime/linking/cid" + + "github.com/sourcenetwork/defradb/datastore" + coreblock "github.com/sourcenetwork/defradb/internal/core/block" +) + +type ipldEncStorage struct { + encstore datastore.Blockstore +} + +func newIPLDEncryptionStorage(encstore datastore.Blockstore) *ipldEncStorage { + return &ipldEncStorage{encstore: encstore} +} + +func (s *ipldEncStorage) get(ctx context.Context, cidBytes []byte) (*coreblock.Encryption, error) { + lsys := cidlink.DefaultLinkSystem() + lsys.SetReadStorage(s.encstore.AsIPLDStorage()) + + _, blockCid, err := cid.CidFromBytes(cidBytes) + if err != nil { + return nil, err + } + + nd, err := lsys.Load(linking.LinkContext{Ctx: ctx}, cidlink.Link{Cid: blockCid}, + coreblock.EncryptionSchemaPrototype) + if err != nil { + return nil, err + } + + return coreblock.GetEncryptionBlockFromNode(nd) +} + +func (s *ipldEncStorage) put(ctx context.Context, blockBytes []byte) ([]byte, error) { + lsys := cidlink.DefaultLinkSystem() + lsys.SetWriteStorage(s.encstore.AsIPLDStorage()) + + var encBlock coreblock.Encryption + err := encBlock.Unmarshal(blockBytes) + if err != nil { + return nil, err + } + + link, err := lsys.Store(linking.LinkContext{Ctx: ctx}, coreblock.GetLinkPrototype(), encBlock.GenerateNode()) + if err != nil { + return nil, err + } + + return []byte(link.String()), nil +} diff --git a/internal/kms/errors.go b/internal/kms/errors.go new file mode 100644 index 0000000000..603d3c3232 --- /dev/null +++ b/internal/kms/errors.go @@ -0,0 +1,27 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package kms + +import ( + "github.com/sourcenetwork/defradb/errors" +) + +const ( + errUnknownKMSType string = "unknown KMS type" +) + +var ( + ErrUnknownKMSType = errors.New(errUnknownKMSType) +) + +func NewErrUnknownKMSType(t ServiceType) error { + return errors.New(errUnknownKMSType, errors.NewKV("Type", t)) +} diff --git a/internal/kms/pubsub.go b/internal/kms/pubsub.go new file mode 100644 index 0000000000..ca67603a7c --- /dev/null +++ b/internal/kms/pubsub.go @@ -0,0 +1,339 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
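The pubsub service that follows exchanges keys ECIES-style: the requester sends an ephemeral X25519 public key, and the responder encrypts each block under a freshly derived shared secret, binding both identities through the AAD. A stdlib-only sketch of that shape, assuming nothing about defradb's actual `crypto.EncryptECIES` wire format (its KDF and framing differ):

```go
package main

import (
	"crypto/aes"
	"crypto/cipher"
	"crypto/ecdh"
	"crypto/rand"
	"crypto/sha256"
	"fmt"
)

// aead derives a one-shot AES-GCM cipher from an X25519 shared secret.
func aead(secret []byte) cipher.AEAD {
	key := sha256.Sum256(secret)
	block, err := aes.NewCipher(key[:])
	if err != nil {
		panic(err)
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		panic(err)
	}
	return gcm
}

func main() {
	// Requester side: an ephemeral key pair travels in the key request.
	requester, _ := ecdh.X25519().GenerateKey(rand.Reader)
	aad := []byte("requester-eph-pub||responder-peer-id") // stand-in for makeAssociatedData

	// Responder side: its own ephemeral key plus the requester's public key.
	responder, _ := ecdh.X25519().GenerateKey(rand.Reader)
	secret, _ := responder.ECDH(requester.PublicKey())
	gcm := aead(secret)
	nonce := make([]byte, gcm.NonceSize()) // zero nonce: acceptable for a single-use key, demo only
	box := gcm.Seal(nil, nonce, []byte("encryption key"), aad)

	// Requester decrypts with the mirrored DH and the same AAD.
	secret2, _ := requester.ECDH(responder.PublicKey())
	pt, err := aead(secret2).Open(nil, make([]byte, gcm.NonceSize()), box, aad)
	fmt.Println(string(pt), err)
}
```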
+
+package kms
+
+import (
+	"bytes"
+	"context"
+	"crypto/ecdh"
+	"encoding/base64"
+
+	cidlink "github.com/ipld/go-ipld-prime/linking/cid"
+	libpeer "github.com/libp2p/go-libp2p/core/peer"
+	rpc "github.com/sourcenetwork/go-libp2p-pubsub-rpc"
+	grpcpeer "google.golang.org/grpc/peer"
+	"google.golang.org/protobuf/proto"
+
+	"github.com/sourcenetwork/defradb/crypto"
+	"github.com/sourcenetwork/defradb/datastore"
+	"github.com/sourcenetwork/defradb/errors"
+	"github.com/sourcenetwork/defradb/event"
+	"github.com/sourcenetwork/defradb/internal/encryption"
+	pb "github.com/sourcenetwork/defradb/net/pb"
+)
+
+const pubsubTopic = "encryption"
+
+type PubSubServer interface {
+	AddPubSubTopic(string, rpc.MessageHandler) error
+	SendPubSubMessage(context.Context, string, []byte) (<-chan rpc.Response, error)
+}
+
+type pubSubService struct {
+	ctx             context.Context
+	peerID          libpeer.ID
+	pubsub          PubSubServer
+	keyRequestedSub *event.Subscription
+	eventBus        *event.Bus
+	encStore        *ipldEncStorage
+}
+
+var _ Service = (*pubSubService)(nil)
+
+func (s *pubSubService) GetKeys(ctx context.Context, cids ...cidlink.Link) (*encryption.Results, error) {
+	res, ch := encryption.NewResults()
+
+	err := s.requestEncryptionKeyFromPeers(ctx, cids, ch)
+	if err != nil {
+		return nil, err
+	}
+
+	return res, nil
+}
+
+// NewPubSubService creates a new instance of the KMS service that is connected to the given PubSubServer,
+// event bus and encryption storage.
+//
+// The service will subscribe to the "encryption" topic on the PubSubServer and to the
+// "enc-keys-request" event on the event bus.
+func NewPubSubService(
+	ctx context.Context,
+	peerID libpeer.ID,
+	pubsub PubSubServer,
+	eventBus *event.Bus,
+	encstore datastore.Blockstore,
+) (*pubSubService, error) {
+	s := &pubSubService{
+		ctx:      ctx,
+		peerID:   peerID,
+		pubsub:   pubsub,
+		eventBus: eventBus,
+		encStore: newIPLDEncryptionStorage(encstore),
+	}
+	err := pubsub.AddPubSubTopic(pubsubTopic, s.handleRequestFromPeer)
+	if err != nil {
+		return nil, err
+	}
+	s.keyRequestedSub, err = eventBus.Subscribe(encryption.RequestKeysEventName)
+	if err != nil {
+		return nil, err
+	}
+	go s.handleKeyRequestedEvent()
+	return s, nil
+}
+
+func (s *pubSubService) handleKeyRequestedEvent() {
+	for {
+		msg, isOpen := <-s.keyRequestedSub.Message()
+		if !isOpen {
+			return
+		}
+
+		if keyReqEvent, ok := msg.Data.(encryption.RequestKeysEvent); ok {
+			go func() {
+				results, err := s.GetKeys(s.ctx, keyReqEvent.Keys...)
+				if err != nil {
+					log.ErrorContextE(s.ctx, "Failed to get encryption keys", err)
+				}
+
+				defer close(keyReqEvent.Resp)
+
+				select {
+				case <-s.ctx.Done():
+					return
+				case encResult := <-results.Get():
+					for _, encItem := range encResult.Items {
+						_, err = s.encStore.put(s.ctx, encItem.Block)
+						if err != nil {
+							log.ErrorContextE(s.ctx, "Failed to save encryption key", err)
+							return
+						}
+					}
+
+					keyReqEvent.Resp <- encResult
+				}
+			}()
+		} else {
+			log.ErrorContext(s.ctx, "Failed to cast event data to RequestKeysEvent")
+		}
+	}
+}
+
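`handleKeyRequestedEvent` above pairs each request with a response channel that the handler must always close, so waiters never hang even when the context is cancelled. A stdlib sketch of that drain-and-answer loop, with illustrative types:

```go
package main

import (
	"context"
	"fmt"
)

type request struct {
	keys []string
	resp chan<- []string // closed by the handler when it is done
}

// serve mirrors handleKeyRequestedEvent: drain the subscription channel until it
// closes, answering each request in its own goroutine and always closing resp.
func serve(ctx context.Context, sub <-chan request) {
	for req := range sub {
		req := req
		go func() {
			defer close(req.resp)
			select {
			case <-ctx.Done():
			case req.resp <- append([]string(nil), req.keys...): // echo back as the "fetched" result
			}
		}()
	}
}

func main() {
	sub := make(chan request)
	go serve(context.Background(), sub)

	resp := make(chan []string, 1)
	sub <- request{keys: []string{"cid1", "cid2"}, resp: resp}
	fmt.Println(<-resp)
	close(sub)
}
```

+// handleRequestFromPeer handles incoming FetchEncryptionKeyRequest messages from the pubsub network.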
+func (s *pubSubService) handleRequestFromPeer(peerID libpeer.ID, topic string, msg []byte) ([]byte, error) { + req := new(pb.FetchEncryptionKeyRequest) + if err := proto.Unmarshal(msg, req); err != nil { + log.ErrorContextE(s.ctx, "Failed to unmarshal pubsub message %s", err) + return nil, err + } + + ctx := grpcpeer.NewContext(s.ctx, newGRPCPeer(peerID)) + res, err := s.tryGenEncryptionKeyLocally(ctx, req) + if err != nil { + log.ErrorContextE(s.ctx, "failed attempt to get encryption key", err) + return nil, errors.Wrap("failed attempt to get encryption key", err) + } + return res.MarshalVT() +} + +func (s *pubSubService) prepareFetchEncryptionKeyRequest( + cids []cidlink.Link, + ephemeralPublicKey []byte, +) (*pb.FetchEncryptionKeyRequest, error) { + req := &pb.FetchEncryptionKeyRequest{ + EphemeralPublicKey: ephemeralPublicKey, + } + + req.Links = make([][]byte, len(cids)) + for i, cid := range cids { + req.Links[i] = cid.Bytes() + } + + return req, nil +} + +// requestEncryptionKeyFromPeers publishes the given FetchEncryptionKeyRequest object on the PubSub network +func (s *pubSubService) requestEncryptionKeyFromPeers( + ctx context.Context, + cids []cidlink.Link, + result chan<- encryption.Result, +) error { + ephPrivKey, err := crypto.GenerateX25519() + if err != nil { + return err + } + + ephPubKeyBytes := ephPrivKey.PublicKey().Bytes() + req, err := s.prepareFetchEncryptionKeyRequest(cids, ephPubKeyBytes) + if err != nil { + return err + } + + data, err := req.MarshalVT() + if err != nil { + return errors.Wrap("failed to marshal pubsub message", err) + } + + respChan, err := s.pubsub.SendPubSubMessage(ctx, pubsubTopic, data) + if err != nil { + return errors.Wrap("failed publishing to encryption thread", err) + } + + go func() { + s.handleFetchEncryptionKeyResponse(<-respChan, req, ephPrivKey, result) + }() + + return nil +} + +// handleFetchEncryptionKeyResponse handles incoming FetchEncryptionKeyResponse messages +func (s *pubSubService) handleFetchEncryptionKeyResponse( + resp rpc.Response, + req *pb.FetchEncryptionKeyRequest, + privateKey *ecdh.PrivateKey, + result chan<- encryption.Result, +) { + defer close(result) + + var keyResp pb.FetchEncryptionKeyReply + if err := proto.Unmarshal(resp.Data, &keyResp); err != nil { + log.ErrorContextE(s.ctx, "Failed to unmarshal encryption key response", err) + result <- encryption.Result{Error: err} + return + } + + resultEncItems := make([]encryption.Item, 0, len(keyResp.Blocks)) + for i, block := range keyResp.Blocks { + decryptedData, err := crypto.DecryptECIES( + block, + privateKey, + crypto.WithAAD(makeAssociatedData(req, resp.From)), + crypto.WithPubKeyBytes(keyResp.EphemeralPublicKey), + crypto.WithPubKeyPrepended(false), + ) + + if err != nil { + log.ErrorContextE(s.ctx, "Failed to decrypt encryption key", err) + result <- encryption.Result{Error: err} + return + } + + resultEncItems = append(resultEncItems, encryption.Item{ + Link: keyResp.Links[i], + Block: decryptedData, + }) + } + + result <- encryption.Result{ + Items: resultEncItems, + } +} + +// makeAssociatedData creates the associated data for the encryption key request +func makeAssociatedData(req *pb.FetchEncryptionKeyRequest, peerID libpeer.ID) []byte { + return encodeToBase64(bytes.Join([][]byte{ + req.EphemeralPublicKey, + []byte(peerID), + }, []byte{})) +} + +func (s *pubSubService) tryGenEncryptionKeyLocally( + ctx context.Context, + req *pb.FetchEncryptionKeyRequest, +) (*pb.FetchEncryptionKeyReply, error) { + blocks, err := s.getEncryptionKeysLocally(ctx, req) 
+	if err != nil || len(blocks) == 0 {
+		return nil, err
+	}
+
+	reqEphPubKey, err := crypto.X25519PublicKeyFromBytes(req.EphemeralPublicKey)
+	if err != nil {
+		return nil, errors.Wrap("failed to unmarshal ephemeral public key", err)
+	}
+
+	privKey, err := crypto.GenerateX25519()
+	if err != nil {
+		return nil, err
+	}
+
+	res := &pb.FetchEncryptionKeyReply{
+		Links:              req.Links,
+		EphemeralPublicKey: privKey.PublicKey().Bytes(),
+	}
+
+	res.Blocks = make([][]byte, 0, len(blocks))
+
+	for _, block := range blocks {
+		encryptedBlock, err := crypto.EncryptECIES(
+			block,
+			reqEphPubKey,
+			crypto.WithAAD(makeAssociatedData(req, s.peerID)),
+			crypto.WithPrivKey(privKey),
+			crypto.WithPubKeyPrepended(false),
+		)
+		if err != nil {
+			return nil, errors.Wrap("failed to encrypt key for requester", err)
+		}
+
+		res.Blocks = append(res.Blocks, encryptedBlock)
+	}
+
+	return res, nil
+}
+
+// getEncryptionKeysLocally retrieves the locally stored encryption blocks for the links in the request.
+// It returns only the blocks that were found in the local encryption store.
+func (s *pubSubService) getEncryptionKeysLocally(
+	ctx context.Context,
+	req *pb.FetchEncryptionKeyRequest,
+) ([][]byte, error) {
+	blocks := make([][]byte, 0, len(req.Links))
+	for _, link := range req.Links {
+		encBlock, err := s.encStore.get(ctx, link)
+		if err != nil {
+			return nil, err
+		}
+		// TODO: we should test it somehow. For this, one peer should have some keys and
+		// another one should have the others. https://github.com/sourcenetwork/defradb/issues/2895
+		if encBlock == nil {
+			continue
+		}
+
+		encBlockBytes, err := encBlock.Marshal()
+		if err != nil {
+			return nil, err
+		}
+
+		blocks = append(blocks, encBlockBytes)
+	}
+	return blocks, nil
+}
+
+func encodeToBase64(data []byte) []byte {
+	encoded := make([]byte, base64.StdEncoding.EncodedLen(len(data)))
+	base64.StdEncoding.Encode(encoded, data)
+	return encoded
+}
+
+func newGRPCPeer(peerID libpeer.ID) *grpcpeer.Peer {
+	return &grpcpeer.Peer{
+		Addr: addr{peerID},
+	}
+}
+
+// addr implements net.Addr and holds a libp2p peer ID.
+type addr struct{ id libpeer.ID }
+
+// Network returns the name of the network that this address belongs to (libp2p).
+func (a addr) Network() string { return "libp2p" }
+
+// String returns the peer ID of this address in string form (B58-encoded).
+func (a addr) String() string { return a.id.String() }
diff --git a/internal/kms/service.go b/internal/kms/service.go
new file mode 100644
index 0000000000..97985c9a43
--- /dev/null
+++ b/internal/kms/service.go
@@ -0,0 +1,40 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package kms
+
+import (
+	"context"
+
+	cidlink "github.com/ipld/go-ipld-prime/linking/cid"
+	"github.com/sourcenetwork/corelog"
+
+	"github.com/sourcenetwork/defradb/internal/encryption"
+)
+
+var (
+	log = corelog.NewLogger("kms")
+)
+
+type ServiceType string
+
+const (
+	// PubSubServiceType is the type of KMS that uses the PubSub mechanism to exchange keys
+	// between peers.
+	PubSubServiceType ServiceType = "pubsub"
+)
+
+// Service is the interface for a key management service (KMS).
+type Service interface {
+	// GetKeys retrieves the encryption blocks containing encryption keys for the given links.
+ // Blocks are fetched asynchronously, so the method returns an [encryption.Results] object + // that can be used to wait for the results. + GetKeys(ctx context.Context, cids ...cidlink.Link) (*encryption.Results, error) +} diff --git a/internal/lens/fetcher.go b/internal/lens/fetcher.go index 357bbe9677..bbe0c45a0d 100644 --- a/internal/lens/fetcher.go +++ b/internal/lens/fetcher.go @@ -307,7 +307,7 @@ func (f *lensedFetcher) updateDataStore(ctx context.Context, original map[string // in which case we have to skip them for now. continue } - fieldKey := datastoreKeyBase.WithFieldId(fieldDesc.ID.String()) + fieldKey := datastoreKeyBase.WithFieldID(fieldDesc.ID.String()) bytes, err := cbor.Marshal(value) if err != nil { @@ -320,7 +320,7 @@ func (f *lensedFetcher) updateDataStore(ctx context.Context, original map[string } } - versionKey := datastoreKeyBase.WithFieldId(core.DATASTORE_DOC_VERSION_FIELD_ID) + versionKey := datastoreKeyBase.WithFieldID(core.DATASTORE_DOC_VERSION_FIELD_ID) err := f.txn.Datastore().Put(ctx, versionKey.ToDS(), []byte(f.targetVersionID)) if err != nil { return err diff --git a/internal/merkle/clock/clock.go b/internal/merkle/clock/clock.go index b5b5f2631c..b5b55e1374 100644 --- a/internal/merkle/clock/clock.go +++ b/internal/merkle/clock/clock.go @@ -21,6 +21,7 @@ import ( cidlink "github.com/ipld/go-ipld-prime/linking/cid" "github.com/sourcenetwork/corelog" + "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/internal/core" @@ -36,6 +37,7 @@ var ( type MerkleClock struct { headstore datastore.DSReaderWriter blockstore datastore.Blockstore + encstore datastore.Blockstore headset *heads crdt core.ReplicatedData } @@ -44,12 +46,14 @@ type MerkleClock struct { func NewMerkleClock( headstore datastore.DSReaderWriter, blockstore datastore.Blockstore, + encstore datastore.Blockstore, namespace core.HeadStoreKey, crdt core.ReplicatedData, ) *MerkleClock { return &MerkleClock{ headstore: headstore, blockstore: blockstore, + encstore: encstore, headset: NewHeadSet(headstore, namespace), crdt: crdt, } @@ -59,10 +63,23 @@ func (mc *MerkleClock) putBlock( ctx context.Context, block *coreblock.Block, ) (cidlink.Link, error) { - nd := block.GenerateNode() lsys := cidlink.DefaultLinkSystem() lsys.SetWriteStorage(mc.blockstore.AsIPLDStorage()) - link, err := lsys.Store(linking.LinkContext{Ctx: ctx}, coreblock.GetLinkPrototype(), nd) + link, err := lsys.Store(linking.LinkContext{Ctx: ctx}, coreblock.GetLinkPrototype(), block.GenerateNode()) + if err != nil { + return cidlink.Link{}, NewErrWritingBlock(err) + } + + return link.(cidlink.Link), nil +} + +func (mc *MerkleClock) putEncBlock( + ctx context.Context, + encBlock *coreblock.Encryption, +) (cidlink.Link, error) { + lsys := cidlink.DefaultLinkSystem() + lsys.SetWriteStorage(mc.encstore.AsIPLDStorage()) + link, err := lsys.Store(linking.LinkContext{Ctx: ctx}, coreblock.GetLinkPrototype(), encBlock.GenerateNode()) if err != nil { return cidlink.Link{}, NewErrWritingBlock(err) } @@ -86,21 +103,22 @@ func (mc *MerkleClock) AddDelta( delta.SetPriority(height) block := coreblock.New(delta, links, heads...) 
-	isEncrypted, err := mc.checkIfBlockEncryptionEnabled(ctx, block.Delta.GetFieldName(), heads)
+	fieldName := immutable.None[string]()
+	if block.Delta.GetFieldName() != "" {
+		fieldName = immutable.Some(block.Delta.GetFieldName())
+	}
+	encBlock, encLink, err := mc.determineBlockEncryption(ctx, string(block.Delta.GetDocID()), fieldName, heads)
 	if err != nil {
 		return cidlink.Link{}, nil, err
 	}
 
 	dagBlock := block
-	if isEncrypted {
-		if !block.Delta.IsComposite() {
-			dagBlock, err = encryptBlock(ctx, block)
-			if err != nil {
-				return cidlink.Link{}, nil, err
-			}
-		} else {
-			dagBlock.IsEncrypted = &isEncrypted
+	if encBlock != nil {
+		dagBlock, err = encryptBlock(ctx, block, encBlock)
+		if err != nil {
+			return cidlink.Link{}, nil, err
 		}
+		dagBlock.Encryption = &encLink
 	}
 
 	link, err := mc.putBlock(ctx, dagBlock)
@@ -109,12 +127,7 @@ func (mc *MerkleClock) AddDelta(
 	}
 
 	// merge the delta and update the state
-	err = mc.ProcessBlock(
-		ctx,
-		block,
-		link,
-		false,
-	)
+	err = mc.ProcessBlock(ctx, block, link)
 	if err != nil {
 		return cidlink.Link{}, nil, err
 	}
@@ -127,57 +140,95 @@ func (mc *MerkleClock) AddDelta(
 	return link, b, err
 }
 
-func (mc *MerkleClock) checkIfBlockEncryptionEnabled(
+func (mc *MerkleClock) determineBlockEncryption(
 	ctx context.Context,
-	fieldName string,
+	docID string,
+	fieldName immutable.Option[string],
 	heads []cid.Cid,
-) (bool, error) {
-	if encryption.ShouldEncryptField(ctx, fieldName) {
-		return true, nil
+) (*coreblock.Encryption, cidlink.Link, error) {
+	// if new encryption was requested by the user
+	if encryption.ShouldEncryptDocField(ctx, fieldName) {
+		encBlock := &coreblock.Encryption{DocID: []byte(docID)}
+		if encryption.ShouldEncryptIndividualField(ctx, fieldName) {
+			f := fieldName.Value()
+			encBlock.FieldName = &f
+		}
+		encryptor := encryption.GetEncryptorFromContext(ctx)
+		if encryptor != nil {
+			encKey, err := encryptor.GetOrGenerateEncryptionKey(docID, fieldName)
+			if err != nil {
+				return nil, cidlink.Link{}, err
+			}
+			if len(encKey) > 0 {
+				encBlock.Key = encKey
+			}
+
+			link, err := mc.putEncBlock(ctx, encBlock)
+			if err != nil {
+				return nil, cidlink.Link{}, err
+			}
+			return encBlock, link, nil
+		}
 	}
 
+	// otherwise we use the same encryption as the previous block
 	for _, headCid := range heads {
-		bytes, err := mc.blockstore.AsIPLDStorage().Get(ctx, headCid.KeyString())
+		prevBlockBytes, err := mc.blockstore.AsIPLDStorage().Get(ctx, headCid.KeyString())
 		if err != nil {
-			return false, NewErrCouldNotFindBlock(headCid, err)
+			return nil, cidlink.Link{}, NewErrCouldNotFindBlock(headCid, err)
 		}
-		prevBlock, err := coreblock.GetFromBytes(bytes)
+		prevBlock, err := coreblock.GetFromBytes(prevBlockBytes)
 		if err != nil {
-			return false, err
+			return nil, cidlink.Link{}, err
 		}
-		if prevBlock.IsEncrypted != nil && *prevBlock.IsEncrypted {
-			return true, nil
+		if prevBlock.Encryption != nil {
+			prevBlockEncBytes, err := mc.encstore.AsIPLDStorage().Get(ctx, prevBlock.Encryption.Cid.KeyString())
+			if err != nil {
+				return nil, cidlink.Link{}, NewErrCouldNotFindBlock(headCid, err)
+			}
+			prevEncBlock, err := coreblock.GetEncryptionBlockFromBytes(prevBlockEncBytes)
+			if err != nil {
+				return nil, cidlink.Link{}, err
+			}
+			return &coreblock.Encryption{
+				DocID:     prevEncBlock.DocID,
+				FieldName: prevEncBlock.FieldName,
+				Key:       prevEncBlock.Key,
+			}, *prevBlock.Encryption, nil
 		}
 	}
 
-	return false, nil
+	return nil, cidlink.Link{}, nil
 }
 
-func encryptBlock(ctx context.Context, block *coreblock.Block) (*coreblock.Block, error) {
+func encryptBlock(
+	ctx context.Context,
+	block *coreblock.Block,
+	encBlock *coreblock.Encryption,
+) (*coreblock.Block, error) {
+	if block.Delta.IsComposite() {
+		return block, nil
+	}
+
 	clonedCRDT := block.Delta.Clone()
-	bytes, err := encryption.EncryptDoc(ctx, string(clonedCRDT.GetDocID()),
-		clonedCRDT.GetFieldName(), clonedCRDT.GetData())
+	_, encryptor := encryption.EnsureContextWithEncryptor(ctx)
+	bytes, err := encryptor.Encrypt(clonedCRDT.GetData(), encBlock.Key)
 	if err != nil {
 		return nil, err
 	}
 	clonedCRDT.SetData(bytes)
-	isEncrypted := true
-	return &coreblock.Block{Delta: clonedCRDT, Links: block.Links, IsEncrypted: &isEncrypted}, nil
+	return &coreblock.Block{Delta: clonedCRDT, Links: block.Links}, nil
 }
 
 // ProcessBlock merges the delta CRDT and updates the state accordingly.
-// If onlyHeads is true, it will skip merging and update only the heads.
 func (mc *MerkleClock) ProcessBlock(
 	ctx context.Context,
 	block *coreblock.Block,
 	blockLink cidlink.Link,
-	onlyHeads bool,
 ) error {
-	if !onlyHeads {
-		err := mc.crdt.Merge(ctx, block.Delta.GetDelta())
-		if err != nil {
-			return NewErrMergingDelta(blockLink.Cid, err)
-		}
+	err := mc.crdt.Merge(ctx, block.Delta.GetDelta())
+	if err != nil {
+		return NewErrMergingDelta(blockLink.Cid, err)
 	}
 
 	return mc.updateHeads(ctx, block, blockLink)
diff --git a/internal/merkle/clock/clock_test.go b/internal/merkle/clock/clock_test.go
index 7effc02fef..fe008971e4 100644
--- a/internal/merkle/clock/clock_test.go
+++ b/internal/merkle/clock/clock_test.go
@@ -37,7 +37,8 @@ func newTestMerkleClock() *MerkleClock {
 	return NewMerkleClock(
 		multistore.Headstore(),
 		multistore.Blockstore(),
-		core.HeadStoreKey{DocID: request.DocIDArgName, FieldId: "1"},
+		multistore.Encstore(),
+		core.HeadStoreKey{DocID: request.DocIDArgName, FieldID: "1"},
 		reg,
 	)
 }
@@ -46,7 +47,7 @@ func TestNewMerkleClock(t *testing.T) {
 	s := newDS()
 	multistore := datastore.MultiStoreFrom(s)
 	reg := crdt.NewLWWRegister(multistore.Rootstore(), core.CollectionSchemaVersionKey{}, core.DataStoreKey{}, "")
-	clk := NewMerkleClock(multistore.Headstore(), multistore.Blockstore(), core.HeadStoreKey{}, reg)
+	clk := NewMerkleClock(multistore.Headstore(), multistore.Blockstore(), multistore.Encstore(), core.HeadStoreKey{}, reg)
 
 	if clk.headstore != multistore.Headstore() {
 		t.Error("MerkleClock store not correctly set")
diff --git a/internal/merkle/clock/errors.go b/internal/merkle/clock/errors.go
index 9903f777a9..a20ce30731 100644
--- a/internal/merkle/clock/errors.go
+++ b/internal/merkle/clock/errors.go
@@ -26,6 +26,7 @@ const (
 	errReplacingHead          = "error replacing head"
 	errCouldNotFindBlock      = "error checking for known block "
 	errFailedToGetNextQResult = "failed to get next query result"
+	errCouldNotGetEncKey      = "could not get encryption key"
 )
 
 var (
@@ -39,6 +40,7 @@ var (
 	ErrCouldNotFindBlock      = errors.New(errCouldNotFindBlock)
 	ErrFailedToGetNextQResult = errors.New(errFailedToGetNextQResult)
 	ErrDecodingHeight         = errors.New("error decoding height")
+	ErrCouldNotGetEncKey      = errors.New(errCouldNotGetEncKey)
 )
 
 func NewErrCreatingBlock(inner error) error {
diff --git a/internal/merkle/clock/heads_test.go b/internal/merkle/clock/heads_test.go
index 94680569a8..0eb7acdd0e 100644
--- a/internal/merkle/clock/heads_test.go
+++ b/internal/merkle/clock/heads_test.go
@@ -45,7 +45,7 @@ func newHeadSet() *heads {
 
 	return NewHeadSet(
 		datastore.AsDSReaderWriter(s),
-		core.HeadStoreKey{}.WithDocID("myDocID").WithFieldId("1"),
+		core.HeadStoreKey{}.WithDocID("myDocID").WithFieldID("1"),
 	)
 }
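// A quick orientation on the clock changes above, as a hedged sketch
// (names are taken from the diff; the exact semantics of the encryption
// helpers are assumed from their names): AddDelta now picks one of three
// paths for every new block.
//
//	// 1. the user requested encryption for this update and an encryptor
//	//    is present in ctx:
//	//    encryption.ShouldEncryptDocField(ctx, fieldName) == true
//	//    -> build a coreblock.Encryption (optionally per-field), store it
//	//       in the encstore via putEncBlock, and link it from the block
//	// 2. a head block already references an encryption block:
//	//    -> reuse its DocID/FieldName/Key so the whole history stays
//	//       decryptable with the same key
//	// 3. neither applies -> the block is stored unencrypted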
diff --git a/internal/merkle/crdt/composite.go b/internal/merkle/crdt/composite.go
index 26ab4134e5..f8211b9f0a 100644
--- a/internal/merkle/crdt/composite.go
+++ b/internal/merkle/crdt/composite.go
@@ -44,7 +44,8 @@ func NewMerkleCompositeDAG(
 		fieldName,
 	)
 
-	clock := clock.NewMerkleClock(store.Headstore(), store.Blockstore(), key.ToHeadStoreKey(), compositeDag)
+	clock := clock.NewMerkleClock(store.Headstore(), store.Blockstore(), store.Encstore(),
+		key.ToHeadStoreKey(), compositeDag)
 	base := &baseMerkleCRDT{clock: clock, crdt: compositeDag}
 
 	return &MerkleCompositeDAG{
diff --git a/internal/merkle/crdt/counter.go b/internal/merkle/crdt/counter.go
index 1ff6874b08..21b26785b6 100644
--- a/internal/merkle/crdt/counter.go
+++ b/internal/merkle/crdt/counter.go
@@ -39,7 +39,7 @@ func NewMerkleCounter(
 	kind client.ScalarKind,
 ) *MerkleCounter {
 	register := crdt.NewCounter(store.Datastore(), schemaVersionKey, key, fieldName, allowDecrement, kind)
-	clk := clock.NewMerkleClock(store.Headstore(), store.Blockstore(), key.ToHeadStoreKey(), register)
+	clk := clock.NewMerkleClock(store.Headstore(), store.Blockstore(), store.Encstore(), key.ToHeadStoreKey(), register)
 	base := &baseMerkleCRDT{clock: clk, crdt: register}
 	return &MerkleCounter{
 		baseMerkleCRDT: base,
diff --git a/internal/merkle/crdt/lwwreg.go b/internal/merkle/crdt/lwwreg.go
index 11e73089bf..00c70dc4a9 100644
--- a/internal/merkle/crdt/lwwreg.go
+++ b/internal/merkle/crdt/lwwreg.go
@@ -37,7 +37,7 @@ func NewMerkleLWWRegister(
 	fieldName string,
 ) *MerkleLWWRegister {
 	register := corecrdt.NewLWWRegister(store.Datastore(), schemaVersionKey, key, fieldName)
-	clk := clock.NewMerkleClock(store.Headstore(), store.Blockstore(), key.ToHeadStoreKey(), register)
+	clk := clock.NewMerkleClock(store.Headstore(), store.Blockstore(), store.Encstore(), key.ToHeadStoreKey(), register)
 	base := &baseMerkleCRDT{clock: clk, crdt: register}
 	return &MerkleLWWRegister{
 		baseMerkleCRDT: base,
diff --git a/internal/merkle/crdt/merklecrdt.go b/internal/merkle/crdt/merklecrdt.go
index c7733be778..457ba0f200 100644
--- a/internal/merkle/crdt/merklecrdt.go
+++ b/internal/merkle/crdt/merklecrdt.go
@@ -27,6 +27,7 @@ import (
 type Stores interface {
 	Datastore() datastore.DSReaderWriter
 	Blockstore() datastore.Blockstore
+	Encstore() datastore.Blockstore
 	Headstore() datastore.DSReaderWriter
 }
 
@@ -48,9 +49,7 @@ type MerkleClock interface {
 		links ...coreblock.DAGLink,
 	) (cidlink.Link, []byte, error)
 	// ProcessBlock processes a block and updates the CRDT state.
-	// The bool argument indicates whether only heads need to be updated. It is needed in case
-	// merge should be skipped for example if the block is encrypted.
-	ProcessBlock(context.Context, *coreblock.Block, cidlink.Link, bool) error
+	ProcessBlock(ctx context.Context, block *coreblock.Block, cid cidlink.Link) error
 }
 
 // baseMerkleCRDT handles the MerkleCRDT overhead functions that aren't CRDT specific like the mutations and state
diff --git a/internal/merkle/crdt/merklecrdt_test.go b/internal/merkle/crdt/merklecrdt_test.go
index 29482b28bf..74f4814ca3 100644
--- a/internal/merkle/crdt/merklecrdt_test.go
+++ b/internal/merkle/crdt/merklecrdt_test.go
@@ -32,7 +32,7 @@ func newTestBaseMerkleCRDT() (*baseMerkleCRDT, datastore.DSReaderWriter) {
 	multistore := datastore.MultiStoreFrom(s)
 	reg := crdt.NewLWWRegister(multistore.Datastore(), core.CollectionSchemaVersionKey{}, core.DataStoreKey{}, "")
 
-	clk := clock.NewMerkleClock(multistore.Headstore(), multistore.Blockstore(), core.HeadStoreKey{}, reg)
+	clk := clock.NewMerkleClock(multistore.Headstore(), multistore.Blockstore(), multistore.Encstore(), core.HeadStoreKey{}, reg)
 	return &baseMerkleCRDT{clock: clk, crdt: reg}, multistore.Rootstore()
 }
 
diff --git a/internal/planner/commit.go b/internal/planner/commit.go
index 1e6a1f7b92..76825afe15 100644
--- a/internal/planner/commit.go
+++ b/internal/planner/commit.go
@@ -73,7 +73,7 @@ func (n *dagScanNode) Init() error {
 		if n.commitSelect.FieldID.HasValue() {
 			field := n.commitSelect.FieldID.Value()
-			dsKey = dsKey.WithFieldId(field)
+			dsKey = dsKey.WithFieldID(field)
 		}
 
 		n.spans = core.NewSpans(core.NewSpan(dsKey, dsKey.PrefixEnd()))
@@ -104,16 +104,16 @@ func (n *dagScanNode) Spans(spans core.Spans) {
 	}
 	copy(headSetSpans.Value, spans.Value)
 
-	var fieldId string
+	var fieldID string
 	if n.commitSelect.FieldID.HasValue() {
-		fieldId = n.commitSelect.FieldID.Value()
+		fieldID = n.commitSelect.FieldID.Value()
 	} else {
-		fieldId = core.COMPOSITE_NAMESPACE
+		fieldID = core.COMPOSITE_NAMESPACE
 	}
 
 	for i, span := range headSetSpans.Value {
-		if span.Start().FieldID != fieldId {
-			headSetSpans.Value[i] = core.NewSpan(span.Start().WithFieldId(fieldId), core.DataStoreKey{})
+		if span.Start().FieldID != fieldID {
+			headSetSpans.Value[i] = core.NewSpan(span.Start().WithFieldID(fieldID), core.DataStoreKey{})
 		}
 	}
 
diff --git a/net/client.go b/net/client.go
index 77eb28d4d6..9d11a968d4 100644
--- a/net/client.go
+++ b/net/client.go
@@ -37,7 +37,7 @@ func (s *server) pushLog(evt event.Update, pid peer.ID) error {
 		Cid:        evt.Cid.Bytes(),
 		SchemaRoot: []byte(evt.SchemaRoot),
 		Creator:    s.peer.host.ID().String(),
-		Log: &pb.Document_Log{
+		Log: &pb.Log{
 			Block: evt.Block,
 		},
 	}
diff --git a/net/dialer_test.go b/net/dialer_test.go
index 64060f2660..4ed8bcf68b 100644
--- a/net/dialer_test.go
+++ b/net/dialer_test.go
@@ -27,6 +27,7 @@ func TestDial_WithConnectedPeer_NoError(t *testing.T) {
 	n1, err := NewPeer(
 		ctx,
 		db1.Blockstore(),
+		db1.Encstore(),
 		db1.Events(),
 		WithListenAddresses("/ip4/127.0.0.1/tcp/0"),
 	)
@@ -35,6 +36,7 @@ func TestDial_WithConnectedPeer_NoError(t *testing.T) {
 	n2, err := NewPeer(
 		ctx,
 		db2.Blockstore(),
+		db2.Encstore(),
 		db2.Events(),
 		WithListenAddresses("/ip4/127.0.0.1/tcp/0"),
 	)
@@ -57,6 +59,7 @@ func TestDial_WithConnectedPeerAndSecondConnection_NoError(t *testing.T) {
 	n1, err := NewPeer(
 		ctx,
 		db1.Blockstore(),
+		db1.Encstore(),
 		db1.Events(),
 		WithListenAddresses("/ip4/127.0.0.1/tcp/0"),
 	)
@@ -65,6 +68,7 @@ func TestDial_WithConnectedPeerAndSecondConnection_NoError(t *testing.T) {
 	n2, err := NewPeer(
 		ctx,
 		db2.Blockstore(),
+		db2.Encstore(),
 		db2.Events(),
 		WithListenAddresses("/ip4/127.0.0.1/tcp/0"),
 	)
@@ -90,6 +94,7 @@ func TestDial_WithConnectedPeerAndSecondConnectionWithConnectionShutdown_Closing
 	n1, err := NewPeer(
 		ctx,
 		db1.Blockstore(),
+		db1.Encstore(),
 		db1.Events(),
 		WithListenAddresses("/ip4/127.0.0.1/tcp/0"),
 	)
@@ -98,6 +103,7 @@ func TestDial_WithConnectedPeerAndSecondConnectionWithConnectionShutdown_Closing
 	n2, err := NewPeer(
 		ctx,
 		db2.Blockstore(),
+		db2.Encstore(),
 		db2.Events(),
 		WithListenAddresses("/ip4/127.0.0.1/tcp/0"),
 	)
diff --git a/net/errors.go b/net/errors.go
index 615f1088ef..3a21c8e5c1 100644
--- a/net/errors.go
+++ b/net/errors.go
@@ -22,6 +22,9 @@ const (
 	errPublishingToDocIDTopic   = "can't publish log %s for docID %s"
 	errPublishingToSchemaTopic  = "can't publish log %s for schema %s"
 	errCheckingForExistingBlock = "failed to check for existing block"
+	errRequestingEncryptionKeys = "failed to request encryption keys with %v"
+	errTopicAlreadyExist        = "topic with name \"%s\" already exists"
+	errTopicDoesNotExist        = "topic with name \"%s\" does not exist"
 )
 
 var (
@@ -49,6 +52,10 @@ func NewErrPublishingToSchemaTopic(inner error, cid, docID string, kv ...errors.
 	return errors.Wrap(fmt.Sprintf(errPublishingToSchemaTopic, cid, docID), inner, kv...)
 }
 
-func NewErrCheckingForExistingBlock(inner error, cid string) error {
-	return errors.Wrap(errCheckingForExistingBlock, inner, errors.NewKV("cid", cid))
+func NewErrTopicAlreadyExist(topic string) error {
+	return errors.New(fmt.Sprintf(errTopicAlreadyExist, topic))
+}
+
+func NewErrTopicDoesNotExist(topic string) error {
+	return errors.New(fmt.Sprintf(errTopicDoesNotExist, topic))
 }
diff --git a/net/pb/Makefile b/net/pb/Makefile
index 233665c334..30b0e92dfa 100644
--- a/net/pb/Makefile
+++ b/net/pb/Makefile
@@ -1,8 +1,13 @@
 PB = $(wildcard *.proto)
 GO = $(PB:.proto=.pb.go)
 
+PROTOC_GEN_GO := $(shell which protoc-gen-go)
+PROTOC_GEN_GO_GRPC := $(shell which protoc-gen-go-grpc)
+PROTOC_GEN_GO_VTPROTO := $(shell which protoc-gen-go-vtproto)
+
 all: $(GO)
 
+.PHONY: deps
 deps:
 	go install google.golang.org/protobuf/cmd/protoc-gen-go@latest
 	go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@latest
@@ -10,14 +15,14 @@ deps:
 
 %.pb.go: %.proto
 	protoc \
-	--go_out=. --plugin protoc-gen-go="${GOBIN}/protoc-gen-go" \
-	--go-grpc_out=. --plugin protoc-gen-go-grpc="${GOBIN}/protoc-gen-go-grpc" \
-	--go-vtproto_out=. --plugin protoc-gen-go-vtproto="${GOBIN}/protoc-gen-go-vtproto" \
+	-I. \
+	--go_out=. --plugin protoc-gen-go="$(PROTOC_GEN_GO)" \
+	--go-grpc_out=. --plugin protoc-gen-go-grpc="$(PROTOC_GEN_GO_GRPC)" \
+	--go-vtproto_out=. --plugin protoc-gen-go-vtproto="$(PROTOC_GEN_GO_VTPROTO)" \
 	--go-vtproto_opt=features=marshal+unmarshal+size \
 	$<
 
+.PHONY: clean
 clean:
 	rm -f *.pb.go
 	rm -f *pb_test.go
-
-.PHONY: clean
\ No newline at end of file
diff --git a/net/pb/net.pb.go b/net/pb/net.pb.go
index a9b5a2162d..dbac6829d0 100644
--- a/net/pb/net.pb.go
+++ b/net/pb/net.pb.go
@@ -1,7 +1,7 @@
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.31.0
-// 	protoc        v4.25.1
+// 	protoc-gen-go v1.34.2
+// 	protoc        v5.27.1
 // source: net.proto
 
 package net_pb
@@ -21,19 +21,17 @@ const (
 )
 
 // Log represents a thread log.
-type Document struct {
+type Log struct {
 	state         protoimpl.MessageState
 	sizeCache     protoimpl.SizeCache
 	unknownFields protoimpl.UnknownFields
 
-	// ID of the document.
-	DocID []byte `protobuf:"bytes,1,opt,name=docID,proto3" json:"docID,omitempty"`
-	// head of the log.
- Head []byte `protobuf:"bytes,4,opt,name=head,proto3" json:"head,omitempty"` + // block is the top-level node's raw data as an ipld.Block. + Block []byte `protobuf:"bytes,1,opt,name=block,proto3" json:"block,omitempty"` } -func (x *Document) Reset() { - *x = Document{} +func (x *Log) Reset() { + *x = Log{} if protoimpl.UnsafeEnabled { mi := &file_net_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -41,13 +39,13 @@ func (x *Document) Reset() { } } -func (x *Document) String() string { +func (x *Log) String() string { return protoimpl.X.MessageStringOf(x) } -func (*Document) ProtoMessage() {} +func (*Log) ProtoMessage() {} -func (x *Document) ProtoReflect() protoreflect.Message { +func (x *Log) ProtoReflect() protoreflect.Message { mi := &file_net_proto_msgTypes[0] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -59,21 +57,14 @@ func (x *Document) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use Document.ProtoReflect.Descriptor instead. -func (*Document) Descriptor() ([]byte, []int) { +// Deprecated: Use Log.ProtoReflect.Descriptor instead. +func (*Log) Descriptor() ([]byte, []int) { return file_net_proto_rawDescGZIP(), []int{0} } -func (x *Document) GetDocID() []byte { - if x != nil { - return x.DocID - } - return nil -} - -func (x *Document) GetHead() []byte { +func (x *Log) GetBlock() []byte { if x != nil { - return x.Head + return x.Block } return nil } @@ -353,14 +344,21 @@ func (x *PushLogRequest) GetBody() *PushLogRequest_Body { return nil } -type GetHeadLogRequest struct { +// FetchEncryptionKeyRequest is a request to receive a doc encryption key +// from a peer that holds it. +type FetchEncryptionKeyRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields + + // links is the list of cid links of the blocks containing encryption keys. + Links [][]byte `protobuf:"bytes,1,rep,name=links,proto3" json:"links,omitempty"` + // ephemeralPublicKey is an ephemeral public of the requesting peer for deriving shared secret + EphemeralPublicKey []byte `protobuf:"bytes,2,opt,name=ephemeralPublicKey,proto3" json:"ephemeralPublicKey,omitempty"` } -func (x *GetHeadLogRequest) Reset() { - *x = GetHeadLogRequest{} +func (x *FetchEncryptionKeyRequest) Reset() { + *x = FetchEncryptionKeyRequest{} if protoimpl.UnsafeEnabled { mi := &file_net_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -368,13 +366,13 @@ func (x *GetHeadLogRequest) Reset() { } } -func (x *GetHeadLogRequest) String() string { +func (x *FetchEncryptionKeyRequest) String() string { return protoimpl.X.MessageStringOf(x) } -func (*GetHeadLogRequest) ProtoMessage() {} +func (*FetchEncryptionKeyRequest) ProtoMessage() {} -func (x *GetHeadLogRequest) ProtoReflect() protoreflect.Message { +func (x *FetchEncryptionKeyRequest) ProtoReflect() protoreflect.Message { mi := &file_net_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -386,19 +384,43 @@ func (x *GetHeadLogRequest) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use GetHeadLogRequest.ProtoReflect.Descriptor instead. -func (*GetHeadLogRequest) Descriptor() ([]byte, []int) { +// Deprecated: Use FetchEncryptionKeyRequest.ProtoReflect.Descriptor instead. 
+func (*FetchEncryptionKeyRequest) Descriptor() ([]byte, []int) { return file_net_proto_rawDescGZIP(), []int{8} } -type PushLogReply struct { +func (x *FetchEncryptionKeyRequest) GetLinks() [][]byte { + if x != nil { + return x.Links + } + return nil +} + +func (x *FetchEncryptionKeyRequest) GetEphemeralPublicKey() []byte { + if x != nil { + return x.EphemeralPublicKey + } + return nil +} + +// FetchEncryptionKeyReply is a response to FetchEncryptionKeyRequest request +// by a peer that holds the requested doc encryption key. +type FetchEncryptionKeyReply struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields + + // links is the list of cid links of the blocks containing encryption keys. + Links [][]byte `protobuf:"bytes,1,rep,name=links,proto3" json:"links,omitempty"` + // blocks is the list of blocks containing encryption keys. The order of blocks should match the order of links. + // Every block is encrypted and contains a nonce. + Blocks [][]byte `protobuf:"bytes,2,rep,name=blocks,proto3" json:"blocks,omitempty"` + // ephemeralPublicKey is an ephemeral public of the responding peer for deriving shared secret + EphemeralPublicKey []byte `protobuf:"bytes,3,opt,name=ephemeralPublicKey,proto3" json:"ephemeralPublicKey,omitempty"` } -func (x *PushLogReply) Reset() { - *x = PushLogReply{} +func (x *FetchEncryptionKeyReply) Reset() { + *x = FetchEncryptionKeyReply{} if protoimpl.UnsafeEnabled { mi := &file_net_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -406,13 +428,13 @@ func (x *PushLogReply) Reset() { } } -func (x *PushLogReply) String() string { +func (x *FetchEncryptionKeyReply) String() string { return protoimpl.X.MessageStringOf(x) } -func (*PushLogReply) ProtoMessage() {} +func (*FetchEncryptionKeyReply) ProtoMessage() {} -func (x *PushLogReply) ProtoReflect() protoreflect.Message { +func (x *FetchEncryptionKeyReply) ProtoReflect() protoreflect.Message { mi := &file_net_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -424,19 +446,40 @@ func (x *PushLogReply) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use PushLogReply.ProtoReflect.Descriptor instead. -func (*PushLogReply) Descriptor() ([]byte, []int) { +// Deprecated: Use FetchEncryptionKeyReply.ProtoReflect.Descriptor instead. 
+func (*FetchEncryptionKeyReply) Descriptor() ([]byte, []int) { return file_net_proto_rawDescGZIP(), []int{9} } -type GetHeadLogReply struct { +func (x *FetchEncryptionKeyReply) GetLinks() [][]byte { + if x != nil { + return x.Links + } + return nil +} + +func (x *FetchEncryptionKeyReply) GetBlocks() [][]byte { + if x != nil { + return x.Blocks + } + return nil +} + +func (x *FetchEncryptionKeyReply) GetEphemeralPublicKey() []byte { + if x != nil { + return x.EphemeralPublicKey + } + return nil +} + +type GetHeadLogRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields } -func (x *GetHeadLogReply) Reset() { - *x = GetHeadLogReply{} +func (x *GetHeadLogRequest) Reset() { + *x = GetHeadLogRequest{} if protoimpl.UnsafeEnabled { mi := &file_net_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -444,13 +487,13 @@ func (x *GetHeadLogReply) Reset() { } } -func (x *GetHeadLogReply) String() string { +func (x *GetHeadLogRequest) String() string { return protoimpl.X.MessageStringOf(x) } -func (*GetHeadLogReply) ProtoMessage() {} +func (*GetHeadLogRequest) ProtoMessage() {} -func (x *GetHeadLogReply) ProtoReflect() protoreflect.Message { +func (x *GetHeadLogRequest) ProtoReflect() protoreflect.Message { mi := &file_net_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -462,23 +505,19 @@ func (x *GetHeadLogReply) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use GetHeadLogReply.ProtoReflect.Descriptor instead. -func (*GetHeadLogReply) Descriptor() ([]byte, []int) { +// Deprecated: Use GetHeadLogRequest.ProtoReflect.Descriptor instead. +func (*GetHeadLogRequest) Descriptor() ([]byte, []int) { return file_net_proto_rawDescGZIP(), []int{10} } -// Record is a thread record containing link data. -type Document_Log struct { +type PushLogReply struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - - // block is the top-level node's raw data as an ipld.Block. - Block []byte `protobuf:"bytes,1,opt,name=block,proto3" json:"block,omitempty"` } -func (x *Document_Log) Reset() { - *x = Document_Log{} +func (x *PushLogReply) Reset() { + *x = PushLogReply{} if protoimpl.UnsafeEnabled { mi := &file_net_proto_msgTypes[11] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -486,13 +525,13 @@ func (x *Document_Log) Reset() { } } -func (x *Document_Log) String() string { +func (x *PushLogReply) String() string { return protoimpl.X.MessageStringOf(x) } -func (*Document_Log) ProtoMessage() {} +func (*PushLogReply) ProtoMessage() {} -func (x *Document_Log) ProtoReflect() protoreflect.Message { +func (x *PushLogReply) ProtoReflect() protoreflect.Message { mi := &file_net_proto_msgTypes[11] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -504,16 +543,47 @@ func (x *Document_Log) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use Document_Log.ProtoReflect.Descriptor instead. -func (*Document_Log) Descriptor() ([]byte, []int) { - return file_net_proto_rawDescGZIP(), []int{0, 0} +// Deprecated: Use PushLogReply.ProtoReflect.Descriptor instead. 
+func (*PushLogReply) Descriptor() ([]byte, []int) { + return file_net_proto_rawDescGZIP(), []int{11} } -func (x *Document_Log) GetBlock() []byte { - if x != nil { - return x.Block +type GetHeadLogReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *GetHeadLogReply) Reset() { + *x = GetHeadLogReply{} + if protoimpl.UnsafeEnabled { + mi := &file_net_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } - return nil +} + +func (x *GetHeadLogReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetHeadLogReply) ProtoMessage() {} + +func (x *GetHeadLogReply) ProtoReflect() protoreflect.Message { + mi := &file_net_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetHeadLogReply.ProtoReflect.Descriptor instead. +func (*GetHeadLogReply) Descriptor() ([]byte, []int) { + return file_net_proto_rawDescGZIP(), []int{12} } type PushLogRequest_Body struct { @@ -530,13 +600,13 @@ type PushLogRequest_Body struct { // creator is the PeerID of the peer that created the log. Creator string `protobuf:"bytes,4,opt,name=creator,proto3" json:"creator,omitempty"` // log hold the block that represent version of the document. - Log *Document_Log `protobuf:"bytes,6,opt,name=log,proto3" json:"log,omitempty"` + Log *Log `protobuf:"bytes,6,opt,name=log,proto3" json:"log,omitempty"` } func (x *PushLogRequest_Body) Reset() { *x = PushLogRequest_Body{} if protoimpl.UnsafeEnabled { - mi := &file_net_proto_msgTypes[12] + mi := &file_net_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -549,7 +619,7 @@ func (x *PushLogRequest_Body) String() string { func (*PushLogRequest_Body) ProtoMessage() {} func (x *PushLogRequest_Body) ProtoReflect() protoreflect.Message { - mi := &file_net_proto_msgTypes[12] + mi := &file_net_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -593,7 +663,7 @@ func (x *PushLogRequest_Body) GetCreator() string { return "" } -func (x *PushLogRequest_Body) GetLog() *Document_Log { +func (x *PushLogRequest_Body) GetLog() *Log { if x != nil { return x.Log } @@ -604,59 +674,68 @@ var File_net_proto protoreflect.FileDescriptor var file_net_proto_rawDesc = []byte{ 0x0a, 0x09, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x06, 0x6e, 0x65, 0x74, - 0x2e, 0x70, 0x62, 0x22, 0x51, 0x0a, 0x08, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x12, - 0x14, 0x0a, 0x05, 0x64, 0x6f, 0x63, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, - 0x64, 0x6f, 0x63, 0x49, 0x44, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x65, 0x61, 0x64, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x0c, 0x52, 0x04, 0x68, 0x65, 0x61, 0x64, 0x1a, 0x1b, 0x0a, 0x03, 0x4c, 0x6f, 0x67, - 0x12, 0x14, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, - 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x22, 0x14, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, - 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x12, 0x0a, 0x10, - 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, - 0x22, 0x15, 0x0a, 0x13, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 
0x70, 0x68, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x13, 0x0a, 0x11, 0x50, 0x75, 0x73, 0x68, 0x44, - 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x0f, 0x0a, 0x0d, - 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x0d, 0x0a, - 0x0b, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0xd4, 0x01, 0x0a, - 0x0e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x2f, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, - 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x42, 0x6f, 0x64, 0x79, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, - 0x1a, 0x90, 0x01, 0x0a, 0x04, 0x42, 0x6f, 0x64, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x64, 0x6f, 0x63, - 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x64, 0x6f, 0x63, 0x49, 0x44, 0x12, - 0x10, 0x0a, 0x03, 0x63, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x03, 0x63, 0x69, - 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x6f, 0x6f, 0x74, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x6f, 0x6f, - 0x74, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x07, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x26, 0x0a, 0x03, 0x6c, - 0x6f, 0x67, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, - 0x62, 0x2e, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x4c, 0x6f, 0x67, 0x52, 0x03, - 0x6c, 0x6f, 0x67, 0x22, 0x13, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, - 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x0e, 0x0a, 0x0c, 0x50, 0x75, 0x73, 0x68, - 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x11, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x48, - 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x32, 0xd1, 0x02, 0x0a, 0x07, - 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x45, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x44, 0x6f, - 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x12, 0x1a, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, - 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x44, - 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x48, - 0x0a, 0x0c, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x12, 0x1b, - 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, - 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x6e, 0x65, + 0x2e, 0x70, 0x62, 0x22, 0x1b, 0x0a, 0x03, 0x4c, 0x6f, 0x67, 0x12, 0x14, 0x0a, 0x05, 0x62, 0x6c, + 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, + 0x22, 0x14, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x12, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, + 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x15, 0x0a, 0x13, 0x50, 0x75, + 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x22, 0x13, 0x0a, 0x11, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, + 0x68, 0x52, 
0x65, 0x70, 0x6c, 0x79, 0x22, 0x0f, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x0d, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x4c, 0x6f, + 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0xcb, 0x01, 0x0a, 0x0e, 0x50, 0x75, 0x73, 0x68, 0x4c, + 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, 0x04, 0x62, 0x6f, 0x64, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, + 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, + 0x42, 0x6f, 0x64, 0x79, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x1a, 0x87, 0x01, 0x0a, 0x04, 0x42, + 0x6f, 0x64, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x64, 0x6f, 0x63, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0c, 0x52, 0x05, 0x64, 0x6f, 0x63, 0x49, 0x44, 0x12, 0x10, 0x0a, 0x03, 0x63, 0x69, 0x64, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x03, 0x63, 0x69, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, + 0x0a, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x63, + 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x72, + 0x65, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x1d, 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, 0x06, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x4c, 0x6f, 0x67, 0x52, + 0x03, 0x6c, 0x6f, 0x67, 0x22, 0x61, 0x0a, 0x19, 0x46, 0x65, 0x74, 0x63, 0x68, 0x45, 0x6e, 0x63, + 0x72, 0x79, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6e, 0x6b, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0c, + 0x52, 0x05, 0x6c, 0x69, 0x6e, 0x6b, 0x73, 0x12, 0x2e, 0x0a, 0x12, 0x65, 0x70, 0x68, 0x65, 0x6d, + 0x65, 0x72, 0x61, 0x6c, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0c, 0x52, 0x12, 0x65, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x50, 0x75, + 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x22, 0x77, 0x0a, 0x17, 0x46, 0x65, 0x74, 0x63, 0x68, + 0x45, 0x6e, 0x63, 0x72, 0x79, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x70, + 0x6c, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6e, 0x6b, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, + 0x0c, 0x52, 0x05, 0x6c, 0x69, 0x6e, 0x6b, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x62, 0x6c, 0x6f, 0x63, + 0x6b, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x06, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x73, + 0x12, 0x2e, 0x0a, 0x12, 0x65, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x50, 0x75, 0x62, + 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x12, 0x65, 0x70, + 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, + 0x22, 0x13, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x0e, 0x0a, 0x0c, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, + 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x11, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, + 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x32, 0xd1, 0x02, 0x0a, 0x07, 0x53, 0x65, 0x72, + 0x76, 0x69, 0x63, 0x65, 0x12, 0x45, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, 0x47, 0x72, + 0x61, 0x70, 0x68, 0x12, 0x1a, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, + 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x18, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 
0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, 0x47, + 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x48, 0x0a, 0x0c, 0x50, + 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x12, 0x1b, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, - 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x36, 0x0a, 0x06, 0x47, 0x65, 0x74, 0x4c, - 0x6f, 0x67, 0x12, 0x15, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x4c, - 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x13, 0x2e, 0x6e, 0x65, 0x74, 0x2e, - 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, - 0x12, 0x39, 0x0a, 0x07, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x12, 0x16, 0x2e, 0x6e, 0x65, - 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x1a, 0x14, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, - 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x42, 0x0a, 0x0a, 0x47, - 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x12, 0x19, 0x2e, 0x6e, 0x65, 0x74, 0x2e, - 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, - 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x42, - 0x0a, 0x5a, 0x08, 0x2f, 0x3b, 0x6e, 0x65, 0x74, 0x5f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x33, + 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, + 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, + 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x36, 0x0a, 0x06, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x12, + 0x15, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x13, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, + 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x39, 0x0a, + 0x07, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x12, 0x16, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, + 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x14, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, + 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x42, 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x48, + 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x12, 0x19, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, + 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x17, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x48, 0x65, + 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x42, 0x0a, 0x5a, 0x08, + 0x2f, 0x3b, 0x6e, 0x65, 0x74, 0x5f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -671,35 +750,36 @@ func file_net_proto_rawDescGZIP() []byte { return file_net_proto_rawDescData } -var file_net_proto_msgTypes = make([]protoimpl.MessageInfo, 13) -var file_net_proto_goTypes = []interface{}{ - (*Document)(nil), // 0: net.pb.Document - (*GetDocGraphRequest)(nil), // 1: net.pb.GetDocGraphRequest - (*GetDocGraphReply)(nil), // 2: net.pb.GetDocGraphReply - (*PushDocGraphRequest)(nil), // 3: 
net.pb.PushDocGraphRequest - (*PushDocGraphReply)(nil), // 4: net.pb.PushDocGraphReply - (*GetLogRequest)(nil), // 5: net.pb.GetLogRequest - (*GetLogReply)(nil), // 6: net.pb.GetLogReply - (*PushLogRequest)(nil), // 7: net.pb.PushLogRequest - (*GetHeadLogRequest)(nil), // 8: net.pb.GetHeadLogRequest - (*PushLogReply)(nil), // 9: net.pb.PushLogReply - (*GetHeadLogReply)(nil), // 10: net.pb.GetHeadLogReply - (*Document_Log)(nil), // 11: net.pb.Document.Log - (*PushLogRequest_Body)(nil), // 12: net.pb.PushLogRequest.Body +var file_net_proto_msgTypes = make([]protoimpl.MessageInfo, 14) +var file_net_proto_goTypes = []any{ + (*Log)(nil), // 0: net.pb.Log + (*GetDocGraphRequest)(nil), // 1: net.pb.GetDocGraphRequest + (*GetDocGraphReply)(nil), // 2: net.pb.GetDocGraphReply + (*PushDocGraphRequest)(nil), // 3: net.pb.PushDocGraphRequest + (*PushDocGraphReply)(nil), // 4: net.pb.PushDocGraphReply + (*GetLogRequest)(nil), // 5: net.pb.GetLogRequest + (*GetLogReply)(nil), // 6: net.pb.GetLogReply + (*PushLogRequest)(nil), // 7: net.pb.PushLogRequest + (*FetchEncryptionKeyRequest)(nil), // 8: net.pb.FetchEncryptionKeyRequest + (*FetchEncryptionKeyReply)(nil), // 9: net.pb.FetchEncryptionKeyReply + (*GetHeadLogRequest)(nil), // 10: net.pb.GetHeadLogRequest + (*PushLogReply)(nil), // 11: net.pb.PushLogReply + (*GetHeadLogReply)(nil), // 12: net.pb.GetHeadLogReply + (*PushLogRequest_Body)(nil), // 13: net.pb.PushLogRequest.Body } var file_net_proto_depIdxs = []int32{ - 12, // 0: net.pb.PushLogRequest.body:type_name -> net.pb.PushLogRequest.Body - 11, // 1: net.pb.PushLogRequest.Body.log:type_name -> net.pb.Document.Log + 13, // 0: net.pb.PushLogRequest.body:type_name -> net.pb.PushLogRequest.Body + 0, // 1: net.pb.PushLogRequest.Body.log:type_name -> net.pb.Log 1, // 2: net.pb.Service.GetDocGraph:input_type -> net.pb.GetDocGraphRequest 3, // 3: net.pb.Service.PushDocGraph:input_type -> net.pb.PushDocGraphRequest 5, // 4: net.pb.Service.GetLog:input_type -> net.pb.GetLogRequest 7, // 5: net.pb.Service.PushLog:input_type -> net.pb.PushLogRequest - 8, // 6: net.pb.Service.GetHeadLog:input_type -> net.pb.GetHeadLogRequest + 10, // 6: net.pb.Service.GetHeadLog:input_type -> net.pb.GetHeadLogRequest 2, // 7: net.pb.Service.GetDocGraph:output_type -> net.pb.GetDocGraphReply 4, // 8: net.pb.Service.PushDocGraph:output_type -> net.pb.PushDocGraphReply 6, // 9: net.pb.Service.GetLog:output_type -> net.pb.GetLogReply - 9, // 10: net.pb.Service.PushLog:output_type -> net.pb.PushLogReply - 10, // 11: net.pb.Service.GetHeadLog:output_type -> net.pb.GetHeadLogReply + 11, // 10: net.pb.Service.PushLog:output_type -> net.pb.PushLogReply + 12, // 11: net.pb.Service.GetHeadLog:output_type -> net.pb.GetHeadLogReply 7, // [7:12] is the sub-list for method output_type 2, // [2:7] is the sub-list for method input_type 2, // [2:2] is the sub-list for extension type_name @@ -713,8 +793,8 @@ func file_net_proto_init() { return } if !protoimpl.UnsafeEnabled { - file_net_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Document); i { + file_net_proto_msgTypes[0].Exporter = func(v any, i int) any { + switch v := v.(*Log); i { case 0: return &v.state case 1: @@ -725,7 +805,7 @@ func file_net_proto_init() { return nil } } - file_net_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + file_net_proto_msgTypes[1].Exporter = func(v any, i int) any { switch v := v.(*GetDocGraphRequest); i { case 0: return &v.state @@ -737,7 +817,7 @@ func file_net_proto_init() { return nil } } - 
file_net_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + file_net_proto_msgTypes[2].Exporter = func(v any, i int) any { switch v := v.(*GetDocGraphReply); i { case 0: return &v.state @@ -749,7 +829,7 @@ func file_net_proto_init() { return nil } } - file_net_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + file_net_proto_msgTypes[3].Exporter = func(v any, i int) any { switch v := v.(*PushDocGraphRequest); i { case 0: return &v.state @@ -761,7 +841,7 @@ func file_net_proto_init() { return nil } } - file_net_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + file_net_proto_msgTypes[4].Exporter = func(v any, i int) any { switch v := v.(*PushDocGraphReply); i { case 0: return &v.state @@ -773,7 +853,7 @@ func file_net_proto_init() { return nil } } - file_net_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + file_net_proto_msgTypes[5].Exporter = func(v any, i int) any { switch v := v.(*GetLogRequest); i { case 0: return &v.state @@ -785,7 +865,7 @@ func file_net_proto_init() { return nil } } - file_net_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + file_net_proto_msgTypes[6].Exporter = func(v any, i int) any { switch v := v.(*GetLogReply); i { case 0: return &v.state @@ -797,7 +877,7 @@ func file_net_proto_init() { return nil } } - file_net_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + file_net_proto_msgTypes[7].Exporter = func(v any, i int) any { switch v := v.(*PushLogRequest); i { case 0: return &v.state @@ -809,8 +889,8 @@ func file_net_proto_init() { return nil } } - file_net_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetHeadLogRequest); i { + file_net_proto_msgTypes[8].Exporter = func(v any, i int) any { + switch v := v.(*FetchEncryptionKeyRequest); i { case 0: return &v.state case 1: @@ -821,8 +901,8 @@ func file_net_proto_init() { return nil } } - file_net_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PushLogReply); i { + file_net_proto_msgTypes[9].Exporter = func(v any, i int) any { + switch v := v.(*FetchEncryptionKeyReply); i { case 0: return &v.state case 1: @@ -833,8 +913,20 @@ func file_net_proto_init() { return nil } } - file_net_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetHeadLogReply); i { + file_net_proto_msgTypes[10].Exporter = func(v any, i int) any { + switch v := v.(*GetHeadLogRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_net_proto_msgTypes[11].Exporter = func(v any, i int) any { + switch v := v.(*PushLogReply); i { case 0: return &v.state case 1: @@ -845,8 +937,8 @@ func file_net_proto_init() { return nil } } - file_net_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Document_Log); i { + file_net_proto_msgTypes[12].Exporter = func(v any, i int) any { + switch v := v.(*GetHeadLogReply); i { case 0: return &v.state case 1: @@ -857,7 +949,7 @@ func file_net_proto_init() { return nil } } - file_net_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + file_net_proto_msgTypes[13].Exporter = func(v any, i int) any { switch v := v.(*PushLogRequest_Body); i { case 0: return &v.state @@ -876,7 +968,7 @@ func file_net_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_net_proto_rawDesc, NumEnums: 0, - NumMessages: 13, + 
NumMessages:   14,
 			NumExtensions: 0,
 			NumServices:   1,
 		},
diff --git a/net/pb/net.proto b/net/pb/net.proto
index 5b0ee35dfb..8dc8fe8a46 100644
--- a/net/pb/net.proto
+++ b/net/pb/net.proto
@@ -4,17 +4,9 @@ package net.pb;
 option go_package = "/;net_pb";
 
 // Log represents a thread log.
-message Document {
-  // ID of the document.
-  bytes docID = 1;
-  // head of the log.
-  bytes head = 4;
-
-  // Record is a thread record containing link data.
-  message Log {
-    // block is the top-level node's raw data as an ipld.Block.
-    bytes block = 1;
-  }
+message Log {
+  // block is the top-level node's raw data as an ipld.Block.
+  bytes block = 1;
 }
 
 message GetDocGraphRequest {}
@@ -42,10 +34,31 @@ message PushLogRequest {
     // creator is the PeerID of the peer that created the log.
    string creator = 4;
    // log hold the block that represent version of the document.
-    Document.Log log = 6;
+    Log log = 6;
  }
 }
 
+// FetchEncryptionKeyRequest is a request to receive a doc encryption key
+// from a peer that holds it.
+message FetchEncryptionKeyRequest {
+  // links is the list of cid links of the blocks containing encryption keys.
+  repeated bytes links = 1;
+  // ephemeralPublicKey is an ephemeral public key of the requesting peer for deriving a shared secret
+  bytes ephemeralPublicKey = 2;
+}
+
+// FetchEncryptionKeyReply is a response to a FetchEncryptionKeyRequest
+// by a peer that holds the requested doc encryption key.
+message FetchEncryptionKeyReply {
+  // links is the list of cid links of the blocks containing encryption keys.
+  repeated bytes links = 1;
+  // blocks is the list of blocks containing encryption keys. The order of blocks should match the order of links.
+  // Every block is encrypted and contains a nonce.
+  repeated bytes blocks = 2;
+  // ephemeralPublicKey is an ephemeral public key of the responding peer for deriving a shared secret
+  bytes ephemeralPublicKey = 3;
+}
+
 message GetHeadLogRequest {}
 
 message PushLogReply {}
diff --git a/net/pb/net_grpc.pb.go b/net/pb/net_grpc.pb.go
index 75ae790ab6..84564d6bec 100644
--- a/net/pb/net_grpc.pb.go
+++ b/net/pb/net_grpc.pb.go
@@ -1,7 +1,7 @@
 // Code generated by protoc-gen-go-grpc. DO NOT EDIT.
 // versions:
-// - protoc-gen-go-grpc v1.3.0
-// - protoc             v4.25.1
+// - protoc-gen-go-grpc v1.4.0
+// - protoc             v5.27.1
 // source: net.proto
 
 package net_pb
@@ -15,8 +15,8 @@ import (
 
 // This is a compile-time assertion to ensure that this generated file
 // is compatible with the grpc package it is being compiled against.
-// Requires gRPC-Go v1.32.0 or later.
-const _ = grpc.SupportPackageIsVersion7
+// Requires gRPC-Go v1.62.0 or later.
+const _ = grpc.SupportPackageIsVersion8
 
 const (
 	Service_GetDocGraph_FullMethodName = "/net.pb.Service/GetDocGraph"
@@ -29,6 +29,8 @@ const (
 // ServiceClient is the client API for Service service.
 //
 // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
+//
+// Service is the peer-to-peer network API for document sync
 type ServiceClient interface {
 	// GetDocGraph from this peer.
 	GetDocGraph(ctx context.Context, in *GetDocGraphRequest, opts ...grpc.CallOption) (*GetDocGraphReply, error)
@@ -51,8 +53,9 @@ func NewServiceClient(cc grpc.ClientConnInterface) ServiceClient {
 }
 
 func (c *serviceClient) GetDocGraph(ctx context.Context, in *GetDocGraphRequest, opts ...grpc.CallOption) (*GetDocGraphReply, error) {
+	cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(GetDocGraphReply) - err := c.cc.Invoke(ctx, Service_GetDocGraph_FullMethodName, in, out, opts...) + err := c.cc.Invoke(ctx, Service_GetDocGraph_FullMethodName, in, out, cOpts...) if err != nil { return nil, err } @@ -60,8 +63,9 @@ func (c *serviceClient) GetDocGraph(ctx context.Context, in *GetDocGraphRequest, } func (c *serviceClient) PushDocGraph(ctx context.Context, in *PushDocGraphRequest, opts ...grpc.CallOption) (*PushDocGraphReply, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) out := new(PushDocGraphReply) - err := c.cc.Invoke(ctx, Service_PushDocGraph_FullMethodName, in, out, opts...) + err := c.cc.Invoke(ctx, Service_PushDocGraph_FullMethodName, in, out, cOpts...) if err != nil { return nil, err } @@ -69,8 +73,9 @@ func (c *serviceClient) PushDocGraph(ctx context.Context, in *PushDocGraphReques } func (c *serviceClient) GetLog(ctx context.Context, in *GetLogRequest, opts ...grpc.CallOption) (*GetLogReply, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) out := new(GetLogReply) - err := c.cc.Invoke(ctx, Service_GetLog_FullMethodName, in, out, opts...) + err := c.cc.Invoke(ctx, Service_GetLog_FullMethodName, in, out, cOpts...) if err != nil { return nil, err } @@ -78,8 +83,9 @@ func (c *serviceClient) GetLog(ctx context.Context, in *GetLogRequest, opts ...g } func (c *serviceClient) PushLog(ctx context.Context, in *PushLogRequest, opts ...grpc.CallOption) (*PushLogReply, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) out := new(PushLogReply) - err := c.cc.Invoke(ctx, Service_PushLog_FullMethodName, in, out, opts...) + err := c.cc.Invoke(ctx, Service_PushLog_FullMethodName, in, out, cOpts...) if err != nil { return nil, err } @@ -87,8 +93,9 @@ func (c *serviceClient) PushLog(ctx context.Context, in *PushLogRequest, opts .. } func (c *serviceClient) GetHeadLog(ctx context.Context, in *GetHeadLogRequest, opts ...grpc.CallOption) (*GetHeadLogReply, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) out := new(GetHeadLogReply) - err := c.cc.Invoke(ctx, Service_GetHeadLog_FullMethodName, in, out, opts...) + err := c.cc.Invoke(ctx, Service_GetHeadLog_FullMethodName, in, out, cOpts...) if err != nil { return nil, err } @@ -98,6 +105,8 @@ func (c *serviceClient) GetHeadLog(ctx context.Context, in *GetHeadLogRequest, o // ServiceServer is the server API for Service service. // All implementations must embed UnimplementedServiceServer // for forward compatibility +// +// Service is the peer-to-peer network API for document sync type ServiceServer interface { // GetDocGraph from this peer. GetDocGraph(context.Context, *GetDocGraphRequest) (*GetDocGraphReply, error) diff --git a/net/pb/net_vtproto.pb.go b/net/pb/net_vtproto.pb.go index 2bae8f83f3..bf1c93e8e8 100644 --- a/net/pb/net_vtproto.pb.go +++ b/net/pb/net_vtproto.pb.go @@ -1,14 +1,14 @@ // Code generated by protoc-gen-go-vtproto. DO NOT EDIT. 
-// protoc-gen-go-vtproto version: v0.5.0 +// protoc-gen-go-vtproto version: v0.6.0 // source: net.proto package net_pb import ( fmt "fmt" + protohelpers "github.com/planetscale/vtprotobuf/protohelpers" protoimpl "google.golang.org/protobuf/runtime/protoimpl" io "io" - bits "math/bits" ) const ( @@ -18,7 +18,7 @@ const ( _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) -func (m *Document_Log) MarshalVT() (dAtA []byte, err error) { +func (m *Log) MarshalVT() (dAtA []byte, err error) { if m == nil { return nil, nil } @@ -31,12 +31,12 @@ func (m *Document_Log) MarshalVT() (dAtA []byte, err error) { return dAtA[:n], nil } -func (m *Document_Log) MarshalToVT(dAtA []byte) (int, error) { +func (m *Log) MarshalToVT(dAtA []byte) (int, error) { size := m.SizeVT() return m.MarshalToSizedBufferVT(dAtA[:size]) } -func (m *Document_Log) MarshalToSizedBufferVT(dAtA []byte) (int, error) { +func (m *Log) MarshalToSizedBufferVT(dAtA []byte) (int, error) { if m == nil { return 0, nil } @@ -51,54 +51,7 @@ func (m *Document_Log) MarshalToSizedBufferVT(dAtA []byte) (int, error) { if len(m.Block) > 0 { i -= len(m.Block) copy(dAtA[i:], m.Block) - i = encodeVarint(dAtA, i, uint64(len(m.Block))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *Document) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *Document) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *Document) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - if len(m.Head) > 0 { - i -= len(m.Head) - copy(dAtA[i:], m.Head) - i = encodeVarint(dAtA, i, uint64(len(m.Head))) - i-- - dAtA[i] = 0x22 - } - if len(m.DocID) > 0 { - i -= len(m.DocID) - copy(dAtA[i:], m.DocID) - i = encodeVarint(dAtA, i, uint64(len(m.DocID))) + i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.Block))) i-- dAtA[i] = 0xa } @@ -339,35 +292,35 @@ func (m *PushLogRequest_Body) MarshalToSizedBufferVT(dAtA []byte) (int, error) { return 0, err } i -= size - i = encodeVarint(dAtA, i, uint64(size)) + i = protohelpers.EncodeVarint(dAtA, i, uint64(size)) i-- dAtA[i] = 0x32 } if len(m.Creator) > 0 { i -= len(m.Creator) copy(dAtA[i:], m.Creator) - i = encodeVarint(dAtA, i, uint64(len(m.Creator))) + i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.Creator))) i-- dAtA[i] = 0x22 } if len(m.SchemaRoot) > 0 { i -= len(m.SchemaRoot) copy(dAtA[i:], m.SchemaRoot) - i = encodeVarint(dAtA, i, uint64(len(m.SchemaRoot))) + i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.SchemaRoot))) i-- dAtA[i] = 0x1a } if len(m.Cid) > 0 { i -= len(m.Cid) copy(dAtA[i:], m.Cid) - i = encodeVarint(dAtA, i, uint64(len(m.Cid))) + i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.Cid))) i-- dAtA[i] = 0x12 } if len(m.DocID) > 0 { i -= len(m.DocID) copy(dAtA[i:], m.DocID) - i = encodeVarint(dAtA, i, uint64(len(m.DocID))) + i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.DocID))) i-- dAtA[i] = 0xa } @@ -410,13 +363,120 @@ func (m *PushLogRequest) MarshalToSizedBufferVT(dAtA []byte) (int, error) { return 0, err } i -= size - i = encodeVarint(dAtA, i, uint64(size)) + i = protohelpers.EncodeVarint(dAtA, i, uint64(size)) i-- dAtA[i] = 0xa } 
return len(dAtA) - i, nil } +func (m *FetchEncryptionKeyRequest) MarshalVT() (dAtA []byte, err error) { + if m == nil { + return nil, nil + } + size := m.SizeVT() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBufferVT(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *FetchEncryptionKeyRequest) MarshalToVT(dAtA []byte) (int, error) { + size := m.SizeVT() + return m.MarshalToSizedBufferVT(dAtA[:size]) +} + +func (m *FetchEncryptionKeyRequest) MarshalToSizedBufferVT(dAtA []byte) (int, error) { + if m == nil { + return 0, nil + } + i := len(dAtA) + _ = i + var l int + _ = l + if m.unknownFields != nil { + i -= len(m.unknownFields) + copy(dAtA[i:], m.unknownFields) + } + if len(m.EphemeralPublicKey) > 0 { + i -= len(m.EphemeralPublicKey) + copy(dAtA[i:], m.EphemeralPublicKey) + i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.EphemeralPublicKey))) + i-- + dAtA[i] = 0x12 + } + if len(m.Links) > 0 { + for iNdEx := len(m.Links) - 1; iNdEx >= 0; iNdEx-- { + i -= len(m.Links[iNdEx]) + copy(dAtA[i:], m.Links[iNdEx]) + i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.Links[iNdEx]))) + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + +func (m *FetchEncryptionKeyReply) MarshalVT() (dAtA []byte, err error) { + if m == nil { + return nil, nil + } + size := m.SizeVT() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBufferVT(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *FetchEncryptionKeyReply) MarshalToVT(dAtA []byte) (int, error) { + size := m.SizeVT() + return m.MarshalToSizedBufferVT(dAtA[:size]) +} + +func (m *FetchEncryptionKeyReply) MarshalToSizedBufferVT(dAtA []byte) (int, error) { + if m == nil { + return 0, nil + } + i := len(dAtA) + _ = i + var l int + _ = l + if m.unknownFields != nil { + i -= len(m.unknownFields) + copy(dAtA[i:], m.unknownFields) + } + if len(m.EphemeralPublicKey) > 0 { + i -= len(m.EphemeralPublicKey) + copy(dAtA[i:], m.EphemeralPublicKey) + i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.EphemeralPublicKey))) + i-- + dAtA[i] = 0x1a + } + if len(m.Blocks) > 0 { + for iNdEx := len(m.Blocks) - 1; iNdEx >= 0; iNdEx-- { + i -= len(m.Blocks[iNdEx]) + copy(dAtA[i:], m.Blocks[iNdEx]) + i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.Blocks[iNdEx]))) + i-- + dAtA[i] = 0x12 + } + } + if len(m.Links) > 0 { + for iNdEx := len(m.Links) - 1; iNdEx >= 0; iNdEx-- { + i -= len(m.Links[iNdEx]) + copy(dAtA[i:], m.Links[iNdEx]) + i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.Links[iNdEx]))) + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + func (m *GetHeadLogRequest) MarshalVT() (dAtA []byte, err error) { if m == nil { return nil, nil @@ -516,18 +576,7 @@ func (m *GetHeadLogReply) MarshalToSizedBufferVT(dAtA []byte) (int, error) { return len(dAtA) - i, nil } -func encodeVarint(dAtA []byte, offset int, v uint64) int { - offset -= sov(v) - base := offset - for v >= 1<<7 { - dAtA[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ - } - dAtA[offset] = uint8(v) - return base -} -func (m *Document_Log) SizeVT() (n int) { +func (m *Log) SizeVT() (n int) { if m == nil { return 0 } @@ -535,25 +584,7 @@ func (m *Document_Log) SizeVT() (n int) { _ = l l = len(m.Block) if l > 0 { - n += 1 + l + sov(uint64(l)) - } - n += len(m.unknownFields) - return n -} - -func (m *Document) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.DocID) - if l > 0 { - n += 1 + l + sov(uint64(l)) - } - l = len(m.Head) - if l > 0 { - n += 1 + l + 
sov(uint64(l)) + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) } n += len(m.unknownFields) return n @@ -627,23 +658,23 @@ func (m *PushLogRequest_Body) SizeVT() (n int) { _ = l l = len(m.DocID) if l > 0 { - n += 1 + l + sov(uint64(l)) + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) } l = len(m.Cid) if l > 0 { - n += 1 + l + sov(uint64(l)) + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) } l = len(m.SchemaRoot) if l > 0 { - n += 1 + l + sov(uint64(l)) + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) } l = len(m.Creator) if l > 0 { - n += 1 + l + sov(uint64(l)) + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) } if m.Log != nil { l = m.Log.SizeVT() - n += 1 + l + sov(uint64(l)) + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) } n += len(m.unknownFields) return n @@ -657,7 +688,53 @@ func (m *PushLogRequest) SizeVT() (n int) { _ = l if m.Body != nil { l = m.Body.SizeVT() - n += 1 + l + sov(uint64(l)) + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) + } + n += len(m.unknownFields) + return n +} + +func (m *FetchEncryptionKeyRequest) SizeVT() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.Links) > 0 { + for _, b := range m.Links { + l = len(b) + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) + } + } + l = len(m.EphemeralPublicKey) + if l > 0 { + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) + } + n += len(m.unknownFields) + return n +} + +func (m *FetchEncryptionKeyReply) SizeVT() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.Links) > 0 { + for _, b := range m.Links { + l = len(b) + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) + } + } + if len(m.Blocks) > 0 { + for _, b := range m.Blocks { + l = len(b) + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) + } + } + l = len(m.EphemeralPublicKey) + if l > 0 { + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) } n += len(m.unknownFields) return n @@ -693,13 +770,7 @@ func (m *GetHeadLogReply) SizeVT() (n int) { return n } -func sov(x uint64) (n int) { - return (bits.Len64(x|1) + 6) / 7 -} -func soz(x uint64) (n int) { - return sov(uint64((x << 1) ^ uint64((int64(x) >> 63)))) -} -func (m *Document_Log) UnmarshalVT(dAtA []byte) error { +func (m *Log) UnmarshalVT(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -707,7 +778,7 @@ func (m *Document_Log) UnmarshalVT(dAtA []byte) error { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -722,10 +793,10 @@ func (m *Document_Log) UnmarshalVT(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: Document_Log: wiretype end group for non-group") + return fmt.Errorf("proto: Log: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: Document_Log: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: Log: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: @@ -735,7 +806,7 @@ func (m *Document_Log) UnmarshalVT(dAtA []byte) error { var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -748,11 +819,11 @@ func (m *Document_Log) UnmarshalVT(dAtA []byte) error { } } if byteLen < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } postIndex := iNdEx + byteLen if postIndex < 0 { - return ErrInvalidLength + 
return protohelpers.ErrInvalidLength } if postIndex > l { return io.ErrUnexpectedEOF @@ -764,131 +835,12 @@ func (m *Document_Log) UnmarshalVT(dAtA []byte) error { iNdEx = postIndex default: iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) + skippy, err := protohelpers.Skip(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *Document) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: Document: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: Document: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field DocID", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.DocID = append(m.DocID[:0], dAtA[iNdEx:postIndex]...) - if m.DocID == nil { - m.DocID = []byte{} - } - iNdEx = postIndex - case 4: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Head", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Head = append(m.Head[:0], dAtA[iNdEx:postIndex]...) 
- if m.Head == nil { - m.Head = []byte{} - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF @@ -911,7 +863,7 @@ func (m *GetDocGraphRequest) UnmarshalVT(dAtA []byte) error { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -934,12 +886,12 @@ func (m *GetDocGraphRequest) UnmarshalVT(dAtA []byte) error { switch fieldNum { default: iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) + skippy, err := protohelpers.Skip(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF @@ -962,7 +914,7 @@ func (m *GetDocGraphReply) UnmarshalVT(dAtA []byte) error { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -985,12 +937,12 @@ func (m *GetDocGraphReply) UnmarshalVT(dAtA []byte) error { switch fieldNum { default: iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) + skippy, err := protohelpers.Skip(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF @@ -1013,7 +965,7 @@ func (m *PushDocGraphRequest) UnmarshalVT(dAtA []byte) error { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -1036,12 +988,12 @@ func (m *PushDocGraphRequest) UnmarshalVT(dAtA []byte) error { switch fieldNum { default: iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) + skippy, err := protohelpers.Skip(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF @@ -1064,7 +1016,7 @@ func (m *PushDocGraphReply) UnmarshalVT(dAtA []byte) error { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -1087,12 +1039,12 @@ func (m *PushDocGraphReply) UnmarshalVT(dAtA []byte) error { switch fieldNum { default: iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) + skippy, err := protohelpers.Skip(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF @@ -1115,7 +1067,7 @@ func (m *GetLogRequest) UnmarshalVT(dAtA []byte) error { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -1138,12 +1090,12 @@ func (m *GetLogRequest) UnmarshalVT(dAtA []byte) error { switch fieldNum { default: iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) + skippy, err := protohelpers.Skip(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength + return 
protohelpers.ErrInvalidLength } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF @@ -1166,7 +1118,7 @@ func (m *GetLogReply) UnmarshalVT(dAtA []byte) error { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -1189,12 +1141,12 @@ func (m *GetLogReply) UnmarshalVT(dAtA []byte) error { switch fieldNum { default: iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) + skippy, err := protohelpers.Skip(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF @@ -1217,7 +1169,7 @@ func (m *PushLogRequest_Body) UnmarshalVT(dAtA []byte) error { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -1245,7 +1197,7 @@ func (m *PushLogRequest_Body) UnmarshalVT(dAtA []byte) error { var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -1258,11 +1210,11 @@ func (m *PushLogRequest_Body) UnmarshalVT(dAtA []byte) error { } } if byteLen < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } postIndex := iNdEx + byteLen if postIndex < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } if postIndex > l { return io.ErrUnexpectedEOF @@ -1279,7 +1231,7 @@ func (m *PushLogRequest_Body) UnmarshalVT(dAtA []byte) error { var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -1292,11 +1244,11 @@ func (m *PushLogRequest_Body) UnmarshalVT(dAtA []byte) error { } } if byteLen < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } postIndex := iNdEx + byteLen if postIndex < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } if postIndex > l { return io.ErrUnexpectedEOF @@ -1313,7 +1265,7 @@ func (m *PushLogRequest_Body) UnmarshalVT(dAtA []byte) error { var byteLen int for shift := uint(0); ; shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -1326,11 +1278,11 @@ func (m *PushLogRequest_Body) UnmarshalVT(dAtA []byte) error { } } if byteLen < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } postIndex := iNdEx + byteLen if postIndex < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } if postIndex > l { return io.ErrUnexpectedEOF @@ -1347,7 +1299,7 @@ func (m *PushLogRequest_Body) UnmarshalVT(dAtA []byte) error { var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -1361,11 +1313,11 @@ func (m *PushLogRequest_Body) UnmarshalVT(dAtA []byte) error { } intStringLen := int(stringLen) if intStringLen < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } postIndex := iNdEx + intStringLen if postIndex < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } if postIndex > l { return io.ErrUnexpectedEOF @@ -1379,7 +1331,7 @@ func (m *PushLogRequest_Body) UnmarshalVT(dAtA []byte) error { var msglen int for shift := uint(0); ; 
shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -1392,17 +1344,17 @@ func (m *PushLogRequest_Body) UnmarshalVT(dAtA []byte) error { } } if msglen < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } postIndex := iNdEx + msglen if postIndex < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } if postIndex > l { return io.ErrUnexpectedEOF } if m.Log == nil { - m.Log = &Document_Log{} + m.Log = &Log{} } if err := m.Log.UnmarshalVT(dAtA[iNdEx:postIndex]); err != nil { return err @@ -1410,12 +1362,12 @@ func (m *PushLogRequest_Body) UnmarshalVT(dAtA []byte) error { iNdEx = postIndex default: iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) + skippy, err := protohelpers.Skip(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF @@ -1438,7 +1390,7 @@ func (m *PushLogRequest) UnmarshalVT(dAtA []byte) error { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -1466,7 +1418,7 @@ func (m *PushLogRequest) UnmarshalVT(dAtA []byte) error { var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -1479,11 +1431,11 @@ func (m *PushLogRequest) UnmarshalVT(dAtA []byte) error { } } if msglen < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } postIndex := iNdEx + msglen if postIndex < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } if postIndex > l { return io.ErrUnexpectedEOF @@ -1497,12 +1449,12 @@ func (m *PushLogRequest) UnmarshalVT(dAtA []byte) error { iNdEx = postIndex default: iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) + skippy, err := protohelpers.Skip(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF @@ -1517,7 +1469,7 @@ func (m *PushLogRequest) UnmarshalVT(dAtA []byte) error { } return nil } -func (m *GetHeadLogRequest) UnmarshalVT(dAtA []byte) error { +func (m *FetchEncryptionKeyRequest) UnmarshalVT(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -1525,7 +1477,7 @@ func (m *GetHeadLogRequest) UnmarshalVT(dAtA []byte) error { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -1540,20 +1492,86 @@ func (m *GetHeadLogRequest) UnmarshalVT(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: GetHeadLogRequest: wiretype end group for non-group") + return fmt.Errorf("proto: FetchEncryptionKeyRequest: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: GetHeadLogRequest: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: FetchEncryptionKeyRequest: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Links", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 
protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return protohelpers.ErrInvalidLength + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return protohelpers.ErrInvalidLength + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Links = append(m.Links, make([]byte, postIndex-iNdEx)) + copy(m.Links[len(m.Links)-1], dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field EphemeralPublicKey", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return protohelpers.ErrInvalidLength + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return protohelpers.ErrInvalidLength + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.EphemeralPublicKey = append(m.EphemeralPublicKey[:0], dAtA[iNdEx:postIndex]...) + if m.EphemeralPublicKey == nil { + m.EphemeralPublicKey = []byte{} + } + iNdEx = postIndex default: iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) + skippy, err := protohelpers.Skip(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF @@ -1568,7 +1586,7 @@ func (m *GetHeadLogRequest) UnmarshalVT(dAtA []byte) error { } return nil } -func (m *PushLogReply) UnmarshalVT(dAtA []byte) error { +func (m *FetchEncryptionKeyReply) UnmarshalVT(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -1576,7 +1594,7 @@ func (m *PushLogReply) UnmarshalVT(dAtA []byte) error { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -1591,20 +1609,118 @@ func (m *PushLogReply) UnmarshalVT(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: PushLogReply: wiretype end group for non-group") + return fmt.Errorf("proto: FetchEncryptionKeyReply: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: PushLogReply: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: FetchEncryptionKeyReply: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Links", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return protohelpers.ErrInvalidLength + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return protohelpers.ErrInvalidLength + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Links = append(m.Links, make([]byte, postIndex-iNdEx)) + copy(m.Links[len(m.Links)-1], dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Blocks", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 
7 { + if shift >= 64 { + return protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return protohelpers.ErrInvalidLength + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return protohelpers.ErrInvalidLength + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Blocks = append(m.Blocks, make([]byte, postIndex-iNdEx)) + copy(m.Blocks[len(m.Blocks)-1], dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field EphemeralPublicKey", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return protohelpers.ErrInvalidLength + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return protohelpers.ErrInvalidLength + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.EphemeralPublicKey = append(m.EphemeralPublicKey[:0], dAtA[iNdEx:postIndex]...) + if m.EphemeralPublicKey == nil { + m.EphemeralPublicKey = []byte{} + } + iNdEx = postIndex default: iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) + skippy, err := protohelpers.Skip(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF @@ -1619,7 +1735,7 @@ func (m *PushLogReply) UnmarshalVT(dAtA []byte) error { } return nil } -func (m *GetHeadLogReply) UnmarshalVT(dAtA []byte) error { +func (m *GetHeadLogRequest) UnmarshalVT(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -1627,7 +1743,7 @@ func (m *GetHeadLogReply) UnmarshalVT(dAtA []byte) error { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { - return ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { return io.ErrUnexpectedEOF @@ -1642,20 +1758,20 @@ func (m *GetHeadLogReply) UnmarshalVT(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: GetHeadLogReply: wiretype end group for non-group") + return fmt.Errorf("proto: GetHeadLogRequest: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: GetHeadLogReply: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: GetHeadLogRequest: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { default: iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) + skippy, err := protohelpers.Skip(dAtA[iNdEx:]) if err != nil { return err } if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength + return protohelpers.ErrInvalidLength } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF @@ -1670,88 +1786,105 @@ func (m *GetHeadLogReply) UnmarshalVT(dAtA []byte) error { } return nil } - -func skip(dAtA []byte) (n int, err error) { +func (m *PushLogReply) UnmarshalVT(dAtA []byte) error { l := len(dAtA) iNdEx := 0 - depth := 0 for iNdEx < l { + preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { - return 0, ErrIntOverflow + return protohelpers.ErrIntOverflow } if iNdEx >= l { - return 0, io.ErrUnexpectedEOF + return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ - wire |= (uint64(b) & 0x7F) << shift 
+ wire |= uint64(b&0x7F) << shift if b < 0x80 { break } } + fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) - switch wireType { - case 0: - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflow - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - iNdEx++ - if dAtA[iNdEx-1] < 0x80 { - break - } + if wireType == 4 { + return fmt.Errorf("proto: PushLogReply: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: PushLogReply: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + default: + iNdEx = preIndex + skippy, err := protohelpers.Skip(dAtA[iNdEx:]) + if err != nil { + return err } - case 1: - iNdEx += 8 - case 2: - var length int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflow - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - length |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return protohelpers.ErrInvalidLength } - if length < 0 { - return 0, ErrInvalidLength + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF } - iNdEx += length - case 3: - depth++ - case 4: - if depth == 0 { - return 0, ErrUnexpectedEndOfGroup + m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *GetHeadLogReply) UnmarshalVT(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break } - depth-- - case 5: - iNdEx += 4 - default: - return 0, fmt.Errorf("proto: illegal wireType %d", wireType) } - if iNdEx < 0 { - return 0, ErrInvalidLength + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: GetHeadLogReply: wiretype end group for non-group") } - if depth == 0 { - return iNdEx, nil + if fieldNum <= 0 { + return fmt.Errorf("proto: GetHeadLogReply: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + default: + iNdEx = preIndex + skippy, err := protohelpers.Skip(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return protohelpers.ErrInvalidLength + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) + iNdEx += skippy } } - return 0, io.ErrUnexpectedEOF -} -var ( - ErrInvalidLength = fmt.Errorf("proto: negative length found during unmarshaling") - ErrIntOverflow = fmt.Errorf("proto: integer overflow") - ErrUnexpectedEndOfGroup = fmt.Errorf("proto: unexpected end of group") -) + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} diff --git a/net/peer.go b/net/peer.go index 301c080edb..24976ed388 100644 --- a/net/peer.go +++ b/net/peer.go @@ -21,7 +21,6 @@ import ( "github.com/ipfs/boxo/bitswap/network" "github.com/ipfs/boxo/blockservice" "github.com/ipfs/boxo/bootstrap" - exchange "github.com/ipfs/boxo/exchange" blocks "github.com/ipfs/go-block-format" "github.com/ipfs/go-cid" gostream "github.com/libp2p/go-libp2p-gostream" @@ -46,6 +45,7 @@ import ( // to the underlying DefraDB instance. 
type Peer struct { blockstore datastore.Blockstore + encstore datastore.Blockstore bus *event.Bus updateSub *event.Subscription @@ -61,7 +61,6 @@ type Peer struct { p2pRPC *grpc.Server // rpc server over the P2P network // peer DAG service - exch exchange.Interface bserv blockservice.BlockService bootCloser io.Closer @@ -71,6 +70,7 @@ type Peer struct { func NewPeer( ctx context.Context, blockstore datastore.Blockstore, + encstore datastore.Blockstore, bus *event.Bus, opts ...NodeOpt, ) (p *Peer, err error) { @@ -83,7 +83,7 @@ func NewPeer( } }() - if blockstore == nil { + if blockstore == nil || encstore == nil { return nil, ErrNilDB } @@ -120,12 +120,12 @@ func NewPeer( host: h, dht: ddht, blockstore: blockstore, + encstore: encstore, ctx: ctx, cancel: cancel, bus: bus, p2pRPC: grpc.NewServer(options.GRPCServerOptions...), bserv: blockservice.New(blockstore, bswap), - exch: bswap, } if options.EnablePubSub { @@ -151,7 +151,7 @@ func NewPeer( return nil, err } - p2plistener, err := gostream.Listen(h, corenet.Protocol) + p2pListener, err := gostream.Listen(h, corenet.Protocol) if err != nil { return nil, err } @@ -164,7 +164,7 @@ func NewPeer( // register the P2P gRPC server go func() { pb.RegisterServiceServer(p.p2pRPC, p.server) - if err := p.p2pRPC.Serve(p2plistener); err != nil && + if err := p.p2pRPC.Serve(p2pListener); err != nil && !errors.Is(err, grpc.ErrServerStopped) { log.ErrorE("Fatal P2P RPC server error", err) } @@ -270,7 +270,7 @@ func (p *Peer) RegisterNewDocument( schemaRoot string, ) error { // register topic - err := p.server.addPubSubTopic(docID.String(), !p.server.hasPubSubTopic(schemaRoot)) + err := p.server.addPubSubTopic(docID.String(), !p.server.hasPubSubTopic(schemaRoot), nil) if err != nil { log.ErrorE( "Failed to create new pubsub topic", @@ -287,7 +287,7 @@ func (p *Peer) RegisterNewDocument( Cid: c.Bytes(), SchemaRoot: []byte(schemaRoot), Creator: p.host.ID().String(), - Log: &pb.Document_Log{ + Log: &pb.Log{ Block: rawBlock, }, }, @@ -325,7 +325,7 @@ func (p *Peer) handleDocUpdateLog(evt event.Update) error { Cid: evt.Cid.Bytes(), SchemaRoot: []byte(evt.SchemaRoot), Creator: p.host.ID().String(), - Log: &pb.Document_Log{ + Log: &pb.Log{ Block: evt.Block, }, } @@ -408,3 +408,7 @@ func (p *Peer) PeerInfo() peer.AddrInfo { Addrs: p.host.Network().ListenAddresses(), } } + +func (p *Peer) Server() *server { + return p.server +} diff --git a/net/peer_test.go b/net/peer_test.go index 5322d32f6e..10af3a3ab4 100644 --- a/net/peer_test.go +++ b/net/peer_test.go @@ -73,6 +73,7 @@ func newTestPeer(ctx context.Context, t *testing.T) (client.DB, *Peer) { n, err := NewPeer( ctx, db.Blockstore(), + db.Encstore(), db.Events(), WithListenAddresses(randomMultiaddr), ) @@ -87,14 +88,14 @@ func TestNewPeer_NoError(t *testing.T) { db, err := db.NewDB(ctx, store, acp.NoACP, nil) require.NoError(t, err) defer db.Close() - p, err := NewPeer(ctx, db.Blockstore(), db.Events()) + p, err := NewPeer(ctx, db.Blockstore(), db.Encstore(), db.Events()) require.NoError(t, err) p.Close() } func TestNewPeer_NoDB_NilDBError(t *testing.T) { ctx := context.Background() - _, err := NewPeer(ctx, nil, nil, nil) + _, err := NewPeer(ctx, nil, nil, nil, nil) require.ErrorIs(t, err, ErrNilDB) } @@ -113,6 +114,7 @@ func TestStart_WithKnownPeer_NoError(t *testing.T) { n1, err := NewPeer( ctx, db1.Blockstore(), + db1.Encstore(), db1.Events(), WithListenAddresses("/ip4/127.0.0.1/tcp/0"), ) @@ -121,6 +123,7 @@ func TestStart_WithKnownPeer_NoError(t *testing.T) { n2, err := NewPeer( ctx, db2.Blockstore(), + 
		db2.Encstore(),
 		db2.Events(),
 		WithListenAddresses("/ip4/127.0.0.1/tcp/0"),
 	)
@@ -385,6 +388,7 @@ func TestNewPeer_WithEnableRelay_NoError(t *testing.T) {
 	n, err := NewPeer(
 		context.Background(),
 		db.Blockstore(),
+		db.Encstore(),
 		db.Events(),
 		WithEnableRelay(true),
 	)
@@ -402,6 +406,7 @@ func TestNewPeer_NoPubSub_NoError(t *testing.T) {
 	n, err := NewPeer(
 		context.Background(),
 		db.Blockstore(),
+		db.Encstore(),
 		db.Events(),
 		WithEnablePubSub(false),
 	)
@@ -420,6 +425,7 @@ func TestNewPeer_WithEnablePubSub_NoError(t *testing.T) {
 	n, err := NewPeer(
 		ctx,
 		db.Blockstore(),
+		db.Encstore(),
 		db.Events(),
 		WithEnablePubSub(true),
 	)
@@ -439,6 +445,7 @@ func TestNodeClose_NoError(t *testing.T) {
 	n, err := NewPeer(
 		context.Background(),
 		db.Blockstore(),
+		db.Encstore(),
 		db.Events(),
 	)
 	require.NoError(t, err)
@@ -455,6 +462,7 @@ func TestListenAddrs_WithListenAddresses_NoError(t *testing.T) {
 	n, err := NewPeer(
 		context.Background(),
 		db.Blockstore(),
+		db.Encstore(),
 		db.Events(),
 		WithListenAddresses("/ip4/127.0.0.1/tcp/0"),
 	)
@@ -473,6 +481,7 @@ func TestPeer_WithBootstrapPeers_NoError(t *testing.T) {
 	n, err := NewPeer(
 		context.Background(),
 		db.Blockstore(),
+		db.Encstore(),
 		db.Events(),
 		WithBootstrapPeers("/ip4/127.0.0.1/tcp/6666/p2p/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ"),
 	)
diff --git a/net/server.go b/net/server.go
index 2f129d19cf..42ff15f5fb 100644
--- a/net/server.go
+++ b/net/server.go
@@ -34,7 +34,7 @@ import (
 	pb "github.com/sourcenetwork/defradb/net/pb"
 )
 
-// Server is the request/response instance for all P2P RPC communication.
+// server is the request/response instance for all P2P RPC communication.
 // Implements gRPC server. See net/pb/net.proto for corresponding service definitions.
 //
 // Specifically, server handles the push/get request/response aspects of the RPC service
@@ -144,9 +144,9 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL
 		corelog.Any("DocID", docID.String()))
 
 	// Once processed, subscribe to the DocID topic on the pubsub network unless we already
-	// suscribe to the collection.
+	// subscribe to the collection.
 	if !s.hasPubSubTopic(string(req.Body.SchemaRoot)) {
-		err = s.addPubSubTopic(docID.String(), true)
+		err = s.addPubSubTopic(docID.String(), true, nil)
 		if err != nil {
 			return nil, err
 		}
@@ -172,7 +172,9 @@ func (s *server) GetHeadLog(
 }
 
 // addPubSubTopic subscribes to a topic on the pubsub network
-func (s *server) addPubSubTopic(topic string, subscribe bool) error {
+// A custom message handler can be provided to handle incoming messages. If not provided,
+// the default message handler will be used.
+func (s *server) addPubSubTopic(topic string, subscribe bool, handler rpc.MessageHandler) error {
 	if s.peer.ps == nil {
 		return nil
 	}
@@ -200,8 +202,12 @@ func (s *server) addPubSubTopic(topic string, subscribe bool) error {
 		return err
 	}
 
+	if handler == nil {
+		handler = s.pubSubMessageHandler
+	}
+
 	t.SetEventHandler(s.pubSubEventHandler)
-	t.SetMessageHandler(s.pubSubMessageHandler)
+	t.SetMessageHandler(handler)
 	s.topics[topic] = pubsubTopic{
 		Topic:      t,
 		subscribed: subscribe,
@@ -209,6 +215,10 @@ func (s *server) addPubSubTopic(topic string, subscribe bool) error {
 	return nil
 }
 
+func (s *server) AddPubSubTopic(topicName string, handler rpc.MessageHandler) error {
+	return s.addPubSubTopic(topicName, true, handler)
+}
+
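A minimal usage sketch (not part of the patch itself) of the custom-handler plumbing added above: the topic name and handler body are hypothetical, and rpc.MessageHandler is assumed to follow the go-libp2p-pubsub-rpc signature func(peer.ID, string, []byte) ([]byte, error).

	// Obtain the pubsub server now exposed on Peer (see Server() in net/peer.go above)
	// and subscribe with a custom handler instead of the default pubSubMessageHandler.
	handler := func(from libpeer.ID, topic string, msg []byte) ([]byte, error) {
		// Decode msg, act on it, and optionally return a response payload.
		return nil, nil
	}
	if err := p.Server().AddPubSubTopic("hypothetical-topic", handler); err != nil {
		return err
	}
	// Raw payloads can then be published via SendPubSubMessage (added further below),
	// which returns a channel of peer responses.

 // hasPubSubTopic checks if we are subscribed to a topic.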
func (s *server) hasPubSubTopic(topic string) bool { s.mu.Lock() @@ -269,7 +279,7 @@ func (s *server) publishLog(ctx context.Context, topic string, req *pb.PushLogRe t, ok := s.topics[topic] s.mu.Unlock() if !ok { - err := s.addPubSubTopic(topic, false) + err := s.addPubSubTopic(topic, false, nil) if err != nil { return errors.Wrap(fmt.Sprintf("failed to created single use topic %s", topic), err) } @@ -278,7 +288,7 @@ func (s *server) publishLog(ctx context.Context, topic string, req *pb.PushLogRe data, err := req.MarshalVT() if err != nil { - return errors.Wrap("failed marshling pubsub message", err) + return errors.Wrap("failed to marshal pubsub message", err) } _, err = t.Publish(ctx, data, rpc.WithIgnoreResponse(true)) @@ -347,7 +357,7 @@ func peerIDFromContext(ctx context.Context) (libpeer.ID, error) { func (s *server) updatePubSubTopics(evt event.P2PTopic) { for _, topic := range evt.ToAdd { - err := s.addPubSubTopic(topic, true) + err := s.addPubSubTopic(topic, true, nil) if err != nil { log.ErrorE("Failed to add pubsub topic.", err) } @@ -409,3 +419,17 @@ func (s *server) updateReplicators(evt event.Replicator) { } s.peer.bus.Publish(event.NewMessage(event.ReplicatorCompletedName, nil)) } + +func (s *server) SendPubSubMessage( + ctx context.Context, + topic string, + data []byte, +) (<-chan rpc.Response, error) { + s.mu.Lock() + t, ok := s.topics[topic] + s.mu.Unlock() + if !ok { + return nil, NewErrTopicDoesNotExist(topic) + } + return t.Publish(ctx, data) +} diff --git a/net/server_test.go b/net/server_test.go index 0e23e3b019..11a13604b1 100644 --- a/net/server_test.go +++ b/net/server_test.go @@ -75,7 +75,7 @@ func TestGetHeadLog(t *testing.T) { } func getHead(ctx context.Context, db client.DB, docID client.DocID) (cid.Cid, error) { - prefix := core.DataStoreKeyFromDocID(docID).ToHeadStoreKey().WithFieldId(core.COMPOSITE_NAMESPACE).ToString() + prefix := core.DataStoreKeyFromDocID(docID).ToHeadStoreKey().WithFieldID(core.COMPOSITE_NAMESPACE).ToString() results, err := db.Headstore().Query(ctx, query.Query{Prefix: prefix}) if err != nil { return cid.Undef, err @@ -132,7 +132,7 @@ func TestPushLog(t *testing.T) { Cid: headCID.Bytes(), SchemaRoot: []byte(col.SchemaRoot()), Creator: p.PeerID().String(), - Log: &net_pb.Document_Log{ + Log: &net_pb.Log{ Block: b, }, }, diff --git a/node/node.go b/node/node.go index 5660d0d77c..d5e62bc1bb 100644 --- a/node/node.go +++ b/node/node.go @@ -17,10 +17,12 @@ import ( gohttp "net/http" "github.com/sourcenetwork/corelog" + "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/http" "github.com/sourcenetwork/defradb/internal/db" + "github.com/sourcenetwork/defradb/internal/kms" "github.com/sourcenetwork/defradb/net" ) @@ -42,6 +44,7 @@ type Options struct { disableP2P bool disableAPI bool enableDevelopment bool + kmsType immutable.Option[kms.ServiceType] } // DefaultOptions returns options with default settings. @@ -66,6 +69,12 @@ func WithDisableAPI(disable bool) NodeOpt { } } +func WithKMS(kms kms.ServiceType) NodeOpt { + return func(o *Options) { + o.kmsType = immutable.Some(kms) + } +} + // WithEnableDevelopment sets the enable development mode flag. func WithEnableDevelopment(enable bool) NodeOpt { return func(o *Options) { @@ -75,9 +84,10 @@ func WithEnableDevelopment(enable bool) NodeOpt { // Node is a DefraDB instance with optional sub-systems. 
type Node struct { - DB client.DB - Peer *net.Peer - Server *http.Server + DB client.DB + Peer *net.Peer + Server *http.Server + kmsService kms.Service options *Options dbOpts []db.Option @@ -141,10 +151,25 @@ func (n *Node) Start(ctx context.Context) error { if !n.options.disableP2P { // setup net node - n.Peer, err = net.NewPeer(ctx, n.DB.Blockstore(), n.DB.Events(), n.netOpts...) + n.Peer, err = net.NewPeer(ctx, n.DB.Blockstore(), n.DB.Encstore(), n.DB.Events(), n.netOpts...) if err != nil { return err } + if n.options.kmsType.HasValue() { + switch n.options.kmsType.Value() { + case kms.PubSubServiceType: + n.kmsService, err = kms.NewPubSubService( + ctx, + n.Peer.PeerID(), + n.Peer.Server(), + n.DB.Events(), + n.DB.Encstore(), + ) + } + if err != nil { + return err + } + } } if !n.options.disableAPI { diff --git a/tests/bench/query/planner/utils.go b/tests/bench/query/planner/utils.go index 43669fa53f..b9e077867b 100644 --- a/tests/bench/query/planner/utils.go +++ b/tests/bench/query/planner/utils.go @@ -135,7 +135,7 @@ type dummyTxn struct{} func (*dummyTxn) Rootstore() datastore.DSReaderWriter { return nil } func (*dummyTxn) Datastore() datastore.DSReaderWriter { return nil } -func (*dummyTxn) Encstore() datastore.DSReaderWriter { return nil } +func (*dummyTxn) Encstore() datastore.Blockstore { return nil } func (*dummyTxn) Headstore() datastore.DSReaderWriter { return nil } func (*dummyTxn) Peerstore() datastore.DSBatching { return nil } func (*dummyTxn) Blockstore() datastore.Blockstore { return nil } diff --git a/tests/clients/cli/wrapper.go b/tests/clients/cli/wrapper.go index fbfc0e5e6a..7a2f28fd4a 100644 --- a/tests/clients/cli/wrapper.go +++ b/tests/clients/cli/wrapper.go @@ -539,6 +539,10 @@ func (w *Wrapper) Rootstore() datastore.Rootstore { return w.node.DB.Rootstore() } +func (w *Wrapper) Encstore() datastore.Blockstore { + return w.node.DB.Encstore() +} + func (w *Wrapper) Blockstore() datastore.Blockstore { return w.node.DB.Blockstore() } diff --git a/tests/clients/cli/wrapper_tx.go b/tests/clients/cli/wrapper_tx.go index 46aefd000d..e3bf41d818 100644 --- a/tests/clients/cli/wrapper_tx.go +++ b/tests/clients/cli/wrapper_tx.go @@ -75,7 +75,7 @@ func (w *Transaction) Datastore() datastore.DSReaderWriter { return w.tx.Datastore() } -func (w *Transaction) Encstore() datastore.DSReaderWriter { +func (w *Transaction) Encstore() datastore.Blockstore { return w.tx.Encstore() } diff --git a/tests/clients/http/wrapper.go b/tests/clients/http/wrapper.go index f931732f09..2b84bfc701 100644 --- a/tests/clients/http/wrapper.go +++ b/tests/clients/http/wrapper.go @@ -208,6 +208,10 @@ func (w *Wrapper) Rootstore() datastore.Rootstore { return w.node.DB.Rootstore() } +func (w *Wrapper) Encstore() datastore.Blockstore { + return w.node.DB.Encstore() +} + func (w *Wrapper) Blockstore() datastore.Blockstore { return w.node.DB.Blockstore() } diff --git a/tests/clients/http/wrapper_tx.go b/tests/clients/http/wrapper_tx.go index e4b838a2e9..baf841871a 100644 --- a/tests/clients/http/wrapper_tx.go +++ b/tests/clients/http/wrapper_tx.go @@ -69,7 +69,7 @@ func (w *TxWrapper) Datastore() datastore.DSReaderWriter { return w.server.Datastore() } -func (w *TxWrapper) Encstore() datastore.DSReaderWriter { +func (w *TxWrapper) Encstore() datastore.Blockstore { return w.server.Encstore() } diff --git a/tests/integration/acp.go b/tests/integration/acp.go index 44ac023bce..a6efd64110 100644 --- a/tests/integration/acp.go +++ b/tests/integration/acp.go @@ -57,6 +57,20 @@ var ( acpType ACPType ) +// KMSType 
is the type of KMS to use. +type KMSType string + +const ( + // NoneKMSType is the none KMS type. It is used to indicate that no KMS should be used. + NoneKMSType KMSType = "none" + // PubSubKMSType is the PubSub KMS type. + PubSubKMSType KMSType = "pubsub" +) + +func getKMSTypes() []KMSType { + return []KMSType{PubSubKMSType} +} + func init() { acpType = ACPType(os.Getenv(acpTypeEnvName)) if acpType == "" { diff --git a/tests/integration/assert_stack.go b/tests/integration/assert_stack.go new file mode 100644 index 0000000000..a341c96a31 --- /dev/null +++ b/tests/integration/assert_stack.go @@ -0,0 +1,57 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package tests + +import ( + "strconv" + "strings" +) + +// assertStack keeps track of the current assertion path. +// GraphQL response can be traversed by a key of a map and/or an index of an array. +// So whenever we have a mismatch in a large response, we can use this stack to find the exact path. +// Example output: "commits[2].links[1].cid" +type assertStack struct { + stack []string + isMap []bool +} + +func (a *assertStack) pushMap(key string) { + a.stack = append(a.stack, key) + a.isMap = append(a.isMap, true) +} + +func (a *assertStack) pushArray(index int) { + a.stack = append(a.stack, strconv.Itoa(index)) + a.isMap = append(a.isMap, false) +} + +func (a *assertStack) pop() { + a.stack = a.stack[:len(a.stack)-1] + a.isMap = a.isMap[:len(a.isMap)-1] +} + +func (a *assertStack) String() string { + var b strings.Builder + for i, key := range a.stack { + if a.isMap[i] { + if i > 0 { + b.WriteString(".") + } + b.WriteString(key) + } else { + b.WriteString("[") + b.WriteString(key) + b.WriteString("]") + } + } + return b.String() +} diff --git a/tests/integration/db.go b/tests/integration/db.go index 06737318d7..b9c1b3791d 100644 --- a/tests/integration/db.go +++ b/tests/integration/db.go @@ -21,6 +21,7 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/crypto" + "github.com/sourcenetwork/defradb/internal/kms" "github.com/sourcenetwork/defradb/node" changeDetector "github.com/sourcenetwork/defradb/tests/change_detector" ) @@ -109,10 +110,7 @@ func NewBadgerFileDB(ctx context.Context, t testing.TB) (client.DB, error) { return node.DB, err } -// setupNode returns the database implementation for the current -// testing state. The database type on the test state is used to -// select the datastore implementation to use. -func setupNode(s *state) (*node.Node, string, error) { +func getDefaultNodeOpts() []node.Option { opts := []node.Option{ node.WithLensPoolSize(lensPoolSize), // The test framework sets this up elsewhere when required so that it may be wrapped @@ -127,7 +125,7 @@ func setupNode(s *state) (*node.Node, string, error) { if badgerEncryption && encryptionKey == nil { key, err := crypto.GenerateAES256() if err != nil { - return nil, "", err + return nil } encryptionKey = key } @@ -136,6 +134,15 @@ func setupNode(s *state) (*node.Node, string, error) { opts = append(opts, node.WithBadgerEncryptionKey(encryptionKey)) } + return opts +} + +// setupNode returns the database implementation for the current +// testing state. 
The database type on the test state is used to +// select the datastore implementation to use. +func setupNode(s *state, opts ...node.Option) (*node.Node, string, error) { + opts = append(getDefaultNodeOpts(), opts...) + switch acpType { case LocalACPType: opts = append(opts, node.WithACPType(node.LocalACPType)) @@ -185,6 +192,10 @@ func setupNode(s *state) (*node.Node, string, error) { return nil, "", fmt.Errorf("invalid database type: %v", s.dbt) } + if s.kms == PubSubKMSType { + opts = append(opts, node.WithKMS(kms.PubSubServiceType)) + } + node, err := node.New(s.ctx, opts...) if err != nil { return nil, "", err diff --git a/tests/integration/encryption/commit_test.go b/tests/integration/encryption/commit_test.go index 86ea5d88df..da493e097f 100644 --- a/tests/integration/encryption/commit_test.go +++ b/tests/integration/encryption/commit_test.go @@ -48,7 +48,7 @@ func TestDocEncryption_WithEncryptionOnLWWCRDT_ShouldStoreCommitsDeltaEncrypted( Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreibdjepzhhiez4o27srv33xcd52yr336tpzqtkv36rdf3h3oue2l5m", + "cid": "bafyreidkuvcdxxkyoeapnmttu6l2vk43qnm3zuzpxegbifpj6w24jrvrxq", "collectionID": int64(1), "delta": encrypt(testUtils.CBORValue(21), john21DocID, ""), "docID": john21DocID, @@ -58,7 +58,7 @@ func TestDocEncryption_WithEncryptionOnLWWCRDT_ShouldStoreCommitsDeltaEncrypted( "links": []map[string]any{}, }, { - "cid": "bafyreihkiua7jpwkye3xlex6s5hh2azckcaljfi2h3iscgub5sikacyrbu", + "cid": "bafyreihdlv4fvvptetghxzyerxt4jc4zgprecybhoijrfjuyxqe55qw3x4", "collectionID": int64(1), "delta": encrypt(testUtils.CBORValue("John"), john21DocID, ""), "docID": john21DocID, @@ -68,7 +68,7 @@ func TestDocEncryption_WithEncryptionOnLWWCRDT_ShouldStoreCommitsDeltaEncrypted( "links": []map[string]any{}, }, { - "cid": "bafyreidxdhzhwjrv5s4x6cho5drz6xq2tc7oymzupf4p4gfk6eelsnc7ke", + "cid": "bafyreie5jegw4c2hg56bbiv6cgxmfz336jruukjakbjuyapockfnn6b5le", "collectionID": int64(1), "delta": nil, "docID": john21DocID, @@ -77,11 +77,11 @@ func TestDocEncryption_WithEncryptionOnLWWCRDT_ShouldStoreCommitsDeltaEncrypted( "height": int64(1), "links": []map[string]any{ { - "cid": "bafyreibdjepzhhiez4o27srv33xcd52yr336tpzqtkv36rdf3h3oue2l5m", + "cid": "bafyreidkuvcdxxkyoeapnmttu6l2vk43qnm3zuzpxegbifpj6w24jrvrxq", "name": "age", }, { - "cid": "bafyreihkiua7jpwkye3xlex6s5hh2azckcaljfi2h3iscgub5sikacyrbu", + "cid": "bafyreihdlv4fvvptetghxzyerxt4jc4zgprecybhoijrfjuyxqe55qw3x4", "name": "name", }, }, diff --git a/tests/integration/encryption/peer_sec_index_test.go b/tests/integration/encryption/peer_sec_index_test.go new file mode 100644 index 0000000000..e6fd3548cf --- /dev/null +++ b/tests/integration/encryption/peer_sec_index_test.go @@ -0,0 +1,161 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package encryption + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestDocEncryptionPeer_IfEncryptedDocHasIndexedField_ShouldIndexAfterDecryption(t *testing.T) { + test := testUtils.TestCase{ + KMS: testUtils.KMS{Activated: true}, + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index + } + `, + }, + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + testUtils.SubscribeToCollection{ + NodeID: 1, + CollectionIDs: []int{0}, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: `{ + "name": "Shahzad", + "age": 25 + }`, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: islam33Doc, + IsDocEncrypted: true, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: `{ + "name": "Andy", + "age": 21 + }`, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: john21Doc, + IsDocEncrypted: true, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + Request: ` + query @explain(type: execute) { + User(filter: {age: {_eq: 21}}) { + age + } + }`, + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(2), + }, + testUtils.Request{ + Request: ` + query { + User(filter: {age: {_eq: 21}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + { + "name": "Andy", + }, + { + "name": "John", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestDocEncryptionPeer_IfDocDocHasEncryptedIndexedField_ShouldIndexAfterDecryption(t *testing.T) { + test := testUtils.TestCase{ + KMS: testUtils.KMS{Activated: true}, + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index + } + `, + }, + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + testUtils.SubscribeToCollection{ + NodeID: 1, + CollectionIDs: []int{0}, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: `{ + "name": "Shahzad", + "age": 25 + }`, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: islam33Doc, + EncryptedFields: []string{"age"}, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: `{ + "name": "Andy", + "age": 21 + }`, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: john21Doc, + EncryptedFields: []string{"age"}, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + Request: ` + query @explain(type: execute) { + User(filter: {age: {_eq: 21}}) { + age + } + }`, + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(2), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/encryption/peer_share_test.go b/tests/integration/encryption/peer_share_test.go new file mode 100644 index 0000000000..c04d204a84 --- /dev/null +++ b/tests/integration/encryption/peer_share_test.go @@ -0,0 +1,530 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
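For context while reading these two new test files, the encryption knobs exercised on CreateDoc are IsDocEncrypted and EncryptedFields; the sketch below restates how they appear in the actions, and the trailing comments are my reading of the tests' behavior rather than framework documentation.

	// Whole-document encryption versus per-field encryption, as used in these tests:
	testUtils.CreateDoc{
		NodeID:          immutable.Some(0),
		Doc:             john21Doc,
		IsDocEncrypted:  true,            // encrypt the whole document
		EncryptedFields: []string{"age"}, // or encrypt named fields individually
	}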
+ +package encryption + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestDocEncryptionPeer_IfDocIsPublic_ShouldFetchKeyAndDecrypt(t *testing.T) { + test := testUtils.TestCase{ + KMS: testUtils.KMS{Activated: true}, + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + } + `, + }, + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + testUtils.SubscribeToCollection{ + NodeID: 1, + CollectionIDs: []int{0}, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: john21Doc, + IsDocEncrypted: true, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + User { + age + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + { + "age": int64(21), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestDocEncryptionPeer_IfPublicDocHasEncryptedField_ShouldFetchKeyAndDecrypt(t *testing.T) { + test := testUtils.TestCase{ + KMS: testUtils.KMS{Activated: true}, + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + } + `, + }, + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + testUtils.SubscribeToCollection{ + NodeID: 1, + CollectionIDs: []int{0}, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: john21Doc, + EncryptedFields: []string{"age"}, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + User { + name + age + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + { + "name": "John", + "age": int64(21), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestDocEncryptionPeer_IfEncryptedPublicDocHasEncryptedField_ShouldFetchKeysAndDecrypt(t *testing.T) { + test := testUtils.TestCase{ + KMS: testUtils.KMS{Activated: true}, + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + } + `, + }, + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + testUtils.SubscribeToCollection{ + NodeID: 1, + CollectionIDs: []int{0}, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: john21Doc, + IsDocEncrypted: true, + EncryptedFields: []string{"age"}, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + User { + name + age + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + { + "name": "John", + "age": int64(21), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestDocEncryptionPeer_IfAllFieldsOfEncryptedPublicDocAreIndividuallyEncrypted_ShouldFetchKeysAndDecrypt(t *testing.T) { + test := testUtils.TestCase{ + KMS: testUtils.KMS{Activated: true}, + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + } + `, + }, + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + testUtils.SubscribeToCollection{ + NodeID: 1, + CollectionIDs: []int{0}, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: john21Doc, + IsDocEncrypted: true, + EncryptedFields: []string{"name", "age"}, + }, + 
testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + User { + name + age + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + { + "name": "John", + "age": int64(21), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestDocEncryptionPeer_IfAllFieldsOfPublicDocAreIndividuallyEncrypted_ShouldFetchKeysAndDecrypt(t *testing.T) { + test := testUtils.TestCase{ + KMS: testUtils.KMS{Activated: true}, + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + } + `, + }, + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + testUtils.SubscribeToCollection{ + NodeID: 1, + CollectionIDs: []int{0}, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: john21Doc, + EncryptedFields: []string{"name", "age"}, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + User { + name + age + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + { + "name": "John", + "age": int64(21), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestDocEncryptionPeer_WithUpdatesOnEncryptedDeltaBasedCRDTField_ShouldDecryptAndCorrectlyMerge(t *testing.T) { + test := testUtils.TestCase{ + KMS: testUtils.KMS{Activated: true}, + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: john21Doc, + EncryptedFields: []string{"age"}, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + Doc: `{"age": 3}`, + }, + testUtils.SubscribeToCollection{ + NodeID: 1, + CollectionIDs: []int{0}, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + Doc: `{"age": 2}`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + User { + name + age + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + { + "name": "John", + "age": int64(26), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestDocEncryptionPeer_WithUpdatesOnDeltaBasedCRDTFieldOfEncryptedDoc_ShouldDecryptAndCorrectlyMerge(t *testing.T) { + test := testUtils.TestCase{ + KMS: testUtils.KMS{Activated: true}, + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @crdt(type: "pcounter") + } + `, + }, + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: john21Doc, + IsDocEncrypted: true, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + Doc: `{"age": 3}`, + }, + testUtils.SubscribeToCollection{ + NodeID: 1, + CollectionIDs: []int{0}, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + Doc: `{"age": 2}`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + User { + name + age + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + { + "name": "John", + "age": int64(26), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestDocEncryptionPeer_WithUpdatesThatSetsEmptyString_ShouldDecryptAndCorrectlyMerge(t *testing.T) 
{ + test := testUtils.TestCase{ + KMS: testUtils.KMS{Activated: true}, + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + } + `, + }, + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: john21Doc, + IsDocEncrypted: true, + }, + testUtils.SubscribeToCollection{ + NodeID: 1, + CollectionIDs: []int{0}, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + Doc: `{"name": ""}`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + User { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": ""}, + }, + }, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + Doc: `{"name": "John"}`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + User { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestDocEncryptionPeer_WithUpdatesThatSetsStringToNull_ShouldDecryptAndCorrectlyMerge(t *testing.T) { + test := testUtils.TestCase{ + KMS: testUtils.KMS{Activated: true}, + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + } + `, + }, + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: john21Doc, + IsDocEncrypted: true, + }, + testUtils.SubscribeToCollection{ + NodeID: 1, + CollectionIDs: []int{0}, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + Doc: `{"name": null}`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + User { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": nil}, + }, + }, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + Doc: `{"name": "John"}`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + User { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/encryption/peer_test.go b/tests/integration/encryption/peer_test.go index 7a94c22e13..9f5b875586 100644 --- a/tests/integration/encryption/peer_test.go +++ b/tests/integration/encryption/peer_test.go @@ -18,45 +18,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestDocEncryptionPeer_IfPeerHasNoKey_ShouldNotFetch(t *testing.T) { - test := testUtils.TestCase{ - Actions: []any{ - testUtils.RandomNetworkingConfig(), - testUtils.RandomNetworkingConfig(), - updateUserCollectionSchema(), - testUtils.ConnectPeers{ - SourceNodeID: 1, - TargetNodeID: 0, - }, - testUtils.SubscribeToCollection{ - NodeID: 1, - CollectionIDs: []int{0}, - }, - testUtils.CreateDoc{ - NodeID: immutable.Some(0), - Doc: john21Doc, - IsDocEncrypted: true, - }, - testUtils.WaitForSync{}, - testUtils.Request{ - NodeID: immutable.Some(1), - Request: `query { - Users { - age - } - }`, - Results: map[string]any{ - "Users": []map[string]any{}, - }, - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} - func TestDocEncryptionPeer_UponSync_ShouldSyncEncryptedDAG(t *testing.T) { test := testUtils.TestCase{ + KMS: 
testUtils.KMS{Activated: true}, Actions: []any{ testUtils.RandomNetworkingConfig(), testUtils.RandomNetworkingConfig(), @@ -97,7 +61,7 @@ func TestDocEncryptionPeer_UponSync_ShouldSyncEncryptedDAG(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreibdjepzhhiez4o27srv33xcd52yr336tpzqtkv36rdf3h3oue2l5m", + "cid": "bafyreidkuvcdxxkyoeapnmttu6l2vk43qnm3zuzpxegbifpj6w24jrvrxq", "collectionID": int64(1), "delta": encrypt(testUtils.CBORValue(21), john21DocID, ""), "docID": john21DocID, @@ -107,7 +71,7 @@ func TestDocEncryptionPeer_UponSync_ShouldSyncEncryptedDAG(t *testing.T) { "links": []map[string]any{}, }, { - "cid": "bafyreihkiua7jpwkye3xlex6s5hh2azckcaljfi2h3iscgub5sikacyrbu", + "cid": "bafyreihdlv4fvvptetghxzyerxt4jc4zgprecybhoijrfjuyxqe55qw3x4", "collectionID": int64(1), "delta": encrypt(testUtils.CBORValue("John"), john21DocID, ""), "docID": john21DocID, @@ -117,7 +81,7 @@ func TestDocEncryptionPeer_UponSync_ShouldSyncEncryptedDAG(t *testing.T) { "links": []map[string]any{}, }, { - "cid": "bafyreidxdhzhwjrv5s4x6cho5drz6xq2tc7oymzupf4p4gfk6eelsnc7ke", + "cid": "bafyreie5jegw4c2hg56bbiv6cgxmfz336jruukjakbjuyapockfnn6b5le", "collectionID": int64(1), "delta": nil, "docID": john21DocID, @@ -126,11 +90,11 @@ func TestDocEncryptionPeer_UponSync_ShouldSyncEncryptedDAG(t *testing.T) { "height": int64(1), "links": []map[string]any{ { - "cid": "bafyreibdjepzhhiez4o27srv33xcd52yr336tpzqtkv36rdf3h3oue2l5m", + "cid": "bafyreidkuvcdxxkyoeapnmttu6l2vk43qnm3zuzpxegbifpj6w24jrvrxq", "name": "age", }, { - "cid": "bafyreihkiua7jpwkye3xlex6s5hh2azckcaljfi2h3iscgub5sikacyrbu", + "cid": "bafyreihdlv4fvvptetghxzyerxt4jc4zgprecybhoijrfjuyxqe55qw3x4", "name": "name", }, }, @@ -143,3 +107,53 @@ func TestDocEncryptionPeer_UponSync_ShouldSyncEncryptedDAG(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestDocEncryptionPeer_IfPeerDidNotReceiveKey_ShouldNotFetch(t *testing.T) { + test := testUtils.TestCase{ + KMS: testUtils.KMS{Activated: true}, + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + updateUserCollectionSchema(), + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + testUtils.SubscribeToCollection{ + NodeID: 1, + CollectionIDs: []int{0}, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: john21Doc, + IsDocEncrypted: true, + }, + testUtils.WaitForSync{}, + // Do not wait for the key sync; request the document as soon as the DAG has synced. + // The document will be returned if the key sync has already taken place; if not, the result + // set will be empty.
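+ // Note: WaitForSync above only waits for the DAG merge events, not for key delivery, + // so the expected results below accept either outcome via AnyOf.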
+ testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + Users { + age + } + }`, + Results: map[string]any{ + "Users": testUtils.AnyOf{ + // The key-sync has not yet completed + []map[string]any{}, + // The key-sync has completed + []map[string]any{ + { + "age": int64(21), + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/encryption/utils.go b/tests/integration/encryption/utils.go index fd9c1d17c0..685a2567fb 100644 --- a/tests/integration/encryption/utils.go +++ b/tests/integration/encryption/utils.go @@ -11,7 +11,7 @@ package encryption import ( - "github.com/sourcenetwork/defradb/internal/encryption" + "github.com/sourcenetwork/defradb/crypto" testUtils "github.com/sourcenetwork/defradb/tests/integration" ) @@ -50,6 +50,6 @@ func updateUserCollectionSchema() testUtils.SchemaUpdate { func encrypt(plaintext []byte, docID, fieldName string) []byte { const keyLength = 32 const testEncKey = "examplekey1234567890examplekey12" - val, _ := encryption.EncryptAES(plaintext, []byte(fieldName + docID + testEncKey)[0:keyLength]) + val, _, _ := crypto.EncryptAES(plaintext, []byte(fieldName + docID + testEncKey)[0:keyLength], nil, true) return val } diff --git a/tests/integration/events.go b/tests/integration/events.go index 87b157a662..bf004b99aa 100644 --- a/tests/integration/events.go +++ b/tests/integration/events.go @@ -349,6 +349,10 @@ func getEventsForCreateDoc(s *state, action CreateDoc) map[string]struct{} { return expect } +func waitForSync(s *state) { + waitForMergeEvents(s) +} + // getEventsForUpdateWithFilter returns a map of docIDs that should be // published to the local event bus after a UpdateWithFilter action. // diff --git a/tests/integration/state.go b/tests/integration/state.go index e594285318..9e65458531 100644 --- a/tests/integration/state.go +++ b/tests/integration/state.go @@ -116,6 +116,8 @@ type state struct { // The TestCase currently being executed. testCase TestCase + kms KMSType + // The type of database currently being tested. dbt DatabaseType @@ -191,6 +193,7 @@ func newState( ctx context.Context, t testing.TB, testCase TestCase, + kms KMSType, dbt DatabaseType, clientType ClientType, collectionNames []string, @@ -199,6 +202,7 @@ func newState( ctx: ctx, t: t, testCase: testCase, + kms: kms, dbt: dbt, clientType: clientType, txns: []datastore.Txn{}, diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go index 389de2af35..9b0bce913b 100644 --- a/tests/integration/test_case.go +++ b/tests/integration/test_case.go @@ -59,6 +59,18 @@ type TestCase struct { // This is to only be used in the very rare cases where we really do want behavioural // differences between view types, or we need to temporarily document a bug. SupportedViewTypes immutable.Option[[]ViewType] + + // Configuration for KMS to be used in the test + KMS KMS +} + +// KMS contains the configuration for KMS to be used in the test +type KMS struct { + // Activated indicates if the KMS should be used in the test + Activated bool + // ExcludedTypes specifies the KMS types that should be excluded from the test. + // If none are specified all types will be used. 
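+ // Excluded types are removed from the list returned by getKMSTypes() before the test executes.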
+ ExcludedTypes []KMSType } // SetupComplete is a flag to explicitly notify the change detector at which point diff --git a/tests/integration/utils.go b/tests/integration/utils.go index fd6758929d..62e27e0b73 100644 --- a/tests/integration/utils.go +++ b/tests/integration/utils.go @@ -14,14 +14,15 @@ import ( "context" "encoding/json" "fmt" + "log/slog" "os" "reflect" + "slices" "strconv" "strings" "testing" "time" - "github.com/bxcodec/faker/support/slice" "github.com/fxamacker/cbor/v2" "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/immutable" @@ -38,6 +39,7 @@ import ( "github.com/sourcenetwork/defradb/internal/request/graphql" "github.com/sourcenetwork/defradb/internal/request/graphql/schema/types" "github.com/sourcenetwork/defradb/net" + "github.com/sourcenetwork/defradb/node" changeDetector "github.com/sourcenetwork/defradb/tests/change_detector" "github.com/sourcenetwork/defradb/tests/clients" "github.com/sourcenetwork/defradb/tests/gen" @@ -185,6 +187,17 @@ func ExecuteTestCase( databases = append(databases, defraIMType) } + var kmsList []KMSType + if testCase.KMS.Activated { + kmsList = getKMSTypes() + for _, excluded := range testCase.KMS.ExcludedTypes { + kmsList = slices.DeleteFunc(kmsList, func(t KMSType) bool { return t == excluded }) + } + } + if len(kmsList) == 0 { + kmsList = []KMSType{NoneKMSType} + } + // Assert that these are not empty to protect against accidental mis-configurations, // otherwise an empty set would silently pass all the tests. require.NotEmpty(t, databases) @@ -195,7 +208,9 @@ func ExecuteTestCase( ctx := context.Background() for _, ct := range clients { for _, dbt := range databases { - executeTestCase(ctx, t, collectionNames, testCase, dbt, ct) + for _, kms := range kmsList { + executeTestCase(ctx, t, collectionNames, testCase, kms, dbt, ct) + } } } } @@ -205,12 +220,11 @@ func executeTestCase( t testing.TB, collectionNames []string, testCase TestCase, + kms KMSType, dbt DatabaseType, clientType ClientType, ) { - log.InfoContext( - ctx, - testCase.Description, + logAttrs := []slog.Attr{ corelog.Any("database", dbt), corelog.Any("client", clientType), corelog.Any("mutationType", mutationType), @@ -222,11 +236,17 @@ func executeTestCase( corelog.String("changeDetector.SourceBranch", changeDetector.SourceBranch), corelog.String("changeDetector.TargetBranch", changeDetector.TargetBranch), corelog.String("changeDetector.Repository", changeDetector.Repository), - ) + } + + if kms != NoneKMSType { + logAttrs = append(logAttrs, corelog.Any("KMS", kms)) + } + + log.InfoContext(ctx, testCase.Description, logAttrs...) 
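+ // Every (client, database, KMS) combination selected by ExecuteTestCase reaches this point; + // the action loop below runs identically for each.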
startActionIndex, endActionIndex := getActionRange(t, testCase) - s := newState(ctx, t, testCase, dbt, clientType, collectionNames) + s := newState(ctx, t, testCase, kms, dbt, clientType, collectionNames) setStartingNodes(s) // It is very important that the databases are always closed, otherwise resources will leak @@ -366,7 +386,7 @@ func performAction( assertClientIntrospectionResults(s, action) case WaitForSync: - waitForMergeEvents(s) + waitForSync(s) case Benchmark: benchmarkAction(s, actionIndex, action) @@ -403,7 +423,7 @@ func generateDocs(s *state, action GenerateDocs) { collections := getNodeCollections(action.NodeID, s.collections) defs := make([]client.CollectionDefinition, 0, len(collections[0])) for _, col := range collections[0] { - if len(action.ForCollections) == 0 || slice.Contains(action.ForCollections, col.Name().Value()) { + if len(action.ForCollections) == 0 || slices.Contains(action.ForCollections, col.Name().Value()) { defs = append(defs, col.Definition()) } } @@ -730,7 +750,7 @@ func restartNodes( nodeOpts := s.nodeConfigs[i] nodeOpts = append(nodeOpts, net.WithListenAddresses(addresses...)) - node.Peer, err = net.NewPeer(s.ctx, node.DB.Blockstore(), node.DB.Events(), nodeOpts...) + node.Peer, err = net.NewPeer(s.ctx, node.DB.Blockstore(), node.DB.Encstore(), node.DB.Events(), nodeOpts...) require.NoError(s.t, err) c, err := setupClient(s, node) @@ -802,20 +822,21 @@ func configureNode( return } - node, path, err := setupNode(s) //disable change dector, or allow it? - require.NoError(s.t, err) - privateKey, err := crypto.GenerateEd25519() require.NoError(s.t, err) - nodeOpts := action() - nodeOpts = append(nodeOpts, net.WithPrivateKey(privateKey)) + netNodeOpts := action() + netNodeOpts = append(netNodeOpts, net.WithPrivateKey(privateKey)) - node.Peer, err = net.NewPeer(s.ctx, node.DB.Blockstore(), node.DB.Events(), nodeOpts...) + nodeOpts := []node.Option{node.WithDisableP2P(false)} + for _, opt := range netNodeOpts { + nodeOpts = append(nodeOpts, opt) + } + node, path, err := setupNode(s, nodeOpts...) // disable change detector, or allow it? require.NoError(s.t, err) s.nodeAddresses = append(s.nodeAddresses, node.Peer.PeerInfo()) - s.nodeConfigs = append(s.nodeConfigs, nodeOpts) + s.nodeConfigs = append(s.nodeConfigs, netNodeOpts) c, err := setupClient(s, node) require.NoError(s.t, err) @@ -1767,7 +1788,6 @@ func executeRequest( result := node.ExecRequest(ctx, action.Request, options...) - anyOfByFieldKey := map[docFieldKey][]any{} expectedErrorRaised = assertRequestResults( s, &result.GQL, @@ -1775,7 +1795,6 @@ func executeRequest( action.ExpectedError, action.Asserter, nodeID, - anyOfByFieldKey, ) } @@ -1825,9 +1844,7 @@ func executeSubscriptionRequest( r, action.ExpectedError, nil, - // anyof is not yet supported by subscription requests 0, - map[docFieldKey][]any{}, ) assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) @@ -1884,12 +1901,6 @@ func AssertErrors( return false } -// docFieldKey is an internal key type that wraps docIndex and fieldName -type docFieldKey struct { - docIndex int - fieldName string -} - func assertRequestResults( s *state, result *client.GQLResult, @@ -1897,7 +1908,6 @@ func assertRequestResults( expectedError string, asserter ResultAsserter, nodeID int, - anyOfByField map[docFieldKey][]any, ) bool { // we skip assertion benchmark because you don't specify expected result for benchmark.
if AssertErrors(s.t, s.testCase.Description, result.Errors, expectedError) || s.isBench { @@ -1926,31 +1936,37 @@ func assertRequestResults( keys[key] = struct{}{} } + stack := &assertStack{} for key := range keys { + stack.pushMap(key) expect, ok := expectedResults[key] require.True(s.t, ok, "expected key not found: %s", key) actual, ok := resultantData[key] require.True(s.t, ok, "result key not found: %s", key) - expectDocs, ok := expect.([]map[string]any) - if ok { + switch exp := expect.(type) { + case []map[string]any: actualDocs := ConvertToArrayOfMaps(s.t, actual) assertRequestResultDocs( s, nodeID, - expectDocs, + exp, actualDocs, - anyOfByField) + stack, + ) + case AnyOf: + assertResultsAnyOf(s.t, s.clientType, exp, actual) + default: assertResultsEqual( s.t, s.clientType, expect, actual, - fmt.Sprintf("node: %v, key: %v", nodeID, key), + fmt.Sprintf("node: %v, path: %s", nodeID, stack), ) } + stack.pop() } return false @@ -1961,13 +1977,14 @@ func assertRequestResultDocs( nodeID int, expectedResults []map[string]any, actualResults []map[string]any, - anyOfByField map[docFieldKey][]any, + stack *assertStack, ) bool { // compare results require.Equal(s.t, len(expectedResults), len(actualResults), s.testCase.Description+" \n(number of results don't match)") for actualDocIndex, actualDoc := range actualResults { + stack.pushArray(actualDocIndex) expectedDoc := expectedResults[actualDocIndex] require.Equal( @@ -1982,14 +1999,10 @@ func assertRequestResultDocs( ) for field, actualValue := range actualDoc { + stack.pushMap(field) switch expectedValue := expectedDoc[field].(type) { case AnyOf: assertResultsAnyOf(s.t, s.clientType, expectedValue, actualValue) - - dfk := docFieldKey{actualDocIndex, field} - valueSet := anyOfByField[dfk] - valueSet = append(valueSet, actualValue) - anyOfByField[dfk] = valueSet case DocIndex: expectedDocID := s.docIDs[expectedValue.CollectionIndex][expectedValue.Index].String() assertResultsEqual( @@ -1997,7 +2010,7 @@ func assertRequestResultDocs( s.clientType, expectedDocID, actualValue, - fmt.Sprintf("node: %v, doc: %v", nodeID, actualDocIndex), + fmt.Sprintf("node: %v, path: %s", nodeID, stack), ) case []map[string]any: actualValueMap := ConvertToArrayOfMaps(s.t, actualValue) @@ -2007,7 +2020,7 @@ func assertRequestResultDocs( nodeID, expectedValue, actualValueMap, - anyOfByField, + stack, ) default: @@ -2016,10 +2029,12 @@ func assertRequestResultDocs( s.clientType, expectedValue, actualValue, - fmt.Sprintf("node: %v, doc: %v", nodeID, actualDocIndex), + fmt.Sprintf("node: %v, path: %s", nodeID, stack), ) } + stack.pop() } + stack.pop() } return false diff --git a/tools/configs/mockery.yaml b/tools/configs/mockery.yaml index 451ae55771..504dbd1be1 100644 --- a/tools/configs/mockery.yaml +++ b/tools/configs/mockery.yaml @@ -32,6 +32,7 @@ packages: # Packages and their interfaces to generate mocks for. DSReaderWriter: RootStore: Txn: + Blockstore: github.com/sourcenetwork/defradb/client: config: From 0b2a90dc790e1b91174a31217611e7ff16188f28 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 23 Sep 2024 09:03:10 -0700 Subject: [PATCH 34/71] refactor: Merge duplicate input args (#3046) ## Relevant issue(s) Resolves #3045 Resolves #3042 ## Description This PR merges the duplicate input args `docID`, `docIDs`, `input`, and `inputs` into a single list arg. With GraphQL input coercion, the inputs can remain the same for lists as well as single items: for example (with placeholder document IDs), `delete_Author(docID: "bae-123")` and `delete_Author(docID: ["bae-123", "bae-456"])` are both accepted by the same list argument, since a single value is coerced into a one-element list. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas.
- [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? Updated integration tests Specify the platform(s) on which this was tested: - MacOS --- client/request/consts.go | 4 +-- client/request/mutation.go | 10 ++----- internal/planner/create.go | 5 +--- internal/planner/delete.go | 2 +- internal/planner/group.go | 4 +-- internal/planner/mapper/mapper.go | 1 - internal/planner/mapper/mutation.go | 7 ++--- internal/planner/select.go | 4 +-- internal/planner/update.go | 8 +++-- internal/request/graphql/parser/mutation.go | 30 ++++++++----------- internal/request/graphql/parser/query.go | 5 ---- internal/request/graphql/schema/generate.go | 25 +++++++--------- .../query/simple/with_multi_lookup_test.go | 2 +- .../integration/explain/debug/delete_test.go | 6 ++-- .../explain/debug/delete_with_error_test.go | 2 +- .../debug/group_with_doc_id_child_test.go | 2 +- .../explain/debug/group_with_doc_id_test.go | 2 +- .../type_join_with_filter_doc_id_test.go | 4 +-- .../integration/explain/debug/update_test.go | 4 +-- .../explain/debug/with_filter_doc_id_test.go | 6 ++-- .../integration/explain/default/basic_test.go | 2 +- .../explain/default/delete_test.go | 18 +++++------ .../explain/default/delete_with_error_test.go | 2 +- tests/integration/explain/default/fixture.go | 2 +- .../default/group_with_average_test.go | 8 ++--- .../default/group_with_doc_id_child_test.go | 4 +-- .../explain/default/group_with_doc_id_test.go | 2 +- .../default/group_with_filter_child_test.go | 4 +-- .../default/group_with_limit_child_test.go | 6 ++-- .../explain/default/group_with_limit_test.go | 2 +- .../default/group_with_order_child_test.go | 6 ++-- .../explain/default/group_with_order_test.go | 2 +- .../explain/default/type_join_many_test.go | 2 +- .../explain/default/type_join_one_test.go | 4 +-- .../explain/default/type_join_test.go | 4 +-- .../type_join_with_filter_doc_id_test.go | 8 ++--- .../default/type_join_with_filter_test.go | 4 +-- .../explain/default/update_test.go | 12 ++++---- .../default/with_filter_doc_id_test.go | 16 +++++----- .../explain/execute/delete_test.go | 2 +- .../execute/query_deleted_docs_test.go | 2 +- .../explain/execute/update_test.go | 2 +- .../integration/explain/simple/basic_test.go | 2 +- .../mutation/create/with_null_input_test.go | 22 ++++---------- .../mutation/create/with_variables_test.go | 4 +-- .../mutation/delete/with_id_alias_test.go | 2 +- .../mutation/delete/with_id_test.go | 4 +-- .../mutation/delete/with_id_txn_test.go | 2 +- .../mutation/delete/with_ids_alias_test.go | 2 +- .../mutation/delete/with_ids_filter_test.go | 2 +- .../mutation/delete/with_ids_test.go | 10 +++---- .../mutation/delete/with_ids_txn_test.go | 4 +-- .../delete/with_ids_update_alias_test.go | 2 +- .../mutation/delete/with_null_input_test.go | 2 +- .../mutation/update/with_ids_test.go | 2 +- .../mutation/update/with_null_input_test.go | 2 +- .../query/one_to_many/with_doc_ids_test.go | 2 +- .../query/simple/with_doc_ids_test.go | 10 +++---- .../query/simple/with_group_doc_ids_test.go | 2 +- .../query/simple/with_null_input_test.go | 2 +- tests/integration/schema/default_fields.go | 
7 ----- tests/integration/schema/filter_test.go | 2 -- tests/integration/schema/input_type_test.go | 2 -- tests/integration/utils.go | 1 - 64 files changed, 144 insertions(+), 193 deletions(-) diff --git a/client/request/consts.go b/client/request/consts.go index 8b98199827..157cab8b5f 100644 --- a/client/request/consts.go +++ b/client/request/consts.go @@ -21,7 +21,6 @@ const ( Cid = "cid" Input = "input" - Inputs = "inputs" FieldName = "field" FieldIDName = "fieldId" ShowDeleted = "showDeleted" @@ -36,8 +35,7 @@ const ( OrderClause = "order" DepthClause = "depth" - DocIDArgName = "docID" - DocIDsArgName = "docIDs" + DocIDArgName = "docID" AverageFieldName = "_avg" CountFieldName = "_count" diff --git a/client/request/mutation.go b/client/request/mutation.go index 4f4f06fe7b..146a7ac8b5 100644 --- a/client/request/mutation.go +++ b/client/request/mutation.go @@ -36,17 +36,11 @@ type ObjectMutation struct { // Collection is the target collection name. Collection string - // Input is the json representation of the fieldName-value pairs of document properties - // to mutate. - // - // This is ignored for [DeleteObjects] mutations. - Input map[string]any - - // Inputs is the array of json representations of the fieldName-value pairs of document + // Input is the array of json representations of the fieldName-value pairs of document // properties to mutate. // // This is ignored for [DeleteObjects] mutations. - Inputs []map[string]any + Input []map[string]any // Encrypt is a boolean flag that indicates whether the input data should be encrypted. Encrypt bool diff --git a/internal/planner/create.go b/internal/planner/create.go index 89acfabc5d..0c36658a14 100644 --- a/internal/planner/create.go +++ b/internal/planner/create.go @@ -151,13 +151,10 @@ func (p *Planner) CreateDocs(parsed *mapper.Mutation) (planNode, error) { // create a mutation createNode. create := &createNode{ p: p, - input: parsed.Inputs, + input: parsed.Input, results: results, docMapper: docMapper{parsed.DocumentMapping}, } - if parsed.Input != nil { - create.input = []map[string]any{parsed.Input} - } p.ctx = encryption.SetContextConfigFromParams(p.ctx, parsed.Encrypt, parsed.EncryptFields) diff --git a/internal/planner/delete.go b/internal/planner/delete.go index 093bf527dc..e470f45956 100644 --- a/internal/planner/delete.go +++ b/internal/planner/delete.go @@ -95,7 +95,7 @@ func (n *deleteNode) simpleExplain() (map[string]any, error) { simpleExplainMap := map[string]any{} // Add the document id(s) that request wants to delete. - simpleExplainMap[request.DocIDsArgName] = n.docIDs + simpleExplainMap[request.DocIDArgName] = n.docIDs // Add the filter attribute if it exists, otherwise have it nil. if n.filter == nil { diff --git a/internal/planner/group.go b/internal/planner/group.go index b412b9e86e..32a98c2330 100644 --- a/internal/planner/group.go +++ b/internal/planner/group.go @@ -231,9 +231,9 @@ func (n *groupNode) simpleExplain() (map[string]any, error) { // Get targetable attribute(s) of this child. 
if c.DocIDs.HasValue() { - childExplainGraph[request.DocIDsArgName] = c.DocIDs.Value() + childExplainGraph[request.DocIDArgName] = c.DocIDs.Value() } else { - childExplainGraph[request.DocIDsArgName] = nil + childExplainGraph[request.DocIDArgName] = nil } if c.Filter == nil { diff --git a/internal/planner/mapper/mapper.go b/internal/planner/mapper/mapper.go index ac6bb80c78..68a7924806 100644 --- a/internal/planner/mapper/mapper.go +++ b/internal/planner/mapper/mapper.go @@ -1246,7 +1246,6 @@ func toMutation( Select: *underlyingSelect, Type: MutationType(mutationRequest.Type), Input: mutationRequest.Input, - Inputs: mutationRequest.Inputs, Encrypt: mutationRequest.Encrypt, EncryptFields: mutationRequest.EncryptFields, }, nil diff --git a/internal/planner/mapper/mutation.go b/internal/planner/mapper/mutation.go index f02bc9d067..6c4ab4c56f 100644 --- a/internal/planner/mapper/mutation.go +++ b/internal/planner/mapper/mutation.go @@ -27,11 +27,8 @@ type Mutation struct { // The type of mutation. For example a create request. Type MutationType - // Input is the map of fields and values used for the mutation. - Input map[string]any - - // Inputs is the array of maps of fields and values used for the mutation. - Inputs []map[string]any + // Input is the array of maps of fields and values used for the mutation. + Input []map[string]any // Encrypt is a flag to indicate if the input data should be encrypted. Encrypt bool diff --git a/internal/planner/select.go b/internal/planner/select.go index 5d7e448c73..3c5cc58bee 100644 --- a/internal/planner/select.go +++ b/internal/planner/select.go @@ -201,9 +201,9 @@ func (n *selectNode) simpleExplain() (map[string]any, error) { // Add the docIDs attribute if it exists. if !n.docIDs.HasValue() { - simpleExplainMap[request.DocIDsArgName] = nil + simpleExplainMap[request.DocIDArgName] = nil } else { - simpleExplainMap[request.DocIDsArgName] = n.docIDs.Value() + simpleExplainMap[request.DocIDArgName] = n.docIDs.Value() } return simpleExplainMap, nil diff --git a/internal/planner/update.go b/internal/planner/update.go index 2baf3f971f..2f282af292 100644 --- a/internal/planner/update.go +++ b/internal/planner/update.go @@ -125,7 +125,7 @@ func (n *updateNode) simpleExplain() (map[string]any, error) { simpleExplainMap := map[string]any{} // Add the document id(s) that request wants to update. - simpleExplainMap[request.DocIDsArgName] = n.docIDs + simpleExplainMap[request.DocIDArgName] = n.docIDs // Add the filter attribute if it exists, otherwise have it nil. 
if n.filter == nil { @@ -164,10 +164,14 @@ func (p *Planner) UpdateDocs(parsed *mapper.Mutation) (planNode, error) { filter: parsed.Filter, docIDs: parsed.DocIDs.Value(), isUpdating: true, - input: parsed.Input, docMapper: docMapper{parsed.DocumentMapping}, } + // update mutation only supports a single input + if len(parsed.Input) > 0 { + update.input = parsed.Input[0] + } + // get collection col, err := p.db.GetCollectionByName(p.ctx, parsed.Name) if err != nil { diff --git a/internal/request/graphql/parser/mutation.go b/internal/request/graphql/parser/mutation.go index 3fa10195f4..b7ce4a1723 100644 --- a/internal/request/graphql/parser/mutation.go +++ b/internal/request/graphql/parser/mutation.go @@ -101,20 +101,19 @@ func parseMutation(exe *gql.ExecutionContext, parent *gql.Object, field *ast.Fie switch name { case request.Input: - if v, ok := value.(map[string]any); ok { - mut.Input = v - } - - case request.Inputs: - v, ok := value.([]any) - if !ok { - continue // value is nil - } - inputs := make([]map[string]any, len(v)) - for i, v := range v { - inputs[i] = v.(map[string]any) + switch v := value.(type) { + case []any: + // input for create is a list + inputs := make([]map[string]any, len(v)) + for i, v := range v { + inputs[i] = v.(map[string]any) + } + mut.Input = inputs + + case map[string]any: + // input for update is an object + mut.Input = []map[string]any{v} } - mut.Inputs = inputs case request.FilterClause: if v, ok := value.(map[string]any); ok { @@ -122,11 +121,6 @@ func parseMutation(exe *gql.ExecutionContext, parent *gql.Object, field *ast.Fie } case request.DocIDArgName: - if v, ok := value.(string); ok { - mut.DocIDs = immutable.Some([]string{v}) - } - - case request.DocIDsArgName: v, ok := value.([]any) if !ok { continue // value is nil diff --git a/internal/request/graphql/parser/query.go b/internal/request/graphql/parser/query.go index 47d6a70493..29ba695dec 100644 --- a/internal/request/graphql/parser/query.go +++ b/internal/request/graphql/parser/query.go @@ -112,11 +112,6 @@ func parseSelect( } case request.DocIDArgName: // parse single DocID field - if v, ok := value.(string); ok { - slct.DocIDs = immutable.Some([]string{v}) - } - - case request.DocIDsArgName: v, ok := value.([]any) if !ok { continue // value is nil diff --git a/internal/request/graphql/schema/generate.go b/internal/request/graphql/schema/generate.go index f5a2f4c624..3d1fe26610 100644 --- a/internal/request/graphql/schema/generate.go +++ b/internal/request/graphql/schema/generate.go @@ -385,8 +385,7 @@ func (g *Generator) createExpandedFieldList( Description: f.Description, Type: gql.NewList(t), Args: gql.FieldConfigArgument{ - request.DocIDArgName: schemaTypes.NewArgConfig(gql.String, docIDArgDescription), - request.DocIDsArgName: schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(gql.String)), docIDsArgDescription), + request.DocIDArgName: schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(gql.String)), docIDsArgDescription), "filter": schemaTypes.NewArgConfig( g.manager.schema.TypeMap()[typeName+filterInputNameSuffix], listFieldFilterArgDescription, @@ -1054,8 +1053,7 @@ func (g *Generator) GenerateMutationInputForGQLType(obj *gql.Object) ([]*gql.Fie Description: createDocumentDescription, Type: gql.NewList(obj), Args: gql.FieldConfigArgument{ - request.Input: schemaTypes.NewArgConfig(mutationInput, "Create a "+obj.Name()+" document"), - request.Inputs: schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(mutationInput)), + request.Input: 
schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(mutationInput)), "Create "+obj.Name()+" documents"), request.EncryptDocArgName: schemaTypes.NewArgConfig(gql.Boolean, encryptArgDescription), request.EncryptFieldsArgName: schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(explicitUserFieldsEnum)), @@ -1068,10 +1066,9 @@ func (g *Generator) GenerateMutationInputForGQLType(obj *gql.Object) ([]*gql.Fie Description: updateDocumentsDescription, Type: gql.NewList(obj), Args: gql.FieldConfigArgument{ - request.DocIDArgName: schemaTypes.NewArgConfig(gql.ID, updateIDArgDescription), - request.DocIDsArgName: schemaTypes.NewArgConfig(gql.NewList(gql.ID), updateIDsArgDescription), - "filter": schemaTypes.NewArgConfig(filterInput, updateFilterArgDescription), - request.Input: schemaTypes.NewArgConfig(mutationInput, "Update field values"), + request.DocIDArgName: schemaTypes.NewArgConfig(gql.NewList(gql.ID), updateIDsArgDescription), + "filter": schemaTypes.NewArgConfig(filterInput, updateFilterArgDescription), + request.Input: schemaTypes.NewArgConfig(mutationInput, "Update field values"), }, } @@ -1080,9 +1077,8 @@ func (g *Generator) GenerateMutationInputForGQLType(obj *gql.Object) ([]*gql.Fie Description: deleteDocumentsDescription, Type: gql.NewList(obj), Args: gql.FieldConfigArgument{ - request.DocIDArgName: schemaTypes.NewArgConfig(gql.ID, deleteIDArgDescription), - request.DocIDsArgName: schemaTypes.NewArgConfig(gql.NewList(gql.ID), deleteIDsArgDescription), - "filter": schemaTypes.NewArgConfig(filterInput, deleteFilterArgDescription), + request.DocIDArgName: schemaTypes.NewArgConfig(gql.NewList(gql.ID), deleteIDsArgDescription), + "filter": schemaTypes.NewArgConfig(filterInput, deleteFilterArgDescription), }, } @@ -1301,10 +1297,9 @@ func (g *Generator) genTypeQueryableFieldList( Description: obj.Description(), Type: gql.NewList(obj), Args: gql.FieldConfigArgument{ - request.DocIDArgName: schemaTypes.NewArgConfig(gql.String, docIDArgDescription), - request.DocIDsArgName: schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(gql.String)), docIDsArgDescription), - "cid": schemaTypes.NewArgConfig(gql.String, cidArgDescription), - "filter": schemaTypes.NewArgConfig(config.filter, selectFilterArgDescription), + request.DocIDArgName: schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(gql.String)), docIDsArgDescription), + "cid": schemaTypes.NewArgConfig(gql.String, cidArgDescription), + "filter": schemaTypes.NewArgConfig(config.filter, selectFilterArgDescription), "groupBy": schemaTypes.NewArgConfig( gql.NewList(gql.NewNonNull(config.groupBy)), schemaTypes.GroupByArgDescription, diff --git a/tests/bench/query/simple/with_multi_lookup_test.go b/tests/bench/query/simple/with_multi_lookup_test.go index f862095189..b74ebec2c5 100644 --- a/tests/bench/query/simple/with_multi_lookup_test.go +++ b/tests/bench/query/simple/with_multi_lookup_test.go @@ -21,7 +21,7 @@ var ( // 10x `docID`s will be replaced in the bench runner func userSimpleWithMultiLookupQuery = ` query { - User(docIDs: ["{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}"]) { + User(docID: ["{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}"]) { _docID Name Age diff --git a/tests/integration/explain/debug/delete_test.go b/tests/integration/explain/debug/delete_test.go index 7162b1b428..2f568897a9 100644 --- a/tests/integration/explain/debug/delete_test.go +++ 
b/tests/integration/explain/debug/delete_test.go @@ -116,7 +116,7 @@ func TestDebugExplainMutationRequestWithDeleteUsingIds(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain(type: debug) { - delete_Author(docIDs: [ + delete_Author(docID: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f" ]) { @@ -143,7 +143,7 @@ func TestDebugExplainMutationRequestWithDeleteUsingNoIds(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain(type: debug) { - delete_Author(docIDs: []) { + delete_Author(docID: []) { _docID } }`, @@ -168,7 +168,7 @@ func TestDebugExplainMutationRequestWithDeleteUsingFilterAndIds(t *testing.T) { Request: `mutation @explain(type: debug) { delete_Author( - docIDs: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "test"], + docID: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "test"], filter: { _and: [ {age: {_lt: 26}}, diff --git a/tests/integration/explain/debug/delete_with_error_test.go b/tests/integration/explain/debug/delete_with_error_test.go index a7c5fc1d3a..0768a62ae4 100644 --- a/tests/integration/explain/debug/delete_with_error_test.go +++ b/tests/integration/explain/debug/delete_with_error_test.go @@ -29,7 +29,7 @@ func TestDebugExplainMutationRequestWithDeleteHavingNoSubSelection(t *testing.T) Request: `mutation @explain(type: debug) { delete_Author( - docIDs: [ + docID: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f" ] diff --git a/tests/integration/explain/debug/group_with_doc_id_child_test.go b/tests/integration/explain/debug/group_with_doc_id_child_test.go index 7da680dbbc..0baf560434 100644 --- a/tests/integration/explain/debug/group_with_doc_id_child_test.go +++ b/tests/integration/explain/debug/group_with_doc_id_child_test.go @@ -32,7 +32,7 @@ func TestDebugExplainRequestWithDocIDsOnInnerGroupSelection(t *testing.T) { groupBy: [age] ) { age - _group(docIDs: ["bae-6a4c5bc5-b044-5a03-a868-8260af6f2254"]) { + _group(docID: ["bae-6a4c5bc5-b044-5a03-a868-8260af6f2254"]) { name } } diff --git a/tests/integration/explain/debug/group_with_doc_id_test.go b/tests/integration/explain/debug/group_with_doc_id_test.go index 75728e77e0..00c6c678ec 100644 --- a/tests/integration/explain/debug/group_with_doc_id_test.go +++ b/tests/integration/explain/debug/group_with_doc_id_test.go @@ -61,7 +61,7 @@ func TestDebugExplainRequestWithDocIDsAndFilterOnParentGroupBy(t *testing.T) { Author( groupBy: [age], filter: {age: {_eq: 20}}, - docIDs: [ + docID: [ "bae-6a4c5bc5-b044-5a03-a868-8260af6f2254", "bae-4ea9d148-13f3-5a48-a0ef-9ffd344caeed" ] diff --git a/tests/integration/explain/debug/type_join_with_filter_doc_id_test.go b/tests/integration/explain/debug/type_join_with_filter_doc_id_test.go index 351ada42ef..3c1a10a9cf 100644 --- a/tests/integration/explain/debug/type_join_with_filter_doc_id_test.go +++ b/tests/integration/explain/debug/type_join_with_filter_doc_id_test.go @@ -33,7 +33,7 @@ func TestDebugExplainRequestWithRelatedAndRegularFilterAndDocIDs(t *testing.T) { name: {_eq: "John Grisham"}, books: {name: {_eq: "Painted House"}} }, - docIDs: [ + docID: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f8e" ] @@ -82,7 +82,7 @@ func TestDebugExplainRequestWithManyRelatedFiltersAndDocID(t *testing.T) { articles: {name: {_eq: "To my dear readers"}}, books: {name: {_eq: "Theif Lord"}} }, - docIDs: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"] + docID: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"] ) { name age diff --git 
a/tests/integration/explain/debug/update_test.go b/tests/integration/explain/debug/update_test.go index dc69553619..caf2ac16e4 100644 --- a/tests/integration/explain/debug/update_test.go +++ b/tests/integration/explain/debug/update_test.go @@ -78,7 +78,7 @@ func TestDebugExplainMutationRequestWithUpdateUsingIds(t *testing.T) { Request: `mutation @explain(type: debug) { update_Author( - docIDs: [ + docID: [ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ], @@ -144,7 +144,7 @@ func TestDebugExplainMutationRequestWithUpdateUsingIdsAndFilter(t *testing.T) { _eq: true } }, - docIDs: [ + docID: [ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ], diff --git a/tests/integration/explain/debug/with_filter_doc_id_test.go b/tests/integration/explain/debug/with_filter_doc_id_test.go index 800a3a3c77..fb68a3bea0 100644 --- a/tests/integration/explain/debug/with_filter_doc_id_test.go +++ b/tests/integration/explain/debug/with_filter_doc_id_test.go @@ -53,7 +53,7 @@ func TestDebugExplainRequestWithDocIDsFilterUsingOneID(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain(type: debug) { - Author(docIDs: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"]) { + Author(docID: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"]) { name age } @@ -79,7 +79,7 @@ func TestDebugExplainRequestWithDocIDsFilterUsingMultipleButDuplicateIDs(t *test Request: `query @explain(type: debug) { Author( - docIDs: [ + docID: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ] @@ -109,7 +109,7 @@ func TestDebugExplainRequestWithDocIDsFilterUsingMultipleUniqueIDs(t *testing.T) Request: `query @explain(type: debug) { Author( - docIDs: [ + docID: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f" ] diff --git a/tests/integration/explain/default/basic_test.go b/tests/integration/explain/default/basic_test.go index 2f02f6c67c..69089cd15d 100644 --- a/tests/integration/explain/default/basic_test.go +++ b/tests/integration/explain/default/basic_test.go @@ -65,7 +65,7 @@ func TestDefaultExplainRequestWithFullBasicGraph(t *testing.T) { { "selectTopNode": dataMap{ "selectNode": dataMap{ - "docIDs": nil, + "docID": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, diff --git a/tests/integration/explain/default/delete_test.go b/tests/integration/explain/default/delete_test.go index a8918b9141..e220ae4d86 100644 --- a/tests/integration/explain/default/delete_test.go +++ b/tests/integration/explain/default/delete_test.go @@ -61,7 +61,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilter(t *testing.T) { "_eq": "Shahzad", }, }, - "docIDs": []string(nil), + "docID": []string(nil), }, }, @@ -116,7 +116,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilterToMatchEverything(t * IncludeChildNodes: false, ExpectedAttributes: dataMap{ "filter": nil, - "docIDs": []string(nil), + "docID": []string(nil), }, }, @@ -167,7 +167,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingId(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "filter": nil, - "docIDs": []string{ + "docID": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, }, @@ -207,7 +207,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingIds(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain { - delete_Author(docIDs: [ + delete_Author(docID: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f" ]) { @@ -223,7 +223,7 @@ func 
TestDefaultExplainMutationRequestWithDeleteUsingIds(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "filter": nil, - "docIDs": []string{ + "docID": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", }, @@ -268,7 +268,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingNoIds(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain { - delete_Author(docIDs: []) { + delete_Author(docID: []) { _docID } }`, @@ -281,7 +281,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingNoIds(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "filter": nil, - "docIDs": []string{}, + "docID": []string{}, }, }, @@ -315,7 +315,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilterAndIds(t *testing.T) Request: `mutation @explain { delete_Author( - docIDs: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "test"], + docID: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "test"], filter: { _and: [ {age: {_lt: 26}}, @@ -348,7 +348,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilterAndIds(t *testing.T) }, }, }, - "docIDs": []string{ + "docID": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "test", }, diff --git a/tests/integration/explain/default/delete_with_error_test.go b/tests/integration/explain/default/delete_with_error_test.go index 14ef207451..69af033eb5 100644 --- a/tests/integration/explain/default/delete_with_error_test.go +++ b/tests/integration/explain/default/delete_with_error_test.go @@ -29,7 +29,7 @@ func TestDefaultExplainMutationRequestWithDeleteHavingNoSubSelection(t *testing. Request: `mutation @explain { delete_Author( - docIDs: [ + docID: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f" ] diff --git a/tests/integration/explain/default/fixture.go b/tests/integration/explain/default/fixture.go index 01d1c31238..55343126d0 100644 --- a/tests/integration/explain/default/fixture.go +++ b/tests/integration/explain/default/fixture.go @@ -28,7 +28,7 @@ var basicPattern = dataMap{ var emptyChildSelectsAttributeForAuthor = dataMap{ "collectionName": "Author", - "docIDs": nil, + "docID": nil, "filter": nil, "groupBy": nil, "limit": nil, diff --git a/tests/integration/explain/default/group_with_average_test.go b/tests/integration/explain/default/group_with_average_test.go index aa04a737a1..f8e1b6412b 100644 --- a/tests/integration/explain/default/group_with_average_test.go +++ b/tests/integration/explain/default/group_with_average_test.go @@ -67,7 +67,7 @@ func TestDefaultExplainRequestWithGroupByWithAverageOnAnInnerField(t *testing.T) "childSelects": []dataMap{ { "collectionName": "Author", - "docIDs": nil, + "docID": nil, "groupBy": nil, "limit": nil, "orderBy": nil, @@ -159,7 +159,7 @@ func TestDefaultExplainRequestWithAverageInsideTheInnerGroupOnAField(t *testing. 
{ "collectionName": "Author", "groupBy": []string{"verified", "name"}, - "docIDs": nil, + "docID": nil, "limit": nil, "orderBy": nil, "filter": nil, @@ -241,7 +241,7 @@ func TestDefaultExplainRequestWithAverageInsideTheInnerGroupOnAFieldAndNestedGro { "collectionName": "Author", "groupBy": []string{"verified", "name"}, - "docIDs": nil, + "docID": nil, "limit": nil, "orderBy": nil, "filter": nil, @@ -324,7 +324,7 @@ func TestDefaultExplainRequestWithAverageInsideTheInnerGroupAndNestedGroupByWith { "collectionName": "Author", "groupBy": []string{"verified", "name"}, - "docIDs": nil, + "docID": nil, "limit": nil, "orderBy": nil, "filter": nil, diff --git a/tests/integration/explain/default/group_with_doc_id_child_test.go b/tests/integration/explain/default/group_with_doc_id_child_test.go index a721bb571a..38a8d27fe5 100644 --- a/tests/integration/explain/default/group_with_doc_id_child_test.go +++ b/tests/integration/explain/default/group_with_doc_id_child_test.go @@ -32,7 +32,7 @@ func TestDefaultExplainRequestWithDocIDsOnInnerGroupSelection(t *testing.T) { groupBy: [age] ) { age - _group(docIDs: ["bae-6a4c5bc5-b044-5a03-a868-8260af6f2254"]) { + _group(docID: ["bae-6a4c5bc5-b044-5a03-a868-8260af6f2254"]) { name } } @@ -49,7 +49,7 @@ func TestDefaultExplainRequestWithDocIDsOnInnerGroupSelection(t *testing.T) { "childSelects": []dataMap{ { "collectionName": "Author", - "docIDs": []string{"bae-6a4c5bc5-b044-5a03-a868-8260af6f2254"}, + "docID": []string{"bae-6a4c5bc5-b044-5a03-a868-8260af6f2254"}, "filter": nil, "groupBy": nil, "limit": nil, diff --git a/tests/integration/explain/default/group_with_doc_id_test.go b/tests/integration/explain/default/group_with_doc_id_test.go index 2178701a4e..8146a26566 100644 --- a/tests/integration/explain/default/group_with_doc_id_test.go +++ b/tests/integration/explain/default/group_with_doc_id_test.go @@ -89,7 +89,7 @@ func TestDefaultExplainRequestWithDocIDsAndFilterOnParentGroupBy(t *testing.T) { Author( groupBy: [age], filter: {age: {_eq: 20}}, - docIDs: [ + docID: [ "bae-6a4c5bc5-b044-5a03-a868-8260af6f2254", "bae-4ea9d148-13f3-5a48-a0ef-9ffd344caeed" ] diff --git a/tests/integration/explain/default/group_with_filter_child_test.go b/tests/integration/explain/default/group_with_filter_child_test.go index 9ce44342d1..e6f4a42a0d 100644 --- a/tests/integration/explain/default/group_with_filter_child_test.go +++ b/tests/integration/explain/default/group_with_filter_child_test.go @@ -47,7 +47,7 @@ func TestDefaultExplainRequestWithFilterOnInnerGroupSelection(t *testing.T) { "childSelects": []dataMap{ { "collectionName": "Author", - "docIDs": nil, + "docID": nil, "filter": dataMap{ "age": dataMap{ "_gt": int32(63), @@ -116,7 +116,7 @@ func TestDefaultExplainRequestWithFilterOnParentGroupByAndInnerGroupSelection(t "childSelects": []dataMap{ { "collectionName": "Author", - "docIDs": nil, + "docID": nil, "filter": dataMap{ "age": dataMap{ "_gt": int32(63), diff --git a/tests/integration/explain/default/group_with_limit_child_test.go b/tests/integration/explain/default/group_with_limit_child_test.go index a62859aeda..7e32b494aa 100644 --- a/tests/integration/explain/default/group_with_limit_child_test.go +++ b/tests/integration/explain/default/group_with_limit_child_test.go @@ -51,7 +51,7 @@ func TestDefaultExplainRequestWithLimitAndOffsetOnInnerGroupSelection(t *testing "limit": uint64(2), "offset": uint64(1), }, - "docIDs": nil, + "docID": nil, "filter": nil, "groupBy": nil, "orderBy": nil, @@ -104,7 +104,7 @@ func 
TestDefaultExplainRequestWithLimitAndOffsetOnMultipleInnerGroupSelections(t "limit": uint64(1), "offset": uint64(2), }, - "docIDs": nil, + "docID": nil, "filter": nil, "groupBy": nil, "orderBy": nil, @@ -115,7 +115,7 @@ func TestDefaultExplainRequestWithLimitAndOffsetOnMultipleInnerGroupSelections(t "limit": uint64(2), "offset": uint64(0), }, - "docIDs": nil, + "docID": nil, "filter": nil, "groupBy": nil, "orderBy": nil, diff --git a/tests/integration/explain/default/group_with_limit_test.go b/tests/integration/explain/default/group_with_limit_test.go index 1696e3ccd2..d51453c583 100644 --- a/tests/integration/explain/default/group_with_limit_test.go +++ b/tests/integration/explain/default/group_with_limit_test.go @@ -125,7 +125,7 @@ func TestDefaultExplainRequestWithLimitOnParentGroupByAndInnerGroupSelection(t * "offset": uint64(0), }, "orderBy": nil, - "docIDs": nil, + "docID": nil, "groupBy": nil, "filter": nil, }, diff --git a/tests/integration/explain/default/group_with_order_child_test.go b/tests/integration/explain/default/group_with_order_child_test.go index 969f048c0c..4fe9c02b5f 100644 --- a/tests/integration/explain/default/group_with_order_child_test.go +++ b/tests/integration/explain/default/group_with_order_child_test.go @@ -53,7 +53,7 @@ func TestDefaultExplainRequestWithDescendingOrderOnInnerGroupSelection(t *testin "fields": []string{"age"}, }, }, - "docIDs": nil, + "docID": nil, "groupBy": nil, "limit": nil, "filter": nil, @@ -105,7 +105,7 @@ func TestDefaultExplainRequestWithAscendingOrderOnInnerGroupSelection(t *testing "fields": []string{"age"}, }, }, - "docIDs": nil, + "docID": nil, "groupBy": nil, "limit": nil, "filter": nil, @@ -164,7 +164,7 @@ func TestDefaultExplainRequestWithOrderOnNestedParentGroupByAndOnNestedParentsIn }, }, "groupBy": []string{"verified", "name"}, - "docIDs": nil, + "docID": nil, "limit": nil, "filter": nil, }, diff --git a/tests/integration/explain/default/group_with_order_test.go b/tests/integration/explain/default/group_with_order_test.go index 4ab526474e..d91f4b1a85 100644 --- a/tests/integration/explain/default/group_with_order_test.go +++ b/tests/integration/explain/default/group_with_order_test.go @@ -184,7 +184,7 @@ func TestDefaultExplainRequestWithOrderOnParentGroupByAndOnInnerGroupSelection(t "fields": []string{"age"}, }, }, - "docIDs": nil, + "docID": nil, "groupBy": nil, "limit": nil, "filter": nil, diff --git a/tests/integration/explain/default/type_join_many_test.go b/tests/integration/explain/default/type_join_many_test.go index 30bd530a99..3790d43dfa 100644 --- a/tests/integration/explain/default/type_join_many_test.go +++ b/tests/integration/explain/default/type_join_many_test.go @@ -88,7 +88,7 @@ func TestDefaultExplainRequestWithAOneToManyJoin(t *testing.T) { ExpectedAttributes: dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "docIDs": nil, + "docID": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, diff --git a/tests/integration/explain/default/type_join_one_test.go b/tests/integration/explain/default/type_join_one_test.go index 05daf41539..97b17fbf1d 100644 --- a/tests/integration/explain/default/type_join_one_test.go +++ b/tests/integration/explain/default/type_join_one_test.go @@ -89,7 +89,7 @@ func TestDefaultExplainRequestWithAOneToOneJoin(t *testing.T) { ExpectedAttributes: dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "docIDs": nil, + "docID": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, @@ -231,7 +231,7 @@ func TestDefaultExplainRequestWithTwoLevelDeepNestedJoins(t 
*testing.T) { ExpectedAttributes: dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "docIDs": nil, + "docID": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, diff --git a/tests/integration/explain/default/type_join_test.go b/tests/integration/explain/default/type_join_test.go index 5f4d93df3b..c88c7980be 100644 --- a/tests/integration/explain/default/type_join_test.go +++ b/tests/integration/explain/default/type_join_test.go @@ -123,7 +123,7 @@ func TestDefaultExplainRequestWith2SingleJoinsAnd1ManyJoin(t *testing.T) { ExpectedAttributes: dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "docIDs": nil, + "docID": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, @@ -181,7 +181,7 @@ func TestDefaultExplainRequestWith2SingleJoinsAnd1ManyJoin(t *testing.T) { ExpectedAttributes: dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "docIDs": nil, + "docID": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, diff --git a/tests/integration/explain/default/type_join_with_filter_doc_id_test.go b/tests/integration/explain/default/type_join_with_filter_doc_id_test.go index 63aa689fa6..8a29156009 100644 --- a/tests/integration/explain/default/type_join_with_filter_doc_id_test.go +++ b/tests/integration/explain/default/type_join_with_filter_doc_id_test.go @@ -33,7 +33,7 @@ func TestDefaultExplainRequestWithRelatedAndRegularFilterAndDocIDs(t *testing.T) name: {_eq: "John Grisham"}, books: {name: {_eq: "Painted House"}} }, - docIDs: [ + docID: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f8e" ] @@ -61,7 +61,7 @@ func TestDefaultExplainRequestWithRelatedAndRegularFilterAndDocIDs(t *testing.T) { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "docIDs": []string{ + "docID": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f8e", }, @@ -122,7 +122,7 @@ func TestDefaultExplainRequestWithManyRelatedFiltersAndDocID(t *testing.T) { articles: {name: {_eq: "To my dear readers"}}, books: {name: {_eq: "Theif Lord"}} }, - docIDs: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"] + docID: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"] ) { name age @@ -154,7 +154,7 @@ func TestDefaultExplainRequestWithManyRelatedFiltersAndDocID(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "docIDs": []string{ + "docID": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, "filter": dataMap{ diff --git a/tests/integration/explain/default/type_join_with_filter_test.go b/tests/integration/explain/default/type_join_with_filter_test.go index 43880c9d79..1c7a35c1ba 100644 --- a/tests/integration/explain/default/type_join_with_filter_test.go +++ b/tests/integration/explain/default/type_join_with_filter_test.go @@ -57,7 +57,7 @@ func TestDefaultExplainRequestWithRelatedAndRegularFilter(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "docIDs": nil, + "docID": nil, "filter": dataMap{ "books": dataMap{ "name": dataMap{ @@ -142,7 +142,7 @@ func TestDefaultExplainRequestWithManyRelatedFilters(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "docIDs": nil, + "docID": nil, "filter": dataMap{ "articles": dataMap{ "name": dataMap{ diff --git a/tests/integration/explain/default/update_test.go b/tests/integration/explain/default/update_test.go index 12f69568dd..8941a710ec 100644 --- a/tests/integration/explain/default/update_test.go +++ b/tests/integration/explain/default/update_test.go @@ -73,7 +73,7 @@ func 
TestDefaultExplainMutationRequestWithUpdateUsingBooleanFilter(t *testing.T) "_eq": true, }, }, - "docIDs": []string(nil), + "docID": []string(nil), }, }, { @@ -115,7 +115,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIds(t *testing.T) { Request: `mutation @explain { update_Author( - docIDs: [ + docID: [ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ], @@ -138,7 +138,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIds(t *testing.T) { "age": int32(59), }, "filter": nil, - "docIDs": []string{ + "docID": []string{ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, @@ -203,7 +203,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingId(t *testing.T) { "age": int32(59), }, "filter": nil, - "docIDs": []string{ + "docID": []string{ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", }, }, @@ -248,7 +248,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIdsAndFilter(t *testing.T) _eq: true } }, - docIDs: [ + docID: [ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ], @@ -275,7 +275,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIdsAndFilter(t *testing.T) "_eq": true, }, }, - "docIDs": []string{ + "docID": []string{ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, diff --git a/tests/integration/explain/default/with_filter_doc_id_test.go b/tests/integration/explain/default/with_filter_doc_id_test.go index 4e69361e2e..3e08aedb3b 100644 --- a/tests/integration/explain/default/with_filter_doc_id_test.go +++ b/tests/integration/explain/default/with_filter_doc_id_test.go @@ -40,7 +40,7 @@ func TestDefaultExplainRequestWithDocIDFilter(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "docIDs": []string{ + "docID": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, "filter": nil, @@ -80,7 +80,7 @@ func TestDefaultExplainRequestWithDocIDsFilterUsingOneID(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain { - Author(docIDs: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"]) { + Author(docID: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"]) { name age } @@ -92,7 +92,7 @@ func TestDefaultExplainRequestWithDocIDsFilterUsingOneID(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "docIDs": []string{ + "docID": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, "filter": nil, @@ -133,7 +133,7 @@ func TestDefaultExplainRequestWithDocIDsFilterUsingMultipleButDuplicateIDs(t *te Request: `query @explain { Author( - docIDs: [ + docID: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ] @@ -149,7 +149,7 @@ func TestDefaultExplainRequestWithDocIDsFilterUsingMultipleButDuplicateIDs(t *te { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "docIDs": []string{ + "docID": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, @@ -195,7 +195,7 @@ func TestDefaultExplainRequestWithDocIDsFilterUsingMultipleUniqueIDs(t *testing. Request: `query @explain { Author( - docIDs: [ + docID: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f" ] @@ -211,7 +211,7 @@ func TestDefaultExplainRequestWithDocIDsFilterUsingMultipleUniqueIDs(t *testing. 
{ TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "docIDs": []string{ + "docID": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", }, @@ -274,7 +274,7 @@ func TestDefaultExplainRequestWithMatchingIDFilter(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "docIDs": nil, + "docID": nil, "filter": nil, }, }, diff --git a/tests/integration/explain/execute/delete_test.go b/tests/integration/explain/execute/delete_test.go index e80189fd8a..6e43b3dbbd 100644 --- a/tests/integration/explain/execute/delete_test.go +++ b/tests/integration/explain/execute/delete_test.go @@ -30,7 +30,7 @@ func TestExecuteExplainMutationRequestWithDeleteUsingID(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain(type: execute) { - delete_ContactAddress(docIDs: ["bae-49f715e7-7f01-5509-a213-ed98cb81583f"]) { + delete_ContactAddress(docID: ["bae-49f715e7-7f01-5509-a213-ed98cb81583f"]) { city } }`, diff --git a/tests/integration/explain/execute/query_deleted_docs_test.go b/tests/integration/explain/execute/query_deleted_docs_test.go index e925910bb6..a0e152a8c1 100644 --- a/tests/integration/explain/execute/query_deleted_docs_test.go +++ b/tests/integration/explain/execute/query_deleted_docs_test.go @@ -26,7 +26,7 @@ func TestExecuteExplainQueryDeletedDocs(t *testing.T) { create2AddressDocuments(), testUtils.Request{ Request: `mutation { - delete_ContactAddress(docIDs: ["bae-49f715e7-7f01-5509-a213-ed98cb81583f"]) { + delete_ContactAddress(docID: ["bae-49f715e7-7f01-5509-a213-ed98cb81583f"]) { _docID } }`, diff --git a/tests/integration/explain/execute/update_test.go b/tests/integration/explain/execute/update_test.go index 4ed5e471c2..4a525b2cd5 100644 --- a/tests/integration/explain/execute/update_test.go +++ b/tests/integration/explain/execute/update_test.go @@ -31,7 +31,7 @@ func TestExecuteExplainMutationRequestWithUpdateUsingIDs(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain(type: execute) { update_ContactAddress( - docIDs: [ + docID: [ "bae-14f20db7-3654-58de-9156-596ef2cfd790", "bae-49f715e7-7f01-5509-a213-ed98cb81583f" ], diff --git a/tests/integration/explain/simple/basic_test.go b/tests/integration/explain/simple/basic_test.go index 04de88cd45..f061785fde 100644 --- a/tests/integration/explain/simple/basic_test.go +++ b/tests/integration/explain/simple/basic_test.go @@ -41,7 +41,7 @@ func TestSimpleExplainRequest(t *testing.T) { { "selectTopNode": dataMap{ "selectNode": dataMap{ - "docIDs": nil, + "docID": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, diff --git a/tests/integration/mutation/create/with_null_input_test.go b/tests/integration/mutation/create/with_null_input_test.go index 72cec7a7a0..af37208164 100644 --- a/tests/integration/mutation/create/with_null_input_test.go +++ b/tests/integration/mutation/create/with_null_input_test.go @@ -60,16 +60,12 @@ func TestMutationCreate_WithNullInput_Succeeds(t *testing.T) { }, testUtils.Request{ Request: `mutation { - create_Users(input: null, inputs: [{name: "Bob"}]) { + create_Users(input: null) { name } }`, Results: map[string]any{ - "create_Users": []map[string]any{ - { - "name": "Bob", - }, - }, + "create_Users": []map[string]any{}, }, }, }, @@ -78,9 +74,9 @@ func TestMutationCreate_WithNullInput_Succeeds(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestMutationCreate_WithNullInputs_Succeeds(t *testing.T) { +func TestMutationCreate_WithNullInputEntry_ReturnsError(t *testing.T) { test := testUtils.TestCase{ 
- Description: "Simple create mutation, with null inputs", + Description: "Simple create mutation, with null input entry returns error", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -91,17 +87,11 @@ func TestMutationCreate_WithNullInputs_Succeeds(t *testing.T) { }, testUtils.Request{ Request: `mutation { - create_Users(inputs: null, input: {name: "Bob"}) { + create_Users(input: [null]) { name } }`, - Results: map[string]any{ - "create_Users": []map[string]any{ - { - "name": "Bob", - }, - }, - }, + ExpectedError: "Expected \"UsersMutationInputArg!\", found null.", }, }, } diff --git a/tests/integration/mutation/create/with_variables_test.go b/tests/integration/mutation/create/with_variables_test.go index a77e788efb..5bd4814b16 100644 --- a/tests/integration/mutation/create/with_variables_test.go +++ b/tests/integration/mutation/create/with_variables_test.go @@ -35,7 +35,7 @@ func TestMutationCreateWithNonNullVariable(t *testing.T) { "name": "Bob", }, }), - Request: `mutation($user: UsersMutationInputArg!) { + Request: `mutation($user: [UsersMutationInputArg!]!) { create_Users(input: $user) { name } @@ -66,7 +66,7 @@ func TestMutationCreateWithDefaultVariable(t *testing.T) { `, }, testUtils.Request{ - Request: `mutation($user: UsersMutationInputArg = {name: "Bob"}) { + Request: `mutation($user: [UsersMutationInputArg!] = {name: "Bob"}) { create_Users(input: $user) { name } diff --git a/tests/integration/mutation/delete/with_id_alias_test.go b/tests/integration/mutation/delete/with_id_alias_test.go index 882125c933..643a0bf4ec 100644 --- a/tests/integration/mutation/delete/with_id_alias_test.go +++ b/tests/integration/mutation/delete/with_id_alias_test.go @@ -34,7 +34,7 @@ func TestMutationDeletion_WithIDAndAlias(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(docIDs: ["bae-22dacd35-4560-583a-9a80-8edbf28aa85c"]) { + delete_User(docID: ["bae-22dacd35-4560-583a-9a80-8edbf28aa85c"]) { fancyKey: _docID } }`, diff --git a/tests/integration/mutation/delete/with_id_test.go b/tests/integration/mutation/delete/with_id_test.go index 6e6b8bb5b8..9d496103e2 100644 --- a/tests/integration/mutation/delete/with_id_test.go +++ b/tests/integration/mutation/delete/with_id_test.go @@ -29,7 +29,7 @@ func TestMutationDeletion_WithIDUnknownValue(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(docIDs: ["bae-22dacd35-4560-583a-9a80-8edbf28aa85c"]) { + delete_User(docID: ["bae-22dacd35-4560-583a-9a80-8edbf28aa85c"]) { _docID } }`, @@ -61,7 +61,7 @@ func TestMutationDeletion_WithIDUnknownValueAndUnrelatedRecordInCollection(t *te }, testUtils.Request{ Request: `mutation { - delete_User(docIDs: ["bae-22dacd35-4560-583a-9a80-8edbf28aa85c"]) { + delete_User(docID: ["bae-22dacd35-4560-583a-9a80-8edbf28aa85c"]) { _docID } }`, diff --git a/tests/integration/mutation/delete/with_id_txn_test.go b/tests/integration/mutation/delete/with_id_txn_test.go index aa33ee447f..764fce69a8 100644 --- a/tests/integration/mutation/delete/with_id_txn_test.go +++ b/tests/integration/mutation/delete/with_id_txn_test.go @@ -37,7 +37,7 @@ func TestMutationDeletion_WithIDAndTxn(t *testing.T) { testUtils.Request{ TransactionID: immutable.Some(0), Request: `mutation { - delete_User(docIDs: ["bae-22dacd35-4560-583a-9a80-8edbf28aa85c"]) { + delete_User(docID: ["bae-22dacd35-4560-583a-9a80-8edbf28aa85c"]) { _docID } }`, diff --git a/tests/integration/mutation/delete/with_ids_alias_test.go b/tests/integration/mutation/delete/with_ids_alias_test.go index 23fa4e2dc4..9c66333b7e 100644 --- 
a/tests/integration/mutation/delete/with_ids_alias_test.go +++ b/tests/integration/mutation/delete/with_ids_alias_test.go @@ -48,7 +48,7 @@ func TestMutationDeletion_WithIDsAndSelectAlias(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(docIDs: ["bae-959725a4-17cb-5e04-8908-98bc78fd06dd", "bae-3eed37ed-5c7b-53ff-b125-d04fb173f6c0"]) { + delete_User(docID: ["bae-959725a4-17cb-5e04-8908-98bc78fd06dd", "bae-3eed37ed-5c7b-53ff-b125-d04fb173f6c0"]) { AliasID: _docID } }`, diff --git a/tests/integration/mutation/delete/with_ids_filter_test.go b/tests/integration/mutation/delete/with_ids_filter_test.go index 94024a4113..666085238a 100644 --- a/tests/integration/mutation/delete/with_ids_filter_test.go +++ b/tests/integration/mutation/delete/with_ids_filter_test.go @@ -34,7 +34,7 @@ func TestMutationDeletion_WithIDsAndEmptyFilter(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(docIDs: ["bae-22dacd35-4560-583a-9a80-8edbf28aa85c"], filter: {}) { + delete_User(docID: ["bae-22dacd35-4560-583a-9a80-8edbf28aa85c"], filter: {}) { _docID } }`, diff --git a/tests/integration/mutation/delete/with_ids_test.go b/tests/integration/mutation/delete/with_ids_test.go index 1469990ce7..dd6f501303 100644 --- a/tests/integration/mutation/delete/with_ids_test.go +++ b/tests/integration/mutation/delete/with_ids_test.go @@ -39,7 +39,7 @@ func TestMutationDeletion_WithIDs(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(docIDs: ["bae-22dacd35-4560-583a-9a80-8edbf28aa85c", "bae-1ef746f8-821e-586f-99b2-4cb1fb9b782f"]) { + delete_User(docID: ["bae-22dacd35-4560-583a-9a80-8edbf28aa85c", "bae-1ef746f8-821e-586f-99b2-4cb1fb9b782f"]) { _docID } }`, @@ -83,7 +83,7 @@ func TestMutationDeletion_WithEmptyIDs(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(docIDs: []) { + delete_User(docID: []) { _docID } }`, @@ -128,7 +128,7 @@ func TestMutationDeletion_WithIDsSingleUnknownID(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(docIDs: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507e"]) { + delete_User(docID: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507e"]) { _docID } }`, @@ -155,7 +155,7 @@ func TestMutationDeletion_WithIDsMultipleUnknownID(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(docIDs: ["bae-028383cc-d6ba-5df7-959f-2bdce3536a05", "bae-028383cc-d6ba-5df7-959f-2bdce3536a03"]) { + delete_User(docID: ["bae-028383cc-d6ba-5df7-959f-2bdce3536a05", "bae-028383cc-d6ba-5df7-959f-2bdce3536a03"]) { _docID } }`, @@ -187,7 +187,7 @@ func TestMutationDeletion_WithIDsKnownAndUnknown(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(docIDs: ["bae-22dacd35-4560-583a-9a80-8edbf28aa85c", "bae-1ef746f8-821e-586f-99b2-4cb1fb9b782f"]) { + delete_User(docID: ["bae-22dacd35-4560-583a-9a80-8edbf28aa85c", "bae-1ef746f8-821e-586f-99b2-4cb1fb9b782f"]) { _docID } }`, diff --git a/tests/integration/mutation/delete/with_ids_txn_test.go b/tests/integration/mutation/delete/with_ids_txn_test.go index 85c3624c7f..9f386bc768 100644 --- a/tests/integration/mutation/delete/with_ids_txn_test.go +++ b/tests/integration/mutation/delete/with_ids_txn_test.go @@ -43,7 +43,7 @@ func TestMutationDeletion_WithIDsAndTxn(t *testing.T) { testUtils.Request{ TransactionID: immutable.Some(0), Request: `mutation { - delete_User(docIDs: ["bae-959725a4-17cb-5e04-8908-98bc78fd06dd"]) { + delete_User(docID: ["bae-959725a4-17cb-5e04-8908-98bc78fd06dd"]) { _docID } }`, @@ -58,7 +58,7 @@ func TestMutationDeletion_WithIDsAndTxn(t 
*testing.T) { testUtils.Request{ TransactionID: immutable.Some(0), Request: `query { - User(docIDs: ["bae-959725a4-17cb-5e04-8908-98bc78fd06dd"]) { + User(docID: ["bae-959725a4-17cb-5e04-8908-98bc78fd06dd"]) { _docID } }`, diff --git a/tests/integration/mutation/delete/with_ids_update_alias_test.go b/tests/integration/mutation/delete/with_ids_update_alias_test.go index 89ea5f8c06..82b975f5ad 100644 --- a/tests/integration/mutation/delete/with_ids_update_alias_test.go +++ b/tests/integration/mutation/delete/with_ids_update_alias_test.go @@ -56,7 +56,7 @@ func TestMutationDeletion_WithUpdateAndIDsAndSelectAlias(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(docIDs: ["bae-959725a4-17cb-5e04-8908-98bc78fd06dd", "bae-3eed37ed-5c7b-53ff-b125-d04fb173f6c0"]) { + delete_User(docID: ["bae-959725a4-17cb-5e04-8908-98bc78fd06dd", "bae-3eed37ed-5c7b-53ff-b125-d04fb173f6c0"]) { AliasID: _docID } }`, diff --git a/tests/integration/mutation/delete/with_null_input_test.go b/tests/integration/mutation/delete/with_null_input_test.go index 1619adc64f..96de017257 100644 --- a/tests/integration/mutation/delete/with_null_input_test.go +++ b/tests/integration/mutation/delete/with_null_input_test.go @@ -106,7 +106,7 @@ func TestMutationDelete_WithNullDocIDs_Succeeds(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_Users(docIDs: null) { + delete_Users(docID: null) { name } }`, diff --git a/tests/integration/mutation/update/with_ids_test.go b/tests/integration/mutation/update/with_ids_test.go index 9afa6a4d02..6e848e4689 100644 --- a/tests/integration/mutation/update/with_ids_test.go +++ b/tests/integration/mutation/update/with_ids_test.go @@ -51,7 +51,7 @@ func TestMutationUpdate_WithIds(t *testing.T) { testUtils.Request{ Request: `mutation { update_Users( - docIDs: ["bae-0289c22a-aec7-5b59-adfc-60968698fcdf", "bae-fcc8673d-25f9-5f24-a529-4bc997035278"], + docID: ["bae-0289c22a-aec7-5b59-adfc-60968698fcdf", "bae-fcc8673d-25f9-5f24-a529-4bc997035278"], input: {points: 59} ) { name diff --git a/tests/integration/mutation/update/with_null_input_test.go b/tests/integration/mutation/update/with_null_input_test.go index 6c26db63a5..bc6be4f7f9 100644 --- a/tests/integration/mutation/update/with_null_input_test.go +++ b/tests/integration/mutation/update/with_null_input_test.go @@ -106,7 +106,7 @@ func TestMutationUpdate_WithNullDocIDs_Succeeds(t *testing.T) { }, testUtils.Request{ Request: `mutation { - update_Users(docIDs: null, input: {name: "Alice"}) { + update_Users(docID: null, input: {name: "Alice"}) { name } }`, diff --git a/tests/integration/query/one_to_many/with_doc_ids_test.go b/tests/integration/query/one_to_many/with_doc_ids_test.go index 5b561fe98e..6a8926964f 100644 --- a/tests/integration/query/one_to_many/with_doc_ids_test.go +++ b/tests/integration/query/one_to_many/with_doc_ids_test.go @@ -65,7 +65,7 @@ func TestQueryOneToManyWithChildDocIDs(t *testing.T) { Author { name published ( - docIDs: ["bae-064f13c1-7726-5d53-8eec-c395d94da4d0", "bae-649c8101-76b8-5d18-a701-21c97a5c66b3"] + docID: ["bae-064f13c1-7726-5d53-8eec-c395d94da4d0", "bae-649c8101-76b8-5d18-a701-21c97a5c66b3"] ) { name } diff --git a/tests/integration/query/simple/with_doc_ids_test.go b/tests/integration/query/simple/with_doc_ids_test.go index 4b2aa16ce0..b77ec55f05 100644 --- a/tests/integration/query/simple/with_doc_ids_test.go +++ b/tests/integration/query/simple/with_doc_ids_test.go @@ -29,7 +29,7 @@ func TestQuerySimpleWithDocIDsFilter(t *testing.T) { }, testUtils.Request{ Request: `query 
{ - Users(docIDs: ["bae-d4303725-7db9-53d2-b324-f3ee44020e52"]) { + Users(docID: ["bae-d4303725-7db9-53d2-b324-f3ee44020e52"]) { Name Age } @@ -56,7 +56,7 @@ func TestQuerySimpleWithDocIDsFilter(t *testing.T) { }, testUtils.Request{ Request: `query { - Users(docIDs: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009g"]) { + Users(docID: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009g"]) { Name Age } @@ -84,7 +84,7 @@ func TestQuerySimpleWithDocIDsFilter(t *testing.T) { }, testUtils.Request{ Request: `query { - Users(docIDs: ["bae-d4303725-7db9-53d2-b324-f3ee44020e52", "bae-d4303725-7db9-53d2-b324-f3ee44020e52"]) { + Users(docID: ["bae-d4303725-7db9-53d2-b324-f3ee44020e52", "bae-d4303725-7db9-53d2-b324-f3ee44020e52"]) { Name Age } @@ -123,7 +123,7 @@ func TestQuerySimpleWithDocIDsFilter(t *testing.T) { }, testUtils.Request{ Request: `query { - Users(docIDs: ["bae-d4303725-7db9-53d2-b324-f3ee44020e52", "bae-428c6d76-3491-520b-ad1f-a218f4dad787"]) { + Users(docID: ["bae-d4303725-7db9-53d2-b324-f3ee44020e52", "bae-428c6d76-3491-520b-ad1f-a218f4dad787"]) { Name Age } @@ -162,7 +162,7 @@ func TestQuerySimpleReturnsNothinGivenEmptyDocIDsFilter(t *testing.T) { }, testUtils.Request{ Request: `query { - Users(docIDs: []) { + Users(docID: []) { Name Age } diff --git a/tests/integration/query/simple/with_group_doc_ids_test.go b/tests/integration/query/simple/with_group_doc_ids_test.go index 5798bd387f..4abcb2b4bd 100644 --- a/tests/integration/query/simple/with_group_doc_ids_test.go +++ b/tests/integration/query/simple/with_group_doc_ids_test.go @@ -48,7 +48,7 @@ func TestQuerySimpleWithGroupByWithGroupWithDocIDs(t *testing.T) { Request: `query { Users(groupBy: [Age]) { Age - _group(docIDs: ["bae-d4303725-7db9-53d2-b324-f3ee44020e52", "bae-19b16890-5f24-5e5b-8822-ed2a97ebcc24"]) { + _group(docID: ["bae-d4303725-7db9-53d2-b324-f3ee44020e52", "bae-19b16890-5f24-5e5b-8822-ed2a97ebcc24"]) { Name } } diff --git a/tests/integration/query/simple/with_null_input_test.go b/tests/integration/query/simple/with_null_input_test.go index ceba642887..9f11d14a8e 100644 --- a/tests/integration/query/simple/with_null_input_test.go +++ b/tests/integration/query/simple/with_null_input_test.go @@ -230,7 +230,7 @@ func TestQuerySimple_WithNullDocIDs_Succeeds(t *testing.T) { }, testUtils.Request{ Request: `query { - Users(docIDs: null) { + Users(docID: null) { Name } }`, diff --git a/tests/integration/schema/default_fields.go b/tests/integration/schema/default_fields.go index 23f1697938..5e77356081 100644 --- a/tests/integration/schema/default_fields.go +++ b/tests/integration/schema/default_fields.go @@ -144,13 +144,6 @@ var cidArg = Field{ } var docIDArg = Field{ "name": request.DocIDArgName, - "type": map[string]any{ - "name": "String", - "inputFields": nil, - }, -} -var docIDsArg = Field{ - "name": request.DocIDsArgName, "type": map[string]any{ "name": nil, "inputFields": nil, diff --git a/tests/integration/schema/filter_test.go b/tests/integration/schema/filter_test.go index e3780a3653..4f42e44a3a 100644 --- a/tests/integration/schema/filter_test.go +++ b/tests/integration/schema/filter_test.go @@ -133,7 +133,6 @@ var defaultUserArgsWithoutFilter = trimFields( fields{ cidArg, docIDArg, - docIDsArg, showDeletedArg, groupByArg, limitArg, @@ -285,7 +284,6 @@ var defaultBookArgsWithoutFilter = trimFields( fields{ cidArg, docIDArg, - docIDsArg, showDeletedArg, groupByArg, limitArg, diff --git a/tests/integration/schema/input_type_test.go b/tests/integration/schema/input_type_test.go index c9798ffc9f..c92e2f3343 100644 --- 
a/tests/integration/schema/input_type_test.go +++ b/tests/integration/schema/input_type_test.go @@ -78,7 +78,6 @@ func TestInputTypeOfOrderFieldWhereSchemaHasManyRelationType(t *testing.T) { trimFields( fields{ docIDArg, - docIDsArg, buildFilterArg("group", []argDef{ { fieldName: "members", @@ -259,7 +258,6 @@ var testInputTypeOfOrderFieldWhereSchemaHasRelationTypeArgProps = map[string]any var defaultGroupArgsWithoutOrder = trimFields( fields{ docIDArg, - docIDsArg, buildFilterArg("author", []argDef{ { fieldName: "age", diff --git a/tests/integration/utils.go b/tests/integration/utils.go index 62e27e0b73..85ba2f870d 100644 --- a/tests/integration/utils.go +++ b/tests/integration/utils.go @@ -1320,7 +1320,6 @@ func createDocViaGQL( var docMaps []map[string]any err = json.Unmarshal([]byte(action.Doc), &docMaps) require.NoError(s.t, err) - paramName = request.Inputs input, err = arrayToGQL(docMaps) } else { input, err = jsonToGQL(action.Doc) From 17b3de94814616107102c924ad6883b1dfc1c945 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 22:20:16 -0400 Subject: [PATCH 35/71] bot: Update dependencies (bulk dependabot PRs) 23-09-2024 (#3055) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by combining the following PRs: #3054 bot: Bump @typescript-eslint/parser from 8.5.0 to 8.6.0 in /playground #3053 bot: Bump @types/react from 18.3.6 to 18.3.8 in /playground #3052 bot: Bump vite from 5.4.6 to 5.4.7 in /playground #3051 bot: Bump eslint from 9.10.0 to 9.11.0 in /playground #3050 bot: Bump google.golang.org/grpc from 1.66.2 to 1.67.0 #3049 bot: Bump golang.org/x/crypto from 0.26.0 to 0.27.0 #3048 bot: Bump github.com/cosmos/cosmos-sdk from 0.50.9 to 0.50.10 --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- go.mod | 30 ++--- go.sum | 60 ++++----- playground/package-lock.json | 250 +++++------------------------------ playground/package.json | 8 +- 4 files changed, 84 insertions(+), 264 deletions(-) diff --git a/go.mod b/go.mod index 4788e9667c..cf59b00c0e 100644 --- a/go.mod +++ b/go.mod @@ -7,7 +7,7 @@ toolchain go1.22.0 require ( github.com/bits-and-blooms/bitset v1.14.3 github.com/bxcodec/faker v2.0.1+incompatible - github.com/cosmos/cosmos-sdk v0.50.9 + github.com/cosmos/cosmos-sdk v0.50.10 github.com/cosmos/gogoproto v1.7.0 github.com/cyware/ssi-sdk v0.0.0-20231229164914-f93f3006379f github.com/decred/dcrd/dcrec/secp256k1/v4 v4.3.0 @@ -63,16 +63,16 @@ require ( go.opentelemetry.io/otel/metric v1.30.0 go.opentelemetry.io/otel/sdk/metric v1.30.0 go.uber.org/zap v1.27.0 - golang.org/x/crypto v0.26.0 + golang.org/x/crypto v0.27.0 golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa - google.golang.org/grpc v1.66.2 + google.golang.org/grpc v1.67.0 google.golang.org/protobuf v1.34.2 ) require ( buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.31.0-20230802163732-1c33ebd9ecfa.1 // indirect cloud.google.com/go v0.112.1 // indirect - cloud.google.com/go/compute/metadata v0.3.0 // indirect + cloud.google.com/go/compute/metadata v0.5.0 // indirect cloud.google.com/go/iam v1.1.6 // indirect cloud.google.com/go/storage v1.38.0 // indirect cosmossdk.io/api v0.7.5 // indirect @@ -80,13 +80,13 @@ require ( cosmossdk.io/core v0.11.1 // indirect cosmossdk.io/depinject v1.0.0 // indirect cosmossdk.io/errors v1.0.1 // indirect - cosmossdk.io/log v1.3.1 
// indirect + cosmossdk.io/log v1.4.1 // indirect cosmossdk.io/math v1.3.0 // indirect - cosmossdk.io/store v1.1.0 // indirect + cosmossdk.io/store v1.1.1 // indirect cosmossdk.io/x/circuit v0.1.0 // indirect cosmossdk.io/x/evidence v0.1.0 // indirect cosmossdk.io/x/feegrant v0.1.0 // indirect - cosmossdk.io/x/tx v0.13.4 // indirect + cosmossdk.io/x/tx v0.13.5 // indirect cosmossdk.io/x/upgrade v0.1.1 // indirect filippo.io/edwards25519 v1.1.0 // indirect github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect @@ -129,10 +129,10 @@ require ( github.com/cosmos/cosmos-proto v1.0.0-beta.5 // indirect github.com/cosmos/go-bip39 v1.0.0 // indirect github.com/cosmos/gogogateway v1.2.0 // indirect - github.com/cosmos/iavl v1.1.2 // indirect + github.com/cosmos/iavl v1.2.0 // indirect github.com/cosmos/ibc-go/modules/capability v1.0.0 // indirect github.com/cosmos/ibc-go/v8 v8.2.0 // indirect - github.com/cosmos/ics23/go v0.10.0 // indirect + github.com/cosmos/ics23/go v0.11.0 // indirect github.com/cosmos/ledger-cosmos-go v0.13.3 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.4 // indirect github.com/cskr/pubsub v1.0.2 // indirect @@ -172,7 +172,7 @@ require ( github.com/godbus/dbus/v5 v5.1.0 // indirect github.com/gogo/googleapis v1.4.1 // indirect github.com/gogo/protobuf v1.3.2 // indirect - github.com/golang/glog v1.2.1 // indirect + github.com/golang/glog v1.2.2 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/mock v1.6.0 // indirect github.com/golang/protobuf v1.5.4 // indirect @@ -323,7 +323,7 @@ require ( github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect github.com/rogpeppe/go-internal v1.12.0 // indirect github.com/rs/cors v1.11.1 // indirect - github.com/rs/zerolog v1.32.0 // indirect + github.com/rs/zerolog v1.33.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/sagikazarmark/locafero v0.4.0 // indirect github.com/sagikazarmark/slog-shim v0.1.0 // indirect @@ -363,18 +363,18 @@ require ( go.uber.org/multierr v1.11.0 // indirect golang.org/x/mod v0.20.0 // indirect golang.org/x/net v0.28.0 // indirect - golang.org/x/oauth2 v0.21.0 // indirect + golang.org/x/oauth2 v0.22.0 // indirect golang.org/x/sync v0.8.0 // indirect golang.org/x/sys v0.25.0 // indirect golang.org/x/term v0.24.0 // indirect - golang.org/x/text v0.17.0 // indirect + golang.org/x/text v0.18.0 // indirect golang.org/x/time v0.5.0 // indirect golang.org/x/tools v0.24.0 // indirect gonum.org/v1/gonum v0.15.0 // indirect google.golang.org/api v0.171.0 // indirect google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20240617180043-68d350f18fd4 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20240709173604-40e1e62336c5 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240814211410-ddb44dafa142 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/go.sum b/go.sum index 374d490ecf..43308a4372 100644 --- a/go.sum +++ b/go.sum @@ -72,8 +72,8 @@ cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz cloud.google.com/go/compute v1.6.1/go.mod h1:g85FgpzFvNULZ+S8AYq87axRKuf2Kh7deLqV/jJ3thU= cloud.google.com/go/compute v1.7.0/go.mod h1:435lt8av5oL9P3fv1OEzSbSUe+ybHXGMPQHHZWZxy9U= 
cloud.google.com/go/compute v1.10.0/go.mod h1:ER5CLbMxl90o2jtNbGSbtfOpQKR0t15FOtRsugnLrlU= -cloud.google.com/go/compute/metadata v0.3.0 h1:Tz+eQXMEqDIKRsmY3cHTL6FVaynIjX2QxYC4trgAKZc= -cloud.google.com/go/compute/metadata v0.3.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= +cloud.google.com/go/compute/metadata v0.5.0 h1:Zr0eK8JbFv6+Wi4ilXAR8FJ3wyNdpxHKJNPos6LTZOY= +cloud.google.com/go/compute/metadata v0.5.0/go.mod h1:aHnloV2TPI38yx4s9+wAZhHykWvVCfu7hQbF+9CWoiY= cloud.google.com/go/containeranalysis v0.5.1/go.mod h1:1D92jd8gRR/c0fGMlymRgxWD3Qw9C1ff6/T7mLgVL8I= cloud.google.com/go/containeranalysis v0.6.0/go.mod h1:HEJoiEIu+lEXM+k7+qLCci0h33lX3ZqoYFdmPcoO7s4= cloud.google.com/go/datacatalog v1.3.0/go.mod h1:g9svFY6tuR+j+hrTw3J2dNcmI0dzmSiyOzm8kpLq0a0= @@ -198,20 +198,20 @@ cosmossdk.io/depinject v1.0.0 h1:dQaTu6+O6askNXO06+jyeUAnF2/ssKwrrszP9t5q050= cosmossdk.io/depinject v1.0.0/go.mod h1:zxK/h3HgHoA/eJVtiSsoaRaRA2D5U4cJ5thIG4ssbB8= cosmossdk.io/errors v1.0.1 h1:bzu+Kcr0kS/1DuPBtUFdWjzLqyUuCiyHjyJB6srBV/0= cosmossdk.io/errors v1.0.1/go.mod h1:MeelVSZThMi4bEakzhhhE/CKqVv3nOJDA25bIqRDu/U= -cosmossdk.io/log v1.3.1 h1:UZx8nWIkfbbNEWusZqzAx3ZGvu54TZacWib3EzUYmGI= -cosmossdk.io/log v1.3.1/go.mod h1:2/dIomt8mKdk6vl3OWJcPk2be3pGOS8OQaLUM/3/tCM= +cosmossdk.io/log v1.4.1 h1:wKdjfDRbDyZRuWa8M+9nuvpVYxrEOwbD/CA8hvhU8QM= +cosmossdk.io/log v1.4.1/go.mod h1:k08v0Pyq+gCP6phvdI6RCGhLf/r425UT6Rk/m+o74rU= cosmossdk.io/math v1.3.0 h1:RC+jryuKeytIiictDslBP9i1fhkVm6ZDmZEoNP316zE= cosmossdk.io/math v1.3.0/go.mod h1:vnRTxewy+M7BtXBNFybkuhSH4WfedVAAnERHgVFhp3k= -cosmossdk.io/store v1.1.0 h1:LnKwgYMc9BInn9PhpTFEQVbL9UK475G2H911CGGnWHk= -cosmossdk.io/store v1.1.0/go.mod h1:oZfW/4Fc/zYqu3JmQcQdUJ3fqu5vnYTn3LZFFy8P8ng= +cosmossdk.io/store v1.1.1 h1:NA3PioJtWDVU7cHHeyvdva5J/ggyLDkyH0hGHl2804Y= +cosmossdk.io/store v1.1.1/go.mod h1:8DwVTz83/2PSI366FERGbWSH7hL6sB7HbYp8bqksNwM= cosmossdk.io/x/circuit v0.1.0 h1:IAej8aRYeuOMritczqTlljbUVHq1E85CpBqaCTwYgXs= cosmossdk.io/x/circuit v0.1.0/go.mod h1:YDzblVE8+E+urPYQq5kq5foRY/IzhXovSYXb4nwd39w= cosmossdk.io/x/evidence v0.1.0 h1:J6OEyDl1rbykksdGynzPKG5R/zm6TacwW2fbLTW4nCk= cosmossdk.io/x/evidence v0.1.0/go.mod h1:hTaiiXsoiJ3InMz1uptgF0BnGqROllAN8mwisOMMsfw= cosmossdk.io/x/feegrant v0.1.0 h1:c7s3oAq/8/UO0EiN1H5BIjwVntujVTkYs35YPvvrdQk= cosmossdk.io/x/feegrant v0.1.0/go.mod h1:4r+FsViJRpcZif/yhTn+E0E6OFfg4n0Lx+6cCtnZElU= -cosmossdk.io/x/tx v0.13.4 h1:Eg0PbJgeO0gM8p5wx6xa0fKR7hIV6+8lC56UrsvSo0Y= -cosmossdk.io/x/tx v0.13.4/go.mod h1:BkFqrnGGgW50Y6cwTy+JvgAhiffbGEKW6KF9ufcDpvk= +cosmossdk.io/x/tx v0.13.5 h1:FdnU+MdmFWn1pTsbfU0OCf2u6mJ8cqc1H4OMG418MLw= +cosmossdk.io/x/tx v0.13.5/go.mod h1:V6DImnwJMTq5qFjeGWpXNiT/fjgE4HtmclRmTqRVM3w= cosmossdk.io/x/upgrade v0.1.1 h1:aoPe2gNvH+Gwt/Pgq3dOxxQVU3j5P6Xf+DaUJTDZATc= cosmossdk.io/x/upgrade v0.1.1/go.mod h1:MNLptLPcIFK9CWt7Ra//8WUZAxweyRDNcbs5nkOcQy0= dmitri.shuralyov.com/app/changes v0.0.0-20180602232624-0a106ad413e3/go.mod h1:Yl+fi1br7+Rr3LqpNJf1/uxUdtRUV+Tnj0o93V2B9MU= @@ -395,8 +395,8 @@ github.com/cosmos/cosmos-db v1.0.2 h1:hwMjozuY1OlJs/uh6vddqnk9j7VamLv+0DBlbEXbAK github.com/cosmos/cosmos-db v1.0.2/go.mod h1:Z8IXcFJ9PqKK6BIsVOB3QXtkKoqUOp1vRvPT39kOXEA= github.com/cosmos/cosmos-proto v1.0.0-beta.5 h1:eNcayDLpip+zVLRLYafhzLvQlSmyab+RC5W7ZfmxJLA= github.com/cosmos/cosmos-proto v1.0.0-beta.5/go.mod h1:hQGLpiIUloJBMdQMMWb/4wRApmI9hjHH05nefC0Ojec= -github.com/cosmos/cosmos-sdk v0.50.9 h1:gt2usjz0H0qW6KwAxWw7ZJ3XU8uDwmhN+hYG3nTLeSg= -github.com/cosmos/cosmos-sdk v0.50.9/go.mod h1:TMH6wpoYBcg7Cp5BEg8fneLr+8XloNQkf2MRNF9V6JE= 
+github.com/cosmos/cosmos-sdk v0.50.10 h1:zXfeu/z653tWZARr/jESzAEiCUYjgJwwG4ytnYWMoDM= +github.com/cosmos/cosmos-sdk v0.50.10/go.mod h1:6Eesrx3ZE7vxBZWpK++30H+Uc7Q4ahQWCL7JKU/LEdU= github.com/cosmos/go-bip39 v1.0.0 h1:pcomnQdrdH22njcAatO0yWojsUnCO3y2tNoV1cb6hHY= github.com/cosmos/go-bip39 v1.0.0/go.mod h1:RNJv0H/pOIVgxw6KS7QeX2a0Uo0aKUlfhZ4xuwvCdJw= github.com/cosmos/gogogateway v1.2.0 h1:Ae/OivNhp8DqBi/sh2A8a1D0y638GpL3tkmLQAiKxTE= @@ -404,14 +404,14 @@ github.com/cosmos/gogogateway v1.2.0/go.mod h1:iQpLkGWxYcnCdz5iAdLcRBSw3h7NXeOkZ github.com/cosmos/gogoproto v1.4.2/go.mod h1:cLxOsn1ljAHSV527CHOtaIP91kK6cCrZETRBrkzItWU= github.com/cosmos/gogoproto v1.7.0 h1:79USr0oyXAbxg3rspGh/m4SWNyoz/GLaAh0QlCe2fro= github.com/cosmos/gogoproto v1.7.0/go.mod h1:yWChEv5IUEYURQasfyBW5ffkMHR/90hiHgbNgrtp4j0= -github.com/cosmos/iavl v1.1.2 h1:zL9FK7C4L/P4IF1Dm5fIwz0WXCnn7Bp1M2FxH0ayM7Y= -github.com/cosmos/iavl v1.1.2/go.mod h1:jLeUvm6bGT1YutCaL2fIar/8vGUE8cPZvh/gXEWDaDM= +github.com/cosmos/iavl v1.2.0 h1:kVxTmjTh4k0Dh1VNL046v6BXqKziqMDzxo93oh3kOfM= +github.com/cosmos/iavl v1.2.0/go.mod h1:HidWWLVAtODJqFD6Hbne2Y0q3SdxByJepHUOeoH4LiI= github.com/cosmos/ibc-go/modules/capability v1.0.0 h1:r/l++byFtn7jHYa09zlAdSeevo8ci1mVZNO9+V0xsLE= github.com/cosmos/ibc-go/modules/capability v1.0.0/go.mod h1:D81ZxzjZAe0ZO5ambnvn1qedsFQ8lOwtqicG6liLBco= github.com/cosmos/ibc-go/v8 v8.2.0 h1:7oCzyy1sZCcgpeQLnHxC56brsSz3KWwQGKXalXwXFzE= github.com/cosmos/ibc-go/v8 v8.2.0/go.mod h1:wj3qx75iC/XNnsMqbPDCIGs0G6Y3E/lo3bdqCyoCy+8= -github.com/cosmos/ics23/go v0.10.0 h1:iXqLLgp2Lp+EdpIuwXTYIQU+AiHj9mOC2X9ab++bZDM= -github.com/cosmos/ics23/go v0.10.0/go.mod h1:ZfJSmng/TBNTBkFemHHHj5YY7VAU/MBU980F4VU1NG0= +github.com/cosmos/ics23/go v0.11.0 h1:jk5skjT0TqX5e5QJbEnwXIS2yI2vnmLOgpQPeM5RtnU= +github.com/cosmos/ics23/go v0.11.0/go.mod h1:A8OjxPE67hHST4Icw94hOxxFEJMBG031xIGF/JHNIY0= github.com/cosmos/ledger-cosmos-go v0.13.3 h1:7ehuBGuyIytsXbd4MP43mLeoN2LTOEnk5nvue4rK+yM= github.com/cosmos/ledger-cosmos-go v0.13.3/go.mod h1:HENcEP+VtahZFw38HZ3+LS3Iv5XV6svsnkk9vdJtLr8= github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= @@ -604,8 +604,8 @@ github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXP github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/glog v1.2.1 h1:OptwRhECazUx5ix5TTWC3EZhsZEHWcYWY4FQHTIubm4= -github.com/golang/glog v1.2.1/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= +github.com/golang/glog v1.2.2 h1:1+mZ9upx1Dh6FmUTFR1naJ77miKiXgALjWOZ3NVFPmY= +github.com/golang/glog v1.2.2/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -1331,8 +1331,8 @@ github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= github.com/rs/cors v1.11.1 h1:eU3gRzXLRK57F5rKMGMZURNdIG4EoAmX8k94r9wXWHA= github.com/rs/cors v1.11.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= 
-github.com/rs/zerolog v1.32.0 h1:keLypqrlIjaFsbmJOBdB/qvyF8KEtCWHwobLp5l/mQ0= -github.com/rs/zerolog v1.32.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= +github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8= +github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= @@ -1604,8 +1604,8 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y golang.org/x/crypto v0.8.0/go.mod h1:mRqEX+O9/h5TFCrQhkgjo2yKi0yYA+9ecGkdQoHrywE= golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= -golang.org/x/crypto v0.26.0 h1:RrRspgV4mU+YwB4FYnuBoKsUapNIL5cohGAmSH3azsw= -golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54= +golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A= +golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -1745,8 +1745,8 @@ golang.org/x/oauth2 v0.0.0-20220822191816-0ebed06d0094/go.mod h1:h4gKUeWbJ4rQPri golang.org/x/oauth2 v0.0.0-20220909003341-f21342109be1/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= golang.org/x/oauth2 v0.0.0-20221014153046-6fdb5e3db783/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= golang.org/x/oauth2 v0.1.0/go.mod h1:G9FE4dLTsbXUu90h/Pf85g4w1D+SSAgR+q46nJZ8M4A= -golang.org/x/oauth2 v0.21.0 h1:tsimM75w1tF/uws5rbeHzIWxEqElMehnc+iW793zsZs= -golang.org/x/oauth2 v0.21.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/oauth2 v0.22.0 h1:BzDx2FehcG7jJwgWLELCdmLuxk2i+x9UDpSiss2u0ZA= +golang.org/x/oauth2 v0.22.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/perf v0.0.0-20180704124530-6e6d33e29852/go.mod h1:JLpeXjPJfIyPr5TlbXLkXWLhP8nz10XfvxElABhCtcw= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -1900,8 +1900,8 @@ golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc= -golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224= +golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time 
v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -2161,10 +2161,10 @@ google.golang.org/genproto v0.0.0-20221014213838-99cd37c6964a/go.mod h1:1vXfmgAz google.golang.org/genproto v0.0.0-20221025140454-527a21cfbd71/go.mod h1:9qHF0xnpdSfF6knlcsnpzUu5y+rpwgbvsyGAZPBMg4s= google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de h1:F6qOa9AZTYJXOUEr4jDysRDLrm4PHePlge4v4TGAlxY= google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:VUhTRKeHn9wwcdrk73nvdC9gF178Tzhmt/qyaFcPLSo= -google.golang.org/genproto/googleapis/api v0.0.0-20240617180043-68d350f18fd4 h1:MuYw1wJzT+ZkybKfaOXKp5hJiZDn2iHaXRw0mRYdHSc= -google.golang.org/genproto/googleapis/api v0.0.0-20240617180043-68d350f18fd4/go.mod h1:px9SlOOZBg1wM1zdnr8jEL4CNGUBZ+ZKYtNPApNQc4c= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240709173604-40e1e62336c5 h1:SbSDUWW1PAO24TNpLdeheoYPd7kllICcLU52x6eD4kQ= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240709173604-40e1e62336c5/go.mod h1:Ue6ibwXGpU+dqIcODieyLOcgj7z8+IcskoNIgZxtrFY= +google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142 h1:wKguEg1hsxI2/L3hUYrpo1RVi48K+uTyzKqprwLXsb8= +google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142/go.mod h1:d6be+8HhtEtucleCbxpPW9PA9XwISACu8nvpPqF0BVo= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240814211410-ddb44dafa142 h1:e7S5W7MGGLaSu8j3YjdezkZ+m1/Nm0uRVRMEMGk26Xs= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240814211410-ddb44dafa142/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU= google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.16.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio= google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= @@ -2208,8 +2208,8 @@ google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACu google.golang.org/grpc v1.49.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= google.golang.org/grpc v1.50.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= google.golang.org/grpc v1.50.1/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= -google.golang.org/grpc v1.66.2 h1:3QdXkuq3Bkh7w+ywLdLvM56cmGvQHUMZpiCzt6Rqaoo= -google.golang.org/grpc v1.66.2/go.mod h1:s3/l6xSSCURdVfAnL+TqCNMyTDAGN6+lZeVxnZR128Y= +google.golang.org/grpc v1.67.0 h1:IdH9y6PF5MPSdAntIcpjQ+tXO41pcQsfZV2RxtQgVcw= +google.golang.org/grpc v1.67.0/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= diff --git a/playground/package-lock.json b/playground/package-lock.json index da6e09b004..6875e28d89 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -15,17 +15,17 @@ "swagger-ui-react": "^5.17.14" }, "devDependencies": { - "@types/react": "^18.3.6", + "@types/react": "^18.3.8", "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^8.6.0", - "@typescript-eslint/parser": "^8.5.0", + "@typescript-eslint/parser": "^8.6.0", "@vitejs/plugin-react-swc": "^3.7.0", - "eslint": "^9.10.0", + "eslint": "^9.11.0", "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-react-refresh": "^0.4.12", 
"typescript": "^5.6.2", - "vite": "^5.4.6" + "vite": "^5.4.7" } }, "node_modules/@babel/runtime": { @@ -607,9 +607,9 @@ } }, "node_modules/@eslint/js": { - "version": "9.10.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.10.0.tgz", - "integrity": "sha512-fuXtbiP5GWIn8Fz+LWoOMVf/Jxm+aajZYkhi6CuEm4SxymFM+eUWzbO9qXT+L0iCkL5+KGYMCSGxo686H19S1g==", + "version": "9.11.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.11.0.tgz", + "integrity": "sha512-LPkkenkDqyzTFauZLLAPhIb48fj6drrfMvRGSL9tS3AcZBSVTllemLSNyCvHNNL2t797S/6DJNSIwRwXgMO/eQ==", "dev": true, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -625,9 +625,9 @@ } }, "node_modules/@eslint/plugin-kit": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.1.0.tgz", - "integrity": "sha512-autAXT203ixhqei9xt+qkYOvY8l6LAFIdT2UXc/RPNeUVfqRF1BV94GTJyVPFKT8nFM6MyVJhjLj9E8JWvf5zQ==", + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.0.tgz", + "integrity": "sha512-vH9PiIMMwvhCx31Af3HiGzsVNULDbyVkHXwlemn/B0TFj/00ho3y55efXrUZTfQipxoHC5u4xq6zblww1zm1Ig==", "dev": true, "dependencies": { "levn": "^0.4.1" @@ -2468,9 +2468,9 @@ } }, "node_modules/@types/react": { - "version": "18.3.6", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.6.tgz", - "integrity": "sha512-CnGaRYNu2iZlkGXGrOYtdg5mLK8neySj0woZ4e2wF/eli2E6Sazmq5X+Nrj6OBrrFVQfJWTUFeqAzoRhWQXYvg==", + "version": "18.3.8", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.8.tgz", + "integrity": "sha512-syBUrW3/XpnW4WJ41Pft+I+aPoDVbrBVQGEnbD7NijDGlVC+8gV/XKRY+7vMDlfPpbwYt0l1vd/Sj8bJGMbs9Q==", "devOptional": true, "dependencies": { "@types/prop-types": "*", @@ -2550,63 +2550,16 @@ } } }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.6.0.tgz", - "integrity": "sha512-ZuoutoS5y9UOxKvpc/GkvF4cuEmpokda4wRg64JEia27wX+PysIE9q+lzDtlHHgblwUWwo5/Qn+/WyTUvDwBHw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.6.0", - "@typescript-eslint/visitor-keys": "8.6.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.6.0.tgz", - "integrity": "sha512-rojqFZGd4MQxw33SrOy09qIDS8WEldM8JWtKQLAjf/X5mGSeEFh5ixQlxssMNyPslVIk9yzWqXCsV2eFhYrYUw==", - "dev": true, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": { + "node_modules/@typescript-eslint/parser": { "version": "8.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.6.0.tgz", - "integrity": "sha512-wapVFfZg9H0qOYh4grNVQiMklJGluQrOUiOhYRrQWhx7BY/+I1IYb8BczWNbbUpO+pqy0rDciv3lQH5E1bCLrg==", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.6.0.tgz", + "integrity": "sha512-eQcbCuA2Vmw45iGfcyG4y6rS7BhWfz9MQuk409WD47qMM+bKCGQWXxvoOs1DUp+T7UBMTtRTVT+kXr7Sh4O9Ow==", "dev": true, "dependencies": { + 
"@typescript-eslint/scope-manager": "8.6.0", "@typescript-eslint/types": "8.6.0", - "eslint-visitor-keys": "^3.4.3" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.5.0.tgz", - "integrity": "sha512-gF77eNv0Xz2UJg/NbpWJ0kqAm35UMsvZf1GHj8D9MRFTj/V3tAciIWXfmPLsAAF/vUlpWPvUDyH1jjsr0cMVWw==", - "dev": true, - "dependencies": { - "@typescript-eslint/scope-manager": "8.5.0", - "@typescript-eslint/types": "8.5.0", - "@typescript-eslint/typescript-estree": "8.5.0", - "@typescript-eslint/visitor-keys": "8.5.0", + "@typescript-eslint/typescript-estree": "8.6.0", + "@typescript-eslint/visitor-keys": "8.6.0", "debug": "^4.3.4" }, "engines": { @@ -2626,13 +2579,13 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.5.0.tgz", - "integrity": "sha512-06JOQ9Qgj33yvBEx6tpC8ecP9o860rsR22hWMEd12WcTRrfaFgHr2RB/CA/B+7BMhHkXT4chg2MyboGdFGawYg==", + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.6.0.tgz", + "integrity": "sha512-ZuoutoS5y9UOxKvpc/GkvF4cuEmpokda4wRg64JEia27wX+PysIE9q+lzDtlHHgblwUWwo5/Qn+/WyTUvDwBHw==", "dev": true, "dependencies": { - "@typescript-eslint/types": "8.5.0", - "@typescript-eslint/visitor-keys": "8.5.0" + "@typescript-eslint/types": "8.6.0", + "@typescript-eslint/visitor-keys": "8.6.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2666,7 +2619,7 @@ } } }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": { + "node_modules/@typescript-eslint/types": { "version": "8.6.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.6.0.tgz", "integrity": "sha512-rojqFZGd4MQxw33SrOy09qIDS8WEldM8JWtKQLAjf/X5mGSeEFh5ixQlxssMNyPslVIk9yzWqXCsV2eFhYrYUw==", @@ -2679,7 +2632,7 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": { + "node_modules/@typescript-eslint/typescript-estree": { "version": "8.6.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.6.0.tgz", "integrity": "sha512-MOVAzsKJIPIlLK239l5s06YXjNqpKTVhBVDnqUumQJja5+Y94V3+4VUFRA0G60y2jNnTVwRCkhyGQpavfsbq/g==", @@ -2707,64 +2660,6 @@ } } }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.6.0.tgz", - "integrity": "sha512-wapVFfZg9H0qOYh4grNVQiMklJGluQrOUiOhYRrQWhx7BY/+I1IYb8BczWNbbUpO+pqy0rDciv3lQH5E1bCLrg==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.6.0", - "eslint-visitor-keys": "^3.4.3" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/types": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.5.0.tgz", - "integrity": "sha512-qjkormnQS5wF9pjSi6q60bKUHH44j2APxfh9TQRXK8wbYVeDYYdYJGIROL87LGZZ2gz3Rbmjc736qyL8deVtdw==", - "dev": true, - "engines": { - "node": "^18.18.0 || 
^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.5.0.tgz", - "integrity": "sha512-vEG2Sf9P8BPQ+d0pxdfndw3xIXaoSjliG0/Ejk7UggByZPKXmJmw3GW5jV2gHNQNawBUyfahoSiCFVov0Ruf7Q==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.5.0", - "@typescript-eslint/visitor-keys": "8.5.0", - "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^1.3.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, "node_modules/@typescript-eslint/utils": { "version": "8.6.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.6.0.tgz", @@ -2787,65 +2682,7 @@ "eslint": "^8.57.0 || ^9.0.0" } }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.6.0.tgz", - "integrity": "sha512-ZuoutoS5y9UOxKvpc/GkvF4cuEmpokda4wRg64JEia27wX+PysIE9q+lzDtlHHgblwUWwo5/Qn+/WyTUvDwBHw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.6.0", - "@typescript-eslint/visitor-keys": "8.6.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.6.0.tgz", - "integrity": "sha512-rojqFZGd4MQxw33SrOy09qIDS8WEldM8JWtKQLAjf/X5mGSeEFh5ixQlxssMNyPslVIk9yzWqXCsV2eFhYrYUw==", - "dev": true, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.6.0.tgz", - "integrity": "sha512-MOVAzsKJIPIlLK239l5s06YXjNqpKTVhBVDnqUumQJja5+Y94V3+4VUFRA0G60y2jNnTVwRCkhyGQpavfsbq/g==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.6.0", - "@typescript-eslint/visitor-keys": "8.6.0", - "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^1.3.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": { + "node_modules/@typescript-eslint/visitor-keys": { "version": "8.6.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.6.0.tgz", "integrity": "sha512-wapVFfZg9H0qOYh4grNVQiMklJGluQrOUiOhYRrQWhx7BY/+I1IYb8BczWNbbUpO+pqy0rDciv3lQH5E1bCLrg==", @@ -2862,23 +2699,6 @@ "url": 
"https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.5.0.tgz", - "integrity": "sha512-yTPqMnbAZJNy2Xq2XU8AdtOW9tJIr+UQb64aXB9f3B1498Zx9JorVgFJcZpEc9UBuCCrdzKID2RGAMkYcDtZOw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.5.0", - "eslint-visitor-keys": "^3.4.3" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@vitejs/plugin-react-swc": { "version": "3.7.0", "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.7.0.tgz", @@ -3485,17 +3305,17 @@ } }, "node_modules/eslint": { - "version": "9.10.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.10.0.tgz", - "integrity": "sha512-Y4D0IgtBZfOcOUAIQTSXBKoNGfY0REGqHJG6+Q81vNippW5YlKjHFj4soMxamKK1NXHUWuBZTLdU3Km+L/pcHw==", + "version": "9.11.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.11.0.tgz", + "integrity": "sha512-yVS6XODx+tMFMDFcG4+Hlh+qG7RM6cCJXtQhCKLSsr3XkLvWggHjCqjfh0XsPPnt1c56oaT6PMgW9XWQQjdHXA==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.11.0", "@eslint/config-array": "^0.18.0", "@eslint/eslintrc": "^3.1.0", - "@eslint/js": "9.10.0", - "@eslint/plugin-kit": "^0.1.0", + "@eslint/js": "9.11.0", + "@eslint/plugin-kit": "^0.2.0", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.3.0", "@nodelib/fs.walk": "^1.2.8", @@ -6107,9 +5927,9 @@ "optional": true }, "node_modules/vite": { - "version": "5.4.6", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.6.tgz", - "integrity": "sha512-IeL5f8OO5nylsgzd9tq4qD2QqI0k2CQLGrWD0rCN0EQJZpBK5vJAx0I+GDkMOXxQX/OfFHMuLIx6ddAxGX/k+Q==", + "version": "5.4.7", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.7.tgz", + "integrity": "sha512-5l2zxqMEPVENgvzTuBpHer2awaetimj2BGkhBPdnwKbPNOlHsODU+oiazEZzLK7KhAnOrO+XGYJYn4ZlUhDtDQ==", "dev": true, "dependencies": { "esbuild": "^0.21.3", diff --git a/playground/package.json b/playground/package.json index 2ce39d3073..64244a2535 100644 --- a/playground/package.json +++ b/playground/package.json @@ -17,16 +17,16 @@ "swagger-ui-react": "^5.17.14" }, "devDependencies": { - "@types/react": "^18.3.6", + "@types/react": "^18.3.8", "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^8.6.0", - "@typescript-eslint/parser": "^8.5.0", + "@typescript-eslint/parser": "^8.6.0", "@vitejs/plugin-react-swc": "^3.7.0", - "eslint": "^9.10.0", + "eslint": "^9.11.0", "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-react-refresh": "^0.4.12", "typescript": "^5.6.2", - "vite": "^5.4.6" + "vite": "^5.4.7" } } From e891cb9de73bbe882e292c7e3d0da104480aa733 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 10:42:58 -0400 Subject: [PATCH 36/71] bot: Bump eslint from 9.11.0 to 9.11.1 in /playground (#3059) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [eslint](https://github.com/eslint/eslint) from 9.11.0 to 9.11.1.
Release notes

Sourced from eslint's releases.

v9.11.1

Bug Fixes

  • 20fd916 fix: add @eslint/core, @types/estree, & @types/json-schema deps (#18938) (Nitin Kumar)
  • 2738322 fix: add missing types for require-atomic-updates rule (#18937) (Kristóf Poduszló)
  • d71ff30 fix: add missing types for object-shorthand rule (#18935) (Kristóf Poduszló)
  • 561cadc fix: add missing types for no-unsafe-negation rule (#18932) (Kristóf Poduszló)
  • 8843656 fix: add missing types for no-underscore-dangle rule (#18931) (Kristóf Poduszló)
  • 92cde5c fix: add missing types for no-shadow rule (#18930) (Kristóf Poduszló)
  • b3cbe11 fix: add missing types for no-sequences rule (#18929) (Kristóf Poduszló)
  • 976f77f fix: add missing types for no-unused-expressions rule (#18933) (Kristóf Poduszló)

Documentation

  • 3eff709 docs: replace deprecated Linter.FlatConfig type with Linter.Config (#18941) (Carlos Meira)

Chores

  • df4a859 chore: upgrade @eslint/js@9.11.1 (#18943) (Milos Djermanovic)
  • 36d8095 chore: package.json update for @eslint/js release (Jenkins)
Changelog

Sourced from eslint's changelog.

v9.11.1 - September 23, 2024

  • df4a859 chore: upgrade @eslint/js@9.11.1 (#18943) (Milos Djermanovic)
  • 36d8095 chore: package.json update for @eslint/js release (Jenkins)
  • 20fd916 fix: add @eslint/core, @types/estree, & @types/json-schema deps (#18938) (Nitin Kumar)
  • 3eff709 docs: replace deprecated Linter.FlatConfig type with Linter.Config (#18941) (Carlos Meira)
  • 2738322 fix: add missing types for require-atomic-updates rule (#18937) (Kristóf Poduszló)
  • d71ff30 fix: add missing types for object-shorthand rule (#18935) (Kristóf Poduszló)
  • 561cadc fix: add missing types for no-unsafe-negation rule (#18932) (Kristóf Poduszló)
  • 8843656 fix: add missing types for no-underscore-dangle rule (#18931) (Kristóf Poduszló)
  • 92cde5c fix: add missing types for no-shadow rule (#18930) (Kristóf Poduszló)
  • b3cbe11 fix: add missing types for no-sequences rule (#18929) (Kristóf Poduszló)
  • 976f77f fix: add missing types for no-unused-expressions rule (#18933) (Kristóf Poduszló)
Commits
  • 69e9459 9.11.1
  • fcdac44 Build: changelog update for 9.11.1
  • df4a859 chore: upgrade @eslint/js@9.11.1 (#18943)
  • 36d8095 chore: package.json update for @eslint/js release
  • 20fd916 fix: add @eslint/core, @types/estree, & @types/json-schema deps (#18938)
  • 3eff709 docs: replace deprecated Linter.FlatConfig type with Linter.Config (#18941)
  • 2738322 fix: add missing types for require-atomic-updates rule (#18937)
  • d71ff30 fix: add missing types for object-shorthand rule (#18935)
  • 561cadc fix: add missing types for no-unsafe-negation rule (#18932)
  • 8843656 fix: add missing types for no-underscore-dangle rule (#18931)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=eslint&package-manager=npm_and_yarn&previous-version=9.11.0&new-version=9.11.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.

---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR (see the example after this list):

- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
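For example, to decline this particular upgrade and stop Dependabot from proposing further 9.11.x eslint bumps, a maintainer could reply with a single comment containing only the command below (the scenario is illustrative; the command itself is taken verbatim from the list above):

```
@dependabot ignore this minor version
```

Per the same list, the decision is reversible: `@dependabot reopen` will reopen the PR if this turns out to be the wrong call.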
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 40 ++++++++++++++++++++++++++++-------- playground/package.json | 2 +- 2 files changed, 33 insertions(+), 9 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 6875e28d89..672ae1c19d 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -21,7 +21,7 @@ "@typescript-eslint/eslint-plugin": "^8.6.0", "@typescript-eslint/parser": "^8.6.0", "@vitejs/plugin-react-swc": "^3.7.0", - "eslint": "^9.11.0", + "eslint": "^9.11.1", "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-react-refresh": "^0.4.12", "typescript": "^5.6.2", @@ -558,6 +558,15 @@ "node": "*" } }, + "node_modules/@eslint/core": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.6.0.tgz", + "integrity": "sha512-8I2Q8ykA4J0x0o7cg67FPVnehcqWTBehu/lmY+bolPFHGjh49YzGBMXTvpqVgEbBdvNCSxj6iFgiIyHzf03lzg==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, "node_modules/@eslint/eslintrc": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.1.0.tgz", @@ -607,9 +616,9 @@ } }, "node_modules/@eslint/js": { - "version": "9.11.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.11.0.tgz", - "integrity": "sha512-LPkkenkDqyzTFauZLLAPhIb48fj6drrfMvRGSL9tS3AcZBSVTllemLSNyCvHNNL2t797S/6DJNSIwRwXgMO/eQ==", + "version": "9.11.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.11.1.tgz", + "integrity": "sha512-/qu+TWz8WwPWc7/HcIJKi+c+MOm46GdVaSlTTQcaqaL53+GsoA6MxWp5PtTx48qbSP7ylM1Kn7nhvkugfJvRSA==", "dev": true, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2451,6 +2460,12 @@ "@types/unist": "^2" } }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true + }, "node_modules/@types/prop-types": { "version": "15.7.12", "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.12.tgz", @@ -3305,20 +3320,23 @@ } }, "node_modules/eslint": { - "version": "9.11.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.11.0.tgz", - "integrity": "sha512-yVS6XODx+tMFMDFcG4+Hlh+qG7RM6cCJXtQhCKLSsr3XkLvWggHjCqjfh0XsPPnt1c56oaT6PMgW9XWQQjdHXA==", + "version": "9.11.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.11.1.tgz", + "integrity": "sha512-MobhYKIoAO1s1e4VUrgx1l1Sk2JBR/Gqjjgw8+mfgoLE2xwsHur4gdfTxyTgShrhvdVFTaJSgMiQBl1jv/AWxg==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.11.0", "@eslint/config-array": "^0.18.0", + "@eslint/core": "^0.6.0", "@eslint/eslintrc": "^3.1.0", - "@eslint/js": "9.11.0", + "@eslint/js": "9.11.1", "@eslint/plugin-kit": "^0.2.0", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.3.0", "@nodelib/fs.walk": "^1.2.8", + "@types/estree": "^1.0.6", + "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", @@ -3415,6 +3433,12 @@ "url": "https://opencollective.com/eslint" } }, + "node_modules/eslint/node_modules/@types/estree": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", + "integrity": 
"sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", + "dev": true + }, "node_modules/eslint/node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", diff --git a/playground/package.json b/playground/package.json index 64244a2535..eb14b3e0aa 100644 --- a/playground/package.json +++ b/playground/package.json @@ -23,7 +23,7 @@ "@typescript-eslint/eslint-plugin": "^8.6.0", "@typescript-eslint/parser": "^8.6.0", "@vitejs/plugin-react-swc": "^3.7.0", - "eslint": "^9.11.0", + "eslint": "^9.11.1", "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-react-refresh": "^0.4.12", "typescript": "^5.6.2", From 228fb992ee44d5bddaa6a8b2962346605f1c2cff Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 11:07:28 -0400 Subject: [PATCH 37/71] bot: Bump rollup from 4.21.0 to 4.22.4 in /playground (#3058) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [rollup](https://github.com/rollup/rollup) from 4.21.0 to 4.22.4.
Release notes

Sourced from rollup's releases.

v4.22.4

4.22.4

2024-09-21

Bug Fixes

  • Fix a vulnerability in generated code that affects IIFE, UMD and CJS bundles when run in a browser context (#5671)

v4.22.3

4.22.3

2024-09-21

Bug Fixes

  • Ensure that mutations in modules without side effects are observed while properly handling transitive dependencies (#5669)

v4.22.2

4.22.2

2024-09-20

Bug Fixes

  • Revert fix for side effect free modules until other issues are investigated (#5667)

v4.22.1

4.22.1

2024-09-20

Bug Fixes

  • Revert #5644 "stable chunk hashes" while issues are being investigated

... (truncated)

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=rollup&package-manager=npm_and_yarn&previous-version=4.21.0&new-version=4.22.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.

[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)

---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)

You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/sourcenetwork/defradb/network/alerts).
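Because 4.22.4 carries a fix for a vulnerability in generated IIFE/UMD/CJS bundles, it can be worth confirming that no vulnerable rollup version remains resolved after merging. A minimal sketch using standard npm commands; nothing below is specific to this repository:

```sh
# Run from the playground workspace.
cd playground

# List every rollup version currently resolved in the dependency tree.
npm ls rollup

# npm audit reports the advisory addressed by 4.22.4 if an affected
# version is still present anywhere in the tree.
npm audit
```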
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 151 ++++++++++++++++------------------- 1 file changed, 67 insertions(+), 84 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 672ae1c19d..b36597dc6f 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -1477,224 +1477,208 @@ "integrity": "sha512-A9+lCBZoaMJlVKcRBz2YByCG+Cp2t6nAnMnNba+XiWxnj6r4JUFqfsgwocMBZU9LPtdxC6wB56ySYpc7LQIoJg==" }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.21.0.tgz", - "integrity": "sha512-WTWD8PfoSAJ+qL87lE7votj3syLavxunWhzCnx3XFxFiI/BA/r3X7MUM8dVrH8rb2r4AiO8jJsr3ZjdaftmnfA==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.22.4.tgz", + "integrity": "sha512-Fxamp4aEZnfPOcGA8KSNEohV8hX7zVHOemC8jVBoBUHu5zpJK/Eu3uJwt6BMgy9fkvzxDaurgj96F/NiLukF2w==", "cpu": [ "arm" ], "dev": true, - "license": "MIT", "optional": true, "os": [ "android" ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.21.0.tgz", - "integrity": "sha512-a1sR2zSK1B4eYkiZu17ZUZhmUQcKjk2/j9Me2IDjk1GHW7LB5Z35LEzj9iJch6gtUfsnvZs1ZNyDW2oZSThrkA==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.22.4.tgz", + "integrity": "sha512-VXoK5UMrgECLYaMuGuVTOx5kcuap1Jm8g/M83RnCHBKOqvPPmROFJGQaZhGccnsFtfXQ3XYa4/jMCJvZnbJBdA==", "cpu": [ "arm64" ], "dev": true, - "license": "MIT", "optional": true, "os": [ "android" ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.21.0.tgz", - "integrity": "sha512-zOnKWLgDld/svhKO5PD9ozmL6roy5OQ5T4ThvdYZLpiOhEGY+dp2NwUmxK0Ld91LrbjrvtNAE0ERBwjqhZTRAA==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.22.4.tgz", + "integrity": "sha512-xMM9ORBqu81jyMKCDP+SZDhnX2QEVQzTcC6G18KlTQEzWK8r/oNZtKuZaCcHhnsa6fEeOBionoyl5JsAbE/36Q==", "cpu": [ "arm64" ], "dev": true, - "license": "MIT", "optional": true, "os": [ "darwin" ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.21.0.tgz", - "integrity": "sha512-7doS8br0xAkg48SKE2QNtMSFPFUlRdw9+votl27MvT46vo44ATBmdZdGysOevNELmZlfd+NEa0UYOA8f01WSrg==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.22.4.tgz", + "integrity": "sha512-aJJyYKQwbHuhTUrjWjxEvGnNNBCnmpHDvrb8JFDbeSH3m2XdHcxDd3jthAzvmoI8w/kSjd2y0udT+4okADsZIw==", "cpu": [ "x64" ], "dev": true, - "license": "MIT", "optional": true, "os": [ "darwin" ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.21.0.tgz", - "integrity": "sha512-pWJsfQjNWNGsoCq53KjMtwdJDmh/6NubwQcz52aEwLEuvx08bzcy6tOUuawAOncPnxz/3siRtd8hiQ32G1y8VA==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.22.4.tgz", + "integrity": 
"sha512-j63YtCIRAzbO+gC2L9dWXRh5BFetsv0j0va0Wi9epXDgU/XUi5dJKo4USTttVyK7fGw2nPWK0PbAvyliz50SCQ==", "cpu": [ "arm" ], "dev": true, - "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.21.0.tgz", - "integrity": "sha512-efRIANsz3UHZrnZXuEvxS9LoCOWMGD1rweciD6uJQIx2myN3a8Im1FafZBzh7zk1RJ6oKcR16dU3UPldaKd83w==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.22.4.tgz", + "integrity": "sha512-dJnWUgwWBX1YBRsuKKMOlXCzh2Wu1mlHzv20TpqEsfdZLb3WoJW2kIEsGwLkroYf24IrPAvOT/ZQ2OYMV6vlrg==", "cpu": [ "arm" ], "dev": true, - "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.21.0.tgz", - "integrity": "sha512-ZrPhydkTVhyeGTW94WJ8pnl1uroqVHM3j3hjdquwAcWnmivjAwOYjTEAuEDeJvGX7xv3Z9GAvrBkEzCgHq9U1w==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.22.4.tgz", + "integrity": "sha512-AdPRoNi3NKVLolCN/Sp4F4N1d98c4SBnHMKoLuiG6RXgoZ4sllseuGioszumnPGmPM2O7qaAX/IJdeDU8f26Aw==", "cpu": [ "arm64" ], "dev": true, - "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.21.0.tgz", - "integrity": "sha512-cfaupqd+UEFeURmqNP2eEvXqgbSox/LHOyN9/d2pSdV8xTrjdg3NgOFJCtc1vQ/jEke1qD0IejbBfxleBPHnPw==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.22.4.tgz", + "integrity": "sha512-Gl0AxBtDg8uoAn5CCqQDMqAx22Wx22pjDOjBdmG0VIWX3qUBHzYmOKh8KXHL4UpogfJ14G4wk16EQogF+v8hmA==", "cpu": [ "arm64" ], "dev": true, - "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.21.0.tgz", - "integrity": "sha512-ZKPan1/RvAhrUylwBXC9t7B2hXdpb/ufeu22pG2psV7RN8roOfGurEghw1ySmX/CmDDHNTDDjY3lo9hRlgtaHg==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.22.4.tgz", + "integrity": "sha512-3aVCK9xfWW1oGQpTsYJJPF6bfpWfhbRnhdlyhak2ZiyFLDaayz0EP5j9V1RVLAAxlmWKTDfS9wyRyY3hvhPoOg==", "cpu": [ "ppc64" ], "dev": true, - "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.21.0.tgz", - "integrity": "sha512-H1eRaCwd5E8eS8leiS+o/NqMdljkcb1d6r2h4fKSsCXQilLKArq6WS7XBLDu80Yz+nMqHVFDquwcVrQmGr28rg==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.22.4.tgz", + "integrity": "sha512-ePYIir6VYnhgv2C5Xe9u+ico4t8sZWXschR6fMgoPUK31yQu7hTEJb7bCqivHECwIClJfKgE7zYsh1qTP3WHUA==", "cpu": [ "riscv64" ], "dev": true, - "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.21.0", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.21.0.tgz", - "integrity": "sha512-zJ4hA+3b5tu8u7L58CCSI0A9N1vkfwPhWd/puGXwtZlsB5bTkwDNW/+JCU84+3QYmKpLi+XvHdmrlwUwDA6kqw==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.22.4.tgz", + "integrity": "sha512-GqFJ9wLlbB9daxhVlrTe61vJtEY99/xB3C8e4ULVsVfflcpmR6c8UZXjtkMA6FhNONhj2eA5Tk9uAVw5orEs4Q==", "cpu": [ "s390x" ], "dev": true, - "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.21.0.tgz", - "integrity": "sha512-e2hrvElFIh6kW/UNBQK/kzqMNY5mO+67YtEh9OA65RM5IJXYTWiXjX6fjIiPaqOkBthYF1EqgiZ6OXKcQsM0hg==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.22.4.tgz", + "integrity": "sha512-87v0ol2sH9GE3cLQLNEy0K/R0pz1nvg76o8M5nhMR0+Q+BBGLnb35P0fVz4CQxHYXaAOhE8HhlkaZfsdUOlHwg==", "cpu": [ "x64" ], "dev": true, - "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.21.0.tgz", - "integrity": "sha512-1vvmgDdUSebVGXWX2lIcgRebqfQSff0hMEkLJyakQ9JQUbLDkEaMsPTLOmyccyC6IJ/l3FZuJbmrBw/u0A0uCQ==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.22.4.tgz", + "integrity": "sha512-UV6FZMUgePDZrFjrNGIWzDo/vABebuXBhJEqrHxrGiU6HikPy0Z3LfdtciIttEUQfuDdCn8fqh7wiFJjCNwO+g==", "cpu": [ "x64" ], "dev": true, - "license": "MIT", "optional": true, "os": [ "linux" ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.21.0.tgz", - "integrity": "sha512-s5oFkZ/hFcrlAyBTONFY1TWndfyre1wOMwU+6KCpm/iatybvrRgmZVM+vCFwxmC5ZhdlgfE0N4XorsDpi7/4XQ==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.22.4.tgz", + "integrity": "sha512-BjI+NVVEGAXjGWYHz/vv0pBqfGoUH0IGZ0cICTn7kB9PyjrATSkX+8WkguNjWoj2qSr1im/+tTGRaY+4/PdcQw==", "cpu": [ "arm64" ], "dev": true, - "license": "MIT", "optional": true, "os": [ "win32" ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.21.0.tgz", - "integrity": "sha512-G9+TEqRnAA6nbpqyUqgTiopmnfgnMkR3kMukFBDsiyy23LZvUCpiUwjTRx6ezYCjJODXrh52rBR9oXvm+Fp5wg==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.22.4.tgz", + "integrity": "sha512-SiWG/1TuUdPvYmzmYnmd3IEifzR61Tragkbx9D3+R8mzQqDBz8v+BvZNDlkiTtI9T15KYZhP0ehn3Dld4n9J5g==", "cpu": [ "ia32" ], "dev": true, - "license": "MIT", "optional": true, "os": [ "win32" ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.21.0.tgz", - "integrity": "sha512-2jsCDZwtQvRhejHLfZ1JY6w6kEuEtfF9nzYsZxzSlNVKDX+DpsDJ+Rbjkm74nvg2rdx0gwBS+IMdvwJuq3S9pQ==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.22.4.tgz", + "integrity": 
"sha512-j8pPKp53/lq9lMXN57S8cFz0MynJk8OWNuUnXct/9KCpKU7DgU3bYMJhwWmcqC0UU29p8Lr0/7KEVcaM6bf47Q==", "cpu": [ "x64" ], "dev": true, - "license": "MIT", "optional": true, "os": [ "win32" @@ -5297,11 +5281,10 @@ } }, "node_modules/rollup": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.21.0.tgz", - "integrity": "sha512-vo+S/lfA2lMS7rZ2Qoubi6I5hwZwzXeUIctILZLbHI+laNtvhhOIon2S1JksA5UEDQ7l3vberd0fxK44lTYjbQ==", + "version": "4.22.4", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.22.4.tgz", + "integrity": "sha512-vD8HJ5raRcWOyymsR6Z3o6+RzfEPCnVLMFJ6vRslO1jt4LO6dUo5Qnpg7y4RkZFM2DMe3WUirkI5c16onjrc6A==", "dev": true, - "license": "MIT", "dependencies": { "@types/estree": "1.0.5" }, @@ -5313,22 +5296,22 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.21.0", - "@rollup/rollup-android-arm64": "4.21.0", - "@rollup/rollup-darwin-arm64": "4.21.0", - "@rollup/rollup-darwin-x64": "4.21.0", - "@rollup/rollup-linux-arm-gnueabihf": "4.21.0", - "@rollup/rollup-linux-arm-musleabihf": "4.21.0", - "@rollup/rollup-linux-arm64-gnu": "4.21.0", - "@rollup/rollup-linux-arm64-musl": "4.21.0", - "@rollup/rollup-linux-powerpc64le-gnu": "4.21.0", - "@rollup/rollup-linux-riscv64-gnu": "4.21.0", - "@rollup/rollup-linux-s390x-gnu": "4.21.0", - "@rollup/rollup-linux-x64-gnu": "4.21.0", - "@rollup/rollup-linux-x64-musl": "4.21.0", - "@rollup/rollup-win32-arm64-msvc": "4.21.0", - "@rollup/rollup-win32-ia32-msvc": "4.21.0", - "@rollup/rollup-win32-x64-msvc": "4.21.0", + "@rollup/rollup-android-arm-eabi": "4.22.4", + "@rollup/rollup-android-arm64": "4.22.4", + "@rollup/rollup-darwin-arm64": "4.22.4", + "@rollup/rollup-darwin-x64": "4.22.4", + "@rollup/rollup-linux-arm-gnueabihf": "4.22.4", + "@rollup/rollup-linux-arm-musleabihf": "4.22.4", + "@rollup/rollup-linux-arm64-gnu": "4.22.4", + "@rollup/rollup-linux-arm64-musl": "4.22.4", + "@rollup/rollup-linux-powerpc64le-gnu": "4.22.4", + "@rollup/rollup-linux-riscv64-gnu": "4.22.4", + "@rollup/rollup-linux-s390x-gnu": "4.22.4", + "@rollup/rollup-linux-x64-gnu": "4.22.4", + "@rollup/rollup-linux-x64-musl": "4.22.4", + "@rollup/rollup-win32-arm64-msvc": "4.22.4", + "@rollup/rollup-win32-ia32-msvc": "4.22.4", + "@rollup/rollup-win32-x64-msvc": "4.22.4", "fsevents": "~2.3.2" } }, From 98befd6b8bdcdb0855daa14e66494ae5f5c3f554 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 24 Sep 2024 09:41:26 -0700 Subject: [PATCH 38/71] refactor: GraphQL order input (#3044) ## Relevant issue(s) Resolves #3043 ## Description This PR fixes an issue where GQL variables could not be used with ordering input args. The order input type on collections and aggregates has been refactored to a list so that ordering is preserved in all environments. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? 
Added integration tests Specify the platform(s) on which this was tested: - MacOS --- .../sorting-and-ordering.md | 8 +- internal/request/graphql/parser/commit.go | 4 +- internal/request/graphql/parser/errors.go | 1 + internal/request/graphql/parser/filter.go | 77 +--------------- internal/request/graphql/parser/order.go | 90 +++++++++++++++++++ internal/request/graphql/parser/query.go | 34 ++----- internal/request/graphql/schema/generate.go | 6 +- .../request/graphql/schema/types/commits.go | 2 +- .../explain/debug/with_order_test.go | 2 +- .../explain/default/with_order_test.go | 2 +- .../explain/execute/with_order_test.go | 2 +- .../one_to_many_to_one/with_order_test.go | 20 ++--- .../query/simple/with_order_test.go | 59 +++++++++++- .../query/simple/with_variables_test.go | 84 +++++++++++++++++ .../schema/aggregates/inline_array_test.go | 2 +- .../schema/aggregates/simple_test.go | 16 ++-- .../schema/aggregates/top_level_test.go | 16 ++-- tests/integration/schema/default_fields.go | 21 ++--- tests/integration/schema/filter_test.go | 22 +---- tests/integration/schema/input_type_test.go | 66 +++----------- 20 files changed, 309 insertions(+), 225 deletions(-) create mode 100644 internal/request/graphql/parser/order.go diff --git a/docs/website/references/query-specification/sorting-and-ordering.md b/docs/website/references/query-specification/sorting-and-ordering.md index 444a244905..f6c06b5a0f 100644 --- a/docs/website/references/query-specification/sorting-and-ordering.md +++ b/docs/website/references/query-specification/sorting-and-ordering.md @@ -25,7 +25,7 @@ Sorting can be applied to multiple fields in the same query. The sort order is s The query below finds all books ordered by earliest published date and then by descending order of titles. ```graphql { - Books(order: { published_at: ASC, title: DESC }) { + Books(order: [{ published_at: ASC }, { title: DESC }]) { title genre description @@ -38,7 +38,7 @@ Additionally, you can sort sub-object fields along with root object fields. The query below finds all books ordered by earliest published date and then by the latest authors' birthday. 
```graphql { - Books(order: { published_at: ASC, Author: { birthday: DESC }}) { + Books(order: [{ published_at: ASC }, { Author: { birthday: DESC }}]) { title description published_at @@ -63,7 +63,7 @@ If the DocKey is included in the sort fields, any field included afterwards will *So, instead of:* ```graphql { - Authors(order: { name: DESC, Books: { title: ASC }}) { + Authors(order: [{ name: DESC }, { Books: { title: ASC }}]) { name Books { title @@ -114,7 +114,7 @@ If you have the following objects in the database: > and the following query ```graphql { - Authors(order: { name: DESC, books: { title: ASC }}) { + Authors(order: [{ name: DESC }, { books: { title: ASC }}]) { name books { title diff --git a/internal/request/graphql/parser/commit.go b/internal/request/graphql/parser/commit.go index b08d80fc69..22e5afe0b0 100644 --- a/internal/request/graphql/parser/commit.go +++ b/internal/request/graphql/parser/commit.go @@ -55,11 +55,11 @@ func parseCommitSelect( } case request.OrderClause: - v, ok := value.(map[string]any) + v, ok := value.([]any) if !ok { continue // value is nil } - conditions, err := ParseConditionsInOrder(argument.Value.(*ast.ObjectValue), v) + conditions, err := parseOrderConditionList(v) if err != nil { return nil, err } diff --git a/internal/request/graphql/parser/errors.go b/internal/request/graphql/parser/errors.go index c629f11c19..658f50219c 100644 --- a/internal/request/graphql/parser/errors.go +++ b/internal/request/graphql/parser/errors.go @@ -24,4 +24,5 @@ var ( ErrUnknownExplainType = errors.New("invalid / unknown explain type") ErrUnknownGQLOperation = errors.New("unknown GraphQL operation type") ErrInvalidFilterConditions = errors.New("invalid filter condition type, expected map") + ErrMultipleOrderFieldsDefined = errors.New("each order argument can only define one field") ) diff --git a/internal/request/graphql/parser/filter.go b/internal/request/graphql/parser/filter.go index aa65f77dd2..40d4a798f4 100644 --- a/internal/request/graphql/parser/filter.go +++ b/internal/request/graphql/parser/filter.go @@ -64,77 +64,19 @@ func NewFilterFromString( return NewFilter(obj, filterType) } -// ParseConditionsInOrder is similar to ParseConditions, except instead -// of returning a map[string]any, we return a []any. This -// is to maintain the ordering info of the statements within the ObjectValue. -// This function is mostly used by the Order parser, which needs to parse -// conditions in the same way as the Filter object, however the order -// of the arguments is important. -func ParseConditionsInOrder(stmt *ast.ObjectValue, args map[string]any) ([]request.OrderCondition, error) { - conditions := make([]request.OrderCondition, 0) - if stmt == nil { - return conditions, nil - } - for _, field := range stmt.Fields { - switch v := args[field.Name.Value].(type) { - case int: // base direction parsed (hopefully, check NameToOrderDirection) - dir, err := parseOrderDirection(v) - if err != nil { - return nil, err - } - conditions = append(conditions, request.OrderCondition{ - Fields: []string{field.Name.Value}, - Direction: dir, - }) - - case map[string]any: // flatten and incorporate the parsed slice into our current one - sub, err := ParseConditionsInOrder(field.Value.(*ast.ObjectValue), v) - if err != nil { - return nil, err - } - for _, cond := range sub { - // prepend the current field name, to the parsed condition from the slice - // Eg. 
order: {author: {name: ASC, birthday: DESC}} - // This results in an array of [name, birthday] converted to - // [author.name, author.birthday]. - // etc. - cond.Fields = append([]string{field.Name.Value}, cond.Fields...) - conditions = append(conditions, cond) - } - - case nil: - continue // ignore nil filter input - - default: - return nil, client.NewErrUnhandledType("parseConditionInOrder", v) - } - } - - return conditions, nil -} - // parseConditions loops over the stmt ObjectValue fields, and extracts // all the relevant name/value pairs. func ParseConditions(stmt *ast.ObjectValue, inputType gql.Input) (map[string]any, error) { - cond, err := parseConditions(stmt, inputType) - if err != nil { - return nil, err + cond := gql.ValueFromAST(stmt, inputType, nil) + if cond == nil { + return nil, ErrFailedToParseConditionsFromAST } - if v, ok := cond.(map[string]any); ok { return v, nil } return nil, client.NewErrUnexpectedType[map[string]any]("condition", cond) } -func parseConditions(stmt *ast.ObjectValue, inputArg gql.Input) (any, error) { - val := gql.ValueFromAST(stmt, inputArg, nil) - if val == nil { - return nil, ErrFailedToParseConditionsFromAST - } - return val, nil -} - // ParseFilterFieldsForDescription parses the fields that are defined in the SchemaDescription // from the filter conditions“ func ParseFilterFieldsForDescription( @@ -195,16 +137,3 @@ func parseFilterFieldsForDescriptionSlice( } return fields, nil } - -func parseOrderDirection(v int) (request.OrderDirection, error) { - switch v { - case 0: - return request.ASC, nil - - case 1: - return request.DESC, nil - - default: - return request.ASC, ErrInvalidOrderDirection - } -} diff --git a/internal/request/graphql/parser/order.go b/internal/request/graphql/parser/order.go new file mode 100644 index 0000000000..983988f5f9 --- /dev/null +++ b/internal/request/graphql/parser/order.go @@ -0,0 +1,90 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package parser + +import ( + "github.com/sourcenetwork/defradb/client/request" +) + +func parseOrderConditionList(args []any) ([]request.OrderCondition, error) { + var conditions []request.OrderCondition + for _, a := range args { + v, ok := a.(map[string]any) + if !ok { + continue // order value is nil + } + condition, err := parseOrderCondition(v) + if err != nil { + return nil, err + } + if condition != nil { + conditions = append(conditions, *condition) + } + } + return conditions, nil +} + +func parseOrderCondition(arg map[string]any) (*request.OrderCondition, error) { + if len(arg) == 0 { + return nil, nil + } + if len(arg) != 1 { + return nil, ErrMultipleOrderFieldsDefined + } + var fieldName string + for name := range arg { + fieldName = name + } + switch t := arg[fieldName].(type) { + case int: + dir, err := parseOrderDirection(t) + if err != nil { + return nil, err + } + return &request.OrderCondition{ + Fields: []string{fieldName}, + Direction: dir, + }, nil + + case map[string]any: + cond, err := parseOrderCondition(t) + if err != nil { + return nil, err + } + if cond == nil { + return nil, nil + } + // prepend the current field name, to the parsed condition from the slice + // Eg. 
order: [{author: {name: ASC}}, {author: {birthday: DESC}}] + // This results in an array of [name, birthday] converted to + // [author.name, author.birthday]. + // etc. + cond.Fields = append([]string{fieldName}, cond.Fields...) + return cond, nil + + default: + // field value is null so don't include the condition + return nil, nil + } +} + +func parseOrderDirection(v int) (request.OrderDirection, error) { + switch v { + case 0: + return request.ASC, nil + + case 1: + return request.DESC, nil + + default: + return request.ASC, ErrInvalidOrderDirection + } +} diff --git a/internal/request/graphql/parser/query.go b/internal/request/graphql/parser/query.go index 29ba695dec..871580f2f2 100644 --- a/internal/request/graphql/parser/query.go +++ b/internal/request/graphql/parser/query.go @@ -138,11 +138,11 @@ func parseSelect( } case request.OrderClause: // parse order by - v, ok := value.(map[string]any) + v, ok := value.([]any) if !ok { continue // value is nil } - conditions, err := ParseConditionsInOrder(argument.Value.(*ast.ObjectValue), v) + conditions, err := parseOrderConditionList(v) if err != nil { return nil, err } @@ -208,11 +208,7 @@ func parseAggregate( }) case map[string]any: - value, ok := argument.Value.(*ast.ObjectValue) - if !ok { - continue // value is nil - } - target, err := parseAggregateTarget(name, value, v) + target, err := parseAggregateTarget(name, v) if err != nil { return nil, err } @@ -231,7 +227,6 @@ func parseAggregate( func parseAggregateTarget( hostName string, - value *ast.ObjectValue, arguments map[string]any, ) (*request.AggregateTarget, error) { var childName string @@ -240,10 +235,7 @@ func parseAggregateTarget( var offset immutable.Option[uint64] var order immutable.Option[request.OrderBy] - for _, f := range value.Fields { - name := f.Name.Value - value := arguments[name] - + for name, value := range arguments { switch name { case request.FieldName: if v, ok := value.(string); ok { @@ -266,14 +258,10 @@ func parseAggregateTarget( } case request.OrderClause: - switch conditionsAST := f.Value.(type) { - case *ast.EnumValue: + switch t := value.(type) { + case int: // For inline arrays the order arg will be a simple enum declaring the order direction - v, ok := value.(int) - if !ok { - continue // value is nil - } - dir, err := parseOrderDirection(v) + dir, err := parseOrderDirection(t) if err != nil { return nil, err } @@ -281,14 +269,10 @@ func parseAggregateTarget( Conditions: []request.OrderCondition{{Direction: dir}}, }) - case *ast.ObjectValue: + case []any: // For relations the order arg will be the complex order object as used by the host object // for non-aggregate ordering - v, ok := value.(map[string]any) - if !ok { - continue // value is nil - } - conditions, err := ParseConditionsInOrder(conditionsAST, v) + conditions, err := parseOrderConditionList(t) if err != nil { return nil, err } diff --git a/internal/request/graphql/schema/generate.go b/internal/request/graphql/schema/generate.go index 3d1fe26610..b0f0163f06 100644 --- a/internal/request/graphql/schema/generate.go +++ b/internal/request/graphql/schema/generate.go @@ -395,7 +395,7 @@ func (g *Generator) createExpandedFieldList( schemaTypes.GroupByArgDescription, ), "order": schemaTypes.NewArgConfig( - g.manager.schema.TypeMap()[typeName+"OrderArg"], + gql.NewList(g.manager.schema.TypeMap()[typeName+"OrderArg"]), schemaTypes.OrderArgDescription, ), request.LimitClause: schemaTypes.NewArgConfig(gql.Int, schemaTypes.LimitArgDescription), @@ -979,7 +979,7 @@ func (g *Generator) 
genNumericAggregateBaseArgInputs(obj *gql.Object) *gql.Input Description: schemaTypes.OffsetArgDescription, }, request.OrderClause: &gql.InputObjectFieldConfig{ - Type: g.manager.schema.TypeMap()[genTypeName(obj, "OrderArg")], + Type: gql.NewList(g.manager.schema.TypeMap()[genTypeName(obj, "OrderArg")]), Description: schemaTypes.OrderArgDescription, }, }, nil @@ -1304,7 +1304,7 @@ func (g *Generator) genTypeQueryableFieldList( gql.NewList(gql.NewNonNull(config.groupBy)), schemaTypes.GroupByArgDescription, ), - "order": schemaTypes.NewArgConfig(config.order, schemaTypes.OrderArgDescription), + "order": schemaTypes.NewArgConfig(gql.NewList(config.order), schemaTypes.OrderArgDescription), request.ShowDeleted: schemaTypes.NewArgConfig(gql.Boolean, showDeletedArgDescription), request.LimitClause: schemaTypes.NewArgConfig(gql.Int, schemaTypes.LimitArgDescription), request.OffsetClause: schemaTypes.NewArgConfig(gql.Int, schemaTypes.OffsetArgDescription), diff --git a/internal/request/graphql/schema/types/commits.go b/internal/request/graphql/schema/types/commits.go index be05b5feb6..2b0fd0e747 100644 --- a/internal/request/graphql/schema/types/commits.go +++ b/internal/request/graphql/schema/types/commits.go @@ -148,7 +148,7 @@ func QueryCommits(commitObject *gql.Object, commitsOrderArg *gql.InputObject) *g Args: gql.FieldConfigArgument{ request.DocIDArgName: NewArgConfig(gql.ID, commitDocIDArgDescription), request.FieldIDName: NewArgConfig(gql.String, commitFieldIDArgDescription), - "order": NewArgConfig(commitsOrderArg, OrderArgDescription), + "order": NewArgConfig(gql.NewList(commitsOrderArg), OrderArgDescription), "cid": NewArgConfig(gql.ID, commitCIDArgDescription), "groupBy": NewArgConfig( gql.NewList( diff --git a/tests/integration/explain/debug/with_order_test.go b/tests/integration/explain/debug/with_order_test.go index 663514f558..740b7931e0 100644 --- a/tests/integration/explain/debug/with_order_test.go +++ b/tests/integration/explain/debug/with_order_test.go @@ -69,7 +69,7 @@ func TestDebugExplainRequestWithMultiOrderFieldsOnParent(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain(type: debug) { - Author(order: {name: ASC, age: DESC}) { + Author(order: [{name: ASC}, {age: DESC}]) { name age } diff --git a/tests/integration/explain/default/with_order_test.go b/tests/integration/explain/default/with_order_test.go index 6f0e3909f0..ad87ddde29 100644 --- a/tests/integration/explain/default/with_order_test.go +++ b/tests/integration/explain/default/with_order_test.go @@ -86,7 +86,7 @@ func TestDefaultExplainRequestWithMultiOrderFieldsOnParent(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain { - Author(order: {name: ASC, age: DESC}) { + Author(order: [{name: ASC}, {age: DESC}]) { name age } diff --git a/tests/integration/explain/execute/with_order_test.go b/tests/integration/explain/execute/with_order_test.go index 55f0745928..cc6e0c68d1 100644 --- a/tests/integration/explain/execute/with_order_test.go +++ b/tests/integration/explain/execute/with_order_test.go @@ -117,7 +117,7 @@ func TestExecuteExplainRequestWithMultiOrderFieldsOnParent(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain(type: execute) { - Author(order: {age: ASC, name: DESC}) { + Author(order: [{age: ASC}, {name: DESC}]) { name age } diff --git a/tests/integration/query/one_to_many_to_one/with_order_test.go b/tests/integration/query/one_to_many_to_one/with_order_test.go index 5036faf6d9..38c8ce75dd 100644 --- a/tests/integration/query/one_to_many_to_one/with_order_test.go 
+++ b/tests/integration/query/one_to_many_to_one/with_order_test.go @@ -24,7 +24,7 @@ func TestMultipleOrderByWithDepthGreaterThanOne(t *testing.T) { createDocsWith6BooksAnd5Publishers(), testUtils.Request{ Request: `query { - Book (order: {rating: ASC, publisher: {yearOpened: DESC}}) { + Book (order: [{rating: ASC}, {publisher: {yearOpened: DESC}}]) { name rating publisher{ @@ -97,15 +97,15 @@ func TestMultipleOrderByWithDepthGreaterThanOneOrderSwitched(t *testing.T) { createDocsWith6BooksAnd5Publishers(), testUtils.Request{ Request: `query { - Book (order: {publisher: {yearOpened: DESC}, rating: ASC}) { - name - rating - publisher{ - name - yearOpened - } - } - }`, + Book (order: [{publisher: {yearOpened: DESC}}, {rating: ASC}]) { + name + rating + publisher{ + name + yearOpened + } + } + }`, Results: map[string]any{ "Book": []map[string]any{ { diff --git a/tests/integration/query/simple/with_order_test.go b/tests/integration/query/simple/with_order_test.go index 000e3ee391..82245de369 100644 --- a/tests/integration/query/simple/with_order_test.go +++ b/tests/integration/query/simple/with_order_test.go @@ -358,7 +358,7 @@ func TestQuerySimpleWithNumericOrderDescendingAndBooleanOrderAscending(t *testin }, testUtils.Request{ Request: `query { - Users(order: {Age: DESC, Verified: ASC}) { + Users(order: [{Age: DESC}, {Verified: ASC}]) { Name Age Verified @@ -394,3 +394,60 @@ func TestQuerySimpleWithNumericOrderDescendingAndBooleanOrderAscending(t *testin executeTestCase(t, test) } + +func TestQuerySimple_WithInvalidOrderEnum_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with invalid order", + Actions: []any{ + testUtils.Request{ + Request: `query { + Users(order: {Age: INVALID}) { + Name + Age + Verified + } + }`, + ExpectedError: `Argument "order" has invalid value {Age: INVALID}`, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithMultipleOrderFields_ReturnsError(t *testing.T) { + tests := []testUtils.TestCase{ + { + Description: "Simple query with multiple order fields and a single entry", + Actions: []any{ + testUtils.Request{ + Request: `query { + Users(order: {Age: ASC, Name: DESC}) { + Name + Age + } + }`, + ExpectedError: "each order argument can only define one field", + }, + }, + }, + { + Description: "Simple query with multiple order fields and multiple entries", + Actions: []any{ + testUtils.Request{ + Request: `query { + Users(order: [{Age: ASC}, {Age: ASC, Name: DESC}]) { + Name + Age + } + }`, + ExpectedError: "each order argument can only define one field", + }, + }, + }, + } + + for _, test := range tests { + executeTestCase(t, test) + } +} diff --git a/tests/integration/query/simple/with_variables_test.go b/tests/integration/query/simple/with_variables_test.go index 5192c00ce4..bd2b88c0a0 100644 --- a/tests/integration/query/simple/with_variables_test.go +++ b/tests/integration/query/simple/with_variables_test.go @@ -168,3 +168,87 @@ func TestQuerySimpleWithVariableDefaultValueOverride(t *testing.T) { executeTestCase(t, test) } + +func TestQuerySimpleWithOrderVariable(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with order variable", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 40 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 21 + }`, + }, + testUtils.Request{ + Variables: immutable.Some(map[string]any{ + "order": []map[string]any{ + {"Name": "DESC"}, + {"Age": "ASC"}, + }, + }), + Request: `query($order: [UsersOrderArg]) { 
+ Users(order: $order) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "Bob", + }, + { + "Name": "Alice", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimpleWithAggregateCountVariable(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with aggregate count variable", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 40 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 21 + }`, + }, + testUtils.Request{ + Variables: immutable.Some(map[string]any{ + "usersCount": map[string]any{ + "filter": map[string]any{ + "Name": map[string]any{ + "_eq": "Bob", + }, + }, + }, + }), + Request: `query($usersCount: Users__CountSelector) { + _count(Users: $usersCount) + }`, + Results: map[string]any{ + "_count": 1, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/schema/aggregates/inline_array_test.go b/tests/integration/schema/aggregates/inline_array_test.go index a7fa6518fa..067f17c6ed 100644 --- a/tests/integration/schema/aggregates/inline_array_test.go +++ b/tests/integration/schema/aggregates/inline_array_test.go @@ -246,7 +246,7 @@ func TestSchemaAggregateInlineArrayCreatesUsersSum(t *testing.T) { map[string]any{ "name": "order", "type": map[string]any{ - "name": "UsersOrderArg", + "name": nil, }, }, }, diff --git a/tests/integration/schema/aggregates/simple_test.go b/tests/integration/schema/aggregates/simple_test.go index ef9eef19a6..789e25a1eb 100644 --- a/tests/integration/schema/aggregates/simple_test.go +++ b/tests/integration/schema/aggregates/simple_test.go @@ -196,9 +196,11 @@ func TestSchemaAggregateSimpleCreatesUsersSum(t *testing.T) { map[string]any{ "name": "order", "type": map[string]any{ - "name": "UsersOrderArg", - "kind": "INPUT_OBJECT", - "ofType": nil, + "name": nil, + "kind": "LIST", + "ofType": map[string]any{ + "name": "UsersOrderArg", + }, }, }, }, @@ -300,9 +302,11 @@ func TestSchemaAggregateSimpleCreatesUsersAverage(t *testing.T) { map[string]any{ "name": "order", "type": map[string]any{ - "name": "UsersOrderArg", - "kind": "INPUT_OBJECT", - "ofType": nil, + "name": nil, + "kind": "LIST", + "ofType": map[string]any{ + "name": "UsersOrderArg", + }, }, }, }, diff --git a/tests/integration/schema/aggregates/top_level_test.go b/tests/integration/schema/aggregates/top_level_test.go index 5dd10c0f07..92c0a12421 100644 --- a/tests/integration/schema/aggregates/top_level_test.go +++ b/tests/integration/schema/aggregates/top_level_test.go @@ -179,9 +179,11 @@ func TestSchemaAggregateTopLevelCreatesSumGivenSchema(t *testing.T) { map[string]any{ "name": "order", "type": map[string]any{ - "name": "UsersOrderArg", - "kind": "INPUT_OBJECT", - "ofType": nil, + "name": nil, + "kind": "LIST", + "ofType": map[string]any{ + "name": "UsersOrderArg", + }, }, }, }, @@ -285,9 +287,11 @@ func TestSchemaAggregateTopLevelCreatesAverageGivenSchema(t *testing.T) { map[string]any{ "name": "order", "type": map[string]any{ - "name": "UsersOrderArg", - "kind": "INPUT_OBJECT", - "ofType": nil, + "name": nil, + "kind": "LIST", + "ofType": map[string]any{ + "name": "UsersOrderArg", + }, }, }, }, diff --git a/tests/integration/schema/default_fields.go b/tests/integration/schema/default_fields.go index 5e77356081..51b224bd93 100644 --- a/tests/integration/schema/default_fields.go +++ b/tests/integration/schema/default_fields.go @@ -196,21 +196,16 @@ type argDef struct { typeName string } -func buildOrderArg(objectName string, fields 
[]argDef) Field { - inputFields := []any{ - makeInputObject("_docID", "Ordering", nil), - } - - for _, field := range fields { - inputFields = append(inputFields, makeInputObject(field.fieldName, field.typeName, nil)) - } - +func buildOrderArg(objectName string) Field { return Field{ "name": "order", - "type": Field{ - "name": objectName + "OrderArg", - "ofType": nil, - "inputFields": inputFields, + "type": map[string]any{ + "name": nil, + "ofType": map[string]any{ + "kind": "INPUT_OBJECT", + "name": objectName + "OrderArg", + }, + "inputFields": nil, }, } } diff --git a/tests/integration/schema/filter_test.go b/tests/integration/schema/filter_test.go index 4f42e44a3a..bf7617a3b6 100644 --- a/tests/integration/schema/filter_test.go +++ b/tests/integration/schema/filter_test.go @@ -137,12 +137,7 @@ var defaultUserArgsWithoutFilter = trimFields( groupByArg, limitArg, offsetArg, - buildOrderArg("Users", []argDef{ - { - fieldName: "name", - typeName: "Ordering", - }, - }), + buildOrderArg("Users"), }, testFilterForSimpleSchemaArgProps, ) @@ -288,20 +283,7 @@ var defaultBookArgsWithoutFilter = trimFields( groupByArg, limitArg, offsetArg, - buildOrderArg("Book", []argDef{ - { - fieldName: "author", - typeName: "AuthorOrderArg", - }, - { - fieldName: "author_id", - typeName: "Ordering", - }, - { - fieldName: "name", - typeName: "Ordering", - }, - }), + buildOrderArg("Book"), }, testFilterForOneToOneSchemaArgProps, ) diff --git a/tests/integration/schema/input_type_test.go b/tests/integration/schema/input_type_test.go index c92e2f3343..7fac10e20e 100644 --- a/tests/integration/schema/input_type_test.go +++ b/tests/integration/schema/input_type_test.go @@ -93,17 +93,12 @@ func TestInputTypeOfOrderFieldWhereSchemaHasManyRelationType(t *testing.T) { map[string]any{ "name": "order", "type": map[string]any{ - "name": "groupOrderArg", - "ofType": nil, - "inputFields": []any{ - map[string]any{ - "name": "_docID", - "type": map[string]any{ - "name": "Ordering", - "ofType": nil, - }, - }, + "name": nil, + "ofType": map[string]any{ + "kind": "INPUT_OBJECT", + "name": "groupOrderArg", }, + "inputFields": nil, }, }, ).Tidy(), @@ -182,52 +177,11 @@ func TestInputTypeOfOrderFieldWhereSchemaHasRelationType(t *testing.T) { map[string]any{ "name": "order", "type": map[string]any{ - "name": "authorOrderArg", - "ofType": nil, - "inputFields": []any{ - map[string]any{ - "name": "_docID", - "type": map[string]any{ - "name": "Ordering", - "ofType": nil, - }, - }, - map[string]any{ - "name": "age", - "type": map[string]any{ - "name": "Ordering", - "ofType": nil, - }, - }, - map[string]any{ - "name": "name", - "type": map[string]any{ - "name": "Ordering", - "ofType": nil, - }, - }, - map[string]any{ - "name": "verified", - "type": map[string]any{ - "name": "Ordering", - "ofType": nil, - }, - }, - // Without the relation type we won't have the following ordering type(s). 
- map[string]any{ - "name": "wrote", - "type": map[string]any{ - "name": "bookOrderArg", - "ofType": nil, - }, - }, - map[string]any{ - "name": "wrote_id", - "type": map[string]any{ - "name": "Ordering", - "ofType": nil, - }, - }, + "name": nil, + "inputFields": nil, + "ofType": map[string]any{ + "kind": "INPUT_OBJECT", + "name": "authorOrderArg", }, }, }, From 8dae109b6cabbd3b2d2a7faee9c9c3e7d63dc022 Mon Sep 17 00:00:00 2001 From: Fred Carle Date: Tue, 24 Sep 2024 14:42:44 -0400 Subject: [PATCH 39/71] refactor: Change from protobuf to cbor for gRPC (#3061) ## Relevant issue(s) Resolves #3057 ## Description This PR makes the net gRPC server use cbor instead of protobuf. By doing this we remove the protobuf overhead and the related dependencies which will make compilation to wasm simpler and simplify browser support. --- go.mod | 3 +- go.sum | 2 - internal/kms/pubsub.go | 42 +- net/client.go | 23 +- net/codec.go | 40 + net/dialer.go | 7 +- net/grpc.go | 105 +++ net/pb/Makefile | 28 - net/pb/net.pb.go | 983 -------------------- net/pb/net.proto | 80 -- net/pb/net_grpc.pb.go | 276 ------ net/pb/net_vtproto.pb.go | 1890 -------------------------------------- net/peer.go | 43 +- net/server.go | 44 +- net/server_test.go | 25 +- 15 files changed, 230 insertions(+), 3361 deletions(-) create mode 100644 net/codec.go create mode 100644 net/grpc.go delete mode 100644 net/pb/Makefile delete mode 100644 net/pb/net.pb.go delete mode 100644 net/pb/net.proto delete mode 100644 net/pb/net_grpc.pb.go delete mode 100644 net/pb/net_vtproto.pb.go diff --git a/go.mod b/go.mod index cf59b00c0e..e7edd3cdcd 100644 --- a/go.mod +++ b/go.mod @@ -44,7 +44,6 @@ require ( github.com/multiformats/go-multihash v0.2.3 github.com/pelletier/go-toml v1.9.5 github.com/pkg/errors v0.9.1 - github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 github.com/sourcenetwork/acp_core v0.0.0-20240607160510-47a5306b2ad2 github.com/sourcenetwork/badger/v4 v4.2.1-0.20231113215945-a63444ca5276 github.com/sourcenetwork/corelog v0.0.8 @@ -66,7 +65,6 @@ require ( golang.org/x/crypto v0.27.0 golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa google.golang.org/grpc v1.67.0 - google.golang.org/protobuf v1.34.2 ) require ( @@ -375,6 +373,7 @@ require ( google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de // indirect google.golang.org/genproto/googleapis/api v0.0.0-20240814211410-ddb44dafa142 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20240814211410-ddb44dafa142 // indirect + google.golang.org/protobuf v1.34.2 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/go.sum b/go.sum index 43308a4372..ed7cf49e3d 100644 --- a/go.sum +++ b/go.sum @@ -1265,8 +1265,6 @@ github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA= -github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= -github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= 
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= diff --git a/internal/kms/pubsub.go b/internal/kms/pubsub.go index ca67603a7c..cbcd6ee141 100644 --- a/internal/kms/pubsub.go +++ b/internal/kms/pubsub.go @@ -16,18 +16,17 @@ import ( "crypto/ecdh" "encoding/base64" + "github.com/fxamacker/cbor/v2" cidlink "github.com/ipld/go-ipld-prime/linking/cid" libpeer "github.com/libp2p/go-libp2p/core/peer" rpc "github.com/sourcenetwork/go-libp2p-pubsub-rpc" grpcpeer "google.golang.org/grpc/peer" - "google.golang.org/protobuf/proto" "github.com/sourcenetwork/defradb/crypto" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/event" "github.com/sourcenetwork/defradb/internal/encryption" - pb "github.com/sourcenetwork/defradb/net/pb" ) const pubsubTopic = "encryption" @@ -127,10 +126,15 @@ func (s *pubSubService) handleKeyRequestedEvent() { } } +type fetchEncryptionKeyRequest struct { + Links [][]byte + EphemeralPublicKey []byte +} + // handleEncryptionMessage handles incoming FetchEncryptionKeyRequest messages from the pubsub network. func (s *pubSubService) handleRequestFromPeer(peerID libpeer.ID, topic string, msg []byte) ([]byte, error) { - req := new(pb.FetchEncryptionKeyRequest) - if err := proto.Unmarshal(msg, req); err != nil { + req := new(fetchEncryptionKeyRequest) + if err := cbor.Unmarshal(msg, req); err != nil { log.ErrorContextE(s.ctx, "Failed to unmarshal pubsub message %s", err) return nil, err } @@ -141,14 +145,14 @@ func (s *pubSubService) handleRequestFromPeer(peerID libpeer.ID, topic string, m log.ErrorContextE(s.ctx, "failed attempt to get encryption key", err) return nil, errors.Wrap("failed attempt to get encryption key", err) } - return res.MarshalVT() + return cbor.Marshal(res) } func (s *pubSubService) prepareFetchEncryptionKeyRequest( cids []cidlink.Link, ephemeralPublicKey []byte, -) (*pb.FetchEncryptionKeyRequest, error) { - req := &pb.FetchEncryptionKeyRequest{ +) (*fetchEncryptionKeyRequest, error) { + req := &fetchEncryptionKeyRequest{ EphemeralPublicKey: ephemeralPublicKey, } @@ -177,7 +181,7 @@ func (s *pubSubService) requestEncryptionKeyFromPeers( return err } - data, err := req.MarshalVT() + data, err := cbor.Marshal(req) if err != nil { return errors.Wrap("failed to marshal pubsub message", err) } @@ -194,17 +198,23 @@ func (s *pubSubService) requestEncryptionKeyFromPeers( return nil } +type fetchEncryptionKeyReply struct { + Links [][]byte + Blocks [][]byte + EphemeralPublicKey []byte +} + // handleFetchEncryptionKeyResponse handles incoming FetchEncryptionKeyResponse messages func (s *pubSubService) handleFetchEncryptionKeyResponse( resp rpc.Response, - req *pb.FetchEncryptionKeyRequest, + req *fetchEncryptionKeyRequest, privateKey *ecdh.PrivateKey, result chan<- encryption.Result, ) { defer close(result) - var keyResp pb.FetchEncryptionKeyReply - if err := proto.Unmarshal(resp.Data, &keyResp); err != nil { + var keyResp fetchEncryptionKeyReply + if err := cbor.Unmarshal(resp.Data, &keyResp); err != nil { log.ErrorContextE(s.ctx, "Failed to unmarshal encryption key response", err) result <- encryption.Result{Error: err} return @@ -238,7 +248,7 @@ func (s *pubSubService) handleFetchEncryptionKeyResponse( } // makeAssociatedData creates the associated data for the encryption key request -func makeAssociatedData(req *pb.FetchEncryptionKeyRequest, peerID libpeer.ID) []byte { +func makeAssociatedData(req *fetchEncryptionKeyRequest, 
peerID libpeer.ID) []byte { return encodeToBase64(bytes.Join([][]byte{ req.EphemeralPublicKey, []byte(peerID), @@ -247,8 +257,8 @@ func makeAssociatedData(req *pb.FetchEncryptionKeyRequest, peerID libpeer.ID) [] func (s *pubSubService) tryGenEncryptionKeyLocally( ctx context.Context, - req *pb.FetchEncryptionKeyRequest, -) (*pb.FetchEncryptionKeyReply, error) { + req *fetchEncryptionKeyRequest, +) (*fetchEncryptionKeyReply, error) { blocks, err := s.getEncryptionKeysLocally(ctx, req) if err != nil || len(blocks) == 0 { return nil, err @@ -264,7 +274,7 @@ func (s *pubSubService) tryGenEncryptionKeyLocally( return nil, err } - res := &pb.FetchEncryptionKeyReply{ + res := &fetchEncryptionKeyReply{ Links: req.Links, EphemeralPublicKey: privKey.PublicKey().Bytes(), } @@ -293,7 +303,7 @@ func (s *pubSubService) tryGenEncryptionKeyLocally( // It returns the encryption keys and the targets for which the keys were found. func (s *pubSubService) getEncryptionKeysLocally( ctx context.Context, - req *pb.FetchEncryptionKeyRequest, + req *fetchEncryptionKeyRequest, ) ([][]byte, error) { blocks := make([][]byte, 0, len(req.Links)) for _, link := range req.Links { diff --git a/net/client.go b/net/client.go index 9d11a968d4..35c1de139d 100644 --- a/net/client.go +++ b/net/client.go @@ -20,7 +20,6 @@ import ( "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/event" - pb "github.com/sourcenetwork/defradb/net/pb" ) var ( @@ -32,19 +31,6 @@ var ( // pushLog creates a pushLog request and sends it to another node // over libp2p grpc connection func (s *server) pushLog(evt event.Update, pid peer.ID) error { - body := &pb.PushLogRequest_Body{ - DocID: []byte(evt.DocID), - Cid: evt.Cid.Bytes(), - SchemaRoot: []byte(evt.SchemaRoot), - Creator: s.peer.host.ID().String(), - Log: &pb.Log{ - Block: evt.Block, - }, - } - req := &pb.PushLogRequest{ - Body: body, - } - client, err := s.dial(pid) // grpc dial over P2P stream if err != nil { return NewErrPushLog(err) @@ -53,7 +39,14 @@ func (s *server) pushLog(evt event.Update, pid peer.ID) error { ctx, cancel := context.WithTimeout(s.peer.ctx, PushTimeout) defer cancel() - if _, err := client.PushLog(ctx, req); err != nil { + req := pushLogRequest{ + DocID: evt.DocID, + CID: evt.Cid.Bytes(), + SchemaRoot: evt.SchemaRoot, + Creator: s.peer.host.ID().String(), + Block: evt.Block, + } + if err := client.Invoke(ctx, servicePushLogName, req, nil); err != nil { return NewErrPushLog( err, errors.NewKV("CID", evt.Cid), diff --git a/net/codec.go b/net/codec.go new file mode 100644 index 0000000000..258e66b091 --- /dev/null +++ b/net/codec.go @@ -0,0 +1,40 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package net + +import ( + "github.com/fxamacker/cbor/v2" + "google.golang.org/grpc/encoding" +) + +const cborCodecName = "cbor" + +// cborCodec is a gRPC Codec implementation with CBOR encoding. 
+type cborCodec struct{} + +func (c *cborCodec) Marshal(v any) ([]byte, error) { + return cbor.Marshal(v) +} + +func (c *cborCodec) Unmarshal(data []byte, v any) error { + if v == nil { + return nil + } + return cbor.Unmarshal(data, v) +} + +func (c *cborCodec) Name() string { + return cborCodecName +} + +func init() { + encoding.RegisterCodec(&cborCodec{}) +} diff --git a/net/dialer.go b/net/dialer.go index 0202da8d9d..08a22ffc5e 100644 --- a/net/dialer.go +++ b/net/dialer.go @@ -23,11 +23,10 @@ import ( "github.com/sourcenetwork/defradb/errors" corenet "github.com/sourcenetwork/defradb/internal/core/net" - pb "github.com/sourcenetwork/defradb/net/pb" ) // dial attempts to open a gRPC connection over libp2p to a peer. -func (s *server) dial(peerID libpeer.ID) (pb.ServiceClient, error) { +func (s *server) dial(peerID libpeer.ID) (*grpc.ClientConn, error) { s.mu.Lock() defer s.mu.Unlock() conn, ok := s.conns[peerID] @@ -37,7 +36,7 @@ func (s *server) dial(peerID libpeer.ID) (pb.ServiceClient, error) { return nil, err } } else { - return pb.NewServiceClient(conn), nil + return conn, nil } } // We need the "passthrough:" in the beginning of the target, @@ -54,7 +53,7 @@ func (s *server) dial(peerID libpeer.ID) (pb.ServiceClient, error) { return nil, err } s.conns[peerID] = conn - return pb.NewServiceClient(conn), nil + return conn, nil } // getLibp2pDialer returns a WithContextDialer option for libp2p dialing. diff --git a/net/grpc.go b/net/grpc.go new file mode 100644 index 0000000000..8e526de102 --- /dev/null +++ b/net/grpc.go @@ -0,0 +1,105 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package net + +import ( + "context" + + "google.golang.org/grpc" +) + +const ( + grpcServiceName = "defradb.net.Service" + + serviceGetDocGraphName = "/" + grpcServiceName + "/GetDocGraph" + servicePushDocGraphName = "/" + grpcServiceName + "/PushDocGraph" + serviceGetLogName = "/" + grpcServiceName + "/GetLog" + servicePushLogName = "/" + grpcServiceName + "/PushLog" + serviceGetHeadLogName = "/" + grpcServiceName + "/GetHeadLog" +) + +type getDocGraphRequest struct{} + +type getDocGraphReply struct{} + +type getHeadLogRequest struct{} + +type getHeadLogReply struct{} + +type getLogRequest struct{} + +type getLogReply struct{} + +type pushDocGraphRequest struct{} + +type pushDocGraphReply struct{} + +type pushLogRequest struct { + DocID string + CID []byte + SchemaRoot string + Creator string + Block []byte +} + +type pushLogReply struct{} + +type serviceServer interface { + // GetDocGraph from this peer. + GetDocGraph(context.Context, *getDocGraphRequest) (*getDocGraphReply, error) + // PushDocGraph to this peer. + PushDocGraph(context.Context, *pushDocGraphRequest) (*pushDocGraphReply, error) + // GetLog from this peer. + GetLog(context.Context, *getLogRequest) (*getLogReply, error) + // PushLog to this peer. 
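+	//
+	// A client-side sketch of reaching this method over a dialed libp2p
+	// gRPC connection (mirrors net/client.go; conn and the field values
+	// are placeholders, and the nil reply argument is safe because
+	// cborCodec.Unmarshal treats a nil destination as a no-op):
+	//
+	//	req := pushLogRequest{DocID: docID, CID: c.Bytes(), Block: block}
+	//	err := conn.Invoke(ctx, servicePushLogName, req, nil)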
+ PushLog(context.Context, *pushLogRequest) (*pushLogReply, error) + // GetHeadLog from this peer + GetHeadLog(context.Context, *getHeadLogRequest) (*getHeadLogReply, error) +} + +func pushLogHandler( + srv any, + ctx context.Context, + dec func(any) error, + interceptor grpc.UnaryServerInterceptor, +) (any, error) { + in := new(pushLogRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(serviceServer).PushLog(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: servicePushLogName, + } + handler := func(ctx context.Context, req any) (any, error) { + return srv.(serviceServer).PushLog(ctx, req.(*pushLogRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func registerServiceServer(s grpc.ServiceRegistrar, srv serviceServer) { + desc := &grpc.ServiceDesc{ + ServiceName: grpcServiceName, + HandlerType: (*serviceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "PushLog", + Handler: pushLogHandler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "defradb.cbor", + } + s.RegisterService(desc, srv) +} diff --git a/net/pb/Makefile b/net/pb/Makefile deleted file mode 100644 index 30b0e92dfa..0000000000 --- a/net/pb/Makefile +++ /dev/null @@ -1,28 +0,0 @@ -PB = $(wildcard *.proto) -GO = $(PB:.proto=.pb.go) - -PROTOC_GEN_GO := $(shell which protoc-gen-go) -PROTOC_GEN_GO_GRPC := $(shell which protoc-gen-go-grpc) -PROTOC_GEN_GO_VTPROTO := $(shell which protoc-gen-go-vtproto) - -all: $(GO) - -.PHONY: deps -deps: - go install google.golang.org/protobuf/cmd/protoc-gen-go@latest - go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@latest - go install github.com/planetscale/vtprotobuf/cmd/protoc-gen-go-vtproto@latest - -%.pb.go: %.proto - protoc \ - -I. \ - --go_out=. --plugin protoc-gen-go="$(PROTOC_GEN_GO)" \ - --go-grpc_out=. --plugin protoc-gen-go-grpc="$(PROTOC_GEN_GO_GRPC)" \ - --go-vtproto_out=. --plugin protoc-gen-go-vtproto="$(PROTOC_GEN_GO_VTPROTO)" \ - --go-vtproto_opt=features=marshal+unmarshal+size \ - $< # This line specifies the input file - -.PHONY: clean -clean: - rm -f *.pb.go - rm -f *pb_test.go diff --git a/net/pb/net.pb.go b/net/pb/net.pb.go deleted file mode 100644 index dbac6829d0..0000000000 --- a/net/pb/net.pb.go +++ /dev/null @@ -1,983 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.34.2 -// protoc v5.27.1 -// source: net.proto - -package net_pb - -import ( - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -// Log represents a thread log. -type Log struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // block is the top-level node's raw data as an ipld.Block. 
- Block []byte `protobuf:"bytes,1,opt,name=block,proto3" json:"block,omitempty"` -} - -func (x *Log) Reset() { - *x = Log{} - if protoimpl.UnsafeEnabled { - mi := &file_net_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Log) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Log) ProtoMessage() {} - -func (x *Log) ProtoReflect() protoreflect.Message { - mi := &file_net_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Log.ProtoReflect.Descriptor instead. -func (*Log) Descriptor() ([]byte, []int) { - return file_net_proto_rawDescGZIP(), []int{0} -} - -func (x *Log) GetBlock() []byte { - if x != nil { - return x.Block - } - return nil -} - -type GetDocGraphRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *GetDocGraphRequest) Reset() { - *x = GetDocGraphRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_net_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetDocGraphRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetDocGraphRequest) ProtoMessage() {} - -func (x *GetDocGraphRequest) ProtoReflect() protoreflect.Message { - mi := &file_net_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetDocGraphRequest.ProtoReflect.Descriptor instead. -func (*GetDocGraphRequest) Descriptor() ([]byte, []int) { - return file_net_proto_rawDescGZIP(), []int{1} -} - -type GetDocGraphReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *GetDocGraphReply) Reset() { - *x = GetDocGraphReply{} - if protoimpl.UnsafeEnabled { - mi := &file_net_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetDocGraphReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetDocGraphReply) ProtoMessage() {} - -func (x *GetDocGraphReply) ProtoReflect() protoreflect.Message { - mi := &file_net_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetDocGraphReply.ProtoReflect.Descriptor instead. 
-func (*GetDocGraphReply) Descriptor() ([]byte, []int) { - return file_net_proto_rawDescGZIP(), []int{2} -} - -type PushDocGraphRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *PushDocGraphRequest) Reset() { - *x = PushDocGraphRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_net_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *PushDocGraphRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*PushDocGraphRequest) ProtoMessage() {} - -func (x *PushDocGraphRequest) ProtoReflect() protoreflect.Message { - mi := &file_net_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use PushDocGraphRequest.ProtoReflect.Descriptor instead. -func (*PushDocGraphRequest) Descriptor() ([]byte, []int) { - return file_net_proto_rawDescGZIP(), []int{3} -} - -type PushDocGraphReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *PushDocGraphReply) Reset() { - *x = PushDocGraphReply{} - if protoimpl.UnsafeEnabled { - mi := &file_net_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *PushDocGraphReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*PushDocGraphReply) ProtoMessage() {} - -func (x *PushDocGraphReply) ProtoReflect() protoreflect.Message { - mi := &file_net_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use PushDocGraphReply.ProtoReflect.Descriptor instead. -func (*PushDocGraphReply) Descriptor() ([]byte, []int) { - return file_net_proto_rawDescGZIP(), []int{4} -} - -type GetLogRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *GetLogRequest) Reset() { - *x = GetLogRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_net_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetLogRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetLogRequest) ProtoMessage() {} - -func (x *GetLogRequest) ProtoReflect() protoreflect.Message { - mi := &file_net_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetLogRequest.ProtoReflect.Descriptor instead. 
-func (*GetLogRequest) Descriptor() ([]byte, []int) { - return file_net_proto_rawDescGZIP(), []int{5} -} - -type GetLogReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *GetLogReply) Reset() { - *x = GetLogReply{} - if protoimpl.UnsafeEnabled { - mi := &file_net_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetLogReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetLogReply) ProtoMessage() {} - -func (x *GetLogReply) ProtoReflect() protoreflect.Message { - mi := &file_net_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetLogReply.ProtoReflect.Descriptor instead. -func (*GetLogReply) Descriptor() ([]byte, []int) { - return file_net_proto_rawDescGZIP(), []int{6} -} - -type PushLogRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Body *PushLogRequest_Body `protobuf:"bytes,1,opt,name=body,proto3" json:"body,omitempty"` -} - -func (x *PushLogRequest) Reset() { - *x = PushLogRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_net_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *PushLogRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*PushLogRequest) ProtoMessage() {} - -func (x *PushLogRequest) ProtoReflect() protoreflect.Message { - mi := &file_net_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use PushLogRequest.ProtoReflect.Descriptor instead. -func (*PushLogRequest) Descriptor() ([]byte, []int) { - return file_net_proto_rawDescGZIP(), []int{7} -} - -func (x *PushLogRequest) GetBody() *PushLogRequest_Body { - if x != nil { - return x.Body - } - return nil -} - -// FetchEncryptionKeyRequest is a request to receive a doc encryption key -// from a peer that holds it. -type FetchEncryptionKeyRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // links is the list of cid links of the blocks containing encryption keys. 
- Links [][]byte `protobuf:"bytes,1,rep,name=links,proto3" json:"links,omitempty"` - // ephemeralPublicKey is an ephemeral public of the requesting peer for deriving shared secret - EphemeralPublicKey []byte `protobuf:"bytes,2,opt,name=ephemeralPublicKey,proto3" json:"ephemeralPublicKey,omitempty"` -} - -func (x *FetchEncryptionKeyRequest) Reset() { - *x = FetchEncryptionKeyRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_net_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FetchEncryptionKeyRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FetchEncryptionKeyRequest) ProtoMessage() {} - -func (x *FetchEncryptionKeyRequest) ProtoReflect() protoreflect.Message { - mi := &file_net_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FetchEncryptionKeyRequest.ProtoReflect.Descriptor instead. -func (*FetchEncryptionKeyRequest) Descriptor() ([]byte, []int) { - return file_net_proto_rawDescGZIP(), []int{8} -} - -func (x *FetchEncryptionKeyRequest) GetLinks() [][]byte { - if x != nil { - return x.Links - } - return nil -} - -func (x *FetchEncryptionKeyRequest) GetEphemeralPublicKey() []byte { - if x != nil { - return x.EphemeralPublicKey - } - return nil -} - -// FetchEncryptionKeyReply is a response to FetchEncryptionKeyRequest request -// by a peer that holds the requested doc encryption key. -type FetchEncryptionKeyReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // links is the list of cid links of the blocks containing encryption keys. - Links [][]byte `protobuf:"bytes,1,rep,name=links,proto3" json:"links,omitempty"` - // blocks is the list of blocks containing encryption keys. The order of blocks should match the order of links. - // Every block is encrypted and contains a nonce. - Blocks [][]byte `protobuf:"bytes,2,rep,name=blocks,proto3" json:"blocks,omitempty"` - // ephemeralPublicKey is an ephemeral public of the responding peer for deriving shared secret - EphemeralPublicKey []byte `protobuf:"bytes,3,opt,name=ephemeralPublicKey,proto3" json:"ephemeralPublicKey,omitempty"` -} - -func (x *FetchEncryptionKeyReply) Reset() { - *x = FetchEncryptionKeyReply{} - if protoimpl.UnsafeEnabled { - mi := &file_net_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FetchEncryptionKeyReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FetchEncryptionKeyReply) ProtoMessage() {} - -func (x *FetchEncryptionKeyReply) ProtoReflect() protoreflect.Message { - mi := &file_net_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FetchEncryptionKeyReply.ProtoReflect.Descriptor instead. 
-func (*FetchEncryptionKeyReply) Descriptor() ([]byte, []int) { - return file_net_proto_rawDescGZIP(), []int{9} -} - -func (x *FetchEncryptionKeyReply) GetLinks() [][]byte { - if x != nil { - return x.Links - } - return nil -} - -func (x *FetchEncryptionKeyReply) GetBlocks() [][]byte { - if x != nil { - return x.Blocks - } - return nil -} - -func (x *FetchEncryptionKeyReply) GetEphemeralPublicKey() []byte { - if x != nil { - return x.EphemeralPublicKey - } - return nil -} - -type GetHeadLogRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *GetHeadLogRequest) Reset() { - *x = GetHeadLogRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_net_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetHeadLogRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetHeadLogRequest) ProtoMessage() {} - -func (x *GetHeadLogRequest) ProtoReflect() protoreflect.Message { - mi := &file_net_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetHeadLogRequest.ProtoReflect.Descriptor instead. -func (*GetHeadLogRequest) Descriptor() ([]byte, []int) { - return file_net_proto_rawDescGZIP(), []int{10} -} - -type PushLogReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *PushLogReply) Reset() { - *x = PushLogReply{} - if protoimpl.UnsafeEnabled { - mi := &file_net_proto_msgTypes[11] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *PushLogReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*PushLogReply) ProtoMessage() {} - -func (x *PushLogReply) ProtoReflect() protoreflect.Message { - mi := &file_net_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use PushLogReply.ProtoReflect.Descriptor instead. -func (*PushLogReply) Descriptor() ([]byte, []int) { - return file_net_proto_rawDescGZIP(), []int{11} -} - -type GetHeadLogReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *GetHeadLogReply) Reset() { - *x = GetHeadLogReply{} - if protoimpl.UnsafeEnabled { - mi := &file_net_proto_msgTypes[12] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetHeadLogReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetHeadLogReply) ProtoMessage() {} - -func (x *GetHeadLogReply) ProtoReflect() protoreflect.Message { - mi := &file_net_proto_msgTypes[12] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetHeadLogReply.ProtoReflect.Descriptor instead. 
-func (*GetHeadLogReply) Descriptor() ([]byte, []int) { - return file_net_proto_rawDescGZIP(), []int{12} -} - -type PushLogRequest_Body struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // docID is the ID of the document that is affected by the log. - DocID []byte `protobuf:"bytes,1,opt,name=docID,proto3" json:"docID,omitempty"` - // cid is the CID of the composite of the document. - Cid []byte `protobuf:"bytes,2,opt,name=cid,proto3" json:"cid,omitempty"` - // schemaRoot is the SchemaRoot of the collection that the document resides in. - SchemaRoot []byte `protobuf:"bytes,3,opt,name=schemaRoot,proto3" json:"schemaRoot,omitempty"` - // creator is the PeerID of the peer that created the log. - Creator string `protobuf:"bytes,4,opt,name=creator,proto3" json:"creator,omitempty"` - // log hold the block that represent version of the document. - Log *Log `protobuf:"bytes,6,opt,name=log,proto3" json:"log,omitempty"` -} - -func (x *PushLogRequest_Body) Reset() { - *x = PushLogRequest_Body{} - if protoimpl.UnsafeEnabled { - mi := &file_net_proto_msgTypes[13] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *PushLogRequest_Body) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*PushLogRequest_Body) ProtoMessage() {} - -func (x *PushLogRequest_Body) ProtoReflect() protoreflect.Message { - mi := &file_net_proto_msgTypes[13] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use PushLogRequest_Body.ProtoReflect.Descriptor instead. -func (*PushLogRequest_Body) Descriptor() ([]byte, []int) { - return file_net_proto_rawDescGZIP(), []int{7, 0} -} - -func (x *PushLogRequest_Body) GetDocID() []byte { - if x != nil { - return x.DocID - } - return nil -} - -func (x *PushLogRequest_Body) GetCid() []byte { - if x != nil { - return x.Cid - } - return nil -} - -func (x *PushLogRequest_Body) GetSchemaRoot() []byte { - if x != nil { - return x.SchemaRoot - } - return nil -} - -func (x *PushLogRequest_Body) GetCreator() string { - if x != nil { - return x.Creator - } - return "" -} - -func (x *PushLogRequest_Body) GetLog() *Log { - if x != nil { - return x.Log - } - return nil -} - -var File_net_proto protoreflect.FileDescriptor - -var file_net_proto_rawDesc = []byte{ - 0x0a, 0x09, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x06, 0x6e, 0x65, 0x74, - 0x2e, 0x70, 0x62, 0x22, 0x1b, 0x0a, 0x03, 0x4c, 0x6f, 0x67, 0x12, 0x14, 0x0a, 0x05, 0x62, 0x6c, - 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, - 0x22, 0x14, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x12, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, - 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x15, 0x0a, 0x13, 0x50, 0x75, - 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x22, 0x13, 0x0a, 0x11, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, - 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x0f, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x0d, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x4c, 0x6f, - 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0xcb, 0x01, 0x0a, 0x0e, 0x50, 0x75, 
0x73, 0x68, 0x4c, - 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, 0x04, 0x62, 0x6f, 0x64, - 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, - 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, - 0x42, 0x6f, 0x64, 0x79, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x1a, 0x87, 0x01, 0x0a, 0x04, 0x42, - 0x6f, 0x64, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x64, 0x6f, 0x63, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x05, 0x64, 0x6f, 0x63, 0x49, 0x44, 0x12, 0x10, 0x0a, 0x03, 0x63, 0x69, 0x64, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x03, 0x63, 0x69, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, - 0x0a, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x63, - 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x72, - 0x65, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x1d, 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, 0x06, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x4c, 0x6f, 0x67, 0x52, - 0x03, 0x6c, 0x6f, 0x67, 0x22, 0x61, 0x0a, 0x19, 0x46, 0x65, 0x74, 0x63, 0x68, 0x45, 0x6e, 0x63, - 0x72, 0x79, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6e, 0x6b, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0c, - 0x52, 0x05, 0x6c, 0x69, 0x6e, 0x6b, 0x73, 0x12, 0x2e, 0x0a, 0x12, 0x65, 0x70, 0x68, 0x65, 0x6d, - 0x65, 0x72, 0x61, 0x6c, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0c, 0x52, 0x12, 0x65, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x50, 0x75, - 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x22, 0x77, 0x0a, 0x17, 0x46, 0x65, 0x74, 0x63, 0x68, - 0x45, 0x6e, 0x63, 0x72, 0x79, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x70, - 0x6c, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6e, 0x6b, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, - 0x0c, 0x52, 0x05, 0x6c, 0x69, 0x6e, 0x6b, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x62, 0x6c, 0x6f, 0x63, - 0x6b, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x06, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x73, - 0x12, 0x2e, 0x0a, 0x12, 0x65, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x50, 0x75, 0x62, - 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x12, 0x65, 0x70, - 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, - 0x22, 0x13, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x0e, 0x0a, 0x0c, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, - 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x11, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, - 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x32, 0xd1, 0x02, 0x0a, 0x07, 0x53, 0x65, 0x72, - 0x76, 0x69, 0x63, 0x65, 0x12, 0x45, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, 0x47, 0x72, - 0x61, 0x70, 0x68, 0x12, 0x1a, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, - 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x18, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, 0x47, - 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x48, 0x0a, 0x0c, 0x50, - 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x12, 0x1b, 0x2e, 0x6e, 0x65, - 0x74, 
0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, - 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, - 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, - 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x36, 0x0a, 0x06, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x12, - 0x15, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x13, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, - 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x39, 0x0a, - 0x07, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x12, 0x16, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, - 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x14, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, - 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x42, 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x48, - 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x12, 0x19, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, - 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x17, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x48, 0x65, - 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x42, 0x0a, 0x5a, 0x08, - 0x2f, 0x3b, 0x6e, 0x65, 0x74, 0x5f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_net_proto_rawDescOnce sync.Once - file_net_proto_rawDescData = file_net_proto_rawDesc -) - -func file_net_proto_rawDescGZIP() []byte { - file_net_proto_rawDescOnce.Do(func() { - file_net_proto_rawDescData = protoimpl.X.CompressGZIP(file_net_proto_rawDescData) - }) - return file_net_proto_rawDescData -} - -var file_net_proto_msgTypes = make([]protoimpl.MessageInfo, 14) -var file_net_proto_goTypes = []any{ - (*Log)(nil), // 0: net.pb.Log - (*GetDocGraphRequest)(nil), // 1: net.pb.GetDocGraphRequest - (*GetDocGraphReply)(nil), // 2: net.pb.GetDocGraphReply - (*PushDocGraphRequest)(nil), // 3: net.pb.PushDocGraphRequest - (*PushDocGraphReply)(nil), // 4: net.pb.PushDocGraphReply - (*GetLogRequest)(nil), // 5: net.pb.GetLogRequest - (*GetLogReply)(nil), // 6: net.pb.GetLogReply - (*PushLogRequest)(nil), // 7: net.pb.PushLogRequest - (*FetchEncryptionKeyRequest)(nil), // 8: net.pb.FetchEncryptionKeyRequest - (*FetchEncryptionKeyReply)(nil), // 9: net.pb.FetchEncryptionKeyReply - (*GetHeadLogRequest)(nil), // 10: net.pb.GetHeadLogRequest - (*PushLogReply)(nil), // 11: net.pb.PushLogReply - (*GetHeadLogReply)(nil), // 12: net.pb.GetHeadLogReply - (*PushLogRequest_Body)(nil), // 13: net.pb.PushLogRequest.Body -} -var file_net_proto_depIdxs = []int32{ - 13, // 0: net.pb.PushLogRequest.body:type_name -> net.pb.PushLogRequest.Body - 0, // 1: net.pb.PushLogRequest.Body.log:type_name -> net.pb.Log - 1, // 2: net.pb.Service.GetDocGraph:input_type -> net.pb.GetDocGraphRequest - 3, // 3: net.pb.Service.PushDocGraph:input_type -> net.pb.PushDocGraphRequest - 5, // 4: net.pb.Service.GetLog:input_type -> net.pb.GetLogRequest - 7, // 5: net.pb.Service.PushLog:input_type -> net.pb.PushLogRequest - 10, // 6: net.pb.Service.GetHeadLog:input_type -> net.pb.GetHeadLogRequest - 2, // 7: net.pb.Service.GetDocGraph:output_type -> net.pb.GetDocGraphReply - 4, // 8: net.pb.Service.PushDocGraph:output_type -> net.pb.PushDocGraphReply - 6, // 9: 
net.pb.Service.GetLog:output_type -> net.pb.GetLogReply - 11, // 10: net.pb.Service.PushLog:output_type -> net.pb.PushLogReply - 12, // 11: net.pb.Service.GetHeadLog:output_type -> net.pb.GetHeadLogReply - 7, // [7:12] is the sub-list for method output_type - 2, // [2:7] is the sub-list for method input_type - 2, // [2:2] is the sub-list for extension type_name - 2, // [2:2] is the sub-list for extension extendee - 0, // [0:2] is the sub-list for field type_name -} - -func init() { file_net_proto_init() } -func file_net_proto_init() { - if File_net_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_net_proto_msgTypes[0].Exporter = func(v any, i int) any { - switch v := v.(*Log); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_net_proto_msgTypes[1].Exporter = func(v any, i int) any { - switch v := v.(*GetDocGraphRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_net_proto_msgTypes[2].Exporter = func(v any, i int) any { - switch v := v.(*GetDocGraphReply); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_net_proto_msgTypes[3].Exporter = func(v any, i int) any { - switch v := v.(*PushDocGraphRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_net_proto_msgTypes[4].Exporter = func(v any, i int) any { - switch v := v.(*PushDocGraphReply); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_net_proto_msgTypes[5].Exporter = func(v any, i int) any { - switch v := v.(*GetLogRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_net_proto_msgTypes[6].Exporter = func(v any, i int) any { - switch v := v.(*GetLogReply); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_net_proto_msgTypes[7].Exporter = func(v any, i int) any { - switch v := v.(*PushLogRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_net_proto_msgTypes[8].Exporter = func(v any, i int) any { - switch v := v.(*FetchEncryptionKeyRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_net_proto_msgTypes[9].Exporter = func(v any, i int) any { - switch v := v.(*FetchEncryptionKeyReply); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_net_proto_msgTypes[10].Exporter = func(v any, i int) any { - switch v := v.(*GetHeadLogRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_net_proto_msgTypes[11].Exporter = func(v any, i int) any { - switch v := v.(*PushLogReply); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_net_proto_msgTypes[12].Exporter = func(v any, i int) any { - switch v := v.(*GetHeadLogReply); i { - case 0: - return &v.state 
- case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_net_proto_msgTypes[13].Exporter = func(v any, i int) any { - switch v := v.(*PushLogRequest_Body); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_net_proto_rawDesc, - NumEnums: 0, - NumMessages: 14, - NumExtensions: 0, - NumServices: 1, - }, - GoTypes: file_net_proto_goTypes, - DependencyIndexes: file_net_proto_depIdxs, - MessageInfos: file_net_proto_msgTypes, - }.Build() - File_net_proto = out.File - file_net_proto_rawDesc = nil - file_net_proto_goTypes = nil - file_net_proto_depIdxs = nil -} diff --git a/net/pb/net.proto b/net/pb/net.proto deleted file mode 100644 index 8dc8fe8a46..0000000000 --- a/net/pb/net.proto +++ /dev/null @@ -1,80 +0,0 @@ -syntax = "proto3"; -package net.pb; - -option go_package = "/;net_pb"; - -// Log represents a thread log. -message Log { - // block is the top-level node's raw data as an ipld.Block. - bytes block = 1; -} - -message GetDocGraphRequest {} - -message GetDocGraphReply {} - -message PushDocGraphRequest {} - -message PushDocGraphReply {} - -message GetLogRequest {} - -message GetLogReply {} - -message PushLogRequest { - Body body = 1; - - message Body { - // docID is the ID of the document that is affected by the log. - bytes docID = 1; - // cid is the CID of the composite of the document. - bytes cid = 2; - // schemaRoot is the SchemaRoot of the collection that the document resides in. - bytes schemaRoot = 3; - // creator is the PeerID of the peer that created the log. - string creator = 4; - // log hold the block that represent version of the document. - Log log = 6; - } -} - -// FetchEncryptionKeyRequest is a request to receive a doc encryption key -// from a peer that holds it. -message FetchEncryptionKeyRequest { - // links is the list of cid links of the blocks containing encryption keys. - repeated bytes links = 1; - // ephemeralPublicKey is an ephemeral public of the requesting peer for deriving shared secret - bytes ephemeralPublicKey = 2; -} - -// FetchEncryptionKeyReply is a response to FetchEncryptionKeyRequest request -// by a peer that holds the requested doc encryption key. -message FetchEncryptionKeyReply { - // links is the list of cid links of the blocks containing encryption keys. - repeated bytes links = 1; - // blocks is the list of blocks containing encryption keys. The order of blocks should match the order of links. - // Every block is encrypted and contains a nonce. - repeated bytes blocks = 2; - // ephemeralPublicKey is an ephemeral public of the responding peer for deriving shared secret - bytes ephemeralPublicKey = 3; -} - -message GetHeadLogRequest {} - -message PushLogReply {} - -message GetHeadLogReply {} - -// Service is the peer-to-peer network API for document sync -service Service { - // GetDocGraph from this peer. - rpc GetDocGraph(GetDocGraphRequest) returns (GetDocGraphReply) {} - // PushDocGraph to this peer. - rpc PushDocGraph(PushDocGraphRequest) returns (PushDocGraphReply) {} - // GetLog from this peer. - rpc GetLog(GetLogRequest) returns (GetLogReply) {} - // PushLog to this peer. 
- rpc PushLog(PushLogRequest) returns (PushLogReply) {} - // GetHeadLog from this peer - rpc GetHeadLog(GetHeadLogRequest) returns (GetHeadLogReply) {} -} diff --git a/net/pb/net_grpc.pb.go b/net/pb/net_grpc.pb.go deleted file mode 100644 index 84564d6bec..0000000000 --- a/net/pb/net_grpc.pb.go +++ /dev/null @@ -1,276 +0,0 @@ -// Code generated by protoc-gen-go-grpc. DO NOT EDIT. -// versions: -// - protoc-gen-go-grpc v1.4.0 -// - protoc v5.27.1 -// source: net.proto - -package net_pb - -import ( - context "context" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" -) - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -// Requires gRPC-Go v1.62.0 or later. -const _ = grpc.SupportPackageIsVersion8 - -const ( - Service_GetDocGraph_FullMethodName = "/net.pb.Service/GetDocGraph" - Service_PushDocGraph_FullMethodName = "/net.pb.Service/PushDocGraph" - Service_GetLog_FullMethodName = "/net.pb.Service/GetLog" - Service_PushLog_FullMethodName = "/net.pb.Service/PushLog" - Service_GetHeadLog_FullMethodName = "/net.pb.Service/GetHeadLog" -) - -// ServiceClient is the client API for Service service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. -// -// Service is the peer-to-peer network API for document sync -type ServiceClient interface { - // GetDocGraph from this peer. - GetDocGraph(ctx context.Context, in *GetDocGraphRequest, opts ...grpc.CallOption) (*GetDocGraphReply, error) - // PushDocGraph to this peer. - PushDocGraph(ctx context.Context, in *PushDocGraphRequest, opts ...grpc.CallOption) (*PushDocGraphReply, error) - // GetLog from this peer. - GetLog(ctx context.Context, in *GetLogRequest, opts ...grpc.CallOption) (*GetLogReply, error) - // PushLog to this peer. - PushLog(ctx context.Context, in *PushLogRequest, opts ...grpc.CallOption) (*PushLogReply, error) - // GetHeadLog from this peer - GetHeadLog(ctx context.Context, in *GetHeadLogRequest, opts ...grpc.CallOption) (*GetHeadLogReply, error) -} - -type serviceClient struct { - cc grpc.ClientConnInterface -} - -func NewServiceClient(cc grpc.ClientConnInterface) ServiceClient { - return &serviceClient{cc} -} - -func (c *serviceClient) GetDocGraph(ctx context.Context, in *GetDocGraphRequest, opts ...grpc.CallOption) (*GetDocGraphReply, error) { - cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) - out := new(GetDocGraphReply) - err := c.cc.Invoke(ctx, Service_GetDocGraph_FullMethodName, in, out, cOpts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *serviceClient) PushDocGraph(ctx context.Context, in *PushDocGraphRequest, opts ...grpc.CallOption) (*PushDocGraphReply, error) { - cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) - out := new(PushDocGraphReply) - err := c.cc.Invoke(ctx, Service_PushDocGraph_FullMethodName, in, out, cOpts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *serviceClient) GetLog(ctx context.Context, in *GetLogRequest, opts ...grpc.CallOption) (*GetLogReply, error) { - cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) - out := new(GetLogReply) - err := c.cc.Invoke(ctx, Service_GetLog_FullMethodName, in, out, cOpts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *serviceClient) PushLog(ctx context.Context, in *PushLogRequest, opts ...grpc.CallOption) (*PushLogReply, error) { - cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) - out := new(PushLogReply) - err := c.cc.Invoke(ctx, Service_PushLog_FullMethodName, in, out, cOpts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *serviceClient) GetHeadLog(ctx context.Context, in *GetHeadLogRequest, opts ...grpc.CallOption) (*GetHeadLogReply, error) { - cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) - out := new(GetHeadLogReply) - err := c.cc.Invoke(ctx, Service_GetHeadLog_FullMethodName, in, out, cOpts...) - if err != nil { - return nil, err - } - return out, nil -} - -// ServiceServer is the server API for Service service. -// All implementations must embed UnimplementedServiceServer -// for forward compatibility -// -// Service is the peer-to-peer network API for document sync -type ServiceServer interface { - // GetDocGraph from this peer. - GetDocGraph(context.Context, *GetDocGraphRequest) (*GetDocGraphReply, error) - // PushDocGraph to this peer. - PushDocGraph(context.Context, *PushDocGraphRequest) (*PushDocGraphReply, error) - // GetLog from this peer. - GetLog(context.Context, *GetLogRequest) (*GetLogReply, error) - // PushLog to this peer. - PushLog(context.Context, *PushLogRequest) (*PushLogReply, error) - // GetHeadLog from this peer - GetHeadLog(context.Context, *GetHeadLogRequest) (*GetHeadLogReply, error) - mustEmbedUnimplementedServiceServer() -} - -// UnimplementedServiceServer must be embedded to have forward compatible implementations. -type UnimplementedServiceServer struct { -} - -func (UnimplementedServiceServer) GetDocGraph(context.Context, *GetDocGraphRequest) (*GetDocGraphReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetDocGraph not implemented") -} -func (UnimplementedServiceServer) PushDocGraph(context.Context, *PushDocGraphRequest) (*PushDocGraphReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method PushDocGraph not implemented") -} -func (UnimplementedServiceServer) GetLog(context.Context, *GetLogRequest) (*GetLogReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetLog not implemented") -} -func (UnimplementedServiceServer) PushLog(context.Context, *PushLogRequest) (*PushLogReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method PushLog not implemented") -} -func (UnimplementedServiceServer) GetHeadLog(context.Context, *GetHeadLogRequest) (*GetHeadLogReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetHeadLog not implemented") -} -func (UnimplementedServiceServer) mustEmbedUnimplementedServiceServer() {} - -// UnsafeServiceServer may be embedded to opt out of forward compatibility for this service. -// Use of this interface is not recommended, as added methods to ServiceServer will -// result in compilation errors. 
-type UnsafeServiceServer interface { - mustEmbedUnimplementedServiceServer() -} - -func RegisterServiceServer(s grpc.ServiceRegistrar, srv ServiceServer) { - s.RegisterService(&Service_ServiceDesc, srv) -} - -func _Service_GetDocGraph_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetDocGraphRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ServiceServer).GetDocGraph(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: Service_GetDocGraph_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ServiceServer).GetDocGraph(ctx, req.(*GetDocGraphRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _Service_PushDocGraph_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(PushDocGraphRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ServiceServer).PushDocGraph(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: Service_PushDocGraph_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ServiceServer).PushDocGraph(ctx, req.(*PushDocGraphRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _Service_GetLog_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetLogRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ServiceServer).GetLog(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: Service_GetLog_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ServiceServer).GetLog(ctx, req.(*GetLogRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _Service_PushLog_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(PushLogRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ServiceServer).PushLog(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: Service_PushLog_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ServiceServer).PushLog(ctx, req.(*PushLogRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _Service_GetHeadLog_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetHeadLogRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ServiceServer).GetHeadLog(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: Service_GetHeadLog_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ServiceServer).GetHeadLog(ctx, req.(*GetHeadLogRequest)) - } - return interceptor(ctx, in, info, handler) -} - -// Service_ServiceDesc is the grpc.ServiceDesc for Service service. 
-// It's only intended for direct use with grpc.RegisterService, -// and not to be introspected or modified (even as a copy) -var Service_ServiceDesc = grpc.ServiceDesc{ - ServiceName: "net.pb.Service", - HandlerType: (*ServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "GetDocGraph", - Handler: _Service_GetDocGraph_Handler, - }, - { - MethodName: "PushDocGraph", - Handler: _Service_PushDocGraph_Handler, - }, - { - MethodName: "GetLog", - Handler: _Service_GetLog_Handler, - }, - { - MethodName: "PushLog", - Handler: _Service_PushLog_Handler, - }, - { - MethodName: "GetHeadLog", - Handler: _Service_GetHeadLog_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "net.proto", -} diff --git a/net/pb/net_vtproto.pb.go b/net/pb/net_vtproto.pb.go deleted file mode 100644 index bf1c93e8e8..0000000000 --- a/net/pb/net_vtproto.pb.go +++ /dev/null @@ -1,1890 +0,0 @@ -// Code generated by protoc-gen-go-vtproto. DO NOT EDIT. -// protoc-gen-go-vtproto version: v0.6.0 -// source: net.proto - -package net_pb - -import ( - fmt "fmt" - protohelpers "github.com/planetscale/vtprotobuf/protohelpers" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - io "io" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -func (m *Log) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *Log) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *Log) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - if len(m.Block) > 0 { - i -= len(m.Block) - copy(dAtA[i:], m.Block) - i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.Block))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *GetDocGraphRequest) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetDocGraphRequest) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *GetDocGraphRequest) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - return len(dAtA) - i, nil -} - -func (m *GetDocGraphReply) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetDocGraphReply) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *GetDocGraphReply) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ 
= l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - return len(dAtA) - i, nil -} - -func (m *PushDocGraphRequest) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *PushDocGraphRequest) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *PushDocGraphRequest) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - return len(dAtA) - i, nil -} - -func (m *PushDocGraphReply) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *PushDocGraphReply) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *PushDocGraphReply) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - return len(dAtA) - i, nil -} - -func (m *GetLogRequest) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetLogRequest) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *GetLogRequest) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - return len(dAtA) - i, nil -} - -func (m *GetLogReply) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetLogReply) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *GetLogReply) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - return len(dAtA) - i, nil -} - -func (m *PushLogRequest_Body) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *PushLogRequest_Body) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *PushLogRequest_Body) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= 
len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - if m.Log != nil { - size, err := m.Log.MarshalToSizedBufferVT(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = protohelpers.EncodeVarint(dAtA, i, uint64(size)) - i-- - dAtA[i] = 0x32 - } - if len(m.Creator) > 0 { - i -= len(m.Creator) - copy(dAtA[i:], m.Creator) - i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.Creator))) - i-- - dAtA[i] = 0x22 - } - if len(m.SchemaRoot) > 0 { - i -= len(m.SchemaRoot) - copy(dAtA[i:], m.SchemaRoot) - i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.SchemaRoot))) - i-- - dAtA[i] = 0x1a - } - if len(m.Cid) > 0 { - i -= len(m.Cid) - copy(dAtA[i:], m.Cid) - i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.Cid))) - i-- - dAtA[i] = 0x12 - } - if len(m.DocID) > 0 { - i -= len(m.DocID) - copy(dAtA[i:], m.DocID) - i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.DocID))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *PushLogRequest) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *PushLogRequest) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *PushLogRequest) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - if m.Body != nil { - size, err := m.Body.MarshalToSizedBufferVT(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = protohelpers.EncodeVarint(dAtA, i, uint64(size)) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *FetchEncryptionKeyRequest) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *FetchEncryptionKeyRequest) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *FetchEncryptionKeyRequest) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - if len(m.EphemeralPublicKey) > 0 { - i -= len(m.EphemeralPublicKey) - copy(dAtA[i:], m.EphemeralPublicKey) - i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.EphemeralPublicKey))) - i-- - dAtA[i] = 0x12 - } - if len(m.Links) > 0 { - for iNdEx := len(m.Links) - 1; iNdEx >= 0; iNdEx-- { - i -= len(m.Links[iNdEx]) - copy(dAtA[i:], m.Links[iNdEx]) - i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.Links[iNdEx]))) - i-- - dAtA[i] = 0xa - } - } - return len(dAtA) - i, nil -} - -func (m *FetchEncryptionKeyReply) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *FetchEncryptionKeyReply) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *FetchEncryptionKeyReply) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - 
if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - if len(m.EphemeralPublicKey) > 0 { - i -= len(m.EphemeralPublicKey) - copy(dAtA[i:], m.EphemeralPublicKey) - i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.EphemeralPublicKey))) - i-- - dAtA[i] = 0x1a - } - if len(m.Blocks) > 0 { - for iNdEx := len(m.Blocks) - 1; iNdEx >= 0; iNdEx-- { - i -= len(m.Blocks[iNdEx]) - copy(dAtA[i:], m.Blocks[iNdEx]) - i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.Blocks[iNdEx]))) - i-- - dAtA[i] = 0x12 - } - } - if len(m.Links) > 0 { - for iNdEx := len(m.Links) - 1; iNdEx >= 0; iNdEx-- { - i -= len(m.Links[iNdEx]) - copy(dAtA[i:], m.Links[iNdEx]) - i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.Links[iNdEx]))) - i-- - dAtA[i] = 0xa - } - } - return len(dAtA) - i, nil -} - -func (m *GetHeadLogRequest) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetHeadLogRequest) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *GetHeadLogRequest) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - return len(dAtA) - i, nil -} - -func (m *PushLogReply) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *PushLogReply) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *PushLogReply) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - return len(dAtA) - i, nil -} - -func (m *GetHeadLogReply) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetHeadLogReply) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *GetHeadLogReply) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - return len(dAtA) - i, nil -} - -func (m *Log) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Block) - if l > 0 { - n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) - } - n += len(m.unknownFields) - return n -} - -func (m *GetDocGraphRequest) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - n += len(m.unknownFields) - return n -} - -func (m *GetDocGraphReply) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - n += len(m.unknownFields) - return n -} - -func (m *PushDocGraphRequest) SizeVT() (n int) { - 
if m == nil { - return 0 - } - var l int - _ = l - n += len(m.unknownFields) - return n -} - -func (m *PushDocGraphReply) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - n += len(m.unknownFields) - return n -} - -func (m *GetLogRequest) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - n += len(m.unknownFields) - return n -} - -func (m *GetLogReply) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - n += len(m.unknownFields) - return n -} - -func (m *PushLogRequest_Body) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.DocID) - if l > 0 { - n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) - } - l = len(m.Cid) - if l > 0 { - n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) - } - l = len(m.SchemaRoot) - if l > 0 { - n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) - } - l = len(m.Creator) - if l > 0 { - n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) - } - if m.Log != nil { - l = m.Log.SizeVT() - n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) - } - n += len(m.unknownFields) - return n -} - -func (m *PushLogRequest) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if m.Body != nil { - l = m.Body.SizeVT() - n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) - } - n += len(m.unknownFields) - return n -} - -func (m *FetchEncryptionKeyRequest) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if len(m.Links) > 0 { - for _, b := range m.Links { - l = len(b) - n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) - } - } - l = len(m.EphemeralPublicKey) - if l > 0 { - n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) - } - n += len(m.unknownFields) - return n -} - -func (m *FetchEncryptionKeyReply) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if len(m.Links) > 0 { - for _, b := range m.Links { - l = len(b) - n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) - } - } - if len(m.Blocks) > 0 { - for _, b := range m.Blocks { - l = len(b) - n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) - } - } - l = len(m.EphemeralPublicKey) - if l > 0 { - n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) - } - n += len(m.unknownFields) - return n -} - -func (m *GetHeadLogRequest) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - n += len(m.unknownFields) - return n -} - -func (m *PushLogReply) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - n += len(m.unknownFields) - return n -} - -func (m *GetHeadLogReply) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - n += len(m.unknownFields) - return n -} - -func (m *Log) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: Log: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: Log: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Block", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx 
>= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return protohelpers.ErrInvalidLength - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return protohelpers.ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Block = append(m.Block[:0], dAtA[iNdEx:postIndex]...) - if m.Block == nil { - m.Block = []byte{} - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := protohelpers.Skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return protohelpers.ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetDocGraphRequest) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetDocGraphRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetDocGraphRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := protohelpers.Skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return protohelpers.ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetDocGraphReply) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetDocGraphReply: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetDocGraphReply: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := protohelpers.Skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return protohelpers.ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *PushDocGraphRequest) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: PushDocGraphRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: PushDocGraphRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := protohelpers.Skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return protohelpers.ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *PushDocGraphReply) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: PushDocGraphReply: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: PushDocGraphReply: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := protohelpers.Skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return protohelpers.ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetLogRequest) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetLogRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetLogRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := protohelpers.Skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return protohelpers.ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetLogReply) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetLogReply: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetLogReply: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := protohelpers.Skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return protohelpers.ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *PushLogRequest_Body) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: PushLogRequest_Body: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: PushLogRequest_Body: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field DocID", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return protohelpers.ErrInvalidLength - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return protohelpers.ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.DocID = append(m.DocID[:0], dAtA[iNdEx:postIndex]...) - if m.DocID == nil { - m.DocID = []byte{} - } - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Cid", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return protohelpers.ErrInvalidLength - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return protohelpers.ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Cid = append(m.Cid[:0], dAtA[iNdEx:postIndex]...) 
- if m.Cid == nil { - m.Cid = []byte{} - } - iNdEx = postIndex - case 3: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field SchemaRoot", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return protohelpers.ErrInvalidLength - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return protohelpers.ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.SchemaRoot = append(m.SchemaRoot[:0], dAtA[iNdEx:postIndex]...) - if m.SchemaRoot == nil { - m.SchemaRoot = []byte{} - } - iNdEx = postIndex - case 4: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Creator", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return protohelpers.ErrInvalidLength - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return protohelpers.ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Creator = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 6: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Log", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return protohelpers.ErrInvalidLength - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return protohelpers.ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.Log == nil { - m.Log = &Log{} - } - if err := m.Log.UnmarshalVT(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := protohelpers.Skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return protohelpers.ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *PushLogRequest) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: PushLogRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: PushLogRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Body", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return protohelpers.ErrInvalidLength - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return protohelpers.ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.Body == nil { - m.Body = &PushLogRequest_Body{} - } - if err := m.Body.UnmarshalVT(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := protohelpers.Skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return protohelpers.ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *FetchEncryptionKeyRequest) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: FetchEncryptionKeyRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: FetchEncryptionKeyRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Links", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return protohelpers.ErrInvalidLength - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return protohelpers.ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Links = append(m.Links, make([]byte, postIndex-iNdEx)) - copy(m.Links[len(m.Links)-1], dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field EphemeralPublicKey", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return protohelpers.ErrInvalidLength - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return protohelpers.ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.EphemeralPublicKey = append(m.EphemeralPublicKey[:0], dAtA[iNdEx:postIndex]...) - if m.EphemeralPublicKey == nil { - m.EphemeralPublicKey = []byte{} - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := protohelpers.Skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return protohelpers.ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *FetchEncryptionKeyReply) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: FetchEncryptionKeyReply: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: FetchEncryptionKeyReply: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Links", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return protohelpers.ErrInvalidLength - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return protohelpers.ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Links = append(m.Links, make([]byte, postIndex-iNdEx)) - copy(m.Links[len(m.Links)-1], dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Blocks", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return protohelpers.ErrInvalidLength - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return protohelpers.ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Blocks = append(m.Blocks, make([]byte, postIndex-iNdEx)) - copy(m.Blocks[len(m.Blocks)-1], dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 3: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field EphemeralPublicKey", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return protohelpers.ErrInvalidLength - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return protohelpers.ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.EphemeralPublicKey = append(m.EphemeralPublicKey[:0], dAtA[iNdEx:postIndex]...) - if m.EphemeralPublicKey == nil { - m.EphemeralPublicKey = []byte{} - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := protohelpers.Skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return protohelpers.ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetHeadLogRequest) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetHeadLogRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetHeadLogRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := protohelpers.Skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return protohelpers.ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *PushLogReply) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: PushLogReply: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: PushLogReply: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := protohelpers.Skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return protohelpers.ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetHeadLogReply) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return protohelpers.ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetHeadLogReply: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetHeadLogReply: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := protohelpers.Skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return protohelpers.ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} diff --git a/net/peer.go b/net/peer.go index 24976ed388..7b855a1ca2 100644 --- a/net/peer.go +++ b/net/peer.go @@ -38,7 +38,6 @@ import ( "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/event" corenet "github.com/sourcenetwork/defradb/internal/core/net" - pb "github.com/sourcenetwork/defradb/net/pb" ) // Peer is a DefraDB Peer node which exposes all the LibP2P host/peer functionality @@ -163,7 +162,7 @@ func NewPeer( // register the P2P gRPC server go func() { - pb.RegisterServiceServer(p.p2pRPC, p.server) + registerServiceServer(p.p2pRPC, p.server) if err := p.p2pRPC.Serve(p2pListener); err != nil && !errors.Is(err, grpc.ErrServerStopped) { log.ErrorE("Fatal P2P RPC server error", err) @@ -280,17 +279,12 @@ func (p *Peer) RegisterNewDocument( return err } - // publish log - req := &pb.PushLogRequest{ - Body: &pb.PushLogRequest_Body{ - DocID: []byte(docID.String()), - Cid: c.Bytes(), - SchemaRoot: []byte(schemaRoot), - Creator: p.host.ID().String(), - Log: &pb.Log{ - Block: rawBlock, - }, - }, + req := &pushLogRequest{ + DocID: docID.String(), + CID: c.Bytes(), + SchemaRoot: schemaRoot, + Creator: p.host.ID().String(), + Block: rawBlock, } return p.server.publishLog(ctx, schemaRoot, req) @@ -315,26 +309,21 @@ func (p *Peer) handleDocCreateLog(evt event.Update) error { } func (p *Peer) handleDocUpdateLog(evt event.Update) error { - docID, err := client.NewDocIDFromString(evt.DocID) + // push to each peer (replicator) + p.pushLogToReplicators(evt) + + _, err := client.NewDocIDFromString(evt.DocID) if err != nil { return NewErrFailedToGetDocID(err) } - body := &pb.PushLogRequest_Body{ - DocID: []byte(docID.String()), - Cid: evt.Cid.Bytes(), - SchemaRoot: []byte(evt.SchemaRoot), + req := &pushLogRequest{ + DocID: evt.DocID, + CID: evt.Cid.Bytes(), + SchemaRoot: evt.SchemaRoot, Creator: p.host.ID().String(), - Log: &pb.Log{ - Block: evt.Block, - }, + Block: evt.Block, } - req := &pb.PushLogRequest{ - Body: body, - } - - // push to each peer (replicator) - p.pushLogToReplicators(evt) if err := p.server.publishLog(p.ctx, evt.DocID, req); err != nil { return NewErrPublishingToDocIDTopic(err, evt.Cid.String(), evt.DocID) diff --git a/net/server.go b/net/server.go index 42ff15f5fb..2e4939c77f 100644 --- a/net/server.go +++ b/net/server.go @@ -17,6 +17,7 @@ import ( "fmt" "sync" + "github.com/fxamacker/cbor/v2" cid "github.com/ipfs/go-cid" libpeer "github.com/libp2p/go-libp2p/core/peer" "github.com/libp2p/go-libp2p/core/peerstore" @@ -25,13 +26,11 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/credentials/insecure" grpcpeer "google.golang.org/grpc/peer" - "google.golang.org/protobuf/proto" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/event" coreblock "github.com/sourcenetwork/defradb/internal/core/block" - pb "github.com/sourcenetwork/defradb/net/pb" ) // server is the request/response instance for all P2P RPC communication. 
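The `pushLogRequest` value constructed in the peer.go hunks above replaces the generated `pb.PushLogRequest`/`pb.PushLogRequest_Body` pair, but its definition is not part of these hunks. Below is a sketch of the shape implied by the call sites in this patch; field names and types are inferred from usage, and the real definition may differ in detail (for example, it may carry CBOR struct tags):

```go
// pushLogRequest is a plain-struct stand-in for the protobuf
// PushLogRequest: the nested Body message is flattened, and string
// fields are no longer forced through []byte.
type pushLogRequest struct {
	DocID      string // parsed back via client.NewDocIDFromString
	CID        []byte // head CID bytes, recovered via cid.Cast
	SchemaRoot string // used directly as the pubsub topic key
	Creator    string // peer ID of the log creator, decoded via libpeer.Decode
	Block      []byte // raw block bytes, parsed by coreblock.GetFromBytes
}
```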
@@ -49,8 +48,6 @@ type server struct { mu sync.Mutex conns map[libpeer.ID]*grpc.ClientConn - - pb.UnimplementedServiceServer } // pubsubTopic is a wrapper of rpc.Topic to be able to track if the topic has @@ -74,6 +71,7 @@ func newServer(p *Peer, opts ...grpc.DialOption) (*server, error) { defaultOpts := []grpc.DialOption{ s.getLibp2pDialer(), grpc.WithTransportCredentials(cred), + grpc.WithDefaultCallOptions(grpc.CallContentSubtype(cborCodecName)), } s.opts = append(defaultOpts, opts...) @@ -84,43 +82,43 @@ func newServer(p *Peer, opts ...grpc.DialOption) (*server, error) { // GetDocGraph receives a get graph request func (s *server) GetDocGraph( ctx context.Context, - req *pb.GetDocGraphRequest, -) (*pb.GetDocGraphReply, error) { + req *getDocGraphRequest, +) (*getDocGraphReply, error) { return nil, nil } // PushDocGraph receives a push graph request func (s *server) PushDocGraph( ctx context.Context, - req *pb.PushDocGraphRequest, -) (*pb.PushDocGraphReply, error) { + req *pushDocGraphRequest, +) (*pushDocGraphReply, error) { return nil, nil } // GetLog receives a get log request -func (s *server) GetLog(ctx context.Context, req *pb.GetLogRequest) (*pb.GetLogReply, error) { +func (s *server) GetLog(ctx context.Context, req *getLogRequest) (*getLogReply, error) { return nil, nil } // PushLog receives a push log request -func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushLogReply, error) { +func (s *server) PushLog(ctx context.Context, req *pushLogRequest) (*pushLogReply, error) { pid, err := peerIDFromContext(ctx) if err != nil { return nil, err } - headCID, err := cid.Cast(req.Body.Cid) + headCID, err := cid.Cast(req.CID) if err != nil { return nil, err } - docID, err := client.NewDocIDFromString(string(req.Body.DocID)) + docID, err := client.NewDocIDFromString(req.DocID) if err != nil { return nil, err } - byPeer, err := libpeer.Decode(req.Body.Creator) + byPeer, err := libpeer.Decode(req.Creator) if err != nil { return nil, err } - block, err := coreblock.GetFromBytes(req.Body.Log.Block) + block, err := coreblock.GetFromBytes(req.Block) if err != nil { return nil, err } @@ -145,7 +143,7 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL // Once processed, subscribe to the DocID topic on the pubsub network unless we already // subscribed to the collection. 
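The new `grpc.CallContentSubtype(cborCodecName)` dial option above implies a CBOR codec registered with gRPC elsewhere in this patch; the registration itself is not shown in these hunks. A minimal sketch of such a codec, assuming it simply wraps `fxamacker/cbor/v2` and that the content subtype is named `"cbor"` (both are inferences, not quotes from the patch):

```go
package net

import (
	"github.com/fxamacker/cbor/v2"
	"google.golang.org/grpc/encoding"
)

// cborCodecName is assumed to be the gRPC content subtype selected by
// grpc.CallContentSubtype above; the actual constant lives elsewhere
// in this patch.
const cborCodecName = "cbor"

// cborCodec adapts fxamacker/cbor to the grpc/encoding.Codec interface,
// so request/reply structs no longer need generated protobuf marshalers.
type cborCodec struct{}

func (cborCodec) Marshal(v any) ([]byte, error)      { return cbor.Marshal(v) }
func (cborCodec) Unmarshal(data []byte, v any) error { return cbor.Unmarshal(data, v) }
func (cborCodec) Name() string                       { return cborCodecName }

func init() {
	// Makes the codec resolvable by name for both clients and servers.
	encoding.RegisterCodec(cborCodec{})
}
```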
- if !s.hasPubSubTopic(string(req.Body.SchemaRoot)) { + if !s.hasPubSubTopic(req.SchemaRoot) { err = s.addPubSubTopic(docID.String(), true, nil) if err != nil { return nil, err @@ -157,17 +155,17 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL ByPeer: byPeer, FromPeer: pid, Cid: headCID, - SchemaRoot: string(req.Body.SchemaRoot), + SchemaRoot: req.SchemaRoot, })) - return &pb.PushLogReply{}, nil + return &pushLogReply{}, nil } // GetHeadLog receives a get head log request func (s *server) GetHeadLog( ctx context.Context, - req *pb.GetHeadLogRequest, -) (*pb.GetHeadLogReply, error) { + req *getHeadLogRequest, +) (*getHeadLogReply, error) { return nil, nil } @@ -267,7 +265,7 @@ func (s *server) removeAllPubsubTopics() error { // publishLog publishes the given PushLogRequest object on the PubSub network via the // corresponding topic -func (s *server) publishLog(ctx context.Context, topic string, req *pb.PushLogRequest) error { +func (s *server) publishLog(ctx context.Context, topic string, req *pushLogRequest) error { log.InfoContext(ctx, "Publish log", corelog.String("PeerID", s.peer.PeerID().String()), corelog.String("Topic", topic)) @@ -286,7 +284,7 @@ func (s *server) publishLog(ctx context.Context, topic string, req *pb.PushLogRe return s.publishLog(ctx, topic, req) } - data, err := req.MarshalVT() + data, err := cbor.Marshal(req) if err != nil { return errors.Wrap("failed to marshal pubsub message", err) } @@ -305,8 +303,8 @@ func (s *server) pubSubMessageHandler(from libpeer.ID, topic string, msg []byte) corelog.Any("SenderId", from), corelog.String("Topic", topic)) - req := new(pb.PushLogRequest) - if err := proto.Unmarshal(msg, req); err != nil { + req := &pushLogRequest{} + if err := cbor.Unmarshal(msg, req); err != nil { log.ErrorE("Failed to unmarshal pubsub message %s", err) return nil, err } diff --git a/net/server_test.go b/net/server_test.go index 11a13604b1..4dc6428205 100644 --- a/net/server_test.go +++ b/net/server_test.go @@ -22,7 +22,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/internal/core" - net_pb "github.com/sourcenetwork/defradb/net/pb" ) func TestNewServerSimple(t *testing.T) { @@ -39,7 +38,7 @@ func TestGetDocGraph(t *testing.T) { db, p := newTestPeer(ctx, t) defer db.Close() defer p.Close() - r, err := p.server.GetDocGraph(ctx, &net_pb.GetDocGraphRequest{}) + r, err := p.server.GetDocGraph(ctx, &getDocGraphRequest{}) require.Nil(t, r) require.Nil(t, err) } @@ -49,7 +48,7 @@ func TestPushDocGraph(t *testing.T) { db, p := newTestPeer(ctx, t) defer db.Close() defer p.Close() - r, err := p.server.PushDocGraph(ctx, &net_pb.PushDocGraphRequest{}) + r, err := p.server.PushDocGraph(ctx, &pushDocGraphRequest{}) require.Nil(t, r) require.Nil(t, err) } @@ -59,7 +58,7 @@ func TestGetLog(t *testing.T) { db, p := newTestPeer(ctx, t) defer db.Close() defer p.Close() - r, err := p.server.GetLog(ctx, &net_pb.GetLogRequest{}) + r, err := p.server.GetLog(ctx, &getLogRequest{}) require.Nil(t, r) require.Nil(t, err) } @@ -69,7 +68,7 @@ func TestGetHeadLog(t *testing.T) { db, p := newTestPeer(ctx, t) defer db.Close() defer p.Close() - r, err := p.server.GetHeadLog(ctx, &net_pb.GetHeadLogRequest{}) + r, err := p.server.GetHeadLog(ctx, &getHeadLogRequest{}) require.Nil(t, r) require.Nil(t, err) } @@ -126,16 +125,12 @@ func TestPushLog(t *testing.T) { b, err := db.Blockstore().AsIPLDStorage().Get(ctx, headCID.KeyString()) require.NoError(t, err) - _, err = 
p.server.PushLog(ctx, &net_pb.PushLogRequest{ - Body: &net_pb.PushLogRequest_Body{ - DocID: []byte(doc.ID().String()), - Cid: headCID.Bytes(), - SchemaRoot: []byte(col.SchemaRoot()), - Creator: p.PeerID().String(), - Log: &net_pb.Log{ - Block: b, - }, - }, + _, err = p.server.PushLog(ctx, &pushLogRequest{ + DocID: doc.ID().String(), + CID: headCID.Bytes(), + SchemaRoot: col.SchemaRoot(), + Creator: p.PeerID().String(), + Block: b, }) require.NoError(t, err) } From 79eb12cc56d95348501c2e5ceb4aa344e4b6d320 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 25 Sep 2024 08:28:11 -0700 Subject: [PATCH 40/71] feat: GraphQL fragments (#3066) ## Relevant issue(s) Resolves #3047 ## Description This PR enables support for GraphQL fragments. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? Added integration tests Specify the platform(s) on which this was tested: - MacOS --- go.mod | 2 +- go.sum | 4 +- internal/request/graphql/parser/mutation.go | 19 +- internal/request/graphql/parser/query.go | 26 +- internal/request/graphql/parser/request.go | 48 ++- .../request/graphql/parser/subscription.go | 19 +- .../query/simple/with_fragments_test.go | 348 ++++++++++++++++++ 7 files changed, 418 insertions(+), 48 deletions(-) create mode 100644 tests/integration/query/simple/with_fragments_test.go diff --git a/go.mod b/go.mod index e7edd3cdcd..43acdd72ca 100644 --- a/go.mod +++ b/go.mod @@ -48,7 +48,7 @@ require ( github.com/sourcenetwork/badger/v4 v4.2.1-0.20231113215945-a63444ca5276 github.com/sourcenetwork/corelog v0.0.8 github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.14 - github.com/sourcenetwork/graphql-go v0.7.10-0.20240910200541-550a51c57c7d + github.com/sourcenetwork/graphql-go v0.7.10-0.20240924172903-a4088313b20d github.com/sourcenetwork/immutable v0.3.0 github.com/sourcenetwork/sourcehub v0.2.1-0.20240704194128-f43f5e427274 github.com/spf13/cobra v1.8.1 diff --git a/go.sum b/go.sum index ed7cf49e3d..83aa313637 100644 --- a/go.sum +++ b/go.sum @@ -1398,8 +1398,8 @@ github.com/sourcenetwork/corelog v0.0.8 h1:jCo0mFBpWrfhUCGzzN3uUtPGyQv3jnITdPO1s github.com/sourcenetwork/corelog v0.0.8/go.mod h1:cMabHgs3kARgYTQeQYSOmaGGP8XMU6sZrHd8LFrL3zA= github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.14 h1:620zKV4rOn7U5j/WsPkk4SFj0z9/pVV4bBx0BpZQgro= github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.14/go.mod h1:jUoQv592uUX1u7QBjAY4C+l24X9ArhPfifOqXpDHz4U= -github.com/sourcenetwork/graphql-go v0.7.10-0.20240910200541-550a51c57c7d h1:gpBJx/mKmpelxZfHT4AYhPYFgSy8DKp/Ca+bBzIIy2A= -github.com/sourcenetwork/graphql-go v0.7.10-0.20240910200541-550a51c57c7d/go.mod h1:rkahXkgRH/3vZErN1Bx+qt1+w+CV5fgaJyKKWgISe4U= +github.com/sourcenetwork/graphql-go v0.7.10-0.20240924172903-a4088313b20d h1:P5y4g1ONf8HK36L86/8zDYjY7rRLM7AaqlQDRHOBMH8= +github.com/sourcenetwork/graphql-go v0.7.10-0.20240924172903-a4088313b20d/go.mod h1:rkahXkgRH/3vZErN1Bx+qt1+w+CV5fgaJyKKWgISe4U= github.com/sourcenetwork/immutable v0.3.0 h1:gHPtGvLrTBTK5YpDAhMU+u+S8v1F6iYmc3nbZLryMdc= github.com/sourcenetwork/immutable 
v0.3.0/go.mod h1:GD7ceuh/HD7z6cdIwzKK2ctzgZ1qqYFJpsFp+8qYnbI= github.com/sourcenetwork/raccoondb v0.2.1-0.20240606193653-1e91e9be9234 h1:8dA9bVC1A0ChJygtsUfNsek3oR0GnwpLoYpmEo4t2mk= diff --git a/internal/request/graphql/parser/mutation.go b/internal/request/graphql/parser/mutation.go index b7ce4a1723..71942f1223 100644 --- a/internal/request/graphql/parser/mutation.go +++ b/internal/request/graphql/parser/mutation.go @@ -32,24 +32,21 @@ var ( // 'mutation' operations, which there may be multiple of. func parseMutationOperationDefinition( exe *gql.ExecutionContext, - def *ast.OperationDefinition, + collectedFields map[string][]*ast.Field, ) (*request.OperationDefinition, error) { - qdef := &request.OperationDefinition{ - Selections: make([]request.Selection, len(def.SelectionSet.Selections)), - } - - for i, selection := range def.SelectionSet.Selections { - switch node := selection.(type) { - case *ast.Field: + var selections []request.Selection + for _, fields := range collectedFields { + for _, node := range fields { mut, err := parseMutation(exe, exe.Schema.MutationType(), node) if err != nil { return nil, err } - - qdef.Selections[i] = mut + selections = append(selections, mut) } } - return qdef, nil + return &request.OperationDefinition{ + Selections: selections, + }, nil } // @todo: Create separate mutation parse functions diff --git a/internal/request/graphql/parser/query.go b/internal/request/graphql/parser/query.go index 871580f2f2..e0248768ab 100644 --- a/internal/request/graphql/parser/query.go +++ b/internal/request/graphql/parser/query.go @@ -22,24 +22,20 @@ import ( // 'query' operations, which there may be multiple of. func parseQueryOperationDefinition( exe *gql.ExecutionContext, - def *ast.OperationDefinition, + collectedFields map[string][]*ast.Field, ) (*request.OperationDefinition, []error) { - qdef := &request.OperationDefinition{ - Selections: make([]request.Selection, len(def.SelectionSet.Selections)), - } - - for i, selection := range def.SelectionSet.Selections { - var parsedSelection request.Selection - switch node := selection.(type) { - case *ast.Field: - if _, isCommitQuery := request.CommitQueries[node.Name.Value]; isCommitQuery { + var selections []request.Selection + for name, fields := range collectedFields { + for _, node := range fields { + var parsedSelection request.Selection + if _, isCommitQuery := request.CommitQueries[name]; isCommitQuery { parsed, err := parseCommitSelect(exe, exe.Schema.QueryType(), node) if err != nil { return nil, []error{err} } parsedSelection = parsed - } else if _, isAggregate := request.Aggregates[node.Name.Value]; isAggregate { + } else if _, isAggregate := request.Aggregates[name]; isAggregate { parsed, err := parseAggregate(exe, exe.Schema.QueryType(), node) if err != nil { return nil, []error{err} @@ -72,11 +68,13 @@ func parseQueryOperationDefinition( parsedSelection = parsed } - - qdef.Selections[i] = parsedSelection + selections = append(selections, parsedSelection) } } - return qdef, nil + + return &request.OperationDefinition{ + Selections: selections, + }, nil } // @todo: Create separate select parse functions diff --git a/internal/request/graphql/parser/request.go b/internal/request/graphql/parser/request.go index bc5a8f9510..7545957ae3 100644 --- a/internal/request/graphql/parser/request.go +++ b/internal/request/graphql/parser/request.go @@ -11,6 +11,8 @@ package parser import ( + "fmt" + gql "github.com/sourcenetwork/graphql-go" "github.com/sourcenetwork/graphql-go/language/ast" 
"github.com/sourcenetwork/immutable" @@ -36,6 +38,15 @@ func ParseRequest(schema gql.Schema, doc *ast.Document, options *client.GQLOptio if err != nil { return nil, []error{err} } + operationType, err := gql.GetOperationRootType(exe.Schema, exe.Operation) + if err != nil { + return nil, []error{err} + } + collectedFields := gql.CollectFields(gql.CollectFieldsParams{ + ExeContext: exe, + RuntimeType: operationType, + SelectionSet: exe.Operation.GetSelectionSet(), + }) r := &request.Request{ Queries: make([]*request.OperationDefinition, 0), @@ -46,7 +57,7 @@ func ParseRequest(schema gql.Schema, doc *ast.Document, options *client.GQLOptio astOpDef := exe.Operation.(*ast.OperationDefinition) switch exe.Operation.GetOperation() { case ast.OperationTypeQuery: - parsedQueryOpDef, errs := parseQueryOperationDefinition(exe, exe.Operation.(*ast.OperationDefinition)) + parsedQueryOpDef, errs := parseQueryOperationDefinition(exe, collectedFields) if errs != nil { return nil, errs } @@ -59,7 +70,7 @@ func ParseRequest(schema gql.Schema, doc *ast.Document, options *client.GQLOptio r.Queries = append(r.Queries, parsedQueryOpDef) case ast.OperationTypeMutation: - parsedMutationOpDef, err := parseMutationOperationDefinition(exe, astOpDef) + parsedMutationOpDef, err := parseMutationOperationDefinition(exe, collectedFields) if err != nil { return nil, []error{err} } @@ -73,7 +84,7 @@ func ParseRequest(schema gql.Schema, doc *ast.Document, options *client.GQLOptio r.Mutations = append(r.Mutations, parsedMutationOpDef) case ast.OperationTypeSubscription: - parsedSubscriptionOpDef, err := parseSubscriptionOperationDefinition(exe, astOpDef) + parsedSubscriptionOpDef, err := parseSubscriptionOperationDefinition(exe, collectedFields) if err != nil { return nil, []error{err} } @@ -167,26 +178,45 @@ func parseSelectFields( parent *gql.Object, fields *ast.SelectionSet, ) ([]request.Selection, error) { - selections := make([]request.Selection, len(fields.Selections)) - // parse field selections - for i, selection := range fields.Selections { + var selections []request.Selection + for _, selection := range fields.Selections { switch node := selection.(type) { + case *ast.InlineFragment: + selection, err := parseSelectFields(exe, parent, node.GetSelectionSet()) + if err != nil { + return nil, err + } + selections = append(selections, selection...) + + case *ast.FragmentSpread: + fragment, ok := exe.Fragments[node.Name.Value] + if !ok { + return nil, fmt.Errorf("fragment not found %s", node.Name.Value) + } + selection, err := parseSelectFields(exe, parent, fragment.GetSelectionSet()) + if err != nil { + return nil, err + } + selections = append(selections, selection...) 
+ case *ast.Field: + var selection request.Selection if _, isAggregate := request.Aggregates[node.Name.Value]; isAggregate { s, err := parseAggregate(exe, parent, node) if err != nil { return nil, err } - selections[i] = s + selection = s } else if node.SelectionSet == nil { // regular field - selections[i] = parseField(node) + selection = parseField(node) } else { // sub type with extra fields s, err := parseSelect(exe, parent, node) if err != nil { return nil, err } - selections[i] = s + selection = s } + selections = append(selections, selection) } } diff --git a/internal/request/graphql/parser/subscription.go b/internal/request/graphql/parser/subscription.go index 82aca83302..2d5beb5430 100644 --- a/internal/request/graphql/parser/subscription.go +++ b/internal/request/graphql/parser/subscription.go @@ -22,24 +22,21 @@ import ( // 'subcription' operations, which there may be multiple of. func parseSubscriptionOperationDefinition( exe *gql.ExecutionContext, - def *ast.OperationDefinition, + collectedFields map[string][]*ast.Field, ) (*request.OperationDefinition, error) { - sdef := &request.OperationDefinition{ - Selections: make([]request.Selection, len(def.SelectionSet.Selections)), - } - - for i, selection := range def.SelectionSet.Selections { - switch node := selection.(type) { - case *ast.Field: + var selections []request.Selection + for _, fields := range collectedFields { + for _, node := range fields { sub, err := parseSubscription(exe, node) if err != nil { return nil, err } - - sdef.Selections[i] = sub + selections = append(selections, sub) } } - return sdef, nil + return &request.OperationDefinition{ + Selections: selections, + }, nil } // parseSubscription parses a typed subscription field diff --git a/tests/integration/query/simple/with_fragments_test.go b/tests/integration/query/simple/with_fragments_test.go new file mode 100644 index 0000000000..8b353aaa06 --- /dev/null +++ b/tests/integration/query/simple/with_fragments_test.go @@ -0,0 +1,348 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
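For orientation, the parser refactor above threads a fragment-flattened field map through the operation parsers instead of the raw AST selection set. The snippet below restates the new `CollectFields` call from request.go with illustrative comments; the query shown in the comment is an example, not code from this patch:

```go
// Given a request such as:
//
//	query { Users { ...UserInfo } }
//	fragment UserInfo on Users { Name Age }
//
// CollectFields resolves top-level fragment spreads and inline fragments
// and groups the surviving fields by response key, so the query, mutation,
// and subscription parsers receive roughly {"Users": [<Users *ast.Field>]}
// and never see fragment nodes at the operation level. Fragments nested
// below the top level are resolved recursively by parseSelectFields via
// exe.Fragments, as in the request.go hunk above.
collectedFields := gql.CollectFields(gql.CollectFieldsParams{
	ExeContext:   exe,
	RuntimeType:  operationType,
	SelectionSet: exe.Operation.GetSelectionSet(),
})
```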
+ +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + + "github.com/sourcenetwork/immutable" +) + +func TestQuerySimple_WithFragments_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with fragments succeeds", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 40 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 21 + }`, + }, + testUtils.Request{ + Request: `query { + firstUser: Users(limit: 1) { + ...UserInfo + } + lastUser: Users(limit: 1, offset: 1) { + ...UserInfo + } + } + fragment UserInfo on Users { + Name + Age + }`, + Results: map[string]any{ + "firstUser": []map[string]any{ + { + "Name": "Bob", + "Age": int64(21), + }, + }, + "lastUser": []map[string]any{ + { + "Name": "Alice", + "Age": int64(40), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithNestedFragments_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with nested fragment succeeds", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 40 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 21 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + ...UserWithNameAndAge + } + } + fragment UserWithName on Users { + Name + } + fragment UserWithNameAndAge on Users { + ...UserWithName + Age + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "Bob", + "Age": int64(21), + }, + { + "Name": "Alice", + "Age": int64(40), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithFragmentSpreadAndSelect_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with fragment spread and select", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 40 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 21 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + Name + ...UserAge + } + } + fragment UserAge on Users { + Age + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "Bob", + "Age": int64(21), + }, + { + "Name": "Alice", + "Age": int64(40), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithMissingFragment_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with missing fragment returns error", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 40 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 21 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + ...UserInfo + } + }`, + ExpectedError: `Unknown fragment "UserInfo".`, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithFragmentWithInvalidField_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with fragment with invalid field returns error", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 40 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 21 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + ...UserInvalid + } + } + fragment UserInvalid on Users { + Score + }`, + ExpectedError: `Cannot query field "Score" on type "Users".`, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithFragmentWithAggregate_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with 
fragment with aggregate", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 40 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 21 + }`, + }, + testUtils.Request{ + Request: `query { + ...UserCount + } + fragment UserCount on Query { + _count(Users: {}) + }`, + Results: map[string]any{ + "_count": int64(2), + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithFragmentWithVariables_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with fragment with aggregate", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 40 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 21 + }`, + }, + testUtils.Request{ + Variables: immutable.Some(map[string]any{ + "filter": map[string]any{ + "Age": map[string]any{ + "_gt": int64(30), + }, + }, + }), + Request: `query($filter: UsersFilterArg!) { + ...UserFilter + } + fragment UserFilter on Query { + Users(filter: $filter) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "Alice", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithInlineFragment_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with inline fragment", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 40 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 21 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + ... on Users { + Name + Age + } + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "Bob", + "Age": int64(21), + }, + { + "Name": "Alice", + "Age": int64(40), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} From 20596e3f6beaa51e70457dca45a7276a5439cd03 Mon Sep 17 00:00:00 2001 From: Shahzad Lone Date: Thu, 26 Sep 2024 03:28:43 -0400 Subject: [PATCH 41/71] fix(i): Sort out invalid testing framework node indexing (#3068) ## Relevant issue(s) Resolves #3065 ## Description The main bug was only visible on sourcehub acp using http, due to the identity being copied with the audience value of another node's host failing authentication (the bearer tokens should be unique using correct node's audience). The biggest issue was the way we use `getNodes` and `getNodeCollections`. I would be in favor of completely removing them as they are more troublesome than the utility they provide. - First commit documents the bug - Some utility functions were overwriting and producing the wrong node index - Forbidden bug happening on sourcehub<>http test run: https://github.com/sourcenetwork/defradb/actions/runs/10930293192/job/30342883535?pr=2907 ### Future: - Resolve import/export documentation and implementation if different (#3067) - Should likely clean this test utils up and make helper methods to avoid code duplication (https://github.com/sourcenetwork/defradb/issues/3069) - Likely should remove all usages of `getNodeCollections` (https://github.com/sourcenetwork/defradb/issues/3069) - Likely should remove all usages of `getNodes` (https://github.com/sourcenetwork/defradb/issues/3069) ## How has this been tested? 
- Very painfully haha, had to install Linux bare-metal to investigate the first bug (sourcehub doesn't build on wsl for me) that was only occurring on sourcehub acp using http, due to the identity being copied with the audience value of another node the way we use `getNodes` and `getNodeCollections` --- tests/integration/acp/p2p/create_test.go | 132 ++++++++++ tests/integration/acp/p2p/delete_test.go | 159 ++++++++++++ tests/integration/acp/p2p/update_test.go | 171 +++++++++++++ tests/integration/test_case.go | 6 +- tests/integration/utils.go | 304 ++++++++++++++++++----- 5 files changed, 712 insertions(+), 60 deletions(-) create mode 100644 tests/integration/acp/p2p/create_test.go create mode 100644 tests/integration/acp/p2p/delete_test.go create mode 100644 tests/integration/acp/p2p/update_test.go diff --git a/tests/integration/acp/p2p/create_test.go b/tests/integration/acp/p2p/create_test.go new file mode 100644 index 0000000000..8775a553d7 --- /dev/null +++ b/tests/integration/acp/p2p/create_test.go @@ -0,0 +1,132 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_p2p + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_P2PCreatePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, p2p create private documents on different nodes, with source-hub", + + SupportedACPTypes: immutable.Some( + []testUtils.ACPType{ + testUtils.SourceHubACPType, + }, + ), + + Actions: []any{ + testUtils.RandomNetworkingConfig(), + + testUtils.RandomNetworkingConfig(), + + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + NodeID: immutable.Some(0), + + CollectionID: 0, + + DocMap: map[string]any{ + "name": "Shahzad", + }, + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + NodeID: immutable.Some(1), + + CollectionID: 0, + + DocMap: map[string]any{ + "name": "Shahzad Lone", + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/p2p/delete_test.go b/tests/integration/acp/p2p/delete_test.go new file mode 100644 index 0000000000..59cae4cde9 --- /dev/null +++ b/tests/integration/acp/p2p/delete_test.go @@ -0,0 +1,159 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file 
licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_p2p + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_P2PDeletePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, p2p delete private documents on different nodes, with source-hub", + + SupportedACPTypes: immutable.Some( + []testUtils.ACPType{ + testUtils.SourceHubACPType, + }, + ), + + Actions: []any{ + testUtils.RandomNetworkingConfig(), + + testUtils.RandomNetworkingConfig(), + + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.ConfigureReplicator{ + SourceNodeID: 0, + TargetNodeID: 1, + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + NodeID: immutable.Some(0), + + CollectionID: 0, + + DocMap: map[string]any{ + "name": "Shahzad", + }, + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + NodeID: immutable.Some(1), + + CollectionID: 0, + + DocMap: map[string]any{ + "name": "Shahzad Lone", + }, + }, + + testUtils.WaitForSync{}, + + testUtils.DeleteDoc{ + Identity: immutable.Some(1), + + NodeID: immutable.Some(0), + + CollectionID: 0, + + DocID: 0, + }, + + testUtils.DeleteDoc{ + Identity: immutable.Some(1), + + NodeID: immutable.Some(1), + + CollectionID: 0, + + DocID: 1, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/p2p/update_test.go b/tests/integration/acp/p2p/update_test.go new file mode 100644 index 0000000000..339babee10 --- /dev/null +++ b/tests/integration/acp/p2p/update_test.go @@ -0,0 +1,171 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp_p2p + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_P2PUpdatePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, p2p update private documents on different nodes, with source-hub", + + SupportedACPTypes: immutable.Some( + []testUtils.ACPType{ + testUtils.SourceHubACPType, + }, + ), + + Actions: []any{ + testUtils.RandomNetworkingConfig(), + + testUtils.RandomNetworkingConfig(), + + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.ConfigureReplicator{ + SourceNodeID: 0, + TargetNodeID: 1, + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + NodeID: immutable.Some(0), + + CollectionID: 0, + + DocMap: map[string]any{ + "name": "Shahzad", + }, + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + NodeID: immutable.Some(1), + + CollectionID: 0, + + DocMap: map[string]any{ + "name": "Shahzad Lone", + }, + }, + + testUtils.WaitForSync{}, + + testUtils.UpdateDoc{ + Identity: immutable.Some(1), + + NodeID: immutable.Some(0), + + CollectionID: 0, + + DocID: 0, + + Doc: ` + { + "name": "ShahzadLone" + } + `, + }, + + testUtils.UpdateDoc{ + Identity: immutable.Some(1), + + NodeID: immutable.Some(1), + + CollectionID: 0, + + DocID: 1, + + Doc: ` + { + "name": "ShahzadLone" + } + `, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go index 9b0bce913b..f102294e97 100644 --- a/tests/integration/test_case.go +++ b/tests/integration/test_case.go @@ -718,7 +718,8 @@ type ClientIntrospectionRequest struct { type BackupExport struct { // NodeID may hold the ID (index) of a node to generate the backup from. // - // If a value is not provided the indexes will be retrieved from the first nodes. + // If a value is not provided the backup export will be done for all the nodes. + // todo: https://github.com/sourcenetwork/defradb/issues/3067 NodeID immutable.Option[int] // The backup configuration. @@ -738,7 +739,8 @@ type BackupExport struct { type BackupImport struct { // NodeID may hold the ID (index) of a node to generate the backup from. // - // If a value is not provided the indexes will be retrieved from the first nodes. + // If a value is not provided the backup import will be done for all the nodes. + // todo: https://github.com/sourcenetwork/defradb/issues/3067 NodeID immutable.Option[int] // The backup file path. 
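The indexing pitfall that motivates the `utils.go` changes below can be seen in miniature in the following sketch. It uses illustrative stand-in types (plain strings and a negative sentinel instead of the framework's real `clients.Client` and `immutable.Option`), so it is an assumption-laden sketch of the bug pattern, not the framework's actual code: once the node list is filtered by a `getNodes`-style helper, the loop index restarts at 0 and no longer matches the real node ID, so anything keyed by node index (such as a bearer token's audience) points at the wrong node.

```go
package main

import "fmt"

// nodes stands in for the per-node test clients; names are illustrative.
var nodes = []string{"node0", "node1", "node2"}

// filterNodes mirrors the shape of the old getNodes helper: it returns
// either all nodes, or a one-element slice for the requested node.
func filterNodes(nodeID int, all []string) []string {
	if nodeID < 0 { // negative stands in for an empty immutable.Option
		return all
	}
	return all[nodeID : nodeID+1]
}

func main() {
	// Buggy pattern: asking for node 2 yields a slice of length one, so
	// the loop index is 0 even though the client belongs to node 2.
	for i, n := range filterNodes(2, nodes) {
		fmt.Printf("loop index %d actually holds %s\n", i, n)
	}

	// Fixed pattern (what this patch does): branch on whether a node was
	// requested and carry the real index through all index-keyed lookups.
	if nodeID := 2; nodeID >= 0 {
		fmt.Printf("real index %d holds %s\n", nodeID, nodes[nodeID])
	}
}
```

This is why the refactor below replaces the shared `getNodes`/`getNodeCollections` loops with an explicit `action.NodeID.HasValue()` branch in each action handler.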
diff --git a/tests/integration/utils.go b/tests/integration/utils.go index 85ba2f870d..e6ab296140 100644 --- a/tests/integration/utils.go +++ b/tests/integration/utils.go @@ -572,6 +572,12 @@ func getNodes(nodeID immutable.Option[int], nodes []clients.Client) []clients.Cl // // If nodeID has a value it will return collections for that node only, otherwise all collections across all // nodes will be returned. +// +// WARNING: +// The caller must not assume the returned collections are in order of the node index if the specified +// index is greater than 0. For example if requesting collections with nodeID=2 then the resulting output +// will contain only one element (at index 0) that will be the collections of the respective node, the +// caller might accidentally assume that these collections belong to node 0. func getNodeCollections(nodeID immutable.Option[int], collections [][]client.Collection) [][]client.Collection { if !nodeID.HasValue() { return collections @@ -931,11 +937,12 @@ func getIndexes( } var expectedErrorRaised bool - actionNodes := getNodes(action.NodeID, s.nodes) - for nodeID, collections := range getNodeCollections(action.NodeID, s.collections) { - err := withRetry( - actionNodes, - nodeID, + + if action.NodeID.HasValue() { + nodeID := action.NodeID.Value() + collections := s.collections[nodeID] + err := withRetryOnNode( + s.nodes[nodeID], func() error { actualIndexes, err := collections[action.CollectionID].GetIndexes(s.ctx) if err != nil { @@ -950,6 +957,25 @@ func getIndexes( ) expectedErrorRaised = expectedErrorRaised || AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + } else { + for nodeID, collections := range s.collections { + err := withRetryOnNode( + s.nodes[nodeID], + func() error { + actualIndexes, err := collections[action.CollectionID].GetIndexes(s.ctx) + if err != nil { + return err + } + + assertIndexesListsEqual(action.ExpectedIndexes, + actualIndexes, s.t, s.testCase.Description) + + return nil + }, + ) + expectedErrorRaised = expectedErrorRaised || + AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + } } assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) @@ -1206,18 +1232,43 @@ func createDoc( var expectedErrorRaised bool var docIDs []client.DocID - actionNodes := getNodes(action.NodeID, s.nodes) - for nodeID, collections := range getNodeCollections(action.NodeID, s.collections) { - err := withRetry( - actionNodes, - nodeID, + + if action.NodeID.HasValue() { + actionNode := s.nodes[action.NodeID.Value()] + collections := s.collections[action.NodeID.Value()] + err := withRetryOnNode( + actionNode, func() error { var err error - docIDs, err = mutation(s, action, actionNodes[nodeID], nodeID, collections[action.CollectionID]) + docIDs, err = mutation( + s, + action, + actionNode, + action.NodeID.Value(), + collections[action.CollectionID], + ) return err }, ) expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + } else { + for nodeID, collections := range s.collections { + err := withRetryOnNode( + s.nodes[nodeID], + func() error { + var err error + docIDs, err = mutation( + s, + action, + s.nodes[nodeID], + nodeID, + collections[action.CollectionID], + ) + return err + }, + ) + expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + } } assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) @@ -1389,20 +1440,34 @@ func deleteDoc( docID := 
s.docIDs[action.CollectionID][action.DocID] var expectedErrorRaised bool - actionNodes := getNodes(action.NodeID, s.nodes) - for nodeID, collections := range getNodeCollections(action.NodeID, s.collections) { + + if action.NodeID.HasValue() { + nodeID := action.NodeID.Value() + actionNode := s.nodes[nodeID] + collections := s.collections[nodeID] identity := getIdentity(s, nodeID, action.Identity) ctx := db.SetContextIdentity(s.ctx, identity) - - err := withRetry( - actionNodes, - nodeID, + err := withRetryOnNode( + actionNode, func() error { _, err := collections[action.CollectionID].Delete(ctx, docID) return err }, ) expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + } else { + for nodeID, collections := range s.collections { + identity := getIdentity(s, nodeID, action.Identity) + ctx := db.SetContextIdentity(s.ctx, identity) + err := withRetryOnNode( + s.nodes[nodeID], + func() error { + _, err := collections[action.CollectionID].Delete(ctx, docID) + return err + }, + ) + expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + } } assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) @@ -1433,16 +1498,41 @@ func updateDoc( } var expectedErrorRaised bool - actionNodes := getNodes(action.NodeID, s.nodes) - for nodeID, collections := range getNodeCollections(action.NodeID, s.collections) { - err := withRetry( - actionNodes, - nodeID, + + if action.NodeID.HasValue() { + nodeID := action.NodeID.Value() + collections := s.collections[nodeID] + actionNode := s.nodes[nodeID] + err := withRetryOnNode( + actionNode, func() error { - return mutation(s, action, actionNodes[nodeID], nodeID, collections[action.CollectionID]) + return mutation( + s, + action, + actionNode, + nodeID, + collections[action.CollectionID], + ) }, ) expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + } else { + for nodeID, collections := range s.collections { + actionNode := s.nodes[nodeID] + err := withRetryOnNode( + actionNode, + func() error { + return mutation( + s, + action, + actionNode, + nodeID, + collections[action.CollectionID], + ) + }, + ) + expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + } } assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) @@ -1531,14 +1621,13 @@ func updateDocViaGQL( func updateWithFilter(s *state, action UpdateWithFilter) { var res *client.UpdateResult var expectedErrorRaised bool - actionNodes := getNodes(action.NodeID, s.nodes) - for nodeID, collections := range getNodeCollections(action.NodeID, s.collections) { + if action.NodeID.HasValue() { + nodeID := action.NodeID.Value() + collections := s.collections[nodeID] identity := getIdentity(s, nodeID, action.Identity) ctx := db.SetContextIdentity(s.ctx, identity) - - err := withRetry( - actionNodes, - nodeID, + err := withRetryOnNode( + s.nodes[nodeID], func() error { var err error res, err = collections[action.CollectionID].UpdateWithFilter(ctx, action.Filter, action.Updater) @@ -1546,6 +1635,20 @@ func updateWithFilter(s *state, action UpdateWithFilter) { }, ) expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + } else { + for nodeID, collections := range s.collections { + identity := getIdentity(s, nodeID, action.Identity) + ctx := db.SetContextIdentity(s.ctx, identity) + err := withRetryOnNode( + s.nodes[nodeID], + func() error { + var err error + res, 
err = collections[action.CollectionID].UpdateWithFilter(ctx, action.Filter, action.Updater) + return err + }, + ) + expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + } } assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) @@ -1562,11 +1665,15 @@ func createIndex( ) { if action.CollectionID >= len(s.indexes) { // Expand the slice if required, so that the index can be accessed by collection index - s.indexes = append(s.indexes, - make([][][]client.IndexDescription, action.CollectionID-len(s.indexes)+1)...) + s.indexes = append( + s.indexes, + make([][][]client.IndexDescription, action.CollectionID-len(s.indexes)+1)..., + ) } - actionNodes := getNodes(action.NodeID, s.nodes) - for nodeID, collections := range getNodeCollections(action.NodeID, s.collections) { + + if action.NodeID.HasValue() { + nodeID := action.NodeID.Value() + collections := s.collections[nodeID] indexDesc := client.IndexDescription{ Name: action.IndexName, } @@ -1584,23 +1691,64 @@ func createIndex( }) } } + indexDesc.Unique = action.Unique - err := withRetry( - actionNodes, - nodeID, + err := withRetryOnNode( + s.nodes[nodeID], func() error { desc, err := collections[action.CollectionID].CreateIndex(s.ctx, indexDesc) if err != nil { return err } - s.indexes[nodeID][action.CollectionID] = - append(s.indexes[nodeID][action.CollectionID], desc) + s.indexes[nodeID][action.CollectionID] = append( + s.indexes[nodeID][action.CollectionID], + desc, + ) return nil }, ) if AssertError(s.t, s.testCase.Description, err, action.ExpectedError) { return } + } else { + for nodeID, collections := range s.collections { + indexDesc := client.IndexDescription{ + Name: action.IndexName, + } + if action.FieldName != "" { + indexDesc.Fields = []client.IndexedFieldDescription{ + { + Name: action.FieldName, + }, + } + } else if len(action.Fields) > 0 { + for i := range action.Fields { + indexDesc.Fields = append(indexDesc.Fields, client.IndexedFieldDescription{ + Name: action.Fields[i].Name, + Descending: action.Fields[i].Descending, + }) + } + } + + indexDesc.Unique = action.Unique + err := withRetryOnNode( + s.nodes[nodeID], + func() error { + desc, err := collections[action.CollectionID].CreateIndex(s.ctx, indexDesc) + if err != nil { + return err + } + s.indexes[nodeID][action.CollectionID] = append( + s.indexes[nodeID][action.CollectionID], + desc, + ) + return nil + }, + ) + if AssertError(s.t, s.testCase.Description, err, action.ExpectedError) { + return + } + } } assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, false) @@ -1612,21 +1760,38 @@ func dropIndex( action DropIndex, ) { var expectedErrorRaised bool - actionNodes := getNodes(action.NodeID, s.nodes) - for nodeID, collections := range getNodeCollections(action.NodeID, s.collections) { + + if action.NodeID.HasValue() { + nodeID := action.NodeID.Value() + collections := s.collections[nodeID] + indexName := action.IndexName if indexName == "" { indexName = s.indexes[nodeID][action.CollectionID][action.IndexID].Name } - err := withRetry( - actionNodes, - nodeID, + err := withRetryOnNode( + s.nodes[nodeID], func() error { return collections[action.CollectionID].DropIndex(s.ctx, indexName) }, ) expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + } else { + for nodeID, collections := range s.collections { + indexName := action.IndexName + if indexName == "" { + indexName = s.indexes[nodeID][action.CollectionID][action.IndexID].Name + } + 
+ err := withRetryOnNode( + s.nodes[nodeID], + func() error { + return collections[action.CollectionID].DropIndex(s.ctx, indexName) + }, + ) + expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + } } assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) @@ -1642,11 +1807,12 @@ func backupExport( } var expectedErrorRaised bool - actionNodes := getNodes(action.NodeID, s.nodes) - for nodeID, node := range actionNodes { - err := withRetry( - actionNodes, - nodeID, + + if action.NodeID.HasValue() { + nodeID := action.NodeID.Value() + node := s.nodes[nodeID] + err := withRetryOnNode( + node, func() error { return node.BasicExport(s.ctx, &action.Config) }, ) expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError) @@ -1654,7 +1820,20 @@ func backupExport( if !expectedErrorRaised { assertBackupContent(s.t, action.ExpectedContent, action.Config.Filepath) } + } else { + for _, node := range s.nodes { + err := withRetryOnNode( + node, + func() error { return node.BasicExport(s.ctx, &action.Config) }, + ) + expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + + if !expectedErrorRaised { + assertBackupContent(s.t, action.ExpectedContent, action.Config.Filepath) + } + } } + assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) } @@ -1672,31 +1851,40 @@ func backupImport( _ = os.WriteFile(action.Filepath, []byte(action.ImportContent), 0664) var expectedErrorRaised bool - actionNodes := getNodes(action.NodeID, s.nodes) - for nodeID, node := range actionNodes { - err := withRetry( - actionNodes, - nodeID, + + if action.NodeID.HasValue() { + nodeID := action.NodeID.Value() + node := s.nodes[nodeID] + err := withRetryOnNode( + node, func() error { return node.BasicImport(s.ctx, action.Filepath) }, ) expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + } else { + for _, node := range s.nodes { + err := withRetryOnNode( + node, + func() error { return node.BasicImport(s.ctx, action.Filepath) }, + ) + expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + } } + assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) } -// withRetry attempts to perform the given action, retrying up to a DB-defined +// withRetryOnNode attempts to perform the given action, retrying up to a DB-defined // maximum attempt count if a transaction conflict error is returned. // // If a P2P-sync commit for the given document is already in progress this // Save call can fail as the transaction will conflict. 
We dont want to worry // about this in our tests so we just retry a few times until it works (or the // retry limit is breached - important incase this is a different error) -func withRetry( - nodes []clients.Client, - nodeID int, +func withRetryOnNode( + node clients.Client, action func() error, ) error { - for i := 0; i < nodes[nodeID].MaxTxnRetries(); i++ { + for i := 0; i < node.MaxTxnRetries(); i++ { err := action() if errors.Is(err, datastore.ErrTxnConflict) { time.Sleep(100 * time.Millisecond) From cf6154319a8a8e699eb9bb54287bd95c50d378c6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 26 Sep 2024 10:11:24 -0400 Subject: [PATCH 42/71] bot: Bump @typescript-eslint/parser from 8.6.0 to 8.7.0 in /playground (#3060) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [@typescript-eslint/parser](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/parser) from 8.6.0 to 8.7.0.
Release notes

Sourced from @typescript-eslint/parser's releases.

v8.7.0

8.7.0 (2024-09-23)

🚀 Features

  • eslint-plugin: [no-unsafe-call] check calls of Function (#10010)
  • eslint-plugin: [consistent-type-exports] check export * exports to see if all exported members are types (#10006)

🩹 Fixes

  • eslint-plugin: properly coerce all types to string in getStaticMemberAccessValue (#10004)
  • eslint-plugin: [no-deprecated] report on imported deprecated variables (#9987)
  • eslint-plugin: [no-confusing-non-null-assertion] check !in and !instanceof (#9994)
  • types: add NewExpression as a parent of SpreadElement (#10024)
  • utils: add missing entries to the RuleListener selectors list (#9992)

You can read about our versioning strategy and releases on our website.

Changelog

Sourced from @typescript-eslint/parser's changelog.

8.7.0 (2024-09-23)

This was a version bump only for parser to align it with other projects; there were no code changes.

You can read about our versioning strategy and releases on our website.

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@typescript-eslint/parser&package-manager=npm_and_yarn&previous-version=8.6.0&new-version=8.7.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- playground/package-lock.json | 91 ++++++++++++++++++++++++++++++++---- playground/package.json | 2 +- 2 files changed, 84 insertions(+), 9 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index b36597dc6f..3b27f0ff32 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -19,7 +19,7 @@ "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^8.6.0", - "@typescript-eslint/parser": "^8.6.0", + "@typescript-eslint/parser": "^8.7.0", "@vitejs/plugin-react-swc": "^3.7.0", "eslint": "^9.11.1", "eslint-plugin-react-hooks": "^4.6.2", @@ -2550,15 +2550,15 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.6.0.tgz", - "integrity": "sha512-eQcbCuA2Vmw45iGfcyG4y6rS7BhWfz9MQuk409WD47qMM+bKCGQWXxvoOs1DUp+T7UBMTtRTVT+kXr7Sh4O9Ow==", + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.7.0.tgz", + "integrity": "sha512-lN0btVpj2unxHlNYLI//BQ7nzbMJYBVQX5+pbNXvGYazdlgYonMn4AhhHifQ+J4fGRYA/m1DjaQjx+fDetqBOQ==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "8.6.0", - "@typescript-eslint/types": "8.6.0", - "@typescript-eslint/typescript-estree": "8.6.0", - "@typescript-eslint/visitor-keys": "8.6.0", + "@typescript-eslint/scope-manager": "8.7.0", + "@typescript-eslint/types": "8.7.0", + "@typescript-eslint/typescript-estree": "8.7.0", + "@typescript-eslint/visitor-keys": "8.7.0", "debug": "^4.3.4" }, "engines": { @@ -2577,6 +2577,81 @@ } } }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.7.0.tgz", + "integrity": "sha512-87rC0k3ZlDOuz82zzXRtQ7Akv3GKhHs0ti4YcbAJtaomllXoSO8hi7Ix3ccEvCd824dy9aIX+j3d2UMAfCtVpg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.7.0", + "@typescript-eslint/visitor-keys": "8.7.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.7.0.tgz", + "integrity": "sha512-LLt4BLHFwSfASHSF2K29SZ+ZCsbQOM+LuarPjRUuHm+Qd09hSe3GCeaQbcCr+Mik+0QFRmep/FyZBO6fJ64U3w==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.7.0.tgz", + "integrity": "sha512-MC8nmcGHsmfAKxwnluTQpNqceniT8SteVwd2voYlmiSWGOtjvGXdPl17dYu2797GVscK30Z04WRM28CrKS9WOg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.7.0", + "@typescript-eslint/visitor-keys": "8.7.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 
|| >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.7.0.tgz", + "integrity": "sha512-b1tx0orFCCh/THWPQa2ZwWzvOeyzzp36vkJYOpVg0u8UVOIsfVrnuC9FqAw9gRKn+rG2VmWQ/zDJZzkxUnj/XQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.7.0", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/scope-manager": { "version": "8.6.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.6.0.tgz", diff --git a/playground/package.json b/playground/package.json index eb14b3e0aa..39a56dd3f9 100644 --- a/playground/package.json +++ b/playground/package.json @@ -21,7 +21,7 @@ "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^8.6.0", - "@typescript-eslint/parser": "^8.6.0", + "@typescript-eslint/parser": "^8.7.0", "@vitejs/plugin-react-swc": "^3.7.0", "eslint": "^9.11.1", "eslint-plugin-react-hooks": "^4.6.2", From 54c99e7c057857f832b72a17c2ec7adcb9f71f0e Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Thu, 26 Sep 2024 13:21:46 -0400 Subject: [PATCH 43/71] fix(i): Validate nearby relation fields in SchemaPatch (#3077) ## Relevant issue(s) Resolves #3074 ## Description Correctly handles validation of nearby relation fields in schema patch. Previously the equality check failed to account for `Kind` being a pointer and thus always flagged relation fields as having mutated. This was likely introduced in https://github.com/sourcenetwork/defradb/pull/2961 when `Kind` became a pointer. Collections referenced by relation fields were also not included in the validation set, causing the rules to think that the related object did not exist. 
--- internal/db/definition_validation.go | 4 +- internal/db/schema.go | 25 +++++++--- .../updates/add/field/with_relation_test.go | 46 +++++++++++++++++++ 3 files changed, 68 insertions(+), 7 deletions(-) create mode 100644 tests/integration/schema/updates/add/field/with_relation_test.go diff --git a/internal/db/definition_validation.go b/internal/db/definition_validation.go index 47c222bf9c..2d178f1e07 100644 --- a/internal/db/definition_validation.go +++ b/internal/db/definition_validation.go @@ -818,7 +818,9 @@ func validateFieldNotMutated( for _, newField := range newSchema.Fields { oldField, exists := oldFieldsByName[newField.Name] - if exists && oldField != newField { + + // DeepEqual is temporary, as this validation is temporary + if exists && !reflect.DeepEqual(oldField, newField) { return NewErrCannotMutateField(newField.Name) } } diff --git a/internal/db/schema.go b/internal/db/schema.go index d9b9a4055c..f1906feb54 100644 --- a/internal/db/schema.go +++ b/internal/db/schema.go @@ -371,6 +371,11 @@ func (db *db) updateSchema( proposedDescriptionsByName[schema.Name] = schema } + allExistingCols, err := db.getCollections(ctx, client.CollectionFetchOptions{}) + if err != nil { + return err + } + for _, schema := range proposedDescriptionsByName { previousSchema := existingSchemaByName[schema.Name] @@ -488,17 +493,25 @@ func (db *db) updateSchema( return err } - allExistingCols, err := db.getCollections(ctx, client.CollectionFetchOptions{}) - if err != nil { - return err - } - oldDefs := make([]client.CollectionDefinition, 0, len(allExistingCols)) for _, col := range allExistingCols { oldDefs = append(oldDefs, col.Definition()) } - err = db.validateSchemaUpdate(ctx, oldDefs, definitions) + defNames := make(map[string]struct{}, len(definitions)) + for _, def := range definitions { + defNames[def.GetName()] = struct{}{} + } + + newDefs := make([]client.CollectionDefinition, 0, len(definitions)) + newDefs = append(newDefs, definitions...) + for _, existing := range allExistingCols { + if _, ok := defNames[existing.Definition().GetName()]; !ok { + newDefs = append(newDefs, existing.Definition()) + } + } + + err = db.validateSchemaUpdate(ctx, oldDefs, newDefs) if err != nil { return err } diff --git a/tests/integration/schema/updates/add/field/with_relation_test.go b/tests/integration/schema/updates/add/field/with_relation_test.go new file mode 100644 index 0000000000..9adb5659ac --- /dev/null +++ b/tests/integration/schema/updates/add/field/with_relation_test.go @@ -0,0 +1,46 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package field + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +// This test ensures that nearby relation fields are not failing validation during a schema patch. 
+func TestSchemaUpdatesAddField_DoesNotAffectExistingRelation(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + author: Author + } + + type Author { + name: String + books: [Book] + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Book/Fields/-", "value": {"Name": "rating", "Kind": 4} } + ] + `, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} From 88972df0fdd9dcd4872bab6d96a01cba67e2bdf3 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 27 Sep 2024 13:01:12 -0700 Subject: [PATCH 44/71] feat: GraphQL upsert mutation (#3075) ## Relevant issue(s) Resolves #783 ## Description This PR adds a new upsert GraphQL mutation operation. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? Added integration tests. Specify the platform(s) on which this was tested: - MacOS --- client/request/consts.go | 2 + client/request/mutation.go | 11 +- internal/planner/create.go | 6 +- internal/planner/errors.go | 1 + internal/planner/explain.go | 3 + internal/planner/mapper/mapper.go | 3 +- internal/planner/mapper/mutation.go | 8 +- internal/planner/operations.go | 1 + internal/planner/planner.go | 6 + internal/planner/update.go | 6 +- internal/planner/upsert.go | 174 +++++++++ internal/request/graphql/parser/mutation.go | 161 ++++++--- .../request/graphql/schema/descriptions.go | 12 + internal/request/graphql/schema/generate.go | 13 +- tests/integration/explain.go | 1 + .../integration/explain/debug/upsert_test.go | 64 ++++ .../explain/default/upsert_test.go | 104 ++++++ .../explain/execute/upsert_test.go | 125 +++++++ .../mutation/upsert/simple_test.go | 330 ++++++++++++++++++ 19 files changed, 962 insertions(+), 69 deletions(-) create mode 100644 internal/planner/upsert.go create mode 100644 tests/integration/explain/debug/upsert_test.go create mode 100644 tests/integration/explain/default/upsert_test.go create mode 100644 tests/integration/explain/execute/upsert_test.go create mode 100644 tests/integration/mutation/upsert/simple_test.go diff --git a/client/request/consts.go b/client/request/consts.go index 157cab8b5f..0e27eaeb3d 100644 --- a/client/request/consts.go +++ b/client/request/consts.go @@ -21,6 +21,8 @@ const ( Cid = "cid" Input = "input" + CreateInput = "create" + UpdateInput = "update" FieldName = "field" FieldIDName = "fieldId" ShowDeleted = "showDeleted" diff --git a/client/request/mutation.go b/client/request/mutation.go index 146a7ac8b5..c91b5868b6 100644 --- a/client/request/mutation.go +++ b/client/request/mutation.go @@ -17,6 +17,7 @@ const ( CreateObjects UpdateObjects DeleteObjects + UpsertObjects ) // ObjectMutation is a field on the `mutation` operation of a graphql request. It includes @@ -36,11 +37,11 @@ type ObjectMutation struct { // Collection is the target collection name. Collection string - // Input is the array of json representations of the fieldName-value pairs of document - // properties to mutate. 
- // - // This is ignored for [DeleteObjects] mutations. - Input []map[string]any + // CreateInput is the array of maps of fields and values used for a create mutation. + CreateInput []map[string]any + + // UpdateInput is a map of fields and values used for an update mutation. + UpdateInput map[string]any // Encrypt is a boolean flag that indicates whether the input data should be encrypted. Encrypt bool diff --git a/internal/planner/create.go b/internal/planner/create.go index 0c36658a14..18365f966d 100644 --- a/internal/planner/create.go +++ b/internal/planner/create.go @@ -65,7 +65,7 @@ func docIDsToSpans(ids []string, desc client.CollectionDescription) core.Spans { return core.NewSpans(spans...) } -func documentsToDocIDs(docs []*client.Document) []string { +func documentsToDocIDs(docs ...*client.Document) []string { docIDs := make([]string, len(docs)) for i, doc := range docs { docIDs[i] = doc.ID().String() @@ -96,7 +96,7 @@ func (n *createNode) Next() (bool, error) { return false, err } - n.results.Spans(docIDsToSpans(documentsToDocIDs(n.docs), n.collection.Description())) + n.results.Spans(docIDsToSpans(documentsToDocIDs(n.docs...), n.collection.Description())) err = n.results.Init() if err != nil { @@ -151,7 +151,7 @@ func (p *Planner) CreateDocs(parsed *mapper.Mutation) (planNode, error) { // create a mutation createNode. create := &createNode{ p: p, - input: parsed.Input, + input: parsed.CreateInput, results: results, docMapper: docMapper{parsed.DocumentMapping}, } diff --git a/internal/planner/errors.go b/internal/planner/errors.go index 54db7a7c79..295196d4d5 100644 --- a/internal/planner/errors.go +++ b/internal/planner/errors.go @@ -34,6 +34,7 @@ var ( ErrMissingChildValue = errors.New("expected child value, however none was yielded") ErrUnknownRelationType = errors.New("failed sub selection, unknown relation type") ErrUnknownExplainRequestType = errors.New("can not explain request of unknown type") + ErrUpsertMultipleDocuments = errors.New("cannot upsert multiple matching documents") ) func NewErrUnknownDependency(name string) error { diff --git a/internal/planner/explain.go b/internal/planner/explain.go index 1f17968489..76679a85e3 100644 --- a/internal/planner/explain.go +++ b/internal/planner/explain.go @@ -47,6 +47,7 @@ var ( _ explainablePlanNode = (*topLevelNode)(nil) _ explainablePlanNode = (*typeIndexJoin)(nil) _ explainablePlanNode = (*updateNode)(nil) + _ explainablePlanNode = (*upsertNode)(nil) ) const ( @@ -54,6 +55,8 @@ const ( collectionIDLabel = "collectionID" collectionNameLabel = "collectionName" inputLabel = "input" + createInputLabel = "create" + updateInputLabel = "update" fieldNameLabel = "fieldName" filterLabel = "filter" joinRootLabel = "root" diff --git a/internal/planner/mapper/mapper.go b/internal/planner/mapper/mapper.go index 68a7924806..8aeab3c22e 100644 --- a/internal/planner/mapper/mapper.go +++ b/internal/planner/mapper/mapper.go @@ -1245,7 +1245,8 @@ func toMutation( return &Mutation{ Select: *underlyingSelect, Type: MutationType(mutationRequest.Type), - Input: mutationRequest.Input, + CreateInput: mutationRequest.CreateInput, + UpdateInput: mutationRequest.UpdateInput, Encrypt: mutationRequest.Encrypt, EncryptFields: mutationRequest.EncryptFields, }, nil diff --git a/internal/planner/mapper/mutation.go b/internal/planner/mapper/mutation.go index 6c4ab4c56f..4644efb2ab 100644 --- a/internal/planner/mapper/mutation.go +++ b/internal/planner/mapper/mutation.go @@ -17,6 +17,7 @@ const ( CreateObjects UpdateObjects DeleteObjects + UpsertObjects ) 
// Mutation represents a request to mutate data stored in Defra. @@ -27,8 +28,11 @@ type Mutation struct { // The type of mutation. For example a create request. Type MutationType - // Input is the array of maps of fields and values used for the mutation. - Input []map[string]any + // CreateInput is the array of maps of fields and values used for a create mutation. + CreateInput []map[string]any + + // UpdateInput is a map of fields and values used for an update mutation. + UpdateInput map[string]any // Encrypt is a flag to indicate if the input data should be encrypted. Encrypt bool diff --git a/internal/planner/operations.go b/internal/planner/operations.go index e08ebae5c3..6cbf7c24d4 100644 --- a/internal/planner/operations.go +++ b/internal/planner/operations.go @@ -31,6 +31,7 @@ var ( _ planNode = (*typeJoinMany)(nil) _ planNode = (*typeJoinOne)(nil) _ planNode = (*updateNode)(nil) + _ planNode = (*upsertNode)(nil) _ planNode = (*valuesNode)(nil) _ planNode = (*viewNode)(nil) _ planNode = (*lensNode)(nil) diff --git a/internal/planner/planner.go b/internal/planner/planner.go index 4423183d75..fb5ce5812a 100644 --- a/internal/planner/planner.go +++ b/internal/planner/planner.go @@ -121,6 +121,9 @@ func (p *Planner) newObjectMutationPlan(stmt *mapper.Mutation) (planNode, error) case mapper.DeleteObjects: return p.DeleteDocs(stmt) + case mapper.UpsertObjects: + return p.UpsertDocs(stmt) + default: return nil, client.NewErrUnhandledType("mutation", stmt.Type) } @@ -184,6 +187,9 @@ func (p *Planner) expandPlan(planNode planNode, parentPlan *selectTopNode) error case *deleteNode: return p.expandPlan(n.source, parentPlan) + case *upsertNode: + return p.expandPlan(n.source, parentPlan) + case *viewNode: return p.expandPlan(n.source, parentPlan) diff --git a/internal/planner/update.go b/internal/planner/update.go index 2f282af292..e707065022 100644 --- a/internal/planner/update.go +++ b/internal/planner/update.go @@ -163,15 +163,11 @@ func (p *Planner) UpdateDocs(parsed *mapper.Mutation) (planNode, error) { p: p, filter: parsed.Filter, docIDs: parsed.DocIDs.Value(), + input: parsed.UpdateInput, isUpdating: true, docMapper: docMapper{parsed.DocumentMapping}, } - // update mutation only supports a single input - if len(parsed.Input) > 0 { - update.input = parsed.Input[0] - } - // get collection col, err := p.db.GetCollectionByName(p.ctx, parsed.Name) if err != nil { diff --git a/internal/planner/upsert.go b/internal/planner/upsert.go new file mode 100644 index 0000000000..4f12395284 --- /dev/null +++ b/internal/planner/upsert.go @@ -0,0 +1,174 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package planner + +import ( + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/internal/core" + "github.com/sourcenetwork/defradb/internal/planner/mapper" +) + +type upsertNode struct { + documentIterator + docMapper + + p *Planner + collection client.Collection + filter *mapper.Filter + createInput map[string]any + updateInput map[string]any + isInitialized bool + source planNode +} + +// Next only returns once. 
+func (n *upsertNode) Next() (bool, error) { + if !n.isInitialized { + next, err := n.source.Next() + if err != nil { + return false, err + } + if next { + n.currentValue = n.source.Value() + // make sure multiple documents do not match + next, err := n.source.Next() + if err != nil { + return false, err + } + if next { + return false, ErrUpsertMultipleDocuments + } + docID, err := client.NewDocIDFromString(n.currentValue.GetID()) + if err != nil { + return false, err + } + doc, err := n.collection.Get(n.p.ctx, docID, false) + if err != nil { + return false, err + } + for k, v := range n.updateInput { + if err := doc.Set(k, v); err != nil { + return false, err + } + } + err = n.collection.Update(n.p.ctx, doc) + if err != nil { + return false, err + } + } else { + doc, err := client.NewDocFromMap(n.createInput, n.collection.Definition()) + if err != nil { + return false, err + } + err = n.collection.Create(n.p.ctx, doc) + if err != nil { + return false, err + } + n.source.Spans(docIDsToSpans(documentsToDocIDs(doc), n.collection.Description())) + } + err = n.source.Init() + if err != nil { + return false, err + } + n.isInitialized = true + } + next, err := n.source.Next() + if err != nil { + return false, err + } + if !next { + return false, nil + } + n.currentValue = n.source.Value() + return true, nil +} + +func (n *upsertNode) Kind() string { + return "upsertNode" +} + +func (n *upsertNode) Spans(spans core.Spans) { + n.source.Spans(spans) +} + +func (n *upsertNode) Init() error { + return n.source.Init() +} + +func (n *upsertNode) Start() error { + return n.source.Start() +} + +func (n *upsertNode) Close() error { + return n.source.Close() +} + +func (n *upsertNode) Source() planNode { + return n.source +} + +func (n *upsertNode) simpleExplain() (map[string]any, error) { + simpleExplainMap := map[string]any{} + + // Add the filter attribute + simpleExplainMap[filterLabel] = n.filter.ToMap(n.documentMapping) + + // Add the attribute that represents the values to create or update. + simpleExplainMap[updateInputLabel] = n.updateInput + simpleExplainMap[createInputLabel] = n.createInput + + return simpleExplainMap, nil +} + +// Explain method returns a map containing all attributes of this node that +// are to be explained, subscribes / opts-in this node to be an explainablePlanNode. 
+func (n *upsertNode) Explain(explainType request.ExplainType) (map[string]any, error) { + switch explainType { + case request.SimpleExplain: + return n.simpleExplain() + + case request.ExecuteExplain: + return map[string]any{}, nil + + default: + return nil, ErrUnknownExplainRequestType + } +} + +func (p *Planner) UpsertDocs(parsed *mapper.Mutation) (planNode, error) { + upsert := &upsertNode{ + p: p, + filter: parsed.Filter, + updateInput: parsed.UpdateInput, + docMapper: docMapper{parsed.DocumentMapping}, + } + + if len(parsed.CreateInput) > 0 { + upsert.createInput = parsed.CreateInput[0] + } + + // get collection + col, err := p.db.GetCollectionByName(p.ctx, parsed.Name) + if err != nil { + return nil, err + } + upsert.collection = col + + // create the results Select node + resultsNode, err := p.Select(&parsed.Select) + if err != nil { + return nil, err + } + upsert.source = resultsNode + + return upsert, nil +} diff --git a/internal/request/graphql/parser/mutation.go b/internal/request/graphql/parser/mutation.go index 71942f1223..a5b0a62ea8 100644 --- a/internal/request/graphql/parser/mutation.go +++ b/internal/request/graphql/parser/mutation.go @@ -20,14 +20,6 @@ import ( "github.com/sourcenetwork/defradb/client/request" ) -var ( - mutationNameToType = map[string]request.MutationType{ - "create": request.CreateObjects, - "update": request.UpdateObjects, - "delete": request.DeleteObjects, - } -) - // parseMutationOperationDefinition parses the individual GraphQL // 'mutation' operations, which there may be multiple of. func parseMutationOperationDefinition( @@ -64,9 +56,6 @@ func parseMutation(exe *gql.ExecutionContext, parent *gql.Object, field *ast.Fie }, } - fieldDef := gql.GetFieldDef(exe.Schema, parent, mut.Name) - arguments := gql.GetArgumentValues(fieldDef.Args, field.Arguments, exe.VariableValues) - // parse the mutation type // mutation names are either generated from a type // which means they are in the form name_type, where @@ -77,11 +66,6 @@ func parseMutation(exe *gql.ExecutionContext, parent *gql.Object, field *ast.Fie // get back one of our defined types. mutNameParts := strings.Split(mut.Name, "_") typeStr := mutNameParts[0] - var ok bool - mut.Type, ok = mutationNameToType[typeStr] - if !ok { - return nil, ErrUnknownMutationName - } if len(mutNameParts) > 1 { // only generated object mutations // reconstruct the name. 
@@ -92,41 +76,61 @@ func parseMutation(exe *gql.ExecutionContext, parent *gql.Object, field *ast.Fie mut.Collection = strings.Join(mutNameParts[1:], "_") } - for _, argument := range field.Arguments { - name := argument.Name.Value - value := arguments[name] + fieldDef := gql.GetFieldDef(exe.Schema, parent, mut.Name) + arguments := gql.GetArgumentValues(fieldDef.Args, field.Arguments, exe.VariableValues) - switch name { - case request.Input: - switch v := value.(type) { - case []any: - // input for create is a list - inputs := make([]map[string]any, len(v)) - for i, v := range v { - inputs[i] = v.(map[string]any) - } - mut.Input = inputs + switch typeStr { + case "create": + mut.Type = request.CreateObjects + parseCreateMutationArgs(mut, arguments) - case map[string]any: - // input for update is an object - mut.Input = []map[string]any{v} - } + case "update": + mut.Type = request.UpdateObjects + parseUpdateMutationArgs(mut, arguments) - case request.FilterClause: - if v, ok := value.(map[string]any); ok { - mut.Filter = immutable.Some(request.Filter{Conditions: v}) - } + case "delete": + mut.Type = request.DeleteObjects + parseDeleteMutationArgs(mut, arguments) - case request.DocIDArgName: + case "upsert": + mut.Type = request.UpsertObjects + parseUpsertMutationArgs(mut, arguments) + + default: + return nil, ErrUnknownMutationName + } + + // if theres no field selections, just return + if field.SelectionSet == nil { + return mut, nil + } + + fieldObject, err := typeFromFieldDef(fieldDef) + if err != nil { + return nil, err + } + + mut.Fields, err = parseSelectFields(exe, fieldObject, field.SelectionSet) + if err != nil { + return nil, err + } + + return mut, err +} + +func parseCreateMutationArgs(mut *request.ObjectMutation, args map[string]any) { + for name, value := range args { + switch name { + case request.Input: v, ok := value.([]any) if !ok { continue // value is nil } - docIDs := make([]string, len(v)) + inputs := make([]map[string]any, len(v)) for i, v := range v { - docIDs[i] = v.(string) + inputs[i] = v.(map[string]any) } - mut.DocIDs = immutable.Some(docIDs) + mut.CreateInput = inputs case request.EncryptDocArgName: if v, ok := value.(bool); ok { @@ -145,21 +149,74 @@ func parseMutation(exe *gql.ExecutionContext, parent *gql.Object, field *ast.Fie mut.EncryptFields = fields } } +} - // if theres no field selections, just return - if field.SelectionSet == nil { - return mut, nil - } +func parseDeleteMutationArgs(mut *request.ObjectMutation, args map[string]any) { + for name, value := range args { + switch name { + case request.DocIDArgName: + v, ok := value.([]any) + if !ok { + continue // value is nil + } + docIDs := make([]string, len(v)) + for i, v := range v { + docIDs[i] = v.(string) + } + mut.DocIDs = immutable.Some(docIDs) - fieldObject, err := typeFromFieldDef(fieldDef) - if err != nil { - return nil, err + case request.FilterClause: + if v, ok := value.(map[string]any); ok { + mut.Filter = immutable.Some(request.Filter{Conditions: v}) + } + } } +} - mut.Fields, err = parseSelectFields(exe, fieldObject, field.SelectionSet) - if err != nil { - return nil, err +func parseUpdateMutationArgs(mut *request.ObjectMutation, args map[string]any) { + for name, value := range args { + switch name { + case request.Input: + if v, ok := value.(map[string]any); ok { + mut.UpdateInput = v + } + + case request.DocIDArgName: + v, ok := value.([]any) + if !ok { + continue // value is nil + } + docIDs := make([]string, len(v)) + for i, v := range v { + docIDs[i] = v.(string) + } + mut.DocIDs 
= immutable.Some(docIDs) + + case request.FilterClause: + if v, ok := value.(map[string]any); ok { + mut.Filter = immutable.Some(request.Filter{Conditions: v}) + } + } } +} - return mut, err +func parseUpsertMutationArgs(mut *request.ObjectMutation, args map[string]any) { + for name, value := range args { + switch name { + case request.CreateInput: + if v, ok := value.(map[string]any); ok { + mut.CreateInput = []map[string]any{v} + } + + case request.UpdateInput: + if v, ok := value.(map[string]any); ok { + mut.UpdateInput = v + } + + case request.FilterClause: + if v, ok := value.(map[string]any); ok { + mut.Filter = immutable.Some(request.Filter{Conditions: v}) + } + } + } } diff --git a/internal/request/graphql/schema/descriptions.go b/internal/request/graphql/schema/descriptions.go index 9fe841c466..b667410c2c 100644 --- a/internal/request/graphql/schema/descriptions.go +++ b/internal/request/graphql/schema/descriptions.go @@ -104,6 +104,13 @@ An optional value that specifies as to whether deleted documents may be createDocumentDescription string = ` Creates one or more documents of this type using the data provided. ` + upsertDocumentDescription string = ` +Update or create a document in this collection using the data provided. The provided filter + must match at most one document. The matching document will be updated with the provided + update input, or if no matching document is found, a new document will be created with the + provided create input. + +NOTE: It is highly recommended to create an index on the fields used to filter.` updateDocumentsDescription string = ` Updates documents in this collection using the data provided. Only documents matching any provided criteria will be updated, if no criteria are provided @@ -123,6 +130,11 @@ An optional set of docID values that will limit the update to documents An optional filter for this update that will limit the update to the documents matching the given criteria. If no matching documents are found, the operation will succeed, but no documents will be updated. +` + upsertFilterArgDescription string = ` +A required filter for this upsert that must match one or zero documents. + If a matching document is found it will be updated, otherwise a new + document will be created. ` deleteDocumentsDescription string = ` Deletes documents in this collection matching any provided criteria. 
If no diff --git a/internal/request/graphql/schema/generate.go b/internal/request/graphql/schema/generate.go index b0f0163f06..c2850f79ce 100644 --- a/internal/request/graphql/schema/generate.go +++ b/internal/request/graphql/schema/generate.go @@ -1082,7 +1082,18 @@ func (g *Generator) GenerateMutationInputForGQLType(obj *gql.Object) ([]*gql.Fie }, } - return []*gql.Field{create, update, delete}, nil + upsert := &gql.Field{ + Name: "upsert_" + obj.Name(), + Description: upsertDocumentDescription, + Type: gql.NewList(obj), + Args: gql.FieldConfigArgument{ + request.FilterClause: schemaTypes.NewArgConfig(gql.NewNonNull(filterInput), upsertFilterArgDescription), + request.CreateInput: schemaTypes.NewArgConfig(gql.NewNonNull(mutationInput), "Create field values"), + request.UpdateInput: schemaTypes.NewArgConfig(gql.NewNonNull(mutationInput), "Update field values"), + }, + } + + return []*gql.Field{create, update, delete, upsert}, nil } func (g *Generator) genTypeFieldsEnum(obj *gql.Object) *gql.Enum { diff --git a/tests/integration/explain.go b/tests/integration/explain.go index 7bc7f9074a..325e69b3f7 100644 --- a/tests/integration/explain.go +++ b/tests/integration/explain.go @@ -54,6 +54,7 @@ var ( "typeJoinMany": {}, "typeJoinOne": {}, "updateNode": {}, + "upsertNode": {}, "valuesNode": {}, "viewNode": {}, "lensNode": {}, diff --git a/tests/integration/explain/debug/upsert_test.go b/tests/integration/explain/debug/upsert_test.go new file mode 100644 index 0000000000..5c14b2328a --- /dev/null +++ b/tests/integration/explain/debug/upsert_test.go @@ -0,0 +1,64 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_explain_debug + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" +) + +var upsertPattern = dataMap{ + "explain": dataMap{ + "operationNode": []dataMap{ + { + "upsertNode": dataMap{ + "selectTopNode": dataMap{ + "selectNode": dataMap{ + "scanNode": dataMap{}, + }, + }, + }, + }, + }, + }, +} + +func TestDebugExplainMutationRequest_WithUpsert_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (debug) mutation request with upsert.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `mutation @explain(type: debug) { + upsert_Author( + filter: {name: {_eq: "Bob"}}, + update: {age: 59}, + create: {name: "Bob", age: 59} + ) { + _docID + name + age + } + }`, + + ExpectedPatterns: upsertPattern, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/explain/default/upsert_test.go b/tests/integration/explain/default/upsert_test.go new file mode 100644 index 0000000000..7cc38294e8 --- /dev/null +++ b/tests/integration/explain/default/upsert_test.go @@ -0,0 +1,104 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_explain_default + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" +) + +var upsertPattern = dataMap{ + "explain": dataMap{ + "operationNode": []dataMap{ + { + "upsertNode": dataMap{ + "selectTopNode": dataMap{ + "selectNode": dataMap{ + "scanNode": dataMap{}, + }, + }, + }, + }, + }, + }, +} + +func TestDefaultExplainMutationRequest_WithUpsert_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (default) mutation request with upsert.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `mutation @explain { + upsert_Author( + filter: {name: {_eq: "Bob"}}, + create: {name: "Bob", age: 59}, + update: {age: 59} + ) { + _docID + name + age + } + }`, + + ExpectedPatterns: upsertPattern, + + ExpectedTargets: []testUtils.PlanNodeTargetCase{ + { + TargetNodeName: "upsertNode", + IncludeChildNodes: false, + ExpectedAttributes: dataMap{ + "create": dataMap{ + "name": "Bob", + "age": int32(59), + }, + "update": dataMap{ + "age": int32(59), + }, + "filter": dataMap{ + "name": dataMap{ + "_eq": "Bob", + }, + }, + }, + }, + { + TargetNodeName: "scanNode", + IncludeChildNodes: true, // should be last node, so will have no child nodes. + ExpectedAttributes: dataMap{ + "collectionID": "3", + "collectionName": "Author", + "filter": dataMap{ + "name": dataMap{ + "_eq": "Bob", + }, + }, + "spans": []dataMap{ + { + "end": "/4", + "start": "/3", + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/explain/execute/upsert_test.go b/tests/integration/explain/execute/upsert_test.go new file mode 100644 index 0000000000..65b2abeed1 --- /dev/null +++ b/tests/integration/explain/execute/upsert_test.go @@ -0,0 +1,125 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_explain_execute + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" +) + +func TestExecuteExplainMutationRequest_WithUpsertAndMatchingFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (execute) mutation request with upsert and matching filter.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + // Addresses + create2AddressDocuments(), + + testUtils.ExplainRequest{ + Request: `mutation @explain(type: execute) { + upsert_ContactAddress( + filter: {city: {_eq: "Waterloo"}}, + create: {city: "Waterloo", country: "USA"}, + update: {country: "USA"} + ) { + country + city + } + }`, + + ExpectedFullGraph: dataMap{ + "explain": dataMap{ + "executionSuccess": true, + "sizeOfResult": 1, + "planExecutions": uint64(2), + "operationNode": []dataMap{ + { + "upsertNode": dataMap{ + "selectTopNode": dataMap{ + "selectNode": dataMap{ + "iterations": uint64(4), + "filterMatches": uint64(2), + "scanNode": dataMap{ + "iterations": uint64(4), + "docFetches": uint64(4), + "fieldFetches": uint64(6), + "indexFetches": uint64(0), + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} + +func TestExecuteExplainMutationRequest_WithUpsertAndNoMatchingFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (execute) mutation request with upsert and no matching filter.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + Request: `mutation @explain(type: execute) { + upsert_ContactAddress( + filter: {city: {_eq: "Waterloo"}}, + create: {city: "Waterloo", country: "USA"}, + update: {country: "USA"} + ) { + country + city + } + }`, + + ExpectedFullGraph: dataMap{ + "explain": dataMap{ + "executionSuccess": true, + "sizeOfResult": 1, + "planExecutions": uint64(2), + "operationNode": []dataMap{ + { + "upsertNode": dataMap{ + "selectTopNode": dataMap{ + "selectNode": dataMap{ + "iterations": uint64(3), + "filterMatches": uint64(1), + "scanNode": dataMap{ + "iterations": uint64(3), + "docFetches": uint64(1), + "fieldFetches": uint64(2), + "indexFetches": uint64(0), + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/upsert/simple_test.go b/tests/integration/mutation/upsert/simple_test.go new file mode 100644 index 0000000000..ba2ac7fe58 --- /dev/null +++ b/tests/integration/mutation/upsert/simple_test.go @@ -0,0 +1,330 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package upsert + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpsertSimple_WithNoFilterMatch_CreatesNewDoc(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple upsert mutation with no filter match", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + age: Int + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Alice", + "age": 40 + }`, + }, + testUtils.Request{ + Request: `mutation { + upsert_Users( + filter: {name: {_eq: "Bob"}}, + create: {name: "Bob", age: 40}, + update: {age: 40} + ) { + name + age + } + }`, + Results: map[string]any{ + "upsert_Users": []map[string]any{ + { + "name": "Bob", + "age": int64(40), + }, + }, + }, + }, + testUtils.Request{ + Request: `query { + Users { + name + age + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Bob", + "age": int64(40), + }, + { + "name": "Alice", + "age": int64(40), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpsertSimple_WithFilterMatch_UpdatesDoc(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple upsert mutation with filter match", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + age: Int + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Alice", + "age": 40 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Bob", + "age": 30 + }`, + }, + testUtils.Request{ + Request: `mutation { + upsert_Users( + filter: {name: {_eq: "Bob"}}, + create: {name: "Bob", age: 40}, + update: {age: 40} + ) { + name + age + } + }`, + Results: map[string]any{ + "upsert_Users": []map[string]any{ + { + "name": "Bob", + "age": int64(40), + }, + }, + }, + }, + testUtils.Request{ + Request: `query { + Users { + name + age + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Alice", + "age": int64(40), + }, + { + "name": "Bob", + "age": int64(40), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpsertSimple_WithFilterMatchMultiple_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple upsert mutation with multiple filter matches", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + age: Int + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Bob", + "age": 30 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Alice", + "age": 40 + }`, + }, + testUtils.Request{ + Request: `mutation { + upsert_Users( + filter: {}, + create: {name: "Alice", age: 40}, + update: {age: 50} + ) { + name + age + } + }`, + ExpectedError: `cannot upsert multiple matching documents`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpsertSimple_WithNullCreateInput_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple upsert mutation with null create input", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + age: Int + } + `, + }, + testUtils.Request{ + Request: `mutation { + upsert_Users( + filter: {}, + create: null, + update: {age: 50} + ) { + name + age + } + }`, + ExpectedError: `Argument "create" has invalid value `, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpsertSimple_WithNullUpdateInput_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple upsert mutation with null update input", + Actions: []any{ + testUtils.SchemaUpdate{ 
+ Schema: ` + type Users { + name: String + age: Int + } + `, + }, + testUtils.Request{ + Request: `mutation { + upsert_Users( + filter: {}, + create: {name: "Alice", age: 40}, + update: null, + ) { + name + age + } + }`, + ExpectedError: `Argument "update" has invalid value `, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpsertSimple_WithNullFilterInput_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple upsert mutation with null filter input", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + age: Int + } + `, + }, + testUtils.Request{ + Request: `mutation { + upsert_Users( + filter: null, + create: {name: "Alice", age: 40}, + update: {age: 50} + ) { + name + age + } + }`, + ExpectedError: `Argument "filter" has invalid value `, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationUpsertSimple_WithUniqueCompositeIndexAndDuplicateUpdate_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple upsert mutation with unique composite index and update", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users @index(includes: [{name: "name"}, {name: "age"}], unique: true) { + name: String + age: Int + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Alice", + "age": 40 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Bob", + "age": 50 + }`, + }, + testUtils.Request{ + Request: `mutation { + upsert_Users( + filter: {name: {_eq: "Bob"}}, + create: {name: "Alice", age: 40}, + update: {name: "Alice", age: 40} + ) { + name + age + } + }`, + ExpectedError: `can not index a doc's field(s) that violates unique index`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From adf0c11d930ed1cecf935a62841f66575d99abca Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 30 Sep 2024 11:14:51 -0400 Subject: [PATCH 45/71] bot: Update dependencies (bulk dependabot PRs) 30-09-2024 (#3088) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by combining the following PRs: #3084 bot: Bump @vitejs/plugin-react-swc from 3.7.0 to 3.7.1 in /playground #3083 bot: Bump @types/react from 18.3.8 to 18.3.10 in /playground #3082 bot: Bump vite from 5.4.7 to 5.4.8 in /playground #3081 bot: Bump @typescript-eslint/eslint-plugin from 8.6.0 to 8.7.0 in /playground --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 267 ++++++++++++----------------------- playground/package.json | 8 +- 2 files changed, 93 insertions(+), 182 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 3b27f0ff32..a08f776df2 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -15,17 +15,17 @@ "swagger-ui-react": "^5.17.14" }, "devDependencies": { - "@types/react": "^18.3.8", + "@types/react": "^18.3.10", "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^8.6.0", + "@typescript-eslint/eslint-plugin": "^8.7.0", "@typescript-eslint/parser": "^8.7.0", - "@vitejs/plugin-react-swc": "^3.7.0", + "@vitejs/plugin-react-swc": "^3.7.1", "eslint": "^9.11.1", "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-react-refresh": "^0.4.12", "typescript": "^5.6.2", - "vite": "^5.4.7" + "vite": "^5.4.8" } }, "node_modules/@babel/runtime": { @@ 
-2171,12 +2171,11 @@ } }, "node_modules/@swc/core": { - "version": "1.7.14", - "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.7.14.tgz", - "integrity": "sha512-9aeXeifnyuvc2pcuuhPQgVUwdpGEzZ+9nJu0W8/hNl/aESFsJGR5i9uQJRGu0atoNr01gK092fvmqMmQAPcKow==", + "version": "1.7.26", + "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.7.26.tgz", + "integrity": "sha512-f5uYFf+TmMQyYIoxkn/evWhNGuUzC730dFwAKGwBVHHVoPyak1/GvJUm6i1SKl+2Hrj9oN0i3WSoWWZ4pgI8lw==", "dev": true, "hasInstallScript": true, - "license": "Apache-2.0", "dependencies": { "@swc/counter": "^0.1.3", "@swc/types": "^0.1.12" @@ -2189,16 +2188,16 @@ "url": "https://opencollective.com/swc" }, "optionalDependencies": { - "@swc/core-darwin-arm64": "1.7.14", - "@swc/core-darwin-x64": "1.7.14", - "@swc/core-linux-arm-gnueabihf": "1.7.14", - "@swc/core-linux-arm64-gnu": "1.7.14", - "@swc/core-linux-arm64-musl": "1.7.14", - "@swc/core-linux-x64-gnu": "1.7.14", - "@swc/core-linux-x64-musl": "1.7.14", - "@swc/core-win32-arm64-msvc": "1.7.14", - "@swc/core-win32-ia32-msvc": "1.7.14", - "@swc/core-win32-x64-msvc": "1.7.14" + "@swc/core-darwin-arm64": "1.7.26", + "@swc/core-darwin-x64": "1.7.26", + "@swc/core-linux-arm-gnueabihf": "1.7.26", + "@swc/core-linux-arm64-gnu": "1.7.26", + "@swc/core-linux-arm64-musl": "1.7.26", + "@swc/core-linux-x64-gnu": "1.7.26", + "@swc/core-linux-x64-musl": "1.7.26", + "@swc/core-win32-arm64-msvc": "1.7.26", + "@swc/core-win32-ia32-msvc": "1.7.26", + "@swc/core-win32-x64-msvc": "1.7.26" }, "peerDependencies": { "@swc/helpers": "*" @@ -2210,14 +2209,13 @@ } }, "node_modules/@swc/core-darwin-arm64": { - "version": "1.7.14", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.7.14.tgz", - "integrity": "sha512-V0OUXjOH+hdGxDYG8NkQzy25mKOpcNKFpqtZEzLe5V/CpLJPnpg1+pMz70m14s9ZFda9OxsjlvPbg1FLUwhgIQ==", + "version": "1.7.26", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.7.26.tgz", + "integrity": "sha512-FF3CRYTg6a7ZVW4yT9mesxoVVZTrcSWtmZhxKCYJX9brH4CS/7PRPjAKNk6kzWgWuRoglP7hkjQcd6EpMcZEAw==", "cpu": [ "arm64" ], "dev": true, - "license": "Apache-2.0 AND MIT", "optional": true, "os": [ "darwin" @@ -2227,14 +2225,13 @@ } }, "node_modules/@swc/core-darwin-x64": { - "version": "1.7.14", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.7.14.tgz", - "integrity": "sha512-9iFvUnxG6FC3An5ogp5jbBfQuUmTTwy8KMB+ZddUoPB3NR1eV+Y9vOh/tfWcenSJbgOKDLgYC5D/b1mHAprsrQ==", + "version": "1.7.26", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.7.26.tgz", + "integrity": "sha512-az3cibZdsay2HNKmc4bjf62QVukuiMRh5sfM5kHR/JMTrLyS6vSw7Ihs3UTkZjUxkLTT8ro54LI6sV6sUQUbLQ==", "cpu": [ "x64" ], "dev": true, - "license": "Apache-2.0 AND MIT", "optional": true, "os": [ "darwin" @@ -2244,14 +2241,13 @@ } }, "node_modules/@swc/core-linux-arm-gnueabihf": { - "version": "1.7.14", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.7.14.tgz", - "integrity": "sha512-zGJsef9qPivKSH8Vv4F/HiBXBTHZ5Hs3ZjVGo/UIdWPJF8fTL9OVADiRrl34Q7zOZEtGXRwEKLUW1SCQcbDvZA==", + "version": "1.7.26", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.7.26.tgz", + "integrity": "sha512-VYPFVJDO5zT5U3RpCdHE5v1gz4mmR8BfHecUZTmD2v1JeFY6fv9KArJUpjrHEEsjK/ucXkQFmJ0jaiWXmpOV9Q==", "cpu": [ "arm" ], "dev": true, - "license": "Apache-2.0", "optional": true, "os": [ "linux" @@ -2261,14 +2257,13 @@ } }, 
"node_modules/@swc/core-linux-arm64-gnu": { - "version": "1.7.14", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.7.14.tgz", - "integrity": "sha512-AxV3MPsoI7i4B8FXOew3dx3N8y00YoJYvIPfxelw07RegeCEH3aHp2U2DtgbP/NV1ugZMx0TL2Z2DEvocmA51g==", + "version": "1.7.26", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.7.26.tgz", + "integrity": "sha512-YKevOV7abpjcAzXrhsl+W48Z9mZvgoVs2eP5nY+uoMAdP2b3GxC0Df1Co0I90o2lkzO4jYBpTMcZlmUXLdXn+Q==", "cpu": [ "arm64" ], "dev": true, - "license": "Apache-2.0 AND MIT", "optional": true, "os": [ "linux" @@ -2278,14 +2273,13 @@ } }, "node_modules/@swc/core-linux-arm64-musl": { - "version": "1.7.14", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.7.14.tgz", - "integrity": "sha512-JDLdNjUj3zPehd4+DrQD8Ltb3B5lD8D05IwePyDWw+uR/YPc7w/TX1FUVci5h3giJnlMCJRvi1IQYV7K1n7KtQ==", + "version": "1.7.26", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.7.26.tgz", + "integrity": "sha512-3w8iZICMkQQON0uIcvz7+Q1MPOW6hJ4O5ETjA0LSP/tuKqx30hIniCGOgPDnv3UTMruLUnQbtBwVCZTBKR3Rkg==", "cpu": [ "arm64" ], "dev": true, - "license": "Apache-2.0 AND MIT", "optional": true, "os": [ "linux" @@ -2295,14 +2289,13 @@ } }, "node_modules/@swc/core-linux-x64-gnu": { - "version": "1.7.14", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.7.14.tgz", - "integrity": "sha512-Siy5OvPCLLWmMdx4msnEs8HvEVUEigSn0+3pbLjv78iwzXd0qSBNHUPZyC1xeurVaUbpNDxZTpPRIwpqNE2+Og==", + "version": "1.7.26", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.7.26.tgz", + "integrity": "sha512-c+pp9Zkk2lqb06bNGkR2Looxrs7FtGDMA4/aHjZcCqATgp348hOKH5WPvNLBl+yPrISuWjbKDVn3NgAvfvpH4w==", "cpu": [ "x64" ], "dev": true, - "license": "Apache-2.0 AND MIT", "optional": true, "os": [ "linux" @@ -2312,14 +2305,13 @@ } }, "node_modules/@swc/core-linux-x64-musl": { - "version": "1.7.14", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.7.14.tgz", - "integrity": "sha512-FtEGm9mwtRYQNK43WMtUIadxHs/ja2rnDurB99os0ZoFTGG2IHuht2zD97W0wB8JbqEabT1XwSG9Y5wmN+ciEQ==", + "version": "1.7.26", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.7.26.tgz", + "integrity": "sha512-PgtyfHBF6xG87dUSSdTJHwZ3/8vWZfNIXQV2GlwEpslrOkGqy+WaiiyE7Of7z9AvDILfBBBcJvJ/r8u980wAfQ==", "cpu": [ "x64" ], "dev": true, - "license": "Apache-2.0 AND MIT", "optional": true, "os": [ "linux" @@ -2329,14 +2321,13 @@ } }, "node_modules/@swc/core-win32-arm64-msvc": { - "version": "1.7.14", - "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.7.14.tgz", - "integrity": "sha512-Jp8KDlfq7Ntt2/BXr0y344cYgB1zf0DaLzDZ1ZJR6rYlAzWYSccLYcxHa97VGnsYhhPspMpmCvHid97oe2hl4A==", + "version": "1.7.26", + "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.7.26.tgz", + "integrity": "sha512-9TNXPIJqFynlAOrRD6tUQjMq7KApSklK3R/tXgIxc7Qx+lWu8hlDQ/kVPLpU7PWvMMwC/3hKBW+p5f+Tms1hmA==", "cpu": [ "arm64" ], "dev": true, - "license": "Apache-2.0 AND MIT", "optional": true, "os": [ "win32" @@ -2346,14 +2337,13 @@ } }, "node_modules/@swc/core-win32-ia32-msvc": { - "version": "1.7.14", - "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.7.14.tgz", - "integrity": 
"sha512-I+cFsXF0OU0J9J4zdWiQKKLURO5dvCujH9Jr8N0cErdy54l9d4gfIxdctfTF+7FyXtWKLTCkp+oby9BQhkFGWA==", + "version": "1.7.26", + "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.7.26.tgz", + "integrity": "sha512-9YngxNcG3177GYdsTum4V98Re+TlCeJEP4kEwEg9EagT5s3YejYdKwVAkAsJszzkXuyRDdnHUpYbTrPG6FiXrQ==", "cpu": [ "ia32" ], "dev": true, - "license": "Apache-2.0 AND MIT", "optional": true, "os": [ "win32" @@ -2363,14 +2353,13 @@ } }, "node_modules/@swc/core-win32-x64-msvc": { - "version": "1.7.14", - "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.7.14.tgz", - "integrity": "sha512-NNrprQCK6d28mG436jVo2TD+vACHseUECacEBGZ9Ef0qfOIWS1XIt2MisQKG0Oea2VvLFl6tF/V4Lnx/H0Sn3Q==", + "version": "1.7.26", + "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.7.26.tgz", + "integrity": "sha512-VR+hzg9XqucgLjXxA13MtV5O3C0bK0ywtLIBw/+a+O+Oc6mxFWHtdUeXDbIi5AiPbn0fjgVJMqYnyjGyyX8u0w==", "cpu": [ "x64" ], "dev": true, - "license": "Apache-2.0 AND MIT", "optional": true, "os": [ "win32" @@ -2383,15 +2372,13 @@ "version": "0.1.3", "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", "integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==", - "dev": true, - "license": "Apache-2.0" + "dev": true }, "node_modules/@swc/types": { "version": "0.1.12", "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.12.tgz", "integrity": "sha512-wBJA+SdtkbFhHjTMYH+dEH1y4VpfGdAc2Kw/LK09i9bXd/K6j6PkDcFCEzb6iVfZMkPRrl/q0e3toqTAJdkIVA==", "dev": true, - "license": "Apache-2.0", "dependencies": { "@swc/counter": "^0.1.3" } @@ -2467,9 +2454,9 @@ } }, "node_modules/@types/react": { - "version": "18.3.8", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.8.tgz", - "integrity": "sha512-syBUrW3/XpnW4WJ41Pft+I+aPoDVbrBVQGEnbD7NijDGlVC+8gV/XKRY+7vMDlfPpbwYt0l1vd/Sj8bJGMbs9Q==", + "version": "18.3.10", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.10.tgz", + "integrity": "sha512-02sAAlBnP39JgXwkAq3PeU9DVaaGpZyF3MGcC0MKgQVkZor5IiiDAipVaxQHtDJAmO4GIy/rVBy/LzVj76Cyqg==", "devOptional": true, "dependencies": { "@types/prop-types": "*", @@ -2517,16 +2504,16 @@ "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.6.0.tgz", - "integrity": "sha512-UOaz/wFowmoh2G6Mr9gw60B1mm0MzUtm6Ic8G2yM1Le6gyj5Loi/N+O5mocugRGY+8OeeKmkMmbxNqUCq3B4Sg==", + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.7.0.tgz", + "integrity": "sha512-RIHOoznhA3CCfSTFiB6kBGLQtB/sox+pJ6jeFu6FxJvqL8qRxq/FfGO/UhsGgQM9oGdXkV4xUgli+dt26biB6A==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.6.0", - "@typescript-eslint/type-utils": "8.6.0", - "@typescript-eslint/utils": "8.6.0", - "@typescript-eslint/visitor-keys": "8.6.0", + "@typescript-eslint/scope-manager": "8.7.0", + "@typescript-eslint/type-utils": "8.7.0", + "@typescript-eslint/utils": "8.7.0", + "@typescript-eslint/visitor-keys": "8.7.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", @@ -2577,7 +2564,7 @@ } } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { + "node_modules/@typescript-eslint/scope-manager": { "version": "8.7.0", "resolved": 
"https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.7.0.tgz", "integrity": "sha512-87rC0k3ZlDOuz82zzXRtQ7Akv3GKhHs0ti4YcbAJtaomllXoSO8hi7Ix3ccEvCd824dy9aIX+j3d2UMAfCtVpg==", @@ -2594,89 +2581,14 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.7.0.tgz", - "integrity": "sha512-LLt4BLHFwSfASHSF2K29SZ+ZCsbQOM+LuarPjRUuHm+Qd09hSe3GCeaQbcCr+Mik+0QFRmep/FyZBO6fJ64U3w==", - "dev": true, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.7.0.tgz", - "integrity": "sha512-MC8nmcGHsmfAKxwnluTQpNqceniT8SteVwd2voYlmiSWGOtjvGXdPl17dYu2797GVscK30Z04WRM28CrKS9WOg==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.7.0", - "@typescript-eslint/visitor-keys": "8.7.0", - "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^1.3.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.7.0.tgz", - "integrity": "sha512-b1tx0orFCCh/THWPQa2ZwWzvOeyzzp36vkJYOpVg0u8UVOIsfVrnuC9FqAw9gRKn+rG2VmWQ/zDJZzkxUnj/XQ==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.7.0", - "eslint-visitor-keys": "^3.4.3" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/scope-manager": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.6.0.tgz", - "integrity": "sha512-ZuoutoS5y9UOxKvpc/GkvF4cuEmpokda4wRg64JEia27wX+PysIE9q+lzDtlHHgblwUWwo5/Qn+/WyTUvDwBHw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.6.0", - "@typescript-eslint/visitor-keys": "8.6.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.6.0.tgz", - "integrity": "sha512-dtePl4gsuenXVwC7dVNlb4mGDcKjDT/Ropsk4za/ouMBPplCLyznIaR+W65mvCvsyS97dymoBRrioEXI7k0XIg==", + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.7.0.tgz", + "integrity": "sha512-tl0N0Mj3hMSkEYhLkjREp54OSb/FI6qyCzfiiclvJvOqre6hsZTGSnHtmFLDU8TIM62G7ygEa1bI08lcuRwEnQ==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "8.6.0", - "@typescript-eslint/utils": "8.6.0", + "@typescript-eslint/typescript-estree": "8.7.0", + "@typescript-eslint/utils": 
"8.7.0", "debug": "^4.3.4", "ts-api-utils": "^1.3.0" }, @@ -2694,9 +2606,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.6.0.tgz", - "integrity": "sha512-rojqFZGd4MQxw33SrOy09qIDS8WEldM8JWtKQLAjf/X5mGSeEFh5ixQlxssMNyPslVIk9yzWqXCsV2eFhYrYUw==", + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.7.0.tgz", + "integrity": "sha512-LLt4BLHFwSfASHSF2K29SZ+ZCsbQOM+LuarPjRUuHm+Qd09hSe3GCeaQbcCr+Mik+0QFRmep/FyZBO6fJ64U3w==", "dev": true, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2707,13 +2619,13 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.6.0.tgz", - "integrity": "sha512-MOVAzsKJIPIlLK239l5s06YXjNqpKTVhBVDnqUumQJja5+Y94V3+4VUFRA0G60y2jNnTVwRCkhyGQpavfsbq/g==", + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.7.0.tgz", + "integrity": "sha512-MC8nmcGHsmfAKxwnluTQpNqceniT8SteVwd2voYlmiSWGOtjvGXdPl17dYu2797GVscK30Z04WRM28CrKS9WOg==", "dev": true, "dependencies": { - "@typescript-eslint/types": "8.6.0", - "@typescript-eslint/visitor-keys": "8.6.0", + "@typescript-eslint/types": "8.7.0", + "@typescript-eslint/visitor-keys": "8.7.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", @@ -2735,15 +2647,15 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.6.0.tgz", - "integrity": "sha512-eNp9cWnYf36NaOVjkEUznf6fEgVy1TWpE0o52e4wtojjBx7D1UV2WAWGzR+8Y5lVFtpMLPwNbC67T83DWSph4A==", + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.7.0.tgz", + "integrity": "sha512-ZbdUdwsl2X/s3CiyAu3gOlfQzpbuG3nTWKPoIvAu1pu5r8viiJvv2NPN2AqArL35NCYtw/lrPPfM4gxrMLNLPw==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", - "@typescript-eslint/scope-manager": "8.6.0", - "@typescript-eslint/types": "8.6.0", - "@typescript-eslint/typescript-estree": "8.6.0" + "@typescript-eslint/scope-manager": "8.7.0", + "@typescript-eslint/types": "8.7.0", + "@typescript-eslint/typescript-estree": "8.7.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2757,12 +2669,12 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.6.0.tgz", - "integrity": "sha512-wapVFfZg9H0qOYh4grNVQiMklJGluQrOUiOhYRrQWhx7BY/+I1IYb8BczWNbbUpO+pqy0rDciv3lQH5E1bCLrg==", + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.7.0.tgz", + "integrity": "sha512-b1tx0orFCCh/THWPQa2ZwWzvOeyzzp36vkJYOpVg0u8UVOIsfVrnuC9FqAw9gRKn+rG2VmWQ/zDJZzkxUnj/XQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "8.6.0", + "@typescript-eslint/types": "8.7.0", "eslint-visitor-keys": "^3.4.3" }, "engines": { @@ -2774,13 +2686,12 @@ } }, "node_modules/@vitejs/plugin-react-swc": { - "version": "3.7.0", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.7.0.tgz", - "integrity": "sha512-yrknSb3Dci6svCd/qhHqhFPDSw0QtjumcqdKMoNNzmOl5lMXTTiqzjWtG4Qask2HdvvzaNgSunbQGet8/GrKdA==", + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.7.1.tgz", + 
"integrity": "sha512-vgWOY0i1EROUK0Ctg1hwhtC3SdcDjZcdit4Ups4aPkDcB1jYhmo+RMYWY87cmXMhvtD5uf8lV89j2w16vkdSVg==", "dev": true, - "license": "MIT", "dependencies": { - "@swc/core": "^1.5.7" + "@swc/core": "^1.7.26" }, "peerDependencies": { "vite": "^4 || ^5" @@ -6009,9 +5920,9 @@ "optional": true }, "node_modules/vite": { - "version": "5.4.7", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.7.tgz", - "integrity": "sha512-5l2zxqMEPVENgvzTuBpHer2awaetimj2BGkhBPdnwKbPNOlHsODU+oiazEZzLK7KhAnOrO+XGYJYn4ZlUhDtDQ==", + "version": "5.4.8", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.8.tgz", + "integrity": "sha512-FqrItQ4DT1NC4zCUqMB4c4AZORMKIa0m8/URVCZ77OZ/QSNeJ54bU1vrFADbDsuwfIPcgknRkmqakQcgnL4GiQ==", "dev": true, "dependencies": { "esbuild": "^0.21.3", diff --git a/playground/package.json b/playground/package.json index 39a56dd3f9..658c97c8d2 100644 --- a/playground/package.json +++ b/playground/package.json @@ -17,16 +17,16 @@ "swagger-ui-react": "^5.17.14" }, "devDependencies": { - "@types/react": "^18.3.8", + "@types/react": "^18.3.10", "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^8.6.0", + "@typescript-eslint/eslint-plugin": "^8.7.0", "@typescript-eslint/parser": "^8.7.0", - "@vitejs/plugin-react-swc": "^3.7.0", + "@vitejs/plugin-react-swc": "^3.7.1", "eslint": "^9.11.1", "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-react-refresh": "^0.4.12", "typescript": "^5.6.2", - "vite": "^5.4.7" + "vite": "^5.4.8" } } From 6b1f97fb6dedfbf46f087a38bad6206a4925f72e Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 30 Sep 2024 09:59:19 -0700 Subject: [PATCH 46/71] fix(i): Null compound filter panic (#3080) ## Relevant issue(s) Resolves #3079 ## Description This PR fixes an issue where null values within compound filters would panic. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? 
Added integration tests Specify the platform(s) on which this was tested: - MacOS --- internal/connor/and.go | 3 + internal/connor/or.go | 5 +- internal/planner/mapper/mapper.go | 15 +- internal/planner/mapper/targetable.go | 11 +- internal/request/graphql/schema/generate.go | 6 +- .../query/simple/with_null_input_test.go | 210 ++++++++++++++++++ tests/integration/schema/default_fields.go | 8 +- tests/integration/schema/filter_test.go | 8 +- 8 files changed, 246 insertions(+), 20 deletions(-) diff --git a/internal/connor/and.go b/internal/connor/and.go index be2e097309..d302617dc8 100644 --- a/internal/connor/and.go +++ b/internal/connor/and.go @@ -16,6 +16,9 @@ func and(condition, data any) (bool, error) { } return true, nil + case nil: + return true, nil + default: return false, client.NewErrUnhandledType("condition", cn) } diff --git a/internal/connor/or.go b/internal/connor/or.go index 6273155e7d..c15e27393f 100644 --- a/internal/connor/or.go +++ b/internal/connor/or.go @@ -14,8 +14,11 @@ func or(condition, data any) (bool, error) { return true, nil } } - return false, nil + + case nil: + return true, nil + default: return false, client.NewErrUnhandledType("condition", cn) } diff --git a/internal/planner/mapper/mapper.go b/internal/planner/mapper/mapper.go index 8aeab3c22e..dfadd2f06c 100644 --- a/internal/planner/mapper/mapper.go +++ b/internal/planner/mapper/mapper.go @@ -964,9 +964,12 @@ func resolveInnerFilterDependencies( ) ([]Requestable, error) { newFields := []Requestable{} - for key := range source { + for key, value := range source { if key == request.FilterOpAnd || key == request.FilterOpOr { - compoundFilter := source[key].([]any) + if value == nil { + continue + } + compoundFilter := value.([]any) for _, innerFilter := range compoundFilter { innerFields, err := resolveInnerFilterDependencies( ctx, @@ -987,7 +990,10 @@ func resolveInnerFilterDependencies( } continue } else if key == request.FilterOpNot { - notFilter := source[key].(map[string]any) + if value == nil { + continue + } + notFilter := value.(map[string]any) innerFields, err := resolveInnerFilterDependencies( ctx, store, @@ -1044,8 +1050,7 @@ func resolveInnerFilterDependencies( newFields = append(newFields, childSelect) } - childSource := source[key] - childFilter, isChildFilter := childSource.(map[string]any) + childFilter, isChildFilter := value.(map[string]any) if !isChildFilter { // If the filter is not a child filter then the will be no inner dependencies to add and // we can continue. 
diff --git a/internal/planner/mapper/targetable.go b/internal/planner/mapper/targetable.go index a45e99a516..f85e6c8016 100644 --- a/internal/planner/mapper/targetable.go +++ b/internal/planner/mapper/targetable.go @@ -126,7 +126,10 @@ func filterObjectToMap(mapping *core.DocumentMapping, obj map[connor.FilterKey]a case *Operator: switch keyType.Operation { case request.FilterOpAnd, request.FilterOpOr: - v := v.([]any) + v, ok := v.([]any) + if !ok { + continue // value is nil + } logicMapEntries := make([]any, len(v)) for i, item := range v { itemMap := item.(map[connor.FilterKey]any) @@ -134,8 +137,10 @@ func filterObjectToMap(mapping *core.DocumentMapping, obj map[connor.FilterKey]a } outmap[keyType.Operation] = logicMapEntries case request.FilterOpNot: - itemMap := v.(map[connor.FilterKey]any) - outmap[keyType.Operation] = filterObjectToMap(mapping, itemMap) + itemMap, ok := v.(map[connor.FilterKey]any) + if ok { + outmap[keyType.Operation] = filterObjectToMap(mapping, itemMap) + } default: outmap[keyType.Operation] = v } diff --git a/internal/request/graphql/schema/generate.go b/internal/request/graphql/schema/generate.go index c2850f79ce..85491f5ee1 100644 --- a/internal/request/graphql/schema/generate.go +++ b/internal/request/graphql/schema/generate.go @@ -1138,11 +1138,11 @@ func (g *Generator) genTypeFilterArgInput(obj *gql.Object) *gql.InputObject { fields["_and"] = &gql.InputObjectFieldConfig{ Description: schemaTypes.AndOperatorDescription, - Type: gql.NewList(selfRefType), + Type: gql.NewList(gql.NewNonNull(selfRefType)), } fields["_or"] = &gql.InputObjectFieldConfig{ Description: schemaTypes.OrOperatorDescription, - Type: gql.NewList(selfRefType), + Type: gql.NewList(gql.NewNonNull(selfRefType)), } fields["_not"] = &gql.InputObjectFieldConfig{ Description: schemaTypes.NotOperatorDescription, @@ -1220,7 +1220,7 @@ func (g *Generator) genLeafFilterArgInput(obj gql.Type) *gql.InputObject { fields := gql.InputObjectConfigFieldMap{} compoundListType := &gql.InputObjectFieldConfig{ - Type: gql.NewList(selfRefType), + Type: gql.NewList(gql.NewNonNull(selfRefType)), } fields["_and"] = compoundListType diff --git a/tests/integration/query/simple/with_null_input_test.go b/tests/integration/query/simple/with_null_input_test.go index 9f11d14a8e..f81219051b 100644 --- a/tests/integration/query/simple/with_null_input_test.go +++ b/tests/integration/query/simple/with_null_input_test.go @@ -334,3 +334,213 @@ func TestQuerySimple_WithNullShowDeleted_Succeeds(t *testing.T) { executeTestCase(t, test) } + +func TestQuerySimple_WithFilterWithNullOr_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with filter with null or", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {_or: null}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithFilterWithNullOrElement_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with filter with null or element", + Actions: []any{ + testUtils.Request{ + Request: `query { + Users(filter: {_or: [null]}) { + Name + } + }`, + ExpectedError: `Expected "UsersFilterArg!", found null`, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithFilterWithNullOrField_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with filter with 
or with null field", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": null + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {_or: [{Name: null}]}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": nil, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithFilterWithNullAnd_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with filter with null and", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {_and: null}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithFilterWithNullAndElement_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with filter with null and element", + Actions: []any{ + testUtils.Request{ + Request: `query { + Users(filter: {_and: [null]}) { + Name + } + }`, + ExpectedError: `Expected "UsersFilterArg!", found null`, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithFilterWithNullAndField_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with filter with and with null field", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": null + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {_and: [{Name: null}]}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": nil, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithFilterWithNullNot_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with filter with null not", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {_not: null}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithFilterWithNullNotField_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query, with filter with null not field", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {_not: {Name: null}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/schema/default_fields.go b/tests/integration/schema/default_fields.go index 51b224bd93..6462ef6066 100644 --- a/tests/integration/schema/default_fields.go +++ b/tests/integration/schema/default_fields.go @@ -215,14 +215,14 @@ func buildFilterArg(objectName string, fields []argDef) Field { inputFields := []any{ makeInputObject("_and", nil, map[string]any{ - "kind": "INPUT_OBJECT", - "name": filterArgName, + "kind": "NON_NULL", + "name": nil, }), makeInputObject("_docID", "IDOperatorBlock", nil), makeInputObject("_not", filterArgName, nil), makeInputObject("_or", nil, map[string]any{ - "kind": "INPUT_OBJECT", - "name": filterArgName, + "kind": "NON_NULL", + "name": nil, }), } diff --git a/tests/integration/schema/filter_test.go b/tests/integration/schema/filter_test.go index bf7617a3b6..c3dc47d668 100644 --- a/tests/integration/schema/filter_test.go +++ 
b/tests/integration/schema/filter_test.go @@ -71,7 +71,7 @@ func TestFilterForSimpleSchema(t *testing.T) { "type": map[string]any{ "name": nil, "ofType": map[string]any{ - "name": "UsersFilterArg", + "name": nil, }, }, }, @@ -94,7 +94,7 @@ func TestFilterForSimpleSchema(t *testing.T) { "type": map[string]any{ "name": nil, "ofType": map[string]any{ - "name": "UsersFilterArg", + "name": nil, }, }, }, @@ -203,7 +203,7 @@ func TestFilterForOneToOneSchema(t *testing.T) { "type": map[string]any{ "name": nil, "ofType": map[string]any{ - "name": "BookFilterArg", + "name": nil, }, }, }, @@ -226,7 +226,7 @@ func TestFilterForOneToOneSchema(t *testing.T) { "type": map[string]any{ "name": nil, "ofType": map[string]any{ - "name": "BookFilterArg", + "name": nil, }, }, }, From 3101c612d084e0978621d3fcd290f9d0c4efc881 Mon Sep 17 00:00:00 2001 From: Shahzad Lone Date: Mon, 30 Sep 2024 14:46:17 -0400 Subject: [PATCH 47/71] ci(i): Fix vulnerabilities scan by ignoring x/crisis (#3091) ## Relevant issue(s) Resolves #3090 ## Description - Hacky fix for the vulnerability scanner until they improve the tool (or we solve the vulnerability). - Make the vulnerability scan fail if new vulnerabilities are introduced and pass if previously known ones remain. ## How has this been tested? - Tried introducing a vulnerability and it works and ignores the current `x/crisis` one - Run: https://github.com/sourcenetwork/defradb/actions/runs/11110357130/job/30867868823?pr=3091 --- .github/workflows/check-vulnerabilities.yml | 28 ++++++++++++++++++--- Makefile | 5 ++++ 2 files changed, 30 insertions(+), 3 deletions(-) diff --git a/.github/workflows/check-vulnerabilities.yml b/.github/workflows/check-vulnerabilities.yml index 5ebb3192b1..82d05e80ca 100644 --- a/.github/workflows/check-vulnerabilities.yml +++ b/.github/workflows/check-vulnerabilities.yml @@ -30,10 +30,32 @@ jobs: runs-on: ubuntu-latest steps: - - name: Run govulncheck - uses: golang/govulncheck-action@v1 + - name: Checkout code into the directory + uses: actions/checkout@v4 + + - name: Setup Go environment explicitly + uses: actions/setup-go@v5 with: go-version-file: 'go.mod' check-latest: true cache: false - go-package: ./... + + - name: Install govulncheck + run: make deps:vulncheck + + - name: Run govulncheck scan + run: govulncheck -C . -format text ./... | tee govulncheck.txt + + - name: Check if only known vulnerabilities were found (there are new vulnerabilities if this fails) + run: cat govulncheck.txt | grep "Your code is affected by 2 vulnerabilities from 1 module." + + # Use the steps below once the x/crisis (crisis.init) bug is fixed or if the + # ability to silence is implemented: https://github.com/golang/go/issues/61211 + #steps: + # - name: Run govulncheck + # uses: golang/govulncheck-action@v1 + # with: + # go-version-file: 'go.mod' + # check-latest: true + # cache: false + # go-package: ./... diff --git a/Makefile b/Makefile index c84a0a118e..b5fb0ea43a 100644 --- a/Makefile +++ b/Makefile @@ -135,6 +135,10 @@ else $(info YAML linter 'yamllint' already installed.)
endif +.PHONY: deps\:vulncheck +deps\:vulncheck: + go install golang.org/x/vuln/cmd/govulncheck@latest + .PHONY: deps\:lint deps\:lint: @$(MAKE) deps:lint-go && \ @@ -172,6 +176,7 @@ deps: $(MAKE) deps:bench && \ $(MAKE) deps:chglog && \ $(MAKE) deps:lint && \ + $(MAKE) deps:vulncheck && \ $(MAKE) deps:test && \ $(MAKE) deps:mocks From c9863123596e71b90ce2ad3ec5a1f97986e94d18 Mon Sep 17 00:00:00 2001 From: Shahzad Lone Date: Tue, 1 Oct 2024 20:22:08 -0400 Subject: [PATCH 48/71] feat: Ability to relate private documents to actors (#2907) ## Relevant issue(s) Resolves #2762 ## Description This PR introduces the ability to make use of the `relation`s defined within a policy to create relationships between an actor and a document within a collection. For users' sake, I have made the clients (http and cli) not consume the `policyID` and `resource` name but instead a `docID` and collection name; since the collection will have the policy and resource information available, we can fetch that and make life easier for users. This PR also makes use of the `manages` feature we have had in our policy. The `manages` declaration essentially defines who can make the relationship manipulation requests. There are a lot of tests in this PR due to the many edge cases I wanted to have tested, specific to `manager`, and to ensure `write` and `read` permissions don't leak (i.e. are accidentally granted). ## CLI Demo The following lets the target actor read the private document: ```bash defradb client acp relationship add \ --collection Users \ --docID bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c \ --relation reader \ --actor did:key:z7r8os2G88XXBNBTLj3kFR5rzUJ4VAesbX7PgsA68ak9B5RYcXF5EZEmjRzzinZndPSSwujXb4XKHG6vmKEFG6ZfsfcQn \ --identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac ``` Result: ```json { "ExistedAlready": false // <-------------- Indicates a new relationship was formed } ``` ### Future (out-of-scope of this PR): - Most of write tests will split into `delete` and `update` in #2905 - Ability to revoke or delete relation coming in #2906 - Decide on the `can't write if no read permission` in #2992 - Move acp logic to a shared repo: https://github.com/sourcenetwork/defradb/issues/2980 ## How has this been tested?
- Integration tests Specify the platform(s) on which this was tested: - Manjaro WSL2 --- acp/README.md | 203 +++ acp/acp.go | 16 + acp/acp_local.go | 31 + acp/acp_local_test.go | 286 +++- acp/acp_source_hub.go | 50 + acp/errors.go | 69 +- acp/source_hub_client.go | 85 ++ cli/acp_relationship.go | 25 + cli/acp_relationship_add.go | 130 ++ cli/cli.go | 12 +- client/{policy.go => acp.go} | 7 + client/db.go | 14 + client/errors.go | 39 +- client/mocks/db.go | 60 + .../references/cli/defradb_client_acp.md | 1 + .../cli/defradb_client_acp_relationship.md | 41 + .../defradb_client_acp_relationship_add.md | 81 ++ docs/website/references/http/openapi.json | 30 + examples/dpi_policy/user_dpi_policy.json | 1 + examples/dpi_policy/user_dpi_policy.yml | 2 + .../user_dpi_policy_with_manages.yml | 49 + http/client_acp.go | 50 + http/handler_acp.go | 45 + internal/db/db.go | 44 +- internal/db/permission/check.go | 2 +- internal/db/permission/permission.go | 4 +- internal/db/permission/register.go | 2 +- tests/clients/cli/wrapper.go | 20 - tests/clients/cli/wrapper_acp.go | 66 + tests/clients/http/wrapper.go | 16 + tests/integration/acp.go | 257 +++- ...icator_with_doc_actor_relationship_test.go | 219 +++ ...scribe_with_doc_actor_relationship_test.go | 225 +++ ...oc_actor_collection_with_no_policy_test.go | 66 + .../add_doc_actor_invalid_test.go | 545 +++++++ .../add_doc_actor_with_delete_test.go | 505 +++++++ .../add_doc_actor_with_dummy_relation_test.go | 302 ++++ .../add_doc_actor_with_manager_gql_test.go | 604 ++++++++ .../add_doc_actor_with_manager_test.go | 1286 +++++++++++++++++ .../add_doc_actor_with_only_write_gql_test.go | 198 +++ .../add_doc_actor_with_only_write_test.go | 359 +++++ ...add_doc_actor_with_public_document_test.go | 147 ++ .../add_doc_actor_with_reader_gql_test.go | 204 +++ .../add_doc_actor_with_reader_test.go | 810 +++++++++++ .../add_doc_actor_with_update_gql_test.go | 360 +++++ .../add_doc_actor_with_update_test.go | 541 +++++++ tests/integration/utils.go | 3 + 47 files changed, 8010 insertions(+), 102 deletions(-) create mode 100644 cli/acp_relationship.go create mode 100644 cli/acp_relationship_add.go rename client/{policy.go => acp.go} (80%) create mode 100644 docs/website/references/cli/defradb_client_acp_relationship.md create mode 100644 docs/website/references/cli/defradb_client_acp_relationship_add.md create mode 100644 examples/dpi_policy/user_dpi_policy_with_manages.yml create mode 100644 tests/clients/cli/wrapper_acp.go create mode 100644 tests/integration/acp/p2p/replicator_with_doc_actor_relationship_test.go create mode 100644 tests/integration/acp/p2p/subscribe_with_doc_actor_relationship_test.go create mode 100644 tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_collection_with_no_policy_test.go create mode 100644 tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_invalid_test.go create mode 100644 tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_delete_test.go create mode 100644 tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_dummy_relation_test.go create mode 100644 tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_manager_gql_test.go create mode 100644 tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_manager_test.go create mode 100644 tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_only_write_gql_test.go create mode 100644 
tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_only_write_test.go create mode 100644 tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_public_document_test.go create mode 100644 tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_reader_gql_test.go create mode 100644 tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_reader_test.go create mode 100644 tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_update_gql_test.go create mode 100644 tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_update_test.go diff --git a/acp/README.md b/acp/README.md index 54c479b5ea..4c2c73907a 100644 --- a/acp/README.md +++ b/acp/README.md @@ -427,6 +427,209 @@ Error: ### Execute Explain example (coming soon) +### Sharing Private Documents With Others + +To share a document (or grant more restricted access) with another actor, we must add a relationship between the + actor and the document. In order to make the relationship, we require all of the following: + +1) **Target DocID**: The `docID` of the document we want to make a relationship for. +2) **Collection Name**: The name of the collection that has the `Target DocID`. +3) **Relation Name**: The type of relation (name must be defined within the linked policy on collection). +4) **Target Identity**: The identity of the actor the relationship is being made with. +5) **Requesting Identity**: The identity of the actor that is making the request. + +Note: + - ACP must be available (i.e. ACP can not be disabled). + - The collection with the target document must have a valid policy and resource linked. + - The target document must be registered with ACP already (private document). + - The requesting identity MUST either be the owner OR the manager (manages the relation) of the resource. + - If the specified relation was not granted the minimum DPI permissions (read or write) within the policy, + and a relationship is formed, the subject/actor will still not be able to access (read or write) the resource. + - If the relationship already exists, then it will just be a no-op.
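+
+For embedders driving DefraDB through the Go `acp.ACP` interface rather than the CLI, the
+same operation is exposed as the `AddDocActorRelationship` method (see `acp/acp.go`).
+Below is a minimal sketch, assuming the `acp` and `acp/identity` import paths and
+placeholder identifier values:
+
+```go
+// A sketch only: import paths and surrounding setup are assumed, not part of this PR.
+import (
+	"context"
+
+	"github.com/sourcenetwork/defradb/acp"
+	"github.com/sourcenetwork/defradb/acp/identity"
+)
+
+// grantReader gives targetActorDID the "reader" relation on one private document.
+// It returns true if the relationship already existed (a no-op).
+func grantReader(
+	ctx context.Context,
+	acpSystem acp.ACP,
+	owner identity.Identity,
+	policyID, docID, targetActorDID string,
+) (bool, error) {
+	return acpSystem.AddDocActorRelationship(
+		ctx,
+		policyID,       // ID returned when the policy was added
+		"users",        // resource name linked to the collection
+		docID,          // docID of the private (registered) document
+		"reader",       // relation defined within the policy's resource
+		owner,          // request actor: must be the owner or a manager
+		targetActorDID, // DID of the actor being granted access
+	)
+}
+```
+
+The CLI walkthrough below performs the same steps end to end.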
+ +Consider the following policy that we have under `examples/dpi_policy/user_dpi_policy_with_manages.yml`: + +```yaml +name: An Example Policy + +description: A Policy + +actor: + name: actor + +resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor +``` + +Add the policy: +```sh +defradb client acp policy add -f examples/dpi_policy/user_dpi_policy_with_manages.yml \ +--identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac +``` + +Result: +```json +{ + "PolicyID": "ec11b7e29a4e195f95787e2ec9b65af134718d16a2c9cd655b5e04562d1cabf9" +} +``` + +Add schema, linking to the users resource and our policyID: +```sh +defradb client schema add ' +type Users @policy( + id: "ec11b7e29a4e195f95787e2ec9b65af134718d16a2c9cd655b5e04562d1cabf9", + resource: "users" +) { + name: String + age: Int +} +' +``` + +Result: +```json +[ + { + "Name": "Users", + "ID": 1, + "RootID": 1, + "SchemaVersionID": "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq", + "Sources": [], + "Fields": [ + { + "Name": "_docID", + "ID": 0, + "Kind": null, + "RelationName": null, + "DefaultValue": null + }, + { + "Name": "age", + "ID": 1, + "Kind": null, + "RelationName": null, + "DefaultValue": null + }, + { + "Name": "name", + "ID": 2, + "Kind": null, + "RelationName": null, + "DefaultValue": null + } + ], + "Indexes": [], + "Policy": { + "ID": "ec11b7e29a4e195f95787e2ec9b65af134718d16a2c9cd655b5e04562d1cabf9", + "ResourceName": "users" + }, + "IsMaterialized": true + } +] +``` + +Create a private document: +```sh +defradb client collection create --name Users '[{ "name": "SecretShahzadLone" }]' \ +--identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac +``` + +Only the owner can see it: +```sh +defradb client collection docIDs --identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac +``` + +Result: +```json +{ + "docID": "bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c", + "error": "" +} +``` + +Another actor can not: +```sh +defradb client collection docIDs --identity 4d092126012ebaf56161716018a71630d99443d9d5217e9d8502bb5c5456f2c5 +``` + +**Result is empty from the above command** + + +Now let's make the other actor a reader of the document by adding a relationship: +```sh +defradb client acp relationship add \ +--collection Users \ +--docID bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c \ +--relation reader \ +--actor did:key:z7r8os2G88XXBNBTLj3kFR5rzUJ4VAesbX7PgsA68ak9B5RYcXF5EZEmjRzzinZndPSSwujXb4XKHG6vmKEFG6ZfsfcQn \ +--identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac +``` + +Result: +```json +{ + "ExistedAlready": false +} +``` + +**Note: If the same relationship is created again the `ExistedAlready` would then be true, indicating no-op** + +Now the other actor can read: +```sh +defradb client collection docIDs --identity 4d092126012ebaf56161716018a71630d99443d9d5217e9d8502bb5c5456f2c5 +``` + +Result: +```json +{ + "docID": "bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c", + "error": "" +} +``` + +But, they still can not perform an update as they were only granted a read permission (through `reader` relation): +```sh +defradb client collection update --name Users --docID "bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c" \ +--identity 
4d092126012ebaf56161716018a71630d99443d9d5217e9d8502bb5c5456f2c5 '{ "name": "SecretUpdatedShahzad" }' +``` + +Result: +```sh +Error: document not found or not authorized to access +``` ## DAC Usage HTTP: diff --git a/acp/acp.go b/acp/acp.go index 973181ae91..c7ae5936e6 100644 --- a/acp/acp.go +++ b/acp/acp.go @@ -99,6 +99,22 @@ type ACP interface { docID string, ) (bool, error) + // AddDocActorRelationship creates a relationship between document and the target actor. + // + // If failure occurs, the result will return an error. Upon success the boolean value will + // be true if the relationship already existed (no-op), and false if a new relationship was made. + // + // Note: The request actor must either be the owner or manager of the document. + AddDocActorRelationship( + ctx context.Context, + policyID string, + resourceName string, + docID string, + relation string, + requestActor identity.Identity, + targetActor string, + ) (bool, error) + // SupportsP2P returns true if the implementation supports ACP across a peer network. SupportsP2P() bool } diff --git a/acp/acp_local.go b/acp/acp_local.go index 97e7a67cce..6e85ac9313 100644 --- a/acp/acp_local.go +++ b/acp/acp_local.go @@ -236,3 +236,34 @@ func (l *ACPLocal) VerifyAccessRequest( return resp.Valid, nil } + +func (l *ACPLocal) AddActorRelationship( + ctx context.Context, + policyID string, + resourceName string, + objectID string, + relation string, + requester identity.Identity, + targetActor string, + creationTime *protoTypes.Timestamp, +) (bool, error) { + principal, err := auth.NewDIDPrincipal(requester.DID) + if err != nil { + return false, newErrInvalidActorID(err, requester.DID) + } + + ctx = auth.InjectPrincipal(ctx, principal) + + setRelationshipRequest := types.SetRelationshipRequest{ + PolicyId: policyID, + Relationship: types.NewActorRelationship(resourceName, objectID, relation, targetActor), + CreationTime: creationTime, + } + + setRelationshipResponse, err := l.engine.SetRelationship(ctx, &setRelationshipRequest) + if err != nil { + return false, err + } + + return setRelationshipResponse.RecordExisted, nil +} diff --git a/acp/acp_local_test.go b/acp/acp_local_test.go index 9dbf0b36e8..7b30b44cbb 100644 --- a/acp/acp_local_test.go +++ b/acp/acp_local_test.go @@ -663,6 +663,197 @@ func Test_LocalACP_PersistentMemory_CheckDocAccess_TrueIfHaveAccessFalseIfNotErr require.Nil(t, errClose) } +func Test_LocalACP_InMemory_AddDocActorRelationship_FalseIfExistsBeforeTrueIfNoOp(t *testing.T) { + ctx := context.Background() + localACP := NewLocalACP() + + localACP.Init(ctx, "") + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + // Register a document. + errRegisterDoc := localACP.RegisterDocObject( + ctx, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errRegisterDoc) + + // Other identity does not have access yet. + hasAccess, errCheckDocAccess := localACP.CheckDocAccess( + ctx, + ReadPermission, + identity2.DID, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.False(t, hasAccess) + + // Grant other identity access. 
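+	// A first-time grant is expected to create the relationship (returned flag is
+	// false); repeating the identical call below is a no-op and returns true.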
+ exists, errAddDocActorRelationship := localACP.AddDocActorRelationship( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + "reader", + identity1, + identity2.DID, + ) + require.Nil(t, errAddDocActorRelationship) + require.False(t, exists) + + // Granting again will be no-op + exists, errAddDocActorRelationship = localACP.AddDocActorRelationship( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + "reader", + identity1, + identity2.DID, + ) + require.Nil(t, errAddDocActorRelationship) + require.True(t, exists) // Exists already this time + + // Now the other identity has access. + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity2.DID, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.True(t, hasAccess) + + errClose := localACP.Close() + require.Nil(t, errClose) +} + +func Test_LocalACP_PersistentMemory_AddDocActorRelationship_FalseIfExistsBeforeTrueIfNoOp(t *testing.T) { + acpPath := t.TempDir() + require.NotEqual(t, "", acpPath) + + ctx := context.Background() + localACP := NewLocalACP() + + localACP.Init(ctx, acpPath) + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + // Register a document. + errRegisterDoc := localACP.RegisterDocObject( + ctx, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errRegisterDoc) + + // Other identity does not have access yet. + hasAccess, errCheckDocAccess := localACP.CheckDocAccess( + ctx, + ReadPermission, + identity2.DID, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.False(t, hasAccess) + + // Grant other identity access. + exists, errAddDocActorRelationship := localACP.AddDocActorRelationship( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + "reader", + identity1, + identity2.DID, + ) + require.Nil(t, errAddDocActorRelationship) + require.False(t, exists) + + // Granting again will be no-op + exists, errAddDocActorRelationship = localACP.AddDocActorRelationship( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + "reader", + identity1, + identity2.DID, + ) + require.Nil(t, errAddDocActorRelationship) + require.True(t, exists) // Exists already this time + + // Now the other identity has access. + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity2.DID, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.True(t, hasAccess) + + // Should continue having their correct behaviour and access even after a restart. + errClose := localACP.Close() + require.Nil(t, errClose) + + localACP.Init(ctx, acpPath) + errStart = localACP.Start(ctx) + require.Nil(t, errStart) + + // Now check again after the restart that the second identity still has access. 
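+	// For the persistent ACP store, relationships are written to disk alongside
+	// the policy state, so the grant is expected to survive the Close/Start cycle.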
+ hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity2.DID, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.True(t, hasAccess) + + errClose = localACP.Close() + require.Nil(t, errClose) +} + func Test_LocalACP_InMemory_AddPolicy_InvalidCreatorIDReturnsError(t *testing.T) { ctx := context.Background() localACP := NewLocalACP() @@ -684,6 +875,30 @@ func Test_LocalACP_InMemory_AddPolicy_InvalidCreatorIDReturnsError(t *testing.T) require.NoError(t, err) } +func Test_LocalACP_Persistent_AddPolicy_InvalidCreatorIDReturnsError(t *testing.T) { + acpPath := t.TempDir() + require.NotEqual(t, "", acpPath) + + ctx := context.Background() + localACP := NewLocalACP() + + localACP.Init(ctx, acpPath) + err := localACP.Start(ctx) + require.Nil(t, err) + + policyID, err := localACP.AddPolicy( + ctx, + invalidIdentity, + validPolicy, + ) + + require.ErrorIs(t, err, ErrInvalidActorID) + require.Empty(t, policyID) + + err = localACP.Close() + require.NoError(t, err) +} + func Test_LocalACP_InMemory_RegisterObject_InvalidCreatorIDReturnsError(t *testing.T) { ctx := context.Background() localACP := NewLocalACP() @@ -706,7 +921,7 @@ func Test_LocalACP_InMemory_RegisterObject_InvalidCreatorIDReturnsError(t *testi require.NoError(t, err) } -func Test_LocalACP_Persistent_AddPolicy_InvalidCreatorIDReturnsError(t *testing.T) { +func Test_LocalACP_Persistent_RegisterObject_InvalidCreatorIDReturnsError(t *testing.T) { acpPath := t.TempDir() require.NotEqual(t, "", acpPath) @@ -717,20 +932,59 @@ func Test_LocalACP_Persistent_AddPolicy_InvalidCreatorIDReturnsError(t *testing. err := localACP.Start(ctx) require.Nil(t, err) - policyID, err := localACP.AddPolicy( + err = localACP.RegisterDocObject( ctx, invalidIdentity, - validPolicy, + validPolicyID, + "users", + "documentID_XYZ", ) require.ErrorIs(t, err, ErrInvalidActorID) - require.Empty(t, policyID) err = localACP.Close() require.NoError(t, err) } -func Test_LocalACP_Persistent_RegisterObject_InvalidCreatorIDReturnsError(t *testing.T) { +func Test_LocalACP_InMemory_AddDocActorRelationship_InvalidIdentitiesReturnError(t *testing.T) { + ctx := context.Background() + localACP := NewLocalACP() + + localACP.Init(ctx, "") + err := localACP.Start(ctx) + require.Nil(t, err) + + // Invalid requesting identity. + exists, err := localACP.AddDocActorRelationship( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + "reader", + invalidIdentity, + identity2.DID, + ) + require.False(t, exists) + require.ErrorIs(t, err, ErrInvalidActorID) + + // Invalid target actor. + exists, err = localACP.AddDocActorRelationship( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + "reader", + identity1, + invalidIdentity.DID, + ) + require.False(t, exists) + require.ErrorIs(t, err, ErrFailedToAddDocActorRelationshipWithACP) + + err = localACP.Close() + require.NoError(t, err) +} + +func Test_LocalACP_Persistent_AddDocActorRelationship_InvalidIdentitiesReturnError(t *testing.T) { acpPath := t.TempDir() require.NotEqual(t, "", acpPath) @@ -741,16 +995,32 @@ func Test_LocalACP_Persistent_RegisterObject_InvalidCreatorIDReturnsError(t *tes err := localACP.Start(ctx) require.Nil(t, err) - err = localACP.RegisterDocObject( + // Invalid requesting identity. 
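+	// The requester DID is validated locally when constructing the principal, so an
+	// invalid requesting identity is expected to fail with ErrInvalidActorID before
+	// reaching the ACP engine.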
+ exists, err := localACP.AddDocActorRelationship( ctx, - invalidIdentity, validPolicyID, "users", "documentID_XYZ", + "reader", + invalidIdentity, + identity2.DID, ) - + require.False(t, exists) require.ErrorIs(t, err, ErrInvalidActorID) + // Invalid target actor. + exists, err = localACP.AddDocActorRelationship( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + "reader", + identity1, + invalidIdentity.DID, + ) + require.False(t, exists) + require.ErrorIs(t, err, ErrFailedToAddDocActorRelationshipWithACP) + err = localACP.Close() require.NoError(t, err) } diff --git a/acp/acp_source_hub.go b/acp/acp_source_hub.go index 4dfb26c090..d0c4fb6b89 100644 --- a/acp/acp_source_hub.go +++ b/acp/acp_source_hub.go @@ -261,3 +261,53 @@ func (a *acpSourceHub) VerifyAccessRequest( func (a *acpSourceHub) Close() error { return nil } + +func (a *acpSourceHub) AddActorRelationship( + ctx context.Context, + policyID string, + resourceName string, + objectID string, + relation string, + requester identity.Identity, + targetActor string, + creationTime *protoTypes.Timestamp, +) (bool, error) { + msgSet := sourcehub.MsgSet{} + cmdMapper := msgSet.WithBearerPolicyCmd(&acptypes.MsgBearerPolicyCmd{ + Creator: a.signer.GetAccAddress(), + BearerToken: requester.BearerToken, + PolicyId: policyID, + Cmd: acptypes.NewSetRelationshipCmd( + acptypes.NewActorRelationship( + resourceName, + objectID, + relation, + targetActor, + ), + ), + CreationTime: creationTime, + }) + tx, err := a.txBuilder.Build(ctx, a.signer, &msgSet) + if err != nil { + return false, err + } + resp, err := a.client.BroadcastTx(ctx, tx) + if err != nil { + return false, err + } + + result, err := a.client.AwaitTx(ctx, resp.TxHash) + if err != nil { + return false, err + } + if result.Error() != nil { + return false, result.Error() + } + + cmdResult, err := cmdMapper.Map(result.TxPayload()) + if err != nil { + return false, err + } + + return cmdResult.GetResult().GetSetRelationshipResult().RecordExisted, nil +} diff --git a/acp/errors.go b/acp/errors.go index 5ff4eee302..e0717f15dd 100644 --- a/acp/errors.go +++ b/acp/errors.go @@ -15,12 +15,14 @@ import ( ) const ( - errInitializationOfACPFailed = "initialization of acp failed" - errStartingACPInEmptyPath = "starting acp in an empty path" - errFailedToAddPolicyWithACP = "failed to add policy with acp" - errFailedToRegisterDocWithACP = "failed to register document with acp" - errFailedToCheckIfDocIsRegisteredWithACP = "failed to check if doc is registered with acp" - errFailedToVerifyDocAccessWithACP = "failed to verify doc access with acp" + errInitializationOfACPFailed = "initialization of acp failed" + errStartingACPInEmptyPath = "starting acp in an empty path" + errFailedToAddPolicyWithACP = "failed to add policy with acp" + errFailedToRegisterDocWithACP = "failed to register document with acp" + errFailedToCheckIfDocIsRegisteredWithACP = "failed to check if doc is registered with acp" + errFailedToVerifyDocAccessWithACP = "failed to verify doc access with acp" + errFailedToAddDocActorRelationshipWithACP = "failed to add document actor relationship with acp" + errMissingRequiredArgToAddDocActorRelationship = "missing a required argument needed to add doc actor relationship" errObjectDidNotRegister = "no-op while registering object (already exists or error) with acp" errNoPolicyArgs = "missing policy arguments, must have both id and resource" @@ -40,12 +42,13 @@ const ( ) var ( - ErrInitializationOfACPFailed = errors.New(errInitializationOfACPFailed) - ErrFailedToAddPolicyWithACP = 
errors.New(errFailedToAddPolicyWithACP) - ErrFailedToRegisterDocWithACP = errors.New(errFailedToRegisterDocWithACP) - ErrFailedToCheckIfDocIsRegisteredWithACP = errors.New(errFailedToCheckIfDocIsRegisteredWithACP) - ErrFailedToVerifyDocAccessWithACP = errors.New(errFailedToVerifyDocAccessWithACP) - ErrPolicyDoesNotExistWithACP = errors.New(errPolicyDoesNotExistWithACP) + ErrInitializationOfACPFailed = errors.New(errInitializationOfACPFailed) + ErrFailedToAddPolicyWithACP = errors.New(errFailedToAddPolicyWithACP) + ErrFailedToRegisterDocWithACP = errors.New(errFailedToRegisterDocWithACP) + ErrFailedToCheckIfDocIsRegisteredWithACP = errors.New(errFailedToCheckIfDocIsRegisteredWithACP) + ErrFailedToVerifyDocAccessWithACP = errors.New(errFailedToVerifyDocAccessWithACP) + ErrFailedToAddDocActorRelationshipWithACP = errors.New(errFailedToAddDocActorRelationshipWithACP) + ErrPolicyDoesNotExistWithACP = errors.New(errPolicyDoesNotExistWithACP) ErrResourceDoesNotExistOnTargetPolicy = errors.New(errResourceDoesNotExistOnTargetPolicy) @@ -139,6 +142,29 @@ func NewErrFailedToVerifyDocAccessWithACP( ) } +func NewErrFailedToAddDocActorRelationshipWithACP( + inner error, + Type string, + policyID string, + resourceName string, + docID string, + relation string, + requestActor string, + targetActor string, +) error { + return errors.Wrap( + errFailedToAddDocActorRelationshipWithACP, + inner, + errors.NewKV("Type", Type), + errors.NewKV("PolicyID", policyID), + errors.NewKV("ResourceName", resourceName), + errors.NewKV("DocID", docID), + errors.NewKV("Relation", relation), + errors.NewKV("RequestActor", requestActor), + errors.NewKV("TargetActor", targetActor), + ) +} + func newErrPolicyDoesNotExistWithACP( inner error, policyID string, @@ -209,6 +235,25 @@ func newErrExprOfRequiredPermissionHasInvalidChar( ) } +func NewErrMissingRequiredArgToAddDocActorRelationship( + policyID string, + resourceName string, + docID string, + relation string, + requestActor string, + targetActor string, +) error { + return errors.New( + errMissingRequiredArgToAddDocActorRelationship, + errors.NewKV("PolicyID", policyID), + errors.NewKV("ResourceName", resourceName), + errors.NewKV("DocID", docID), + errors.NewKV("Relation", relation), + errors.NewKV("RequestActor", requestActor), + errors.NewKV("TargetActor", targetActor), + ) +} + func newErrInvalidActorID( inner error, id string, diff --git a/acp/source_hub_client.go b/acp/source_hub_client.go index 0bf344afb8..0bfbae72b1 100644 --- a/acp/source_hub_client.go +++ b/acp/source_hub_client.go @@ -85,6 +85,27 @@ type sourceHubClient interface { docID string, ) (bool, error) + // AddActorRelationship creates a relationship within a policy which ties the target actor + // with the specified object, which means that the set of high level rules defined in the + // policy will now apply to target actor as well. + // + // If failure occurs, the result will return an error. Upon success the boolean value will + // be true if the relationship with actor already existed (no-op), and false if a new + // relationship was made. + // + // Note: The requester identity must either be the owner of the object (being shared) or + // the manager (i.e. the relation has `manages` defined in the policy). + AddActorRelationship( + ctx context.Context, + policyID string, + resourceName string, + objectID string, + relation string, + requester identity.Identity, + targetActor string, + creationTime *protoTypes.Timestamp, + ) (bool, error) + // Close closes any resources in use by acp. 
Close() error } @@ -335,6 +356,70 @@ func (a *sourceHubBridge) CheckDocAccess( } } +func (a *sourceHubBridge) AddDocActorRelationship( + ctx context.Context, + policyID string, + resourceName string, + docID string, + relation string, + requestActor identity.Identity, + targetActor string, +) (bool, error) { + if policyID == "" || + resourceName == "" || + docID == "" || + relation == "" || + requestActor == (identity.Identity{}) || + targetActor == "" { + return false, NewErrMissingRequiredArgToAddDocActorRelationship( + policyID, + resourceName, + docID, + relation, + requestActor.DID, + targetActor, + ) + } + + exists, err := a.client.AddActorRelationship( + ctx, + policyID, + resourceName, + docID, + relation, + requestActor, + targetActor, + protoTypes.TimestampNow(), + ) + + if err != nil { + return false, NewErrFailedToAddDocActorRelationshipWithACP( + err, + "Local", + policyID, + resourceName, + docID, + relation, + requestActor.DID, + targetActor, + ) + } + + log.InfoContext( + ctx, + "Document and actor relationship set", + corelog.Any("PolicyID", policyID), + corelog.Any("ResourceName", resourceName), + corelog.Any("DocID", docID), + corelog.Any("Relation", relation), + corelog.Any("RequestActor", requestActor.DID), + corelog.Any("TargetActor", targetActor), + corelog.Any("Existed", exists), + ) + + return exists, nil +} + func (a *sourceHubBridge) SupportsP2P() bool { _, ok := a.client.(*acpSourceHub) return ok diff --git a/cli/acp_relationship.go b/cli/acp_relationship.go new file mode 100644 index 0000000000..a2a5f3cb64 --- /dev/null +++ b/cli/acp_relationship.go @@ -0,0 +1,25 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "github.com/spf13/cobra" +) + +func MakeACPRelationshipCommand() *cobra.Command { + var cmd = &cobra.Command{ + Use: "relationship", + Short: "Interact with the acp relationship features of DefraDB instance", + Long: `Interact with the acp relationship features of DefraDB instance`, + } + + return cmd +} diff --git a/cli/acp_relationship_add.go b/cli/acp_relationship_add.go new file mode 100644 index 0000000000..9733732af8 --- /dev/null +++ b/cli/acp_relationship_add.go @@ -0,0 +1,130 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+
+package cli
+
+import (
+	"github.com/spf13/cobra"
+)
+
+func MakeACPRelationshipAddCommand() *cobra.Command {
+	const (
+		collectionFlagLong  string = "collection"
+		collectionFlagShort string = "c"
+
+		relationFlagLong  string = "relation"
+		relationFlagShort string = "r"
+
+		targetActorFlagLong  string = "actor"
+		targetActorFlagShort string = "a"
+
+		docIDFlag string = "docID"
+	)
+
+	var (
+		collectionArg  string
+		relationArg    string
+		targetActorArg string
+		docIDArg       string
+	)
+
+	var cmd = &cobra.Command{
+		Use:   "add [--docID] [-c --collection] [-r --relation] [-a --actor] [-i --identity]",
+		Short: "Add new relationship",
+		Long: `Add new relationship
+
+To share a document (or grant more restricted access) with another actor, we must add a relationship between the
+actor and the document. In order to make the relationship, we require all of the following:
+1) Target DocID: The docID of the document we want to make a relationship for.
+2) Collection Name: The name of the collection that has the Target DocID.
+3) Relation Name: The type of relation (name must be defined within the linked policy on collection).
+4) Target Identity: The identity of the actor the relationship is being made with.
+5) Requesting Identity: The identity of the actor that is making the request.
+
+Notes:
+  - ACP must be available (i.e. ACP can not be disabled).
+  - The target document must be registered with ACP already (policy & resource specified).
+  - The requesting identity MUST either be the owner OR the manager (manages the relation) of the resource.
+  - If the specified relation was not granted the minimum DPI permissions (read or write) within the policy,
+  and a relationship is formed, the subject/actor will still not be able to access (read or write) the resource.
+  - Learn more about [ACP & DPI Rules](/acp/README.md)
+
+Example: Let another actor (4d092126012ebaf56161716018a71630d99443d9d5217e9d8502bb5c5456f2c5) read a private document:
+  defradb client acp relationship add \
+    --collection Users \
+    --docID bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c \
+    --relation reader \
+    --actor did:key:z7r8os2G88XXBNBTLj3kFR5rzUJ4VAesbX7PgsA68ak9B5RYcXF5EZEmjRzzinZndPSSwujXb4XKHG6vmKEFG6ZfsfcQn \
+    --identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac
+
+Example: Creating a dummy relationship does nothing (from the database's perspective):
+  defradb client acp relationship add \
+    -c Users \
+    --docID bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c \
+    -r dummy \
+    -a did:key:z7r8os2G88XXBNBTLj3kFR5rzUJ4VAesbX7PgsA68ak9B5RYcXF5EZEmjRzzinZndPSSwujXb4XKHG6vmKEFG6ZfsfcQn \
+    -i e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac
+`,
+		RunE: func(cmd *cobra.Command, args []string) error {
+			db := mustGetContextDB(cmd)
+			exists, err := db.AddDocActorRelationship(
+				cmd.Context(),
+				collectionArg,
+				docIDArg,
+				relationArg,
+				targetActorArg,
+			)
+
+			if err != nil {
+				return err
+			}
+
+			return writeJSON(cmd, exists)
+		},
+	}
+
+	cmd.Flags().StringVarP(
+		&collectionArg,
+		collectionFlagLong,
+		collectionFlagShort,
+		"",
+		"Collection that has the resource and policy for object",
+	)
+	_ = cmd.MarkFlagRequired(collectionFlagLong)
+
+	cmd.Flags().StringVarP(
+		&relationArg,
+		relationFlagLong,
+		relationFlagShort,
+		"",
+		"Relation that needs to be set for the relationship",
+	)
+	_ = cmd.MarkFlagRequired(relationFlagLong)
+
+	cmd.Flags().StringVarP(
+		&targetActorArg,
+		targetActorFlagLong,
+		targetActorFlagShort,
+		"",
+		"Actor to add relationship with",
+	)
+	_ = cmd.MarkFlagRequired(targetActorFlagLong)
+
+	cmd.Flags().StringVarP(
+		&docIDArg,
+		docIDFlag,
+		"",
+		"",
+		"Document Identifier (ObjectID) to make relationship for",
+	)
+	_ = cmd.MarkFlagRequired(docIDFlag)
+
+	return cmd
+}
diff --git a/cli/cli.go b/cli/cli.go
index 4453cbaafb..61d1fd51cf 100644
--- a/cli/cli.go
+++ b/cli/cli.go
@@ -62,14 +62,20 @@ func NewDefraCommand() *cobra.Command {
 		schema_migrate,
 	)
 
-	policy := MakeACPPolicyCommand()
-	policy.AddCommand(
+	acp_policy := MakeACPPolicyCommand()
+	acp_policy.AddCommand(
 		MakeACPPolicyAddCommand(),
 	)
 
+	acp_relationship := MakeACPRelationshipCommand()
+	acp_relationship.AddCommand(
+		MakeACPRelationshipAddCommand(),
+	)
+
 	acp := MakeACPCommand()
 	acp.AddCommand(
-		policy,
+		acp_policy,
+		acp_relationship,
 	)
 
 	view := MakeViewCommand()
diff --git a/client/policy.go b/client/acp.go
similarity index 80%
rename from client/policy.go
rename to client/acp.go
index 5b877696c2..7795369c8f 100644
--- a/client/policy.go
+++ b/client/acp.go
@@ -29,3 +29,10 @@ type AddPolicyResult struct {
 	// upon successful creation of a policy.
 	PolicyID string
 }
+
+// AddDocActorRelationshipResult wraps the result of making a document-actor relationship.
+type AddDocActorRelationshipResult struct {
+	// ExistedAlready is true if the relationship existed already (no-op), and
+	// it is false if a new relationship was created.
+	ExistedAlready bool
+}
diff --git a/client/db.go b/client/db.go
index b8f5e91e35..e28d21df02 100644
--- a/client/db.go
+++ b/client/db.go
@@ -106,6 +106,20 @@ type DB interface {
 	//
 	// Note: A policy can not be added without the creatorID (identity).
 	AddPolicy(ctx context.Context, policy string) (AddPolicyResult, error)
+
+	// AddDocActorRelationship creates a relationship between a document and the target actor.
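+	// For example, granting the `reader` relation on a docID to another actor's
+	// DID is expected to let that actor read the (otherwise private) document.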
+ // + // If failure occurs, the result will return an error. Upon success the boolean value will + // be true if the relationship already existed (no-op), and false if a new relationship was made. + // + // Note: The request actor must either be the owner or manager of the document. + AddDocActorRelationship( + ctx context.Context, + collectionName string, + docID string, + relation string, + targetActor string, + ) (AddDocActorRelationshipResult, error) } // Store contains the core DefraDB read-write operations. diff --git a/client/errors.go b/client/errors.go index 866ad98ec4..ceb526b35e 100644 --- a/client/errors.go +++ b/client/errors.go @@ -41,25 +41,26 @@ const ( // This list is incomplete and undefined errors may also be returned. // Errors returned from this package may be tested against these errors with errors.Is. var ( - ErrFieldNotExist = errors.New(errFieldNotExist) - ErrUnexpectedType = errors.New(errUnexpectedType) - ErrFailedToUnmarshalCollection = errors.New(errFailedToUnmarshalCollection) - ErrOperationNotPermittedOnNamelessCols = errors.New(errOperationNotPermittedOnNamelessCols) - ErrFieldNotObject = errors.New("trying to access field on a non object type") - ErrValueTypeMismatch = errors.New("value does not match indicated type") - ErrDocumentNotFoundOrNotAuthorized = errors.New("document not found or not authorized to access") - ErrPolicyAddFailureNoACP = errors.New("failure adding policy because ACP was not available") - ErrInvalidUpdateTarget = errors.New("the target document to update is of invalid type") - ErrInvalidUpdater = errors.New("the updater of a document is of invalid type") - ErrInvalidDeleteTarget = errors.New("the target document to delete is of invalid type") - ErrMalformedDocID = errors.New("malformed document ID, missing either version or cid") - ErrInvalidDocIDVersion = errors.New("invalid document ID version") - ErrInvalidJSONPayload = errors.New(errInvalidJSONPayload) - ErrCanNotNormalizeValue = errors.New(errCanNotNormalizeValue) - ErrCanNotTurnNormalValueIntoArray = errors.New(errCanNotTurnNormalValueIntoArray) - ErrCanNotMakeNormalNilFromFieldKind = errors.New(errCanNotMakeNormalNilFromFieldKind) - ErrCollectionNotFound = errors.New(errCollectionNotFound) - ErrFailedToParseKind = errors.New(errFailedToParseKind) + ErrFieldNotExist = errors.New(errFieldNotExist) + ErrUnexpectedType = errors.New(errUnexpectedType) + ErrFailedToUnmarshalCollection = errors.New(errFailedToUnmarshalCollection) + ErrOperationNotPermittedOnNamelessCols = errors.New(errOperationNotPermittedOnNamelessCols) + ErrFieldNotObject = errors.New("trying to access field on a non object type") + ErrValueTypeMismatch = errors.New("value does not match indicated type") + ErrDocumentNotFoundOrNotAuthorized = errors.New("document not found or not authorized to access") + ErrACPOperationButACPNotAvailable = errors.New("operation requires ACP, but ACP not available") + ErrACPOperationButCollectionHasNoPolicy = errors.New("operation requires ACP, but collection has no policy") + ErrInvalidUpdateTarget = errors.New("the target document to update is of invalid type") + ErrInvalidUpdater = errors.New("the updater of a document is of invalid type") + ErrInvalidDeleteTarget = errors.New("the target document to delete is of invalid type") + ErrMalformedDocID = errors.New("malformed document ID, missing either version or cid") + ErrInvalidDocIDVersion = errors.New("invalid document ID version") + ErrInvalidJSONPayload = errors.New(errInvalidJSONPayload) + ErrCanNotNormalizeValue = 
errors.New(errCanNotNormalizeValue) + ErrCanNotTurnNormalValueIntoArray = errors.New(errCanNotTurnNormalValueIntoArray) + ErrCanNotMakeNormalNilFromFieldKind = errors.New(errCanNotMakeNormalNilFromFieldKind) + ErrCollectionNotFound = errors.New(errCollectionNotFound) + ErrFailedToParseKind = errors.New(errFailedToParseKind) ) // NewErrFieldNotExist returns an error indicating that the given field does not exist. diff --git a/client/mocks/db.go b/client/mocks/db.go index 8923e63d78..1297870e15 100644 --- a/client/mocks/db.go +++ b/client/mocks/db.go @@ -35,6 +35,66 @@ func (_m *DB) EXPECT() *DB_Expecter { return &DB_Expecter{mock: &_m.Mock} } +// AddDocActorRelationship provides a mock function with given fields: ctx, collectionName, docID, relation, targetActor +func (_m *DB) AddDocActorRelationship(ctx context.Context, collectionName string, docID string, relation string, targetActor string) (client.AddDocActorRelationshipResult, error) { + ret := _m.Called(ctx, collectionName, docID, relation, targetActor) + + if len(ret) == 0 { + panic("no return value specified for AddDocActorRelationship") + } + + var r0 client.AddDocActorRelationshipResult + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string) (client.AddDocActorRelationshipResult, error)); ok { + return rf(ctx, collectionName, docID, relation, targetActor) + } + if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string) client.AddDocActorRelationshipResult); ok { + r0 = rf(ctx, collectionName, docID, relation, targetActor) + } else { + r0 = ret.Get(0).(client.AddDocActorRelationshipResult) + } + + if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string) error); ok { + r1 = rf(ctx, collectionName, docID, relation, targetActor) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DB_AddDocActorRelationship_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddDocActorRelationship' +type DB_AddDocActorRelationship_Call struct { + *mock.Call +} + +// AddDocActorRelationship is a helper method to define mock.On call +// - ctx context.Context +// - collectionName string +// - docID string +// - relation string +// - targetActor string +func (_e *DB_Expecter) AddDocActorRelationship(ctx interface{}, collectionName interface{}, docID interface{}, relation interface{}, targetActor interface{}) *DB_AddDocActorRelationship_Call { + return &DB_AddDocActorRelationship_Call{Call: _e.mock.On("AddDocActorRelationship", ctx, collectionName, docID, relation, targetActor)} +} + +func (_c *DB_AddDocActorRelationship_Call) Run(run func(ctx context.Context, collectionName string, docID string, relation string, targetActor string)) *DB_AddDocActorRelationship_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(string), args[3].(string), args[4].(string)) + }) + return _c +} + +func (_c *DB_AddDocActorRelationship_Call) Return(_a0 client.AddDocActorRelationshipResult, _a1 error) *DB_AddDocActorRelationship_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *DB_AddDocActorRelationship_Call) RunAndReturn(run func(context.Context, string, string, string, string) (client.AddDocActorRelationshipResult, error)) *DB_AddDocActorRelationship_Call { + _c.Call.Return(run) + return _c +} + // AddP2PCollections provides a mock function with given fields: ctx, collectionIDs func (_m *DB) AddP2PCollections(ctx context.Context, collectionIDs []string) error { ret := 
_m.Called(ctx, collectionIDs)
diff --git a/docs/website/references/cli/defradb_client_acp.md b/docs/website/references/cli/defradb_client_acp.md
index 5a9c9aef80..d2ffce5036 100644
--- a/docs/website/references/cli/defradb_client_acp.md
+++ b/docs/website/references/cli/defradb_client_acp.md
@@ -42,4 +42,5 @@ Learn more about [ACP](/acp/README.md)
 * [defradb client](defradb_client.md)	 - Interact with a DefraDB node
 * [defradb client acp policy](defradb_client_acp_policy.md)	 - Interact with the acp policy features of DefraDB instance
+* [defradb client acp relationship](defradb_client_acp_relationship.md)	 - Interact with the acp relationship features of DefraDB instance
 
diff --git a/docs/website/references/cli/defradb_client_acp_relationship.md b/docs/website/references/cli/defradb_client_acp_relationship.md
new file mode 100644
index 0000000000..4c204d0ccd
--- /dev/null
+++ b/docs/website/references/cli/defradb_client_acp_relationship.md
@@ -0,0 +1,41 @@
+## defradb client acp relationship
+
+Interact with the acp relationship features of DefraDB instance
+
+### Synopsis
+
+Interact with the acp relationship features of DefraDB instance
+
+### Options
+
+```
+  -h, --help   help for relationship
+```
+
+### Options inherited from parent commands
+
+```
+  -i, --identity string             Hex formatted private key used to authenticate with ACP
+      --keyring-backend string      Keyring backend to use. Options are file or system (default "file")
+      --keyring-namespace string    Service name to use when using the system backend (default "defradb")
+      --keyring-path string         Path to store encrypted keys when using the file backend (default "keys")
+      --log-format string           Log format to use. Options are text or json (default "text")
+      --log-level string            Log level to use. Options are debug, info, error, fatal (default "info")
+      --log-output string           Log output path. Options are stderr or stdout. (default "stderr")
+      --log-overrides string        Logger config overrides. Format <name>,<key>=<val>,...;<name>,...
+      --log-source                  Include source location in logs
+      --log-stacktrace              Include stacktrace in error and fatal logs
+      --no-keyring                  Disable the keyring and generate ephemeral keys
+      --no-log-color                Disable colored log output
+      --rootdir string              Directory for persistent data (default: $HOME/.defradb)
+      --secret-file string          Path to the file containing secrets (default ".env")
+      --source-hub-address string   The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor
+      --tx uint                     Transaction ID
+      --url string                  URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181")
+```
+
+### SEE ALSO
+
+* [defradb client acp](defradb_client_acp.md)	 - Interact with the access control system of a DefraDB node
+* [defradb client acp relationship add](defradb_client_acp_relationship_add.md)	 - Add new relationship
+
diff --git a/docs/website/references/cli/defradb_client_acp_relationship_add.md b/docs/website/references/cli/defradb_client_acp_relationship_add.md
new file mode 100644
index 0000000000..ba5647c163
--- /dev/null
+++ b/docs/website/references/cli/defradb_client_acp_relationship_add.md
@@ -0,0 +1,81 @@
+## defradb client acp relationship add
+
+Add new relationship
+
+### Synopsis
+
+Add new relationship
+
+To share a document (or grant more restricted access) with another actor, we must add a relationship between the
+actor and the document. In order to make the relationship, we require all of the following:
+1) Target DocID: The docID of the document we want to make a relationship for.
+2) Collection Name: The name of the collection that has the Target DocID.
+3) Relation Name: The type of relation (name must be defined within the linked policy on collection).
+4) Target Identity: The identity of the actor the relationship is being made with.
+5) Requesting Identity: The identity of the actor that is making the request.
+
+Notes:
+  - ACP must be available (i.e. ACP can not be disabled).
+  - The target document must be registered with ACP already (policy & resource specified).
+  - The requesting identity MUST either be the owner OR the manager (manages the relation) of the resource.
+  - If the specified relation was not granted the minimum DPI permissions (read or write) within the policy,
+  and a relationship is formed, the subject/actor will still not be able to access (read or write) the resource.
+  - Learn more about [ACP & DPI Rules](/acp/README.md)
+
+Example: Let another actor (4d092126012ebaf56161716018a71630d99443d9d5217e9d8502bb5c5456f2c5) read a private document:
+  defradb client acp relationship add \
+    --collection Users \
+    --docID bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c \
+    --relation reader \
+    --actor did:key:z7r8os2G88XXBNBTLj3kFR5rzUJ4VAesbX7PgsA68ak9B5RYcXF5EZEmjRzzinZndPSSwujXb4XKHG6vmKEFG6ZfsfcQn \
+    --identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac
+
+Example: Creating a dummy relationship does nothing (from the database's perspective):
+  defradb client acp relationship add \
+    -c Users \
+    --docID bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c \
+    -r dummy \
+    -a did:key:z7r8os2G88XXBNBTLj3kFR5rzUJ4VAesbX7PgsA68ak9B5RYcXF5EZEmjRzzinZndPSSwujXb4XKHG6vmKEFG6ZfsfcQn \
+    -i e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac
+
+
+```
+defradb client acp relationship add [--docID] [-c --collection] [-r --relation] [-a --actor] [-i --identity] [flags]
+```
+
+### Options
+
+```
+  -a, --actor string        Actor to add relationship with
+  -c, --collection string   Collection that has the resource and policy for object
+      --docID string        Document Identifier (ObjectID) to make relationship for
+  -h, --help                help for add
+  -r, --relation string     Relation that needs to be set for the relationship
+```
+
+### Options inherited from parent commands
+
+```
+  -i, --identity string             Hex formatted private key used to authenticate with ACP
+      --keyring-backend string      Keyring backend to use. Options are file or system (default "file")
+      --keyring-namespace string    Service name to use when using the system backend (default "defradb")
+      --keyring-path string         Path to store encrypted keys when using the file backend (default "keys")
+      --log-format string           Log format to use. Options are text or json (default "text")
+      --log-level string            Log level to use. Options are debug, info, error, fatal (default "info")
+      --log-output string           Log output path. Options are stderr or stdout. (default "stderr")
+      --log-overrides string        Logger config overrides. Format <name>,<key>=<val>,...;<name>,...
+      --log-source                  Include source location in logs
+      --log-stacktrace              Include stacktrace in error and fatal logs
+      --no-keyring                  Disable the keyring and generate ephemeral keys
+      --no-log-color                Disable colored log output
+      --rootdir string              Directory for persistent data (default: $HOME/.defradb)
+      --secret-file string          Path to the file containing secrets (default ".env")
+      --source-hub-address string   The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor
+      --tx uint                     Transaction ID
+      --url string                  URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181")
+```
+
+### SEE ALSO
+
+* [defradb client acp relationship](defradb_client_acp_relationship.md)	 - Interact with the acp relationship features of DefraDB instance
+
diff --git a/docs/website/references/http/openapi.json b/docs/website/references/http/openapi.json
index 6b7686c7c1..c0a7898364 100644
--- a/docs/website/references/http/openapi.json
+++ b/docs/website/references/http/openapi.json
@@ -588,6 +588,36 @@
       ]
     }
   },
+  "/acp/relationship": {
+    "post": {
+      "description": "Add an actor relationship using acp system",
+      "operationId": "add relationship",
+      "requestBody": {
+        "content": {
+          "text/plain": {
+            "schema": {
+              "type": "string"
+            }
+          }
+        },
+        "required": true
+      },
+      "responses": {
+        "200": {
+          "$ref": "#/components/responses/success"
+        },
+        "400": {
+          "$ref": "#/components/responses/error"
+        },
+        "default": {
+          "description": ""
+        }
+      },
+      "tags": [
+        "acp_relationship"
+      ]
+    }
+  },
   "/backup/export": {
     "post": {
       "description": "Export a database backup to file",
diff --git a/examples/dpi_policy/user_dpi_policy.json b/examples/dpi_policy/user_dpi_policy.json
index 74028d8ee6..96c794b490 100644
--- a/examples/dpi_policy/user_dpi_policy.json
+++ b/examples/dpi_policy/user_dpi_policy.json
@@ -1,4 +1,5 @@
 {
+    "name": "An Example Policy",
     "description": "A Valid Defra Policy Interface (DPI)",
     "actor": {
         "name": "actor"
diff --git a/examples/dpi_policy/user_dpi_policy.yml b/examples/dpi_policy/user_dpi_policy.yml
index fafae06957..1b1df1e0b9 100644
--- a/examples/dpi_policy/user_dpi_policy.yml
+++ b/examples/dpi_policy/user_dpi_policy.yml
@@ -7,6 +7,8 @@
 #
 # Learn more about the DefraDB Policy Interface [DPI](/acp/README.md)
 
+name: An Example Policy
+
 description: A Valid DefraDB Policy Interface (DPI)
 
 actor:
diff --git a/examples/dpi_policy/user_dpi_policy_with_manages.yml b/examples/dpi_policy/user_dpi_policy_with_manages.yml
new file mode 100644
index 0000000000..4667660136
--- /dev/null
+++ b/examples/dpi_policy/user_dpi_policy_with_manages.yml
@@ -0,0 +1,49 @@
+# The below policy contains an example with a valid DPI-compliant resource that can be linked to a collection
+# object during the schema add command to have access control enabled for documents of that collection.
+#
+# This policy specifically has the manages attribute defined under the admin relation, which gives the admin
+# of a resource the ability to add/remove relationships with the `reader` relation name.
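+# For example, an actor tied to a document via the `admin` relation can add or remove `reader`
+# relationships for that document, but cannot grant `writer`, since only `reader` is listed under manages.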
+# +# Learn more about the DefraDB Policy Interface [DPI](/acp/README.md) + +name: An Example Policy + +description: A Policy + +actor: + name: actor + +resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor diff --git a/http/client_acp.go b/http/client_acp.go index a0140cf437..d4f1ed02e5 100644 --- a/http/client_acp.go +++ b/http/client_acp.go @@ -11,7 +11,9 @@ package http import ( + "bytes" "context" + "encoding/json" "net/http" "strings" @@ -42,3 +44,51 @@ func (c *Client) AddPolicy( return policyResult, nil } + +type addDocActorRelationshipRequest struct { + CollectionName string + DocID string + Relation string + TargetActor string +} + +func (c *Client) AddDocActorRelationship( + ctx context.Context, + collectionName string, + docID string, + relation string, + targetActor string, +) (client.AddDocActorRelationshipResult, error) { + methodURL := c.http.baseURL.JoinPath("acp", "relationship") + + body, err := json.Marshal( + addDocActorRelationshipRequest{ + CollectionName: collectionName, + DocID: docID, + Relation: relation, + TargetActor: targetActor, + }, + ) + + if err != nil { + return client.AddDocActorRelationshipResult{}, err + } + + req, err := http.NewRequestWithContext( + ctx, + http.MethodPost, + methodURL.String(), + bytes.NewBuffer(body), + ) + + if err != nil { + return client.AddDocActorRelationshipResult{}, err + } + + var addDocActorRelResult client.AddDocActorRelationshipResult + if err := c.http.requestJson(req, &addDocActorRelResult); err != nil { + return client.AddDocActorRelationshipResult{}, err + } + + return addDocActorRelResult, nil +} diff --git a/http/handler_acp.go b/http/handler_acp.go index c3c5985c71..e9bdf2ce0e 100644 --- a/http/handler_acp.go +++ b/http/handler_acp.go @@ -46,6 +46,35 @@ func (s *acpHandler) AddPolicy(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusOK, addPolicyResult) } +func (s *acpHandler) AddDocActorRelationship(rw http.ResponseWriter, req *http.Request) { + db, ok := req.Context().Value(dbContextKey).(client.DB) + if !ok { + responseJSON(rw, http.StatusBadRequest, errorResponse{NewErrFailedToGetContext("db")}) + return + } + + var message addDocActorRelationshipRequest + err := requestJSON(req, &message) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + + addDocActorRelResult, err := db.AddDocActorRelationship( + req.Context(), + message.CollectionName, + message.DocID, + message.Relation, + message.TargetActor, + ) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + + responseJSON(rw, http.StatusOK, addDocActorRelResult) +} + func (h *acpHandler) bindRoutes(router *Router) { successResponse := &openapi3.ResponseRef{ Ref: "#/components/responses/success", @@ -69,5 +98,21 @@ func (h *acpHandler) bindRoutes(router *Router) { Value: acpAddPolicyRequest, } + acpAddDocActorRelationshipRequest := openapi3.NewRequestBody(). + WithRequired(true). 
+ WithContent(openapi3.NewContentWithSchema(openapi3.NewStringSchema(), []string{"text/plain"})) + + acpAddDocActorRelationship := openapi3.NewOperation() + acpAddDocActorRelationship.OperationID = "add relationship" + acpAddDocActorRelationship.Description = "Add an actor relationship using acp system" + acpAddDocActorRelationship.Tags = []string{"acp_relationship"} + acpAddDocActorRelationship.Responses = openapi3.NewResponses() + acpAddDocActorRelationship.Responses.Set("200", successResponse) + acpAddDocActorRelationship.Responses.Set("400", errorResponse) + acpAddDocActorRelationship.RequestBody = &openapi3.RequestBodyRef{ + Value: acpAddDocActorRelationshipRequest, + } + router.AddRoute("/acp/policy", http.MethodPost, acpAddPolicy, h.AddPolicy) + router.AddRoute("/acp/relationship", http.MethodPost, acpAddDocActorRelationship, h.AddDocActorRelationship) } diff --git a/internal/db/db.go b/internal/db/db.go index d88c5920bc..73165c239a 100644 --- a/internal/db/db.go +++ b/internal/db/db.go @@ -31,6 +31,7 @@ import ( "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/event" "github.com/sourcenetwork/defradb/internal/core" + "github.com/sourcenetwork/defradb/internal/db/permission" "github.com/sourcenetwork/defradb/internal/request/graphql" ) @@ -190,8 +191,9 @@ func (db *db) AddPolicy( policy string, ) (client.AddPolicyResult, error) { if !db.acp.HasValue() { - return client.AddPolicyResult{}, client.ErrPolicyAddFailureNoACP + return client.AddPolicyResult{}, client.ErrACPOperationButACPNotAvailable } + identity := GetContextIdentity(ctx) policyID, err := db.acp.Value().AddPolicy( @@ -206,6 +208,46 @@ func (db *db) AddPolicy( return client.AddPolicyResult{PolicyID: policyID}, nil } +func (db *db) AddDocActorRelationship( + ctx context.Context, + collectionName string, + docID string, + relation string, + targetActor string, +) (client.AddDocActorRelationshipResult, error) { + if !db.acp.HasValue() { + return client.AddDocActorRelationshipResult{}, client.ErrACPOperationButACPNotAvailable + } + + collection, err := db.GetCollectionByName(ctx, collectionName) + if err != nil { + return client.AddDocActorRelationshipResult{}, err + } + + policyID, resourceName, hasPolicy := permission.IsPermissioned(collection) + if !hasPolicy { + return client.AddDocActorRelationshipResult{}, client.ErrACPOperationButCollectionHasNoPolicy + } + + identity := GetContextIdentity(ctx) + + exists, err := db.acp.Value().AddDocActorRelationship( + ctx, + policyID, + resourceName, + docID, + relation, + identity.Value(), + targetActor, + ) + + if err != nil { + return client.AddDocActorRelationshipResult{}, err + } + + return client.AddDocActorRelationshipResult{ExistedAlready: exists}, nil +} + // Initialize is called when a database is first run and creates all the db global meta data // like Collection ID counters. func (db *db) initialize(ctx context.Context) error { diff --git a/internal/db/permission/check.go b/internal/db/permission/check.go index 9d3d8a587b..b19500f41b 100644 --- a/internal/db/permission/check.go +++ b/internal/db/permission/check.go @@ -43,7 +43,7 @@ func CheckAccessOfDocOnCollectionWithACP( ) (bool, error) { // Even if acp exists, but there is no policy on the collection (unpermissioned collection) // then we still have unrestricted access. 
- policyID, resourceName, hasPolicy := isPermissioned(collection) + policyID, resourceName, hasPolicy := IsPermissioned(collection) if !hasPolicy { return true, nil } diff --git a/internal/db/permission/permission.go b/internal/db/permission/permission.go index 3b365cba75..a91d346a6f 100644 --- a/internal/db/permission/permission.go +++ b/internal/db/permission/permission.go @@ -14,13 +14,13 @@ import ( "github.com/sourcenetwork/defradb/client" ) -// isPermissioned returns true if the collection has a policy, otherwise returns false. +// IsPermissioned returns true if the collection has a policy, otherwise returns false. // // This tells us if access control is enabled for this collection or not. // // When there is a policy, in addition to returning true in the last return value, the // first returned value is policyID, second is the resource name. -func isPermissioned(collection client.Collection) (string, string, bool) { +func IsPermissioned(collection client.Collection) (string, string, bool) { policy := collection.Definition().Description.Policy if policy.HasValue() && policy.Value().ID != "" && diff --git a/internal/db/permission/register.go b/internal/db/permission/register.go index dedbdd8d63..5e03967fb4 100644 --- a/internal/db/permission/register.go +++ b/internal/db/permission/register.go @@ -37,7 +37,7 @@ func RegisterDocOnCollectionWithACP( docID string, ) error { // An identity exists and the collection has a policy. - if policyID, resourceName, hasPolicy := isPermissioned(collection); hasPolicy && identity.HasValue() { + if policyID, resourceName, hasPolicy := IsPermissioned(collection); hasPolicy && identity.HasValue() { return acpSystem.RegisterDocObject( ctx, identity.Value(), diff --git a/tests/clients/cli/wrapper.go b/tests/clients/cli/wrapper.go index 7a2f28fd4a..b3261f09a8 100644 --- a/tests/clients/cli/wrapper.go +++ b/tests/clients/cli/wrapper.go @@ -175,26 +175,6 @@ func (w *Wrapper) BasicExport(ctx context.Context, config *client.BackupConfig) return err } -func (w *Wrapper) AddPolicy( - ctx context.Context, - policy string, -) (client.AddPolicyResult, error) { - args := []string{"client", "acp", "policy", "add"} - args = append(args, policy) - - data, err := w.cmd.execute(ctx, args) - if err != nil { - return client.AddPolicyResult{}, err - } - - var addPolicyResult client.AddPolicyResult - if err := json.Unmarshal(data, &addPolicyResult); err != nil { - return client.AddPolicyResult{}, err - } - - return addPolicyResult, err -} - func (w *Wrapper) AddSchema(ctx context.Context, schema string) ([]client.CollectionDescription, error) { args := []string{"client", "schema", "add"} args = append(args, schema) diff --git a/tests/clients/cli/wrapper_acp.go b/tests/clients/cli/wrapper_acp.go new file mode 100644 index 0000000000..f76aad3cdf --- /dev/null +++ b/tests/clients/cli/wrapper_acp.go @@ -0,0 +1,66 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+
+package cli
+
+import (
+	"context"
+	"encoding/json"
+
+	"github.com/sourcenetwork/defradb/client"
+)
+
+func (w *Wrapper) AddPolicy(
+	ctx context.Context,
+	policy string,
+) (client.AddPolicyResult, error) {
+	args := []string{"client", "acp", "policy", "add"}
+	args = append(args, policy)
+
+	data, err := w.cmd.execute(ctx, args)
+	if err != nil {
+		return client.AddPolicyResult{}, err
+	}
+
+	var addPolicyResult client.AddPolicyResult
+	if err := json.Unmarshal(data, &addPolicyResult); err != nil {
+		return client.AddPolicyResult{}, err
+	}
+
+	return addPolicyResult, nil
+}
+
+func (w *Wrapper) AddDocActorRelationship(
+	ctx context.Context,
+	collectionName string,
+	docID string,
+	relation string,
+	targetActor string,
+) (client.AddDocActorRelationshipResult, error) {
+	args := []string{
+		"client", "acp", "relationship", "add",
+		"--collection", collectionName,
+		"--docID", docID,
+		"--relation", relation,
+		"--actor", targetActor,
+	}
+
+	data, err := w.cmd.execute(ctx, args)
+	if err != nil {
+		return client.AddDocActorRelationshipResult{}, err
+	}
+
+	var result client.AddDocActorRelationshipResult
+	if err := json.Unmarshal(data, &result); err != nil {
+		return client.AddDocActorRelationshipResult{}, err
+	}
+
+	return result, nil
+}
diff --git a/tests/clients/http/wrapper.go b/tests/clients/http/wrapper.go
index 2b84bfc701..81ed74b095 100644
--- a/tests/clients/http/wrapper.go
+++ b/tests/clients/http/wrapper.go
@@ -105,6 +105,22 @@ func (w *Wrapper) AddPolicy(
 	return w.client.AddPolicy(ctx, policy)
 }
 
+func (w *Wrapper) AddDocActorRelationship(
+	ctx context.Context,
+	collectionName string,
+	docID string,
+	relation string,
+	targetActor string,
+) (client.AddDocActorRelationshipResult, error) {
+	return w.client.AddDocActorRelationship(
+		ctx,
+		collectionName,
+		docID,
+		relation,
+		targetActor,
+	)
+}
+
 func (w *Wrapper) PatchSchema(
 	ctx context.Context,
 	patch string,
diff --git a/tests/integration/acp.go b/tests/integration/acp.go
index a6efd64110..a8f41e5f41 100644
--- a/tests/integration/acp.go
+++ b/tests/integration/acp.go
@@ -133,14 +133,192 @@ func addPolicyACP(
 	}
 }
 
+// AddDocActorRelationship will attempt to create a new relationship for a document with an actor.
+type AddDocActorRelationship struct {
+	// NodeID may hold the ID (index) of the node we want to add the doc actor relationship on.
+	//
+	// If a value is not provided the relationship will be added on all nodes, unless testing with
+	// SourceHub ACP, in which case the relationship will only be defined once.
+	NodeID immutable.Option[int]
+
+	// The index of the collection that contains the document we want to add a relationship for.
+	//
+	// This is a required field. To test the invalid usage of not having this arg, use -1 index.
+	CollectionID int
+
+	// The index-identifier of the document within the collection. This is based on
+	// the order in which it was created, not the ordering of the document within the
+	// database.
+	//
+	// This is a required field. To test the invalid usage of not having this arg, use -1 index.
+	DocID int
+
+	// The name of the relation to set between the document and the target actor (must be defined in the policy).
+	//
+	// This is a required field.
+	Relation string
+
+	// The target public identity, i.e. the identity of the actor to tie the document's relation with.
+	//
+	// This is a required field. To test the invalid usage of not having this arg, use -1 index.
+	TargetIdentity int
+
+	// The requestor identity, i.e. the identity of the actor creating the relationship.
+	// Note: This identity must either own or have managing access defined in the policy.
+	//
+	// This is a required field. To test the invalid usage of not having this arg, use -1 index.
+	RequestorIdentity int
+
+	// ExpectedExistence is true if the relationship is expected to have existed already
+	// (making this action a no-op), and false if a new relationship is expected to be made.
+	ExpectedExistence bool
+
+	// Any error expected from the action. Optional.
+	//
+	// String can be a partial, and the test will pass if an error is returned that
+	// contains this string.
+	ExpectedError string
+}
+
+func addDocActorRelationshipACP(
+	s *state,
+	action AddDocActorRelationship,
+) {
+	if action.NodeID.HasValue() {
+		nodeID := action.NodeID.Value()
+		collections := s.collections[nodeID]
+		node := s.nodes[nodeID]
+
+		var collectionName string
+		if action.CollectionID == -1 {
+			collectionName = ""
+		} else {
+			collection := collections[action.CollectionID]
+			if !collection.Description().Name.HasValue() {
+				require.Fail(s.t, "Expected non-empty collection name, but it was empty.", s.testCase.Description)
+			}
+			collectionName = collection.Description().Name.Value()
+		}
+
+		var docID string
+		if action.DocID == -1 || action.CollectionID == -1 {
+			docID = ""
+		} else {
+			docID = s.docIDs[action.CollectionID][action.DocID].String()
+		}
+
+		var targetIdentity string
+		if action.TargetIdentity == -1 {
+			targetIdentity = ""
+		} else {
+			optionalTargetIdentity := getIdentity(s, nodeID, immutable.Some(action.TargetIdentity))
+			if !optionalTargetIdentity.HasValue() {
+				require.Fail(s.t, "Expected non-empty target identity, but it was empty.", s.testCase.Description)
+			}
+			targetIdentity = optionalTargetIdentity.Value().DID
+		}
+
+		var requestorIdentity immutable.Option[acpIdentity.Identity]
+		if action.RequestorIdentity == -1 {
+			requestorIdentity = acpIdentity.None
+		} else {
+			requestorIdentity = getIdentity(s, nodeID, immutable.Some(action.RequestorIdentity))
+			if !requestorIdentity.HasValue() {
+				require.Fail(s.t, "Expected non-empty requestor identity, but it was empty.", s.testCase.Description)
+			}
+		}
+		ctx := db.SetContextIdentity(s.ctx, requestorIdentity)
+
+		exists, err := node.AddDocActorRelationship(
+			ctx,
+			collectionName,
+			docID,
+			action.Relation,
+			targetIdentity,
+		)
+
+		if err == nil {
+			require.Equal(s.t, action.ExpectedError, "")
+			require.Equal(s.t, action.ExpectedExistence, exists.ExistedAlready)
+		}
+
+		expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+		assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
+	} else {
+		for i, node := range getNodes(action.NodeID, s.nodes) {
+			var collectionName string
+			if action.CollectionID == -1 {
+				collectionName = ""
+			} else {
+				collection := s.collections[i][action.CollectionID]
+				if !collection.Description().Name.HasValue() {
+					require.Fail(s.t, "Expected non-empty collection name, but it was empty.", s.testCase.Description)
+				}
+				collectionName = collection.Description().Name.Value()
+			}
+
+			var docID string
+			if action.DocID == -1 || action.CollectionID == -1 {
+				docID = ""
+			} else {
+				docID = s.docIDs[action.CollectionID][action.DocID].String()
+			}
+
+			var targetIdentity string
+			if action.TargetIdentity == -1 {
+				targetIdentity = ""
+			} else {
+				optionalTargetIdentity := getIdentity(s, i, immutable.Some(action.TargetIdentity))
+				if !optionalTargetIdentity.HasValue() {
+					require.Fail(s.t, "Expected non-empty target identity, but it was empty.", s.testCase.Description)
+				}
+				targetIdentity = optionalTargetIdentity.Value().DID
+			}
+
+			var requestorIdentity immutable.Option[acpIdentity.Identity]
+			if action.RequestorIdentity == -1 {
+				requestorIdentity = acpIdentity.None
+			} else {
+				requestorIdentity = getIdentity(s, i, immutable.Some(action.RequestorIdentity))
+				if !requestorIdentity.HasValue() {
+					require.Fail(s.t, "Expected non-empty requestor identity, but it was empty.", s.testCase.Description)
+				}
+			}
+			ctx := db.SetContextIdentity(s.ctx, requestorIdentity)
+
+			exists, err := node.AddDocActorRelationship(
+				ctx,
+				collectionName,
+				docID,
+				action.Relation,
+				targetIdentity,
+			)
+
+			if err == nil {
+				require.Equal(s.t, action.ExpectedError, "")
+				require.Equal(s.t, action.ExpectedExistence, exists.ExistedAlready)
+			}
+
+			expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+			assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
+
+			// The relationship should only be added to a SourceHub chain once; there is no need to loop through
+			// the nodes.
+			if acpType == SourceHubACPType {
+				break
+			}
+		}
+	}
+}
+
 func setupSourceHub(s *state) ([]node.ACPOpt, error) {
 	var isACPTest bool
 	for _, a := range s.testCase.Actions {
-		if _, ok := a.(AddPolicy); ok {
+		switch a.(type) {
+		case AddPolicy, AddDocActorRelationship:
 			isACPTest = true
-			break
 		}
 	}
+
 	if !isACPTest {
 		// Spinning up SourceHub instances is a bit slow, so we should be quite aggressive in trimming down the
 		// runtime of the test suite when SourceHub ACP is selected.
@@ -405,6 +583,37 @@ func crossLock(port uint16) (func(), error) {
 		nil
 }
 
+// Generate the keys using the index as the seed so that multiple
+// runs yield the same private key. This is important for things like
+// the change detector.
+func generateIdentity(s *state, seedIndex int, nodeIndex int) (acpIdentity.Identity, error) {
+	var audience immutable.Option[string]
+	switch client := s.nodes[nodeIndex].(type) {
+	case *http.Wrapper:
+		audience = immutable.Some(strings.TrimPrefix(client.Host(), "http://"))
+	case *cli.Wrapper:
+		audience = immutable.Some(strings.TrimPrefix(client.Host(), "http://"))
+	}
+
+	source := rand.NewSource(int64(seedIndex))
+	r := rand.New(source)
+
+	privateKey, err := secp256k1.GeneratePrivateKeyFromRand(r)
+	require.NoError(s.t, err)
+
+	identity, err := acpIdentity.FromPrivateKey(
+		privateKey,
+		authTokenExpiration,
+		audience,
+		immutable.Some(s.sourcehubAddress),
+		// Creating and signing the bearer token is slow, so we skip it if it is not
+		// required.
+		!(acpType == SourceHubACPType || audience.HasValue()),
+	)
+
+	return identity, err
+}
+
 func getIdentity(s *state, nodeIndex int, index immutable.Option[int]) immutable.Option[acpIdentity.Identity] {
 	if !index.HasValue() {
 		return immutable.None[acpIdentity.Identity]()
@@ -419,40 +628,18 @@ func getIdentity(s *state, nodeIndex int, index immutable.Option[int]) immutable
 
 	if len(nodeIdentities) <= index.Value() {
 		identities := make([]acpIdentity.Identity, index.Value()+1)
-		copy(identities, nodeIdentities)
-		nodeIdentities = identities
-		s.identities[nodeIndex] = nodeIdentities
-
-		var audience immutable.Option[string]
-		switch client := s.nodes[nodeIndex].(type) {
-		case *http.Wrapper:
-			audience = immutable.Some(strings.TrimPrefix(client.Host(), "http://"))
-		case *cli.Wrapper:
-			audience = immutable.Some(strings.TrimPrefix(client.Host(), "http://"))
+		// Fill any empty identities up to the index.
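+		// Reuse any identity that was already generated for this node and generate
+		// only the empty slots, each seeded by its own index, so that repeated
+		// lookups stay deterministic.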
+ for i := range identities { + if i < len(nodeIdentities) && nodeIdentities[i] != (acpIdentity.Identity{}) { + identities[i] = nodeIdentities[i] + continue + } + newIdentity, err := generateIdentity(s, i, nodeIndex) + require.NoError(s.t, err) + identities[i] = newIdentity } - - // Generate the keys using the index as the seed so that multiple - // runs yield the same private key. This is important for stuff like - // the change detector. - source := rand.NewSource(int64(index.Value())) - r := rand.New(source) - - privateKey, err := secp256k1.GeneratePrivateKeyFromRand(r) - require.NoError(s.t, err) - - identity, err := acpIdentity.FromPrivateKey( - privateKey, - authTokenExpiration, - audience, - immutable.Some(s.sourcehubAddress), - // Creating and signing the bearer token is slow, so we skip it if it not - // required. - !(acpType == SourceHubACPType || audience.HasValue()), - ) - require.NoError(s.t, err) - - nodeIdentities[index.Value()] = identity - return immutable.Some(identity) + s.identities[nodeIndex] = identities + return immutable.Some(identities[index.Value()]) } else { return immutable.Some(nodeIdentities[index.Value()]) } diff --git a/tests/integration/acp/p2p/replicator_with_doc_actor_relationship_test.go b/tests/integration/acp/p2p/replicator_with_doc_actor_relationship_test.go new file mode 100644 index 0000000000..fe06e10061 --- /dev/null +++ b/tests/integration/acp/p2p/replicator_with_doc_actor_relationship_test.go @@ -0,0 +1,219 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
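+
+// The integer identities used by the actions in this file (e.g. Identity: immutable.Some(1))
+// are resolved by the test harness via getIdentity/generateIdentity in tests/integration/acp.go,
+// which seed key generation with the identity index so the same index always yields the same
+// actor. A minimal sketch of that derivation:
+//
+//	r := rand.New(rand.NewSource(int64(index)))
+//	privateKey, err := secp256k1.GeneratePrivateKeyFromRand(r) // same index => same key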
+ +package test_acp_p2p + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_P2PReplicatorWithPermissionedCollectionCreateDocActorRelationship_SourceHubACP(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, p2p replicator with collection that has a policy, create a new doc-actor relationship", + + SupportedACPTypes: immutable.Some( + []testUtils.ACPType{ + testUtils.SourceHubACPType, + }, + ), + + Actions: []any{ + testUtils.RandomNetworkingConfig(), + + testUtils.RandomNetworkingConfig(), + + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.ConfigureReplicator{ + SourceNodeID: 0, + + TargetNodeID: 1, + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + NodeID: immutable.Some(0), + + CollectionID: 0, + + DocMap: map[string]any{ + "name": "Shahzad", + }, + }, + + testUtils.WaitForSync{}, + + testUtils.Request{ + // Ensure that the document is hidden on all nodes to an unauthorized actor + Identity: immutable.Some(2), + + Request: ` + query { + Users { + name + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + + testUtils.AddDocActorRelationship{ + NodeID: immutable.Some(0), + + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + }, + + testUtils.AddDocActorRelationship{ + NodeID: immutable.Some(1), // Note: Different node than the previous + + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: true, // Making the same relation through any node should be a no-op + }, + + testUtils.Request{ + // Ensure that the document is now accessible on all nodes to the newly authorized actor. + Identity: immutable.Some(2), + + Request: ` + query { + Users { + name + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + + testUtils.Request{ + // Ensure that the document is still accessible on all nodes to the owner. 
+ Identity: immutable.Some(1), + + Request: ` + query { + Users { + name + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/p2p/subscribe_with_doc_actor_relationship_test.go b/tests/integration/acp/p2p/subscribe_with_doc_actor_relationship_test.go new file mode 100644 index 0000000000..a55c5a333e --- /dev/null +++ b/tests/integration/acp/p2p/subscribe_with_doc_actor_relationship_test.go @@ -0,0 +1,225 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_p2p + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollectionCreateDocActorRelationship_SourceHubACP(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, p2p subscribe collection that has a policy, and create a new doc-actor relationship", + + SupportedACPTypes: immutable.Some( + []testUtils.ACPType{ + testUtils.SourceHubACPType, + }, + ), + + Actions: []any{ + testUtils.RandomNetworkingConfig(), + + testUtils.RandomNetworkingConfig(), + + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.ConnectPeers{ + SourceNodeID: 1, + + TargetNodeID: 0, + }, + + testUtils.SubscribeToCollection{ + NodeID: 1, + + CollectionIDs: []int{0}, + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + NodeID: immutable.Some(0), + + CollectionID: 0, + + DocMap: map[string]any{ + "name": "Shahzad", + }, + }, + + testUtils.WaitForSync{}, + + testUtils.Request{ + // Ensure that the document is hidden on all nodes to an unauthorized actor + Identity: immutable.Some(2), + + Request: ` + query { + Users { + name + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + + testUtils.AddDocActorRelationship{ + NodeID: immutable.Some(0), + + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + }, + + testUtils.AddDocActorRelationship{ + NodeID: immutable.Some(1), // Note: Different node than the previous + + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: true, // Making the same relation through any node should be a no-op + }, + + testUtils.Request{ + // Ensure that the 
document is now accessible on all nodes to the newly authorized actor. + Identity: immutable.Some(2), + + Request: ` + query { + Users { + name + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + + testUtils.Request{ + // Ensure that the document is still accessible on all nodes to the owner. + Identity: immutable.Some(1), + + Request: ` + query { + Users { + name + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_collection_with_no_policy_test.go b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_collection_with_no_policy_test.go new file mode 100644 index 0000000000..a614ef3ce9 --- /dev/null +++ b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_collection_with_no_policy_test.go @@ -0,0 +1,66 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_relationship_add_docactor + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDocActorRelationshipWithCollectionThatHasNoPolicy_NotAllowedError(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, add doc actor relationship on a collection with no policy, not allowed error", + + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedError: "operation requires ACP, but collection has no policy", // Everything is public anyway + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_invalid_test.go b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_invalid_test.go new file mode 100644 index 0000000000..cc0e0dac69 --- /dev/null +++ b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_invalid_test.go @@ -0,0 +1,545 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
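+
+// These tests exercise the -1 index convention of the AddDocActorRelationship action:
+// a CollectionID, DocID, TargetIdentity, or RequestorIdentity of -1 is translated by the
+// harness into an empty value, which is how a missing required argument is simulated.
+// The translation in addDocActorRelationshipACP looks like:
+//
+//	var docID string
+//	if action.DocID == -1 || action.CollectionID == -1 {
+//		docID = "" // empty value triggers the missing-argument error path
+//	}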
+ +package test_acp_relationship_add_docactor + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDocActorRelationshipMissingDocID_Error(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, add doc actor relationship with docID missing, return error", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: -1, + + Relation: "reader", + + ExpectedError: "missing a required argument needed to add doc actor relationship.", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDocActorRelationshipMissingCollection_Error(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, add doc actor relationship with collection missing, return error", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: -1, + + DocID: 0, + + Relation: "reader", + + ExpectedError: "collection name can't be empty", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDocActorRelationshipMissingRelationName_Error(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, add doc actor relationship with relation name missing, return error", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + 
writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "", + + ExpectedError: "missing a required argument needed to add doc actor relationship.", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDocActorRelationshipMissingTargetActorName_Error(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, add doc actor relationship with target actor missing, return error", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: -1, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedError: "missing a required argument needed to add doc actor relationship.", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDocActorRelationshipMissingReqestingIdentityName_Error(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, add doc actor relationship with requesting identity missing, return error", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + 
testUtils.AddDocActorRelationship{ + RequestorIdentity: -1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedError: "missing a required argument needed to add doc actor relationship.", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_delete_test.go b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_delete_test.go new file mode 100644 index 0000000000..9be3ace27d --- /dev/null +++ b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_delete_test.go @@ -0,0 +1,505 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_relationship_add_docactor + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorTwice_ShowThatTheRelationshipAlreadyExists(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives write(delete) access to another actor twice, no-op", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can not delete yet. 
+ + DocID: 0, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: false, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: true, // is a no-op + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_OwnerGivesDeleteWriteAccessToAnotherActor_OtherActorCanDelete(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives write(delete) access to another actor", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can not delete yet. + + DocID: 0, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: false, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can now read. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can now delete. + + DocID: 0, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // Check if actually deleted. 
+ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_OwnerGivesDeleteWriteAccessToAnotherActor_OtherActorCanDeleteSoCanTheOwner(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives write(delete) access to another actor, both can read", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: false, + }, + + testUtils.Request{ + Identity: immutable.Some(1), // Owner can still also delete (ownership not transferred) + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(1), // Owner can still also delete. + + DocID: 0, + }, + + testUtils.Request{ + Identity: immutable.Some(1), // Check if actually deleted. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_dummy_relation_test.go b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_dummy_relation_test.go new file mode 100644 index 0000000000..66e17ba00a --- /dev/null +++ b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_dummy_relation_test.go @@ -0,0 +1,302 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
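+
+// Background for these tests: in the policy used below, the "dummy" relation is referenced
+// only by the unused "nothing" permission (nothing: expr: dummy). Since document access is
+// gated by the "read" and "write" permissions, granting "dummy" to an actor changes nothing
+// that the database enforces.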
+ +package test_acp_relationship_add_docactor + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDocActorRelationshipWithDummyRelationDefinedOnPolicy_NothingChanges(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, add doc actor relationship with a dummy relation defined on policy, nothing happens", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents + }, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "dummy", // Doesn't mean anything to the database. + + ExpectedExistence: false, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can still not read. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AddDocActorRelationshipWithDummyRelationNotDefinedOnPolicy_Error(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, add doc actor relationship with an invalid relation (not defined on policy), error", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read yet. 
+ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents + }, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "NotOnPolicy", // Doesn't mean anything to the database and not on policy either. + + ExpectedError: "failed to add document actor relationship with acp", + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can still not read. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_manager_gql_test.go b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_manager_gql_test.go new file mode 100644 index 0000000000..9c2280d6ce --- /dev/null +++ b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_manager_gql_test.go @@ -0,0 +1,604 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_relationship_add_docactor + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_GQL_ManagerCanReadAndWrite(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner makes a manager that gives itself read and write access", + + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return no error when wrong identity is used so test that separately. + testUtils.GQLRequestMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + - writer + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity (to be manager) can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // Manager can't update yet. 
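+				// With the GQL mutation type an unauthorized update returns no error (see
+				// the SupportedMutationTypes note above), so SkipLocalUpdateEvent is set
+				// below: the silently denied write is expected to emit no update event.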
+ + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + SkipLocalUpdateEvent: true, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // Manager can't delete yet. + + DocID: 0, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.AddDocActorRelationship{ // Make admin / manager + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "admin", + + ExpectedExistence: false, + }, + + testUtils.AddDocActorRelationship{ // Manager makes itself a writer + RequestorIdentity: 2, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: false, + }, + + // Note: It is not neccesary to make itself a reader, as becoming a writer allows reading. + testUtils.AddDocActorRelationship{ // Manager makes itself a reader + RequestorIdentity: 2, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // Manager can now update. + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // Manager can read now + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad Lone", + "age": int64(28), + }, + }, + }, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // Manager can now delete. + + DocID: 0, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // Make sure manager was able to delete the document. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_OwnerMakesManagerButManagerCanNotPerformOperations_GQL_ManagerCantReadOrWrite(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner makes a manager, manager can't read or write", + + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return no error when wrong identity is used so test that separately. 
+ testUtils.GQLRequestMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.AddDocActorRelationship{ // Make admin / manager + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "admin", + + ExpectedExistence: false, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // Manager can not read + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // Manager can not update. + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + SkipLocalUpdateEvent: true, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // Manager can not delete. + + DocID: 0, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.AddDocActorRelationship{ // Manager can manage only. + RequestorIdentity: 2, + + TargetIdentity: 3, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_ManagerAddsRelationshipWithRelationItDoesNotManageAccordingToPolicy_GQL_Error(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, manager adds relationship with relation it does not manage according to policy, error", + + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return no error when wrong identity is used so test that separately. 
+ testUtils.GQLRequestMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.AddDocActorRelationship{ // Make admin / manager + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "admin", + + ExpectedExistence: false, + }, + + testUtils.AddDocActorRelationship{ // Admin tries to make another actor a writer + RequestorIdentity: 2, + + TargetIdentity: 3, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedError: "acp protocol violation", + }, + + testUtils.Request{ + Identity: immutable.Some(3), // The other actor can't read + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(3), // The other actor can not update + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + SkipLocalUpdateEvent: true, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(3), // The other actor can not delete + + DocID: 0, + + ExpectedError: "document not found or not authorized to access", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_manager_test.go b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_manager_test.go new file mode 100644 index 0000000000..4467aa1af9 --- /dev/null +++ b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_manager_test.go @@ -0,0 +1,1286 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
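+
+// The tests below exercise the "manages" stanza of the policy: an actor with the "admin"
+// relation may add the relations listed under manages (e.g. reader) for other actors or
+// for itself, but managing a relation does not by itself grant read or write access.
+// In the policy this looks like:
+//
+//	admin:
+//	  manages:
+//	    - reader
+//	  types:
+//	    - actor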
+ +package test_acp_relationship_add_docactor + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_ManagerGivesReadAccessToAnotherActor_OtherActorCanRead(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives read access to another actor", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(3), // This identity can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.AddDocActorRelationship{ // Make admin / manager + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "admin", + + ExpectedExistence: false, + }, + + testUtils.AddDocActorRelationship{ // Admin makes another actor a reader + RequestorIdentity: 2, + + TargetIdentity: 3, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + }, + + testUtils.Request{ + Identity: immutable.Some(3), // The other actor can read + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(3), // The other actor can not update + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(3), // The other actor can not delete + + DocID: 0, + + ExpectedError: "document not found or not authorized to access", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_ManagerGivesWriteAccessToAnotherActor_OtherActorCanWrite(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives write access to another actor", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - writer + 
types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(3), // This identity can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.AddDocActorRelationship{ // Make admin / manager + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "admin", + + ExpectedExistence: false, + }, + + testUtils.AddDocActorRelationship{ // Admin makes another actor a writer + RequestorIdentity: 2, + + TargetIdentity: 3, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: false, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(3), // The other actor can update + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(3), // The other actor can read + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad Lone", // Updated name + "age": int64(28), + }, + }, + }, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(3), // The other actor can delete + + DocID: 0, + }, + + testUtils.Request{ + Identity: immutable.Some(3), + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ // Check actually deleted + "Users": []map[string]any{}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_OwnerMakesAManagerThatGivesItSelfReadAccess_ManagerCanRead(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner makes a manager that gives itself read access", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity (to be manager) can not read yet. 
+ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.AddDocActorRelationship{ // Make admin / manager + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "admin", + + ExpectedExistence: false, + }, + + testUtils.AddDocActorRelationship{ // Manager makes itself a reader + RequestorIdentity: 2, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // Manager can read now + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // Manager still can't update + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // Manager still can't delete + + DocID: 0, + + ExpectedError: "document not found or not authorized to access", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_OwnerMakesAManagerThatGivesItSelfReadAndWriteAccess_ManagerCanReadAndWrite(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner makes a manager that gives itself read and write access", + + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + - writer + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity (to be manager) can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // Manager can't update yet. + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // Manager can't delete yet. 
+ + DocID: 0, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.AddDocActorRelationship{ // Make admin / manager + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "admin", + + ExpectedExistence: false, + }, + + testUtils.AddDocActorRelationship{ // Manager makes itself a writer + RequestorIdentity: 2, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: false, + }, + + // Note: It is not neccesary to make itself a reader, as becoming a writer allows reading. + testUtils.AddDocActorRelationship{ // Manager makes itself a reader + RequestorIdentity: 2, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // Manager can now update. + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // Manager can read now + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad Lone", + "age": int64(28), + }, + }, + }, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // Manager can now delete. + + DocID: 0, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // Make sure manager was able to delete the document. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_ManagerAddsRelationshipWithRelationItDoesNotManageAccordingToPolicy_Error(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, manager adds relationship with relation it does not manage according to policy, error", + + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.AddDocActorRelationship{ // Make admin / manager + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "admin", + + ExpectedExistence: false, + }, + + testUtils.AddDocActorRelationship{ // Admin tries to make another actor a writer + RequestorIdentity: 2, + + TargetIdentity: 3, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedError: "acp protocol violation", + }, + + 
testUtils.Request{ + Identity: immutable.Some(3), // The other actor can't read + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(3), // The other actor can not update + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(3), // The other actor can not delete + + DocID: 0, + + ExpectedError: "document not found or not authorized to access", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_OwnerMakesManagerButManagerCanNotPerformOperations_ManagerCantReadOrWrite(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner makes a manager, manager can't read or write", + + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.AddDocActorRelationship{ // Make admin / manager + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "admin", + + ExpectedExistence: false, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // Manager can not read + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // Manager can not update. + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // Manager can not delete. + + DocID: 0, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.AddDocActorRelationship{ // Manager can manage only. 
+ RequestorIdentity: 2, + + TargetIdentity: 3, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_CantMakeRelationshipIfNotOwnerOrManager_Error(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, can't make relation if identity doesn't own or manage object, return error", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 2, // This identity can not manage, as it is not an admin yet + + TargetIdentity: 3, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + + ExpectedError: "failed to add document actor relationship with acp", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_only_write_gql_test.go b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_only_write_gql_test.go new file mode 100644 index 0000000000..e3f3e62050 --- /dev/null +++ b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_only_write_gql_test.go @@ -0,0 +1,198 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_relationship_add_docactor + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_GQL_OtherActorCantUpdate(t *testing.T) { + expectedPolicyID := "0a243b1e61f990bccde41db7e81a915ffa1507c1403ae19727ce764d3b08846b" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives write(update) access to another actor, without explicit read permission", + + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return no error when wrong identity is used so test that separately.
+ testUtils.GQLRequestMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can not update yet. + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + SkipLocalUpdateEvent: true, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: false, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can still not update. + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + SkipLocalUpdateEvent: true, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can still not read. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_only_write_test.go b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_only_write_test.go new file mode 100644 index 0000000000..e052d19afd --- /dev/null +++ b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_only_write_test.go @@ -0,0 +1,359 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp_relationship_add_docactor + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_OtherActorCantUpdate(t *testing.T) { + expectedPolicyID := "0a243b1e61f990bccde41db7e81a915ffa1507c1403ae19727ce764d3b08846b" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives write(update) access to another actor, without explicit read permission", + + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can not update yet. + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: false, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can still not update. + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can still not read. 
+ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorWithoutExplicitReadPerm_OtherActorCantDelete(t *testing.T) { + expectedPolicyID := "0a243b1e61f990bccde41db7e81a915ffa1507c1403ae19727ce764d3b08846b" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives write(delete) access to another actor, without explicit read permission", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can not delete yet. + + DocID: 0, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: false, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can still not read. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can still not delete. + + DocID: 0, + + ExpectedError: "document not found or not authorized to access", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_public_document_test.go b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_public_document_test.go new file mode 100644 index 0000000000..e134a821e4 --- /dev/null +++ b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_public_document_test.go @@ -0,0 +1,147 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp_relationship_add_docactor + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_AddDocActorRelationshipWithPublicDocument_CanAlreadyAccess_Error(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, add doc actor relationship on a public document, return error", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ // Note: Is a public document (without an identity). + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // Can read as it is a public document + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedError: "failed to add document actor relationship with acp", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_reader_gql_test.go b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_reader_gql_test.go new file mode 100644 index 0000000000..02a637833f --- /dev/null +++ b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_reader_gql_test.go @@ -0,0 +1,204 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_relationship_add_docactor + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_GQL_OtherActorCanReadButNotUpdate(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives read access to another actor, but the other actor can't update", + + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return no error when wrong identity is used so test that separately. 
+ testUtils.GQLRequestMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.UpdateDoc{ // Since it can't read, it can't update either. + CollectionID: 0, + + Identity: immutable.Some(2), + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + SkipLocalUpdateEvent: true, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // Now this identity can read. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + + testUtils.UpdateDoc{ // But this actor still can't update. + CollectionID: 0, + + Identity: immutable.Some(2), + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + ExpectedError: "document not found or not authorized to access", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_reader_test.go b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_reader_test.go new file mode 100644 index 0000000000..70a7676a96 --- /dev/null +++ b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_reader_test.go @@ -0,0 +1,810 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp_relationship_add_docactor + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_OwnerGivesReadAccessToAnotherActorTwice_ShowThatTheRelationshipAlreadyExists(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives read access to another actor twice, no-op", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: true, // is a no-op + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_OwnerGivesReadAccessToAnotherActor_OtherActorCanRead(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives read access to another actor", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read yet. 
+ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // Now this identity can read. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +// Note: Testing that owner can still read after the relationship was formed is to ensure +// that no transfer of ownership has taken place. +func TestACP_OwnerGivesReadAccessToAnotherActor_OtherActorCanReadSoCanTheOwner(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives read access to another actor, both can read", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // Now this identity can read. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + + testUtils.Request{ + Identity: immutable.Some(1), // And so can the owner (ownership not transferred). 
+ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_OtherActorCanReadButNotUpdate(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives read access to another actor, but the other actor can't update", + + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.UpdateDoc{ // Since it can't read, it can't update either. + CollectionID: 0, + + Identity: immutable.Some(2), + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // Now this identity can read. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + + testUtils.UpdateDoc{ // But this actor still can't update. 
+ CollectionID: 0, + + Identity: immutable.Some(2), + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + ExpectedError: "document not found or not authorized to access", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_OwnerGivesOnlyReadAccessToAnotherActor_OtherActorCanReadButNotDelete(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives read access to another actor, but the other actor can't delete", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.DeleteDoc{ // Since it can't read, it can't delete either. + CollectionID: 0, + + Identity: immutable.Some(2), + + DocID: 0, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // Now this identity can read. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + + testUtils.DeleteDoc{ // But this actor still can't delete. + CollectionID: 0, + + Identity: immutable.Some(2), + + DocID: 0, + + ExpectedError: "document not found or not authorized to access", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_update_gql_test.go b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_update_gql_test.go new file mode 100644 index 0000000000..dcfda587e8 --- /dev/null +++ b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_update_gql_test.go @@ -0,0 +1,360 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp_relationship_add_docactor + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorTwice_GQL_ShowThatTheRelationshipAlreadyExists(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives write(update) access to another actor twice, no-op", + + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return no error when wrong identity is used so test that separately. + testUtils.GQLRequestMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can not update yet. + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + SkipLocalUpdateEvent: true, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: false, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: true, // is a no-op + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_GQL_OtherActorCanUpdate(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives write(update) access to another actor", + + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return no error when wrong identity is used so test that separately. 
+ testUtils.GQLRequestMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can not update yet. + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + SkipLocalUpdateEvent: true, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: false, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can now update. + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can now also read. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad Lone", // Note: updated name + "age": int64(28), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_update_test.go b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_update_test.go new file mode 100644 index 0000000000..79d727a690 --- /dev/null +++ b/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_update_test.go @@ -0,0 +1,541 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_acp_relationship_add_docactor + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorTwice_ShowThatTheRelationshipAlreadyExists(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives write(update) access to another actor twice, no-op", + + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can not update yet. 
+ + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: false, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: true, // is a no-op + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_OtherActorCanUpdate(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives write(update) access to another actor", + + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents yet + }, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can not update yet. + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + + ExpectedError: "document not found or not authorized to access", + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: false, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can now update. + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can now also read. 
+ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad Lone", // Note: updated name + "age": int64(28), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_OwnerGivesUpdateWriteAccessToAnotherActor_OtherActorCanUpdateSoCanTheOwner(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner gives write(update) access to another actor, both can update", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: false, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can now update. + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can now also read. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad Lone", // Note: updated name + "age": int64(28), + }, + }, + }, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(1), // Owner can still also update (ownership not transferred) + + DocID: 0, + + Doc: ` + { + "name": "Lone" + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // The other actor can read the owner's update (ownership not transferred) + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Lone", // Note: updated name + "age": int64(28), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/utils.go b/tests/integration/utils.go index e6ab296140..eb0128ab00 100644 --- a/tests/integration/utils.go +++ b/tests/integration/utils.go @@ -340,6 +340,9 @@ func performAction( case AddPolicy: addPolicyACP(s, action) + case AddDocActorRelationship: + addDocActorRelationshipACP(s, action) + case CreateDoc: createDoc(s, action) From e59f6d98db126034735032b01f76f64bfa8b866c Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 2 Oct 2024 10:42:38 -0700 Subject: [PATCH 49/71] feat: Min and max numerical aggregates (#3078) ## Relevant issue(s) Resolves #2978 ## Description This PR adds support for two new query aggregates `min` and `max`.
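For illustration, a minimal use of the new aggregates could look like the sketch below. This is not an excerpt from the patch: the schema, the documents, and in particular the `_min(Users: {field: age})` request shape are assumptions modelled on the existing `_sum` aggregate and on the integration-test style used elsewhere in this series.

```go
package simple

import (
	"testing"

	testUtils "github.com/sourcenetwork/defradb/tests/integration"
)

// Hypothetical sketch only: assumes _min/_max accept the same request shape as
// the existing _sum aggregate (a target collection plus a field selection).
func TestQuerySimple_WithMinAndMaxOnIntField_Sketch(t *testing.T) {
	test := testUtils.TestCase{
		Description: "Illustrative only: min and max over Users.age",
		Actions: []any{
			testUtils.SchemaUpdate{
				Schema: `
					type Users {
						name: String
						age: Int
					}
				`,
			},
			// Two documents so that the two aggregates return different values.
			testUtils.CreateDoc{Doc: `{"name": "Alice", "age": 28}`},
			testUtils.CreateDoc{Doc: `{"name": "Bob", "age": 30}`},
			testUtils.Request{
				Request: `
					query {
						_min(Users: {field: age})
						_max(Users: {field: age})
					}
				`,
				Results: map[string]any{
					"_min": int64(28),
					"_max": int64(30),
				},
			},
		},
	}
	testUtils.ExecuteTestCase(t, test)
}
```

Internally, both aggregates reduce over `*big.Float` so that `int64`, `uint64`, and `float64` sources can be compared without precision loss, as the new `internal/planner/max.go` below shows.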
## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? Added integration tests Specify the platform(s) on which this was tested: - MacOS --- client/request/consts.go | 6 + internal/planner/aggregate.go | 98 +++ internal/planner/explain.go | 2 + internal/planner/max.go | 255 ++++++++ internal/planner/min.go | 255 ++++++++ internal/planner/operations.go | 2 + internal/planner/select.go | 4 + internal/planner/sum.go | 119 +--- internal/planner/top.go | 4 + internal/request/graphql/schema/generate.go | 124 ++-- .../graphql/schema/types/descriptions.go | 10 + tests/integration/explain.go | 2 + .../explain/debug/top_with_max_test.go | 96 +++ .../explain/debug/top_with_min_test.go | 96 +++ .../explain/debug/with_max_join_test.go | 151 +++++ .../explain/debug/with_max_test.go | 59 ++ .../explain/debug/with_min_join_test.go | 151 +++++ .../explain/debug/with_min_test.go | 59 ++ .../explain/default/top_with_max_test.go | 166 +++++ .../explain/default/top_with_min_test.go | 166 +++++ .../explain/default/with_max_join_test.go | 375 ++++++++++++ .../explain/default/with_max_test.go | 90 +++ .../explain/default/with_min_join_test.go | 375 ++++++++++++ .../explain/default/with_min_test.go | 90 +++ .../explain/execute/with_max_test.go | 138 +++++ .../explain/execute/with_min_test.go | 138 +++++ .../inline_array/with_max_doc_id_test.go | 51 ++ .../inline_array/with_max_filter_test.go | 145 +++++ .../with_max_limit_offset_order_test.go | 281 +++++++++ .../with_max_limit_offset_test.go | 49 ++ .../query/inline_array/with_max_test.go | 305 +++++++++ .../inline_array/with_min_doc_id_test.go | 51 ++ .../inline_array/with_min_filter_test.go | 145 +++++ .../with_min_limit_offset_order_test.go | 281 +++++++++ .../with_min_limit_offset_test.go | 49 ++ .../query/inline_array/with_min_test.go | 273 +++++++++ tests/integration/query/simple/utils.go | 3 +- .../simple/with_group_average_max_test.go | 164 +++++ .../simple/with_group_average_min_test.go | 164 +++++ .../query/simple/with_group_count_max_test.go | 112 ++++ .../query/simple/with_group_count_min_test.go | 112 ++++ .../simple/with_group_max_filter_test.go | 294 +++++++++ .../with_group_max_limit_offset_test.go | 72 +++ .../query/simple/with_group_max_limit_test.go | 72 +++ .../query/simple/with_group_max_test.go | 577 ++++++++++++++++++ .../simple/with_group_min_filter_test.go | 294 +++++++++ .../with_group_min_limit_offset_test.go | 72 +++ .../query/simple/with_group_min_limit_test.go | 72 +++ .../query/simple/with_group_min_test.go | 577 ++++++++++++++++++ .../query/simple/with_max_filter_test.go | 53 ++ .../integration/query/simple/with_max_test.go | 129 ++++ .../query/simple/with_min_filter_test.go | 53 ++ .../integration/query/simple/with_min_test.go | 129 ++++ tests/integration/schema/default_fields.go | 14 + 54 files changed, 7485 insertions(+), 139 deletions(-) create mode 100644 internal/planner/aggregate.go create mode 100644 internal/planner/max.go create mode 100644 internal/planner/min.go create mode 100644 
tests/integration/explain/debug/top_with_max_test.go create mode 100644 tests/integration/explain/debug/top_with_min_test.go create mode 100644 tests/integration/explain/debug/with_max_join_test.go create mode 100644 tests/integration/explain/debug/with_max_test.go create mode 100644 tests/integration/explain/debug/with_min_join_test.go create mode 100644 tests/integration/explain/debug/with_min_test.go create mode 100644 tests/integration/explain/default/top_with_max_test.go create mode 100644 tests/integration/explain/default/top_with_min_test.go create mode 100644 tests/integration/explain/default/with_max_join_test.go create mode 100644 tests/integration/explain/default/with_max_test.go create mode 100644 tests/integration/explain/default/with_min_join_test.go create mode 100644 tests/integration/explain/default/with_min_test.go create mode 100644 tests/integration/explain/execute/with_max_test.go create mode 100644 tests/integration/explain/execute/with_min_test.go create mode 100644 tests/integration/query/inline_array/with_max_doc_id_test.go create mode 100644 tests/integration/query/inline_array/with_max_filter_test.go create mode 100644 tests/integration/query/inline_array/with_max_limit_offset_order_test.go create mode 100644 tests/integration/query/inline_array/with_max_limit_offset_test.go create mode 100644 tests/integration/query/inline_array/with_max_test.go create mode 100644 tests/integration/query/inline_array/with_min_doc_id_test.go create mode 100644 tests/integration/query/inline_array/with_min_filter_test.go create mode 100644 tests/integration/query/inline_array/with_min_limit_offset_order_test.go create mode 100644 tests/integration/query/inline_array/with_min_limit_offset_test.go create mode 100644 tests/integration/query/inline_array/with_min_test.go create mode 100644 tests/integration/query/simple/with_group_average_max_test.go create mode 100644 tests/integration/query/simple/with_group_average_min_test.go create mode 100644 tests/integration/query/simple/with_group_count_max_test.go create mode 100644 tests/integration/query/simple/with_group_count_min_test.go create mode 100644 tests/integration/query/simple/with_group_max_filter_test.go create mode 100644 tests/integration/query/simple/with_group_max_limit_offset_test.go create mode 100644 tests/integration/query/simple/with_group_max_limit_test.go create mode 100644 tests/integration/query/simple/with_group_max_test.go create mode 100644 tests/integration/query/simple/with_group_min_filter_test.go create mode 100644 tests/integration/query/simple/with_group_min_limit_offset_test.go create mode 100644 tests/integration/query/simple/with_group_min_limit_test.go create mode 100644 tests/integration/query/simple/with_group_min_test.go create mode 100644 tests/integration/query/simple/with_max_filter_test.go create mode 100644 tests/integration/query/simple/with_max_test.go create mode 100644 tests/integration/query/simple/with_min_filter_test.go create mode 100644 tests/integration/query/simple/with_min_test.go diff --git a/client/request/consts.go b/client/request/consts.go index 0e27eaeb3d..18fed52946 100644 --- a/client/request/consts.go +++ b/client/request/consts.go @@ -46,6 +46,8 @@ const ( DeletedFieldName = "_deleted" SumFieldName = "_sum" VersionFieldName = "_version" + MaxFieldName = "_max" + MinFieldName = "_min" // New generated document id from a backed up document, // which might have a different _docID originally. 
@@ -109,12 +111,16 @@ var ( AverageFieldName: {}, DocIDFieldName: {}, DeletedFieldName: {}, + MaxFieldName: {}, + MinFieldName: {}, } Aggregates = map[string]struct{}{ CountFieldName: {}, SumFieldName: {}, AverageFieldName: {}, + MaxFieldName: {}, + MinFieldName: {}, } CommitQueries = map[string]struct{}{ diff --git a/internal/planner/aggregate.go b/internal/planner/aggregate.go new file mode 100644 index 0000000000..5feb8fb364 --- /dev/null +++ b/internal/planner/aggregate.go @@ -0,0 +1,98 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package planner + +import ( + "github.com/sourcenetwork/defradb/internal/core" + "github.com/sourcenetwork/defradb/internal/planner/mapper" + + "github.com/sourcenetwork/immutable" + "github.com/sourcenetwork/immutable/enumerable" +) + +type number interface { + int64 | float64 +} + +func lessN[T number](a T, b T) bool { + return a < b +} + +func lessO[T number](a immutable.Option[T], b immutable.Option[T]) bool { + if !a.HasValue() { + return true + } + + if !b.HasValue() { + return false + } + + return a.Value() < b.Value() +} + +// inverse returns the logical inverse of the given sort func. +func inverse[T any](original func(T, T) bool) func(T, T) bool { + return func(t1, t2 T) bool { + return !original(t1, t2) + } +} + +// reduces the documents in a slice, skipping over hidden items (a grouping mechanic). +// +// Docs should be reduced with this function to avoid applying offsets twice (once in the +// select, then once here). 
+func reduceDocs[T any]( + docs []core.Doc, + initialValue T, + reduceFunc func(core.Doc, T) T, +) T { + var value = initialValue + for _, doc := range docs { + if !doc.Hidden { + value = reduceFunc(doc, value) + } + } + return value +} + +func reduceItems[T any, V any]( + source []T, + aggregateTarget *mapper.AggregateTarget, + less func(T, T) bool, + initialValue V, + reduceFunc func(T, V) V, +) (V, error) { + items := enumerable.New(source) + if aggregateTarget.Filter != nil { + items = enumerable.Where(items, func(item T) (bool, error) { + return mapper.RunFilter(item, aggregateTarget.Filter) + }) + } + + if aggregateTarget.OrderBy != nil && len(aggregateTarget.OrderBy.Conditions) > 0 { + if aggregateTarget.OrderBy.Conditions[0].Direction == mapper.ASC { + items = enumerable.Sort(items, less, len(source)) + } else { + items = enumerable.Sort(items, inverse(less), len(source)) + } + } + + if aggregateTarget.Limit != nil { + items = enumerable.Skip(items, aggregateTarget.Limit.Offset) + items = enumerable.Take(items, aggregateTarget.Limit.Limit) + } + + var value = initialValue + err := enumerable.ForEach(items, func(item T) { + value = reduceFunc(item, value) + }) + return value, err +} diff --git a/internal/planner/explain.go b/internal/planner/explain.go index 76679a85e3..860ea39df1 100644 --- a/internal/planner/explain.go +++ b/internal/planner/explain.go @@ -39,6 +39,8 @@ var ( _ explainablePlanNode = (*deleteNode)(nil) _ explainablePlanNode = (*groupNode)(nil) _ explainablePlanNode = (*limitNode)(nil) + _ explainablePlanNode = (*maxNode)(nil) + _ explainablePlanNode = (*minNode)(nil) _ explainablePlanNode = (*orderNode)(nil) _ explainablePlanNode = (*scanNode)(nil) _ explainablePlanNode = (*selectNode)(nil) diff --git a/internal/planner/max.go b/internal/planner/max.go new file mode 100644 index 0000000000..dbcc991268 --- /dev/null +++ b/internal/planner/max.go @@ -0,0 +1,255 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package planner + +import ( + "math/big" + + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/internal/core" + "github.com/sourcenetwork/defradb/internal/planner/mapper" +) + +type maxNode struct { + documentIterator + docMapper + + p *Planner + plan planNode + parent *mapper.Select + + // virtualFieldIndex is the index of the field + // that contains the result of the aggregate. + virtualFieldIndex int + aggregateMapping []mapper.AggregateTarget + + execInfo maxExecInfo +} + +type maxExecInfo struct { + // Total number of times maxNode was executed. 
+ iterations uint64 +} + +func (p *Planner) Max( + field *mapper.Aggregate, + parent *mapper.Select, +) (*maxNode, error) { + return &maxNode{ + p: p, + parent: parent, + aggregateMapping: field.AggregateTargets, + virtualFieldIndex: field.Index, + docMapper: docMapper{field.DocumentMapping}, + }, nil +} + +func (n *maxNode) Kind() string { return "maxNode" } +func (n *maxNode) Init() error { return n.plan.Init() } +func (n *maxNode) Start() error { return n.plan.Start() } +func (n *maxNode) Spans(spans core.Spans) { n.plan.Spans(spans) } +func (n *maxNode) Close() error { return n.plan.Close() } +func (n *maxNode) Source() planNode { return n.plan } +func (n *maxNode) SetPlan(p planNode) { n.plan = p } + +func (n *maxNode) simpleExplain() (map[string]any, error) { + sourceExplanations := make([]map[string]any, len(n.aggregateMapping)) + + for i, source := range n.aggregateMapping { + simpleExplainMap := map[string]any{} + + // Add the filter attribute if it exists. + if source.Filter == nil { + simpleExplainMap[filterLabel] = nil + } else { + // get the target aggregate document mapping. Since the filters + // are relative to the target aggregate collection (and doc mapper). + var targetMap *core.DocumentMapping + if source.Index < len(n.documentMapping.ChildMappings) && + n.documentMapping.ChildMappings[source.Index] != nil { + targetMap = n.documentMapping.ChildMappings[source.Index] + } else { + targetMap = n.documentMapping + } + simpleExplainMap[filterLabel] = source.Filter.ToMap(targetMap) + } + + // Add the main field name. + simpleExplainMap[fieldNameLabel] = source.Field.Name + + // Add the child field name if it exists. + if source.ChildTarget.HasValue { + simpleExplainMap[childFieldNameLabel] = source.ChildTarget.Name + } else { + simpleExplainMap[childFieldNameLabel] = nil + } + + sourceExplanations[i] = simpleExplainMap + } + + return map[string]any{ + sourcesLabel: sourceExplanations, + }, nil +} + +// Explain method returns a map containing all attributes of this node that +// are to be explained, subscribes / opts-in this node to be an explainablePlanNode. 
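Further down in this file, maxNode.Next compares candidates as *big.Float values so that int64, uint64, and float64 children share one exact comparison domain. A small self-contained illustration of why; the values are chosen for this note, not taken from the patch:

    package main

    import (
    	"fmt"
    	"math/big"
    )

    func main() {
    	// 2^53+1 cannot be represented exactly as a float64, but big.Float
    	// (precision 64 via SetInt64) keeps it exact during comparison.
    	a := new(big.Float).SetInt64(9007199254740993)
    	b := new(big.Float).SetFloat64(9007199254740992)
    	fmt.Println(a.Cmp(b)) // 1: the integer correctly compares greater
    }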
+func (n *maxNode) Explain(explainType request.ExplainType) (map[string]any, error) { + switch explainType { + case request.SimpleExplain: + return n.simpleExplain() + + case request.ExecuteExplain: + return map[string]any{ + "iterations": n.execInfo.iterations, + }, nil + + default: + return nil, ErrUnknownExplainRequestType + } +} + +func (n *maxNode) Next() (bool, error) { + n.execInfo.iterations++ + + hasNext, err := n.plan.Next() + if err != nil || !hasNext { + return hasNext, err + } + n.currentValue = n.plan.Value() + + var max *big.Float + isFloat := false + + for _, source := range n.aggregateMapping { + child := n.currentValue.Fields[source.Index] + var collectionMax *big.Float + var err error + switch childCollection := child.(type) { + case []core.Doc: + collectionMax = reduceDocs( + childCollection, + nil, + func(childItem core.Doc, value *big.Float) *big.Float { + childProperty := childItem.Fields[source.ChildTarget.Index] + res := &big.Float{} + switch v := childProperty.(type) { + case int: + res = res.SetInt64(int64(v)) + case int64: + res = res.SetInt64(v) + case uint64: + res = res.SetUint64(v) + case float64: + res = res.SetFloat64(v) + default: + return nil + } + if value == nil || res.Cmp(value) > 0 { + return res + } + return value + }, + ) + + case []int64: + collectionMax, err = reduceItems( + childCollection, + &source, + lessN[int64], + nil, + func(childItem int64, value *big.Float) *big.Float { + res := (&big.Float{}).SetInt64(childItem) + if value == nil || res.Cmp(value) > 0 { + return res + } + return value + }, + ) + + case []immutable.Option[int64]: + collectionMax, err = reduceItems( + childCollection, + &source, + lessO[int64], + nil, + func(childItem immutable.Option[int64], value *big.Float) *big.Float { + if !childItem.HasValue() { + return value + } + res := (&big.Float{}).SetInt64(childItem.Value()) + if value == nil || res.Cmp(value) > 0 { + return res + } + return value + }, + ) + + case []float64: + collectionMax, err = reduceItems( + childCollection, + &source, + lessN[float64], + nil, + func(childItem float64, value *big.Float) *big.Float { + res := big.NewFloat(childItem) + if value == nil || res.Cmp(value) > 0 { + return res + } + return value + }, + ) + + case []immutable.Option[float64]: + collectionMax, err = reduceItems( + childCollection, + &source, + lessO[float64], + nil, + func(childItem immutable.Option[float64], value *big.Float) *big.Float { + if !childItem.HasValue() { + return value + } + res := big.NewFloat(childItem.Value()) + if value == nil || res.Cmp(value) > 0 { + return res + } + return value + }, + ) + } + if err != nil { + return false, err + } + if collectionMax == nil || (max != nil && collectionMax.Cmp(max) <= 0) { + continue + } + isTargetFloat, err := n.p.isValueFloat(n.parent, &source) + if err != nil { + return false, err + } + isFloat = isTargetFloat + max = collectionMax + } + + if max == nil { + n.currentValue.Fields[n.virtualFieldIndex] = nil + } else if isFloat { + res, _ := max.Float64() + n.currentValue.Fields[n.virtualFieldIndex] = res + } else { + res, _ := max.Int64() + n.currentValue.Fields[n.virtualFieldIndex] = res + } + return true, nil +} diff --git a/internal/planner/min.go b/internal/planner/min.go new file mode 100644 index 0000000000..9be8ecd30a --- /dev/null +++ b/internal/planner/min.go @@ -0,0 +1,255 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package planner + +import ( + "math/big" + + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/internal/core" + "github.com/sourcenetwork/defradb/internal/planner/mapper" +) + +type minNode struct { + documentIterator + docMapper + + p *Planner + plan planNode + parent *mapper.Select + + // virtualFieldIndex is the index of the field + // that contains the result of the aggregate. + virtualFieldIndex int + aggregateMapping []mapper.AggregateTarget + + execInfo minExecInfo +} + +type minExecInfo struct { + // Total number of times minNode was executed. + iterations uint64 +} + +func (p *Planner) Min( + field *mapper.Aggregate, + parent *mapper.Select, +) (*minNode, error) { + return &minNode{ + p: p, + parent: parent, + aggregateMapping: field.AggregateTargets, + virtualFieldIndex: field.Index, + docMapper: docMapper{field.DocumentMapping}, + }, nil +} + +func (n *minNode) Kind() string { return "minNode" } +func (n *minNode) Init() error { return n.plan.Init() } +func (n *minNode) Start() error { return n.plan.Start() } +func (n *minNode) Spans(spans core.Spans) { n.plan.Spans(spans) } +func (n *minNode) Close() error { return n.plan.Close() } +func (n *minNode) Source() planNode { return n.plan } +func (n *minNode) SetPlan(p planNode) { n.plan = p } + +func (n *minNode) simpleExplain() (map[string]any, error) { + sourceExplanations := make([]map[string]any, len(n.aggregateMapping)) + + for i, source := range n.aggregateMapping { + simpleExplainMap := map[string]any{} + + // Add the filter attribute if it exists. + if source.Filter == nil { + simpleExplainMap[filterLabel] = nil + } else { + // get the target aggregate document mapping. Since the filters + // are relative to the target aggregate collection (and doc mapper). + var targetMap *core.DocumentMapping + if source.Index < len(n.documentMapping.ChildMappings) && + n.documentMapping.ChildMappings[source.Index] != nil { + targetMap = n.documentMapping.ChildMappings[source.Index] + } else { + targetMap = n.documentMapping + } + simpleExplainMap[filterLabel] = source.Filter.ToMap(targetMap) + } + + // Add the main field name. + simpleExplainMap[fieldNameLabel] = source.Field.Name + + // Add the child field name if it exists. + if source.ChildTarget.HasValue { + simpleExplainMap[childFieldNameLabel] = source.ChildTarget.Name + } else { + simpleExplainMap[childFieldNameLabel] = nil + } + + sourceExplanations[i] = simpleExplainMap + } + + return map[string]any{ + sourcesLabel: sourceExplanations, + }, nil +} + +// Explain method returns a map containing all attributes of this node that +// are to be explained, subscribes / opts-in this node to be an explainablePlanNode. 
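A note on optional values: minNode's reducers below skip Options with no value, and the order-by path relies on lessO (from aggregate.go earlier in this patch) placing unset values first in ascending order. A self-contained restatement for illustration only:

    package main

    import (
    	"fmt"

    	"github.com/sourcenetwork/immutable"
    )

    // lessO is restated from aggregate.go: an unset Option orders
    // before any set value.
    func lessO[T int64 | float64](a, b immutable.Option[T]) bool {
    	if !a.HasValue() {
    		return true
    	}
    	if !b.HasValue() {
    		return false
    	}
    	return a.Value() < b.Value()
    }

    func main() {
    	fmt.Println(lessO(immutable.None[int64](), immutable.Some[int64](5))) // true
    	fmt.Println(lessO(immutable.Some[int64](3), immutable.Some[int64](5))) // true
    }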
+func (n *minNode) Explain(explainType request.ExplainType) (map[string]any, error) { + switch explainType { + case request.SimpleExplain: + return n.simpleExplain() + + case request.ExecuteExplain: + return map[string]any{ + "iterations": n.execInfo.iterations, + }, nil + + default: + return nil, ErrUnknownExplainRequestType + } +} + +func (n *minNode) Next() (bool, error) { + n.execInfo.iterations++ + + hasNext, err := n.plan.Next() + if err != nil || !hasNext { + return hasNext, err + } + n.currentValue = n.plan.Value() + + var min *big.Float + isFloat := false + + for _, source := range n.aggregateMapping { + child := n.currentValue.Fields[source.Index] + var collectionMin *big.Float + var err error + switch childCollection := child.(type) { + case []core.Doc: + collectionMin = reduceDocs( + childCollection, + nil, + func(childItem core.Doc, value *big.Float) *big.Float { + childProperty := childItem.Fields[source.ChildTarget.Index] + res := &big.Float{} + switch v := childProperty.(type) { + case int: + res = res.SetInt64(int64(v)) + case int64: + res = res.SetInt64(v) + case uint64: + res = res.SetUint64(v) + case float64: + res = res.SetFloat64(v) + default: + return nil + } + if value == nil || res.Cmp(value) < 0 { + return res + } + return value + }, + ) + + case []int64: + collectionMin, err = reduceItems( + childCollection, + &source, + lessN[int64], + nil, + func(childItem int64, value *big.Float) *big.Float { + res := (&big.Float{}).SetInt64(childItem) + if value == nil || res.Cmp(value) < 0 { + return res + } + return value + }, + ) + + case []immutable.Option[int64]: + collectionMin, err = reduceItems( + childCollection, + &source, + lessO[int64], + nil, + func(childItem immutable.Option[int64], value *big.Float) *big.Float { + if !childItem.HasValue() { + return value + } + res := (&big.Float{}).SetInt64(childItem.Value()) + if value == nil || res.Cmp(value) < 0 { + return res + } + return value + }, + ) + + case []float64: + collectionMin, err = reduceItems( + childCollection, + &source, + lessN[float64], + nil, + func(childItem float64, value *big.Float) *big.Float { + res := big.NewFloat(childItem) + if value == nil || res.Cmp(value) < 0 { + return res + } + return value + }, + ) + + case []immutable.Option[float64]: + collectionMin, err = reduceItems( + childCollection, + &source, + lessO[float64], + nil, + func(childItem immutable.Option[float64], value *big.Float) *big.Float { + if !childItem.HasValue() { + return value + } + res := big.NewFloat(childItem.Value()) + if value == nil || res.Cmp(value) < 0 { + return res + } + return value + }, + ) + } + if err != nil { + return false, err + } + if collectionMin == nil || (min != nil && collectionMin.Cmp(min) >= 0) { + continue + } + isTargetFloat, err := n.p.isValueFloat(n.parent, &source) + if err != nil { + return false, err + } + isFloat = isTargetFloat + min = collectionMin + } + + if min == nil { + n.currentValue.Fields[n.virtualFieldIndex] = nil + } else if isFloat { + res, _ := min.Float64() + n.currentValue.Fields[n.virtualFieldIndex] = res + } else { + res, _ := min.Int64() + n.currentValue.Fields[n.virtualFieldIndex] = res + } + return true, nil +} diff --git a/internal/planner/operations.go b/internal/planner/operations.go index 6cbf7c24d4..73fe1450bb 100644 --- a/internal/planner/operations.go +++ b/internal/planner/operations.go @@ -18,6 +18,8 @@ var ( _ planNode = (*deleteNode)(nil) _ planNode = (*groupNode)(nil) _ planNode = (*limitNode)(nil) + _ planNode = (*maxNode)(nil) + _ planNode = (*minNode)(nil) _ 
planNode = (*multiScanNode)(nil) _ planNode = (*orderNode)(nil) _ planNode = (*parallelNode)(nil) diff --git a/internal/planner/select.go b/internal/planner/select.go index 3c5cc58bee..064f9b2fec 100644 --- a/internal/planner/select.go +++ b/internal/planner/select.go @@ -347,6 +347,10 @@ func (n *selectNode) initFields(selectReq *mapper.Select) ([]aggregateNode, erro plan, aggregateError = n.planner.Sum(f, selectReq) case request.AverageFieldName: plan, aggregateError = n.planner.Average(f) + case request.MaxFieldName: + plan, aggregateError = n.planner.Max(f, selectReq) + case request.MinFieldName: + plan, aggregateError = n.planner.Min(f, selectReq) } if aggregateError != nil { diff --git a/internal/planner/sum.go b/internal/planner/sum.go index ff0b714ebf..177dd72e3a 100644 --- a/internal/planner/sum.go +++ b/internal/planner/sum.go @@ -12,7 +12,6 @@ package planner import ( "github.com/sourcenetwork/immutable" - "github.com/sourcenetwork/immutable/enumerable" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" @@ -231,65 +230,69 @@ func (n *sumNode) Next() (bool, error) { var err error switch childCollection := child.(type) { case []core.Doc: - collectionSum = sumDocs(childCollection, func(childItem core.Doc) float64 { + collectionSum = reduceDocs(childCollection, 0, func(childItem core.Doc, value float64) float64 { childProperty := childItem.Fields[source.ChildTarget.Index] switch v := childProperty.(type) { case int: - return float64(v) + return value + float64(v) case int64: - return float64(v) + return value + float64(v) case uint64: - return float64(v) + return value + float64(v) case float64: - return v + return value + v default: // return nothing, cannot be summed - return 0 + return value + 0 } }) case []int64: - collectionSum, err = sumItems( + collectionSum, err = reduceItems( childCollection, &source, lessN[int64], - func(childItem int64) float64 { - return float64(childItem) + 0, + func(childItem int64, value float64) float64 { + return value + float64(childItem) }, ) case []immutable.Option[int64]: - collectionSum, err = sumItems( + collectionSum, err = reduceItems( childCollection, &source, lessO[int64], - func(childItem immutable.Option[int64]) float64 { + 0, + func(childItem immutable.Option[int64], value float64) float64 { if !childItem.HasValue() { - return 0 + return value + 0 } - return float64(childItem.Value()) + return value + float64(childItem.Value()) }, ) case []float64: - collectionSum, err = sumItems( + collectionSum, err = reduceItems( childCollection, &source, lessN[float64], - func(childItem float64) float64 { - return childItem + 0, + func(childItem float64, value float64) float64 { + return value + childItem }, ) case []immutable.Option[float64]: - collectionSum, err = sumItems( + collectionSum, err = reduceItems( childCollection, &source, lessO[float64], - func(childItem immutable.Option[float64]) float64 { + 0, + func(childItem immutable.Option[float64], value float64) float64 { if !childItem.HasValue() { - return 0 + return value + 0 } - return childItem.Value() + return value + childItem.Value() }, ) } @@ -310,78 +313,4 @@ func (n *sumNode) Next() (bool, error) { return true, nil } -// offsets sums the documents in a slice, skipping over hidden items (a grouping mechanic). -// Docs should be counted with this function to avoid applying offsets twice (once in the -// select, then once here). 
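The removals that follow fold the old sum-specific helpers into the generic reducers added in aggregate.go. Under that refactor the old call shape maps onto the new one roughly as:

    // before: sumItems(source, target, less, toFloat)
    // after:  reduceItems(source, target, less, 0,
    //             func(item T, acc float64) float64 { return acc + toFloat(item) })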
-func sumDocs(docs []core.Doc, toFloat func(core.Doc) float64) float64 { - var sum float64 = 0 - for _, doc := range docs { - if !doc.Hidden { - sum += toFloat(doc) - } - } - - return sum -} - -func sumItems[T any]( - source []T, - aggregateTarget *mapper.AggregateTarget, - less func(T, T) bool, - toFloat func(T) float64, -) (float64, error) { - items := enumerable.New(source) - if aggregateTarget.Filter != nil { - items = enumerable.Where(items, func(item T) (bool, error) { - return mapper.RunFilter(item, aggregateTarget.Filter) - }) - } - - if aggregateTarget.OrderBy != nil && len(aggregateTarget.OrderBy.Conditions) > 0 { - if aggregateTarget.OrderBy.Conditions[0].Direction == mapper.ASC { - items = enumerable.Sort(items, less, len(source)) - } else { - items = enumerable.Sort(items, reverse(less), len(source)) - } - } - - if aggregateTarget.Limit != nil { - items = enumerable.Skip(items, aggregateTarget.Limit.Offset) - items = enumerable.Take(items, aggregateTarget.Limit.Limit) - } - - var sum float64 = 0 - err := enumerable.ForEach(items, func(item T) { - sum += toFloat(item) - }) - - return sum, err -} - func (n *sumNode) SetPlan(p planNode) { n.plan = p } - -type number interface { - int64 | float64 -} - -func lessN[T number](a T, b T) bool { - return a < b -} - -func lessO[T number](a immutable.Option[T], b immutable.Option[T]) bool { - if !a.HasValue() { - return true - } - - if !b.HasValue() { - return false - } - - return a.Value() < b.Value() -} - -func reverse[T any](original func(T, T) bool) func(T, T) bool { - return func(t1, t2 T) bool { - return !original(t1, t2) - } -} diff --git a/internal/planner/top.go b/internal/planner/top.go index d5faa491c1..ce2ce4e6dc 100644 --- a/internal/planner/top.go +++ b/internal/planner/top.go @@ -203,6 +203,10 @@ func (p *Planner) Top(m *mapper.Select) (*topLevelNode, error) { child, err = p.Sum(f, m) case request.AverageFieldName: child, err = p.Average(f) + case request.MaxFieldName: + child, err = p.Max(f, m) + case request.MinFieldName: + child, err = p.Min(f, m) } if err != nil { return nil, err diff --git a/internal/request/graphql/schema/generate.go b/internal/request/graphql/schema/generate.go index 85491f5ee1..f326a8232a 100644 --- a/internal/request/graphql/schema/generate.go +++ b/internal/request/graphql/schema/generate.go @@ -651,10 +651,21 @@ func (g *Generator) genAggregateFields() error { return err } t.AddFieldConfig(averageField.Name, &averageField) + + maxField, err := g.genMaximumFieldConfig(t) + if err != nil { + return err + } + t.AddFieldConfig(maxField.Name, &maxField) + + minField, err := g.genMinimumFieldConfig(t) + if err != nil { + return err + } + t.AddFieldConfig(minField.Name, &minField) } queryType := g.manager.schema.QueryType() - topLevelCountField := genTopLevelCount(topLevelCountInputs) queryType.AddFieldConfig(topLevelCountField.Name, topLevelCountField) @@ -695,12 +706,33 @@ func genTopLevelNumericAggregates(topLevelNumericAggInputs map[string]*gql.Input Args: gql.FieldConfigArgument{}, } + topLevelMaximumField := gql.Field{ + Name: request.MaxFieldName, + Description: schemaTypes.MaximumFieldDescription, + Type: gql.Float, + Args: gql.FieldConfigArgument{}, + } + + topLevelMinimumField := gql.Field{ + Name: request.MinFieldName, + Description: schemaTypes.MinimumFieldDescription, + Type: gql.Float, + Args: gql.FieldConfigArgument{}, + } + for name, inputObject := range topLevelNumericAggInputs { topLevelSumField.Args[name] = schemaTypes.NewArgConfig(inputObject, inputObject.Description()) 
topLevelAverageField.Args[name] = schemaTypes.NewArgConfig(inputObject, inputObject.Description()) + topLevelMaximumField.Args[name] = schemaTypes.NewArgConfig(inputObject, inputObject.Description()) + topLevelMinimumField.Args[name] = schemaTypes.NewArgConfig(inputObject, inputObject.Description()) } - return []*gql.Field{&topLevelSumField, &topLevelAverageField} + return []*gql.Field{ + &topLevelSumField, + &topLevelAverageField, + &topLevelMaximumField, + &topLevelMinimumField, + } } func (g *Generator) genCountFieldConfig(obj *gql.Object) (gql.Field, error) { @@ -741,50 +773,68 @@ func (g *Generator) genCountFieldConfig(obj *gql.Object) (gql.Field, error) { } func (g *Generator) genSumFieldConfig(obj *gql.Object) (gql.Field, error) { - childTypesByFieldName := map[string]gql.Type{} + field := gql.Field{ + Name: request.SumFieldName, + Description: schemaTypes.SumFieldDescription, + Type: gql.Float, + Args: gql.FieldConfigArgument{}, + } - for _, field := range obj.Fields() { - // we can only sum list items - listType, isList := field.Type.(*gql.List) - if !isList { - continue - } + childTypesByFieldName := g.getNumericFields(obj) + for name, inputObject := range childTypesByFieldName { + field.Args[name] = schemaTypes.NewArgConfig(inputObject, inputObject.Description()) + } + return field, nil +} - var inputObjectName string - if isNumericArray(listType) { - inputObjectName = genNumericInlineArraySelectorName(obj.Name(), field.Name) - } else { - inputObjectName = genNumericObjectSelectorName(listType.OfType.Name()) - } +func (g *Generator) genMinimumFieldConfig(obj *gql.Object) (gql.Field, error) { + field := gql.Field{ + Name: request.MinFieldName, + Description: schemaTypes.MinimumFieldDescription, + Type: gql.Float, + Args: gql.FieldConfigArgument{}, + } - subSumType, isSubTypeSumable := g.manager.schema.TypeMap()[inputObjectName] - // If the item is not in the type map, it must contain no summable - // fields (e.g. 
no Int/Floats) - if !isSubTypeSumable { - continue - } - childTypesByFieldName[field.Name] = subSumType + childTypesByFieldName := g.getNumericFields(obj) + for name, inputObject := range childTypesByFieldName { + field.Args[name] = schemaTypes.NewArgConfig(inputObject, inputObject.Description()) } + return field, nil +} +func (g *Generator) genMaximumFieldConfig(obj *gql.Object) (gql.Field, error) { field := gql.Field{ - Name: request.SumFieldName, - Description: schemaTypes.SumFieldDescription, + Name: request.MaxFieldName, + Description: schemaTypes.MaximumFieldDescription, Type: gql.Float, Args: gql.FieldConfigArgument{}, } + childTypesByFieldName := g.getNumericFields(obj) for name, inputObject := range childTypesByFieldName { field.Args[name] = schemaTypes.NewArgConfig(inputObject, inputObject.Description()) } - return field, nil } func (g *Generator) genAverageFieldConfig(obj *gql.Object) (gql.Field, error) { - childTypesByFieldName := map[string]gql.Type{} + field := gql.Field{ + Name: request.AverageFieldName, + Description: schemaTypes.AverageFieldDescription, + Type: gql.Float, + Args: gql.FieldConfigArgument{}, + } + + childTypesByFieldName := g.getNumericFields(obj) + for name, inputObject := range childTypesByFieldName { + field.Args[name] = schemaTypes.NewArgConfig(inputObject, inputObject.Description()) + } + return field, nil +} +func (g *Generator) getNumericFields(obj *gql.Object) map[string]gql.Type { + fieldTypes := map[string]gql.Type{} for _, field := range obj.Fields() { - // we can only sum list items listType, isList := field.Type.(*gql.List) if !isList { continue @@ -798,26 +848,12 @@ func (g *Generator) genAverageFieldConfig(obj *gql.Object) (gql.Field, error) { } subAverageType, isSubTypeAveragable := g.manager.schema.TypeMap()[inputObjectName] - // If the item is not in the type map, it must contain no averagable - // fields (e.g. 
no Int/Floats) if !isSubTypeAveragable { continue } - childTypesByFieldName[field.Name] = subAverageType - } - - field := gql.Field{ - Name: request.AverageFieldName, - Description: schemaTypes.AverageFieldDescription, - Type: gql.Float, - Args: gql.FieldConfigArgument{}, - } - - for name, inputObject := range childTypesByFieldName { - field.Args[name] = schemaTypes.NewArgConfig(inputObject, inputObject.Description()) + fieldTypes[field.Name] = subAverageType } - - return field, nil + return fieldTypes } func (g *Generator) genNumericInlineArraySelectorObject(obj *gql.Object) []*gql.InputObject { @@ -953,6 +989,8 @@ func (g *Generator) genNumericAggregateBaseArgInputs(obj *gql.Object) *gql.Input // A child aggregate will always be aggregatable, as it can be present via an inner grouping fieldsEnumCfg.Values[request.SumFieldName] = &gql.EnumValueConfig{Value: request.SumFieldName} fieldsEnumCfg.Values[request.AverageFieldName] = &gql.EnumValueConfig{Value: request.AverageFieldName} + fieldsEnumCfg.Values[request.MinFieldName] = &gql.EnumValueConfig{Value: request.MinFieldName} + fieldsEnumCfg.Values[request.MaxFieldName] = &gql.EnumValueConfig{Value: request.MaxFieldName} if !hasSumableFields { return nil, nil diff --git a/internal/request/graphql/schema/types/descriptions.go b/internal/request/graphql/schema/types/descriptions.go index 213266d891..e442545995 100644 --- a/internal/request/graphql/schema/types/descriptions.go +++ b/internal/request/graphql/schema/types/descriptions.go @@ -124,6 +124,16 @@ Returns the total sum of the specified field values within the specified child s Returns the average of the specified field values within the specified child sets. If multiple fields/sets are specified, the combined average of all items within each set (true average, not an average of averages) will be returned as a single value. +` + MaximumFieldDescription string = ` +Returns the maximum of the specified field values within the specified child sets. If + multiple fields/sets are specified, the combined maximum of all items within each set + will be returned as a single value. +` + MinimumFieldDescription string = ` +Returns the minimum of the specified field values within the specified child sets. If + multiple fields/sets are specified, the combined minimum of all items within each set + will be returned as a single value. ` booleanOperatorBlockDescription string = ` These are the set of filter operators available for use when filtering on Boolean diff --git a/tests/integration/explain.go b/tests/integration/explain.go index 325e69b3f7..4cdebe9103 100644 --- a/tests/integration/explain.go +++ b/tests/integration/explain.go @@ -41,6 +41,8 @@ var ( "deleteNode": {}, "groupNode": {}, "limitNode": {}, + "maxNode": {}, + "minNode": {}, "multiScanNode": {}, "orderNode": {}, "parallelNode": {}, diff --git a/tests/integration/explain/debug/top_with_max_test.go b/tests/integration/explain/debug/top_with_max_test.go new file mode 100644 index 0000000000..b79f469511 --- /dev/null +++ b/tests/integration/explain/debug/top_with_max_test.go @@ -0,0 +1,96 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
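The debug-explain fixtures that follow assert only the plan shape. For orientation, a top-level request against the new aggregates has roughly this shape; the schema and arguments mirror the tests below:

    query {
      _max(Author: {field: age})
      _min(Author: {field: age, filter: {age: {_gt: 26}}})
    }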
+ +package test_explain_debug + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" +) + +var topLevelMaxPattern = dataMap{ + "explain": dataMap{ + "operationNode": []dataMap{ + { + "topLevelNode": []dataMap{ + { + "selectTopNode": dataMap{ + "selectNode": dataMap{ + "scanNode": dataMap{}, + }, + }, + }, + { + "maxNode": dataMap{}, + }, + }, + }, + }, + }, +} + +func TestDebugExplain_TopLevelMaxRequest_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (debug) top-level max request.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain(type: debug) { + _max( + Author: { + field: age + } + ) + }`, + + ExpectedPatterns: topLevelMaxPattern, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} + +func TestDebugExplain_TopLevelMaxRequestWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (debug) top-level max request with filter.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain(type: debug) { + _max( + Author: { + field: age, + filter: { + age: { + _gt: 26 + } + } + } + ) + }`, + + ExpectedPatterns: topLevelMaxPattern, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/explain/debug/top_with_min_test.go b/tests/integration/explain/debug/top_with_min_test.go new file mode 100644 index 0000000000..1504f23905 --- /dev/null +++ b/tests/integration/explain/debug/top_with_min_test.go @@ -0,0 +1,96 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_explain_debug + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" +) + +var topLevelMinPattern = dataMap{ + "explain": dataMap{ + "operationNode": []dataMap{ + { + "topLevelNode": []dataMap{ + { + "selectTopNode": dataMap{ + "selectNode": dataMap{ + "scanNode": dataMap{}, + }, + }, + }, + { + "minNode": dataMap{}, + }, + }, + }, + }, + }, +} + +func TestDebugExplain_TopLevelMinRequest_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (debug) top-level min request.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain(type: debug) { + _min( + Author: { + field: age + } + ) + }`, + + ExpectedPatterns: topLevelMinPattern, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} + +func TestDebugExplain_TopLevelMinRequestWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (debug) top-level min request with filter.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain(type: debug) { + _min( + Author: { + field: age, + filter: { + age: { + _gt: 26 + } + } + } + ) + }`, + + ExpectedPatterns: topLevelMinPattern, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/explain/debug/with_max_join_test.go b/tests/integration/explain/debug/with_max_join_test.go new file mode 100644 index 0000000000..fd1bf25bfb --- /dev/null +++ b/tests/integration/explain/debug/with_max_join_test.go @@ -0,0 +1,151 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_explain_debug + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" +) + +var debugMaxTypeIndexJoinManyPattern = dataMap{ + "explain": dataMap{ + "operationNode": []dataMap{ + { + "selectTopNode": dataMap{ + "maxNode": dataMap{ + "selectNode": dataMap{ + "typeIndexJoin": dataMap{ + "typeJoinMany": normalTypeJoinPattern, + }, + }, + }, + }, + }, + }, + }, +} + +func TestDebugExplainRequest_WithMaxOnOneToManyJoinedField_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (debug) request with max on a one-to-many joined field.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain(type: debug) { + Author { + name + _docID + MaxPages: _max( + books: {field: pages} + ) + } + }`, + + ExpectedPatterns: debugMaxTypeIndexJoinManyPattern, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} + +func TestDebugExplainRequest_WithMaxOnOneToManyJoinedFieldWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (debug) request with max on a one-to-many joined field, with filter.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain(type: debug) { + Author { + name + MaxPages: _max( + articles: { + field: pages, + filter: { + name: { + _eq: "To my dear readers" + } + } + } + ) + } + }`, + + ExpectedPatterns: debugMaxTypeIndexJoinManyPattern, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} + +func TestDebugExplainRequest_WithMaxOnOneToManyJoinedFieldWithManySources_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (debug) request with max on a one-to-many joined field with many sources.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain(type: debug) { + Author { + name + MaxPages: _max( + books: {field: pages}, + articles: {field: pages} + ) + } + }`, + + ExpectedPatterns: dataMap{ + "explain": dataMap{ + "operationNode": []dataMap{ + { + "selectTopNode": dataMap{ + "maxNode": dataMap{ + "selectNode": dataMap{ + "parallelNode": []dataMap{ + { + "typeIndexJoin": dataMap{ + "typeJoinMany": debugTypeJoinPattern, + }, + }, + { + "typeIndexJoin": dataMap{ + "typeJoinMany": debugTypeJoinPattern, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/explain/debug/with_max_test.go b/tests/integration/explain/debug/with_max_test.go new file mode 100644 index 0000000000..862056b3a7 --- /dev/null +++ b/tests/integration/explain/debug/with_max_test.go @@ -0,0 +1,59 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
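The next file covers aggregates over inline arrays, as in `MaxChapterPages: _max(chapterPages: {})` on Book. With an inline array there is no child field to select, which is why the default-explain counterpart later in this patch asserts a nil childFieldName for this case.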
+ +package test_explain_debug + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" +) + +var maxPattern = dataMap{ + "explain": dataMap{ + "operationNode": []dataMap{ + { + "selectTopNode": dataMap{ + "maxNode": dataMap{ + "selectNode": dataMap{ + "scanNode": dataMap{}, + }, + }, + }, + }, + }, + }, +} + +func TestDebugExplainRequestWithMaxOnInlineArrayField_ChildFieldWillBeEmpty(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (debug) request with max on an inline array field.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain(type: debug) { + Book { + name + MaxChapterPages: _max(chapterPages: {}) + } + }`, + + ExpectedPatterns: maxPattern, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/explain/debug/with_min_join_test.go b/tests/integration/explain/debug/with_min_join_test.go new file mode 100644 index 0000000000..afbecdc687 --- /dev/null +++ b/tests/integration/explain/debug/with_min_join_test.go @@ -0,0 +1,151 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_explain_debug + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" +) + +var debugMinTypeIndexJoinManyPattern = dataMap{ + "explain": dataMap{ + "operationNode": []dataMap{ + { + "selectTopNode": dataMap{ + "minNode": dataMap{ + "selectNode": dataMap{ + "typeIndexJoin": dataMap{ + "typeJoinMany": normalTypeJoinPattern, + }, + }, + }, + }, + }, + }, + }, +} + +func TestDebugExplainRequest_WithMinOnOneToManyJoinedField_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (debug) request with min on a one-to-many joined field.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain(type: debug) { + Author { + name + _docID + MinPages: _min( + books: {field: pages} + ) + } + }`, + + ExpectedPatterns: debugMinTypeIndexJoinManyPattern, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} + +func TestDebugExplainRequest_WithMinOnOneToManyJoinedFieldWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (debug) request with min on a one-to-many joined field, with filter.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain(type: debug) { + Author { + name + MinPages: _min( + articles: { + field: pages, + filter: { + name: { + _eq: "To my dear readers" + } + } + } + ) + } + }`, + + ExpectedPatterns: debugMinTypeIndexJoinManyPattern, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} + +func TestDebugExplainRequest_WithMinOnOneToManyJoinedFieldWithManySources_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (debug) request with min on a one-to-many joined field with many sources.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query 
@explain(type: debug) { + Author { + name + MinPages: _min( + books: {field: pages}, + articles: {field: pages} + ) + } + }`, + + ExpectedPatterns: dataMap{ + "explain": dataMap{ + "operationNode": []dataMap{ + { + "selectTopNode": dataMap{ + "minNode": dataMap{ + "selectNode": dataMap{ + "parallelNode": []dataMap{ + { + "typeIndexJoin": dataMap{ + "typeJoinMany": debugTypeJoinPattern, + }, + }, + { + "typeIndexJoin": dataMap{ + "typeJoinMany": debugTypeJoinPattern, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/explain/debug/with_min_test.go b/tests/integration/explain/debug/with_min_test.go new file mode 100644 index 0000000000..28319dc648 --- /dev/null +++ b/tests/integration/explain/debug/with_min_test.go @@ -0,0 +1,59 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_explain_debug + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" +) + +var minPattern = dataMap{ + "explain": dataMap{ + "operationNode": []dataMap{ + { + "selectTopNode": dataMap{ + "minNode": dataMap{ + "selectNode": dataMap{ + "scanNode": dataMap{}, + }, + }, + }, + }, + }, + }, +} + +func TestDebugExplainRequestWithMinOnInlineArrayField_ChildFieldWillBeEmpty(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (debug) request with min on an inline array field.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain(type: debug) { + Book { + name + MinChapterPages: _min(chapterPages: {}) + } + }`, + + ExpectedPatterns: minPattern, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/explain/default/top_with_max_test.go b/tests/integration/explain/default/top_with_max_test.go new file mode 100644 index 0000000000..16d53b5007 --- /dev/null +++ b/tests/integration/explain/default/top_with_max_test.go @@ -0,0 +1,166 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
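The scanNode spans asserted in the default-explain tests below ("/3" up to "/4" for collectionID 3) appear to be half-open key ranges over the collection's ID prefix. A toy version of that bound, with the helper name being illustrative only:

    package main

    import "fmt"

    // collectionSpan is a hypothetical helper: the end bound is simply the
    // next collection ID, giving the half-open range seen in the explain output.
    func collectionSpan(collectionID int) (start, end string) {
    	return fmt.Sprintf("/%d", collectionID), fmt.Sprintf("/%d", collectionID+1)
    }

    func main() {
    	start, end := collectionSpan(3)
    	fmt.Println(start, end) // /3 /4
    }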
+ +package test_explain_default + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" +) + +var topLevelMaxPattern = dataMap{ + "explain": dataMap{ + "operationNode": []dataMap{ + { + "topLevelNode": []dataMap{ + { + "selectTopNode": dataMap{ + "selectNode": dataMap{ + "scanNode": dataMap{}, + }, + }, + }, + { + "maxNode": dataMap{}, + }, + }, + }, + }, + }, +} + +func TestDefaultExplain_WithTopLevelMaxRequest_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (default) top-level max request.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain { + _max( + Author: { + field: age + } + ) + }`, + + ExpectedPatterns: topLevelMaxPattern, + + ExpectedTargets: []testUtils.PlanNodeTargetCase{ + { + TargetNodeName: "scanNode", + IncludeChildNodes: true, // should be leaf of it's branch, so will have no child nodes. + ExpectedAttributes: dataMap{ + "collectionID": "3", + "collectionName": "Author", + "filter": nil, + "spans": []dataMap{ + { + "start": "/3", + "end": "/4", + }, + }, + }, + }, + { + TargetNodeName: "maxNode", + IncludeChildNodes: true, // should be leaf of it's branch, so will have no child nodes. + ExpectedAttributes: dataMap{ + "sources": []dataMap{ + { + "fieldName": "Author", + "childFieldName": "age", + "filter": nil, + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} + +func TestDefaultExplain_WithTopLevelMaxRequestWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (default) top-level max request with filter.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain { + _max( + Author: { + field: age, + filter: { + age: { + _gt: 26 + } + } + } + ) + }`, + + ExpectedPatterns: topLevelMaxPattern, + + ExpectedTargets: []testUtils.PlanNodeTargetCase{ + { + TargetNodeName: "scanNode", + IncludeChildNodes: true, // should be leaf of it's branch, so will have no child nodes. + ExpectedAttributes: dataMap{ + "collectionID": "3", + "collectionName": "Author", + "filter": dataMap{ + "age": dataMap{ + "_gt": int32(26), + }, + }, + "spans": []dataMap{ + { + "start": "/3", + "end": "/4", + }, + }, + }, + }, + { + TargetNodeName: "maxNode", + IncludeChildNodes: true, // should be leaf of it's branch, so will have no child nodes. + ExpectedAttributes: dataMap{ + "sources": []dataMap{ + { + "fieldName": "Author", + "childFieldName": "age", + "filter": dataMap{ + "age": dataMap{ + "_gt": int32(26), + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/explain/default/top_with_min_test.go b/tests/integration/explain/default/top_with_min_test.go new file mode 100644 index 0000000000..b212953a60 --- /dev/null +++ b/tests/integration/explain/default/top_with_min_test.go @@ -0,0 +1,166 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package test_explain_default + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" +) + +var topLevelMinPattern = dataMap{ + "explain": dataMap{ + "operationNode": []dataMap{ + { + "topLevelNode": []dataMap{ + { + "selectTopNode": dataMap{ + "selectNode": dataMap{ + "scanNode": dataMap{}, + }, + }, + }, + { + "minNode": dataMap{}, + }, + }, + }, + }, + }, +} + +func TestDefaultExplain_WithTopLevelMinRequest_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (default) top-level min request.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain { + _min( + Author: { + field: age + } + ) + }`, + + ExpectedPatterns: topLevelMinPattern, + + ExpectedTargets: []testUtils.PlanNodeTargetCase{ + { + TargetNodeName: "scanNode", + IncludeChildNodes: true, // should be leaf of it's branch, so will have no child nodes. + ExpectedAttributes: dataMap{ + "collectionID": "3", + "collectionName": "Author", + "filter": nil, + "spans": []dataMap{ + { + "start": "/3", + "end": "/4", + }, + }, + }, + }, + { + TargetNodeName: "minNode", + IncludeChildNodes: true, // should be leaf of it's branch, so will have no child nodes. + ExpectedAttributes: dataMap{ + "sources": []dataMap{ + { + "fieldName": "Author", + "childFieldName": "age", + "filter": nil, + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} + +func TestDefaultExplain_WithTopLevelMinRequestWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (default) top-level min request with filter.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain { + _min( + Author: { + field: age, + filter: { + age: { + _gt: 26 + } + } + } + ) + }`, + + ExpectedPatterns: topLevelMinPattern, + + ExpectedTargets: []testUtils.PlanNodeTargetCase{ + { + TargetNodeName: "scanNode", + IncludeChildNodes: true, // should be leaf of it's branch, so will have no child nodes. + ExpectedAttributes: dataMap{ + "collectionID": "3", + "collectionName": "Author", + "filter": dataMap{ + "age": dataMap{ + "_gt": int32(26), + }, + }, + "spans": []dataMap{ + { + "start": "/3", + "end": "/4", + }, + }, + }, + }, + { + TargetNodeName: "minNode", + IncludeChildNodes: true, // should be leaf of it's branch, so will have no child nodes. + ExpectedAttributes: dataMap{ + "sources": []dataMap{ + { + "fieldName": "Author", + "childFieldName": "age", + "filter": dataMap{ + "age": dataMap{ + "_gt": int32(26), + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/explain/default/with_max_join_test.go b/tests/integration/explain/default/with_max_join_test.go new file mode 100644 index 0000000000..a282f9f134 --- /dev/null +++ b/tests/integration/explain/default/with_max_join_test.go @@ -0,0 +1,375 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
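The join tests that follow target several plan nodes with the same name; OccurancesToSkip (spelling as in the test harness) disambiguates them, with 0 selecting the root Author scan, 1 the joined child's scan, and so on. In miniature, with illustrative names:

    package main

    import "fmt"

    // nthOccurrence returns the index of the (skip+1)-th element equal to target,
    // mirroring how OccurancesToSkip selects among same-named plan nodes.
    func nthOccurrence(names []string, target string, skip int) int {
    	seen := 0
    	for i, name := range names {
    		if name != target {
    			continue
    		}
    		if seen == skip {
    			return i
    		}
    		seen++
    	}
    	return -1
    }

    func main() {
    	walk := []string{"selectNode", "scanNode", "typeIndexJoin", "scanNode"}
    	fmt.Println(nthOccurrence(walk, "scanNode", 1)) // 3: the joined child's scan
    }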
+ +package test_explain_default + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" +) + +var maxTypeIndexJoinPattern = dataMap{ + "explain": dataMap{ + "operationNode": []dataMap{ + { + "selectTopNode": dataMap{ + "maxNode": dataMap{ + "selectNode": dataMap{ + "typeIndexJoin": normalTypeJoinPattern, + }, + }, + }, + }, + }, + }, +} + +func TestDefaultExplainRequest_WithMaxOnOneToManyJoinedField_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (default) request with max on a one-to-many joined field.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain { + Author { + name + _docID + TotalPages: _max( + books: {field: pages} + ) + } + }`, + + ExpectedPatterns: maxTypeIndexJoinPattern, + + ExpectedTargets: []testUtils.PlanNodeTargetCase{ + { + TargetNodeName: "maxNode", + IncludeChildNodes: false, + ExpectedAttributes: dataMap{ + "sources": []dataMap{ + { + "fieldName": "books", + "childFieldName": "pages", + "filter": nil, + }, + }, + }, + }, + { + TargetNodeName: "typeIndexJoin", + IncludeChildNodes: false, + ExpectedAttributes: dataMap{ + "joinType": "typeJoinMany", + "rootName": immutable.Some("author"), + "subTypeName": "books", + }, + }, + { + TargetNodeName: "scanNode", // inside of root + OccurancesToSkip: 0, + IncludeChildNodes: true, // should be leaf of it's branch, so will have no child nodes. + ExpectedAttributes: dataMap{ + "collectionID": "3", + "collectionName": "Author", + "filter": nil, + "spans": []dataMap{ + { + "start": "/3", + "end": "/4", + }, + }, + }, + }, + { + TargetNodeName: "scanNode", // inside of subType (related type) + OccurancesToSkip: 1, + IncludeChildNodes: true, // should be leaf of it's branch, so will have no child nodes. + ExpectedAttributes: dataMap{ + "collectionID": "2", + "collectionName": "Book", + "filter": nil, + "spans": []dataMap{ + { + "start": "/2", + "end": "/3", + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} + +func TestDefaultExplainRequest_WithMaxOnOneToManyJoinedFieldWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (default) request with max on a one-to-many joined field, with filter.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain { + Author { + name + TotalPages: _max( + articles: { + field: pages, + filter: { + name: { + _eq: "To my dear readers" + } + } + } + ) + } + }`, + + ExpectedPatterns: maxTypeIndexJoinPattern, + + ExpectedTargets: []testUtils.PlanNodeTargetCase{ + { + TargetNodeName: "maxNode", + IncludeChildNodes: false, + ExpectedAttributes: dataMap{ + "sources": []dataMap{ + { + "fieldName": "articles", + "childFieldName": "pages", + "filter": dataMap{ + "name": dataMap{ + "_eq": "To my dear readers", + }, + }, + }, + }, + }, + }, + { + TargetNodeName: "typeIndexJoin", + IncludeChildNodes: false, + ExpectedAttributes: dataMap{ + "joinType": "typeJoinMany", + "rootName": immutable.Some("author"), + "subTypeName": "articles", + }, + }, + { + TargetNodeName: "scanNode", // inside of root + OccurancesToSkip: 0, + IncludeChildNodes: true, // should be leaf of it's branch, so will have no child nodes. 
+ ExpectedAttributes: dataMap{ + "collectionID": "3", + "collectionName": "Author", + "filter": nil, + "spans": []dataMap{ + { + "start": "/3", + "end": "/4", + }, + }, + }, + }, + { + TargetNodeName: "scanNode", // inside of subType (related type) + OccurancesToSkip: 1, + IncludeChildNodes: true, // should be leaf of it's branch, so will have no child nodes. + ExpectedAttributes: dataMap{ + "collectionID": "1", + "collectionName": "Article", + "filter": dataMap{ + "name": dataMap{ + "_eq": "To my dear readers", + }, + }, + "spans": []dataMap{ + { + "start": "/1", + "end": "/2", + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} + +func TestDefaultExplainRequest_WithMaxOnOneToManyJoinedFieldWithManySources_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (default) request with max on a one-to-many joined field with many sources.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain { + Author { + name + TotalPages: _max( + books: {field: pages}, + articles: {field: pages} + ) + } + }`, + + ExpectedPatterns: dataMap{ + "explain": dataMap{ + "operationNode": []dataMap{ + { + "selectTopNode": dataMap{ + "maxNode": dataMap{ + "selectNode": dataMap{ + "parallelNode": []dataMap{ + { + "typeIndexJoin": normalTypeJoinPattern, + }, + { + "typeIndexJoin": normalTypeJoinPattern, + }, + }, + }, + }, + }, + }, + }, + }, + }, + + ExpectedTargets: []testUtils.PlanNodeTargetCase{ + { + TargetNodeName: "maxNode", + IncludeChildNodes: false, + ExpectedAttributes: dataMap{ + "sources": []dataMap{ + { + "childFieldName": "pages", + "fieldName": "books", + "filter": nil, + }, + + { + "childFieldName": "pages", + "fieldName": "articles", + "filter": nil, + }, + }, + }, + }, + { + TargetNodeName: "typeIndexJoin", + OccurancesToSkip: 0, + IncludeChildNodes: false, + ExpectedAttributes: dataMap{ + "joinType": "typeJoinMany", + "rootName": immutable.Some("author"), + "subTypeName": "books", + }, + }, + { + TargetNodeName: "scanNode", // inside of 1st root type + OccurancesToSkip: 0, + IncludeChildNodes: true, // should be leaf of it's branch, so will have no child nodes. + ExpectedAttributes: dataMap{ + "collectionID": "3", + "collectionName": "Author", + "filter": nil, + "spans": []dataMap{ + { + "start": "/3", + "end": "/4", + }, + }, + }, + }, + { + TargetNodeName: "scanNode", // inside of 1st subType (related type) + OccurancesToSkip: 1, + IncludeChildNodes: true, // should be leaf of it's branch, so will have no child nodes. + ExpectedAttributes: dataMap{ + "collectionID": "2", + "collectionName": "Book", + "filter": nil, + "spans": []dataMap{ + { + "start": "/2", + "end": "/3", + }, + }, + }, + }, + { + TargetNodeName: "typeIndexJoin", + OccurancesToSkip: 1, + IncludeChildNodes: false, + ExpectedAttributes: dataMap{ + "joinType": "typeJoinMany", + "rootName": immutable.Some("author"), + "subTypeName": "articles", + }, + }, + { + TargetNodeName: "scanNode", // inside of 2nd root type (AKA: subType's root) + OccurancesToSkip: 2, + IncludeChildNodes: true, // should be leaf of it's branch, so will have no child nodes. 
+ ExpectedAttributes: dataMap{ + "collectionID": "3", + "collectionName": "Author", + "filter": nil, + "spans": []dataMap{ + { + "start": "/3", + "end": "/4", + }, + }, + }, + }, + { + TargetNodeName: "scanNode", // inside of 2nd subType (AKA: subType's subtype) + OccurancesToSkip: 3, + IncludeChildNodes: true, // should be leaf of it's branch, so will have no child nodes. + ExpectedAttributes: dataMap{ + "collectionID": "1", + "collectionName": "Article", + "filter": nil, + "spans": []dataMap{ + { + "start": "/1", + "end": "/2", + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/explain/default/with_max_test.go b/tests/integration/explain/default/with_max_test.go new file mode 100644 index 0000000000..823e3d5def --- /dev/null +++ b/tests/integration/explain/default/with_max_test.go @@ -0,0 +1,90 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_explain_default + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" +) + +var maxPattern = dataMap{ + "explain": dataMap{ + "operationNode": []dataMap{ + { + "selectTopNode": dataMap{ + "maxNode": dataMap{ + "selectNode": dataMap{ + "scanNode": dataMap{}, + }, + }, + }, + }, + }, + }, +} + +func TestDefaultExplainRequest_WithMaxOnInlineArrayField_ChildFieldWillBeEmpty(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (default) request with max on an inline array field.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + testUtils.ExplainRequest{ + + Request: `query @explain { + Book { + name + MaxChapterPages: _max(chapterPages: {}) + } + }`, + + ExpectedPatterns: maxPattern, + + ExpectedTargets: []testUtils.PlanNodeTargetCase{ + { + TargetNodeName: "maxNode", + IncludeChildNodes: false, + ExpectedAttributes: dataMap{ + "sources": []dataMap{ + { + "fieldName": "chapterPages", + "childFieldName": nil, + "filter": nil, + }, + }, + }, + }, + { + TargetNodeName: "scanNode", + IncludeChildNodes: true, // should be leaf of it's branch, so will have no child nodes. + ExpectedAttributes: dataMap{ + "collectionID": "2", + "collectionName": "Book", + "filter": nil, + "spans": []dataMap{ + { + "start": "/2", + "end": "/3", + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/explain/default/with_min_join_test.go b/tests/integration/explain/default/with_min_join_test.go new file mode 100644 index 0000000000..2e12bf1788 --- /dev/null +++ b/tests/integration/explain/default/with_min_join_test.go @@ -0,0 +1,375 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+
+package test_explain_default
+
+import (
+	"testing"
+
+	"github.com/sourcenetwork/immutable"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+	explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain"
+)
+
+var minTypeIndexJoinPattern = dataMap{
+	"explain": dataMap{
+		"operationNode": []dataMap{
+			{
+				"selectTopNode": dataMap{
+					"minNode": dataMap{
+						"selectNode": dataMap{
+							"typeIndexJoin": normalTypeJoinPattern,
+						},
+					},
+				},
+			},
+		},
+	},
+}
+
+func TestDefaultExplainRequest_WithMinOnOneToManyJoinedField_Succeeds(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Explain (default) request with min on a one-to-many joined field.",
+
+		Actions: []any{
+			explainUtils.SchemaForExplainTests,
+
+			testUtils.ExplainRequest{
+
+				Request: `query @explain {
+					Author {
+						name
+						_docID
+						TotalPages: _min(
+							books: {field: pages}
+						)
+					}
+				}`,
+
+				ExpectedPatterns: minTypeIndexJoinPattern,
+
+				ExpectedTargets: []testUtils.PlanNodeTargetCase{
+					{
+						TargetNodeName:    "minNode",
+						IncludeChildNodes: false,
+						ExpectedAttributes: dataMap{
+							"sources": []dataMap{
+								{
+									"fieldName":      "books",
+									"childFieldName": "pages",
+									"filter":         nil,
+								},
+							},
+						},
+					},
+					{
+						TargetNodeName:    "typeIndexJoin",
+						IncludeChildNodes: false,
+						ExpectedAttributes: dataMap{
+							"joinType":    "typeJoinMany",
+							"rootName":    immutable.Some("author"),
+							"subTypeName": "books",
+						},
+					},
+					{
+						TargetNodeName:    "scanNode", // inside of root
+						OccurancesToSkip:  0,
+						IncludeChildNodes: true, // should be leaf of its branch, so will have no child nodes.
+						ExpectedAttributes: dataMap{
+							"collectionID":   "3",
+							"collectionName": "Author",
+							"filter":         nil,
+							"spans": []dataMap{
+								{
+									"start": "/3",
+									"end":   "/4",
+								},
+							},
+						},
+					},
+					{
+						TargetNodeName:    "scanNode", // inside of subType (related type)
+						OccurancesToSkip:  1,
+						IncludeChildNodes: true, // should be leaf of its branch, so will have no child nodes.
+						ExpectedAttributes: dataMap{
+							"collectionID":   "2",
+							"collectionName": "Book",
+							"filter":         nil,
+							"spans": []dataMap{
+								{
+									"start": "/2",
+									"end":   "/3",
+								},
+							},
+						},
+					},
+				},
+			},
+		},
+	}
+
+	explainUtils.ExecuteTestCase(t, test)
+}
+
+func TestDefaultExplainRequest_WithMinOnOneToManyJoinedFieldWithFilter_Succeeds(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Explain (default) request with min on a one-to-many joined field, with filter.",
+
+		Actions: []any{
+			explainUtils.SchemaForExplainTests,
+
+			testUtils.ExplainRequest{
+
+				Request: `query @explain {
+					Author {
+						name
+						TotalPages: _min(
+							articles: {
+								field: pages,
+								filter: {
+									name: {
+										_eq: "To my dear readers"
+									}
+								}
+							}
+						)
+					}
+				}`,
+
+				ExpectedPatterns: minTypeIndexJoinPattern,
+
+				ExpectedTargets: []testUtils.PlanNodeTargetCase{
+					{
+						TargetNodeName:    "minNode",
+						IncludeChildNodes: false,
+						ExpectedAttributes: dataMap{
+							"sources": []dataMap{
+								{
+									"fieldName":      "articles",
+									"childFieldName": "pages",
+									"filter": dataMap{
+										"name": dataMap{
+											"_eq": "To my dear readers",
+										},
+									},
+								},
+							},
+						},
+					},
+					{
+						TargetNodeName:    "typeIndexJoin",
+						IncludeChildNodes: false,
+						ExpectedAttributes: dataMap{
+							"joinType":    "typeJoinMany",
+							"rootName":    immutable.Some("author"),
+							"subTypeName": "articles",
+						},
+					},
+					{
+						TargetNodeName:    "scanNode", // inside of root
+						OccurancesToSkip:  0,
+						IncludeChildNodes: true, // should be leaf of its branch, so will have no child nodes.
+						ExpectedAttributes: dataMap{
+							"collectionID":   "3",
+							"collectionName": "Author",
+							"filter":         nil,
+							"spans": []dataMap{
+								{
+									"start": "/3",
+									"end":   "/4",
+								},
+							},
+						},
+					},
+					{
+						TargetNodeName:    "scanNode", // inside of subType (related type)
+						OccurancesToSkip:  1,
+						IncludeChildNodes: true, // should be leaf of its branch, so will have no child nodes.
+						ExpectedAttributes: dataMap{
+							"collectionID":   "1",
+							"collectionName": "Article",
+							"filter": dataMap{
+								"name": dataMap{
+									"_eq": "To my dear readers",
+								},
+							},
+							"spans": []dataMap{
+								{
+									"start": "/1",
+									"end":   "/2",
+								},
+							},
+						},
+					},
+				},
+			},
+		},
+	}
+
+	explainUtils.ExecuteTestCase(t, test)
+}
+
+func TestDefaultExplainRequest_WithMinOnOneToManyJoinedFieldWithManySources_Succeeds(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Explain (default) request with min on a one-to-many joined field with many sources.",
+
+		Actions: []any{
+			explainUtils.SchemaForExplainTests,
+
+			testUtils.ExplainRequest{
+
+				Request: `query @explain {
+					Author {
+						name
+						TotalPages: _min(
+							books: {field: pages},
+							articles: {field: pages}
+						)
+					}
+				}`,
+
+				ExpectedPatterns: dataMap{
+					"explain": dataMap{
+						"operationNode": []dataMap{
+							{
+								"selectTopNode": dataMap{
+									"minNode": dataMap{
+										"selectNode": dataMap{
+											"parallelNode": []dataMap{
+												{
+													"typeIndexJoin": normalTypeJoinPattern,
+												},
+												{
+													"typeIndexJoin": normalTypeJoinPattern,
+												},
+											},
+										},
+									},
+								},
+							},
+						},
+					},
+				},
+
+				ExpectedTargets: []testUtils.PlanNodeTargetCase{
+					{
+						TargetNodeName:    "minNode",
+						IncludeChildNodes: false,
+						ExpectedAttributes: dataMap{
+							"sources": []dataMap{
+								{
+									"childFieldName": "pages",
+									"fieldName":      "books",
+									"filter":         nil,
+								},
+
+								{
+									"childFieldName": "pages",
+									"fieldName":      "articles",
+									"filter":         nil,
+								},
+							},
+						},
+					},
+					{
+						TargetNodeName:    "typeIndexJoin",
+						OccurancesToSkip:  0,
+						IncludeChildNodes: false,
+						ExpectedAttributes: dataMap{
+							"joinType":    "typeJoinMany",
+							"rootName":    immutable.Some("author"),
+							"subTypeName": "books",
+						},
+					},
+					{
+						TargetNodeName:    "scanNode", // inside of 1st root type
+						OccurancesToSkip:  0,
+						IncludeChildNodes: true, // should be leaf of its branch, so will have no child nodes.
+						ExpectedAttributes: dataMap{
+							"collectionID":   "3",
+							"collectionName": "Author",
+							"filter":         nil,
+							"spans": []dataMap{
+								{
+									"start": "/3",
+									"end":   "/4",
+								},
+							},
+						},
+					},
+					{
+						TargetNodeName:    "scanNode", // inside of 1st subType (related type)
+						OccurancesToSkip:  1,
+						IncludeChildNodes: true, // should be leaf of its branch, so will have no child nodes.
+						ExpectedAttributes: dataMap{
+							"collectionID":   "2",
+							"collectionName": "Book",
+							"filter":         nil,
+							"spans": []dataMap{
+								{
+									"start": "/2",
+									"end":   "/3",
+								},
+							},
+						},
+					},
+					{
+						TargetNodeName:    "typeIndexJoin",
+						OccurancesToSkip:  1,
+						IncludeChildNodes: false,
+						ExpectedAttributes: dataMap{
+							"joinType":    "typeJoinMany",
+							"rootName":    immutable.Some("author"),
+							"subTypeName": "articles",
+						},
+					},
+					{
+						TargetNodeName:    "scanNode", // inside of 2nd root type (AKA: subType's root)
+						OccurancesToSkip:  2,
+						IncludeChildNodes: true, // should be leaf of its branch, so will have no child nodes.
+						ExpectedAttributes: dataMap{
+							"collectionID":   "3",
+							"collectionName": "Author",
+							"filter":         nil,
+							"spans": []dataMap{
+								{
+									"start": "/3",
+									"end":   "/4",
+								},
+							},
+						},
+					},
+					{
+						TargetNodeName:    "scanNode", // inside of 2nd subType (AKA: subType's subtype)
+						OccurancesToSkip:  3,
+						IncludeChildNodes: true, // should be leaf of its branch, so will have no child nodes.
+						ExpectedAttributes: dataMap{
+							"collectionID":   "1",
+							"collectionName": "Article",
+							"filter":         nil,
+							"spans": []dataMap{
+								{
+									"start": "/1",
+									"end":   "/2",
+								},
+							},
+						},
+					},
+				},
+			},
+		},
+	}
+
+	explainUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/explain/default/with_min_test.go b/tests/integration/explain/default/with_min_test.go
new file mode 100644
index 0000000000..63da42909e
--- /dev/null
+++ b/tests/integration/explain/default/with_min_test.go
@@ -0,0 +1,90 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_explain_default
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+	explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain"
+)
+
+var minPattern = dataMap{
+	"explain": dataMap{
+		"operationNode": []dataMap{
+			{
+				"selectTopNode": dataMap{
+					"minNode": dataMap{
+						"selectNode": dataMap{
+							"scanNode": dataMap{},
+						},
+					},
+				},
+			},
+		},
+	},
+}
+
+func TestDefaultExplainRequest_WithMinOnInlineArrayField_ChildFieldWillBeEmpty(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Explain (default) request with min on an inline array field.",
+
+		Actions: []any{
+			explainUtils.SchemaForExplainTests,
+
+			testUtils.ExplainRequest{
+
+				Request: `query @explain {
+					Book {
+						name
+						MinChapterPages: _min(chapterPages: {})
+					}
+				}`,
+
+				ExpectedPatterns: minPattern,
+
+				ExpectedTargets: []testUtils.PlanNodeTargetCase{
+					{
+						TargetNodeName:    "minNode",
+						IncludeChildNodes: false,
+						ExpectedAttributes: dataMap{
+							"sources": []dataMap{
+								{
+									"fieldName":      "chapterPages",
+									"childFieldName": nil,
+									"filter":         nil,
+								},
+							},
+						},
+					},
+					{
+						TargetNodeName:    "scanNode",
+						IncludeChildNodes: true, // should be leaf of its branch, so will have no child nodes.
+						ExpectedAttributes: dataMap{
+							"collectionID":   "2",
+							"collectionName": "Book",
+							"filter":         nil,
+							"spans": []dataMap{
+								{
+									"start": "/2",
+									"end":   "/3",
+								},
+							},
+						},
+					},
+				},
+			},
+		},
+	}
+
+	explainUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/explain/execute/with_max_test.go b/tests/integration/explain/execute/with_max_test.go
new file mode 100644
index 0000000000..139c86e210
--- /dev/null
+++ b/tests/integration/explain/execute/with_max_test.go
@@ -0,0 +1,138 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
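+
+// Unlike the default explain tests, the execute explain tests below assert
+// runtime metrics (iterations, docFetches, fieldFetches, indexFetches)
+// collected while the plan actually runs, rather than the shape of the plan.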
+ +package test_explain_execute + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" +) + +func TestExecuteExplainRequest_WithMaxOfInlineArrayField_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (execute) request with max on an inline array.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + create2AddressDocuments(), + create2AuthorContactDocuments(), + create2AuthorDocuments(), + create3BookDocuments(), + + testUtils.ExplainRequest{ + Request: `query @explain(type: execute) { + Book { + name + MaxChapterPages: _max(chapterPages: {}) + } + }`, + + ExpectedFullGraph: dataMap{ + "explain": dataMap{ + "executionSuccess": true, + "sizeOfResult": 1, + "planExecutions": uint64(2), + "operationNode": []dataMap{ + { + "selectTopNode": dataMap{ + "maxNode": dataMap{ + "iterations": uint64(4), + "selectNode": dataMap{ + "iterations": uint64(4), + "filterMatches": uint64(3), + "scanNode": dataMap{ + "iterations": uint64(4), + "docFetches": uint64(3), + "fieldFetches": uint64(5), + "indexFetches": uint64(0), + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} + +func TestExecuteExplainRequest_MaxOfRelatedOneToManyField_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (execute) request with max of a related one to many field.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + create2AddressDocuments(), + create2AuthorContactDocuments(), + create2AuthorDocuments(), + create3ArticleDocuments(), + + testUtils.ExplainRequest{ + Request: `query @explain(type: execute) { + Author { + name + MaxPages: _max( + articles: { + field: pages, + } + ) + } + }`, + + ExpectedFullGraph: dataMap{ + "explain": dataMap{ + "executionSuccess": true, + "sizeOfResult": 1, + "planExecutions": uint64(2), + "operationNode": []dataMap{ + { + "selectTopNode": dataMap{ + "maxNode": dataMap{ + "iterations": uint64(3), + "selectNode": dataMap{ + "iterations": uint64(3), + "filterMatches": uint64(2), + "typeIndexJoin": dataMap{ + "iterations": uint64(3), + "scanNode": dataMap{ + "iterations": uint64(3), + "docFetches": uint64(2), + "fieldFetches": uint64(2), + "indexFetches": uint64(0), + }, + "subTypeScanNode": dataMap{ + "iterations": uint64(5), + "docFetches": uint64(6), + "fieldFetches": uint64(9), + "indexFetches": uint64(0), + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/explain/execute/with_min_test.go b/tests/integration/explain/execute/with_min_test.go new file mode 100644 index 0000000000..204a055f32 --- /dev/null +++ b/tests/integration/explain/execute/with_min_test.go @@ -0,0 +1,138 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
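+
+// These _min tests share their structure with the execute explain _max tests
+// above: only the aggregate node differs (minNode instead of maxNode), so the
+// expected metrics are identical.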
+ +package test_explain_execute + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" +) + +func TestExecuteExplainRequest_WithMinOfInlineArrayField_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (execute) request with min on an inline array.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + + create2AddressDocuments(), + create2AuthorContactDocuments(), + create2AuthorDocuments(), + create3BookDocuments(), + + testUtils.ExplainRequest{ + Request: `query @explain(type: execute) { + Book { + name + MinChapterPages: _min(chapterPages: {}) + } + }`, + + ExpectedFullGraph: dataMap{ + "explain": dataMap{ + "executionSuccess": true, + "sizeOfResult": 1, + "planExecutions": uint64(2), + "operationNode": []dataMap{ + { + "selectTopNode": dataMap{ + "minNode": dataMap{ + "iterations": uint64(4), + "selectNode": dataMap{ + "iterations": uint64(4), + "filterMatches": uint64(3), + "scanNode": dataMap{ + "iterations": uint64(4), + "docFetches": uint64(3), + "fieldFetches": uint64(5), + "indexFetches": uint64(0), + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} + +func TestExecuteExplainRequest_MinOfRelatedOneToManyField_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Explain (execute) request with min of a related one to many field.", + + Actions: []any{ + explainUtils.SchemaForExplainTests, + create2AddressDocuments(), + create2AuthorContactDocuments(), + create2AuthorDocuments(), + create3ArticleDocuments(), + + testUtils.ExplainRequest{ + Request: `query @explain(type: execute) { + Author { + name + MinPages: _min( + articles: { + field: pages, + } + ) + } + }`, + + ExpectedFullGraph: dataMap{ + "explain": dataMap{ + "executionSuccess": true, + "sizeOfResult": 1, + "planExecutions": uint64(2), + "operationNode": []dataMap{ + { + "selectTopNode": dataMap{ + "minNode": dataMap{ + "iterations": uint64(3), + "selectNode": dataMap{ + "iterations": uint64(3), + "filterMatches": uint64(2), + "typeIndexJoin": dataMap{ + "iterations": uint64(3), + "scanNode": dataMap{ + "iterations": uint64(3), + "docFetches": uint64(2), + "fieldFetches": uint64(2), + "indexFetches": uint64(0), + }, + "subTypeScanNode": dataMap{ + "iterations": uint64(5), + "docFetches": uint64(6), + "fieldFetches": uint64(9), + "indexFetches": uint64(0), + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/inline_array/with_max_doc_id_test.go b/tests/integration/query/inline_array/with_max_doc_id_test.go new file mode 100644 index 0000000000..3a473db9f2 --- /dev/null +++ b/tests/integration/query/inline_array/with_max_doc_id_test.go @@ -0,0 +1,51 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package inline_array + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +// This test is meant to provide coverage of the planNode.Spans +// func by targeting a specific docID in the parent select. 
+func TestQueryInlineNillableFloatArray_WithDocIDAndMax_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with doc id, max of nillable float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "pageRatings": [3.1425, 0.00000000001, 10, null] + }`, + }, + testUtils.Request{ + Request: `query { + Users(docID: "bae-3f7e0f22-e253-53dd-b31b-df8b081292d9") { + name + _max(pageRatings: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_max": float64(10), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/inline_array/with_max_filter_test.go b/tests/integration/query/inline_array/with_max_filter_test.go new file mode 100644 index 0000000000..8d3b1cee25 --- /dev/null +++ b/tests/integration/query/inline_array/with_max_filter_test.go @@ -0,0 +1,145 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package inline_array + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryInlineIntegerArray_WithMaxWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered max of integer array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "favouriteIntegers": [-1, 2, -1, 1, 0] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _max(favouriteIntegers: {filter: {_lt: 2}}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_max": int64(1), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNillableIntegerArray_WithMaxWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with filter, max of nillable integer array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "testScores": [-1, 2, null, 1, 0] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _max(testScores: {filter: {_lt: 2}}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_max": int64(1), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineFloatArray_WithMaxWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered max of float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "favouriteFloats": [3.1425, 0.00000000001, 10] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _max(favouriteFloats: {filter: {_lt: 9}}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_max": float64(3.1425), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNillableFloatArray_WithMaxWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with filter, max of nillable float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "pageRatings": [3.1425, 0.00000000001, 10, null] + }`, + }, + testUtils.Request{ + Request: `query { + Users 
{ + name + _max(pageRatings: {filter: {_lt: 9}}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_max": float64(3.1425), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/inline_array/with_max_limit_offset_order_test.go b/tests/integration/query/inline_array/with_max_limit_offset_order_test.go new file mode 100644 index 0000000000..7f18935d93 --- /dev/null +++ b/tests/integration/query/inline_array/with_max_limit_offset_order_test.go @@ -0,0 +1,281 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package inline_array + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryInlineIntegerArray_WithMaxWithOffsetWithLimitWithOrderAsc_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, ordered offsetted limited max of integer array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "favouriteIntegers": [-1, 2, 5, 1, 0, 7] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _max(favouriteIntegers: {offset: 1, limit: 3, order: ASC}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + // 0, 1, 2 + "_max": int64(2), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineIntegerArray_WithMaxWithOffsetWithLimitWithOrderDesc_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, ordered offsetted limited max of integer array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "favouriteIntegers": [-1, 2, 5, 1, 0, 7] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _max(favouriteIntegers: {offset: 1, limit: 3, order: DESC}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + // 5, 2, 1 + "_max": int64(5), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNillableIntegerArray_WithMaxWithOffsetWithLimitWithOrderAsc_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, ordered offsetted limited max of integer array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "testScores": [2, null, 5, 1, 0, 7] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _max(testScores: {offset: 1, limit: 3, order: ASC}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + // 0, 1, 2 + "_max": int64(2), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNillableIntegerArray_WithMaxWithOffsetWithLimitWithOrderDesc_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, ordered offsetted limited max of integer array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "testScores": [null, 2, 5, 1, 0, 7] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _max(testScores: {offset: 1, limit: 3, order: DESC}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + // 5, 2, 
1
+							"_max": int64(5),
+						},
+					},
+				},
+			},
+		},
+	}
+
+	executeTestCase(t, test)
+}
+
+func TestQueryInlineFloatArray_WithMaxWithOffsetWithLimitWithOrderAsc_Succeeds(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Simple inline array, ordered offsetted limited max of float array",
+		Actions: []any{
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"favouriteFloats": [3.1425, 0.00000000001, 10, 2.718, 0.577, 6.283]
+				}`,
+			},
+			testUtils.Request{
+				Request: `query {
+					Users {
+						name
+						_max(favouriteFloats: {offset: 1, limit: 3, order: ASC})
+					}
+				}`,
+				Results: map[string]any{
+					"Users": []map[string]any{
+						{
+							"name": "Shahzad",
+							// 0.577, 2.718, 3.1425
+							"_max": float64(3.1425),
+						},
+					},
+				},
+			},
+		},
+	}
+
+	executeTestCase(t, test)
+}
+
+func TestQueryInlineFloatArray_WithMaxWithOffsetWithLimitWithOrderDesc_Succeeds(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Simple inline array, ordered offsetted limited max of float array",
+		Actions: []any{
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"favouriteFloats": [3.1425, 0.00000000001, 10, 2.718, 0.577, 6.283]
+				}`,
+			},
+			testUtils.Request{
+				Request: `query {
+					Users {
+						name
+						_max(favouriteFloats: {offset: 1, limit: 3, order: DESC})
+					}
+				}`,
+				Results: map[string]any{
+					"Users": []map[string]any{
+						{
+							"name": "Shahzad",
+							// 6.283, 3.1425, 2.718
+							"_max": float64(6.283),
+						},
+					},
+				},
+			},
+		},
+	}
+
+	executeTestCase(t, test)
+}
+
+func TestQueryInlineNillableFloatArray_WithMaxWithOffsetWithLimitWithOrderAsc_Succeeds(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Simple inline array, ordered offsetted limited max of nillable float array",
+		Actions: []any{
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"pageRatings": [3.1425, null, 10, 2.718, 0.577, 6.283]
+				}`,
+			},
+			testUtils.Request{
+				Request: `query {
+					Users {
+						name
+						_max(pageRatings: {offset: 1, limit: 3, order: ASC})
+					}
+				}`,
+				Results: map[string]any{
+					"Users": []map[string]any{
+						{
+							"name": "Shahzad",
+							// 0.577, 2.718, 3.1425
+							"_max": float64(3.1425),
+						},
+					},
+				},
+			},
+		},
+	}
+
+	executeTestCase(t, test)
+}
+
+func TestQueryInlineNillableFloatArray_WithMaxWithOffsetWithLimitWithOrderDesc_Succeeds(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Simple inline array, ordered offsetted limited max of nillable float array",
+		Actions: []any{
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"pageRatings": [3.1425, null, 10, 2.718, 0.577, 6.283]
+				}`,
+			},
+			testUtils.Request{
+				Request: `query {
+					Users {
+						name
+						_max(pageRatings: {offset: 1, limit: 3, order: DESC})
+					}
+				}`,
+				Results: map[string]any{
+					"Users": []map[string]any{
+						{
+							"name": "Shahzad",
+							// 6.283, 3.1425, 2.718
+							"_max": float64(6.283),
+						},
+					},
+				},
+			},
+		},
+	}
+
+	executeTestCase(t, test)
+}
diff --git a/tests/integration/query/inline_array/with_max_limit_offset_test.go b/tests/integration/query/inline_array/with_max_limit_offset_test.go
new file mode 100644
index 0000000000..83eb8ac8ff
--- /dev/null
+++ b/tests/integration/query/inline_array/with_max_limit_offset_test.go
@@ -0,0 +1,49 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
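+
+// offset and limit select a window of the array before aggregation: with
+// offset: 1 and limit: 2, [-1, 2, 5, 1, 0] reduces to [2, 5], so _max is 5.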
+ +package inline_array + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryInlineIntegerArray_WithMaxWithOffsetWithLimit_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, offsetted limited max of integer array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "favouriteIntegers": [-1, 2, 5, 1, 0] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _max(favouriteIntegers: {offset: 1, limit: 2}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_max": int64(5), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/inline_array/with_max_test.go b/tests/integration/query/inline_array/with_max_test.go new file mode 100644 index 0000000000..77cf0c8b37 --- /dev/null +++ b/tests/integration/query/inline_array/with_max_test.go @@ -0,0 +1,305 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package inline_array + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryInlineIntegerArray_WithMaxAndNullArray_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, max of nil integer array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteIntegers": null + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _max(favouriteIntegers: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "John", + "_max": nil, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineIntegerArray_WithMaxAndEmptyArray_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, max of empty integer array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteIntegers": [] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _max(favouriteIntegers: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "John", + "_max": nil, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineIntegerArray_WithMaxAndPopulatedArray_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, max of integer array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "favouriteIntegers": [-1, 2, -1, 1, 0] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _max(favouriteIntegers: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_max": int64(2), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNillableIntegerArray_WithMaxAndPopulatedArray_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, max of nillable integer array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "testScores": [-1, 2, null, 1, 0] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + 
_max(testScores: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_max": int64(2), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineFloatArray_WithMaxAndNullArray_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, max of nil float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteFloats": null + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _max(favouriteFloats: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "John", + "_max": nil, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineFloatArray_WithMaxAndEmptyArray_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, max of empty float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteFloats": [] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _max(favouriteFloats: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "John", + "_max": nil, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineFloatArray_WithMaxAndPopulatedArray_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, max of float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteFloats": [3.1425, 0.00000000001, 10] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _max(favouriteFloats: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "John", + "_max": float64(10), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNillableFloatArray_WithMaxAndPopulatedArray_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, max of nillable float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "pageRatings": [3.1425, 0.00000000001, 10, null] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _max(pageRatings: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_max": float64(10), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNillableFloatArray_WithDocIDMaxAndPopulatedArray_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, max of nillable float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "pageRatings": [3.1425, 0.00000000001, 10, null] + }`, + }, + testUtils.Request{ + Request: `query { + Users(docID: "bae-3f7e0f22-e253-53dd-b31b-df8b081292d9") { + name + _max(pageRatings: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_max": float64(10), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/inline_array/with_min_doc_id_test.go b/tests/integration/query/inline_array/with_min_doc_id_test.go new file mode 100644 index 0000000000..b8ad3d2c98 --- /dev/null +++ b/tests/integration/query/inline_array/with_min_doc_id_test.go @@ -0,0 +1,51 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package inline_array + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +// This test is meant to provide coverage of the planNode.Spans +// func by targeting a specific docID in the parent select. +func TestQueryInlineNillableFloatArray_WithDocIDAndMin_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with doc id, min of nillable float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "pageRatings": [3.1425, 0.00000000001, 10, null] + }`, + }, + testUtils.Request{ + Request: `query { + Users(docID: "bae-3f7e0f22-e253-53dd-b31b-df8b081292d9") { + name + _min(pageRatings: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_min": float64(0.00000000001), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/inline_array/with_min_filter_test.go b/tests/integration/query/inline_array/with_min_filter_test.go new file mode 100644 index 0000000000..50352dc83c --- /dev/null +++ b/tests/integration/query/inline_array/with_min_filter_test.go @@ -0,0 +1,145 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
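+
+// The inner filter restricts which elements feed the aggregate: only values
+// matching the predicate are considered for _min, and null entries in the
+// nillable arrays are skipped.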
+ +package inline_array + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryInlineIntegerArray_WithMinWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered min of integer array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "favouriteIntegers": [-1, 2, -1, 1, 0] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _min(favouriteIntegers: {filter: {_gt: 0}}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_min": int64(1), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNillableIntegerArray_WithMinWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with filter, min of nillable integer array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "testScores": [-1, 2, null, 1, 0] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _min(testScores: {filter: {_gt: 0}}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_min": int64(1), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineFloatArray_WithMinWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered min of float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "favouriteFloats": [3.1425, 0.00000000001, 10] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _min(favouriteFloats: {filter: {_gt: 1}}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_min": float64(3.1425), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNillableFloatArray_WithMinWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with filter, min of nillable float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "pageRatings": [3.1425, 0.00000000001, 10, null] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _min(pageRatings: {filter: {_gt: 1}}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_min": float64(3.1425), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/inline_array/with_min_limit_offset_order_test.go b/tests/integration/query/inline_array/with_min_limit_offset_order_test.go new file mode 100644 index 0000000000..ed5adde4b8 --- /dev/null +++ b/tests/integration/query/inline_array/with_min_limit_offset_order_test.go @@ -0,0 +1,281 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
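+
+// order is applied before offset and limit, so the window that _min
+// aggregates over is carved from the sorted values; the inline comments in
+// each test list the window's contents.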
+
+package inline_array
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestQueryInlineIntegerArray_WithMinWithOffsetWithLimitWithOrderAsc_Succeeds(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Simple inline array, ordered offsetted limited min of integer array",
+		Actions: []any{
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"favouriteIntegers": [-1, 2, 5, 1, 0, 7]
+				}`,
+			},
+			testUtils.Request{
+				Request: `query {
+					Users {
+						name
+						_min(favouriteIntegers: {offset: 1, limit: 3, order: ASC})
+					}
+				}`,
+				Results: map[string]any{
+					"Users": []map[string]any{
+						{
+							"name": "Shahzad",
+							// 0, 1, 2
+							"_min": int64(0),
+						},
+					},
+				},
+			},
+		},
+	}
+
+	executeTestCase(t, test)
+}
+
+func TestQueryInlineIntegerArray_WithMinWithOffsetWithLimitWithOrderDesc_Succeeds(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Simple inline array, ordered offsetted limited min of integer array",
+		Actions: []any{
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"favouriteIntegers": [-1, 2, 5, 1, 0, 7]
+				}`,
+			},
+			testUtils.Request{
+				Request: `query {
+					Users {
+						name
+						_min(favouriteIntegers: {offset: 1, limit: 3, order: DESC})
+					}
+				}`,
+				Results: map[string]any{
+					"Users": []map[string]any{
+						{
+							"name": "Shahzad",
+							// 5, 2, 1
+							"_min": int64(1),
+						},
+					},
+				},
+			},
+		},
+	}
+
+	executeTestCase(t, test)
+}
+
+func TestQueryInlineNillableIntegerArray_WithMinWithOffsetWithLimitWithOrderAsc_Succeeds(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Simple inline array, ordered offsetted limited min of nillable integer array",
+		Actions: []any{
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"testScores": [2, null, 5, 1, 0, 7]
+				}`,
+			},
+			testUtils.Request{
+				Request: `query {
+					Users {
+						name
+						_min(testScores: {offset: 1, limit: 3, order: ASC})
+					}
+				}`,
+				Results: map[string]any{
+					"Users": []map[string]any{
+						{
+							"name": "Shahzad",
+							// 0, 1, 2
+							"_min": int64(0),
+						},
+					},
+				},
+			},
+		},
+	}
+
+	executeTestCase(t, test)
+}
+
+func TestQueryInlineNillableIntegerArray_WithMinWithOffsetWithLimitWithOrderDesc_Succeeds(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Simple inline array, ordered offsetted limited min of nillable integer array",
+		Actions: []any{
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"testScores": [null, 2, 5, 1, 0, 7]
+				}`,
+			},
+			testUtils.Request{
+				Request: `query {
+					Users {
+						name
+						_min(testScores: {offset: 1, limit: 3, order: DESC})
+					}
+				}`,
+				Results: map[string]any{
+					"Users": []map[string]any{
+						{
+							"name": "Shahzad",
+							// 5, 2, 1
+							"_min": int64(1),
+						},
+					},
+				},
+			},
+		},
+	}
+
+	executeTestCase(t, test)
+}
+
+func TestQueryInlineFloatArray_WithMinWithOffsetWithLimitWithOrderAsc_Succeeds(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Simple inline array, ordered offsetted limited min of float array",
+		Actions: []any{
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"favouriteFloats": [3.1425, 0.00000000001, 10, 2.718, 0.577, 6.283]
+				}`,
+			},
+			testUtils.Request{
+				Request: `query {
+					Users {
+						name
+						_min(favouriteFloats: {offset: 1, limit: 3, order: ASC})
+					}
+				}`,
+				Results: map[string]any{
+					"Users": []map[string]any{
+						{
+							"name": "Shahzad",
+							// 0.577, 2.718, 3.1425
+							"_min": float64(0.577),
+						},
+					},
+				},
+			},
+		},
+	}
+
+	executeTestCase(t, test)
+}
+
+func TestQueryInlineFloatArray_WithMinWithOffsetWithLimitWithOrderDesc_Succeeds(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Simple inline array, ordered offsetted limited min of float array",
+		Actions: []any{
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"favouriteFloats": [3.1425, 0.00000000001, 10, 2.718, 0.577, 6.283]
+				}`,
+			},
+			testUtils.Request{
+				Request: `query {
+					Users {
+						name
+						_min(favouriteFloats: {offset: 1, limit: 3, order: DESC})
+					}
+				}`,
+				Results: map[string]any{
+					"Users": []map[string]any{
+						{
+							"name": "Shahzad",
+							// 6.283, 3.1425, 2.718
+							"_min": float64(2.718),
+						},
+					},
+				},
+			},
+		},
+	}
+
+	executeTestCase(t, test)
+}
+
+func TestQueryInlineNillableFloatArray_WithMinWithOffsetWithLimitWithOrderAsc_Succeeds(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Simple inline array, ordered offsetted limited min of nillable float array",
+		Actions: []any{
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"pageRatings": [3.1425, null, 10, 2.718, 0.577, 6.283]
+				}`,
+			},
+			testUtils.Request{
+				Request: `query {
+					Users {
+						name
+						_min(pageRatings: {offset: 1, limit: 3, order: ASC})
+					}
+				}`,
+				Results: map[string]any{
+					"Users": []map[string]any{
+						{
+							"name": "Shahzad",
+							// 0.577, 2.718, 3.1425
+							"_min": float64(0.577),
+						},
+					},
+				},
+			},
+		},
+	}
+
+	executeTestCase(t, test)
+}
+
+func TestQueryInlineNillableFloatArray_WithMinWithOffsetWithLimitWithOrderDesc_Succeeds(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "Simple inline array, ordered offsetted limited min of nillable float array",
+		Actions: []any{
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"pageRatings": [3.1425, null, 10, 2.718, 0.577, 6.283]
+				}`,
+			},
+			testUtils.Request{
+				Request: `query {
+					Users {
+						name
+						_min(pageRatings: {offset: 1, limit: 3, order: DESC})
+					}
+				}`,
+				Results: map[string]any{
+					"Users": []map[string]any{
+						{
+							"name": "Shahzad",
+							// 6.283, 3.1425, 2.718
+							"_min": float64(2.718),
+						},
+					},
+				},
+			},
+		},
+	}
+
+	executeTestCase(t, test)
+}
diff --git a/tests/integration/query/inline_array/with_min_limit_offset_test.go b/tests/integration/query/inline_array/with_min_limit_offset_test.go
new file mode 100644
index 0000000000..a8b55dc786
--- /dev/null
+++ b/tests/integration/query/inline_array/with_min_limit_offset_test.go
@@ -0,0 +1,49 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
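+
+// Without an order argument the window is taken in stored order: offset: 1
+// and limit: 2 over [-1, 2, 5, 1, 0] yields [2, 5], so _min is 2.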
+ +package inline_array + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryInlineIntegerArray_WithMinWithOffsetWithLimit_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, offsetted limited min of integer array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "favouriteIntegers": [-1, 2, 5, 1, 0] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _min(favouriteIntegers: {offset: 1, limit: 2}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_min": int64(2), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/inline_array/with_min_test.go b/tests/integration/query/inline_array/with_min_test.go new file mode 100644 index 0000000000..8788181415 --- /dev/null +++ b/tests/integration/query/inline_array/with_min_test.go @@ -0,0 +1,273 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package inline_array + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryInlineIntegerArray_WithMinAndNullArray_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, min of nil integer array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteIntegers": null + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _min(favouriteIntegers: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "John", + "_min": nil, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineIntegerArray_WithMinAndEmptyArray_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, min of empty integer array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteIntegers": [] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _min(favouriteIntegers: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "John", + "_min": nil, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineIntegerArray_WithMinAndPopulatedArray_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, min of integer array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "favouriteIntegers": [-1, 2, -1, 1, 0] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _min(favouriteIntegers: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_min": int64(-1), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNillableIntegerArray_WithMinAndPopulatedArray_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, min of nillable integer array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "testScores": [-1, 2, null, 1, 0] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + 
_min(testScores: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_min": int64(-1), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineFloatArray_WithMinAndNullArray_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, min of nil float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteFloats": null + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _min(favouriteFloats: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "John", + "_min": nil, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineFloatArray_WithMinAndEmptyArray_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, min of empty float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteFloats": [] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _min(favouriteFloats: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "John", + "_min": nil, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineFloatArray_WithMinAndPopulatedArray_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, min of float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "favouriteFloats": [3.1425, 0.00000000001, 10] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _min(favouriteFloats: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "John", + "_min": float64(0.00000000001), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQueryInlineNillableFloatArray_WithMinAndPopulatedArray_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array with no filter, min of nillable float array", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "pageRatings": [3.1425, 0.00000000001, 10, null] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + _min(pageRatings: {}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "_min": float64(0.00000000001), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/utils.go b/tests/integration/query/simple/utils.go index 0f2258e30e..b64686d600 100644 --- a/tests/integration/query/simple/utils.go +++ b/tests/integration/query/simple/utils.go @@ -31,7 +31,8 @@ func executeTestCase(t *testing.T, test testUtils.TestCase) { testUtils.ExecuteTestCase( t, testUtils.TestCase{ - Description: test.Description, + Description: test.Description, + SupportedMutationTypes: test.SupportedMutationTypes, Actions: append( []any{ testUtils.SchemaUpdate{ diff --git a/tests/integration/query/simple/with_group_average_max_test.go b/tests/integration/query/simple/with_group_average_max_test.go new file mode 100644 index 0000000000..847f0db941 --- /dev/null +++ b/tests/integration/query/simple/with_group_average_max_test.go @@ -0,0 +1,164 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQuery_SimpleWithGroupByStringWithInnerGroupBooleanAndMaxOfAverageOfInt_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, with child group by boolean, and max of average on int", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 25, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 34, + "Verified": false + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Carlo", + "Age": 55, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 19, + "Verified": false + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _max(_group: {field: _avg}) + _group (groupBy: [Verified]){ + Verified + _avg(_group: {field: Age}) + } + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_max": float64(34), + "_group": []map[string]any{ + { + "Verified": true, + "_avg": float64(28.5), + }, + { + "Verified": false, + "_avg": float64(34), + }, + }, + }, + { + "Name": "Carlo", + "_max": float64(55), + "_group": []map[string]any{ + { + "Verified": true, + "_avg": float64(55), + }, + }, + }, + { + "Name": "Alice", + "_max": float64(19), + "_group": []map[string]any{ + { + "Verified": false, + "_avg": float64(19), + }, + }, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByStringWithoutRenderedGroupAndChildIntegerAverageAndMax_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, average and max on non-rendered group integer value", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 38 + }`, + }, + testUtils.CreateDoc{ + // It is important to test negative values here, due to the auto-typing of numbers + Doc: `{ + "Name": "Alice", + "Age": -19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _avg(_group: {field: Age}) + _max(_group: {field: Age}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_avg": float64(35), + "_max": int64(38), + }, + { + "Name": "Alice", + "_avg": float64(-19), + "_max": int64(-19), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_group_average_min_test.go b/tests/integration/query/simple/with_group_average_min_test.go new file mode 100644 index 0000000000..c59c576b3b --- /dev/null +++ b/tests/integration/query/simple/with_group_average_min_test.go @@ -0,0 +1,164 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
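+
+// _min here aggregates over the child groups' _avg results: John's groups
+// average 28.5 and 34, giving a _min of 28.5.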
+ +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQuery_SimpleWithGroupByStringWithInnerGroupBooleanAndMinOfAverageOfInt_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, with child group by boolean, and min of average on int", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 25, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 34, + "Verified": false + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Carlo", + "Age": 55, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 19, + "Verified": false + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _min(_group: {field: _avg}) + _group (groupBy: [Verified]){ + Verified + _avg(_group: {field: Age}) + } + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_min": float64(28.5), + "_group": []map[string]any{ + { + "Verified": true, + "_avg": float64(28.5), + }, + { + "Verified": false, + "_avg": float64(34), + }, + }, + }, + { + "Name": "Carlo", + "_min": float64(55), + "_group": []map[string]any{ + { + "Verified": true, + "_avg": float64(55), + }, + }, + }, + { + "Name": "Alice", + "_min": float64(19), + "_group": []map[string]any{ + { + "Verified": false, + "_avg": float64(19), + }, + }, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByStringWithoutRenderedGroupAndChildIntegerAverageAndMin_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, average and min on non-rendered group integer value", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 38 + }`, + }, + testUtils.CreateDoc{ + // It is important to test negative values here, due to the auto-typing of numbers + Doc: `{ + "Name": "Alice", + "Age": -19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _avg(_group: {field: Age}) + _min(_group: {field: Age}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_avg": float64(35), + "_min": int64(32), + }, + { + "Name": "Alice", + "_avg": float64(-19), + "_min": int64(-19), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_group_count_max_test.go b/tests/integration/query/simple/with_group_count_max_test.go new file mode 100644 index 0000000000..82749a3b76 --- /dev/null +++ b/tests/integration/query/simple/with_group_count_max_test.go @@ -0,0 +1,112 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
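+
+// _max over _group {field: _count} picks the largest child-group count:
+// John's groups have counts [2, 1], so _max is 2; names with a single group
+// report their lone count.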
+ +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQuerySimple_WithGroupByStringWithInnerGroupBooleanAndMaxOfCount_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, with child group by boolean, and max of count", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 25, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 34, + "Verified": false + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Carlo", + "Age": 55, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 19, + "Verified": false + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _max(_group: {field: _count}) + _group (groupBy: [Verified]){ + Verified + _count(_group: {}) + } + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_max": int64(2), + "_group": []map[string]any{ + { + "Verified": true, + "_count": int(2), + }, + { + "Verified": false, + "_count": int(1), + }, + }, + }, + { + "Name": "Carlo", + "_max": int64(1), + "_group": []map[string]any{ + { + "Verified": true, + "_count": int(1), + }, + }, + }, + { + "Name": "Alice", + "_max": int64(1), + "_group": []map[string]any{ + { + "Verified": false, + "_count": int(1), + }, + }, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_group_count_min_test.go b/tests/integration/query/simple/with_group_count_min_test.go new file mode 100644 index 0000000000..4890ed527c --- /dev/null +++ b/tests/integration/query/simple/with_group_count_min_test.go @@ -0,0 +1,112 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQuerySimple_WithGroupByStringWithInnerGroupBooleanAndMinOfCount_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, with child group by boolean, and min of count", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 25, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 34, + "Verified": false + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Carlo", + "Age": 55, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 19, + "Verified": false + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _min(_group: {field: _count}) + _group (groupBy: [Verified]){ + Verified + _count(_group: {}) + } + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_min": int64(1), + "_group": []map[string]any{ + { + "Verified": true, + "_count": int(2), + }, + { + "Verified": false, + "_count": int(1), + }, + }, + }, + { + "Name": "Carlo", + "_min": int64(1), + "_group": []map[string]any{ + { + "Verified": true, + "_count": int(1), + }, + }, + }, + { + "Name": "Alice", + "_min": int64(1), + "_group": []map[string]any{ + { + "Verified": false, + "_count": int(1), + }, + }, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_group_max_filter_test.go b/tests/integration/query/simple/with_group_max_filter_test.go new file mode 100644 index 0000000000..13db6c6e10 --- /dev/null +++ b/tests/integration/query/simple/with_group_max_filter_test.go @@ -0,0 +1,294 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQuerySimple_WithGroupByNumberWithoutRenderedGroupAndChildMaxWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by number, no children, max on non-rendered, unfiltered group", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Age]) { + Age + _max(_group: {field: Age, filter: {Age: {_gt: 26}}}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Age": int64(32), + "_max": int64(32), + }, + { + "Age": int64(19), + "_max": nil, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByNumberWithRenderedGroupAndChildMaxWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by number, no children, max on rendered, unfiltered group", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Age]) { + Age + _max(_group: {field: Age, filter: {Age: {_gt: 26}}}) + _group { + Name + } + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Age": int64(32), + "_max": int64(32), + "_group": []map[string]any{ + { + "Name": "Bob", + }, + { + "Name": "John", + }, + }, + }, + { + "Age": int64(19), + "_max": nil, + "_group": []map[string]any{ + { + "Name": "Alice", + }, + }, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByNumberWithRenderedGroupWithFilterAndChildMaxWithMatchingFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by number, no children, max on rendered, matching filtered group", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Age]) { + Age + _max(_group: {field: Age, filter: {Name: {_eq: "John"}}}) + _group(filter: {Name: {_eq: "John"}}) { + Name + } + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Age": int64(32), + "_max": int64(32), + "_group": []map[string]any{ + { + "Name": "John", + }, + }, + }, + { + "Age": int64(19), + "_max": nil, + "_group": []map[string]any{}, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByNumberWithRenderedGroupWithFilterAndChildMaxWithDifferentFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by number, no children, max on non-rendered, different filtered group", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Age]) { + Age + _max(_group: {field: Age, filter: {Age: {_gt: 
26}}}) + _group(filter: {Name: {_eq: "John"}}) { + Name + } + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Age": int64(32), + "_max": int64(32), + "_group": []map[string]any{ + { + "Name": "John", + }, + }, + }, + { + "Age": int64(19), + "_max": nil, + "_group": []map[string]any{}, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByNumberWithoutRenderedGroupAndChildMaxWithDifferentFilters_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by number, no children, max on non-rendered, unfiltered group", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Age]) { + Age + S1: _max(_group: {field: Age, filter: {Age: {_gt: 26}}}) + S2: _max(_group: {field: Age, filter: {Age: {_lt: 26}}}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Age": int64(32), + "S1": int64(32), + "S2": nil, + }, + { + "Age": int64(19), + "S1": nil, + "S2": int64(19), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_group_max_limit_offset_test.go b/tests/integration/query/simple/with_group_max_limit_offset_test.go new file mode 100644 index 0000000000..601aa3a6f7 --- /dev/null +++ b/tests/integration/query/simple/with_group_max_limit_offset_test.go @@ -0,0 +1,72 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQuerySimple_WithGroupByStringWithoutRenderedGroupAndChildIntegerMaxWithLimitAndOffset_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, offsetted limited max on non-rendered group integer value", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 38 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 28 + }`, + }, + testUtils.CreateDoc{ + // It is important to test negative values here, due to the auto-typing of numbers + Doc: `{ + "Name": "Alice", + "Age": -19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _max(_group: {field: Age, offset: 1, limit: 2}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_max": int64(38), + }, + { + "Name": "Alice", + "_max": nil, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_group_max_limit_test.go b/tests/integration/query/simple/with_group_max_limit_test.go new file mode 100644 index 0000000000..924032ba43 --- /dev/null +++ b/tests/integration/query/simple/with_group_max_limit_test.go @@ -0,0 +1,72 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQuerySimple_WithGroupByStringWithoutRenderedGroupAndChildIntegerMaxWithLimit_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, limited max on non-rendered group integer value", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 38 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 28 + }`, + }, + testUtils.CreateDoc{ + // It is important to test negative values here, due to the auto-typing of numbers + Doc: `{ + "Name": "Alice", + "Age": -19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _max(_group: {field: Age, limit: 2}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_max": int64(38), + }, + { + "Name": "Alice", + "_max": int64(-19), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_group_max_test.go b/tests/integration/query/simple/with_group_max_test.go new file mode 100644 index 0000000000..f501223312 --- /dev/null +++ b/tests/integration/query/simple/with_group_max_test.go @@ -0,0 +1,577 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQuerySimple_WithGroupByStringWithoutRenderedGroupAndMaxOfUndefined_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with max on unspecified field", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users (groupBy: [Name]) { + Name + _max + } + }`, + ExpectedError: "aggregate must be provided with a property to aggregate", + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByStringWithoutRenderedGroupAndChildIntegerMaxOnEmptyCollection_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by number, no children, max on non-rendered group, empty collection", + Actions: []any{ + testUtils.Request{ + Request: `query { + Users(groupBy: [Age]) { + Age + _max(_group: {field: Age}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByStringWithoutRenderedGroupAndChildIntegerMax_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, max on non-rendered group integer value", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 38 + }`, + }, + testUtils.CreateDoc{ + // It is important to test negative values here, due to the auto-typing of numbers + Doc: `{ + "Name": "Alice", + "Age": -19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _max(_group: {field: Age}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_max": int64(38), + }, + { + "Name": "Alice", + "_max": int64(-19), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByStringWithoutRenderedGroupAndChildNilMax_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, max on non-rendered group nil and integer values", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + // Age is undefined here + Doc: `{ + "Name": "John" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _max(_group: {field: Age}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_max": int64(32), + }, + { + "Name": "Alice", + "_max": int64(19), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByStringWithInnerGroupBooleanAndMaxOfMaxOfInt_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, with child group by boolean, and max of max on int", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 25, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 34, + "Verified": false + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Carlo", + "Age": 55, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 19, + "Verified": false + }`, + }, + 
testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _max(_group: {field: _max}) + _group (groupBy: [Verified]){ + Verified + _max(_group: {field: Age}) + } + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_max": int64(34), + "_group": []map[string]any{ + { + "Verified": true, + "_max": int64(32), + }, + { + "Verified": false, + "_max": int64(34), + }, + }, + }, + { + "Name": "Carlo", + "_max": int64(55), + "_group": []map[string]any{ + { + "Verified": true, + "_max": int64(55), + }, + }, + }, + { + "Name": "Alice", + "_max": int64(19), + "_group": []map[string]any{ + { + "Verified": false, + "_max": int64(19), + }, + }, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByStringWithoutRenderedGroupAndChildEmptyFloatMax_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, max on non-rendered group float (default) value", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 1.82 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 1.89 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice" + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _max(_group: {field: HeightM}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_max": float64(1.89), + }, + { + "Name": "Alice", + "_max": nil, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByStringWithoutRenderedGroupAndChildFloatMax_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, max on non-rendered group float value", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 1.82 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 1.89 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "HeightM": 2.04 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _max(_group: {field: HeightM}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_max": float64(1.89), + }, + { + "Name": "Alice", + "_max": float64(2.04), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByStringWithInnerGroupBooleanAndMaxOfMaxOfFloat_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, with child group by boolean, and max of max on float", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 1.82, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 1.61, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 2.22, + "Verified": false + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Carlo", + "HeightM": 1.74, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "HeightM": 2.04, + "Verified": false + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _max(_group: {field: _max}) + _group (groupBy: [Verified]){ + Verified + _max(_group: {field: HeightM}) + } + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "Alice", + "_max": float64(2.04), + "_group": []map[string]any{ + { + "Verified": false, + "_max": float64(2.04), + }, + }, + }, + { + "Name": 
"John", + "_max": float64(2.22), + "_group": []map[string]any{ + { + "Verified": true, + "_max": float64(1.82), + }, + { + "Verified": false, + "_max": float64(2.22), + }, + }, + }, + { + "Name": "Carlo", + "_max": float64(1.74), + "_group": []map[string]any{ + { + "Verified": true, + "_max": float64(1.74), + }, + }, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByStringWithInnerGroupBooleanAndMaxOfMaxOfMaxOfFloat_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, with child group by boolean, and max of max of max of float", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 1.82, + "Age": 25, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 1.61, + "Age": 32, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 2.22, + "Age": 34, + "Verified": false + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Carlo", + "HeightM": 1.74, + "Age": 55, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "HeightM": 2.04, + "Age": 19, + "Verified": false + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _max(_group: {field: _max}) + _group (groupBy: [Verified]){ + Verified + _max(_group: {field: HeightM}) + _group (groupBy: [Age]){ + Age + _max(_group: {field: HeightM}) + } + } + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "Carlo", + "_max": float64(1.74), + "_group": []map[string]any{ + { + "Verified": true, + "_max": float64(1.74), + "_group": []map[string]any{ + { + "Age": int64(55), + "_max": float64(1.74), + }, + }, + }, + }, + }, + { + "Name": "Alice", + "_max": float64(2.04), + "_group": []map[string]any{ + { + "Verified": false, + "_max": float64(2.04), + "_group": []map[string]any{ + { + "Age": int64(19), + "_max": float64(2.04), + }, + }, + }, + }, + }, + { + "Name": "John", + "_max": float64(2.22), + "_group": []map[string]any{ + { + "Verified": true, + "_max": float64(1.82), + "_group": []map[string]any{ + { + "Age": int64(32), + "_max": float64(1.61), + }, + { + "Age": int64(25), + "_max": float64(1.82), + }, + }, + }, + { + "Verified": false, + "_max": float64(2.22), + "_group": []map[string]any{ + { + "Age": int64(34), + "_max": float64(2.22), + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_group_min_filter_test.go b/tests/integration/query/simple/with_group_min_filter_test.go new file mode 100644 index 0000000000..21332bc827 --- /dev/null +++ b/tests/integration/query/simple/with_group_min_filter_test.go @@ -0,0 +1,294 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQuerySimple_WithGroupByNumberWithoutRenderedGroupAndChildMinWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by number, no children, min on non-rendered, unfiltered group", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Age]) { + Age + _min(_group: {field: Age, filter: {Age: {_gt: 26}}}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Age": int64(32), + "_min": int64(32), + }, + { + "Age": int64(19), + "_min": nil, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByNumberWithRenderedGroupAndChildMinWithFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by number, no children, min on rendered, unfiltered group", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Age]) { + Age + _min(_group: {field: Age, filter: {Age: {_gt: 26}}}) + _group { + Name + } + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Age": int64(32), + "_min": int64(32), + "_group": []map[string]any{ + { + "Name": "Bob", + }, + { + "Name": "John", + }, + }, + }, + { + "Age": int64(19), + "_min": nil, + "_group": []map[string]any{ + { + "Name": "Alice", + }, + }, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByNumberWithRenderedGroupWithFilterAndChildMinWithMatchingFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by number, no children, min on rendered, matching filtered group", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Age]) { + Age + _min(_group: {field: Age, filter: {Name: {_eq: "John"}}}) + _group(filter: {Name: {_eq: "John"}}) { + Name + } + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Age": int64(32), + "_min": int64(32), + "_group": []map[string]any{ + { + "Name": "John", + }, + }, + }, + { + "Age": int64(19), + "_min": nil, + "_group": []map[string]any{}, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByNumberWithRenderedGroupWithFilterAndChildMinWithDifferentFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by number, no children, min on non-rendered, different filtered group", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Age]) { + Age + _min(_group: {field: Age, filter: {Age: {_gt: 
26}}}) + _group(filter: {Name: {_eq: "John"}}) { + Name + } + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Age": int64(32), + "_min": int64(32), + "_group": []map[string]any{ + { + "Name": "John", + }, + }, + }, + { + "Age": int64(19), + "_min": nil, + "_group": []map[string]any{}, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByNumberWithoutRenderedGroupAndChildMinWithDifferentFilters_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by number, no children, min on non-rendered, unfiltered group", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Age]) { + Age + S1: _min(_group: {field: Age, filter: {Age: {_gt: 26}}}) + S2: _min(_group: {field: Age, filter: {Age: {_lt: 26}}}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Age": int64(32), + "S1": int64(32), + "S2": nil, + }, + { + "Age": int64(19), + "S1": nil, + "S2": int64(19), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_group_min_limit_offset_test.go b/tests/integration/query/simple/with_group_min_limit_offset_test.go new file mode 100644 index 0000000000..b7ac741500 --- /dev/null +++ b/tests/integration/query/simple/with_group_min_limit_offset_test.go @@ -0,0 +1,72 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQuerySimple_WithGroupByStringWithoutRenderedGroupAndChildIntegerMinWithLimitAndOffset_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, offsetted limited min on non-rendered group integer value", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 38 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 28 + }`, + }, + testUtils.CreateDoc{ + // It is important to test negative values here, due to the auto-typing of numbers + Doc: `{ + "Name": "Alice", + "Age": -19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _min(_group: {field: Age, offset: 1, limit: 2}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_min": int64(32), + }, + { + "Name": "Alice", + "_min": nil, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_group_min_limit_test.go b/tests/integration/query/simple/with_group_min_limit_test.go new file mode 100644 index 0000000000..c2f350d218 --- /dev/null +++ b/tests/integration/query/simple/with_group_min_limit_test.go @@ -0,0 +1,72 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQuerySimple_WithGroupByStringWithoutRenderedGroupAndChildIntegerMinWithLimit_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, limited min on non-rendered group integer value", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 38 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 28 + }`, + }, + testUtils.CreateDoc{ + // It is important to test negative values here, due to the auto-typing of numbers + Doc: `{ + "Name": "Alice", + "Age": -19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _min(_group: {field: Age, limit: 2}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_min": int64(28), + }, + { + "Name": "Alice", + "_min": int64(-19), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_group_min_test.go b/tests/integration/query/simple/with_group_min_test.go new file mode 100644 index 0000000000..ac8d2e3ed9 --- /dev/null +++ b/tests/integration/query/simple/with_group_min_test.go @@ -0,0 +1,577 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQuerySimple_WithGroupByStringWithoutRenderedGroupAndMinOfUndefined_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with min on unspecified field", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users (groupBy: [Name]) { + Name + _min + } + }`, + ExpectedError: "aggregate must be provided with a property to aggregate", + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByStringWithoutRenderedGroupAndChildIntegerMinOnEmptyCollection_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by number, no children, min on non-rendered group, empty collection", + Actions: []any{ + testUtils.Request{ + Request: `query { + Users(groupBy: [Age]) { + Age + _min(_group: {field: Age}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByStringWithoutRenderedGroupAndChildIntegerMin_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, min on non-rendered group integer value", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 38 + }`, + }, + testUtils.CreateDoc{ + // It is important to test negative values here, due to the auto-typing of numbers + Doc: `{ + "Name": "Alice", + "Age": -19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _min(_group: {field: Age}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_min": int64(32), + }, + { + "Name": "Alice", + "_min": int64(-19), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByStringWithoutRenderedGroupAndChildNilMin_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, min on non-rendered group nil and integer values", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32 + }`, + }, + testUtils.CreateDoc{ + // Age is undefined here + Doc: `{ + "Name": "John" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _min(_group: {field: Age}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_min": int64(32), + }, + { + "Name": "Alice", + "_min": int64(19), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByStringWithInnerGroupBooleanAndMinOfMinOfInt_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, with child group by boolean, and min of min on int", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 25, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 32, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 34, + "Verified": false + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Carlo", + "Age": 55, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 19, + "Verified": false + }`, + }, + 
testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _min(_group: {field: _min}) + _group (groupBy: [Verified]){ + Verified + _min(_group: {field: Age}) + } + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_min": int64(25), + "_group": []map[string]any{ + { + "Verified": true, + "_min": int64(25), + }, + { + "Verified": false, + "_min": int64(34), + }, + }, + }, + { + "Name": "Carlo", + "_min": int64(55), + "_group": []map[string]any{ + { + "Verified": true, + "_min": int64(55), + }, + }, + }, + { + "Name": "Alice", + "_min": int64(19), + "_group": []map[string]any{ + { + "Verified": false, + "_min": int64(19), + }, + }, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByStringWithoutRenderedGroupAndChildEmptyFloatMin_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, min on non-rendered group float (default) value", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 1.82 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 1.89 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice" + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _min(_group: {field: HeightM}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_min": float64(1.82), + }, + { + "Name": "Alice", + "_min": nil, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByStringWithoutRenderedGroupAndChildFloatMin_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, min on non-rendered group float value", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 1.82 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 1.89 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "HeightM": 2.04 + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _min(_group: {field: HeightM}) + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "_min": float64(1.82), + }, + { + "Name": "Alice", + "_min": float64(2.04), + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByStringWithInnerGroupBooleanAndMinOfMinOfFloat_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, with child group by boolean, and min of min on float", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 1.82, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 1.61, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 2.22, + "Verified": false + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Carlo", + "HeightM": 1.74, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "HeightM": 2.04, + "Verified": false + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _min(_group: {field: _min}) + _group (groupBy: [Verified]){ + Verified + _min(_group: {field: HeightM}) + } + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "Alice", + "_min": float64(2.04), + "_group": []map[string]any{ + { + "Verified": false, + "_min": float64(2.04), + }, + }, + }, + { + "Name": 
"John", + "_min": float64(1.61), + "_group": []map[string]any{ + { + "Verified": true, + "_min": float64(1.61), + }, + { + "Verified": false, + "_min": float64(2.22), + }, + }, + }, + { + "Name": "Carlo", + "_min": float64(1.74), + "_group": []map[string]any{ + { + "Verified": true, + "_min": float64(1.74), + }, + }, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithGroupByStringWithInnerGroupBooleanAndMinOfMinOfMinOfFloat_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with group by string, with child group by boolean, and min of min of min of float", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 1.82, + "Age": 25, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 1.61, + "Age": 32, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "HeightM": 2.22, + "Age": 34, + "Verified": false + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Carlo", + "HeightM": 1.74, + "Age": 55, + "Verified": true + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "HeightM": 2.04, + "Age": 19, + "Verified": false + }`, + }, + testUtils.Request{ + Request: `query { + Users(groupBy: [Name]) { + Name + _min(_group: {field: _min}) + _group (groupBy: [Verified]){ + Verified + _min(_group: {field: HeightM}) + _group (groupBy: [Age]){ + Age + _min(_group: {field: HeightM}) + } + } + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "Carlo", + "_min": float64(1.74), + "_group": []map[string]any{ + { + "Verified": true, + "_min": float64(1.74), + "_group": []map[string]any{ + { + "Age": int64(55), + "_min": float64(1.74), + }, + }, + }, + }, + }, + { + "Name": "Alice", + "_min": float64(2.04), + "_group": []map[string]any{ + { + "Verified": false, + "_min": float64(2.04), + "_group": []map[string]any{ + { + "Age": int64(19), + "_min": float64(2.04), + }, + }, + }, + }, + }, + { + "Name": "John", + "_min": float64(1.61), + "_group": []map[string]any{ + { + "Verified": true, + "_min": float64(1.61), + "_group": []map[string]any{ + { + "Age": int64(32), + "_min": float64(1.61), + }, + { + "Age": int64(25), + "_min": float64(1.82), + }, + }, + }, + { + "Verified": false, + "_min": float64(2.22), + "_group": []map[string]any{ + { + "Age": int64(34), + "_min": float64(2.22), + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_max_filter_test.go b/tests/integration/query/simple/with_max_filter_test.go new file mode 100644 index 0000000000..78f275ca46 --- /dev/null +++ b/tests/integration/query/simple/with_max_filter_test.go @@ -0,0 +1,53 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQuerySimple_WithMaxFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with max filter", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 30 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 32 + }`, + }, + testUtils.Request{ + Request: `query { + _max(Users: {field: Age, filter: {Age: {_lt: 32}}}) + }`, + Results: map[string]any{ + "_max": int64(30), + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_max_test.go b/tests/integration/query/simple/with_max_test.go new file mode 100644 index 0000000000..bdb47b6f8c --- /dev/null +++ b/tests/integration/query/simple/with_max_test.go @@ -0,0 +1,129 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package simple + +import ( + "math" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + + "github.com/sourcenetwork/immutable" +) + +func TestQuerySimple_WithMaxOnUndefinedObject_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query max on undefined object", + Actions: []any{ + testUtils.Request{ + Request: `query { + _max + }`, + ExpectedError: "aggregate must be provided with a property to aggregate", + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithMaxOnUndefinedField_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query max on undefined field", + Actions: []any{ + testUtils.Request{ + Request: `query { + _max(Users: {}) + }`, + ExpectedError: "Argument \"Users\" has invalid value {}.\nIn field \"field\": Expected \"UsersNumericFieldsArg!\", found null.", + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithMaxOnEmptyCollection_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query max on empty", + Actions: []any{ + testUtils.Request{ + Request: `query { + _max(Users: {field: Age}) + }`, + Results: map[string]any{ + "_max": nil, + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithMax_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query max", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 30 + }`, + }, + testUtils.Request{ + Request: `query { + _max(Users: {field: Age}) + }`, + Results: map[string]any{ + "_max": int64(30), + }, + }, + }, + } + + executeTestCase(t, test) +} + +func TestQuerySimple_WithMaxAndMaxValueInt_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GraphQL does not support 64 bit int + testUtils.CollectionSaveMutationType, + testUtils.CollectionNamedMutationType, + }), + Description: "Simple query max and max value int", + Actions: []any{ + testUtils.CreateDoc{ + DocMap: map[string]any{ + "Name": "John", + "Age": int64(math.MaxInt64), + }, + }, + 
testUtils.Request{ + Request: `query { + _max(Users: {field: Age}) + }`, + Results: map[string]any{ + "_max": int64(math.MaxInt64), + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_min_filter_test.go b/tests/integration/query/simple/with_min_filter_test.go new file mode 100644 index 0000000000..cd429e3361 --- /dev/null +++ b/tests/integration/query/simple/with_min_filter_test.go @@ -0,0 +1,53 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQuerySimple_WithMinFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with min filter", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Age": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Age": 30 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Alice", + "Age": 32 + }`, + }, + testUtils.Request{ + Request: `query { + _min(Users: {field: Age, filter: {Age: {_gt: 21}}}) + }`, + Results: map[string]any{ + "_min": int64(30), + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_min_test.go b/tests/integration/query/simple/with_min_test.go new file mode 100644 index 0000000000..feb8e54e2f --- /dev/null +++ b/tests/integration/query/simple/with_min_test.go @@ -0,0 +1,129 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+
+package simple
+
+import (
+ "math"
+ "testing"
+
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+
+ "github.com/sourcenetwork/immutable"
+)
+
+func TestQuerySimple_WithMinOnUndefinedObject_ReturnsError(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query min on undefined object",
+ Actions: []any{
+ testUtils.Request{
+ Request: `query {
+ _min
+ }`,
+ ExpectedError: "aggregate must be provided with a property to aggregate",
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQuerySimple_WithMinOnUndefinedField_ReturnsError(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query min on undefined field",
+ Actions: []any{
+ testUtils.Request{
+ Request: `query {
+ _min(Users: {})
+ }`,
+ ExpectedError: "Argument \"Users\" has invalid value {}.\nIn field \"field\": Expected \"UsersNumericFieldsArg!\", found null.",
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQuerySimple_WithMinOnEmptyCollection_Succeeds(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query min on empty",
+ Actions: []any{
+ testUtils.Request{
+ Request: `query {
+ _min(Users: {field: Age})
+ }`,
+ Results: map[string]any{
+ "_min": nil,
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQuerySimple_WithMin_Succeeds(t *testing.T) {
+ test := testUtils.TestCase{
+ Description: "Simple query min",
+ Actions: []any{
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "John",
+ "Age": 21
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "Name": "Bob",
+ "Age": 30
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ _min(Users: {field: Age})
+ }`,
+ Results: map[string]any{
+ "_min": int64(21),
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
+
+func TestQuerySimple_WithMinAndMaxValueInt_Succeeds(t *testing.T) {
+ test := testUtils.TestCase{
+ SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
+ // GraphQL does not support 64 bit int
+ testUtils.CollectionSaveMutationType,
+ testUtils.CollectionNamedMutationType,
+ }),
+ Description: "Simple query min and max value int",
+ Actions: []any{
+ testUtils.CreateDoc{
+ DocMap: map[string]any{
+ "Name": "John",
+ "Age": int64(math.MaxInt64),
+ },
+ },
+ testUtils.Request{
+ Request: `query {
+ _min(Users: {field: Age})
+ }`,
+ Results: map[string]any{
+ "_min": int64(math.MaxInt64),
+ },
+ },
+ },
+ }
+
+ executeTestCase(t, test)
+}
diff --git a/tests/integration/schema/default_fields.go b/tests/integration/schema/default_fields.go
index 6462ef6066..18c09975a2 100644
--- a/tests/integration/schema/default_fields.go
+++ b/tests/integration/schema/default_fields.go
@@ -119,6 +119,20 @@ var aggregateFields = fields{
 "name": "Float",
 },
 },
+ map[string]any{
+ "name": "_max",
+ "type": map[string]any{
+ "kind": "SCALAR",
+ "name": "Float",
+ },
+ },
+ map[string]any{
+ "name": "_min",
+ "type": map[string]any{
+ "kind": "SCALAR",
+ "name": "Float",
+ },
+ },
 map[string]any{
 "name": "_count",
 "type": map[string]any{

From 4e5470ca6dd2814d45a002116b94db962a0c068f Mon Sep 17 00:00:00 2001
From: AndrewSisley
Date: Thu, 3 Oct 2024 09:26:08 -0400
Subject: [PATCH 50/71] fix: Remove duplication of block heads on delete (#3096)

## Relevant issue(s)

Resolves #3085 #3089
Documents #3056 #3086 #3087 (I'm going to close these on merge, no need to have them littering the backlog)

## Description

Removes the duplication of head links from delete blocks.
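In outline, the block schema changes like so (a sketch simplified from the `internal/core/block/block.go` diff below - before this patch heads travelled inside `links` as `DAGLink`s carrying the magic `_head` name):

```
# before
type Block struct {
	delta      CRDT
	links      [DAGLink]         # heads included here, named "_head"
	encryption optional Link
}

# after
type Block struct {
	delta      CRDT
	heads      optional [Link]   # previous blocks only, no link names needed
	links      optional [DAGLink]
	encryption optional Link
}
```

Making both `heads` and `links` optional lets an empty set be encoded as nil, which consumes no space at all (the saving includes the property name).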
PR also includes the following to save the hassle of multiple test-cid updates:

- Removes `fieldName` from composite block deltas
- Removes the magic `_head` link name, and extracts head links to a new, optional prop
- Documents the reasons for duplicating various bits of data in the blockstore blocks as discussed in standup

With the actions defined in `TestQueryCommitsWithFieldIDFieldWithUpdate`, create block size has been reduced by 4%, and update block size by 7% - this will vary a lot depending on what fields are being updated though; the test chosen for the calculation was just the first test I found that created one small doc and updated a single field.

I recommend reviewing commit by commit. The test-cid changes have been pulled out to their own commit.
---
 docs/data_format_changes/i3085-block-trim.md | 3 +
 internal/core/block/block.go | 71 +++++++++++++----
 internal/core/block/block_test.go | 17 ++--
 internal/core/crdt/base.go | 2 +
 internal/core/crdt/composite.go | 22 ++++--
 internal/core/crdt/ipld_union.go | 3 -
 internal/core/type.go | 1 -
 internal/db/collection.go | 1 -
 internal/db/collection_delete.go | 18 +----
 internal/db/fetcher/versioned.go | 15 +---
 internal/db/merge.go | 35 ++++-----
 internal/db/merge_test.go | 33 ++++----
 internal/merkle/clock/clock.go | 14 +---
 internal/merkle/clock/clock_test.go | 4 +-
 internal/merkle/crdt/composite.go | 2 -
 internal/merkle/crdt/merklecrdt.go | 1 -
 internal/planner/commit.go | 48 ++++++------
 net/sync_dag.go | 4 +-
 tests/integration/encryption/commit_test.go | 10 +-
 tests/integration/encryption/peer_test.go | 10 +-
 .../mutation/create/with_version_test.go | 2 +-
 .../integration/query/commits/simple_test.go | 48 ++++++------
 .../query/commits/with_cid_test.go | 8 +-
 .../query/commits/with_delete_test.go | 78 +++++++++++++++++++
 .../query/commits/with_depth_test.go | 34 ++++----
 .../query/commits/with_doc_id_cid_test.go | 4 +-
 .../query/commits/with_doc_id_count_test.go | 6 +-
 .../query/commits/with_doc_id_field_test.go | 4 +-
 .../commits/with_doc_id_limit_offset_test.go | 4 +-
 .../query/commits/with_doc_id_limit_test.go | 4 +-
 .../with_doc_id_order_limit_offset_test.go | 4 +-
 .../query/commits/with_doc_id_order_test.go | 70 ++++++++---------
 .../query/commits/with_doc_id_test.go | 54 ++++++-------
 .../commits/with_doc_id_typename_test.go | 6 +-
 .../query/commits/with_field_test.go | 6 +-
 .../query/commits/with_group_test.go | 16 ++--
 .../query/commits/with_null_input_test.go | 48 ++++++------
 .../latest_commits/with_doc_id_field_test.go | 12 +--
 .../query/latest_commits/with_doc_id_test.go | 12 +--
 .../query/one_to_many/with_cid_doc_id_test.go | 8 +-
 .../query/simple/with_cid_doc_id_test.go | 18 ++---
 .../query/simple/with_version_test.go | 30 +++----
 42 files changed, 431 insertions(+), 359 deletions(-)
 create mode 100644 docs/data_format_changes/i3085-block-trim.md
 create mode 100644 tests/integration/query/commits/with_delete_test.go

diff --git a/docs/data_format_changes/i3085-block-trim.md b/docs/data_format_changes/i3085-block-trim.md
new file mode 100644
index 0000000000..52383fc305
--- /dev/null
+++ b/docs/data_format_changes/i3085-block-trim.md
@@ -0,0 +1,3 @@
+# Remove duplication of block heads on delete
+
+The structure of blocks in the blockstore was reworked slightly - head links have been extracted to a separate property, and fieldName has been removed from composite blocks.
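For reviewers, the consumer-facing surface of the `block.go` change below is small: `GetHeadLinks` is gone, and callers read the new `Heads` property or the combined `AllLinks` accessor instead. A minimal sketch of a depth-first DAG walk over the new shape (illustrative only - `walk` and `loadBlock` are hypothetical names, with `loadBlock` standing in for whatever linksystem fetch the caller already has):

```go
package sketch

import (
	cidlink "github.com/ipld/go-ipld-prime/linking/cid"

	coreblock "github.com/sourcenetwork/defradb/internal/core/block"
)

// walk visits every block reachable from b, parents first: AllLinks returns
// the Heads followed by the field-level Links, so document history is
// traversed before sibling field blocks. A real traversal would also
// de-duplicate CIDs it has already visited.
func walk(b *coreblock.Block, loadBlock func(cidlink.Link) (*coreblock.Block, error)) error {
	for _, lnk := range b.AllLinks() {
		child, err := loadBlock(lnk)
		if err != nil {
			return err
		}
		if err := walk(child, loadBlock); err != nil {
			return err
		}
	}
	return nil
}
```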
diff --git a/internal/core/block/block.go b/internal/core/block/block.go index 1ec62fe939..e930816030 100644 --- a/internal/core/block/block.go +++ b/internal/core/block/block.go @@ -82,6 +82,11 @@ type DAGLink struct { // Name is the name of the link. // // This will be either the field name of the CRDT delta or "_head" for the head link. + // + // This field currently serves no purpose and is duplicating data already held on the target + // block. However we want to have this long term to enable some fancy P2P magic to allow users + // to configure the collection to only sync particular fields using + // [GraphSync](https://github.com/ipfs/go-graphsync) which will need to make use of this property. Name string // Link is the CID link to the object. cidlink.Link @@ -121,8 +126,21 @@ type Encryption struct { type Block struct { // Delta is the CRDT delta that is stored in the block. Delta crdt.CRDT + + // The previous block-CIDs that this block is based on. + // + // For example: + // - This will be empty for all 'create' blocks. + // - Most 'update' blocks will have a single head, however they will have multiple if the history has + // diverged and there were multiple blocks at the previous height. + Heads []cidlink.Link + // Links are the links to other blocks in the DAG. + // + // This does not include `Heads`. This will be empty for field-level blocks. It will never be empty + // for composite blocks (and will contain links to field-level blocks). Links []DAGLink + // Encryption contains the encryption information for the block's delta. // It needs to be a pointer so that it can be translated from and to `optional` in the IPLD schema. Encryption *cidlink.Link @@ -137,20 +155,25 @@ func (block *Block) IsEncrypted() bool { func (block *Block) Clone() *Block { return &Block{ Delta: block.Delta.Clone(), + Heads: block.Heads, Links: block.Links, Encryption: block.Encryption, } } -// GetHeadLinks returns the CIDs of the previous blocks. There can be more than 1 with multiple heads. -func (block *Block) GetHeadLinks() []cid.Cid { - var heads []cid.Cid +// AllLinks returns the block `Heads` and `Links` combined into a single set. +// +// All heads will be first in the set, followed by other links. +func (block *Block) AllLinks() []cidlink.Link { + result := make([]cidlink.Link, 0, len(block.Heads)+len(block.Links)) + + result = append(result, block.Heads...) + for _, link := range block.Links { - if link.Name == core.HEAD { - heads = append(heads, link.Cid) - } + result = append(result, link.Link) } - return heads + + return result } // IPLDSchemaBytes returns the IPLD schema representation for the block. @@ -160,7 +183,8 @@ func (block *Block) IPLDSchemaBytes() []byte { return []byte(` type Block struct { delta CRDT - links [DAGLink] + heads optional [Link] + links optional [DAGLink] encryption optional Link } `) @@ -181,20 +205,17 @@ func (enc *Encryption) IPLDSchemaBytes() []byte { // New creates a new block with the given delta and links. func New(delta core.Delta, links []DAGLink, heads ...cid.Cid) *Block { - blockLinks := make([]DAGLink, 0, len(links)+len(heads)) - // Sort the heads lexicographically by CID. // We need to do this to ensure that the block is deterministic. 
sort.Slice(heads, func(i, j int) bool { return strings.Compare(heads[i].String(), heads[j].String()) < 0 }) + + headLinks := make([]cidlink.Link, 0, len(heads)) for _, head := range heads { - blockLinks = append( - blockLinks, - DAGLink{ - Name: core.HEAD, - Link: cidlink.Link{Cid: head}, - }, + headLinks = append( + headLinks, + cidlink.Link{Cid: head}, ) } @@ -204,9 +225,27 @@ func New(delta core.Delta, links []DAGLink, heads ...cid.Cid) *Block { return strings.Compare(links[i].Cid.String(), links[j].Cid.String()) < 0 }) + blockLinks := make([]DAGLink, 0, len(links)) blockLinks = append(blockLinks, links...) + if len(headLinks) == 0 { + // The encoding used for block serialization will consume space if an empty set is + // provided, but it will not consume space if nil is provided, so if empty we set it + // to nil. The would-be space consumed includes the property name, so this is not an + // insignificant amount. + headLinks = nil + } + + if len(blockLinks) == 0 { + // The encoding used for block serialization will consume space if an empty set is + // provided, but it will not consume space if nil is provided, so if empty we set it + // to nil. The would-be space consumed includes the property name, so this is not an + // insignificant amount. + blockLinks = nil + } + return &Block{ + Heads: headLinks, Links: blockLinks, Delta: crdt.NewCRDT(delta), } diff --git a/internal/core/block/block_test.go b/internal/core/block/block_test.go index d7fe2d1bf0..f94500ca80 100644 --- a/internal/core/block/block_test.go +++ b/internal/core/block/block_test.go @@ -21,7 +21,6 @@ import ( "github.com/ipld/go-ipld-prime/storage/memstore" "github.com/stretchr/testify/require" - "github.com/sourcenetwork/defradb/internal/core" "github.com/sourcenetwork/defradb/internal/core/crdt" ) @@ -47,7 +46,6 @@ func generateBlocks(lsys *linking.LinkSystem) (cidlink.Link, error) { Delta: crdt.CRDT{ CompositeDAGDelta: &crdt.CompositeDAGDelta{ DocID: []byte("docID"), - FieldName: "C", Priority: 1, SchemaVersionID: "schemaVersionID", Status: 1, @@ -75,11 +73,8 @@ func generateBlocks(lsys *linking.LinkSystem) (cidlink.Link, error) { Data: []byte("Johny"), }, }, - Links: []DAGLink{ - { - Name: core.HEAD, - Link: fieldBlockLink.(cidlink.Link), - }, + Heads: []cidlink.Link{ + fieldBlockLink.(cidlink.Link), }, } fieldUpdateBlockLink, err := lsys.Store(ipld.LinkContext{}, GetLinkPrototype(), fieldUpdateBlock.GenerateNode()) @@ -91,17 +86,15 @@ func generateBlocks(lsys *linking.LinkSystem) (cidlink.Link, error) { Delta: crdt.CRDT{ CompositeDAGDelta: &crdt.CompositeDAGDelta{ DocID: []byte("docID"), - FieldName: "C", Priority: 2, SchemaVersionID: "schemaVersionID", Status: 1, }, }, + Heads: []cidlink.Link{ + compositeBlockLink.(cidlink.Link), + }, Links: []DAGLink{ - { - Name: core.HEAD, - Link: compositeBlockLink.(cidlink.Link), - }, { Name: "name", Link: fieldUpdateBlockLink.(cidlink.Link), diff --git a/internal/core/crdt/base.go b/internal/core/crdt/base.go index 9958b7b1ac..87a4af445f 100644 --- a/internal/core/crdt/base.go +++ b/internal/core/crdt/base.go @@ -34,6 +34,8 @@ type baseCRDT struct { // It can be used to identify the collection datastructure state at the time of commit. schemaVersionKey core.CollectionSchemaVersionKey + // fieldName holds the name of the field hosting this CRDT, if this is a field level + // commit. 
fieldName string } diff --git a/internal/core/crdt/composite.go b/internal/core/crdt/composite.go index c730badcb6..d1be34d64a 100644 --- a/internal/core/crdt/composite.go +++ b/internal/core/crdt/composite.go @@ -26,12 +26,23 @@ import ( // CompositeDAGDelta represents a delta-state update made of sub-MerkleCRDTs. type CompositeDAGDelta struct { - DocID []byte - FieldName string - Priority uint64 + // This property is duplicated from field-level blocks. + // + // We could remove this from the composite without much hassle; however, long-term + // the ideal solution would be to remove it from the field-level commits *excluding* + // the initial field-level commit, where it must exist in order to scope it to a particular + // document. This would require a local index in order to handle field-level commit-queries. + DocID []byte + Priority uint64 // SchemaVersionID is the schema version datastore key at the time of commit. // // It can be used to identify the collection datastructure state at the time of commit. + // + // This property is deliberately duplicated from field-level blocks as it makes the P2P code + // quite a lot easier - we can remove this from here at some point if we want to. + // + // Conversely, we could remove this from the field-level commits and leave it on the composite, + // however that would complicate commit-queries and would require us to maintain an index elsewhere. SchemaVersionID string // Status represents the status of the document. By default it is `Active`. // Alternatively, it can be set to `Deleted`. @@ -47,7 +58,6 @@ func (delta *CompositeDAGDelta) IPLDSchemaBytes() []byte { return []byte(` type CompositeDAGDelta struct { docID Bytes - fieldName String priority Int schemaVersionID String status Int @@ -75,9 +85,8 @@ func NewCompositeDAG( store datastore.DSReaderWriter, schemaVersionKey core.CollectionSchemaVersionKey, key core.DataStoreKey, - fieldName string, ) CompositeDAG { - return CompositeDAG{newBaseCRDT(store, key, schemaVersionKey, fieldName)} + return CompositeDAG{newBaseCRDT(store, key, schemaVersionKey, "")} } // Value is a no-op for a CompositeDAG.
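Since several consumers (`net/sync_dag.go`, the clock's `updateHeads`) now traverse `AllLinks()` instead of filtering `Links` by name, here is a quick illustration of its contract from the block.go changes above, again with simplified stand-in types rather than the real ones:

```go
package main

import "fmt"

type Link struct{ Cid string }

type DAGLink struct {
	Name string
	Link
}

type Block struct {
	Heads []Link
	Links []DAGLink
}

// allLinks mirrors Block.AllLinks: heads first, then the remaining
// field-level links, combined into a single slice for DAG traversal.
func (b Block) allLinks() []Link {
	out := make([]Link, 0, len(b.Heads)+len(b.Links))
	out = append(out, b.Heads...)
	for _, l := range b.Links {
		out = append(out, l.Link)
	}
	return out
}

func main() {
	b := Block{
		Heads: []Link{{Cid: "bafy...head"}}, // placeholder CIDs
		Links: []DAGLink{{Name: "name", Link: Link{Cid: "bafy...name"}}},
	}
	for _, l := range b.allLinks() {
		fmt.Println(l.Cid) // heads always print before field links
	}
}
```

Because heads always come first in the combined set, traversal code that cares about parents can still treat the leading entries specially without any name-based filtering.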
@@ -89,7 +98,6 @@ func (c CompositeDAG) Value(ctx context.Context) ([]byte, error) { func (c CompositeDAG) Set(status client.DocumentStatus) *CompositeDAGDelta { return &CompositeDAGDelta{ DocID: []byte(c.key.DocID), - FieldName: c.fieldName, SchemaVersionID: c.schemaVersionKey.SchemaVersionID, Status: status, } diff --git a/internal/core/crdt/ipld_union.go b/internal/core/crdt/ipld_union.go index 95023f28b2..28c9ccf420 100644 --- a/internal/core/crdt/ipld_union.go +++ b/internal/core/crdt/ipld_union.go @@ -75,8 +75,6 @@ func (c CRDT) GetFieldName() string { switch { case c.LWWRegDelta != nil: return c.LWWRegDelta.FieldName - case c.CompositeDAGDelta != nil: - return c.CompositeDAGDelta.FieldName case c.CounterDelta != nil: return c.CounterDelta.FieldName } @@ -124,7 +122,6 @@ func (c CRDT) Clone() CRDT { case c.CompositeDAGDelta != nil: cloned.CompositeDAGDelta = &CompositeDAGDelta{ DocID: c.CompositeDAGDelta.DocID, - FieldName: c.CompositeDAGDelta.FieldName, Priority: c.CompositeDAGDelta.Priority, SchemaVersionID: c.CompositeDAGDelta.SchemaVersionID, Status: c.CompositeDAGDelta.Status, diff --git a/internal/core/type.go b/internal/core/type.go index 9162e5a2bf..30c26dd831 100644 --- a/internal/core/type.go +++ b/internal/core/type.go @@ -12,5 +12,4 @@ package core const ( COMPOSITE_NAMESPACE = "C" - HEAD = "_head" ) diff --git a/internal/db/collection.go b/internal/db/collection.go index 6165218f78..dd1a413946 100644 --- a/internal/db/collection.go +++ b/internal/db/collection.go @@ -920,7 +920,6 @@ func (c *collection) saveCompositeToMerkleCRDT( txn, core.NewCollectionSchemaVersionKey(c.Schema().VersionID, c.ID()), dsKey, - "", ) if status.IsDeleted() { diff --git a/internal/db/collection_delete.go b/internal/db/collection_delete.go index 9ccca92ed5..62d7c24e50 100644 --- a/internal/db/collection_delete.go +++ b/internal/db/collection_delete.go @@ -13,14 +13,11 @@ package db import ( "context" - cidlink "github.com/ipld/go-ipld-prime/linking/cid" - "github.com/sourcenetwork/defradb/acp" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/event" "github.com/sourcenetwork/defradb/internal/core" coreblock "github.com/sourcenetwork/defradb/internal/core/block" - "github.com/sourcenetwork/defradb/internal/merkle/clock" ) // DeleteWithFilter deletes using a filter to target documents for delete. 
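The `applyDelete` hunk below drops the headstore read entirely: heads for the delete block now flow through `coreblock.New` like any other commit, instead of being duplicated as `_head`-named links. A rough, self-contained sketch of what the constructor guarantees (simplified types; the real logic lives in `internal/core/block`):

```go
package main

import (
	"fmt"
	"sort"
)

type Link struct{ Cid string }

type DAGLink struct {
	Name string
	Link
}

type Block struct {
	Heads []Link
	Links []DAGLink
}

// newBlock mirrors coreblock.New from the block.go diff above:
// heads and links are sorted lexicographically by CID so the encoded
// block is deterministic, and empty slices become nil so absent
// optional properties cost no bytes in the encoding.
func newBlock(links []DAGLink, heads ...string) *Block {
	sort.Strings(heads)
	headLinks := make([]Link, 0, len(heads))
	for _, h := range heads {
		headLinks = append(headLinks, Link{Cid: h})
	}
	sort.Slice(links, func(i, j int) bool { return links[i].Cid < links[j].Cid })
	if len(headLinks) == 0 {
		headLinks = nil
	}
	if len(links) == 0 {
		links = nil
	}
	return &Block{Heads: headLinks, Links: links}
}

func main() {
	// A delete block now carries no field links at all: the previous
	// heads are supplied once, rather than re-listed from the headstore
	// and duplicated as "_head" DAGLinks by applyDelete.
	del := newBlock(nil, "bafy...b", "bafy...a") // placeholder CIDs
	fmt.Printf("heads=%v links=%v\n", del.Heads, del.Links)
}
```

Sorting keeps the encoded block, and hence its CID, identical across peers, while nil-ing out empty slices is what the "remove duplication of block heads on delete" data-format note is measuring.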
@@ -142,24 +139,11 @@ func (c *collection) applyDelete( txn := mustGetContextTxn(ctx) dsKey := primaryKey.ToDataStoreKey() - headset := clock.NewHeadSet( - txn.Headstore(), - dsKey.WithFieldID(core.COMPOSITE_NAMESPACE).ToHeadStoreKey(), - ) - cids, _, err := headset.List(ctx) - if err != nil { - return err - } - - dagLinks := make([]coreblock.DAGLink, len(cids)) - for i, cid := range cids { - dagLinks[i] = coreblock.NewDAGLink(core.HEAD, cidlink.Link{Cid: cid}) - } link, b, err := c.saveCompositeToMerkleCRDT( ctx, dsKey, - dagLinks, + []coreblock.DAGLink{}, client.Deleted, ) if err != nil { diff --git a/internal/db/fetcher/versioned.go b/internal/db/fetcher/versioned.go index 80b71cdd88..2660664bcd 100644 --- a/internal/db/fetcher/versioned.go +++ b/internal/db/fetcher/versioned.go @@ -312,20 +312,14 @@ func (vf *VersionedFetcher) seekNext(c cid.Cid, topParent bool) error { } // only seekNext on parent if we have a HEAD link - l, ok := block.GetLinkByName(core.HEAD) - if ok { - err := vf.seekNext(l.Cid, true) + if len(block.Heads) != 0 { + err := vf.seekNext(block.Heads[0].Cid, true) if err != nil { return err } } - // loop over links and ignore head links for _, l := range block.Links { - if l.Name == core.HEAD { - continue - } - err := vf.seekNext(l.Link.Cid, false) if err != nil { return err @@ -362,12 +356,7 @@ func (vf *VersionedFetcher) merge(c cid.Cid) error { } // handle subgraphs - // loop over links and ignore head links for _, l := range block.Links { - if l.Name == core.HEAD { - continue - } - // get node subBlock, err := vf.getDAGBlock(l.Link.Cid) if err != nil { diff --git a/internal/db/merge.go b/internal/db/merge.go index 58c89cfc4e..8b08b333e4 100644 --- a/internal/db/merge.go +++ b/internal/db/merge.go @@ -204,8 +204,8 @@ func (mp *mergeProcessor) loadComposites( // In this case, we also need to walk back the merge target's DAG until we reach a common block. 
if block.Delta.GetPriority() >= mt.headHeight { mp.composites.PushFront(block) - for _, prevCid := range block.GetHeadLinks() { - err := mp.loadComposites(ctx, prevCid, mt) + for _, head := range block.Heads { + err := mp.loadComposites(ctx, head.Cid, mt) if err != nil { return err } @@ -213,21 +213,19 @@ func (mp *mergeProcessor) loadComposites( } else { newMT := newMergeTarget() for _, b := range mt.heads { - for _, link := range b.Links { - if link.Name == core.HEAD { - nd, err := mp.blockLS.Load(linking.LinkContext{Ctx: ctx}, link.Link, coreblock.SchemaPrototype) - if err != nil { - return err - } - - childBlock, err := coreblock.GetFromNode(nd) - if err != nil { - return err - } - - newMT.heads[link.Cid] = childBlock - newMT.headHeight = childBlock.Delta.GetPriority() + for _, link := range b.Heads { + nd, err := mp.blockLS.Load(linking.LinkContext{Ctx: ctx}, link, coreblock.SchemaPrototype) + if err != nil { + return err } + + childBlock, err := coreblock.GetFromNode(nd) + if err != nil { + return err + } + + newMT.heads[link.Cid] = childBlock + newMT.headHeight = childBlock.Delta.GetPriority() } } return mp.loadComposites(ctx, blockCid, newMT) @@ -387,10 +385,6 @@ func (mp *mergeProcessor) processBlock( } for _, link := range dagBlock.Links { - if link.Name == core.HEAD { - continue - } - nd, err := mp.blockLS.Load(linking.LinkContext{Ctx: ctx}, link.Link, coreblock.SchemaPrototype) if err != nil { return err @@ -449,7 +443,6 @@ func (mp *mergeProcessor) initCRDTForType(field string) (merklecrdt.MerkleCRDT, mp.txn, schemaVersionKey, mp.dsKey.WithFieldID(core.COMPOSITE_NAMESPACE), - "", ) mp.mCRDTs[field] = mcrdt return mcrdt, nil diff --git a/internal/db/merge_test.go b/internal/db/merge_test.go index 55cc172634..f9478be536 100644 --- a/internal/db/merge_test.go +++ b/internal/db/merge_test.go @@ -15,6 +15,7 @@ import ( "testing" "time" + "github.com/ipfs/go-cid" "github.com/ipld/go-ipld-prime" "github.com/ipld/go-ipld-prime/linking" cidlink "github.com/ipld/go-ipld-prime/linking/cid" @@ -22,7 +23,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/event" - "github.com/sourcenetwork/defradb/internal/core" coreblock "github.com/sourcenetwork/defradb/internal/core/block" "github.com/sourcenetwork/defradb/internal/core/crdt" ) @@ -185,7 +185,7 @@ func TestMerge_DualBranchWithOneIncomplete_CouldNotFindCID(t *testing.T) { Cid: compInfo3.link.Cid, SchemaRoot: col.SchemaRoot(), }) - require.ErrorContains(t, err, "could not find bafyreifi4sa4auy4uk6psoljwuzqepgwqzsjk3h6p2xjdtsty7bdjz4uzm") + require.ErrorContains(t, err, "could not find bafyreibdsxukhmkwea4hdd2svvf6fijvuhdxeil2bf75v4wzooldb74uwq") // Verify the document was created with the expected values doc, err := col.Get(ctx, docID, false) @@ -228,14 +228,13 @@ type compositeInfo struct { } func (d *dagBuilder) generateCompositeUpdate(lsys *linking.LinkSystem, fields map[string]any, from compositeInfo) (compositeInfo, error) { - links := []coreblock.DAGLink{} + heads := []cid.Cid{} newPriority := from.height + 1 if from.link.ByteLen() != 0 { - links = append(links, coreblock.DAGLink{ - Name: core.HEAD, - Link: from.link, - }) + heads = append(heads, from.link.Cid) } + + links := []coreblock.DAGLink{} for field, val := range fields { d.fieldsHeight[field]++ // Generate new Block and save to lsys @@ -260,18 +259,16 @@ func (d *dagBuilder) generateCompositeUpdate(lsys *linking.LinkSystem, fields ma }) } - compositeBlock := coreblock.Block{ - Delta: crdt.CRDT{ - CompositeDAGDelta: 
&crdt.CompositeDAGDelta{ - DocID: d.docID, - FieldName: "", - Priority: newPriority, - SchemaVersionID: d.col.Schema().VersionID, - Status: 1, - }, + compositeBlock := coreblock.New( + &crdt.CompositeDAGDelta{ + DocID: d.docID, + Priority: newPriority, + SchemaVersionID: d.col.Schema().VersionID, + Status: 1, }, - Links: links, - } + links, + heads..., + ) compositeBlockLink, err := lsys.Store(ipld.LinkContext{}, coreblock.GetLinkPrototype(), compositeBlock.GenerateNode()) if err != nil { diff --git a/internal/merkle/clock/clock.go b/internal/merkle/clock/clock.go index b5b55e1374..9f0f6e77fb 100644 --- a/internal/merkle/clock/clock.go +++ b/internal/merkle/clock/clock.go @@ -217,7 +217,7 @@ func encryptBlock( return nil, err } clonedCRDT.SetData(bytes) - return &coreblock.Block{Delta: clonedCRDT, Links: block.Links}, nil + return &coreblock.Block{Delta: clonedCRDT, Heads: block.Heads, Links: block.Links}, nil } // ProcessBlock merges the delta CRDT and updates the state accordingly. @@ -241,22 +241,14 @@ func (mc *MerkleClock) updateHeads( ) error { priority := block.Delta.GetPriority() - // check if we have any HEAD links - hasHeads := false - for _, l := range block.Links { - if l.Name == core.HEAD { - hasHeads = true - break - } - } - if !hasHeads { // reached the bottom, at a leaf + if len(block.Heads) == 0 { // reached the bottom, at a leaf err := mc.headset.Write(ctx, blockLink.Cid, priority) if err != nil { return NewErrAddingHead(blockLink.Cid, err) } } - for _, l := range block.Links { + for _, l := range block.AllLinks() { linkCid := l.Cid isHead, err := mc.headset.IsHead(ctx, linkCid) if err != nil { diff --git a/internal/merkle/clock/clock_test.go b/internal/merkle/clock/clock_test.go index fe008971e4..f3f2f6e155 100644 --- a/internal/merkle/clock/clock_test.go +++ b/internal/merkle/clock/clock_test.go @@ -97,8 +97,8 @@ func TestMerkleClockPutBlockWithHeads(t *testing.T) { return } - if len(block.Links) != 1 { - t.Errorf("putBlock has incorrect number of heads. Have %v, want %v", len(block.Links), 1) + if len(block.Heads) != 1 { + t.Errorf("putBlock has incorrect number of heads. 
Have %v, want %v", len(block.Heads), 1) } } diff --git a/internal/merkle/crdt/composite.go b/internal/merkle/crdt/composite.go index f8211b9f0a..bf277dddba 100644 --- a/internal/merkle/crdt/composite.go +++ b/internal/merkle/crdt/composite.go @@ -35,13 +35,11 @@ func NewMerkleCompositeDAG( store Stores, schemaVersionKey core.CollectionSchemaVersionKey, key core.DataStoreKey, - fieldName string, ) *MerkleCompositeDAG { compositeDag := corecrdt.NewCompositeDAG( store.Datastore(), schemaVersionKey, key, - fieldName, ) clock := clock.NewMerkleClock(store.Headstore(), store.Blockstore(), store.Encstore(), diff --git a/internal/merkle/crdt/merklecrdt.go b/internal/merkle/crdt/merklecrdt.go index 457ba0f200..d6e73208d5 100644 --- a/internal/merkle/crdt/merklecrdt.go +++ b/internal/merkle/crdt/merklecrdt.go @@ -103,7 +103,6 @@ func InstanceWithStore( store, schemaVersionKey, key, - fieldName, ), nil } return nil, client.NewErrUnknownCRDT(cType) diff --git a/internal/planner/commit.go b/internal/planner/commit.go index 76825afe15..d9551dc7ab 100644 --- a/internal/planner/commit.go +++ b/internal/planner/commit.go @@ -12,7 +12,6 @@ package planner import ( cid "github.com/ipfs/go-cid" - cidlink "github.com/ipld/go-ipld-prime/linking/cid" "github.com/sourcenetwork/immutable" @@ -221,7 +220,7 @@ func (n *dagScanNode) Next() (bool, error) { return false, err } - currentValue, heads, err := n.dagBlockToNodeDoc(dagBlock) + currentValue, err := n.dagBlockToNodeDoc(dagBlock) if err != nil { return false, err } @@ -239,10 +238,10 @@ func (n *dagScanNode) Next() (bool, error) { if !n.commitSelect.Depth.HasValue() || n.depthVisited < n.commitSelect.Depth.Value() { // Insert the newly fetched cids into the slice of queued items, in reverse order // so that the last new cid will be at the front of the slice - n.queuedCids = append(make([]*cid.Cid, len(heads)), n.queuedCids...) + n.queuedCids = append(make([]*cid.Cid, len(dagBlock.Heads)), n.queuedCids...) - for i, head := range heads { - n.queuedCids[len(heads)-i-1] = &head.Cid + for i, head := range dagBlock.Heads { + n.queuedCids[len(dagBlock.Heads)-i-1] = &head.Cid } } @@ -293,11 +292,11 @@ which returns the current dag commit for the stored CRDT value. All the dagScanNode endpoints use similar structures */ -func (n *dagScanNode) dagBlockToNodeDoc(block *coreblock.Block) (core.Doc, []cidlink.Link, error) { +func (n *dagScanNode) dagBlockToNodeDoc(block *coreblock.Block) (core.Doc, error) { commit := n.commitSelect.DocumentMapping.NewDoc() link, err := block.GenerateLink() if err != nil { - return core.Doc{}, nil, err + return core.Doc{}, err } n.commitSelect.DocumentMapping.SetFirstOfName(&commit, request.CidFieldName, link.String()) @@ -323,17 +322,17 @@ func (n *dagScanNode) dagBlockToNodeDoc(block *coreblock.Block) (core.Doc, []cid }, ) if err != nil { - return core.Doc{}, nil, err + return core.Doc{}, err } if len(cols) == 0 { - return core.Doc{}, nil, client.NewErrCollectionNotFoundForSchemaVersion(schemaVersionId) + return core.Doc{}, client.NewErrCollectionNotFoundForSchemaVersion(schemaVersionId) } // Because we only care about the schema, we can safely take the first - the schema is the same // for all in the set. 
field, ok := cols[0].Definition().GetFieldByName(fName) if !ok { - return core.Doc{}, nil, client.NewErrFieldNotExist(fName) + return core.Doc{}, client.NewErrFieldNotExist(fName) } fieldID = field.ID.String() } @@ -362,10 +361,10 @@ func (n *dagScanNode) dagBlockToNodeDoc(block *coreblock.Block) (core.Doc, []cid }, ) if err != nil { - return core.Doc{}, nil, err + return core.Doc{}, err } if len(cols) == 0 { - return core.Doc{}, nil, client.NewErrCollectionNotFoundForSchemaVersion(schemaVersionId) + return core.Doc{}, client.NewErrCollectionNotFoundForSchemaVersion(schemaVersionId) } // WARNING: This will become incorrect once we allow multiple collections to share the same schema, @@ -374,31 +373,34 @@ func (n *dagScanNode) dagBlockToNodeDoc(block *coreblock.Block) (core.Doc, []cid n.commitSelect.DocumentMapping.SetFirstOfName(&commit, request.CollectionIDFieldName, int64(cols[0].ID())) - heads := make([]cidlink.Link, 0) - // links linksIndexes := n.commitSelect.DocumentMapping.IndexesByName[request.LinksFieldName] for _, linksIndex := range linksIndexes { - links := make([]core.Doc, len(block.Links)) + links := make([]core.Doc, len(block.Heads)+len(block.Links)) linksMapping := n.commitSelect.DocumentMapping.ChildMappings[linksIndex] - for i, l := range block.Links { + i := 0 + for _, l := range block.Heads { + link := linksMapping.NewDoc() + linksMapping.SetFirstOfName(&link, request.LinksNameFieldName, "_head") + linksMapping.SetFirstOfName(&link, request.LinksCidFieldName, l.Cid.String()) + + links[i] = link + i++ + } + + for _, l := range block.Links { link := linksMapping.NewDoc() linksMapping.SetFirstOfName(&link, request.LinksNameFieldName, l.Name) linksMapping.SetFirstOfName(&link, request.LinksCidFieldName, l.Link.Cid.String()) links[i] = link + i++ } commit.Fields[linksIndex] = links } - for _, l := range block.Links { - if l.Name == "_head" { - heads = append(heads, l.Link) - } - } - - return commit, heads, nil + return commit, nil } diff --git a/net/sync_dag.go b/net/sync_dag.go index e9c17035bf..11e021f239 100644 --- a/net/sync_dag.go +++ b/net/sync_dag.go @@ -72,9 +72,9 @@ func loadBlockLinks(ctx context.Context, lsys linking.LinkSystem, block *coreblo cancel() } - for _, lnk := range block.Links { + for _, lnk := range block.AllLinks() { wg.Add(1) - go func(lnk coreblock.DAGLink) { + go func(lnk cidlink.Link) { defer wg.Done() if ctx.Err() != nil { return diff --git a/tests/integration/encryption/commit_test.go b/tests/integration/encryption/commit_test.go index da493e097f..592f1e0501 100644 --- a/tests/integration/encryption/commit_test.go +++ b/tests/integration/encryption/commit_test.go @@ -48,7 +48,7 @@ func TestDocEncryption_WithEncryptionOnLWWCRDT_ShouldStoreCommitsDeltaEncrypted( Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreidkuvcdxxkyoeapnmttu6l2vk43qnm3zuzpxegbifpj6w24jrvrxq", + "cid": "bafyreiba7bxnqquldhojcnkak7afamaxssvjk4uav4ev4lwqgixarvvp4i", "collectionID": int64(1), "delta": encrypt(testUtils.CBORValue(21), john21DocID, ""), "docID": john21DocID, @@ -58,7 +58,7 @@ func TestDocEncryption_WithEncryptionOnLWWCRDT_ShouldStoreCommitsDeltaEncrypted( "links": []map[string]any{}, }, { - "cid": "bafyreihdlv4fvvptetghxzyerxt4jc4zgprecybhoijrfjuyxqe55qw3x4", + "cid": "bafyreigawlzc5zi2juad5vldnwvels5qsehymb45maoeamdbckajwcao24", "collectionID": int64(1), "delta": encrypt(testUtils.CBORValue("John"), john21DocID, ""), "docID": john21DocID, @@ -68,7 +68,7 @@ func TestDocEncryption_WithEncryptionOnLWWCRDT_ShouldStoreCommitsDeltaEncrypted( 
"links": []map[string]any{}, }, { - "cid": "bafyreie5jegw4c2hg56bbiv6cgxmfz336jruukjakbjuyapockfnn6b5le", + "cid": "bafyreidl77w6pex7uworttm5bsqyvli5qxqoqy3q2n2xqor5vrqfr3woee", "collectionID": int64(1), "delta": nil, "docID": john21DocID, @@ -77,11 +77,11 @@ func TestDocEncryption_WithEncryptionOnLWWCRDT_ShouldStoreCommitsDeltaEncrypted( "height": int64(1), "links": []map[string]any{ { - "cid": "bafyreidkuvcdxxkyoeapnmttu6l2vk43qnm3zuzpxegbifpj6w24jrvrxq", + "cid": "bafyreiba7bxnqquldhojcnkak7afamaxssvjk4uav4ev4lwqgixarvvp4i", "name": "age", }, { - "cid": "bafyreihdlv4fvvptetghxzyerxt4jc4zgprecybhoijrfjuyxqe55qw3x4", + "cid": "bafyreigawlzc5zi2juad5vldnwvels5qsehymb45maoeamdbckajwcao24", "name": "name", }, }, diff --git a/tests/integration/encryption/peer_test.go b/tests/integration/encryption/peer_test.go index 9f5b875586..480f0a66c9 100644 --- a/tests/integration/encryption/peer_test.go +++ b/tests/integration/encryption/peer_test.go @@ -61,7 +61,7 @@ func TestDocEncryptionPeer_UponSync_ShouldSyncEncryptedDAG(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreidkuvcdxxkyoeapnmttu6l2vk43qnm3zuzpxegbifpj6w24jrvrxq", + "cid": "bafyreiba7bxnqquldhojcnkak7afamaxssvjk4uav4ev4lwqgixarvvp4i", "collectionID": int64(1), "delta": encrypt(testUtils.CBORValue(21), john21DocID, ""), "docID": john21DocID, @@ -71,7 +71,7 @@ func TestDocEncryptionPeer_UponSync_ShouldSyncEncryptedDAG(t *testing.T) { "links": []map[string]any{}, }, { - "cid": "bafyreihdlv4fvvptetghxzyerxt4jc4zgprecybhoijrfjuyxqe55qw3x4", + "cid": "bafyreigawlzc5zi2juad5vldnwvels5qsehymb45maoeamdbckajwcao24", "collectionID": int64(1), "delta": encrypt(testUtils.CBORValue("John"), john21DocID, ""), "docID": john21DocID, @@ -81,7 +81,7 @@ func TestDocEncryptionPeer_UponSync_ShouldSyncEncryptedDAG(t *testing.T) { "links": []map[string]any{}, }, { - "cid": "bafyreie5jegw4c2hg56bbiv6cgxmfz336jruukjakbjuyapockfnn6b5le", + "cid": "bafyreidl77w6pex7uworttm5bsqyvli5qxqoqy3q2n2xqor5vrqfr3woee", "collectionID": int64(1), "delta": nil, "docID": john21DocID, @@ -90,11 +90,11 @@ func TestDocEncryptionPeer_UponSync_ShouldSyncEncryptedDAG(t *testing.T) { "height": int64(1), "links": []map[string]any{ { - "cid": "bafyreidkuvcdxxkyoeapnmttu6l2vk43qnm3zuzpxegbifpj6w24jrvrxq", + "cid": "bafyreiba7bxnqquldhojcnkak7afamaxssvjk4uav4ev4lwqgixarvvp4i", "name": "age", }, { - "cid": "bafyreihdlv4fvvptetghxzyerxt4jc4zgprecybhoijrfjuyxqe55qw3x4", + "cid": "bafyreigawlzc5zi2juad5vldnwvels5qsehymb45maoeamdbckajwcao24", "name": "name", }, }, diff --git a/tests/integration/mutation/create/with_version_test.go b/tests/integration/mutation/create/with_version_test.go index b19db8ee4c..b0124851cd 100644 --- a/tests/integration/mutation/create/with_version_test.go +++ b/tests/integration/mutation/create/with_version_test.go @@ -40,7 +40,7 @@ func TestMutationCreate_ReturnsVersionCID(t *testing.T) { { "_version": []map[string]any{ { - "cid": "bafyreia5ph2hvwebdsxe7m2f6bwuq7ngwxzqp7esiuzjihtcz2jswma6xu", + "cid": "bafyreib7afkd5hepl45wdtwwpai433bhnbd3ps5m2rv3masctda7b6mmxe", }, }, }, diff --git a/tests/integration/query/commits/simple_test.go b/tests/integration/query/commits/simple_test.go index a1be0a7986..6a69578ada 100644 --- a/tests/integration/query/commits/simple_test.go +++ b/tests/integration/query/commits/simple_test.go @@ -37,13 +37,13 @@ func TestQueryCommits(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": 
"bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", }, }, }, @@ -82,22 +82,22 @@ func TestQueryCommitsMultipleDocs(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreihpasbgxcoxmzv5bp6euq3lbaoh5y5wjbbgfthtxqs3nppk36kebq", + "cid": "bafyreid47btbb7bvj66qqa52wi773nst4dvd2556v34tejjiorrgcakv2a", }, { - "cid": "bafyreihe3jydldbt7mvkiae6asrchdxajzkxwid6syi436nmrpcqhwt7xa", + "cid": "bafyreie7p6vhgmdjn6q7t4lw7o5hv5lgt52jq3kmfyvi6a5vdt6spigcqm", }, { - "cid": "bafyreihb5eo3luqoojztdmxtg3tdpvm6pc64mkyrzlefbdauker5qlnop4", + "cid": "bafyreihyy3s7xfno4fryoqexigpsj4csqzkxf6e6kch7e5h24pgz3wq3pq", }, { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", }, }, }, @@ -130,15 +130,15 @@ func TestQueryCommitsWithSchemaVersionIDField(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", "schemaVersionId": "bafkreicprhqxzlw3akyssz2v6pifwfueavp7jq2yj3dghapi3qcq6achs4", }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", "schemaVersionId": "bafkreicprhqxzlw3akyssz2v6pifwfueavp7jq2yj3dghapi3qcq6achs4", }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "schemaVersionId": "bafkreicprhqxzlw3akyssz2v6pifwfueavp7jq2yj3dghapi3qcq6achs4", }, }, @@ -364,7 +364,7 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreiay56ley5dvsptso37fsonfcrtbuphwlfhi67d2y52vzzexba6vua", + "cid": "bafyreih5h6i6ohfsgrcjtg76iarebqcurpaft73gpobl2z2cfsvihsgdqu", "collectionID": int64(1), "delta": testUtils.CBORValue(22), "docID": "bae-c9fb0fa4-1195-589c-aa54-e68333fb90b3", @@ -373,13 +373,13 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "height": int64(2), "links": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", "name": "_head", }, }, }, { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", "collectionID": int64(1), "delta": testUtils.CBORValue(21), "docID": "bae-c9fb0fa4-1195-589c-aa54-e68333fb90b3", @@ -389,7 +389,7 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "links": []map[string]any{}, }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", "collectionID": int64(1), "delta": testUtils.CBORValue("John"), "docID": 
"bae-c9fb0fa4-1195-589c-aa54-e68333fb90b3", @@ -399,7 +399,7 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "links": []map[string]any{}, }, { - "cid": "bafyreicsavx5oblk6asfoqyssz4ge2gf5ekfouvi7o6l7adly275op5oje", + "cid": "bafyreiale6qsjc7qewod3c6h2odwamfwcf7vt4zlqtw7ldcm57xdkgxja4", "collectionID": int64(1), "delta": nil, "docID": "bae-c9fb0fa4-1195-589c-aa54-e68333fb90b3", @@ -408,17 +408,17 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "height": int64(2), "links": []map[string]any{ { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "name": "_head", }, { - "cid": "bafyreiay56ley5dvsptso37fsonfcrtbuphwlfhi67d2y52vzzexba6vua", + "cid": "bafyreih5h6i6ohfsgrcjtg76iarebqcurpaft73gpobl2z2cfsvihsgdqu", "name": "age", }, }, }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "collectionID": int64(1), "delta": nil, "docID": "bae-c9fb0fa4-1195-589c-aa54-e68333fb90b3", @@ -427,12 +427,12 @@ func TestQuery_CommitsWithAllFieldsWithUpdate_NoError(t *testing.T) { "height": int64(1), "links": []map[string]any{ { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", - "name": "name", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", + "name": "age", }, { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", - "name": "age", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", + "name": "name", }, }, }, diff --git a/tests/integration/query/commits/with_cid_test.go b/tests/integration/query/commits/with_cid_test.go index b051e2b289..ade22beb46 100644 --- a/tests/integration/query/commits/with_cid_test.go +++ b/tests/integration/query/commits/with_cid_test.go @@ -38,7 +38,7 @@ func TestQueryCommitsWithCid(t *testing.T) { testUtils.Request{ Request: `query { commits( - cid: "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q" + cid: "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy" ) { cid } @@ -46,7 +46,7 @@ func TestQueryCommitsWithCid(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", }, }, }, @@ -73,7 +73,7 @@ func TestQueryCommitsWithCidForFieldCommit(t *testing.T) { testUtils.Request{ Request: `query { commits( - cid: "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q" + cid: "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy" ) { cid } @@ -81,7 +81,7 @@ func TestQueryCommitsWithCidForFieldCommit(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", }, }, }, diff --git a/tests/integration/query/commits/with_delete_test.go b/tests/integration/query/commits/with_delete_test.go new file mode 100644 index 0000000000..cd52f5d861 --- /dev/null +++ b/tests/integration/query/commits/with_delete_test.go @@ -0,0 +1,78 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package commits + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryCommits_AfterDocDeletion_ShouldStillFetch(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + age: Int + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "age": 21 + }`, + }, + testUtils.DeleteDoc{ + DocID: 0, + }, + testUtils.Request{ + Request: ` + query { + commits(fieldId: "C") { + fieldName + links { + name + } + } + } + `, + Results: map[string]any{ + "commits": []map[string]any{ + { + "fieldName": nil, + "links": []map[string]any{ + { + "name": "_head", + }, + }, + }, + { + "fieldName": nil, + "links": []map[string]any{ + { + "name": "age", + }, + { + "name": "name", + }, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/commits/with_depth_test.go b/tests/integration/query/commits/with_depth_test.go index 71d805307f..4b0e9f379f 100644 --- a/tests/integration/query/commits/with_depth_test.go +++ b/tests/integration/query/commits/with_depth_test.go @@ -37,13 +37,13 @@ func TestQueryCommitsWithDepth1(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", }, }, }, @@ -84,16 +84,16 @@ func TestQueryCommitsWithDepth1WithUpdate(t *testing.T) { "commits": []map[string]any{ { // "Age" field head - "cid": "bafyreiay56ley5dvsptso37fsonfcrtbuphwlfhi67d2y52vzzexba6vua", + "cid": "bafyreih5h6i6ohfsgrcjtg76iarebqcurpaft73gpobl2z2cfsvihsgdqu", "height": int64(2), }, { // "Name" field head (unchanged from create) - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", "height": int64(1), }, { - "cid": "bafyreicsavx5oblk6asfoqyssz4ge2gf5ekfouvi7o6l7adly275op5oje", + "cid": "bafyreiale6qsjc7qewod3c6h2odwamfwcf7vt4zlqtw7ldcm57xdkgxja4", "height": int64(2), }, }, @@ -142,27 +142,27 @@ func TestQueryCommitsWithDepth2WithUpdate(t *testing.T) { "commits": []map[string]any{ { // Composite head - "cid": "bafyreibpiyrugj4gku336wp5lvcw3fgyxqpjvugm3t4z7v5h3ulwxs3x2y", + "cid": "bafyreig3qosmew7pkq27dijjvwe35jjpvh3ed3f5dxpzemtqhw7xka7hga", "height": int64(3), }, { // Composite head -1 - "cid": "bafyreiay56ley5dvsptso37fsonfcrtbuphwlfhi67d2y52vzzexba6vua", + "cid": "bafyreih5h6i6ohfsgrcjtg76iarebqcurpaft73gpobl2z2cfsvihsgdqu", "height": int64(2), }, { // "Name" field head (unchanged from create) - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", "height": int64(1), }, { // "Age" field head - "cid": "bafyreieydjk3sqrxs5aqhsiy7ct25vu5qtbtpmzbytzee4apeidx6dq7je", + "cid": "bafyreiahq3xwdjmp2kq7jernt2axomiq3kuef2rik7k3fnn2pb242a5oha", "height": int64(3), }, { // "Age" field head -1 
- "cid": "bafyreicsavx5oblk6asfoqyssz4ge2gf5ekfouvi7o6l7adly275op5oje", + "cid": "bafyreiale6qsjc7qewod3c6h2odwamfwcf7vt4zlqtw7ldcm57xdkgxja4", "height": int64(2), }, }, @@ -202,22 +202,22 @@ func TestQueryCommitsWithDepth1AndMultipleDocs(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreibzfxudkhrcsz7lsgtb637gzyegsdkehlugvb2dg76smhhnkg46dm", + "cid": "bafyreiddiyec4bz2pqiav2bivqcqttr4kyniajrqxf66tybhq4cm36exi4", }, { - "cid": "bafyreiabiarng2rcvkfgoirnnyy3yvd7yi3c66akovkbmhivrxvdawtcna", + "cid": "bafyreicotst6miuynokequzsm7zjm42aw3zsfor7cvw7gja7hut3f5v6qq", }, { - "cid": "bafyreibubqh6ltxbxmtrtd5oczaekcfw5knqfyocnwkdwhpjatl7johoue", + "cid": "bafyreigiyb2tronlgaz4j5alh2a52gy7j5fi2ebvvf6r3dircvp6qkf4um", }, { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_cid_test.go b/tests/integration/query/commits/with_doc_id_cid_test.go index bba28526a8..7be8265cb9 100644 --- a/tests/integration/query/commits/with_doc_id_cid_test.go +++ b/tests/integration/query/commits/with_doc_id_cid_test.go @@ -108,7 +108,7 @@ func TestQueryCommitsWithDocIDAndCidWithUpdate(t *testing.T) { Request: ` { commits( docID: "bae-c9fb0fa4-1195-589c-aa54-e68333fb90b3", - cid: "bafyreicsavx5oblk6asfoqyssz4ge2gf5ekfouvi7o6l7adly275op5oje" + cid: "bafyreiale6qsjc7qewod3c6h2odwamfwcf7vt4zlqtw7ldcm57xdkgxja4" ) { cid } @@ -116,7 +116,7 @@ func TestQueryCommitsWithDocIDAndCidWithUpdate(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreicsavx5oblk6asfoqyssz4ge2gf5ekfouvi7o6l7adly275op5oje", + "cid": "bafyreiale6qsjc7qewod3c6h2odwamfwcf7vt4zlqtw7ldcm57xdkgxja4", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_count_test.go b/tests/integration/query/commits/with_doc_id_count_test.go index 607822abec..65d80844a9 100644 --- a/tests/integration/query/commits/with_doc_id_count_test.go +++ b/tests/integration/query/commits/with_doc_id_count_test.go @@ -38,15 +38,15 @@ func TestQueryCommitsWithDocIDAndLinkCount(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", "_count": 0, }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", "_count": 0, }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "_count": 2, }, }, diff --git a/tests/integration/query/commits/with_doc_id_field_test.go b/tests/integration/query/commits/with_doc_id_field_test.go index 3b36e9f9d8..7ecfa7f742 100644 --- a/tests/integration/query/commits/with_doc_id_field_test.go +++ b/tests/integration/query/commits/with_doc_id_field_test.go @@ -125,7 +125,7 @@ func TestQueryCommitsWithDocIDAndFieldId(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": 
"bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", }, }, }, @@ -159,7 +159,7 @@ func TestQueryCommitsWithDocIDAndCompositeFieldId(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_limit_offset_test.go b/tests/integration/query/commits/with_doc_id_limit_offset_test.go index 2fb095a263..b3cb06eb73 100644 --- a/tests/integration/query/commits/with_doc_id_limit_offset_test.go +++ b/tests/integration/query/commits/with_doc_id_limit_offset_test.go @@ -58,10 +58,10 @@ func TestQueryCommitsWithDocIDAndLimitAndOffset(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreibpiyrugj4gku336wp5lvcw3fgyxqpjvugm3t4z7v5h3ulwxs3x2y", + "cid": "bafyreig3qosmew7pkq27dijjvwe35jjpvh3ed3f5dxpzemtqhw7xka7hga", }, { - "cid": "bafyreiay56ley5dvsptso37fsonfcrtbuphwlfhi67d2y52vzzexba6vua", + "cid": "bafyreih5h6i6ohfsgrcjtg76iarebqcurpaft73gpobl2z2cfsvihsgdqu", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_limit_test.go b/tests/integration/query/commits/with_doc_id_limit_test.go index 5a569f3295..86e21f28c0 100644 --- a/tests/integration/query/commits/with_doc_id_limit_test.go +++ b/tests/integration/query/commits/with_doc_id_limit_test.go @@ -51,10 +51,10 @@ func TestQueryCommitsWithDocIDAndLimit(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreibpiyrugj4gku336wp5lvcw3fgyxqpjvugm3t4z7v5h3ulwxs3x2y", + "cid": "bafyreig3qosmew7pkq27dijjvwe35jjpvh3ed3f5dxpzemtqhw7xka7hga", }, { - "cid": "bafyreiay56ley5dvsptso37fsonfcrtbuphwlfhi67d2y52vzzexba6vua", + "cid": "bafyreih5h6i6ohfsgrcjtg76iarebqcurpaft73gpobl2z2cfsvihsgdqu", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go b/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go index 389ec5c291..dd56fb2704 100644 --- a/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go +++ b/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go @@ -59,11 +59,11 @@ func TestQueryCommitsWithDocIDAndOrderAndLimitAndOffset(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreicsavx5oblk6asfoqyssz4ge2gf5ekfouvi7o6l7adly275op5oje", + "cid": "bafyreiale6qsjc7qewod3c6h2odwamfwcf7vt4zlqtw7ldcm57xdkgxja4", "height": int64(2), }, { - "cid": "bafyreibpiyrugj4gku336wp5lvcw3fgyxqpjvugm3t4z7v5h3ulwxs3x2y", + "cid": "bafyreig3qosmew7pkq27dijjvwe35jjpvh3ed3f5dxpzemtqhw7xka7hga", "height": int64(3), }, }, diff --git a/tests/integration/query/commits/with_doc_id_order_test.go b/tests/integration/query/commits/with_doc_id_order_test.go index ba0ff67def..89562b77eb 100644 --- a/tests/integration/query/commits/with_doc_id_order_test.go +++ b/tests/integration/query/commits/with_doc_id_order_test.go @@ -45,23 +45,23 @@ func TestQueryCommitsWithDocIDAndOrderHeightDesc(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreiay56ley5dvsptso37fsonfcrtbuphwlfhi67d2y52vzzexba6vua", + "cid": "bafyreih5h6i6ohfsgrcjtg76iarebqcurpaft73gpobl2z2cfsvihsgdqu", "height": int64(2), }, { - "cid": "bafyreicsavx5oblk6asfoqyssz4ge2gf5ekfouvi7o6l7adly275op5oje", + "cid": "bafyreiale6qsjc7qewod3c6h2odwamfwcf7vt4zlqtw7ldcm57xdkgxja4", "height": int64(2), }, { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": 
"bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", "height": int64(1), }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", "height": int64(1), }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "height": int64(1), }, }, @@ -102,23 +102,23 @@ func TestQueryCommitsWithDocIDAndOrderHeightAsc(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", "height": int64(1), }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", "height": int64(1), }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "height": int64(1), }, { - "cid": "bafyreiay56ley5dvsptso37fsonfcrtbuphwlfhi67d2y52vzzexba6vua", + "cid": "bafyreih5h6i6ohfsgrcjtg76iarebqcurpaft73gpobl2z2cfsvihsgdqu", "height": int64(2), }, { - "cid": "bafyreicsavx5oblk6asfoqyssz4ge2gf5ekfouvi7o6l7adly275op5oje", + "cid": "bafyreiale6qsjc7qewod3c6h2odwamfwcf7vt4zlqtw7ldcm57xdkgxja4", "height": int64(2), }, }, @@ -159,25 +159,25 @@ func TestQueryCommitsWithDocIDAndOrderCidDesc(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreih5h6i6ohfsgrcjtg76iarebqcurpaft73gpobl2z2cfsvihsgdqu", + "height": int64(2), + }, + { + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", "height": int64(1), }, { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", "height": int64(1), }, { - "cid": "bafyreicsavx5oblk6asfoqyssz4ge2gf5ekfouvi7o6l7adly275op5oje", + "cid": "bafyreiale6qsjc7qewod3c6h2odwamfwcf7vt4zlqtw7ldcm57xdkgxja4", "height": int64(2), }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "height": int64(1), }, - { - "cid": "bafyreiay56ley5dvsptso37fsonfcrtbuphwlfhi67d2y52vzzexba6vua", - "height": int64(2), - }, }, }, }, @@ -216,25 +216,25 @@ func TestQueryCommitsWithDocIDAndOrderCidAsc(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreiay56ley5dvsptso37fsonfcrtbuphwlfhi67d2y52vzzexba6vua", - "height": int64(2), - }, - { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "height": int64(1), }, { - "cid": "bafyreicsavx5oblk6asfoqyssz4ge2gf5ekfouvi7o6l7adly275op5oje", + "cid": "bafyreiale6qsjc7qewod3c6h2odwamfwcf7vt4zlqtw7ldcm57xdkgxja4", "height": int64(2), }, { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", "height": int64(1), }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", "height": int64(1), }, + { + "cid": "bafyreih5h6i6ohfsgrcjtg76iarebqcurpaft73gpobl2z2cfsvihsgdqu", + "height": int64(2), + }, }, }, }, @@ -287,39 +287,39 @@ func 
TestQueryCommitsWithDocIDAndOrderAndMultiUpdatesCidAsc(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", "height": int64(1), }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", "height": int64(1), }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "height": int64(1), }, { - "cid": "bafyreiay56ley5dvsptso37fsonfcrtbuphwlfhi67d2y52vzzexba6vua", + "cid": "bafyreih5h6i6ohfsgrcjtg76iarebqcurpaft73gpobl2z2cfsvihsgdqu", "height": int64(2), }, { - "cid": "bafyreicsavx5oblk6asfoqyssz4ge2gf5ekfouvi7o6l7adly275op5oje", + "cid": "bafyreiale6qsjc7qewod3c6h2odwamfwcf7vt4zlqtw7ldcm57xdkgxja4", "height": int64(2), }, { - "cid": "bafyreibpiyrugj4gku336wp5lvcw3fgyxqpjvugm3t4z7v5h3ulwxs3x2y", + "cid": "bafyreig3qosmew7pkq27dijjvwe35jjpvh3ed3f5dxpzemtqhw7xka7hga", "height": int64(3), }, { - "cid": "bafyreieydjk3sqrxs5aqhsiy7ct25vu5qtbtpmzbytzee4apeidx6dq7je", + "cid": "bafyreiahq3xwdjmp2kq7jernt2axomiq3kuef2rik7k3fnn2pb242a5oha", "height": int64(3), }, { - "cid": "bafyreic6rjkn7qsoxpboviode2l64ahg4yajsrb3p25zeooisnaxcweccu", + "cid": "bafyreig3nogimi6exh2uokpayevfeds3sseixk657dj2asusys7avyu7wu", "height": int64(4), }, { - "cid": "bafyreieifkfzufdvlvni4o5pbdtuvm3w6x4fnqyelyq2owvsliiwjvddpi", + "cid": "bafyreibhg2q3574zycclsiooz6h2ofafdqmpqqglodk4je5esegaosy3q4", "height": int64(4), }, }, diff --git a/tests/integration/query/commits/with_doc_id_test.go b/tests/integration/query/commits/with_doc_id_test.go index 26fa6136af..9ace3966c0 100644 --- a/tests/integration/query/commits/with_doc_id_test.go +++ b/tests/integration/query/commits/with_doc_id_test.go @@ -65,13 +65,13 @@ func TestQueryCommitsWithDocID(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", }, }, }, @@ -107,23 +107,23 @@ func TestQueryCommitsWithDocIDAndLinks(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", "links": []map[string]any{}, }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", "links": []map[string]any{}, }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "links": []map[string]any{ { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", - "name": "name", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", + "name": "age", }, { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", - "name": "age", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", + "name": "name", }, }, }, @@ -165,23 +165,23 @@ func 
TestQueryCommitsWithDocIDAndUpdate(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreiay56ley5dvsptso37fsonfcrtbuphwlfhi67d2y52vzzexba6vua", + "cid": "bafyreih5h6i6ohfsgrcjtg76iarebqcurpaft73gpobl2z2cfsvihsgdqu", "height": int64(2), }, { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", "height": int64(1), }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", "height": int64(1), }, { - "cid": "bafyreicsavx5oblk6asfoqyssz4ge2gf5ekfouvi7o6l7adly275op5oje", + "cid": "bafyreiale6qsjc7qewod3c6h2odwamfwcf7vt4zlqtw7ldcm57xdkgxja4", "height": int64(2), }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "height": int64(1), }, }, @@ -228,45 +228,45 @@ func TestQueryCommitsWithDocIDAndUpdateAndLinks(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreiay56ley5dvsptso37fsonfcrtbuphwlfhi67d2y52vzzexba6vua", + "cid": "bafyreih5h6i6ohfsgrcjtg76iarebqcurpaft73gpobl2z2cfsvihsgdqu", "links": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", "name": "_head", }, }, }, { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", "links": []map[string]any{}, }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", "links": []map[string]any{}, }, { - "cid": "bafyreicsavx5oblk6asfoqyssz4ge2gf5ekfouvi7o6l7adly275op5oje", + "cid": "bafyreiale6qsjc7qewod3c6h2odwamfwcf7vt4zlqtw7ldcm57xdkgxja4", "links": []map[string]any{ { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "name": "_head", }, { - "cid": "bafyreiay56ley5dvsptso37fsonfcrtbuphwlfhi67d2y52vzzexba6vua", + "cid": "bafyreih5h6i6ohfsgrcjtg76iarebqcurpaft73gpobl2z2cfsvihsgdqu", "name": "age", }, }, }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "links": []map[string]any{ { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", - "name": "name", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", + "name": "age", }, { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", - "name": "age", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", + "name": "name", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_typename_test.go b/tests/integration/query/commits/with_doc_id_typename_test.go index c825933d83..17cf68007a 100644 --- a/tests/integration/query/commits/with_doc_id_typename_test.go +++ b/tests/integration/query/commits/with_doc_id_typename_test.go @@ -38,15 +38,15 @@ func TestQueryCommitsWithDocIDWithTypeName(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", "__typename": "Commit", }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": 
"bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", "__typename": "Commit", }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "__typename": "Commit", }, }, diff --git a/tests/integration/query/commits/with_field_test.go b/tests/integration/query/commits/with_field_test.go index 43d23c462c..177d2c300d 100644 --- a/tests/integration/query/commits/with_field_test.go +++ b/tests/integration/query/commits/with_field_test.go @@ -69,7 +69,7 @@ func TestQueryCommitsWithFieldId(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", }, }, }, @@ -103,7 +103,7 @@ func TestQueryCommitsWithCompositeFieldId(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", }, }, }, @@ -138,7 +138,7 @@ func TestQueryCommitsWithCompositeFieldIdWithReturnedSchemaVersionID(t *testing. Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "schemaVersionId": "bafkreicprhqxzlw3akyssz2v6pifwfueavp7jq2yj3dghapi3qcq6achs4", }, }, diff --git a/tests/integration/query/commits/with_group_test.go b/tests/integration/query/commits/with_group_test.go index 372097932c..15ed115d5a 100644 --- a/tests/integration/query/commits/with_group_test.go +++ b/tests/integration/query/commits/with_group_test.go @@ -92,10 +92,10 @@ func TestQueryCommitsWithGroupByHeightWithChild(t *testing.T) { "height": int64(2), "_group": []map[string]any{ { - "cid": "bafyreiay56ley5dvsptso37fsonfcrtbuphwlfhi67d2y52vzzexba6vua", + "cid": "bafyreih5h6i6ohfsgrcjtg76iarebqcurpaft73gpobl2z2cfsvihsgdqu", }, { - "cid": "bafyreicsavx5oblk6asfoqyssz4ge2gf5ekfouvi7o6l7adly275op5oje", + "cid": "bafyreiale6qsjc7qewod3c6h2odwamfwcf7vt4zlqtw7ldcm57xdkgxja4", }, }, }, @@ -103,13 +103,13 @@ func TestQueryCommitsWithGroupByHeightWithChild(t *testing.T) { "height": int64(1), "_group": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", }, }, }, @@ -147,7 +147,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", "_group": []map[string]any{ { "height": int64(1), @@ -155,7 +155,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }, }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", "_group": []map[string]any{ { "height": int64(1), @@ -163,7 +163,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }, }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + 
"cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "_group": []map[string]any{ { "height": int64(1), diff --git a/tests/integration/query/commits/with_null_input_test.go b/tests/integration/query/commits/with_null_input_test.go index 84a257b332..912bfc36ca 100644 --- a/tests/integration/query/commits/with_null_input_test.go +++ b/tests/integration/query/commits/with_null_input_test.go @@ -37,13 +37,13 @@ func TestQueryCommitsWithNullDepth(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", }, }, }, @@ -75,13 +75,13 @@ func TestQueryCommitsWithNullCID(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", }, }, }, @@ -113,13 +113,13 @@ func TestQueryCommitsWithNullFieldID(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", }, }, }, @@ -151,13 +151,13 @@ func TestQueryCommitsWithNullOrder(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", }, }, }, @@ -189,13 +189,13 @@ func TestQueryCommitsWithNullOrderField(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", }, }, }, @@ -227,13 +227,13 @@ func TestQueryCommitsWithNullLimit(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": 
"bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", }, }, }, @@ -265,13 +265,13 @@ func TestQueryCommitsWithNullOffset(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", }, }, }, @@ -303,13 +303,13 @@ func TestQueryCommitsWithNullGroupBy(t *testing.T) { Results: map[string]any{ "commits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", }, { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", }, { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", }, }, }, diff --git a/tests/integration/query/latest_commits/with_doc_id_field_test.go b/tests/integration/query/latest_commits/with_doc_id_field_test.go index e96c2d2604..19843e121d 100644 --- a/tests/integration/query/latest_commits/with_doc_id_field_test.go +++ b/tests/integration/query/latest_commits/with_doc_id_field_test.go @@ -73,7 +73,7 @@ func TestQueryLatestCommitsWithDocIDAndFieldId(t *testing.T) { Results: map[string]any{ "latestCommits": []map[string]any{ { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", "links": []map[string]any{}, }, }, @@ -110,15 +110,15 @@ func TestQueryLatestCommitsWithDocIDAndCompositeFieldId(t *testing.T) { Results: map[string]any{ "latestCommits": []map[string]any{ { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "links": []map[string]any{ { - "cid": "bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", - "name": "name", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", + "name": "age", }, { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", - "name": "age", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", + "name": "name", }, }, }, diff --git a/tests/integration/query/latest_commits/with_doc_id_test.go b/tests/integration/query/latest_commits/with_doc_id_test.go index 9db8206b2e..6250409402 100644 --- a/tests/integration/query/latest_commits/with_doc_id_test.go +++ b/tests/integration/query/latest_commits/with_doc_id_test.go @@ -39,15 +39,15 @@ func TestQueryLatestCommitsWithDocID(t *testing.T) { Results: map[string]any{ "latestCommits": []map[string]any{ { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "links": []map[string]any{ { - "cid": 
"bafyreic2sba5sffkfnt32wfeoaw4qsqozjb5acwwtouxuzllb3aymjwute", - "name": "name", + "cid": "bafyreif6dqbkr7t37jcjfxxrjnxt7cspxzvs7qwlbtjca57cc663he4s7e", + "name": "age", }, { - "cid": "bafyreifzyy7bmpx2eywj4lznxzrzrvh6vrz6l7bhthkpexdq3wtho3vz6i", - "name": "age", + "cid": "bafyreigtnj6ntulcilkmin4pgukjwv3nwglqpiiyddz3dyfexdbltze7sy", + "name": "name", }, }, }, @@ -80,7 +80,7 @@ func TestQueryLatestCommitsWithDocIDWithSchemaVersionIDField(t *testing.T) { Results: map[string]any{ "latestCommits": []map[string]any{ { - "cid": "bafyreihv7jqe32wsuff5vwzlp7izoo6pqg6kgqf5edknp3mqm3344gu35q", + "cid": "bafyreia2vlbfkcbyogdjzmbqcjneabwwwtw7ti2xbd7yor5mbu2sk4pcoy", "schemaVersionId": "bafkreicprhqxzlw3akyssz2v6pifwfueavp7jq2yj3dghapi3qcq6achs4", }, }, diff --git a/tests/integration/query/one_to_many/with_cid_doc_id_test.go b/tests/integration/query/one_to_many/with_cid_doc_id_test.go index 9811a4c54a..a073209c62 100644 --- a/tests/integration/query/one_to_many/with_cid_doc_id_test.go +++ b/tests/integration/query/one_to_many/with_cid_doc_id_test.go @@ -104,7 +104,7 @@ func TestQueryOneToManyWithCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafyreieytzivxtdjslivrsim22xkszg7sxy4onmp737u5uxf7v2cxvzikm" + cid: "bafyreiasdwoa6u2irf3x3mpogkzlzfhyl7ugzzigiyzgecnlryqmiqf4fi" docID: "bae-064f13c1-7726-5d53-8eec-c395d94da4d0" ) { name @@ -181,7 +181,7 @@ func TestQueryOneToManyWithChildUpdateAndFirstCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafyreieytzivxtdjslivrsim22xkszg7sxy4onmp737u5uxf7v2cxvzikm", + cid: "bafyreiasdwoa6u2irf3x3mpogkzlzfhyl7ugzzigiyzgecnlryqmiqf4fi", docID: "bae-064f13c1-7726-5d53-8eec-c395d94da4d0" ) { name @@ -256,7 +256,7 @@ func TestQueryOneToManyWithParentUpdateAndFirstCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafyreieytzivxtdjslivrsim22xkszg7sxy4onmp737u5uxf7v2cxvzikm", + cid: "bafyreiasdwoa6u2irf3x3mpogkzlzfhyl7ugzzigiyzgecnlryqmiqf4fi", docID: "bae-064f13c1-7726-5d53-8eec-c395d94da4d0" ) { name @@ -331,7 +331,7 @@ func TestQueryOneToManyWithParentUpdateAndLastCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Book ( - cid: "bafyreia2sayewutxhcewm2ek2p6nwwg6zzeugrxsnwjyvam4pplydkjmz4", + cid: "bafyreibisnyjsjsmfd5hgaxt5sn4ktjwr2tisyl7khxr753sa35un3zyoq", docID: "bae-064f13c1-7726-5d53-8eec-c395d94da4d0" ) { name diff --git a/tests/integration/query/simple/with_cid_doc_id_test.go b/tests/integration/query/simple/with_cid_doc_id_test.go index 4cb1614ddc..0536099128 100644 --- a/tests/integration/query/simple/with_cid_doc_id_test.go +++ b/tests/integration/query/simple/with_cid_doc_id_test.go @@ -93,7 +93,7 @@ func TestQuerySimpleWithCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafyreia5ph2hvwebdsxe7m2f6bwuq7ngwxzqp7esiuzjihtcz2jswma6xu", + cid: "bafyreib7afkd5hepl45wdtwwpai433bhnbd3ps5m2rv3masctda7b6mmxe", docID: "bae-6845cfdf-cb0f-56a3-be3a-b5a67be5fbdc" ) { name @@ -137,7 +137,7 @@ func TestQuerySimpleWithUpdateAndFirstCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafyreia5ph2hvwebdsxe7m2f6bwuq7ngwxzqp7esiuzjihtcz2jswma6xu", + cid: "bafyreib7afkd5hepl45wdtwwpai433bhnbd3ps5m2rv3masctda7b6mmxe", docID: "bae-6845cfdf-cb0f-56a3-be3a-b5a67be5fbdc" ) { name @@ -181,7 +181,7 @@ func TestQuerySimpleWithUpdateAndLastCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafyreid2idg2eod3zv7zqxnv7pvrbcmlxiizho3s6xnk6c5zmxemi5gcxu", + cid: 
"bafyreig2j5zwcozovwzrxr7ivfnptlj7urlabzjbv4lls64hlkh6jmhfim", docID: "bae-6845cfdf-cb0f-56a3-be3a-b5a67be5fbdc" ) { name @@ -230,7 +230,7 @@ func TestQuerySimpleWithUpdateAndMiddleCidAndDocID(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafyreid2idg2eod3zv7zqxnv7pvrbcmlxiizho3s6xnk6c5zmxemi5gcxu", + cid: "bafyreig2j5zwcozovwzrxr7ivfnptlj7urlabzjbv4lls64hlkh6jmhfim", docID: "bae-6845cfdf-cb0f-56a3-be3a-b5a67be5fbdc" ) { name @@ -274,7 +274,7 @@ func TestQuerySimpleWithUpdateAndFirstCidAndDocIDAndSchemaVersion(t *testing.T) testUtils.Request{ Request: `query { Users ( - cid: "bafyreia5ph2hvwebdsxe7m2f6bwuq7ngwxzqp7esiuzjihtcz2jswma6xu", + cid: "bafyreib7afkd5hepl45wdtwwpai433bhnbd3ps5m2rv3masctda7b6mmxe", docID: "bae-6845cfdf-cb0f-56a3-be3a-b5a67be5fbdc" ) { name @@ -334,7 +334,7 @@ func TestCidAndDocIDQuery_ContainsPNCounterWithIntKind_NoError(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafyreib4e2q5uvv6iabkja4s4yoep6b4ifs7rclkteqhwrwr7lkzn56po4", + cid: "bafyreihsqayh6zvmjrvmma3sjmrb4bkeiyy6l56nt6y2t2tm4xajkif3gu", docID: "bae-d8cb53d4-ac5a-5c55-8306-64df633d400d" ) { name @@ -388,7 +388,7 @@ func TestCidAndDocIDQuery_ContainsPNCounterWithFloatKind_NoError(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafyreia4qbtcckxhyaplunzj5waoli5btfpwyhsx5fpk7o62xawuqjxjti", + cid: "bafyreigkdjnvkpqfjoqoke3aqc3b6ibb45xjuxx5djpk7c6tart2lw3dcm", docID: "bae-d420ebcd-023a-5800-ae2e-8ea89442318e" ) { name @@ -437,7 +437,7 @@ func TestCidAndDocIDQuery_ContainsPCounterWithIntKind_NoError(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafyreibkt7zaipwafro2wajbsqbipma27g6hc2uedquhtjfwyulmwzz4ry", + cid: "bafyreihxjjootrhxhapn563gsoagmtpld6uqhzf5mtn3fmmzp5sawadheu", docID: "bae-d8cb53d4-ac5a-5c55-8306-64df633d400d" ) { name @@ -486,7 +486,7 @@ func TestCidAndDocIDQuery_ContainsPCounterWithFloatKind_NoError(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafyreihbfsbje6lqmot6xpv2hcseduxu5rv5hf3adsmk7hlkzfuaa6lj5e", + cid: "bafyreihf2nipoyoxu3wjicqj6pftndjnnxljdw6nephkamgwyw5n6lcwca", docID: "bae-d420ebcd-023a-5800-ae2e-8ea89442318e" ) { name diff --git a/tests/integration/query/simple/with_version_test.go b/tests/integration/query/simple/with_version_test.go index d0eda9db43..a197666fe3 100644 --- a/tests/integration/query/simple/with_version_test.go +++ b/tests/integration/query/simple/with_version_test.go @@ -47,14 +47,14 @@ func TestQuerySimpleWithEmbeddedLatestCommit(t *testing.T) { "Age": int64(21), "_version": []map[string]any{ { - "cid": "bafyreiby7drdzfsg4wwo7f6vkdqhurbe74s4lhayn3k3226zvkgwjd2fbu", + "cid": "bafyreidwu4r345cq63vwr7p3hjekedge457y3tp32w7run76uj3le2zx34", "links": []map[string]any{ { - "cid": "bafyreid4sasigytiflrh3rupyevo6wy43b6mlfi4jwkjjwvohgjcd3oscu", + "cid": "bafyreidqxuofjmcmo6cmoesfcozsixgh7pghbl57mcstlqqausvswutzzm", "name": "Age", }, { - "cid": "bafyreieg3p2kpyxwiowskvb3pp35nedzawmapjuib7glrvszcgmv6z37fm", + "cid": "bafyreiercz7wngub3kxluee2mevdtis7la6piunceuol63qv5u32ngm3zu", "name": "Name", }, }, @@ -184,14 +184,14 @@ func TestQuerySimpleWithMultipleAliasedEmbeddedLatestCommit(t *testing.T) { "Age": int64(21), "_version": []map[string]any{ { - "cid": "bafyreiby7drdzfsg4wwo7f6vkdqhurbe74s4lhayn3k3226zvkgwjd2fbu", + "cid": "bafyreidwu4r345cq63vwr7p3hjekedge457y3tp32w7run76uj3le2zx34", "L1": []map[string]any{ { - "cid": "bafyreid4sasigytiflrh3rupyevo6wy43b6mlfi4jwkjjwvohgjcd3oscu", + "cid": "bafyreidqxuofjmcmo6cmoesfcozsixgh7pghbl57mcstlqqausvswutzzm", "name": "Age", 
}, { - "cid": "bafyreieg3p2kpyxwiowskvb3pp35nedzawmapjuib7glrvszcgmv6z37fm", + "cid": "bafyreiercz7wngub3kxluee2mevdtis7la6piunceuol63qv5u32ngm3zu", "name": "Name", }, }, @@ -259,7 +259,7 @@ func TestQuery_WithAllCommitFields_NoError(t *testing.T) { "_docID": docID, "_version": []map[string]any{ { - "cid": "bafyreiby7drdzfsg4wwo7f6vkdqhurbe74s4lhayn3k3226zvkgwjd2fbu", + "cid": "bafyreidwu4r345cq63vwr7p3hjekedge457y3tp32w7run76uj3le2zx34", "collectionID": int64(1), "delta": nil, "docID": "bae-d4303725-7db9-53d2-b324-f3ee44020e52", @@ -268,11 +268,11 @@ func TestQuery_WithAllCommitFields_NoError(t *testing.T) { "height": int64(1), "links": []map[string]any{ { - "cid": "bafyreid4sasigytiflrh3rupyevo6wy43b6mlfi4jwkjjwvohgjcd3oscu", + "cid": "bafyreidqxuofjmcmo6cmoesfcozsixgh7pghbl57mcstlqqausvswutzzm", "name": "Age", }, { - "cid": "bafyreieg3p2kpyxwiowskvb3pp35nedzawmapjuib7glrvszcgmv6z37fm", + "cid": "bafyreiercz7wngub3kxluee2mevdtis7la6piunceuol63qv5u32ngm3zu", "name": "Name", }, }, @@ -340,7 +340,7 @@ func TestQuery_WithAllCommitFieldsWithUpdate_NoError(t *testing.T) { "_docID": docID, "_version": []map[string]any{ { - "cid": "bafyreigfstknvmsl77pg443lqqf2g64y7hr575tts5c4nxuzk3dynffkem", + "cid": "bafyreichg2fm3tzwibfzakwmzguk5wlmyw7vmyhz6zt6gqu37pnzywk564", "collectionID": int64(1), "delta": nil, "docID": docID, @@ -349,18 +349,18 @@ func TestQuery_WithAllCommitFieldsWithUpdate_NoError(t *testing.T) { "height": int64(2), "links": []map[string]any{ { - "cid": "bafyreiby7drdzfsg4wwo7f6vkdqhurbe74s4lhayn3k3226zvkgwjd2fbu", + "cid": "bafyreidwu4r345cq63vwr7p3hjekedge457y3tp32w7run76uj3le2zx34", "name": "_head", }, { - "cid": "bafyreiapjg22e47sanhjtqgu453mvmxcfcl4ksrcoctyfl6nfsh3xwfcvm", + "cid": "bafyreib7cyhflarpdlelordkmfaqppggvgtxl67tykpzrvsof7e764q5cy", "name": "Age", }, }, "schemaVersionId": "bafkreigqmcqzkbg3elpe24vfza4rjle2r6cxu7ihzvg56aov57crhaebry", }, { - "cid": "bafyreiby7drdzfsg4wwo7f6vkdqhurbe74s4lhayn3k3226zvkgwjd2fbu", + "cid": "bafyreidwu4r345cq63vwr7p3hjekedge457y3tp32w7run76uj3le2zx34", "collectionID": int64(1), "delta": nil, "docID": docID, @@ -369,11 +369,11 @@ func TestQuery_WithAllCommitFieldsWithUpdate_NoError(t *testing.T) { "height": int64(1), "links": []map[string]any{ { - "cid": "bafyreid4sasigytiflrh3rupyevo6wy43b6mlfi4jwkjjwvohgjcd3oscu", + "cid": "bafyreidqxuofjmcmo6cmoesfcozsixgh7pghbl57mcstlqqausvswutzzm", "name": "Age", }, { - "cid": "bafyreieg3p2kpyxwiowskvb3pp35nedzawmapjuib7glrvszcgmv6z37fm", + "cid": "bafyreiercz7wngub3kxluee2mevdtis7la6piunceuol63qv5u32ngm3zu", "name": "Name", }, }, From 01030fd558e46b2ca9f18472dd91778fc0f278e7 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Fri, 4 Oct 2024 12:50:40 -0400 Subject: [PATCH 51/71] fix: Treat explicitly set nil values like omitted values (#3101) ## Relevant issue(s) Resolves #3100 ## Description Treat explicitly set nil values like omitted values. 
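To make the intended behaviour concrete, here is a minimal, self-contained sketch (illustration only, not part of this patch) of why dropping nil-valued properties before encoding makes an explicit null and an omitted property serialize to the same bytes, and therefore produce the same docID. It uses fxamacker/cbor directly; `CanonicalEncOptions` is only a stand-in for the deterministic options the real `CborEncodingOptions` returns.

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/fxamacker/cbor/v2"
)

func main() {
	// Deterministic encoding options; a stand-in for CborEncodingOptions().
	enc, err := cbor.CanonicalEncOptions().EncMode()
	if err != nil {
		panic(err)
	}

	omitted := map[string]any{"name": "John"}
	explicitNil := map[string]any{"name": "John", "age": nil}

	// Drop nil-valued properties, mirroring what toMap(true) now does for
	// document fields before serialization.
	for k, v := range explicitNil {
		if v == nil {
			delete(explicitNil, k)
		}
	}

	a, _ := enc.Marshal(omitted)
	b, _ := enc.Marshal(explicitNil)

	// Identical bytes, hence an identical docID for both documents.
	fmt.Println(bytes.Equal(a, b)) // true
}
```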
---
 client/document.go                            | 41 ++++-
 internal/core/crdt/lwwreg.go                  | 18 ++-
 internal/db/collection.go                     |  5 +
 tests/integration/events.go                   |  6 +
 .../one_to_one/with_null_value_test.go        | 149 ++++++++++++++++++
 .../mutation/create/with_null_value_test.go   | 54 +++++++
 6 files changed, 263 insertions(+), 10 deletions(-)
 create mode 100644 tests/integration/mutation/create/field_kinds/one_to_one/with_null_value_test.go
 create mode 100644 tests/integration/mutation/create/with_null_value_test.go

diff --git a/client/document.go b/client/document.go
index fa4c842343..a49a60307b 100644
--- a/client/document.go
+++ b/client/document.go
@@ -27,6 +27,18 @@ import (
 	ccid "github.com/sourcenetwork/defradb/internal/core/cid"
 )
 
+func init() {
+	enc, err := CborEncodingOptions().EncMode()
+	if err != nil {
+		panic(err)
+	}
+
+	CborNil, err = enc.Marshal(nil)
+	if err != nil {
+		panic(err)
+	}
+}
+
 // CborEncodingOptions returns the set of cbor encoding options to be used whenever
 // encoding defra documents.
 //
@@ -42,6 +54,10 @@ func CborEncodingOptions() cbor.EncOptions {
 	return opts
 }
 
+// CborNil is the cbor-encoded value of `nil` using the options returned from
+// [CborEncodingOptions()].
+var CborNil []byte
+
 // This is the main implementation starting point for accessing the internal Document API
 // which provides API access to the various operations available for Documents, i.e. CRUD.
 //
@@ -697,7 +713,12 @@ func (doc *Document) Values() map[Field]*FieldValue {
 
 // Bytes returns the document as a serialized byte array using CBOR encoding.
 func (doc *Document) Bytes() ([]byte, error) {
-	docMap, err := doc.toMap()
+	// We want to omit properties with nil values from the map, as setting a
+	// property to nil should result in the same serialized value as omitting
+	// the property from the document.
+	//
+	// This is particularly important for docID generation.
+	docMap, err := doc.toMap(true)
 	if err != nil {
 		return nil, err
 	}
@@ -713,7 +734,7 @@
 // Note: This representation should not be used for any cryptographic operations,
 // such as signatures, or hashes as it does not guarantee canonical representation or ordering.
 func (doc *Document) String() (string, error) {
-	docMap, err := doc.toMap()
+	docMap, err := doc.toMap(false)
 	if err != nil {
 		return "", err
 	}
@@ -734,7 +755,7 @@ func (doc *Document) ToMap() (map[string]any, error) {
 // ToJSONPatch returns a json patch that can be used to update
 // a document by calling SetWithJSON.
 func (doc *Document) ToJSONPatch() ([]byte, error) {
-	docMap, err := doc.toMap()
+	docMap, err := doc.toMap(false)
 	if err != nil {
 		return nil, err
 	}
@@ -758,9 +779,11 @@ func (doc *Document) Clean() {
 	}
 }
 
-// converts the document into a map[string]any
-// including any sub documents
-func (doc *Document) toMap() (map[string]any, error) {
+// converts the document into a map[string]any including any sub documents.
+//
+// If excludeEmpty is true, properties with nil values will be omitted from
+// the result.
+func (doc *Document) toMap(excludeEmpty bool) (map[string]any, error) {
 	doc.mu.RLock()
 	defer doc.mu.RUnlock()
 	docMap := make(map[string]any)
@@ -770,9 +793,13 @@
 			return nil, NewErrFieldNotExist(v.Name())
 		}
 
+		if excludeEmpty && value.Value() == nil {
+			continue
+		}
+
 		if value.IsDocument() {
 			subDoc := value.Value().(*Document)
-			subDocMap, err := subDoc.toMap()
+			subDocMap, err := subDoc.toMap(excludeEmpty)
 			if err != nil {
 				return nil, err
 			}

diff --git a/internal/core/crdt/lwwreg.go b/internal/core/crdt/lwwreg.go
index edfff9ca05..e27a4c5ace 100644
--- a/internal/core/crdt/lwwreg.go
+++ b/internal/core/crdt/lwwreg.go
@@ -16,6 +16,7 @@ import (
 	ds "github.com/ipfs/go-datastore"
 
+	"github.com/sourcenetwork/defradb/client"
 	"github.com/sourcenetwork/defradb/datastore"
 	"github.com/sourcenetwork/defradb/errors"
 	"github.com/sourcenetwork/defradb/internal/core"
@@ -144,9 +145,20 @@ func (reg LWWRegister) setValue(ctx context.Context, val []byte, priority uint64
 		}
 	}
 
-	err = reg.store.Put(ctx, key.ToDS(), val)
-	if err != nil {
-		return NewErrFailedToStoreValue(err)
+	if bytes.Equal(val, client.CborNil) {
+		// If the value is the cbor encoding of nil, the property is nil and
+		// there is no reason for the field datastore key to exist. Omitting the
+		// key saves space and is consistent with what would be found if the
+		// user omitted the property on create.
+		err = reg.store.Delete(ctx, key.ToDS())
+		if err != nil {
+			return err
+		}
+	} else {
+		err = reg.store.Put(ctx, key.ToDS(), val)
+		if err != nil {
+			return NewErrFailedToStoreValue(err)
+		}
 	}
 
 	return reg.setPriority(ctx, reg.key, priority)

diff --git a/internal/db/collection.go b/internal/db/collection.go
index dd1a413946..785c96641e 100644
--- a/internal/db/collection.go
+++ b/internal/db/collection.go
@@ -657,6 +657,11 @@ func (c *collection) save(
 			relationFieldDescription, isSecondaryRelationID := fieldDescription.GetSecondaryRelationField(c.Definition())
 			if isSecondaryRelationID {
+				if val.Value() == nil {
+					// If the value (relation) is nil, we don't need to check for any documents already linked to it.
+					continue
+				}
+
 				primaryId := val.Value().(string)
 
 				err = c.patchPrimaryDoc(

diff --git a/tests/integration/events.go b/tests/integration/events.go
index bf004b99aa..5e57a97294 100644
--- a/tests/integration/events.go
+++ b/tests/integration/events.go
@@ -335,6 +335,12 @@ func getEventsForCreateDoc(s *state, action CreateDoc) map[string]struct{} {
 	// check for any secondary relation fields that could publish an event
 	for f, v := range doc.Values() {
+		if v.Value() == nil {
+			// If the new relation value is nil there will be no related document
+			// to get an event for.
+			continue
+		}
+
 		field, ok := def.GetFieldByName(f.Name())
 		if !ok {
 			continue // ignore unknown field
 		}

diff --git a/tests/integration/mutation/create/field_kinds/one_to_one/with_null_value_test.go b/tests/integration/mutation/create/field_kinds/one_to_one/with_null_value_test.go
new file mode 100644
index 0000000000..a6421aec5c
--- /dev/null
+++ b/tests/integration/mutation/create/field_kinds/one_to_one/with_null_value_test.go
@@ -0,0 +1,149 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+ +package one_to_one + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationCreateOneToOne_WithExplicitNullOnPrimarySide(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + author: Author @primary + } + + type Author { + name: String + published: Book + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "How to Be a Canadian", + "author": null + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Secrets at Maple Syrup Farm", + "author": null + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + DocMap: map[string]any{ + "name": "Will Ferguson", + "published": testUtils.NewDocIndex(0, 0), + }, + }, + testUtils.Request{ + Request: ` + query { + Book { + name + author { + name + } + } + }`, + Results: map[string]any{ + "Book": []map[string]any{ + { + "name": "Secrets at Maple Syrup Farm", + "author": nil, + }, + { + "name": "How to Be a Canadian", + "author": map[string]any{ + "name": "Will Ferguson", + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationCreateOneToOne_WithExplicitNullOnSecondarySide(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + author: Author + } + + type Author { + name: String + published: Book @primary + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "How to Be a Canadian", + "author": null + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Secrets at Maple Syrup Farm", + "author": null + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + DocMap: map[string]any{ + "name": "Will Ferguson", + "published": testUtils.NewDocIndex(0, 0), + }, + }, + testUtils.Request{ + Request: ` + query { + Book { + name + author { + name + } + } + }`, + Results: map[string]any{ + "Book": []map[string]any{ + { + "name": "Secrets at Maple Syrup Farm", + "author": nil, + }, + { + "name": "How to Be a Canadian", + "author": map[string]any{ + "name": "Will Ferguson", + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/create/with_null_value_test.go b/tests/integration/mutation/create/with_null_value_test.go new file mode 100644 index 0000000000..97e1d6cb58 --- /dev/null +++ b/tests/integration/mutation/create/with_null_value_test.go @@ -0,0 +1,54 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package create + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationCreate_WithOmittedValueAndExplicitNullValue(t *testing.T) { + test := testUtils.TestCase{ + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // Collection.Save would treat the second create as an update, and so + // is excluded from this test. 
+			testUtils.CollectionNamedMutationType,
+			testUtils.GQLRequestMutationType,
+		}),
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Users {
+						name: String
+						age: Int
+					}
+				`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "John"
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "John",
+					"age": null
+				}`,
+				ExpectedError: "a document with the given ID already exist",
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}

From bc93bff4c24f5c5afacdf57c9a607fd112d5f750 Mon Sep 17 00:00:00 2001
From: Islam Aliev
Date: Fri, 4 Oct 2024 23:20:41 +0200
Subject: [PATCH 52/71] feat: Enable Indexing of array fields (#3092)

## Relevant issue(s)

Resolves #2279

## Description

Enable indexing of array fields.

This change also adds an `Equal` method to `NormalValue`.

---
 client/index.go                               |   6 +-
 client/normal_array.go                        |  62 +
 client/normal_array_of_nillables.go           |  70 +
 client/normal_nillable_array.go               |  44 +
 client/normal_nillable_array_of_nillables.go  |  51 +
 client/normal_nillable_scalar.go              |  28 +
 client/normal_scalar.go                       |  39 +
 client/normal_value.go                        |   3 +
 client/normal_value_test.go                   | 1385 +++++++++++++++++
 client/normal_void.go                         |   5 +
 client/schema_field_description.go            |  23 +
 client/schema_field_description_test.go       |  77 +
 internal/core/encoding.go                     |   6 +
 internal/core/key.go                          |  19 +
 internal/core/key_test.go                     |  68 +
 internal/db/collection.go                     |   5 +
 internal/db/collection_index.go               |  19 +
 internal/db/fetcher/errors.go                 |   7 +
 internal/db/fetcher/indexer.go                |  11 +-
 internal/db/fetcher/indexer_iterators.go      | 237 ++-
 internal/db/index.go                          | 471 +++++-
 internal/db/index_test.go                     |  92 +-
 internal/db/indexed_docs_test.go              | 280 +++-
 internal/planner/filter/copy_field.go         |   4 +-
 internal/planner/filter/merge.go              |  19 +-
 internal/planner/filter/merge_test.go         |   6 +-
 internal/planner/filter/split.go              |   2 +-
 internal/planner/scan.go                      |  18 +-
 internal/planner/type_join.go                 |   8 +-
 internal/utils/slice/slice.go                 |  50 +
 internal/utils/slice/slice_test.go            | 100 ++
 .../integration/index/array_composite_test.go | 597 +++++++
 tests/integration/index/array_test.go         | 1103 +++++++++++++
 .../index/array_unique_composite_test.go      | 204 +++
 tests/integration/index/array_unique_test.go  | 395 +++++
 35 files changed, 5216 insertions(+), 298 deletions(-)
 create mode 100644 client/schema_field_description_test.go
 create mode 100644 internal/utils/slice/slice.go
 create mode 100644 internal/utils/slice/slice_test.go
 create mode 100644 tests/integration/index/array_composite_test.go
 create mode 100644 tests/integration/index/array_test.go
 create mode 100644 tests/integration/index/array_unique_composite_test.go
 create mode 100644 tests/integration/index/array_unique_test.go

diff --git a/client/index.go b/client/index.go
index 6f87626c98..b09b258224 100644
--- a/client/index.go
+++ b/client/index.go
@@ -38,9 +38,11 @@ type IndexDescription struct {
 
 // CollectionIndex is an interface for indexing documents in a collection.
 type CollectionIndex interface {
-	// Save indexes a document by storing it
+	// Save indexes a document by storing indexed field values.
+	// It doesn't remove previous values; for that, [Update] should be used.
 	Save(context.Context, datastore.Txn, *Document) error
-	// Update updates an existing document in the index
+	// Update updates an existing document in the index.
+	// It removes the previous indexed field values and stores the new ones.
Update(context.Context, datastore.Txn, *Document, *Document) error // Delete deletes an existing document from the index Delete(context.Context, datastore.Txn, *Document) error diff --git a/client/normal_array.go b/client/normal_array.go index 00133a0f74..b560a4bd9a 100644 --- a/client/normal_array.go +++ b/client/normal_array.go @@ -41,6 +41,10 @@ func (v normalBoolArray) BoolArray() ([]bool, bool) { return v.val, true } +func (v normalBoolArray) Equal(other NormalValue) bool { + return areNormalArraysEqual(v.val, other.BoolArray) +} + type normalIntArray struct { baseArrayNormalValue[[]int64] } @@ -49,6 +53,10 @@ func (v normalIntArray) IntArray() ([]int64, bool) { return v.val, true } +func (v normalIntArray) Equal(other NormalValue) bool { + return areNormalArraysEqual(v.val, other.IntArray) +} + type normalFloatArray struct { baseArrayNormalValue[[]float64] } @@ -57,6 +65,10 @@ func (v normalFloatArray) FloatArray() ([]float64, bool) { return v.val, true } +func (v normalFloatArray) Equal(other NormalValue) bool { + return areNormalArraysEqual(v.val, other.FloatArray) +} + type normalStringArray struct { baseArrayNormalValue[[]string] } @@ -65,6 +77,10 @@ func (v normalStringArray) StringArray() ([]string, bool) { return v.val, true } +func (v normalStringArray) Equal(other NormalValue) bool { + return areNormalArraysEqual(v.val, other.StringArray) +} + type normalBytesArray struct { baseArrayNormalValue[[][]byte] } @@ -73,6 +89,13 @@ func (v normalBytesArray) BytesArray() ([][]byte, bool) { return v.val, true } +func (v normalBytesArray) Equal(other NormalValue) bool { + if otherVal, ok := other.BytesArray(); ok { + return are2DArraysEqual(v.val, otherVal) + } + return false +} + type normalTimeArray struct { baseArrayNormalValue[[]time.Time] } @@ -81,6 +104,10 @@ func (v normalTimeArray) TimeArray() ([]time.Time, bool) { return v.val, true } +func (v normalTimeArray) Equal(other NormalValue) bool { + return areNormalArraysEqual(v.val, other.TimeArray) +} + type normalDocumentArray struct { baseArrayNormalValue[[]*Document] } @@ -89,6 +116,10 @@ func (v normalDocumentArray) DocumentArray() ([]*Document, bool) { return v.val, true } +func (v normalDocumentArray) Equal(other NormalValue) bool { + return areNormalArraysEqual(v.val, other.DocumentArray) +} + // NewNormalBoolArray creates a new NormalValue that represents a `[]bool` value. 
func NewNormalBoolArray(val []bool) NormalValue { return normalBoolArray{newBaseArrayNormalValue(val)} @@ -147,3 +178,34 @@ func normalizeCharsArr[R string | []byte, T string | []byte](val []T) []R { } return arr } + +func areArraysEqual[T comparable](arr1, arr2 []T) bool { + if len(arr1) != len(arr2) { + return false + } + for i, v := range arr1 { + if v != arr2[i] { + return false + } + } + return true +} + +func areNormalArraysEqual[T comparable](val []T, f func() ([]T, bool)) bool { + if otherVal, ok := f(); ok { + return areArraysEqual(val, otherVal) + } + return false +} + +func are2DArraysEqual[T comparable](arr1, arr2 [][]T) bool { + if len(arr1) != len(arr2) { + return false + } + for i, v := range arr1 { + if !areArraysEqual(v, arr2[i]) { + return false + } + } + return true +} diff --git a/client/normal_array_of_nillables.go b/client/normal_array_of_nillables.go index 53461f6afa..35f3b50bcb 100644 --- a/client/normal_array_of_nillables.go +++ b/client/normal_array_of_nillables.go @@ -11,6 +11,7 @@ package client import ( + "bytes" "time" "github.com/sourcenetwork/immutable" @@ -25,6 +26,10 @@ func (v normalNillableBoolArray) NillableBoolArray() ([]immutable.Option[bool], return v.val, true } +func (v normalNillableBoolArray) Equal(other NormalValue) bool { + return areNormalArraysOfNillablesEqual(v.val, other.NillableBoolArray) +} + type normalNillableIntArray struct { baseArrayNormalValue[[]immutable.Option[int64]] } @@ -33,6 +38,10 @@ func (v normalNillableIntArray) NillableIntArray() ([]immutable.Option[int64], b return v.val, true } +func (v normalNillableIntArray) Equal(other NormalValue) bool { + return areNormalArraysOfNillablesEqual(v.val, other.NillableIntArray) +} + type normalNillableFloatArray struct { baseArrayNormalValue[[]immutable.Option[float64]] } @@ -41,6 +50,10 @@ func (v normalNillableFloatArray) NillableFloatArray() ([]immutable.Option[float return v.val, true } +func (v normalNillableFloatArray) Equal(other NormalValue) bool { + return areNormalArraysOfNillablesEqual(v.val, other.NillableFloatArray) +} + type normalNillableStringArray struct { baseArrayNormalValue[[]immutable.Option[string]] } @@ -49,6 +62,10 @@ func (v normalNillableStringArray) NillableStringArray() ([]immutable.Option[str return v.val, true } +func (v normalNillableStringArray) Equal(other NormalValue) bool { + return areNormalArraysOfNillablesEqual(v.val, other.NillableStringArray) +} + type normalNillableBytesArray struct { baseArrayNormalValue[[]immutable.Option[[]byte]] } @@ -57,6 +74,13 @@ func (v normalNillableBytesArray) NillableBytesArray() ([]immutable.Option[[]byt return v.val, true } +func (v normalNillableBytesArray) Equal(other NormalValue) bool { + if otherVal, ok := other.NillableBytesArray(); ok { + return areArraysOfNillableBytesEqual(v.val, otherVal) + } + return false +} + type normalNillableTimeArray struct { baseArrayNormalValue[[]immutable.Option[time.Time]] } @@ -65,6 +89,10 @@ func (v normalNillableTimeArray) NillableTimeArray() ([]immutable.Option[time.Ti return v.val, true } +func (v normalNillableTimeArray) Equal(other NormalValue) bool { + return areNormalArraysOfNillablesEqual(v.val, other.NillableTimeArray) +} + type normalNillableDocumentArray struct { baseArrayNormalValue[[]immutable.Option[*Document]] } @@ -73,6 +101,10 @@ func (v normalNillableDocumentArray) NillableDocumentArray() ([]immutable.Option return v.val, true } +func (v normalNillableDocumentArray) Equal(other NormalValue) bool { + return areNormalArraysOfNillablesEqual(v.val, 
other.NillableDocumentArray)
+}
+
 // NewNormalNillableBoolArray creates a new NormalValue that represents a
 // `[]immutable.Option[bool]` value.
 func NewNormalNillableBoolArray(val []immutable.Option[bool]) NormalValue {
@@ -140,3 +172,41 @@ func normalizeNillableCharsArr[R string | []byte, T string | []byte](val []immut
 	}
 	return arr
 }
+
+func areNormalArraysOfNillablesEqual[T comparable](
+	val []immutable.Option[T],
+	f func() ([]immutable.Option[T], bool),
+) bool {
+	if otherVal, ok := f(); ok {
+		return areArraysOfNillablesEqual(val, otherVal)
+	}
+	return false
+}
+
+func areArraysOfNillablesEqual[T comparable](a, b []immutable.Option[T]) bool {
+	if len(a) != len(b) {
+		return false
+	}
+	for i, v := range a {
+		if v != b[i] {
+			return false
+		}
+	}
+	return true
+}
+
+func areArraysOfNillableBytesEqual(a, b []immutable.Option[[]byte]) bool {
+	if len(a) != len(b) {
+		return false
+	}
+	for i, v := range a {
+		if v.HasValue() && b[i].HasValue() {
+			if !bytes.Equal(v.Value(), b[i].Value()) {
+				return false
+			}
+		} else if v.HasValue() || b[i].HasValue() {
+			return false
+		}
+	}
+	return true
+}

diff --git a/client/normal_nillable_array.go b/client/normal_nillable_array.go
index fa6bdc4bbb..9b30c342e2 100644
--- a/client/normal_nillable_array.go
+++ b/client/normal_nillable_array.go
@@ -52,6 +52,10 @@ func (v normalBoolNillableArray) BoolNillableArray() (immutable.Option[[]bool],
 	return v.val, true
 }
 
+func (v normalBoolNillableArray) Equal(other NormalValue) bool {
+	return areOptionsArrEqual(v.val, other.BoolNillableArray)
+}
+
 type normalIntNillableArray struct {
 	baseNillableArrayNormalValue[[]int64]
 }
@@ -60,6 +64,10 @@ func (v normalIntNillableArray) IntNillableArray() (immutable.Option[[]int64], b
 	return v.val, true
 }
 
+func (v normalIntNillableArray) Equal(other NormalValue) bool {
+	return areOptionsArrEqual(v.val, other.IntNillableArray)
+}
+
 type normalFloatNillableArray struct {
 	baseNillableArrayNormalValue[[]float64]
 }
@@ -68,6 +76,10 @@ func (v normalFloatNillableArray) FloatNillableArray() (immutable.Option[[]float
 	return v.val, true
 }
 
+func (v normalFloatNillableArray) Equal(other NormalValue) bool {
+	return areOptionsArrEqual(v.val, other.FloatNillableArray)
+}
+
 type normalStringNillableArray struct {
 	baseNillableArrayNormalValue[[]string]
 }
@@ -76,6 +88,10 @@ func (v normalStringNillableArray) StringNillableArray() (immutable.Option[[]str
 	return v.val, true
 }
 
+func (v normalStringNillableArray) Equal(other NormalValue) bool {
+	return areOptionsArrEqual(v.val, other.StringNillableArray)
+}
+
 type normalBytesNillableArray struct {
 	baseNillableArrayNormalValue[[][]byte]
 }
@@ -84,6 +100,16 @@ func (v normalBytesNillableArray) BytesNillableArray() (immutable.Option[[][]byt
 	return v.val, true
 }
 
+func (v normalBytesNillableArray) Equal(other NormalValue) bool {
+	if otherVal, ok := other.BytesNillableArray(); ok {
+		if v.val.HasValue() && otherVal.HasValue() {
+			return are2DArraysEqual(v.val.Value(), otherVal.Value())
+		}
+		return !v.val.HasValue() && !otherVal.HasValue()
+	}
+	return false
+}
+
 type normalTimeNillableArray struct {
 	baseNillableArrayNormalValue[[]time.Time]
 }
@@ -92,6 +118,10 @@ func (v normalTimeNillableArray) TimeNillableArray() (immutable.Option[[]time.Ti
 	return v.val, true
 }
 
+func (v normalTimeNillableArray) Equal(other NormalValue) bool {
+	return areOptionsArrEqual(v.val, other.TimeNillableArray)
+}
+
 type normalDocumentNillableArray struct {
 	baseNillableArrayNormalValue[[]*Document]
 }
@@ -100,6 +130,10 @@ func (v
normalDocumentNillableArray) DocumentNillableArray() (immutable.Option[[
 	return v.val, true
 }
 
+func (v normalDocumentNillableArray) Equal(other NormalValue) bool {
+	return areOptionsArrEqual(v.val, other.DocumentNillableArray)
+}
+
 // NewNormalBoolNillableArray creates a new NormalValue that represents an `immutable.Option[[]bool]` value.
 func NewNormalBoolNillableArray(val immutable.Option[[]bool]) NormalValue {
 	return normalBoolNillableArray{newBaseNillableArrayNormalValue(val)}
@@ -150,3 +184,13 @@ func normalizeCharsNillableArr[R string | []byte, T string | []byte](val immutab
 	}
 	return immutable.None[[]R]()
 }
+
+func areOptionsArrEqual[T comparable](val immutable.Option[[]T], f func() (immutable.Option[[]T], bool)) bool {
+	if otherVal, ok := f(); ok {
+		if val.HasValue() && otherVal.HasValue() {
+			return areArraysEqual(val.Value(), otherVal.Value())
+		}
+		return !val.HasValue() && !otherVal.HasValue()
+	}
+	return false
+}

diff --git a/client/normal_nillable_array_of_nillables.go b/client/normal_nillable_array_of_nillables.go
index 3594186ba2..47052176e2 100644
--- a/client/normal_nillable_array_of_nillables.go
+++ b/client/normal_nillable_array_of_nillables.go
@@ -27,6 +27,10 @@ func (v normalNillableBoolNillableArray) NillableBoolNillableArray() (
 	return v.val, true
 }
 
+func (v normalNillableBoolNillableArray) Equal(other NormalValue) bool {
+	return areNormalNillableArraysOfNillablesEqual(v.val, other.NillableBoolNillableArray)
+}
+
 type normalNillableIntNillableArray struct {
 	baseNillableArrayNormalValue[[]immutable.Option[int64]]
 }
@@ -37,6 +41,10 @@ func (v normalNillableIntNillableArray) NillableIntNillableArray() (
 	return v.val, true
 }
 
+func (v normalNillableIntNillableArray) Equal(other NormalValue) bool {
+	return areNormalNillableArraysOfNillablesEqual(v.val, other.NillableIntNillableArray)
+}
+
 type normalNillableFloatNillableArray struct {
 	baseNillableArrayNormalValue[[]immutable.Option[float64]]
 }
@@ -47,6 +55,10 @@ func (v normalNillableFloatNillableArray) NillableFloatNillableArray() (
 	return v.val, true
 }
 
+func (v normalNillableFloatNillableArray) Equal(other NormalValue) bool {
+	return areNormalNillableArraysOfNillablesEqual(v.val, other.NillableFloatNillableArray)
+}
+
 type normalNillableStringNillableArray struct {
 	baseNillableArrayNormalValue[[]immutable.Option[string]]
 }
@@ -57,6 +69,10 @@ func (v normalNillableStringNillableArray) NillableStringNillableArray() (
 	return v.val, true
 }
 
+func (v normalNillableStringNillableArray) Equal(other NormalValue) bool {
+	return areNormalNillableArraysOfNillablesEqual(v.val, other.NillableStringNillableArray)
+}
+
 type normalNillableBytesNillableArray struct {
 	baseNillableArrayNormalValue[[]immutable.Option[[]byte]]
 }
@@ -67,6 +83,16 @@ func (v normalNillableBytesNillableArray) NillableBytesNillableArray() (
 	return v.val, true
 }
 
+func (v normalNillableBytesNillableArray) Equal(other NormalValue) bool {
+	if otherVal, ok := other.NillableBytesNillableArray(); ok {
+		if v.val.HasValue() && otherVal.HasValue() {
+			return areArraysOfNillableBytesEqual(v.val.Value(), otherVal.Value())
+		}
+		return !v.val.HasValue() && !otherVal.HasValue()
+	}
+	return false
+}
+
 type normalNillableTimeNillableArray struct {
 	baseNillableArrayNormalValue[[]immutable.Option[time.Time]]
 }
@@ -77,6 +103,10 @@ func (v normalNillableTimeNillableArray) NillableTimeNillableArray() (
 	return v.val, true
 }
 
+func (v normalNillableTimeNillableArray) Equal(other NormalValue) bool {
+	return areNormalNillableArraysOfNillablesEqual(v.val,
other.NillableTimeNillableArray) +} + type normalNillableDocumentNillableArray struct { baseNillableArrayNormalValue[[]immutable.Option[*Document]] } @@ -87,6 +117,10 @@ func (v normalNillableDocumentNillableArray) NillableDocumentNillableArray() ( return v.val, true } +func (v normalNillableDocumentNillableArray) Equal(other NormalValue) bool { + return areNormalNillableArraysOfNillablesEqual(v.val, other.NillableDocumentNillableArray) +} + // NewNormalNillableBoolNillableArray creates a new NormalValue that represents a // `immutable.Option[[]immutable.Option[bool]]` value. func NewNormalNillableBoolNillableArray(val immutable.Option[[]immutable.Option[bool]]) NormalValue { @@ -158,3 +192,20 @@ func normalizeNillableCharsNillableArr[R string | []byte, T string | []byte]( } return immutable.None[[]immutable.Option[R]]() } + +func areNormalNillableArraysOfNillablesEqual[T comparable]( + val immutable.Option[[]immutable.Option[T]], + f func() (immutable.Option[[]immutable.Option[T]], bool), +) bool { + if otherVal, ok := f(); ok { + return areNillableArraysOfNillablesEqual(val, otherVal) + } + return false +} + +func areNillableArraysOfNillablesEqual[T comparable](a, b immutable.Option[[]immutable.Option[T]]) bool { + if a.HasValue() && b.HasValue() { + return areArraysOfNillablesEqual(a.Value(), b.Value()) + } + return !a.HasValue() && !b.HasValue() +} diff --git a/client/normal_nillable_scalar.go b/client/normal_nillable_scalar.go index 88876c9d7e..86b4827bf7 100644 --- a/client/normal_nillable_scalar.go +++ b/client/normal_nillable_scalar.go @@ -48,6 +48,10 @@ func (v normalNillableBool) NillableBool() (immutable.Option[bool], bool) { return v.val, true } +func (v normalNillableBool) Equal(other NormalValue) bool { + return areNormalScalarsEqual(v.val, other.NillableBool) +} + type normalNillableInt struct { baseNillableNormalValue[int64] } @@ -56,6 +60,10 @@ func (v normalNillableInt) NillableInt() (immutable.Option[int64], bool) { return v.val, true } +func (v normalNillableInt) Equal(other NormalValue) bool { + return areNormalScalarsEqual(v.val, other.NillableInt) +} + type normalNillableFloat struct { baseNillableNormalValue[float64] } @@ -64,6 +72,10 @@ func (v normalNillableFloat) NillableFloat() (immutable.Option[float64], bool) { return v.val, true } +func (v normalNillableFloat) Equal(other NormalValue) bool { + return areNormalScalarsEqual(v.val, other.NillableFloat) +} + type normalNillableString struct { baseNillableNormalValue[string] } @@ -72,6 +84,10 @@ func (v normalNillableString) NillableString() (immutable.Option[string], bool) return v.val, true } +func (v normalNillableString) Equal(other NormalValue) bool { + return areNormalScalarsEqual(v.val, other.NillableString) +} + type normalNillableBytes struct { baseNillableNormalValue[[]byte] } @@ -80,6 +96,10 @@ func (v normalNillableBytes) NillableBytes() (immutable.Option[[]byte], bool) { return v.val, true } +func (v normalNillableBytes) Equal(other NormalValue) bool { + return areOptionsArrEqual(v.val, other.NillableBytes) +} + type normalNillableTime struct { baseNillableNormalValue[time.Time] } @@ -88,6 +108,10 @@ func (v normalNillableTime) NillableTime() (immutable.Option[time.Time], bool) { return v.val, true } +func (v normalNillableTime) Equal(other NormalValue) bool { + return areNormalScalarsEqual(v.val, other.NillableTime) +} + type normalNillableDocument struct { baseNillableNormalValue[*Document] } @@ -96,6 +120,10 @@ func (v normalNillableDocument) NillableDocument() (immutable.Option[*Document], return 
v.val, true } +func (v normalNillableDocument) Equal(other NormalValue) bool { + return areNormalScalarsEqual(v.val, other.NillableDocument) +} + // NewNormalNillableBool creates a new NormalValue that represents a `immutable.Option[bool]` value. func NewNormalNillableBool(val immutable.Option[bool]) NormalValue { return normalNillableBool{newBaseNillableNormalValue(val)} diff --git a/client/normal_scalar.go b/client/normal_scalar.go index f4378f5474..f30eca78d7 100644 --- a/client/normal_scalar.go +++ b/client/normal_scalar.go @@ -11,6 +11,7 @@ package client import ( + "bytes" "time" "golang.org/x/exp/constraints" @@ -38,6 +39,10 @@ func (v normalBool) Bool() (bool, bool) { return v.val, true } +func (v normalBool) Equal(other NormalValue) bool { + return areNormalScalarsEqual(v.val, other.Bool) +} + type normalInt struct { baseNormalValue[int64] } @@ -46,6 +51,10 @@ func (v normalInt) Int() (int64, bool) { return v.val, true } +func (v normalInt) Equal(other NormalValue) bool { + return areNormalScalarsEqual(v.val, other.Int) +} + type normalFloat struct { baseNormalValue[float64] } @@ -54,6 +63,10 @@ func (v normalFloat) Float() (float64, bool) { return v.val, true } +func (v normalFloat) Equal(other NormalValue) bool { + return areNormalScalarsEqual(v.val, other.Float) +} + type normalString struct { baseNormalValue[string] } @@ -62,6 +75,10 @@ func (v normalString) String() (string, bool) { return v.val, true } +func (v normalString) Equal(other NormalValue) bool { + return areNormalScalarsEqual(v.val, other.String) +} + type normalBytes struct { baseNormalValue[[]byte] } @@ -70,6 +87,13 @@ func (v normalBytes) Bytes() ([]byte, bool) { return v.val, true } +func (v normalBytes) Equal(other NormalValue) bool { + if otherVal, ok := other.Bytes(); ok { + return bytes.Equal(v.val, otherVal) + } + return false +} + type normalTime struct { baseNormalValue[time.Time] } @@ -78,10 +102,18 @@ func (v normalTime) Time() (time.Time, bool) { return v.val, true } +func (v normalTime) Equal(other NormalValue) bool { + return areNormalScalarsEqual(v.val, other.Time) +} + type normalDocument struct { baseNormalValue[*Document] } +func (v normalDocument) Equal(other NormalValue) bool { + return areNormalScalarsEqual(v.val, other.Document) +} + func (v normalDocument) Document() (*Document, bool) { return v.val, true } @@ -128,3 +160,10 @@ func NewNormalTime(val time.Time) NormalValue { func NewNormalDocument(val *Document) NormalValue { return normalDocument{baseNormalValue[*Document]{val: val}} } + +func areNormalScalarsEqual[T comparable](val T, f func() (T, bool)) bool { + if otherVal, ok := f(); ok { + return val == otherVal + } + return false +} diff --git a/client/normal_value.go b/client/normal_value.go index 3f0681fbfc..18bdd74ff0 100644 --- a/client/normal_value.go +++ b/client/normal_value.go @@ -31,6 +31,9 @@ type NormalValue interface { // if the option has value, otherwise it will return nil. Unwrap() any + // Equal returns if the value is equal to the given value. + Equal(NormalValue) bool + // IsNil returns if the value is nil. For not nillable values it will always return false. IsNil() bool // IsNillable returns if the value can be nil. 
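As a usage sketch (illustration only, not part of the patch): `Equal` compares both the kind and the contents of a `NormalValue`, so values of different kinds never compare equal even when the underlying numbers match. This mirrors the table-driven cases in the new test file below.

```go
package main

import (
	"fmt"

	"github.com/sourcenetwork/defradb/client"
)

func main() {
	a := client.NewNormalIntArray([]int64{1, 2, 3})
	b := client.NewNormalIntArray([]int64{1, 2, 3})
	c := client.NewNormalFloatArray([]float64{1, 2, 3})

	fmt.Println(a.Equal(b)) // true: same kind and same elements
	fmt.Println(a.Equal(c)) // false: int array vs float array, despite equal numbers
}
```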
diff --git a/client/normal_value_test.go b/client/normal_value_test.go index ce454a55b4..c368a300e3 100644 --- a/client/normal_value_test.go +++ b/client/normal_value_test.go @@ -1647,3 +1647,1388 @@ func TestArrayValue_IsNillable(t *testing.T) { assert.True(t, v.IsNil()) } } + +func TestNormalValue_IsEqual(t *testing.T) { + now := time.Now() + later := now.Add(time.Hour) + doc1 := &Document{} + doc2 := &Document{} + doc3 := &Document{} + + tests := []struct { + name string + v1 NormalValue + v2 NormalValue + expected bool + }{ + // Values + { + name: "void", + v1: NormalVoid{}, + v2: NormalVoid{}, + expected: true, + }, + { + name: "void not equal", + v1: NormalVoid{}, + v2: NewNormalInt(1), + expected: false, + }, + { + name: "bool", + v1: NewNormalBool(true), + v2: NewNormalBool(true), + expected: true, + }, + { + name: "bool not equal", + v1: NewNormalBool(true), + v2: NewNormalBool(false), + expected: false, + }, + { + name: "bool different type", + v1: NewNormalBool(true), + v2: NewNormalInt(1), + expected: false, + }, + { + name: "int", + v1: NewNormalInt(1), + v2: NewNormalInt(1), + expected: true, + }, + { + name: "int not equal", + v1: NewNormalInt(1), + v2: NewNormalInt(2), + expected: false, + }, + { + name: "int different type", + v1: NewNormalInt(1), + v2: NewNormalFloat(1.0), + expected: false, + }, + { + name: "float", + v1: NewNormalFloat(1.0), + v2: NewNormalFloat(1.0), + expected: true, + }, + { + name: "float not equal", + v1: NewNormalFloat(1.0), + v2: NewNormalFloat(1.1), + expected: false, + }, + { + name: "float different type", + v1: NewNormalFloat(1.0), + v2: NewNormalString("1.0"), + expected: false, + }, + { + name: "string", + v1: NewNormalString("test"), + v2: NewNormalString("test"), + expected: true, + }, + { + name: "string not equal", + v1: NewNormalString("test"), + v2: NewNormalString("test2"), + expected: false, + }, + { + name: "string different type", + v1: NewNormalString("test"), + v2: NewNormalBytes([]byte("test")), + expected: false, + }, + { + name: "bytes", + v1: NewNormalBytes([]byte{1, 2, 3}), + v2: NewNormalBytes([]byte{1, 2, 3}), + expected: true, + }, + { + name: "bytes not equal", + v1: NewNormalBytes([]byte{1, 2, 3}), + v2: NewNormalBytes([]byte{1, 2, 4}), + expected: false, + }, + { + name: "bytes different length", + v1: NewNormalBytes([]byte{1, 2, 3}), + v2: NewNormalBytes([]byte{1, 2, 3, 4}), + expected: false, + }, + { + name: "bytes different type", + v1: NewNormalBytes([]byte{1, 2, 3}), + v2: NewNormalString("123"), + expected: false, + }, + { + name: "time", + v1: NewNormalTime(now), + v2: NewNormalTime(now), + expected: true, + }, + { + name: "time not equal", + v1: NewNormalTime(now), + v2: NewNormalTime(later), + expected: false, + }, + { + name: "time different type", + v1: NewNormalTime(now), + v2: NewNormalString(now.String()), + expected: false, + }, + { + name: "document", + v1: NewNormalDocument(doc1), + v2: NewNormalDocument(doc1), + expected: true, + }, + { + name: "document not equal", + v1: NewNormalDocument(doc1), + v2: NewNormalDocument(doc2), + expected: false, + }, + { + name: "document different type", + v1: NewNormalDocument(doc1), + v2: NewNormalString("document"), + expected: false, + }, + // Nillable values + { + name: "bool nillable", + v1: NewNormalNillableBool(immutable.Some(true)), + v2: NewNormalNillableBool(immutable.Some(true)), + expected: true, + }, + { + name: "bool nillable not equal", + v1: NewNormalNillableBool(immutable.Some(true)), + v2: NewNormalNillableBool(immutable.Some(false)), + expected: 
false, + }, + { + name: "bool nillable one nil", + v1: NewNormalNillableBool(immutable.Some(true)), + v2: NewNormalNillableBool(immutable.None[bool]()), + expected: false, + }, + { + name: "bool nillable different type", + v1: NewNormalNillableBool(immutable.Some(true)), + v2: NewNormalBool(true), + expected: false, + }, + { + name: "int nillable", + v1: NewNormalNillableInt(immutable.Some(int64(1))), + v2: NewNormalNillableInt(immutable.Some(int64(1))), + expected: true, + }, + { + name: "int nillable not equal", + v1: NewNormalNillableInt(immutable.Some(int64(1))), + v2: NewNormalNillableInt(immutable.Some(int64(2))), + expected: false, + }, + { + name: "int nillable one nil", + v1: NewNormalNillableInt(immutable.Some(int64(1))), + v2: NewNormalNillableInt(immutable.None[int64]()), + expected: false, + }, + { + name: "int nillable different type", + v1: NewNormalNillableInt(immutable.Some(int64(1))), + v2: NewNormalInt(1), + expected: false, + }, + { + name: "float nillable", + v1: NewNormalNillableFloat(immutable.Some(1.0)), + v2: NewNormalNillableFloat(immutable.Some(1.0)), + expected: true, + }, + { + name: "float nillable not equal", + v1: NewNormalNillableFloat(immutable.Some(1.0)), + v2: NewNormalNillableFloat(immutable.Some(2.0)), + expected: false, + }, + { + name: "float nillable one nil", + v1: NewNormalNillableFloat(immutable.Some(1.0)), + v2: NewNormalNillableFloat(immutable.None[float64]()), + expected: false, + }, + { + name: "float nillable different type", + v1: NewNormalNillableFloat(immutable.Some(1.0)), + v2: NewNormalFloat(1.0), + expected: false, + }, + { + name: "string nillable", + v1: NewNormalNillableString(immutable.Some("test")), + v2: NewNormalNillableString(immutable.Some("test")), + expected: true, + }, + { + name: "string nillable not equal", + v1: NewNormalNillableString(immutable.Some("test")), + v2: NewNormalNillableString(immutable.Some("test2")), + expected: false, + }, + { + name: "string nillable one nil", + v1: NewNormalNillableString(immutable.Some("test")), + v2: NewNormalNillableString(immutable.None[string]()), + expected: false, + }, + { + name: "string nillable different type", + v1: NewNormalNillableString(immutable.Some("test")), + v2: NewNormalString("test"), + expected: false, + }, + { + name: "bytes nillable", + v1: NewNormalNillableBytes(immutable.Some([]byte{1, 2, 3})), + v2: NewNormalNillableBytes(immutable.Some([]byte{1, 2, 3})), + expected: true, + }, + { + name: "bytes nillable not equal", + v1: NewNormalNillableBytes(immutable.Some([]byte{1, 2, 3})), + v2: NewNormalNillableBytes(immutable.Some([]byte{1, 2, 4})), + expected: false, + }, + { + name: "bytes nillable one nil", + v1: NewNormalNillableBytes(immutable.Some([]byte{1, 2, 3})), + v2: NewNormalNillableBytes(immutable.None[[]byte]()), + expected: false, + }, + { + name: "bytes nillable different length", + v1: NewNormalNillableBytes(immutable.Some([]byte{1, 2, 3})), + v2: NewNormalNillableBytes(immutable.Some([]byte{1, 2, 3, 4})), + expected: false, + }, + { + name: "bytes nillable different type", + v1: NewNormalNillableBytes(immutable.Some([]byte{1, 2, 3})), + v2: NewNormalBytes([]byte{1, 2, 3}), + expected: false, + }, + { + name: "time nillable", + v1: NewNormalNillableTime(immutable.Some(now)), + v2: NewNormalNillableTime(immutable.Some(now)), + expected: true, + }, + { + name: "time nillable not equal", + v1: NewNormalNillableTime(immutable.Some(now)), + v2: NewNormalNillableTime(immutable.Some(later)), + expected: false, + }, + { + name: "time nillable one nil", + v1: 
NewNormalNillableTime(immutable.Some(now)), + v2: NewNormalNillableTime(immutable.None[time.Time]()), + expected: false, + }, + { + name: "time nillable different type", + v1: NewNormalNillableTime(immutable.Some(now)), + v2: NewNormalTime(now), + expected: false, + }, + { + name: "document nillable", + v1: NewNormalNillableDocument(immutable.Some(doc1)), + v2: NewNormalNillableDocument(immutable.Some(doc1)), + expected: true, + }, + { + name: "document nillable not equal", + v1: NewNormalNillableDocument(immutable.Some(doc1)), + v2: NewNormalNillableDocument(immutable.Some(doc2)), + expected: false, + }, + { + name: "document nillable one nil", + v1: NewNormalNillableDocument(immutable.Some(doc1)), + v2: NewNormalNillableDocument(immutable.None[*Document]()), + expected: false, + }, + { + name: "document nillable different type", + v1: NewNormalNillableDocument(immutable.Some(doc1)), + v2: NewNormalDocument(doc1), + expected: false, + }, + // Arrays + { + name: "bool array", + v1: NewNormalBoolArray([]bool{true, false}), + v2: NewNormalBoolArray([]bool{true, false}), + expected: true, + }, + { + name: "bool array not equal", + v1: NewNormalBoolArray([]bool{true, false}), + v2: NewNormalBoolArray([]bool{true, true}), + expected: false, + }, + { + name: "bool array different length", + v1: NewNormalBoolArray([]bool{true, false}), + v2: NewNormalBoolArray([]bool{true}), + expected: false, + }, + { + name: "bool array different type", + v1: NewNormalBoolArray([]bool{true, false}), + v2: NewNormalIntArray([]int64{1, 0}), + expected: false, + }, + { + name: "int array", + v1: NewNormalIntArray([]int64{1, 2}), + v2: NewNormalIntArray([]int64{1, 2}), + expected: true, + }, + { + name: "int array not equal", + v1: NewNormalIntArray([]int64{1, 2}), + v2: NewNormalIntArray([]int64{1, 3}), + expected: false, + }, + { + name: "int array different length", + v1: NewNormalIntArray([]int64{1, 2}), + v2: NewNormalIntArray([]int64{1}), + expected: false, + }, + { + name: "int array different type", + v1: NewNormalIntArray([]int64{1, 2}), + v2: NewNormalFloatArray([]float64{1.0, 2.0}), + expected: false, + }, + { + name: "float array", + v1: NewNormalFloatArray([]float64{1.0, 2.0}), + v2: NewNormalFloatArray([]float64{1.0, 2.0}), + expected: true, + }, + { + name: "float array not equal", + v1: NewNormalFloatArray([]float64{1.0, 2.0}), + v2: NewNormalFloatArray([]float64{1.0, 3.0}), + expected: false, + }, + { + name: "float array different length", + v1: NewNormalFloatArray([]float64{1.0, 2.0}), + v2: NewNormalFloatArray([]float64{1.0}), + expected: false, + }, + { + name: "float array different type", + v1: NewNormalFloatArray([]float64{1.0, 2.0}), + v2: NewNormalStringArray([]string{"1.0", "2.0"}), + expected: false, + }, + { + name: "string array", + v1: NewNormalStringArray([]string{"test", "test2"}), + v2: NewNormalStringArray([]string{"test", "test2"}), + expected: true, + }, + { + name: "string array not equal", + v1: NewNormalStringArray([]string{"test", "test2"}), + v2: NewNormalStringArray([]string{"test", "test3"}), + expected: false, + }, + { + name: "string array different length", + v1: NewNormalStringArray([]string{"test", "test2"}), + v2: NewNormalStringArray([]string{"test"}), + expected: false, + }, + { + name: "string array different type", + v1: NewNormalStringArray([]string{"test", "test2"}), + v2: NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 6}}), + expected: false, + }, + { + name: "bytes array", + v1: NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 6}}), + v2: 
NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 6}}), + expected: true, + }, + { + name: "bytes array not equal", + v1: NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 6}}), + v2: NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 7}}), + expected: false, + }, + { + name: "bytes array different length", + v1: NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 6}}), + v2: NewNormalBytesArray([][]byte{{1, 2, 3}}), + expected: false, + }, + { + name: "bytes array different type", + v1: NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 6}}), + v2: NewNormalStringArray([]string{"123", "456"}), + expected: false, + }, + { + name: "time array", + v1: NewNormalTimeArray([]time.Time{now, now}), + v2: NewNormalTimeArray([]time.Time{now, now}), + expected: true, + }, + { + name: "time array not equal", + v1: NewNormalTimeArray([]time.Time{now, now}), + v2: NewNormalTimeArray([]time.Time{now, later}), + expected: false, + }, + { + name: "time array different length", + v1: NewNormalTimeArray([]time.Time{now, now}), + v2: NewNormalTimeArray([]time.Time{now}), + expected: false, + }, + { + name: "time array different type", + v1: NewNormalTimeArray([]time.Time{now, now}), + v2: NewNormalStringArray([]string{now.String(), now.String()}), + expected: false, + }, + { + name: "document array", + v1: NewNormalDocumentArray([]*Document{doc1, doc2}), + v2: NewNormalDocumentArray([]*Document{doc1, doc2}), + expected: true, + }, + { + name: "document array not equal", + v1: NewNormalDocumentArray([]*Document{doc1, doc2}), + v2: NewNormalDocumentArray([]*Document{doc1, doc1}), + expected: false, + }, + { + name: "document array different length", + v1: NewNormalDocumentArray([]*Document{doc1, doc2}), + v2: NewNormalDocumentArray([]*Document{doc1}), + expected: false, + }, + { + name: "document array different type", + v1: NewNormalDocumentArray([]*Document{doc1, doc2}), + v2: NewNormalStringArray([]string{"doc1", "doc2"}), + expected: false, + }, + // Arrays of nillables + { + name: "array of nillable bools", + v1: NewNormalNillableBoolArray([]immutable.Option[bool]{immutable.Some(true), immutable.Some(false)}), + v2: NewNormalNillableBoolArray([]immutable.Option[bool]{immutable.Some(true), immutable.Some(false)}), + expected: true, + }, + { + name: "array of nillable bools not equal", + v1: NewNormalNillableBoolArray([]immutable.Option[bool]{immutable.Some(true), immutable.Some(false)}), + v2: NewNormalNillableBoolArray([]immutable.Option[bool]{immutable.Some(false), immutable.Some(true)}), + expected: false, + }, + { + name: "array of nillable bools with nil values equal", + v1: NewNormalNillableBoolArray( + []immutable.Option[bool]{immutable.Some(true), immutable.None[bool]()}), + v2: NewNormalNillableBoolArray( + []immutable.Option[bool]{immutable.Some(true), immutable.None[bool]()}), + expected: true, + }, + { + name: "array of nillable bools with nil values not equal", + v1: NewNormalNillableBoolArray( + []immutable.Option[bool]{immutable.Some(true), immutable.None[bool]()}), + v2: NewNormalNillableBoolArray( + []immutable.Option[bool]{immutable.None[bool](), immutable.Some(true)}), + expected: false, + }, + { + name: "array of nillable bools different lengths", + v1: NewNormalNillableBoolArray( + []immutable.Option[bool]{immutable.Some(true), immutable.Some(false)}), + v2: NewNormalNillableBoolArray([]immutable.Option[bool]{immutable.Some(true)}), + expected: false, + }, + { + name: "array of nillable bools vs non-nillable array", + v1: NewNormalNillableBoolArray( + []immutable.Option[bool]{immutable.Some(true), 
immutable.Some(false)}), + v2: NewNormalBoolArray([]bool{true, false}), + expected: false, + }, + { + name: "array of nillable ints equal", + v1: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(1)), immutable.Some(int64(2))}), + v2: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(1)), immutable.Some(int64(2))}), + expected: true, + }, + { + name: "array of nillable ints not equal", + v1: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(1)), immutable.Some(int64(2))}), + v2: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(2)), immutable.Some(int64(1))}), + expected: false, + }, + { + name: "array of nillable ints with nil values equal", + v1: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(1)), immutable.None[int64]()}), + v2: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(1)), immutable.None[int64]()}), + expected: true, + }, + { + name: "array of nillable ints with nil values not equal", + v1: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(1)), immutable.None[int64]()}), + v2: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.None[int64](), immutable.Some(int64(1))}), + expected: false, + }, + { + name: "array of nillable ints different lengths", + v1: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(1)), immutable.Some(int64(2))}), + v2: NewNormalNillableIntArray([]immutable.Option[int64]{immutable.Some(int64(1))}), + expected: false, + }, + { + name: "array of nillable ints vs non-nillable array", + v1: NewNormalNillableIntArray( + []immutable.Option[int64]{immutable.Some(int64(1)), immutable.Some(int64(2))}), + v2: NewNormalIntArray([]int64{1, 2}), + expected: false, + }, + { + name: "float nillable array equal", + v1: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.0), immutable.Some(2.0)}), + v2: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.0), immutable.Some(2.0)}), + expected: true, + }, + { + name: "float nillable array not equal", + v1: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.0), immutable.Some(2.0)}), + v2: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(2.0), immutable.Some(1.0)}), + expected: false, + }, + { + name: "float nillable array with nil values equal", + v1: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.0), immutable.None[float64]()}), + v2: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.0), immutable.None[float64]()}), + expected: true, + }, + { + name: "float nillable array with nil values not equal", + v1: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.0), immutable.None[float64]()}), + v2: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.None[float64](), immutable.Some(1.0)}), + expected: false, + }, + { + name: "float nillable array different lengths", + v1: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.0), immutable.Some(2.0)}), + v2: NewNormalNillableFloatArray([]immutable.Option[float64]{immutable.Some(1.0)}), + expected: false, + }, + { + name: "float nillable array vs non-nillable array", + v1: NewNormalNillableFloatArray( + []immutable.Option[float64]{immutable.Some(1.0), immutable.Some(2.0)}), + v2: NewNormalFloatArray([]float64{1.0, 2.0}), + expected: false, + }, + { 
+ name: "array of nillable strings", + v1: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test"), immutable.Some("test2")}), + v2: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test"), immutable.Some("test2")}), + expected: true, + }, + { + name: "array of nillable strings not equal", + v1: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test"), immutable.Some("test2")}), + v2: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test"), immutable.Some("test3")}), + expected: false, + }, + { + name: "array of nillable strings with nil", + v1: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test"), immutable.None[string]()}), + v2: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test"), immutable.None[string]()}), + expected: true, + }, + { + name: "array of nillable strings different length", + v1: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test"), immutable.Some("test2")}), + v2: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test")}), + expected: false, + }, + { + name: "array of nillable strings different type", + v1: NewNormalNillableStringArray( + []immutable.Option[string]{immutable.Some("test"), immutable.Some("test2")}), + v2: NewNormalStringArray([]string{"test", "test2"}), + expected: false, + }, + { + name: "array of nillable bytes", + v1: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.Some([]byte{4, 5, 6})}), + v2: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.Some([]byte{4, 5, 6})}), + expected: true, + }, + { + name: "array of nillable bytes not equal", + v1: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.Some([]byte{4, 5, 6})}), + v2: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.Some([]byte{4, 5, 7})}), + expected: false, + }, + { + name: "array of nillable bytes with nil", + v1: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.None[[]byte]()}), + v2: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.None[[]byte]()}), + expected: true, + }, + { + name: "array of nillable bytes different nil values", + v1: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.Some([]byte{4, 5, 6})}), + v2: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.None[[]byte]()}), + expected: false, + }, + { + name: "array of nillable bytes different length", + v1: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.Some([]byte{4, 5, 6})}), + v2: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})}), + expected: false, + }, + { + name: "array of nillable bytes different type", + v1: NewNormalNillableBytesArray( + []immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.Some([]byte{4, 5, 6})}), + v2: NewNormalBytesArray([][]byte{{1, 2, 3}, {4, 5, 6}}), + expected: false, + }, + { + name: "array of nillable time values", + v1: NewNormalNillableTimeArray( + []immutable.Option[time.Time]{immutable.Some(now), immutable.Some(now)}), + v2: NewNormalNillableTimeArray( + 
[]immutable.Option[time.Time]{immutable.Some(now), immutable.Some(now)}),
+			expected: true,
+		},
+		{
+			name: "array of nillable time values not equal",
+			v1: NewNormalNillableTimeArray(
+				[]immutable.Option[time.Time]{immutable.Some(now), immutable.Some(now)}),
+			v2: NewNormalNillableTimeArray(
+				[]immutable.Option[time.Time]{immutable.Some(now), immutable.Some(now.Add(time.Hour))}),
+			expected: false,
+		},
+		{
+			name: "array of nillable time values different lengths",
+			v1: NewNormalNillableTimeArray(
+				[]immutable.Option[time.Time]{immutable.Some(now), immutable.Some(now)}),
+			v2: NewNormalNillableTimeArray(
+				[]immutable.Option[time.Time]{immutable.Some(now)}),
+			expected: false,
+		},
+		{
+			name: "array of nillable time values different type",
+			v1: NewNormalNillableTimeArray(
+				[]immutable.Option[time.Time]{immutable.Some(now), immutable.Some(now)}),
+			v2: NewNormalTimeArray([]time.Time{now, now}),
+			expected: false,
+		},
+		{
+			name: "array of nillable time values with nil",
+			v1: NewNormalNillableTimeArray(
+				[]immutable.Option[time.Time]{immutable.Some(now), immutable.None[time.Time]()}),
+			v2: NewNormalNillableTimeArray(
+				[]immutable.Option[time.Time]{immutable.Some(now), immutable.None[time.Time]()}),
+			expected: true,
+		},
+		{
+			name: "array of nillable time values different nil vals",
+			v1: NewNormalNillableTimeArray(
+				[]immutable.Option[time.Time]{immutable.Some(now), immutable.Some(now)}),
+			v2: NewNormalNillableTimeArray(
+				[]immutable.Option[time.Time]{immutable.Some(now), immutable.None[time.Time]()}),
+			expected: false,
+		},
+		{
+			name: "document nillable array",
+			v1: NewNormalNillableDocumentArray(
+				[]immutable.Option[*Document]{immutable.Some(doc1), immutable.Some(doc2)}),
+			v2: NewNormalNillableDocumentArray(
+				[]immutable.Option[*Document]{immutable.Some(doc1), immutable.Some(doc2)}),
+			expected: true,
+		},
+		{
+			name: "document nillable array not equal",
+			v1: NewNormalNillableDocumentArray(
+				[]immutable.Option[*Document]{immutable.Some(doc1), immutable.Some(doc2)}),
+			v2: NewNormalNillableDocumentArray(
+				[]immutable.Option[*Document]{immutable.Some(doc1), immutable.Some(doc3)}),
+			expected: false,
+		},
+		{
+			name: "document nillable array different lengths",
+			v1: NewNormalNillableDocumentArray(
+				[]immutable.Option[*Document]{immutable.Some(doc1), immutable.Some(doc2)}),
+			v2: NewNormalNillableDocumentArray(
+				[]immutable.Option[*Document]{immutable.Some(doc1)}),
+			expected: false,
+		},
+		{
+			name: "document nillable array with nil",
+			v1: NewNormalNillableDocumentArray(
+				[]immutable.Option[*Document]{immutable.Some(doc1), immutable.None[*Document]()}),
+			v2: NewNormalNillableDocumentArray(
+				[]immutable.Option[*Document]{immutable.Some(doc1), immutable.None[*Document]()}),
+			expected: true,
+		},
+		// Nillable arrays
+		{
+			name: "bool nillable array equal",
+			v1: NewNormalBoolNillableArray(immutable.Some([]bool{true})),
+			v2: NewNormalBoolNillableArray(immutable.Some([]bool{true})),
+			expected: true,
+		},
+		{
+			name: "bool nillable array not equal",
+			v1: NewNormalBoolNillableArray(immutable.Some([]bool{true})),
+			v2: NewNormalBoolNillableArray(immutable.Some([]bool{false})),
+			expected: false,
+		},
+		{
+			name: "bool nillable array nil vs non-nil",
+
v1: NewNormalBoolNillableArray(immutable.Some([]bool{true})), + v2: NewNormalBoolNillableArray(immutable.None[[]bool]()), + expected: false, + }, + { + name: "bool nillable array different lengths", + v1: NewNormalBoolNillableArray(immutable.Some([]bool{true, false})), + v2: NewNormalBoolNillableArray(immutable.Some([]bool{true})), + expected: false, + }, + { + name: "bool nillable array nil array equal", + v1: NewNormalBoolNillableArray(immutable.None[[]bool]()), + v2: NewNormalBoolNillableArray(immutable.None[[]bool]()), + expected: true, + }, + { + name: "bool nillable array nil array not equal", + v1: NewNormalBoolNillableArray(immutable.None[[]bool]()), + v2: NewNormalBoolNillableArray(immutable.Some([]bool{true})), + expected: false, + }, + { + name: "int nillable array", + v1: NewNormalIntNillableArray(immutable.Some([]int64{1})), + v2: NewNormalIntNillableArray(immutable.Some([]int64{1})), + expected: true, + }, + { + name: "int nillable array not equal", + v1: NewNormalIntNillableArray(immutable.Some([]int64{1})), + v2: NewNormalIntNillableArray(immutable.Some([]int64{2})), + expected: false, + }, + { + name: "int nillable array nil vs non-nil", + v1: NewNormalIntNillableArray(immutable.Some([]int64{1})), + v2: NewNormalIntNillableArray(immutable.None[[]int64]()), + expected: false, + }, + { + name: "int nillable array different lengths", + v1: NewNormalIntNillableArray(immutable.Some([]int64{1, 2})), + v2: NewNormalIntNillableArray(immutable.Some([]int64{1})), + expected: false, + }, + { + name: "int nillable array nil array equal", + v1: NewNormalIntNillableArray(immutable.None[[]int64]()), + v2: NewNormalIntNillableArray(immutable.None[[]int64]()), + expected: true, + }, + { + name: "int nillable array nil array not equal", + v1: NewNormalIntNillableArray(immutable.None[[]int64]()), + v2: NewNormalIntNillableArray(immutable.Some([]int64{1})), + expected: false, + }, + { + name: "float nillable array equal", + v1: NewNormalFloatNillableArray(immutable.Some([]float64{1.0})), + v2: NewNormalFloatNillableArray(immutable.Some([]float64{1.0})), + expected: true, + }, + { + name: "float nillable array not equal", + v1: NewNormalFloatNillableArray(immutable.Some([]float64{1.0})), + v2: NewNormalFloatNillableArray(immutable.Some([]float64{2.0})), + expected: false, + }, + { + name: "float nillable array nil vs non-nil", + v1: NewNormalFloatNillableArray(immutable.Some([]float64{1.0})), + v2: NewNormalFloatNillableArray(immutable.None[[]float64]()), + expected: false, + }, + { + name: "float nillable array different lengths", + v1: NewNormalFloatNillableArray(immutable.Some([]float64{1.0, 2.0})), + v2: NewNormalFloatNillableArray(immutable.Some([]float64{1.0})), + expected: false, + }, + { + name: "float nillable array nil array equal", + v1: NewNormalFloatNillableArray(immutable.None[[]float64]()), + v2: NewNormalFloatNillableArray(immutable.None[[]float64]()), + expected: true, + }, + { + name: "float nillable array nil array not equal", + v1: NewNormalFloatNillableArray(immutable.None[[]float64]()), + v2: NewNormalFloatNillableArray(immutable.Some([]float64{1.0})), + expected: false, + }, + { + name: "string nillable array equal", + v1: NewNormalStringNillableArray(immutable.Some([]string{"test"})), + v2: NewNormalStringNillableArray(immutable.Some([]string{"test"})), + expected: true, + }, + { + name: "string nillable array not equal", + v1: NewNormalStringNillableArray(immutable.Some([]string{"test"})), + v2: NewNormalStringNillableArray(immutable.Some([]string{"different"})), + 
expected: false, + }, + { + name: "string nillable array nil vs non-nil", + v1: NewNormalStringNillableArray(immutable.Some([]string{"test"})), + v2: NewNormalStringNillableArray(immutable.None[[]string]()), + expected: false, + }, + { + name: "string nillable array different lengths", + v1: NewNormalStringNillableArray(immutable.Some([]string{"test", "another"})), + v2: NewNormalStringNillableArray(immutable.Some([]string{"test"})), + expected: false, + }, + { + name: "string nillable array empty strings", + v1: NewNormalStringNillableArray(immutable.Some([]string{"", ""})), + v2: NewNormalStringNillableArray(immutable.Some([]string{"", ""})), + expected: true, + }, + { + name: "bytes nillable array equal", + v1: NewNormalBytesNillableArray(immutable.Some([][]byte{{1, 2, 3}})), + v2: NewNormalBytesNillableArray(immutable.Some([][]byte{{1, 2, 3}})), + expected: true, + }, + { + name: "bytes nillable array not equal", + v1: NewNormalBytesNillableArray(immutable.Some([][]byte{{1, 2, 3}})), + v2: NewNormalBytesNillableArray(immutable.Some([][]byte{{1, 2, 4}})), + expected: false, + }, + { + name: "bytes nillable array nil vs non-nil", + v1: NewNormalBytesNillableArray(immutable.Some([][]byte{{1, 2, 3}})), + v2: NewNormalBytesNillableArray(immutable.None[[][]byte]()), + expected: false, + }, + { + name: "bytes nillable array different lengths", + v1: NewNormalBytesNillableArray(immutable.Some([][]byte{{1, 2, 3}, {4, 5, 6}})), + v2: NewNormalBytesNillableArray(immutable.Some([][]byte{{1, 2, 3}})), + expected: false, + }, + { + name: "bytes nillable array empty slices", + v1: NewNormalBytesNillableArray(immutable.Some([][]byte{{}, {}})), + v2: NewNormalBytesNillableArray(immutable.Some([][]byte{{}, {}})), + expected: true, + }, + { + name: "time nillable array", + v1: NewNormalTimeNillableArray(immutable.Some([]time.Time{now})), + v2: NewNormalTimeNillableArray(immutable.Some([]time.Time{now})), + expected: true, + }, + { + name: "time nillable array equal", + v1: NewNormalTimeNillableArray(immutable.Some([]time.Time{now})), + v2: NewNormalTimeNillableArray(immutable.Some([]time.Time{now})), + expected: true, + }, + { + name: "time nillable array not equal", + v1: NewNormalTimeNillableArray(immutable.Some([]time.Time{now})), + v2: NewNormalTimeNillableArray(immutable.Some([]time.Time{now.Add(time.Hour)})), + expected: false, + }, + { + name: "time nillable array nil vs non-nil", + v1: NewNormalTimeNillableArray(immutable.Some([]time.Time{now})), + v2: NewNormalTimeNillableArray(immutable.None[[]time.Time]()), + expected: false, + }, + { + name: "time nillable array different lengths", + v1: NewNormalTimeNillableArray(immutable.Some([]time.Time{now, now.Add(time.Hour)})), + v2: NewNormalTimeNillableArray(immutable.Some([]time.Time{now})), + expected: false, + }, + { + name: "time nillable array zero times", + v1: NewNormalTimeNillableArray(immutable.Some([]time.Time{{}, {}})), + v2: NewNormalTimeNillableArray(immutable.Some([]time.Time{{}, {}})), + expected: true, + }, + { + name: "document nillable array", + v1: NewNormalDocumentNillableArray(immutable.Some([]*Document{doc1})), + v2: NewNormalDocumentNillableArray(immutable.Some([]*Document{doc1})), + expected: true, + }, + { + name: "document nillable array equal", + v1: NewNormalDocumentNillableArray(immutable.Some([]*Document{doc1})), + v2: NewNormalDocumentNillableArray(immutable.Some([]*Document{doc1})), + expected: true, + }, + { + name: "document nillable array not equal", + v1: 
NewNormalDocumentNillableArray(immutable.Some([]*Document{doc1})), + v2: NewNormalDocumentNillableArray(immutable.Some([]*Document{doc2})), + expected: false, + }, + { + name: "document nillable array nil vs non-nil", + v1: NewNormalDocumentNillableArray(immutable.Some([]*Document{doc1})), + v2: NewNormalDocumentNillableArray(immutable.None[[]*Document]()), + expected: false, + }, + { + name: "document nillable array different lengths", + v1: NewNormalDocumentNillableArray(immutable.Some([]*Document{doc1, doc2})), + v2: NewNormalDocumentNillableArray(immutable.Some([]*Document{doc1})), + expected: false, + }, + { + name: "document nillable array with nil documents", + v1: NewNormalDocumentNillableArray(immutable.Some([]*Document{nil, nil})), + v2: NewNormalDocumentNillableArray(immutable.Some([]*Document{nil, nil})), + expected: true, + }, + // Nillable arrays of nillables + { + name: "nillable bool nillable array", + v1: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true)})), + v2: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true)})), + expected: true, + }, + { + name: "nillable bool nillable array equal", + v1: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true)})), + v2: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true)})), + expected: true, + }, + { + name: "nillable bool nillable array not equal", + v1: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true)})), + v2: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(false)})), + expected: false, + }, + { + name: "nillable bool nillable array outer nil", + v1: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true)})), + v2: NewNormalNillableBoolNillableArray(immutable.None[[]immutable.Option[bool]]()), + expected: false, + }, + { + name: "nillable bool nillable array inner nil", + v1: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true), immutable.None[bool]()})), + v2: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true), immutable.None[bool]()})), + expected: true, + }, + { + name: "nillable bool nillable array different type", + v1: NewNormalNillableBoolNillableArray( + immutable.Some([]immutable.Option[bool]{immutable.Some(true)})), + v2: NewNormalNillableBoolArray([]immutable.Option[bool]{immutable.Some(true)}), + expected: false, + }, + { + name: "nillable int nillable array", + v1: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1))})), + v2: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1))})), + expected: true, + }, + { + name: "nillable int nillable array equal", + v1: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1))})), + v2: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1))})), + expected: true, + }, + { + name: "nillable int nillable array not equal", + v1: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1))})), + v2: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(2))})), + expected: false, + }, + { + name: 
"nillable int nillable array outer nil", + v1: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1))})), + v2: NewNormalNillableIntNillableArray(immutable.None[[]immutable.Option[int64]]()), + expected: false, + }, + { + name: "nillable int nillable array inner nil", + v1: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1)), immutable.None[int64]()})), + v2: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1)), immutable.None[int64]()})), + expected: true, + }, + { + name: "nillable int nillable array different type", + v1: NewNormalNillableIntNillableArray( + immutable.Some([]immutable.Option[int64]{immutable.Some(int64(1))})), + v2: NewNormalNillableIntArray([]immutable.Option[int64]{immutable.Some(int64(1))}), + expected: false, + }, + { + name: "nillable float nillable array", + v1: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0)})), + v2: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0)})), + expected: true, + }, + { + name: "nillable float nillable array equal", + v1: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0)})), + v2: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0)})), + expected: true, + }, + { + name: "nillable float nillable array not equal", + v1: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0)})), + v2: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(2.0)})), + expected: false, + }, + { + name: "nillable float nillable array outer nil", + v1: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0)})), + v2: NewNormalNillableFloatNillableArray(immutable.None[[]immutable.Option[float64]]()), + expected: false, + }, + { + name: "nillable float nillable array inner nil", + v1: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0), immutable.None[float64]()})), + v2: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0), immutable.None[float64]()})), + expected: true, + }, + { + name: "nillable float nillable array different type", + v1: NewNormalNillableFloatNillableArray( + immutable.Some([]immutable.Option[float64]{immutable.Some(1.0)})), + v2: NewNormalNillableFloatArray([]immutable.Option[float64]{immutable.Some(1.0)}), + expected: false, + }, + { + name: "nillable string nillable array", + v1: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test")})), + v2: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test")})), + expected: true, + }, + { + name: "nillable string nillable array equal", + v1: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test")})), + v2: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test")})), + expected: true, + }, + { + name: "nillable string nillable array not equal", + v1: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test")})), + v2: NewNormalNillableStringNillableArray( + 
immutable.Some([]immutable.Option[string]{immutable.Some("different")})), + expected: false, + }, + { + name: "nillable string nillable array outer nil", + v1: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test")})), + v2: NewNormalNillableStringNillableArray(immutable.None[[]immutable.Option[string]]()), + expected: false, + }, + { + name: "nillable string nillable array inner nil", + v1: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test"), immutable.None[string]()})), + v2: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test"), immutable.None[string]()})), + expected: true, + }, + { + name: "nillable string nillable array different type", + v1: NewNormalNillableStringNillableArray( + immutable.Some([]immutable.Option[string]{immutable.Some("test")})), + v2: NewNormalNillableStringArray([]immutable.Option[string]{immutable.Some("test")}), + expected: false, + }, + { + name: "nillable bytes nillable array", + v1: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})})), + v2: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})})), + expected: true, + }, + { + name: "nillable bytes nillable array equal", + v1: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})})), + v2: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})})), + expected: true, + }, + { + name: "nillable bytes nillable array not equal", + v1: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})})), + v2: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 4})})), + expected: false, + }, + { + name: "nillable bytes nillable array outer nil", + v1: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})})), + v2: NewNormalNillableBytesNillableArray(immutable.None[[]immutable.Option[[]byte]]()), + expected: false, + }, + { + name: "nillable bytes nillable array inner nil", + v1: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.None[[]byte]()})), + v2: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3}), immutable.None[[]byte]()})), + expected: true, + }, + { + name: "nillable bytes nillable array different type", + v1: NewNormalNillableBytesNillableArray( + immutable.Some([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})})), + v2: NewNormalNillableBytesArray([]immutable.Option[[]byte]{immutable.Some([]byte{1, 2, 3})}), + expected: false, + }, + { + name: "nillable time nillable array", + v1: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now)})), + v2: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now)})), + expected: true, + }, + { + name: "nillable time nillable array equal", + v1: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now)})), + v2: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now)})), + expected: true, + }, + 
{ + name: "nillable time nillable array not equal", + v1: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now)})), + v2: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now.Add(time.Hour))})), + expected: false, + }, + { + name: "nillable time nillable array outer nil", + v1: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now)})), + v2: NewNormalNillableTimeNillableArray(immutable.None[[]immutable.Option[time.Time]]()), + expected: false, + }, + { + name: "nillable time nillable array inner nil", + v1: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now), immutable.None[time.Time]()})), + v2: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now), immutable.None[time.Time]()})), + expected: true, + }, + { + name: "nillable time nillable array different type", + v1: NewNormalNillableTimeNillableArray( + immutable.Some([]immutable.Option[time.Time]{immutable.Some(now)})), + v2: NewNormalNillableTimeArray([]immutable.Option[time.Time]{immutable.Some(now)}), + expected: false, + }, + { + name: "nillable document nillable array", + v1: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1)})), + v2: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1)})), + expected: true, + }, + { + name: "nillable document nillable array equal", + v1: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1)})), + v2: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1)})), + expected: true, + }, + { + name: "nillable document nillable array not equal", + v1: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1)})), + v2: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc2)})), + expected: false, + }, + { + name: "nillable document nillable array outer nil", + v1: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1)})), + v2: NewNormalNillableDocumentNillableArray(immutable.None[[]immutable.Option[*Document]]()), + expected: false, + }, + { + name: "nillable document nillable array inner nil", + v1: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1), immutable.None[*Document]()})), + v2: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1), immutable.None[*Document]()})), + expected: true, + }, + { + name: "nillable document nillable array different type", + v1: NewNormalNillableDocumentNillableArray( + immutable.Some([]immutable.Option[*Document]{immutable.Some(doc1)})), + v2: NewNormalNillableDocumentArray([]immutable.Option[*Document]{immutable.Some(doc1)}), + expected: false, + }, + } + + for _, tt := range tests { + tStr := string(tt.name) + t.Run(tStr, func(t *testing.T) { + actual := tt.v1.Equal(tt.v2) + assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/client/normal_void.go b/client/normal_void.go index e3e29b5094..3e13fe489d 100644 --- a/client/normal_void.go +++ b/client/normal_void.go @@ -24,6 +24,11 @@ func (NormalVoid) Unwrap() any { return nil } +func 
(NormalVoid) Equal(other NormalValue) bool { + _, ok := other.(NormalVoid) + return ok +} + func (NormalVoid) IsNil() bool { return false } diff --git a/client/schema_field_description.go b/client/schema_field_description.go index cc5690b72c..0619aebdf1 100644 --- a/client/schema_field_description.go +++ b/client/schema_field_description.go @@ -186,6 +186,29 @@ func (k ScalarArrayKind) IsArray() bool { return true } +func (k ScalarArrayKind) SubKind() FieldKind { + switch k { + case FieldKind_NILLABLE_BOOL_ARRAY: + return FieldKind_NILLABLE_BOOL + case FieldKind_BOOL_ARRAY: + return FieldKind_NILLABLE_BOOL + case FieldKind_NILLABLE_INT_ARRAY: + return FieldKind_NILLABLE_INT + case FieldKind_INT_ARRAY: + return FieldKind_NILLABLE_INT + case FieldKind_NILLABLE_FLOAT_ARRAY: + return FieldKind_NILLABLE_FLOAT + case FieldKind_FLOAT_ARRAY: + return FieldKind_NILLABLE_FLOAT + case FieldKind_NILLABLE_STRING_ARRAY: + return FieldKind_NILLABLE_STRING + case FieldKind_STRING_ARRAY: + return FieldKind_NILLABLE_STRING + default: + return FieldKind_None + } +} + func NewCollectionKind(root uint32, isArray bool) *CollectionKind { return &CollectionKind{ Root: root, diff --git a/client/schema_field_description_test.go b/client/schema_field_description_test.go new file mode 100644 index 0000000000..39dea4cf60 --- /dev/null +++ b/client/schema_field_description_test.go @@ -0,0 +1,77 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+
+package client
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestField_ScalarArray_HasSubKind(t *testing.T) {
+	tests := []struct {
+		name    string
+		arrKind ScalarArrayKind
+		subKind ScalarKind
+	}{
+		{
+			name:    "bool array",
+			arrKind: FieldKind_BOOL_ARRAY,
+			subKind: FieldKind_NILLABLE_BOOL,
+		},
+		{
+			name:    "int array",
+			arrKind: FieldKind_INT_ARRAY,
+			subKind: FieldKind_NILLABLE_INT,
+		},
+		{
+			name:    "float array",
+			arrKind: FieldKind_FLOAT_ARRAY,
+			subKind: FieldKind_NILLABLE_FLOAT,
+		},
+		{
+			name:    "string array",
+			arrKind: FieldKind_STRING_ARRAY,
+			subKind: FieldKind_NILLABLE_STRING,
+		},
+		{
+			name:    "nillable bool array",
+			arrKind: FieldKind_NILLABLE_BOOL_ARRAY,
+			subKind: FieldKind_NILLABLE_BOOL,
+		},
+		{
+			name:    "nillable int array",
+			arrKind: FieldKind_NILLABLE_INT_ARRAY,
+			subKind: FieldKind_NILLABLE_INT,
+		},
+		{
+			name:    "nillable float array",
+			arrKind: FieldKind_NILLABLE_FLOAT_ARRAY,
+			subKind: FieldKind_NILLABLE_FLOAT,
+		},
+		{
+			name:    "nillable string array",
+			arrKind: FieldKind_NILLABLE_STRING_ARRAY,
+			subKind: FieldKind_NILLABLE_STRING,
+		},
+		{
+			name:    "unknown array kind",
+			arrKind: ScalarArrayKind(0),
+			subKind: FieldKind_None,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			assert.Equal(t, tt.subKind, tt.arrKind.SubKind())
+		})
+	}
+}
diff --git a/internal/core/encoding.go b/internal/core/encoding.go
index 14dfb072b1..6e2e0e0dcb 100644
--- a/internal/core/encoding.go
+++ b/internal/core/encoding.go
@@ -254,6 +254,12 @@ func DecodeIndexDataStoreKey(
 		return IndexDataStoreKey{}, ErrInvalidKey
 	}
 
+	if kind != nil && kind.IsArray() {
+		if arrKind, ok := kind.(client.ScalarArrayKind); ok {
+			kind = arrKind.SubKind()
+		}
+	}
+
 	var val client.NormalValue
 	data, val, err = encoding.DecodeFieldValue(data, descending, kind)
 	if err != nil {
diff --git a/internal/core/key.go b/internal/core/key.go
index 0e7942411d..60601795b2 100644
--- a/internal/core/key.go
+++ b/internal/core/key.go
@@ -626,6 +626,25 @@ func (k *IndexDataStoreKey) ToString() string {
 	return string(k.Bytes())
 }
 
+// Equal returns true if the two keys are equal.
+func (k *IndexDataStoreKey) Equal(other IndexDataStoreKey) bool {
+	if k.CollectionID != other.CollectionID || k.IndexID != other.IndexID {
+		return false
+	}
+
+	if len(k.Fields) != len(other.Fields) {
+		return false
+	}
+
+	for i, field := range k.Fields {
+		if !field.Value.Equal(other.Fields[i].Value) || field.Descending != other.Fields[i].Descending {
+			return false
+		}
+	}
+
+	return true
+}
+
 func (k PrimaryDataStoreKey) ToDataStoreKey() DataStoreKey {
 	return DataStoreKey{
 		CollectionRootID: k.CollectionRootID,
diff --git a/internal/core/key_test.go b/internal/core/key_test.go
index b564fb001b..4cdb46b72d 100644
--- a/internal/core/key_test.go
+++ b/internal/core/key_test.go
@@ -323,3 +323,71 @@ func TestDecodeIndexDataStoreKey_InvalidKey(t *testing.T) {
 		})
 	}
 }
+
+func TestIndexDataStoreKey_IsEqual(t *testing.T) {
+	const colID, indexID = 1, 2
+
+	cases := []struct {
+		name        string
+		key1        IndexDataStoreKey
+		key2        IndexDataStoreKey
+		shouldMatch bool
+	}{
+		{
+			name:        "empty",
+			key1:        IndexDataStoreKey{},
+			key2:        IndexDataStoreKey{},
+			shouldMatch: true,
+		},
+		{
+			name:        "same",
+			key1:        NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalInt(5)}}),
+			key2:        NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalInt(5)}}),
+			shouldMatch: true,
+		},
+		{
+			name:        "different collection",
+			key1:        NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalInt(5)}}),
+			key2:        NewIndexDataStoreKey(colID+1, indexID, []IndexedField{{Value: client.NewNormalInt(5)}}),
+			shouldMatch: false,
+		},
+		{
+			name:        "different index",
+			key1:        NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalInt(5)}}),
+			key2:        NewIndexDataStoreKey(colID, indexID+1, []IndexedField{{Value: client.NewNormalInt(5)}}),
+			shouldMatch: false,
+		},
+		{
+			name:        "different field",
+			key1:        NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalInt(5)}}),
+			key2:        NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalInt(6)}}),
+			shouldMatch: false,
+		},
+		{
+			name: "different field count",
+			key1: NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalInt(5)}}),
+			key2: NewIndexDataStoreKey(colID, indexID,
+				[]IndexedField{{Value: client.NewNormalInt(5)}, {Value: client.NewNormalInt(6)}}),
+			shouldMatch: false,
+		},
+		{
+			name:        "different field type",
+			key1:        NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalInt(5)}}),
+			key2:        NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalString("5")}}),
+			shouldMatch: false,
+		},
+		{
+			name: "different field descending",
+			key1: NewIndexDataStoreKey(colID, indexID, []IndexedField{{Value: client.NewNormalInt(5)}}),
+			key2: NewIndexDataStoreKey(colID, indexID,
+				[]IndexedField{{Value: client.NewNormalInt(5), Descending: true}}),
+			shouldMatch: false,
+		},
+	}
+	for _, c := range cases {
+		t.Run(c.name, func(t *testing.T) {
+			res := c.key1.Equal(c.key2)
+			assert.Equal(t, c.shouldMatch, res, c.name)
+		})
+	}
+}
diff --git a/internal/db/collection.go b/internal/db/collection.go
index 785c96641e..8f78e51429 100644
--- a/internal/db/collection.go
+++ b/internal/db/collection.go
@@ -852,6 +852,11 @@ func (c *collection) Delete(
 
 	primaryKey := c.getPrimaryKeyFromDocID(docID)
 
+	err = c.deleteIndexedDocWithID(ctx, docID)
+	if err != nil {
+		return false, err
+	}
+
 	err = c.applyDelete(ctx, primaryKey)
 	if err != nil {
 		return false, err
diff --git a/internal/db/collection_index.go b/internal/db/collection_index.go
index c606cc45b7..a0786eb8c8 100644
--- a/internal/db/collection_index.go
+++ b/internal/db/collection_index.go
@@ -181,6 +181,25 @@ func (c *collection) deleteIndexedDoc(
 	return nil
 }
 
+// deleteIndexedDocWithID deletes an indexed document with the provided document ID.
+func (c *collection) deleteIndexedDocWithID(
+	ctx context.Context,
+	docID client.DocID,
+) error {
+	// We need to fetch the document to delete it from the indexes, because in order to do so
+	// we need to know the values of the fields that are indexed.
+	doc, err := c.get(
+		ctx,
+		c.getPrimaryKeyFromDocID(docID),
+		c.Definition().CollectIndexedFields(),
+		false,
+	)
+	if err != nil {
+		return err
+	}
+	return c.deleteIndexedDoc(ctx, doc)
+}
+
 // CreateIndex creates a new index on the collection.
 //
 // If the index name is empty, a name will be automatically generated.
diff --git a/internal/db/fetcher/errors.go b/internal/db/fetcher/errors.go
index 2a2967bbdb..22f0c8b182 100644
--- a/internal/db/fetcher/errors.go
+++ b/internal/db/fetcher/errors.go
@@ -13,6 +13,7 @@ package fetcher
 import (
 	"fmt"
 
+	"github.com/sourcenetwork/defradb/client"
 	"github.com/sourcenetwork/defradb/errors"
 )
 
@@ -30,6 +31,7 @@ const (
 	errMissingMapper           string = "missing document mapper"
 	errInvalidInOperatorValue  string = "invalid _in/_nin value"
 	errInvalidFilterOperator   string = "invalid filter operator is provided"
+	errNotSupportedKindByIndex string = "kind is not supported by index"
 	errUnexpectedTypeValue     string = "unexpected type value"
 )
 
@@ -107,6 +109,11 @@ func NewErrInvalidFilterOperator(operator string) error {
 	return errors.New(errInvalidFilterOperator, errors.NewKV("Operator", operator))
 }
 
+// NewErrNotSupportedKindByIndex returns an error indicating that the given kind is not supported by index.
+func NewErrNotSupportedKindByIndex(kind client.FieldKind) error {
+	return errors.New(errNotSupportedKindByIndex, errors.NewKV("Kind", kind.String()))
+}
+
 // NewErrUnexpectedTypeValue returns an error indicating that the given value is of an unexpected type.
 func NewErrUnexpectedTypeValue[T any](value any) error {
 	var t T
diff --git a/internal/db/fetcher/indexer.go b/internal/db/fetcher/indexer.go
index 7eb8f5b117..4d370146ed 100644
--- a/internal/db/fetcher/indexer.go
+++ b/internal/db/fetcher/indexer.go
@@ -85,7 +85,10 @@ func (f *IndexFetcher) Init(
 outer:
 	for i := range fields {
 		for j := range f.indexedFields {
-			if fields[i].Name == f.indexedFields[j].Name {
+			// If the field is an array, we want to keep it for the document fetcher as well,
+			// because the index contains only a single array element, not the whole array.
+			// The doc fetcher will fetch the whole array for us.
+			if fields[i].Name == f.indexedFields[j].Name && !fields[i].Kind.IsArray() {
 				continue outer
 			}
 		}
@@ -156,6 +159,12 @@ func (f *IndexFetcher) FetchNext(ctx context.Context) (EncodedDocument, ExecInfo
 			hasNilField = true
 		}
 
+		// The index fetches only a single array element, so we skip the field here
+		// and let the doc fetcher fetch the whole array.
+		if indexedField.Kind.IsArray() {
+			continue
+		}
+
 		// We need to convert it to cbor bytes as this is what it will be encoded from on value retrieval.
 		// In the future we have to either get rid of CBOR or properly handle different encoding
 		// for properties in a single document.
diff --git a/internal/db/fetcher/indexer_iterators.go b/internal/db/fetcher/indexer_iterators.go
index e4e69c99c3..ecf964185d 100644
--- a/internal/db/fetcher/indexer_iterators.go
+++ b/internal/db/fetcher/indexer_iterators.go
@@ -29,24 +29,27 @@ import (
 )
 
 const (
-	opEq     = "_eq"
-	opGt     = "_gt"
-	opGe     = "_ge"
-	opLt     = "_lt"
-	opLe     = "_le"
-	opNe     = "_ne"
-	opIn     = "_in"
-	opNin    = "_nin"
-	opLike   = "_like"
-	opNlike  = "_nlike"
-	opILike  = "_ilike"
-	opNILike = "_nilike"
+	opEq       = "_eq"
+	opGt       = "_gt"
+	opGe       = "_ge"
+	opLt       = "_lt"
+	opLe       = "_le"
+	opNe       = "_ne"
+	opIn       = "_in"
+	opNin      = "_nin"
+	opLike     = "_like"
+	opNlike    = "_nlike"
+	opILike    = "_ilike"
+	opNILike   = "_nilike"
+	compOpAny  = "_any"
+	compOpAll  = "_all"
+	compOpNone = "_none"
 	// it's just there for composite indexes. We construct a slice of value matchers with
 	// every matcher being responsible for a corresponding field in the index to match.
 	// For some fields there might not be any criteria to match. For examples if you have
 	// composite index of /name/age/email/ and in the filter you specify only "name" and "email".
- // Then the "_any" matcher will be used for "age". - opAny = "_any" + // Then the "__any" matcher will be used for "age". + opAny = "__any" ) // indexIterator is an iterator over index keys. @@ -155,6 +158,8 @@ type eqSingleIndexIterator struct { store datastore.DSReaderWriter } +var _ indexIterator = (*eqSingleIndexIterator)(nil) + func (iter *eqSingleIndexIterator) Init(ctx context.Context, store datastore.DSReaderWriter) error { iter.ctx = ctx iter.store = store @@ -177,7 +182,7 @@ func (iter *eqSingleIndexIterator) Next() (indexIterResult, error) { return indexIterResult{key: iter.indexKey, value: val, foundKey: true}, nil } -func (i *eqSingleIndexIterator) Close() error { +func (iter *eqSingleIndexIterator) Close() error { return nil } @@ -190,6 +195,8 @@ type inIndexIterator struct { hasIterator bool } +var _ indexIterator = (*inIndexIterator)(nil) + func (iter *inIndexIterator) nextIterator() (bool, error) { if iter.nextValIndex > 0 { err := iter.indexIterator.Close() @@ -246,6 +253,58 @@ func (iter *inIndexIterator) Close() error { return nil } +// arrayIndexIterator is an iterator indexed array elements. +// It keeps track of the already fetched documents to avoid duplicates. +type arrayIndexIterator struct { + inner indexIterator + + fetchedDocs map[string]struct{} + + ctx context.Context + store datastore.DSReaderWriter +} + +var _ indexIterator = (*arrayIndexIterator)(nil) + +func (iter *arrayIndexIterator) Init(ctx context.Context, store datastore.DSReaderWriter) error { + iter.ctx = ctx + iter.store = store + iter.fetchedDocs = make(map[string]struct{}) + return iter.inner.Init(ctx, store) +} + +func (iter *arrayIndexIterator) Next() (indexIterResult, error) { + for { + res, err := iter.inner.Next() + if err != nil { + return indexIterResult{}, err + } + if !res.foundKey { + return res, nil + } + var docID string + if len(res.value) > 0 { + docID = string(res.value) + } else { + lastField := &res.key.Fields[len(res.key.Fields)-1] + var ok bool + docID, ok = lastField.Value.String() + if !ok { + return indexIterResult{}, NewErrUnexpectedTypeValue[string](lastField.Value) + } + } + if _, ok := iter.fetchedDocs[docID]; ok { + continue + } + iter.fetchedDocs[docID] = struct{}{} + return res, nil + } +} + +func (iter *arrayIndexIterator) Close() error { + return iter.inner.Close() +} + func executeValueMatchers(matchers []valueMatcher, fields []core.IndexedField) (bool, error) { for i := range matchers { res, err := matchers[i].Match(fields[i].Value) @@ -349,6 +408,31 @@ func (m *timeMatcher) Match(value client.NormalValue) (bool, error) { return false, NewErrInvalidFilterOperator(m.op) } +type boolMatcher struct { + value bool + isEq bool +} + +func (m *boolMatcher) Match(value client.NormalValue) (bool, error) { + boolVal, ok := value.Bool() + if !ok { + if boolOptVal, ok := value.NillableBool(); ok { + boolVal = boolOptVal.Value() + } else { + intVal, ok := value.Int() + if !ok { + if intOptVal, ok := value.NillableInt(); ok { + intVal = intOptVal.Value() + } else { + return false, NewErrUnexpectedTypeValue[bool](value) + } + } + boolVal = intVal != 0 + } + } + return boolVal == m.value == m.isEq, nil +} + type nilMatcher struct { matchNil bool } @@ -449,15 +533,29 @@ type anyMatcher struct{} func (m *anyMatcher) Match(client.NormalValue) (bool, error) { return true, nil } -// newPrefixIndexIterator creates a new eqPrefixIndexIterator for fetching indexed data. +// invertedMatcher inverts the result of the inner matcher. 
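+// It is used to implement the _none array operator: createIndexIterator wraps a
+// field's matcher in an invertedMatcher when the condition's arrOp is compOpNone,
+// so an index entry matches only if the inner matcher rejects it.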
+type invertedMatcher struct { + matcher valueMatcher +} + +func (m *invertedMatcher) Match(val client.NormalValue) (bool, error) { + res, err := m.matcher.Match(val) + if err != nil { + return false, err + } + return !res, nil +} + +// newPrefixIteratorFromConditions creates a new eqPrefixIndexIterator for fetching indexed data. // It can modify the input matchers slice. -func (f *IndexFetcher) newPrefixIndexIterator( +func (f *IndexFetcher) newPrefixIteratorFromConditions( fieldConditions []fieldFilterCond, matchers []valueMatcher, ) (*indexPrefixIterator, error) { keyFieldValues := make([]client.NormalValue, 0, len(fieldConditions)) for i := range fieldConditions { - if fieldConditions[i].op != opEq { + c := &fieldConditions[i] + if c.op != opEq || c.arrOp == compOpNone { // prefix can be created only for subsequent _eq conditions // if we encounter any other condition, we built the longest prefix we could break @@ -474,10 +572,10 @@ func (f *IndexFetcher) newPrefixIndexIterator( key := f.newIndexDataStoreKeyWithValues(keyFieldValues) - return f.newQueryResultIterator(key, matchers, &f.execInfo), nil + return f.newPrefixIterator(key, matchers, &f.execInfo), nil } -func (f *IndexFetcher) newQueryResultIterator( +func (f *IndexFetcher) newPrefixIterator( indexKey core.IndexDataStoreKey, matchers []valueMatcher, execInfo *ExecInfo, @@ -528,7 +626,7 @@ func (f *IndexFetcher) newInIndexIterator( indexKey := f.newIndexDataStoreKey() indexKey.Fields = []core.IndexedField{{Descending: f.indexDesc.Fields[0].Descending}} - iter = f.newQueryResultIterator(indexKey, matchers, &f.execInfo) + iter = f.newPrefixIterator(indexKey, matchers, &f.execInfo) } return &inIndexIterator{ indexIterator: iter, @@ -566,8 +664,19 @@ func (f *IndexFetcher) createIndexIterator() (indexIterator, error) { return nil, err } - switch fieldConditions[0].op { - case opEq: + hasArray := false + for i := range fieldConditions { + if len(fieldConditions[i].arrOp) > 0 { + hasArray = true + if fieldConditions[i].arrOp == compOpNone { + matchers[i] = &invertedMatcher{matcher: matchers[i]} + } + } + } + + var iter indexIterator + + if fieldConditions[0].op == opEq { if isUniqueFetchByFullKey(&f.indexDesc, fieldConditions) { keyFieldValues := make([]client.NormalValue, len(fieldConditions)) for i := range fieldConditions { @@ -575,21 +684,29 @@ func (f *IndexFetcher) createIndexIterator() (indexIterator, error) { } key := f.newIndexDataStoreKeyWithValues(keyFieldValues) - - return &eqSingleIndexIterator{ - indexKey: key, - execInfo: &f.execInfo, - }, nil + iter = &eqSingleIndexIterator{indexKey: key, execInfo: &f.execInfo} } else { - return f.newPrefixIndexIterator(fieldConditions, matchers) + iter, err = f.newPrefixIteratorFromConditions(fieldConditions, matchers) } - case opIn: - return f.newInIndexIterator(fieldConditions, matchers) - case opGt, opGe, opLt, opLe, opNe, opNin, opLike, opNlike, opILike, opNILike: - return f.newQueryResultIterator(f.newIndexDataStoreKey(), matchers, &f.execInfo), nil + } else if fieldConditions[0].op == opIn && fieldConditions[0].arrOp != compOpNone { + iter, err = f.newInIndexIterator(fieldConditions, matchers) + } else { + iter, err = f.newPrefixIterator(f.newIndexDataStoreKey(), matchers, &f.execInfo), nil + } + + if err != nil { + return nil, err + } + + if iter == nil { + return nil, NewErrInvalidFilterOperator(fieldConditions[0].op) + } + + if hasArray { + iter = &arrayIndexIterator{inner: iter} } - return nil, NewErrInvalidFilterOperator(fieldConditions[0].op) + return iter, nil } func 
createValueMatcher(condition *fieldFilterCond) (valueMatcher, error) {
@@ -606,26 +723,17 @@ func createValueMatcher(condition *fieldFilterCond) (valueMatcher, error) {
 		if v, ok := condition.val.Int(); ok {
 			return &intMatcher{value: v, evalFunc: getCompareValsFunc[int64](condition.op)}, nil
 		}
-		if v, ok := condition.val.NillableInt(); ok {
-			return &intMatcher{value: v.Value(), evalFunc: getCompareValsFunc[int64](condition.op)}, nil
-		}
 		if v, ok := condition.val.Float(); ok {
 			return &floatMatcher{value: v, evalFunc: getCompareValsFunc[float64](condition.op)}, nil
 		}
-		if v, ok := condition.val.NillableFloat(); ok {
-			return &floatMatcher{value: v.Value(), evalFunc: getCompareValsFunc[float64](condition.op)}, nil
-		}
 		if v, ok := condition.val.String(); ok {
 			return &stringMatcher{value: v, evalFunc: getCompareValsFunc[string](condition.op)}, nil
 		}
-		if v, ok := condition.val.NillableString(); ok {
-			return &stringMatcher{value: v.Value(), evalFunc: getCompareValsFunc[string](condition.op)}, nil
-		}
 		if v, ok := condition.val.Time(); ok {
 			return &timeMatcher{value: v, op: condition.op}, nil
 		}
-		if v, ok := condition.val.NillableTime(); ok {
-			return &timeMatcher{value: v.Value(), op: condition.op}, nil
+		if v, ok := condition.val.Bool(); ok {
+			return &boolMatcher{value: v, isEq: condition.op == opEq}, nil
 		}
 	case opIn, opNin:
 		inVals, err := client.ToArrayOfNormalValues(condition.val)
@@ -665,9 +773,10 @@ func createValueMatchers(conditions []fieldFilterCond) ([]valueMatcher, error) {
 }
 
 type fieldFilterCond struct {
-	op   string
-	val  client.NormalValue
-	kind client.FieldKind
+	op    string
+	arrOp string
+	val   client.NormalValue
+	kind  client.FieldKind
 }
 
 // determineFieldFilterConditions determines the conditions and their corresponding operation
@@ -689,22 +798,36 @@ func (f *IndexFetcher) determineFieldFilterConditions() ([]fieldFilterCond, erro
 			condMap := indexFilterCond.(map[connor.FilterKey]any)
 			for key, filterVal := range condMap {
-				opKey := key.(*mapper.Operator)
-				var normalVal client.NormalValue
+				cond := fieldFilterCond{
+					op:   key.(*mapper.Operator).Operation,
+					kind: f.indexedFields[i].Kind,
+				}
+
 				var err error
 				if filterVal == nil {
-					normalVal, err = client.NewNormalNil(f.indexedFields[i].Kind)
+					cond.val, err = client.NewNormalNil(cond.kind)
+				} else if !f.indexedFields[i].Kind.IsArray() {
+					cond.val, err = client.NewNormalValue(filterVal)
 				} else {
-					normalVal, err = client.NewNormalValue(filterVal)
+					subCondMap := filterVal.(map[connor.FilterKey]any)
+					for subKey, subVal := range subCondMap {
+						arrKind := cond.kind.(client.ScalarArrayKind)
+						if subVal == nil {
+							cond.val, err = client.NewNormalNil(arrKind.SubKind())
+						} else {
+							cond.val, err = client.NewNormalValue(subVal)
+						}
+						cond.arrOp = cond.op
+						cond.op = subKey.(*mapper.Operator).Operation
+						// the sub-condition map is expected to contain exactly one entry
+						break
+					}
 				}
+
 				if err != nil {
 					return nil, err
 				}
-				result = append(result, fieldFilterCond{
-					op:   opKey.Operation,
-					val:  normalVal,
-					kind: f.indexedFields[i].Kind,
-				})
+				result = append(result, cond)
 				break
 			}
 			break
diff --git a/internal/db/index.go b/internal/db/index.go
index bd11e9f94b..c3860dca5a 100644
--- a/internal/db/index.go
+++ b/internal/db/index.go
@@ -12,13 +12,12 @@ package db
 
 import (
 	"context"
-	"time"
 
 	"github.com/sourcenetwork/defradb/client"
 	"github.com/sourcenetwork/defradb/datastore"
 	"github.com/sourcenetwork/defradb/errors"
 	"github.com/sourcenetwork/defradb/internal/core"
-	"github.com/sourcenetwork/defradb/internal/request/graphql/schema/types"
+	
"github.com/sourcenetwork/defradb/internal/utils/slice" ) // CollectionIndex is an interface for collection indexes @@ -30,55 +29,34 @@ type CollectionIndex interface { RemoveAll(context.Context, datastore.Txn) error } -func canConvertIndexFieldValue[T any](val any) bool { - _, ok := val.(T) - return ok -} - -func getValidateIndexFieldFunc(kind client.FieldKind) func(any) bool { +func isSupportedKind(kind client.FieldKind) bool { if kind.IsObject() && !kind.IsArray() { - return canConvertIndexFieldValue[string] + return true } switch kind { - case client.FieldKind_NILLABLE_STRING, client.FieldKind_DocID: - return canConvertIndexFieldValue[string] - case client.FieldKind_NILLABLE_INT: - return canConvertIndexFieldValue[int64] - case client.FieldKind_NILLABLE_FLOAT: - return canConvertIndexFieldValue[float64] - case client.FieldKind_NILLABLE_BOOL: - return canConvertIndexFieldValue[bool] - case client.FieldKind_NILLABLE_BLOB: - return func(val any) bool { - blobStrVal, ok := val.(string) - if !ok { - return false - } - return types.BlobPattern.MatchString(blobStrVal) - } - case client.FieldKind_NILLABLE_DATETIME: - return func(val any) bool { - timeStrVal, ok := val.(string) - if !ok { - return false - } - _, err := time.Parse(time.RFC3339, timeStrVal) - return err == nil - } + case + client.FieldKind_DocID, + client.FieldKind_STRING_ARRAY, + client.FieldKind_INT_ARRAY, + client.FieldKind_BOOL_ARRAY, + client.FieldKind_FLOAT_ARRAY, + client.FieldKind_NILLABLE_STRING, + client.FieldKind_NILLABLE_INT, + client.FieldKind_NILLABLE_FLOAT, + client.FieldKind_NILLABLE_BOOL, + client.FieldKind_NILLABLE_BLOB, + client.FieldKind_NILLABLE_DATETIME, + client.FieldKind_NILLABLE_BOOL_ARRAY, + client.FieldKind_NILLABLE_INT_ARRAY, + client.FieldKind_NILLABLE_FLOAT_ARRAY, + client.FieldKind_NILLABLE_STRING_ARRAY: + return true default: - return nil + return false } } -func getFieldValidateFunc(kind client.FieldKind) (func(any) bool, error) { - validateFunc := getValidateIndexFieldFunc(kind) - if validateFunc == nil { - return nil, NewErrUnsupportedIndexFieldType(kind) - } - return validateFunc, nil -} - // NewCollectionIndex creates a new collection index func NewCollectionIndex( collection client.Collection, @@ -88,21 +66,26 @@ func NewCollectionIndex( return nil, NewErrIndexDescHasNoFields(desc) } base := collectionBaseIndex{collection: collection, desc: desc} - base.validateFieldFuncs = make([]func(any) bool, len(desc.Fields)) base.fieldsDescs = make([]client.SchemaFieldDescription, len(desc.Fields)) + isArray := false for i := range desc.Fields { field, foundField := collection.Schema().GetFieldByName(desc.Fields[i].Name) if !foundField { return nil, client.NewErrFieldNotExist(desc.Fields[i].Name) } base.fieldsDescs[i] = field - validateFunc, err := getFieldValidateFunc(field.Kind) - if err != nil { - return nil, err + if !isSupportedKind(field.Kind) { + return nil, NewErrUnsupportedIndexFieldType(field.Kind) } - base.validateFieldFuncs[i] = validateFunc + isArray = isArray || field.Kind.IsArray() } - if desc.Unique { + if isArray { + if desc.Unique { + return newCollectionArrayUniqueIndex(base), nil + } else { + return newCollectionArrayIndex(base), nil + } + } else if desc.Unique { return &collectionUniqueIndex{collectionBaseIndex: base}, nil } else { return &collectionSimpleIndex{collectionBaseIndex: base}, nil @@ -110,10 +93,9 @@ func NewCollectionIndex( } type collectionBaseIndex struct { - collection client.Collection - desc client.IndexDescription - validateFieldFuncs []func(any) bool - 
fieldsDescs []client.SchemaFieldDescription + collection client.Collection + desc client.IndexDescription + fieldsDescs []client.SchemaFieldDescription } func (index *collectionBaseIndex) getDocFieldValues(doc *client.Document) ([]client.NormalValue, error) { @@ -138,6 +120,7 @@ func (index *collectionBaseIndex) getDocFieldValues(doc *client.Document) ([]cli func (index *collectionBaseIndex) getDocumentsIndexKey( doc *client.Document, + appendDocID bool, ) (core.IndexDataStoreKey, error) { fieldValues, err := index.getDocFieldValues(doc) if err != nil { @@ -149,6 +132,10 @@ func (index *collectionBaseIndex) getDocumentsIndexKey( fields[i].Value = fieldValues[i] fields[i].Descending = index.desc.Fields[i].Descending } + + if appendDocID { + fields = append(fields, core.IndexedField{Value: client.NewNormalString(doc.ID().String())}) + } return core.NewIndexDataStoreKey(index.collection.ID(), index.desc.ID, fields), nil } @@ -210,13 +197,8 @@ var _ CollectionIndex = (*collectionSimpleIndex)(nil) func (index *collectionSimpleIndex) getDocumentsIndexKey( doc *client.Document, ) (core.IndexDataStoreKey, error) { - key, err := index.collectionBaseIndex.getDocumentsIndexKey(doc) - if err != nil { - return core.IndexDataStoreKey{}, err - } - - key.Fields = append(key.Fields, core.IndexedField{Value: client.NewNormalString(doc.ID().String())}) - return key, nil + // docID is appended, as it's part of the key for non-unique indexes + return index.collectionBaseIndex.getDocumentsIndexKey(doc, true) } // Save indexes a document by storing the indexed field value. @@ -303,19 +285,17 @@ func (index *collectionUniqueIndex) Save( txn datastore.Txn, doc *client.Document, ) error { - key, val, err := index.prepareIndexRecordToStore(ctx, txn, doc) + key, val, err := index.prepareUniqueIndexRecordToStore(ctx, txn, doc) if err != nil { return err } return index.save(ctx, txn, &key, val) } -func (index *collectionUniqueIndex) newUniqueIndexError( - doc *client.Document, -) error { - kvs := make([]errors.KV, 0, len(index.fieldsDescs)) - for iter := range index.fieldsDescs { - fieldVal, err := doc.TryGetValue(index.fieldsDescs[iter].Name) +func newUniqueIndexError(doc *client.Document, fieldsDescs []client.SchemaFieldDescription) error { + kvs := make([]errors.KV, 0, len(fieldsDescs)) + for iter := range fieldsDescs { + fieldVal, err := doc.TryGetValue(fieldsDescs[iter].Name) var val any if err != nil { return err @@ -324,19 +304,26 @@ func (index *collectionUniqueIndex) newUniqueIndexError( if fieldVal != nil { val = fieldVal.Value() } - kvs = append(kvs, errors.NewKV(index.fieldsDescs[iter].Name, val)) + kvs = append(kvs, errors.NewKV(fieldsDescs[iter].Name, val)) } return NewErrCanNotIndexNonUniqueFields(doc.ID().String(), kvs...) 
} -func (index *collectionUniqueIndex) getDocumentsIndexRecord( +func (index *collectionBaseIndex) getDocumentsUniqueIndexRecord( doc *client.Document, ) (core.IndexDataStoreKey, []byte, error) { - key, err := index.getDocumentsIndexKey(doc) + key, err := index.getDocumentsIndexKey(doc, false) if err != nil { return core.IndexDataStoreKey{}, nil, err } + return makeUniqueKeyValueRecord(key, doc) +} + +func makeUniqueKeyValueRecord( + key core.IndexDataStoreKey, + doc *client.Document, +) (core.IndexDataStoreKey, []byte, error) { if hasIndexKeyNilField(&key) { key.Fields = append(key.Fields, core.IndexedField{Value: client.NewNormalString(doc.ID().String())}) return key, []byte{}, nil @@ -345,26 +332,36 @@ func (index *collectionUniqueIndex) getDocumentsIndexRecord( } } -func (index *collectionUniqueIndex) prepareIndexRecordToStore( +func (index *collectionUniqueIndex) prepareUniqueIndexRecordToStore( ctx context.Context, txn datastore.Txn, doc *client.Document, ) (core.IndexDataStoreKey, []byte, error) { - key, val, err := index.getDocumentsIndexRecord(doc) + key, val, err := index.getDocumentsUniqueIndexRecord(doc) if err != nil { return core.IndexDataStoreKey{}, nil, err } + return key, val, validateUniqueKeyValue(ctx, txn, key, val, doc, index.fieldsDescs) +} + +func validateUniqueKeyValue( + ctx context.Context, + txn datastore.Txn, + key core.IndexDataStoreKey, + val []byte, + doc *client.Document, + fieldsDescs []client.SchemaFieldDescription, +) error { if len(val) != 0 { - var exists bool - exists, err = txn.Datastore().Has(ctx, key.ToDS()) + exists, err := txn.Datastore().Has(ctx, key.ToDS()) if err != nil { - return core.IndexDataStoreKey{}, nil, err + return err } if exists { - return core.IndexDataStoreKey{}, nil, index.newUniqueIndexError(doc) + return newUniqueIndexError(doc, fieldsDescs) } } - return key, val, nil + return nil } func (index *collectionUniqueIndex) Delete( @@ -386,7 +383,7 @@ func (index *collectionUniqueIndex) Update( if !isUpdatingIndexedFields(index, oldDoc, newDoc) { return nil } - newKey, newVal, err := index.prepareIndexRecordToStore(ctx, txn, newDoc) + newKey, newVal, err := index.prepareUniqueIndexRecordToStore(ctx, txn, newDoc) if err != nil { return err } @@ -402,7 +399,7 @@ func (index *collectionUniqueIndex) deleteDocIndex( txn datastore.Txn, doc *client.Document, ) error { - key, _, err := index.getDocumentsIndexRecord(doc) + key, _, err := index.getDocumentsUniqueIndexRecord(doc) if err != nil { return err } @@ -430,3 +427,319 @@ func isUpdatingIndexedFields(index CollectionIndex, oldDoc, newDoc *client.Docum } return false } + +type collectionArrayBaseIndex struct { + collectionBaseIndex + arrFieldsIndexes []int +} + +func newCollectionArrayBaseIndex(base collectionBaseIndex) collectionArrayBaseIndex { + ind := collectionArrayBaseIndex{collectionBaseIndex: base} + for i := range base.fieldsDescs { + if base.fieldsDescs[i].Kind.IsArray() { + ind.arrFieldsIndexes = append(ind.arrFieldsIndexes, i) + } + } + if len(ind.arrFieldsIndexes) == 0 { + return collectionArrayBaseIndex{} + } + return ind +} + +// newIndexKeyGenerator creates a function that generates index keys for a document +// with multiple array fields. +// All generated keys are unique. 
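+// Duplicate elements are removed from each array field before combining, which is
+// why the repeated "a" in the example below produces only one set of keys.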
+// For example, for a doc with these values {{"a", "b", "a"}, {"c", "d", "e"}, {"f", "g"}}, it generates:
+// "acf", "acg", "adf", "adg", "aef", "aeg", "bcf", "bcg", "bdf", "bdg", "bef", "beg"
+// Note: the example is simplified and doesn't include field separation
+func (index *collectionArrayBaseIndex) newIndexKeyGenerator(
+	doc *client.Document,
+	appendDocID bool,
+) (func() (core.IndexDataStoreKey, bool), error) {
+	key, err := index.getDocumentsIndexKey(doc, appendDocID)
+	if err != nil {
+		return nil, err
+	}
+
+	// Collect unique values to use as source for generating keys
+	normValsArr := make([][]client.NormalValue, 0, len(index.arrFieldsIndexes))
+	for _, arrFieldIndex := range index.arrFieldsIndexes {
+		arrVal := key.Fields[arrFieldIndex].Value
+		normVals, err := client.ToArrayOfNormalValues(arrVal)
+		if err != nil {
+			return nil, err
+		}
+		normValsArr = append(normValsArr, slice.RemoveDuplicates(normVals))
+	}
+
+	// arrFieldCounter keeps track of indexes into arrays of normal values
+	arrFieldCounter := make([]int, len(index.arrFieldsIndexes))
+	done := false
+
+	// This function generates the next key by iterating through all possible combinations.
+	// It works pretty much like a digital clock that first iterates through seconds, then minutes, etc.
+	return func() (core.IndexDataStoreKey, bool) {
+		if done {
+			return core.IndexDataStoreKey{}, false
+		}
+
+		resultKey := core.IndexDataStoreKey{
+			CollectionID: key.CollectionID,
+			IndexID:      key.IndexID,
+			Fields:       make([]core.IndexedField, len(key.Fields)),
+		}
+		copy(resultKey.Fields, key.Fields)
+
+		// Use the current counters to pick an element for each array field in the key
+		for i, counter := range arrFieldCounter {
+			field := &resultKey.Fields[index.arrFieldsIndexes[i]]
+			field.Value = normValsArr[i][counter]
+		}
+
+		// iterate in reverse order so that we exhaust all combinations for the last field first,
+		// {"f", "g"} in the example above. This way we guarantee that the order of generated keys
+		// is from left to right, "acf" -> "acg" -> "adf" -> "adg" -> ...
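+		// e.g. for element counts {2, 3, 2} the counters advance
+		// [0 0 0] -> [0 0 1] -> [0 1 0] -> [0 1 1] -> [0 2 0] -> ... -> [1 2 1] -> done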
+		for i := len(arrFieldCounter) - 1; i >= 0; i-- {
+			arrFieldCounter[i]++
+			if arrFieldCounter[i] < len(normValsArr[i]) {
+				break
+			}
+			// if we iterated through all combinations for the current field, reset the counter
+			// so that we iterate over it again for the next field to the left
+			arrFieldCounter[i] = 0
+			// if the current field happens to be the leftmost one (the first), we are done
+			if i == 0 {
+				done = true
+			}
+		}
+
+		return resultKey, true
+	}, nil
+}
+
+func (index *collectionArrayBaseIndex) getAllKeys(
+	doc *client.Document,
+	appendDocID bool,
+) ([]core.IndexDataStoreKey, error) {
+	getNextKey, err := index.newIndexKeyGenerator(doc, appendDocID)
+	if err != nil {
+		return nil, err
+	}
+	keys := make([]core.IndexDataStoreKey, 0)
+	for {
+		key, ok := getNextKey()
+		if !ok {
+			break
+		}
+		keys = append(keys, key)
+	}
+	return keys, nil
+}
+
+func (index *collectionArrayBaseIndex) deleteRetiredKeysAndReturnNew(
+	ctx context.Context,
+	txn datastore.Txn,
+	oldDoc *client.Document,
+	newDoc *client.Document,
+	appendDocID bool,
+) ([]core.IndexDataStoreKey, error) {
+	prevKeys, err := index.getAllKeys(oldDoc, appendDocID)
+	if err != nil {
+		return nil, err
+	}
+	currentKeys, err := index.getAllKeys(newDoc, appendDocID)
+	if err != nil {
+		return nil, err
+	}
+
+	for _, prevKey := range prevKeys {
+		keyEqual := func(key core.IndexDataStoreKey) bool { return prevKey.Equal(key) }
+		rem, removedVal := slice.RemoveFirstIf(currentKeys, keyEqual)
+		// If a previous key is not among the current keys, it should be retired
+		if !removedVal.HasValue() {
+			err = index.deleteIndexKey(ctx, txn, prevKey)
+			if err != nil {
+				return nil, err
+			}
+		}
+		currentKeys = rem
+	}
+
+	return currentKeys, nil
+}
+
+type collectionArrayIndex struct {
+	collectionArrayBaseIndex
+}
+
+var _ CollectionIndex = (*collectionArrayIndex)(nil)
+
+func newCollectionArrayIndex(base collectionBaseIndex) *collectionArrayIndex {
+	return &collectionArrayIndex{collectionArrayBaseIndex: newCollectionArrayBaseIndex(base)}
+}
+
+// Save indexes a document by storing the indexed field value.
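+// One index entry is written per generated key, i.e. per unique combination of
+// array elements, with the docID appended to each key.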
+func (index *collectionArrayIndex) Save( + ctx context.Context, + txn datastore.Txn, + doc *client.Document, +) error { + getNextKey, err := index.newIndexKeyGenerator(doc, true) + if err != nil { + return err + } + + for { + key, hasKey := getNextKey() + if !hasKey { + break + } + err = txn.Datastore().Put(ctx, key.ToDS(), []byte{}) + if err != nil { + return NewErrFailedToStoreIndexedField(key.ToString(), err) + } + } + return nil +} + +func (index *collectionArrayIndex) Update( + ctx context.Context, + txn datastore.Txn, + oldDoc *client.Document, + newDoc *client.Document, +) error { + newKeys, err := index.deleteRetiredKeysAndReturnNew(ctx, txn, oldDoc, newDoc, true) + if err != nil { + return err + } + + for _, key := range newKeys { + err = txn.Datastore().Put(ctx, key.ToDS(), []byte{}) + if err != nil { + return NewErrFailedToStoreIndexedField(key.ToString(), err) + } + } + + return nil +} + +func (index *collectionArrayIndex) Delete( + ctx context.Context, + txn datastore.Txn, + doc *client.Document, +) error { + getNextKey, err := index.newIndexKeyGenerator(doc, true) + if err != nil { + return err + } + + for { + key, ok := getNextKey() + if !ok { + break + } + err = index.deleteIndexKey(ctx, txn, key) + if err != nil { + return err + } + } + return nil +} + +type collectionArrayUniqueIndex struct { + collectionArrayBaseIndex +} + +var _ CollectionIndex = (*collectionArrayUniqueIndex)(nil) + +func newCollectionArrayUniqueIndex(base collectionBaseIndex) *collectionArrayUniqueIndex { + return &collectionArrayUniqueIndex{collectionArrayBaseIndex: newCollectionArrayBaseIndex(base)} +} + +func (index *collectionArrayUniqueIndex) Save( + ctx context.Context, + txn datastore.Txn, + doc *client.Document, +) error { + getNextKey, err := index.newIndexKeyGenerator(doc, false) + if err != nil { + return err + } + + for { + key, ok := getNextKey() + if !ok { + break + } + err := index.addNewUniqueKey(ctx, txn, doc, key) + if err != nil { + return err + } + } + return nil +} + +func (index *collectionArrayUniqueIndex) addNewUniqueKey( + ctx context.Context, + txn datastore.Txn, + doc *client.Document, + key core.IndexDataStoreKey, +) error { + key, val, err := makeUniqueKeyValueRecord(key, doc) + if err != nil { + return err + } + err = validateUniqueKeyValue(ctx, txn, key, val, doc, index.fieldsDescs) + if err != nil { + return err + } + err = txn.Datastore().Put(ctx, key.ToDS(), val) + if err != nil { + return NewErrFailedToStoreIndexedField(key.ToString(), err) + } + return nil +} + +func (index *collectionArrayUniqueIndex) Update( + ctx context.Context, + txn datastore.Txn, + oldDoc *client.Document, + newDoc *client.Document, +) error { + newKeys, err := index.deleteRetiredKeysAndReturnNew(ctx, txn, oldDoc, newDoc, false) + if err != nil { + return err + } + + for _, key := range newKeys { + err := index.addNewUniqueKey(ctx, txn, newDoc, key) + if err != nil { + return err + } + } + + return nil +} + +func (index *collectionArrayUniqueIndex) Delete( + ctx context.Context, + txn datastore.Txn, + doc *client.Document, +) error { + getNextKey, err := index.newIndexKeyGenerator(doc, false) + if err != nil { + return err + } + + for { + key, ok := getNextKey() + if !ok { + break + } + err = index.deleteIndexKey(ctx, txn, key) + if err != nil { + return err + } + } + return nil +} diff --git a/internal/db/index_test.go b/internal/db/index_test.go index 9226f92efd..779bcdff84 100644 --- a/internal/db/index_test.go +++ b/internal/db/index_test.go @@ -37,9 +37,11 @@ const ( usersColName = 
"Users" productsColName = "Products" - usersNameFieldName = "name" - usersAgeFieldName = "age" - usersWeightFieldName = "weight" + usersNameFieldName = "name" + usersAgeFieldName = "age" + usersWeightFieldName = "weight" + usersNumbersFieldName = "numbers" + usersHobbiesFieldName = "hobbies" productsIDFieldName = "id" productsPriceFieldName = "price" @@ -71,11 +73,15 @@ func (f *indexTestFixture) addUsersCollection() client.Collection { %s: String %s: Int %s: Float + %s: [Int!] + %s: [String!] }`, usersColName, usersNameFieldName, usersAgeFieldName, usersWeightFieldName, + usersNumbersFieldName, + usersHobbiesFieldName, ), ) require.NoError(f.t, err) @@ -186,6 +192,20 @@ func (f *indexTestFixture) createUserCollectionIndexOnName() client.IndexDescrip return newDesc } +func (f *indexTestFixture) createUserCollectionIndexOnNumbers() client.IndexDescription { + indexDesc := client.IndexDescription{ + Name: "users_numbers_index", + Fields: []client.IndexedFieldDescription{ + {Name: usersNumbersFieldName}, + }, + } + + newDesc, err := f.createCollectionIndexFor(f.users.Name().Value(), indexDesc) + require.NoError(f.t, err) + + return newDesc +} + func makeUnique(indexDesc client.IndexDescription) client.IndexDescription { indexDesc.Unique = true return indexDesc @@ -509,35 +529,6 @@ func TestCreateIndex_ShouldUpdateCollectionsDescription(t *testing.T) { f.users.Description().Indexes) } -func TestCreateIndex_IfAttemptToIndexOnUnsupportedType_ReturnError(t *testing.T) { - f := newIndexTestFixtureBare(t) - - const unsupportedKind = client.FieldKind_BOOL_ARRAY - - _, err := f.db.AddSchema( - f.ctx, - `type testTypeCol { - field: [Boolean!] - }`, - ) - require.NoError(f.t, err) - - collection, err := f.db.GetCollectionByName(f.ctx, "testTypeCol") - require.NoError(f.t, err) - - indexDesc := client.IndexDescription{ - Fields: []client.IndexedFieldDescription{ - {Name: "field"}, - }, - } - - f.txn, err = f.db.NewTxn(f.ctx, false) - require.NoError(f.t, err) - - _, err = f.createCollectionIndexFor(collection.Name().Value(), indexDesc) - require.ErrorIs(f.t, err, NewErrUnsupportedIndexFieldType(unsupportedKind)) -} - func TestGetIndexes_ShouldReturnListOfAllExistingIndexes(t *testing.T) { f := newIndexTestFixture(t) defer f.db.Close() @@ -876,43 +867,6 @@ func TestCollectionGetIndexes_IfFailsToCreateTxn_ShouldNotCache(t *testing.T) { assert.Equal(t, testUsersColIndexName, indexes[0].Name) } -func TestCollectionGetIndexes_IfStoredIndexWithUnsupportedType_ReturnError(t *testing.T) { - f := newIndexTestFixtureBare(t) - - const unsupportedKind = client.FieldKind_BOOL_ARRAY - _, err := f.db.AddSchema( - f.ctx, - `type testTypeCol { - name: String - field: [Boolean!] - }`, - ) - require.NoError(f.t, err) - - collection, err := f.db.GetCollectionByName(f.ctx, "testTypeCol") - require.NoError(f.t, err) - - f.txn, err = f.db.NewTxn(f.ctx, false) - require.NoError(f.t, err) - - indexDesc := client.IndexDescription{ - Fields: []client.IndexedFieldDescription{ - {Name: "field"}, - }, - } - indexDescData, err := json.Marshal(indexDesc) - require.NoError(t, err) - - mockedTxn := f.mockTxn() - mockedTxn.MockSystemstore.EXPECT().Query(mock.Anything, mock.Anything).Unset() - mockedTxn.MockSystemstore.EXPECT().Query(mock.Anything, mock.Anything). 
-		Return(mocks.NewQueryResultsWithValues(t, indexDescData), nil)
-
-	ctx := SetContextTxn(f.ctx, mockedTxn)
-	_, err = collection.GetIndexes(ctx)
-	require.ErrorIs(t, err, NewErrUnsupportedIndexFieldType(unsupportedKind))
-}
-
 func TestCollectionGetIndexes_IfInvalidIndexIsStored_ReturnError(t *testing.T) {
 	f := newIndexTestFixture(t)
 	defer f.db.Close()
diff --git a/internal/db/indexed_docs_test.go b/internal/db/indexed_docs_test.go
index 9f4ea3fe72..4cd591a536 100644
--- a/internal/db/indexed_docs_test.go
+++ b/internal/db/indexed_docs_test.go
@@ -18,7 +18,6 @@ import (
 
 	ipfsDatastore "github.com/ipfs/go-datastore"
 	"github.com/ipfs/go-datastore/query"
-	"github.com/ipld/go-ipld-prime/storage/bsadapter"
 	"github.com/sourcenetwork/immutable"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/mock"
@@ -36,9 +35,11 @@ import (
 )
 
 type userDoc struct {
-	Name   string  `json:"name"`
-	Age    int     `json:"age"`
-	Weight float64 `json:"weight"`
+	Name    string   `json:"name"`
+	Age     int      `json:"age"`
+	Weight  float64  `json:"weight"`
+	Numbers []int    `json:"numbers"`
+	Hobbies []string `json:"hobbies"`
 }
 
 type productDoc struct {
@@ -56,6 +57,15 @@ func (f *indexTestFixture) saveDocToCollection(doc *client.Document, col client.
 	require.NoError(f.t, err)
 }
 
+func (f *indexTestFixture) deleteDocFromCollection(docID client.DocID, col client.Collection) {
+	res, err := col.Delete(f.ctx, docID)
+	require.NoError(f.t, err)
+	require.True(f.t, res)
+	f.commitTxn()
+	f.txn, err = f.db.NewTxn(f.ctx, false)
+	require.NoError(f.t, err)
+}
+
 func (f *indexTestFixture) newUserDoc(name string, age int, col client.Collection) *client.Document {
 	d := userDoc{Name: name, Age: age, Weight: 154.1}
 	data, err := json.Marshal(d)
@@ -66,6 +76,15 @@ func (f *indexTestFixture) newUserDoc(name string, age int, col client.Collectio
 	return doc
 }
 
+func (f *indexTestFixture) newCustomUserDoc(d userDoc, col client.Collection) *client.Document {
+	data, err := json.Marshal(d)
+	require.NoError(f.t, err)
+
+	doc, err := client.NewDocFromJSON(data, col.Definition())
+	require.NoError(f.t, err)
+	return doc
+}
+
 func (f *indexTestFixture) newProdDoc(id int, price float64, cat string, col client.Collection) *client.Document {
 	d := productDoc{ID: id, Price: price, Category: cat}
 	data, err := json.Marshal(d)
@@ -86,10 +105,11 @@ type indexKeyBuilder struct {
 	descendingFields []bool
 	doc              *client.Document
 	isUnique         bool
+	arrayFieldValues map[string]any
 }
 
 func newIndexKeyBuilder(f *indexTestFixture) *indexKeyBuilder {
-	return &indexKeyBuilder{f: f}
+	return &indexKeyBuilder{f: f, arrayFieldValues: make(map[string]any)}
 }
 
 func (b *indexKeyBuilder) Col(colName string) *indexKeyBuilder {
@@ -105,6 +125,14 @@ func (b *indexKeyBuilder) Fields(fieldsNames ...string) *indexKeyBuilder {
 	return b
 }
 
+// ArrayFieldVal sets the value for the array field.
+// The value should be a single element of the array, as the index indexes array fields element by element.
+// If ArrayFieldVal is not set and an indexed array field is present, the first element of the array is used as the value.
+func (b *indexKeyBuilder) ArrayFieldVal(fieldName string, val any) *indexKeyBuilder {
+	b.arrayFieldValues[fieldName] = val
+	return b
+}
+
 // DescendingFields sets which fields of the index key are sorted in descending order.
 func (b *indexKeyBuilder) DescendingFields(descending ...bool) *indexKeyBuilder {
 	b.descendingFields = descending
 	return b
@@ -120,6 +148,7 @@ func (b *indexKeyBuilder) Doc(doc *client.Document) *indexKeyBuilder {
 	return b
 }
 
+// Unique sets the index key to be unique.
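+// Unique keys mirror how unique index records are stored: the docID is not
+// appended to the key fields unless one of the indexed fields is nil.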
func (b *indexKeyBuilder) Unique() *indexKeyBuilder { b.isUnique = true return b @@ -170,12 +199,12 @@ indexLoop: hasNilValue := false for i, fieldName := range b.fieldsNames { fieldValue, err := b.doc.GetValue(fieldName) - var val client.NormalValue if err != nil { if !errors.Is(err, client.ErrFieldNotExist) { require.NoError(b.f.t, err) } } + var val client.NormalValue if fieldValue != nil { val = fieldValue.NormalValue() } else { @@ -190,6 +219,20 @@ indexLoop: } if val.IsNil() { hasNilValue = true + } else if val.IsArray() { + if arrVal, ok := b.arrayFieldValues[fieldName]; ok { + if normVal, ok := arrVal.(client.NormalValue); ok { + val = normVal + } else { + val, err = client.NewNormalValue(arrVal) + require.NoError(b.f.t, err, "given value is not a normal value") + } + } else { + arrVals, err := client.ToArrayOfNormalValues(val) + require.NoError(b.f.t, err) + require.Greater(b.f.t, len(arrVals), 0, "empty array can not be indexed") + val = arrVals[0] + } } descending := false if i < len(b.descendingFields) { @@ -290,6 +333,19 @@ func TestNonUnique_IfDocIsAdded_ShouldBeIndexed(t *testing.T) { assert.Len(t, data, 0) } +func TestNonUnique_IfDocIsDeleted_ShouldRemoveIndex(t *testing.T) { + f := newIndexTestFixture(t) + defer f.db.Close() + f.createUserCollectionIndexOnName() + + doc := f.newUserDoc("John", 21, f.users) + f.saveDocToCollection(doc, f.users) + f.deleteDocFromCollection(doc.ID(), f.users) + + userNameKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Build() + assert.Len(t, f.getPrefixFromDataStore(userNameKey.ToString()), 0) +} + func TestNonUnique_IfDocWithDescendingOrderIsAdded_ShouldBeIndexed(t *testing.T) { f := newIndexTestFixture(t) defer f.db.Close() @@ -309,29 +365,6 @@ func TestNonUnique_IfDocWithDescendingOrderIsAdded_ShouldBeIndexed(t *testing.T) assert.Len(t, data, 0) } -func TestNonUnique_IfFailsToStoreIndexedDoc_Error(t *testing.T) { - f := newIndexTestFixture(t) - defer f.db.Close() - f.createUserCollectionIndexOnName() - - doc := f.newUserDoc("John", 21, f.users) - key := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNameFieldName).Doc(doc).Build() - - mockTxn := f.mockTxn() - a := &mocks.DAGStore{} - mockTxn.MockDAGstore.EXPECT().AsIPLDStorage().Return(&bsadapter.Adapter{Wrapped: a}) - a.EXPECT().Put(mock.Anything, mock.Anything).Return(nil) - - dataStoreOn := mockTxn.MockDatastore.EXPECT() - dataStoreOn.Put(mock.Anything, mock.Anything, mock.Anything).Unset() - dataStoreOn.Put(mock.Anything, key.ToDS(), mock.Anything).Return(errors.New("error")) - dataStoreOn.Put(mock.Anything, mock.Anything, mock.Anything).Return(nil) - - ctx := SetContextTxn(f.ctx, mockTxn) - err := f.users.Create(ctx, doc) - require.ErrorIs(f.t, err, NewErrFailedToStoreIndexedField("name", nil)) -} - func TestNonUnique_IfDocDoesNotHaveIndexedField_SkipIndex(t *testing.T) { f := newIndexTestFixture(t) defer f.db.Close() @@ -354,50 +387,6 @@ func TestNonUnique_IfDocDoesNotHaveIndexedField_SkipIndex(t *testing.T) { assert.Len(t, prefixes, 0) } -func TestNonUnique_IfSystemStorageHasInvalidIndexDescription_Error(t *testing.T) { - f := newIndexTestFixture(t) - defer f.db.Close() - f.createUserCollectionIndexOnName() - - doc := f.newUserDoc("John", 21, f.users) - - mockTxn := f.mockTxn().ClearSystemStore() - a := &mocks.DAGStore{} - mockTxn.MockDAGstore.EXPECT().AsIPLDStorage().Return(&bsadapter.Adapter{Wrapped: a}) - a.EXPECT().Put(mock.Anything, mock.Anything).Return(nil) - - systemStoreOn := mockTxn.MockSystemstore.EXPECT() - 
systemStoreOn.Query(mock.Anything, mock.Anything). - Return(mocks.NewQueryResultsWithValues(t, []byte("invalid")), nil) - - ctx := SetContextTxn(f.ctx, mockTxn) - err := f.users.Create(ctx, doc) - assert.ErrorIs(t, err, datastore.NewErrInvalidStoredValue(nil)) -} - -func TestNonUnique_IfSystemStorageFailsToReadIndexDesc_Error(t *testing.T) { - f := newIndexTestFixture(t) - defer f.db.Close() - f.createUserCollectionIndexOnName() - - doc := f.newUserDoc("John", 21, f.users) - - testErr := errors.New("test error") - - mockTxn := f.mockTxn().ClearSystemStore() - a := &mocks.DAGStore{} - mockTxn.MockDAGstore.EXPECT().AsIPLDStorage().Return(&bsadapter.Adapter{Wrapped: a}) - a.EXPECT().Put(mock.Anything, mock.Anything).Return(nil) - - systemStoreOn := mockTxn.MockSystemstore.EXPECT() - systemStoreOn.Query(mock.Anything, mock.Anything). - Return(nil, testErr) - - ctx := SetContextTxn(f.ctx, mockTxn) - err := f.users.Create(ctx, doc) - require.ErrorIs(t, err, testErr) -} - func TestNonUnique_IfIndexIntField_StoreIt(t *testing.T) { f := newIndexTestFixture(t) defer f.db.Close() @@ -1458,3 +1447,148 @@ func TestCompositeUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) { f.commitTxn() } } + +func TestArrayIndex_IfDocIsAdded_ShouldIndexAllArrayElements(t *testing.T) { + f := newIndexTestFixture(t) + defer f.db.Close() + + f.createUserCollectionIndexOnNumbers() + + numbersArray := []int{1, 2, 3} + doc := f.newCustomUserDoc(userDoc{Name: "John", Numbers: numbersArray}, f.users) + f.saveDocToCollection(doc, f.users) + + for _, num := range numbersArray { + key := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNumbersFieldName). + ArrayFieldVal(usersNumbersFieldName, num).Doc(doc).Build() + + data, err := f.txn.Datastore().Get(f.ctx, key.ToDS()) + require.NoError(t, err) + assert.Len(t, data, 0) + } +} + +func TestArrayIndex_IfDocIsDeleted_ShouldRemoveIndex(t *testing.T) { + f := newIndexTestFixture(t) + defer f.db.Close() + + f.createUserCollectionIndexOnNumbers() + + numbersArray := []int{1, 2, 3} + doc := f.newCustomUserDoc(userDoc{Name: "John", Numbers: numbersArray}, f.users) + f.saveDocToCollection(doc, f.users) + + userNumbersKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNumbersFieldName).Build() + assert.Len(t, f.getPrefixFromDataStore(userNumbersKey.ToString()), len(numbersArray)) + + f.deleteDocFromCollection(doc.ID(), f.users) + + assert.Len(t, f.getPrefixFromDataStore(userNumbersKey.ToString()), 0) +} + +func TestArrayIndex_IfDocIsDeletedButOneArrayElementHasNoIndexRecord_Error(t *testing.T) { + f := newIndexTestFixture(t) + defer f.db.Close() + + f.createUserCollectionIndexOnNumbers() + + numbersArray := []int{1, 2, 3} + doc := f.newCustomUserDoc(userDoc{Name: "John", Numbers: numbersArray}, f.users) + f.saveDocToCollection(doc, f.users) + + userNumbersKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNumbersFieldName). 
+ ArrayFieldVal(usersNumbersFieldName, 2).Doc(doc).Build() + + err := f.txn.Datastore().Delete(f.ctx, userNumbersKey.ToDS()) + require.NoError(t, err) + f.commitTxn() + + res, err := f.users.Delete(f.ctx, doc.ID()) + require.Error(f.t, err) + require.False(f.t, res) +} + +func TestArrayIndex_With2ArrayFieldsIfDocIsDeleted_ShouldRemoveIndex(t *testing.T) { + f := newIndexTestFixture(t) + defer f.db.Close() + + indexDesc := client.IndexDescription{ + Fields: []client.IndexedFieldDescription{ + {Name: usersNumbersFieldName}, + {Name: usersHobbiesFieldName}, + }, + } + + _, err := f.createCollectionIndexFor(f.users.Name().Value(), indexDesc) + require.NoError(f.t, err) + + numbersArray := []int{1, 2} + hobbiesArray := []string{"reading", "swimming"} + doc := f.newCustomUserDoc(userDoc{Name: "John", Numbers: numbersArray, Hobbies: hobbiesArray}, f.users) + f.saveDocToCollection(doc, f.users) + + userNumbersKey := newIndexKeyBuilder(f).Col(usersColName). + Fields(usersNumbersFieldName, usersHobbiesFieldName).Build() + assert.Len(t, f.getPrefixFromDataStore(userNumbersKey.ToString()), len(numbersArray)*len(hobbiesArray)) + + f.deleteDocFromCollection(doc.ID(), f.users) + + assert.Len(t, f.getPrefixFromDataStore(userNumbersKey.ToString()), 0) +} + +func TestArrayIndex_With2ArrayFieldsIfDocIsDeletedButOneArrayElementHasNoIndexRecord_ShouldRemoveIndex(t *testing.T) { + f := newIndexTestFixture(t) + defer f.db.Close() + + indexDesc := client.IndexDescription{ + Fields: []client.IndexedFieldDescription{ + {Name: usersNumbersFieldName}, + {Name: usersHobbiesFieldName}, + }, + } + + _, err := f.createCollectionIndexFor(f.users.Name().Value(), indexDesc) + require.NoError(f.t, err) + + numbersArray := []int{1, 2} + hobbiesArray := []string{"reading", "swimming"} + doc := f.newCustomUserDoc(userDoc{Name: "John", Numbers: numbersArray, Hobbies: hobbiesArray}, f.users) + f.saveDocToCollection(doc, f.users) + + userNumbersKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNumbersFieldName, usersHobbiesFieldName). 
+		ArrayFieldVal(usersNumbersFieldName, 2).ArrayFieldVal(usersHobbiesFieldName, "swimming").Doc(doc).Build()
+
+	err = f.txn.Datastore().Delete(f.ctx, userNumbersKey.ToDS())
+	require.NoError(t, err)
+	f.commitTxn()
+
+	res, err := f.users.Delete(f.ctx, doc.ID())
+	require.Error(f.t, err)
+	require.False(f.t, res)
+}
+
+func TestArrayIndex_WithUniqueIndexIfDocIsDeleted_ShouldRemoveIndex(t *testing.T) {
+	f := newIndexTestFixture(t)
+	defer f.db.Close()
+
+	indexDesc := client.IndexDescription{
+		Unique: true,
+		Fields: []client.IndexedFieldDescription{
+			{Name: usersNumbersFieldName},
+		},
+	}
+
+	_, err := f.createCollectionIndexFor(f.users.Name().Value(), indexDesc)
+	require.NoError(f.t, err)
+
+	numbersArray := []int{1, 2, 3}
+	doc := f.newCustomUserDoc(userDoc{Name: "John", Numbers: numbersArray}, f.users)
+	f.saveDocToCollection(doc, f.users)
+
+	userNumbersKey := newIndexKeyBuilder(f).Col(usersColName).Fields(usersNumbersFieldName).Unique().Build()
+	assert.Len(t, f.getPrefixFromDataStore(userNumbersKey.ToString()), len(numbersArray))
+
+	f.deleteDocFromCollection(doc.ID(), f.users)
+
+	assert.Len(t, f.getPrefixFromDataStore(userNumbersKey.ToString()), 0)
+}
diff --git a/internal/planner/filter/copy_field.go b/internal/planner/filter/copy_field.go
index 9a524ecabb..838cdf4cf0 100644
--- a/internal/planner/filter/copy_field.go
+++ b/internal/planner/filter/copy_field.go
@@ -29,11 +29,9 @@ func CopyField(filter *mapper.Filter, fields ...mapper.Field) *mapper.Filter {
 		})
 	}
 
-	resultFilter := &mapper.Filter{}
 	conditionMap := traverseFilterByProperty(conditionKeys, filter.Conditions, false)
 	if len(conditionMap) > 0 {
-		resultFilter.Conditions = conditionMap
-		return resultFilter
+		return &mapper.Filter{Conditions: conditionMap}
 	}
 	return nil
 }
diff --git a/internal/planner/filter/merge.go b/internal/planner/filter/merge.go
index d5644e807e..9afbbf4c47 100644
--- a/internal/planner/filter/merge.go
+++ b/internal/planner/filter/merge.go
@@ -15,9 +15,9 @@ import (
 	"github.com/sourcenetwork/defradb/internal/planner/mapper"
 )
 
-// Merge merges two filters into one.
+// MergeConditions merges two sets of filter conditions into one.
 // It basically applies _and to both condition sets and normalizes them.
-func Merge(c1 map[connor.FilterKey]any, c2 map[connor.FilterKey]any) map[connor.FilterKey]any {
+func MergeConditions(c1 map[connor.FilterKey]any, c2 map[connor.FilterKey]any) map[connor.FilterKey]any {
 	if len(c1) == 0 {
 		return c2
 	}
@@ -37,3 +37,18 @@ func Merge(c1 map[connor.FilterKey]any, c2 map[connor.FilterKey]any) map[connor.
 	// more complex, which is why we simplify it by normalizing.
 	return normalize(result)
 }
+
+// Merge merges two filters into one.
+// It basically applies _and to both filters and normalizes them.
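+// A nil filter acts as the identity: merging with nil returns the other filter
+// unchanged.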
+func Merge(f1 *mapper.Filter, f2 *mapper.Filter) *mapper.Filter {
+	if f1 == nil {
+		return f2
+	}
+	if f2 == nil {
+		return f1
+	}
+
+	return &mapper.Filter{
+		Conditions: MergeConditions(f1.Conditions, f2.Conditions),
+	}
+}
diff --git a/internal/planner/filter/merge_test.go b/internal/planner/filter/merge_test.go
index 6ea663eba5..745642fe1f 100644
--- a/internal/planner/filter/merge_test.go
+++ b/internal/planner/filter/merge_test.go
@@ -59,7 +59,7 @@ func TestMergeFilterConditions(t *testing.T) {
 		t.Run(tt.name, func(t *testing.T) {
 			leftFilter := mapper.ToFilter(request.Filter{Conditions: tt.left}, mapping)
 			rightFilter := mapper.ToFilter(request.Filter{Conditions: tt.right}, mapping)
-			actualFilter := Merge(leftFilter.Conditions, rightFilter.Conditions)
+			actualFilter := MergeConditions(leftFilter.Conditions, rightFilter.Conditions)
 			expectedFilter := mapper.ToFilter(request.Filter{Conditions: tt.expected}, mapping)
 			AssertEqualFilterMap(t, expectedFilter.Conditions, actualFilter)
 		})
@@ -70,6 +70,6 @@ func TestMergeNullFilter(t *testing.T) {
 	f := map[connor.FilterKey]any{
 		&mapper.PropertyIndex{Index: 0}: "value1",
 	}
-	AssertEqualFilterMap(t, f, Merge(f, nil))
-	AssertEqualFilterMap(t, f, Merge(nil, f))
+	AssertEqualFilterMap(t, f, MergeConditions(f, nil))
+	AssertEqualFilterMap(t, f, MergeConditions(nil, f))
 }
diff --git a/internal/planner/filter/split.go b/internal/planner/filter/split.go
index 69aed9fc2e..db8381620e 100644
--- a/internal/planner/filter/split.go
+++ b/internal/planner/filter/split.go
@@ -41,7 +41,7 @@ func SplitByFields(filter *mapper.Filter, fields ...mapper.Field) (*mapper.Filte
 		if newSplitF == nil {
 			continue
 		}
-		splitF.Conditions = Merge(splitF.Conditions, newSplitF.Conditions)
+		splitF.Conditions = MergeConditions(splitF.Conditions, newSplitF.Conditions)
 		RemoveField(filter, field)
 	}
 
diff --git a/internal/planner/scan.go b/internal/planner/scan.go
index 019cd1dee2..a5fe4a32e9 100644
--- a/internal/planner/scan.go
+++ b/internal/planner/scan.go
@@ -164,14 +164,26 @@ func (scan *scanNode) initFetcher(
 	f = new(fetcher.DocumentFetcher)
 
 	if index.HasValue() {
-		fields := make([]mapper.Field, 0, len(index.Value().Fields))
+		fieldsToMove := make([]mapper.Field, 0, len(index.Value().Fields))
+		fieldsToCopy := make([]mapper.Field, 0, len(index.Value().Fields))
 		for _, field := range index.Value().Fields {
 			fieldName := field.Name
 			typeIndex := scan.documentMapping.FirstIndexOfName(fieldName)
-			fields = append(fields, mapper.Field{Index: typeIndex, Name: fieldName})
+			indexField := mapper.Field{Index: typeIndex, Name: fieldName}
+			fd, _ := scan.col.Definition().Schema.GetFieldByName(fieldName)
+			// if the field is an array, we need to copy it instead of moving so that the
+			// top select node can do the final filter check on the whole array of the document
+			if fd.Kind.IsArray() {
+				fieldsToCopy = append(fieldsToCopy, indexField)
+			} else {
+				fieldsToMove = append(fieldsToMove, indexField)
+			}
 		}
 		var indexFilter *mapper.Filter
-		scan.filter, indexFilter = filter.SplitByFields(scan.filter, fields...)
+		scan.filter, indexFilter = filter.SplitByFields(scan.filter, fieldsToMove...)
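+		// array-field conditions are duplicated rather than moved: the index filter
+		// gets a copy while scan.filter keeps the original for the final whole-array check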
+ for i := range fieldsToCopy { + indexFilter = filter.Merge(indexFilter, filter.CopyField(scan.filter, fieldsToCopy[i])) + } if indexFilter != nil { f = fetcher.NewIndexFetcher(f, index.Value(), indexFilter) } diff --git a/internal/planner/type_join.go b/internal/planner/type_join.go index fc5eb9bbaf..a6d726b801 100644 --- a/internal/planner/type_join.go +++ b/internal/planner/type_join.go @@ -277,8 +277,7 @@ func prepareScanNodeFilterForTypeJoin( parent.filter = mapper.NewFilter() parent.filter.Conditions = filter.Copy(scan.filter.Conditions) } else { - parent.filter.Conditions = filter.Merge( - parent.filter.Conditions, scan.filter.Conditions) + parent.filter = filter.Merge(parent.filter, scan.filter) } scan.filter = nil } else { @@ -288,8 +287,7 @@ func prepareScanNodeFilterForTypeJoin( if parent.filter == nil { parent.filter = parentFilter } else { - parent.filter.Conditions = filter.Merge( - parent.filter.Conditions, parentFilter.Conditions) + parent.filter = filter.Merge(parent.filter, parentFilter) } } } @@ -799,7 +797,7 @@ func addFilterOnIDField(scan *scanNode, propIndex int, val any) { } filter.RemoveField(scan.filter, mapper.Field{Index: propIndex}) - scan.filter.Conditions = filter.Merge(scan.filter.Conditions, filterConditions) + scan.filter.Conditions = filter.MergeConditions(scan.filter.Conditions, filterConditions) } func getScanNode(plan planNode) *scanNode { diff --git a/internal/utils/slice/slice.go b/internal/utils/slice/slice.go new file mode 100644 index 0000000000..2a2463d6ec --- /dev/null +++ b/internal/utils/slice/slice.go @@ -0,0 +1,50 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package slice + +import "github.com/sourcenetwork/immutable" + +// RemoveDuplicates removes duplicates from a slice of elements. +// Relative order of the elements is not preserved. +// Both runtime and space complexity are O(n). +func RemoveDuplicates[S ~[]E, E comparable](s S) S { + sets := make(map[E]struct{}) + for i := len(s) - 1; i >= 0; i-- { + if _, ok := sets[s[i]]; ok { + swapLast(s, i) + s = s[:len(s)-1] + } else { + sets[s[i]] = struct{}{} + } + } + return s +} + +// RemoveFirstIf removes the first element that satisfies the predicate. +// Relative order of the elements is not preserved, as the last element is swapped with the removed one. +func RemoveFirstIf[S ~[]E, E any](s S, predicate func(E) bool) (S, immutable.Option[E]) { + for i := 0; i < len(s); i++ { + if predicate(s[i]) { + swapLast(s, i) + lastInd := len(s) - 1 + return s[:lastInd], immutable.Some(s[lastInd]) + } + } + return s, immutable.None[E]() +} + +func swap[T any](elements []T, i, j int) { + elements[i], elements[j] = elements[j], elements[i] +} + +func swapLast[T any](elements []T, i int) { + swap(elements, i, len(elements)-1) +} diff --git a/internal/utils/slice/slice_test.go b/internal/utils/slice/slice_test.go new file mode 100644 index 0000000000..07d6d88dc8 --- /dev/null +++ b/internal/utils/slice/slice_test.go @@ -0,0 +1,100 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. +package slice_test + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + "github.com/stretchr/testify/assert" + + "github.com/sourcenetwork/defradb/internal/utils/slice" +) + +func TestRemoveFirstIf(t *testing.T) { + tests := []struct { + name string + input []int + predicate func(int) bool + expected []int + found immutable.Option[int] + }{ + { + name: "remove in the middle", + input: []int{1, 3, 4, 5, 6}, + predicate: func(n int) bool { return n%2 == 0 }, + expected: []int{1, 3, 6, 5}, + found: immutable.Some(4), + }, + { + name: "nothing removed", + input: []int{1, 3, 4, 5, 6}, + predicate: func(n int) bool { return n > 10 }, + expected: []int{1, 3, 4, 5, 6}, + found: immutable.None[int](), + }, + { + name: "empty slice", + input: []int{}, + predicate: func(n int) bool { return n == 5 }, + expected: []int{}, + found: immutable.None[int](), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, found := slice.RemoveFirstIf(tt.input, tt.predicate) + assert.Equal(t, tt.expected, result, "expected %v, got %v", tt.expected, result) + assert.Equal(t, tt.found, found, "expected found %v, got %v", tt.found, found) + }) + } +} + +func TestRemoveDuplicates(t *testing.T) { + tests := []struct { + name string + input []int + expected []int + }{ + { + name: "no duplicates", + input: []int{1, 2, 3, 4, 5}, + expected: []int{1, 2, 3, 4, 5}, + }, + { + name: "all duplicates", + input: []int{1, 1, 1, 1, 1}, + expected: []int{1}, + }, + { + name: "some duplicates", + input: []int{1, 2, 4, 2, 3, 4, 4, 5}, + expected: []int{1, 2, 3, 4, 5}, + }, + { + name: "empty slice", + input: []int{}, + expected: []int{}, + }, + { + name: "single element", + input: []int{1}, + expected: []int{1}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := slice.RemoveDuplicates(tt.input) + assert.ElementsMatch(t, tt.expected, result, "expected %v, got %v", tt.expected, result) + }) + } +} diff --git a/tests/integration/index/array_composite_test.go b/tests/integration/index/array_composite_test.go new file mode 100644 index 0000000000..94ccc9d659 --- /dev/null +++ b/tests/integration/index/array_composite_test.go @@ -0,0 +1,597 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package index + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestArrayCompositeIndex_WithFilterOnIndexedArrayUsingAny_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {name: {_eq: "Shahzad"}, numbers: {_any: {_eq: 30}}, age: {_eq: 30}}) { + _docID + numbers + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(includes: [{name: "name"}, {name: "numbers"}, {name: "age"}]) { + name: String + numbers: [Int!] 
+ age: Int + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 30, 20], + "age": 30 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [30, 40, 50, 30], + "age": 30 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [40, 50, 30], + "age": 60 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [1, 2, 3], + "age": 30 + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + { + "_docID": testUtils.NewDocIndex(0, 1), + "numbers": []int64{30, 40, 50, 30}, + }, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayCompositeIndex_WithFilterOnIndexedArrayUsingAll_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {name: {_eq: "Shahzad"}, numbers: {_all: {_gt: 1}}, age: {_eq: 30}}) { + _docID + numbers + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(includes: [{name: "name"}, {name: "numbers"}, {name: "age"}]) { + name: String + numbers: [Int!] + age: Int + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 30, 20], + "age": 30 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [30, 40], + "age": 30 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [50], + "age": 60 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [1, 2], + "age": 30 + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + { + "_docID": testUtils.NewDocIndex(0, 1), + "numbers": []int64{30, 40}, + }, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + // all "Shahzad" users have in total 5 numbers + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(5), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayCompositeIndex_WithFilterOnIndexedArrayUsingNone_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {name: {_eq: "Shahzad"}, numbers: {_none: {_eq: 3}}, age: {_eq: 30}}) { + _docID + numbers + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(includes: [{name: "name"}, {name: "numbers"}, {name: "age"}]) { + name: String + numbers: [Int!] 
age: Int
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "John",
+					"numbers": [0, 30, 20],
+					"age": 30
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [30, 40],
+					"age": 30
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [50],
+					"age": 60
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [2, 3],
+					"age": 30
+				}`,
+			},
+			testUtils.Request{
+				Request: req,
+				Results: map[string]any{
+					"User": []map[string]any{
+						{
+							"_docID":  testUtils.NewDocIndex(0, 1),
+							"numbers": []int64{30, 40},
+						},
+					},
+				},
+			},
+			testUtils.Request{
+				Request: makeExplainQuery(req),
+				// all "Shahzad" users have in total 5 numbers
+				Asserter: testUtils.NewExplainAsserter().WithIndexFetches(5),
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestArrayCompositeIndex_With2ConsecutiveArrayFields_Succeed(t *testing.T) {
+	req := `query {
+		User(filter: {name: {_eq: "Shahzad"}, numbers: {_any: {_eq: 30}}, hobbies: {_any: {_eq: "sports"}}, age: {_eq: 30}}) {
+			_docID
+			numbers
+			hobbies
+		}
+	}`
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User @index(includes: [{name: "name"}, {name: "numbers"}, {name: "hobbies"}, {name: "age"}]) {
+						name: String
+						numbers: [Int!]
+						hobbies: [String!]
+						age: Int
+					}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "John",
+					"numbers": [0, 30, 20],
+					"hobbies": ["sports", "books"],
+					"age": 30
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [30, 40],
+					"hobbies": ["sports", "books"],
+					"age": 30
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [50],
+					"hobbies": ["books", "movies"],
+					"age": 60
+				}`,
+			},
+			testUtils.CreateDoc{
+				Doc: `{
+					"name": "Shahzad",
+					"numbers": [2, 3],
+					"hobbies": ["sports", "movies", "books"],
+					"age": 30
+				}`,
+			},
+			testUtils.Request{
+				Request: req,
+				Results: map[string]any{
+					"User": []map[string]any{
+						{
+							"_docID":  testUtils.NewDocIndex(0, 1),
+							"numbers": []int64{30, 40},
+							"hobbies": []string{"sports", "books"},
+						},
+					},
+				},
+			},
+			testUtils.Request{
+				Request: makeExplainQuery(req),
+				// the filter matches a single index key, so only one fetch is expected
+				Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1),
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestArrayCompositeIndex_With2SeparateArrayFields_Succeed(t *testing.T) {
+	req := `query {
+		User(filter: {name: {_eq: "Shahzad"}, numbers: {_any: {_eq: 30}}, hobbies: {_any: {_eq: "sports"}}, age: {_eq: 30}}) {
+			_docID
+			numbers
+			hobbies
+		}
+	}`
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User @index(includes: [{name: "numbers"}, {name: "name"}, {name: "age"}, {name: "hobbies"}]) {
+						name: String
+						numbers: [Int!]
+						hobbies: [String!]
+ age: Int + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 30, 20], + "hobbies": ["sports", "books"], + "age": 30 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [30, 40], + "hobbies": ["sports", "books"], + "age": 30 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [50], + "hobbies": ["books", "movies"], + "age": 60 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [2, 3], + "hobbies": ["sports", "movies", "books"], + "age": 30 + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + { + "_docID": testUtils.NewDocIndex(0, 1), + "numbers": []int64{30, 40}, + "hobbies": []string{"sports", "books"}, + }, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayCompositeIndex_WithAnyNoneAll_Succeed(t *testing.T) { + req := `query { + User(filter: { + numbers1: {_all: {_gt: 0}}, + numbers2: {_none: {_eq: 40}}, + numbers3: {_any: {_le: 200}} + }) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(includes: [{name: "numbers1"}, {name: "numbers2"}, {name: "numbers3"}]) { + name: String + numbers1: [Int!] + numbers2: [Int!] + numbers3: [Int!] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers1": [1, 2, 3], + "numbers2": [10, 20, 30], + "numbers3": [100, 200, 300] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers1": [2, 3, 4], + "numbers2": [20, 30, 40], + "numbers3": [200, 300, 400] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Keenan", + "numbers1": [0, 1], + "numbers2": [90], + "numbers3": [900] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Islam", + "numbers1": [6, 7, 8], + "numbers2": [10, 70, 80], + "numbers3": [100, 700, 800] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "numbers1": [1, 4, 5, 8], + "numbers2": [60, 80], + "numbers3": [600, 800] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + {"name": "Islam"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayCompositeIndexUpdate_With2ArrayFields_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(includes: [{name: "name"}, {name: "numbers"}, {name: "hobbies"}]) { + name: String + numbers: [Int!] + hobbies: [String!] 
+ }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 30, 20, 40], + "hobbies": ["sports", "books"] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [30, 40, 30], + "hobbies": ["sports", "books"] + }`, + }, + testUtils.UpdateDoc{ + DocID: 1, + Doc: `{ + "name": "Shahzad", + "numbers": [40, 50, 50], + "hobbies": ["books", "movies", "books", "movies"] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_eq: 30}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{{"name": "John"}}, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_eq: 40}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + {"name": "Shahzad"}, + }, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_eq: 50}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{{"name": "Shahzad"}}, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_gt: 0}}, hobbies: {_any: {_eq: "sports"}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{{"name": "John"}}, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_gt: 0}}, hobbies: {_any: {_eq: "books"}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + {"name": "Shahzad"}, + }, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_gt: 0}}, hobbies: {_any: {_eq: "movies"}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{{"name": "Shahzad"}}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayCompositeIndexDelete_With2ConsecutiveArrayFields_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(includes: [{name: "name"}, {name: "numbers"}, {name: "hobbies"}]) { + name: String + numbers: [Int!] + hobbies: [String!] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 30, 20], + "hobbies": ["sports", "books"] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [30, 40, 30, 50], + "hobbies": ["sports", "books", "sports", "movies"] + }`, + }, + testUtils.DeleteDoc{DocID: 1}, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_eq: 30}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{{"name": "John"}}, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_gt: 0}}, hobbies: {_any: {_eq: "sports"}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{{"name": "John"}}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/index/array_test.go b/tests/integration/index/array_test.go new file mode 100644 index 0000000000..096ecb87e1 --- /dev/null +++ b/tests/integration/index/array_test.go @@ -0,0 +1,1103 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package index + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestArrayIndex_WithFilterOnIndexedArrayUsingAny_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {numbers: {_any: {_eq: 30}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 10, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [30, 40, 50, 30] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [33, 44, 55] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithFilterOnIndexedArrayUsingAll_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {numbers: {_all: {_ge: 33}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 10, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [30, 40, 50] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [33, 44, 55] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + { + "name": "Andy", + }, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(9), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithFilterOnIndexedArrayUsingNone_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {numbers: {_none: {_ge: 33}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 10, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [30, 40, 50] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [33, 44, 55] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + { + "name": "John", + }, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(9), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndexUpdate_IfUpdateRearrangesArrayElements_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {numbers: {_any: {_eq: 30}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] 
@index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 10, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [30, 40, 50, 30] + }`, + }, + testUtils.UpdateDoc{ + DocID: 1, + Doc: `{ + "name": "Shahzad", + "numbers": [50, 30, 40] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndexUpdate_IfUpdateRemovesSoughtElement_ShouldNotFetch(t *testing.T) { + req := `query { + User(filter: {numbers: {_any: {_eq: 30}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 10, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [30, 40, 50, 30] + }`, + }, + testUtils.UpdateDoc{ + DocID: 1, + Doc: `{ + "name": "Shahzad", + "numbers": [50, 40] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(0), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndexUpdate_IfUpdateAddsSoughtElement_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {numbers: {_any: {_eq: 30}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 10, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [40, 50] + }`, + }, + testUtils.UpdateDoc{ + DocID: 1, + Doc: `{ + "name": "Shahzad", + "numbers": [80, 30, 60] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndexDelete_IfUpdateRemovesSoughtElement_ShouldNotFetch(t *testing.T) { + req := `query { + User(filter: {numbers: {_any: {_gt: 0}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, 10, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [40, 50] + }`, + }, + testUtils.DeleteDoc{DocID: 0}, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Shahzad"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_Bool_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {booleans: {_any: {_eq: true}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + booleans: [Boolean!] 
@index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "booleans": [true, false, true] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "booleans": [false, false] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_OptionalBool_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {booleans: {_any: {_eq: true}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + booleans: [Boolean] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "booleans": [true, false, true] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "booleans": [false, false] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithFieldFetches(2).WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_OptionalInt_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {numbers: {_any: {_eq: 3}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [4, 3, 7] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [2, 8] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_Float_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {rates: {_any: {_eq: 1.25}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + rates: [Float!] 
@index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "rates": [0.5, 1.0, 1.25] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "rates": [1.5, 1.2] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_OptionalFloat_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {rates: {_any: {_eq: 1.25}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + rates: [Float] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "rates": [0.5, 1.0, 1.25] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "rates": [1.5, 1.2] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_OptionalString_ShouldUseIndex(t *testing.T) { + req := `query { + User(filter: {hobbies: {_any: {_eq: "books"}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + hobbies: [String] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "hobbies": ["games", "books", "music"] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "hobbies": ["movies", "music"] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithAnyAndInOperator_Succeed(t *testing.T) { + req := `query { + User(filter: {numbers: {_any: {_in: [3, 4, 5]}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [1, 4, 7] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [2, 8] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithAllAndInOperator_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] 
@index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [3, 4] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [2, 8] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [3, 5, 8] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_all: {_in: [3, 4, 5]}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithNoneAndInOperator_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [3, 4] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [2, 8] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [3, 5, 8] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_none: {_in: [4, 5]}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Shahzad"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithNoneAndNinOperator_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [3, 4] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [2, 8] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [3, 5, 8] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_none: {_nin: [3, 4, 5]}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithAllAndNinOperator_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [3, 4] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [2, 8] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [3, 5, 8] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_all: {_nin: [4, 5]}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Shahzad"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithAnyAndNinOperator_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int!] 
@index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [3, 4] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [2, 8] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [3, 5, 8] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_nin: [3, 4, 5]}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Shahzad"}, + {"name": "Andy"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithNilElementsAndAnyOp_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, null, 2, 3, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [10, 20, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [33, 44, 55] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_eq: 2}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + {"name": "Shahzad"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithNilElementsAndAllOp_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, null, 2, 3, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [10, 20, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [33, 44, 55] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Islam", + "numbers": [null, null] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_all: {_ge: 10}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Andy"}, + }, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_all: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Islam"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayIndex_WithNilElementsAndNoneOp_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int] @index + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, null, 2, 3, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [10, 20, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [33, 44, 55] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_none: {_ge: 10}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_none: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Andy"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git 
a/tests/integration/index/array_unique_composite_test.go b/tests/integration/index/array_unique_composite_test.go new file mode 100644 index 0000000000..ec1b10ee0f --- /dev/null +++ b/tests/integration/index/array_unique_composite_test.go @@ -0,0 +1,204 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package index + +import ( + "testing" + + "github.com/sourcenetwork/defradb/errors" + "github.com/sourcenetwork/defradb/internal/db" + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestArrayUniqueCompositeIndex_WithUniqueCombinations_Succeed(t *testing.T) { + req := `query { + User(filter: {nfts1: {_any: {_eq: 2}}, nfts2: {_any: {_eq: 3}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(unique: true, includes: [{name: "nfts1"}, {name: "nfts2"}]) { + name: String + nfts1: [Int!] + nfts2: [Int!] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "nfts1": [1, 2], + "nfts2": [1, 3] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "nfts1": [1, 2], + "nfts2": [2, 4] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Keenan", + "nfts1": [3, 4], + "nfts2": [1, 3] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayUniqueCompositeIndex_IfDocIsCreatedThatViolatesUniqueness_Error(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(unique: true, includes: [{name: "nfts1"}, {name: "nfts2"}]) { + name: String + nfts1: [Int!] + nfts2: [Int!] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "nfts1": [1, 2], + "nfts2": [1, 3] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "nfts1": [1, 2], + "nfts2": [2, 4, 3] + }`, + ExpectedError: db.NewErrCanNotIndexNonUniqueFields( + "bae-02823b81-729a-5cb8-88cb-6df2e15232b1", + errors.NewKV("nfts1", []int64{1, 2}), errors.NewKV("nfts2", []int64{2, 4, 3})).Error(), + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "nfts1": [5, 6, 2], + "nfts2": [1, 3] + }`, + ExpectedError: db.NewErrCanNotIndexNonUniqueFields( + "bae-ccb3fd51-caf9-5b34-b2d2-e4ad020409e1", + errors.NewKV("nfts1", []int64{5, 6, 2}), errors.NewKV("nfts2", []int64{1, 3})).Error(), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayUniqueCompositeIndex_IfDocIsUpdatedThatViolatesUniqueness_Error(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User @index(unique: true, includes: [{name: "nfts1"}, {name: "nfts2"}]) { + name: String + nfts1: [Int!] + nfts2: [Int!] 
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John",
+ "nfts1": [1, 2],
+ "nfts2": [1, 3]
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "Shahzad",
+ "nfts1": [1, 2],
+ "nfts2": [2, 4, 5, 6]
+ }`,
+ },
+ testUtils.UpdateDoc{
+ DocID: 1,
+ Doc: `{
+ "name": "Shahzad",
+ "nfts1": [1],
+ "nfts2": [2, 5, 3]
+ }`,
+ ExpectedError: db.NewErrCanNotIndexNonUniqueFields(
+ "bae-f6b3ab5a-dfa4-53fd-a320-a3e203a9e6f5",
+ errors.NewKV("nfts1", []int64{1}), errors.NewKV("nfts2", []int64{2, 5, 3})).Error(),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestArrayUniqueCompositeIndex_IfDocsHaveNilValues_Succeed(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User @index(unique: true, includes: [{name: "nfts1"}, {name: "nfts2"}]) {
+ name: String
+ nfts1: [Int]
+ nfts2: [Int]
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John",
+ "nfts1": [1, null],
+ "nfts2": [null, 1, 3, null]
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "Shahzad",
+ "nfts1": [1, null, 2],
+ "nfts2": [2, 4, null, 5, 6, null]
+ }`,
+ },
+ testUtils.Request{
+ Request: `query {
+ User(filter: {nfts1: {_any: {_eq: null}}, nfts2: {_any: {_eq: null}}}) {
+ name
+ }
+ }`,
+ Results: map[string]any{
+ "User": []map[string]any{
+ {"name": "John"},
+ {"name": "Shahzad"},
+ },
+ },
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/index/array_unique_test.go b/tests/integration/index/array_unique_test.go
new file mode 100644
index 0000000000..4627595ad9
--- /dev/null
+++ b/tests/integration/index/array_unique_test.go
@@ -0,0 +1,395 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package index
+
+import (
+ "testing"
+
+ "github.com/sourcenetwork/defradb/errors"
+ "github.com/sourcenetwork/defradb/internal/db"
+ testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestArrayUniqueIndex_UponDocCreationWithUniqueElements_Succeed(t *testing.T) {
+ req := `query {
+ User(filter: {nfts: {_any: {_eq: 30}}}) {
+ name
+ }
+ }`
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User {
+ name: String
+ nfts: [Int!] @index(unique: true)
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "John",
+ "nfts": [0, 30, 20]
+ }`,
+ },
+ testUtils.CreateDoc{
+ Doc: `{
+ "name": "Shahzad",
+ "nfts": [10, 40]
+ }`,
+ },
+ testUtils.Request{
+ Request: req,
+ Results: map[string]any{
+ "User": []map[string]any{
+ {"name": "John"},
+ },
+ },
+ },
+ testUtils.Request{
+ Request: makeExplainQuery(req),
+ Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1),
+ },
+ },
+ }
+
+ testUtils.ExecuteTestCase(t, test)
+}
+
+func TestArrayUniqueIndex_UponDocCreationWithArrayElementThatExists_Error(t *testing.T) {
+ test := testUtils.TestCase{
+ Actions: []any{
+ testUtils.SchemaUpdate{
+ Schema: `
+ type User {
+ name: String
+ nfts: [Int!]
@index(unique: true) + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "nfts": [0, 30, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "nfts": [50, 30] + }`, + ExpectedError: db.NewErrCanNotIndexNonUniqueFields( + "bae-a4045a20-b9e6-5b19-82d5-5e54176895a8", + errors.NewKV("nfts", []int64{50, 30})).Error(), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayUniqueIndex_UponDocUpdateWithUniqueElements_Succeed(t *testing.T) { + req := `query { + User(filter: {nfts: {_any: {_eq: 60}}}) { + name + } + }` + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + nfts: [Int!] @index(unique: true) + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "nfts": [0, 30, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "nfts": [10, 40] + }`, + }, + testUtils.UpdateDoc{ + DocID: 1, + Doc: `{ + "nfts": [10, 60] + }`, + }, + testUtils.Request{ + Request: req, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Shahzad"}, + }, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayUniqueIndex_UponDocUpdateWithArrayElementThatExists_Error(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + nfts: [Int!] @index(unique: true) + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "nfts": [0, 30, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "nfts": [10, 40] + }`, + }, + testUtils.UpdateDoc{ + DocID: 1, + Doc: `{ + "nfts": [50, 30] + }`, + ExpectedError: db.NewErrCanNotIndexNonUniqueFields( + "bae-d065234c-4bf5-5cb8-8068-6f1fda8ed661", + errors.NewKV("nfts", []int64{50, 30})).Error(), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayUniqueIndex_UponDeletingDoc_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + nfts: [Int!] 
@index(unique: true) + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "nfts": [0, 30, 20] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "nfts": [10, 40] + }`, + }, + testUtils.DeleteDoc{ + DocID: 1, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayUniqueIndex_WithNilElementsAndAnyOp_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int] @index(unique: true) + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, null, 2, 3, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [10, 20, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [33, 44, 55] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_eq: 2}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_any: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + {"name": "Shahzad"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayUniqueIndex_WithNilElementsAndAllOp_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int] @index(unique: true) + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, null, 2, 3, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [10, 20, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [33, 44, 55] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Islam", + "numbers": [null, null] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_all: {_ge: 10}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Andy"}, + }, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_all: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Islam"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestArrayUniqueIndex_WithNilElementsAndNoneOp_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + numbers: [Int] @index(unique: true) + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "numbers": [0, null, 2, 3, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "numbers": [10, 20, null] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "numbers": [33, 44, 55] + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_none: {_ge: 10}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "John"}, + }, + }, + }, + testUtils.Request{ + Request: `query { + User(filter: {numbers: {_none: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "User": []map[string]any{ + {"name": "Andy"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From 2bdbbd4af0c836932086bc056eeaee9ecf9ef6a9 Mon Sep 17 00:00:00 2001 From: Shahzad Lone Date: Fri, 4 Oct 2024 18:36:49 -0400 Subject: [PATCH 53/71] feat: Ability to unrelate private documents from actors (#3099) ## Relevant issue(s) Resolves #2906 ## 
Description

Follow-up to https://github.com/sourcenetwork/defradb/pull/2907, adding the ability to delete a relationship (in order to revoke access from an identity).

## CLI Demo

The following revokes the target actor's 'relational' access to the private doc (can't read anymore):

```bash
defradb client acp relationship delete \
--collection Users \
--docID bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c \
--relation reader \
--actor did:key:z7r8os2G88XXBNBTLj3kFR5rzUJ4VAesbX7PgsA68ak9B5RYcXF5EZEmjRzzinZndPSSwujXb4XKHG6vmKEFG6ZfsfcQn \
--identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac
```

Result:
```json
{
 "RecordFound": true // <-------------- Indicates a relationship was found and deleted
}
```

## How has this been tested?

CI + Integration Tests + Unit Tests

---
acp/README.md | 64 ++
acp/acp.go | 17 +
acp/acp_local.go | 30 +
acp/acp_local_test.go | 296 ++++++++++
acp/acp_source_hub.go | 53 ++
acp/errors.go | 77 ++-
acp/source_hub_client.go | 85 +++
cli/acp_relationship_add.go | 3 +-
cli/acp_relationship_delete.go | 121 ++++
cli/cli.go | 1 +
client/acp.go | 7 +
client/db.go | 15 +
client/mocks/db.go | 60 ++
.../cli/defradb_client_acp_relationship.md | 1 +
.../defradb_client_acp_relationship_delete.md | 73 +++
docs/website/references/http/openapi.json | 115 +++-
http/client_acp.go | 48 ++
http/handler_acp.go | 82 ++-
http/openapi.go | 47 +-
internal/db/db.go | 40 ++
tests/clients/cli/wrapper_acp.go | 28 +
tests/clients/http/wrapper.go | 16 +
tests/integration/acp.go | 201 ++++++-
...icator_with_doc_actor_relationship_test.go | 70 +++
...scribe_with_doc_actor_relationship_test.go | 70 +++
.../add/invalid_test.go} | 2 +-
.../add/with_delete_test.go} | 2 +-
.../add/with_dummy_relation_test.go} | 2 +-
.../add/with_manager_gql_test.go} | 2 +-
.../add/with_manager_test.go} | 2 +-
.../add/with_no_policy_on_collection_test.go} | 2 +-
.../add/with_only_write_gql_test.go} | 2 +-
.../add/with_only_write_test.go} | 2 +-
.../add/with_public_document_test.go} | 2 +-
.../add/with_reader_gql_test.go} | 2 +-
.../add/with_reader_test.go} | 2 +-
.../add/with_update_gql_test.go} | 2 +-
.../add/with_update_test.go} | 2 +-
.../doc_actor/delete/invalid_test.go | 545 ++++++++++++++++++
.../doc_actor/delete/with_delete_test.go | 252 ++++++++
.../delete/with_dummy_relation_test.go | 302 ++++++++++
.../doc_actor/delete/with_manager_test.go | 534 +++++++++++++++++
.../with_no_policy_on_collection_test.go | 66 +++
.../delete/with_public_document_test.go | 147 +++++
.../doc_actor/delete/with_reader_test.go | 314 ++++++++++
.../doc_actor/delete/with_self_test.go | 272 +++++++++
.../doc_actor/delete/with_update_test.go | 458 +++++++++++++++
tests/integration/utils.go | 3 +
48 files changed, 4465 insertions(+), 74 deletions(-)
create mode 100644 cli/acp_relationship_delete.go
create mode 100644 docs/website/references/cli/defradb_client_acp_relationship_delete.md
rename tests/integration/acp/relationship/{add_doc_actor_test/add_doc_actor_invalid_test.go => doc_actor/add/invalid_test.go} (99%)
rename tests/integration/acp/relationship/{add_doc_actor_test/add_doc_actor_with_delete_test.go => doc_actor/add/with_delete_test.go} (99%)
rename tests/integration/acp/relationship/{add_doc_actor_test/add_doc_actor_with_dummy_relation_test.go => doc_actor/add/with_dummy_relation_test.go} (99%)
rename tests/integration/acp/relationship/{add_doc_actor_test/add_doc_actor_with_manager_gql_test.go => doc_actor/add/with_manager_gql_test.go} (99%)
rename
tests/integration/acp/relationship/{add_doc_actor_test/add_doc_actor_with_manager_test.go => doc_actor/add/with_manager_test.go} (99%)
rename tests/integration/acp/relationship/{add_doc_actor_test/add_doc_actor_collection_with_no_policy_test.go => doc_actor/add/with_no_policy_on_collection_test.go} (96%)
rename tests/integration/acp/relationship/{add_doc_actor_test/add_doc_actor_with_only_write_gql_test.go => doc_actor/add/with_only_write_gql_test.go} (98%)
rename tests/integration/acp/relationship/{add_doc_actor_test/add_doc_actor_with_only_write_test.go => doc_actor/add/with_only_write_test.go} (99%)
rename tests/integration/acp/relationship/{add_doc_actor_test/add_doc_actor_with_public_document_test.go => doc_actor/add/with_public_document_test.go} (98%)
rename tests/integration/acp/relationship/{add_doc_actor_test/add_doc_actor_with_reader_gql_test.go => doc_actor/add/with_reader_gql_test.go} (98%)
rename tests/integration/acp/relationship/{add_doc_actor_test/add_doc_actor_with_reader_test.go => doc_actor/add/with_reader_test.go} (99%)
rename tests/integration/acp/relationship/{add_doc_actor_test/add_doc_actor_with_update_gql_test.go => doc_actor/add/with_update_gql_test.go} (99%)
rename tests/integration/acp/relationship/{add_doc_actor_test/add_doc_actor_with_update_test.go => doc_actor/add/with_update_test.go} (99%)
create mode 100644 tests/integration/acp/relationship/doc_actor/delete/invalid_test.go
create mode 100644 tests/integration/acp/relationship/doc_actor/delete/with_delete_test.go
create mode 100644 tests/integration/acp/relationship/doc_actor/delete/with_dummy_relation_test.go
create mode 100644 tests/integration/acp/relationship/doc_actor/delete/with_manager_test.go
create mode 100644 tests/integration/acp/relationship/doc_actor/delete/with_no_policy_on_collection_test.go
create mode 100644 tests/integration/acp/relationship/doc_actor/delete/with_public_document_test.go
create mode 100644 tests/integration/acp/relationship/doc_actor/delete/with_reader_test.go
create mode 100644 tests/integration/acp/relationship/doc_actor/delete/with_self_test.go
create mode 100644 tests/integration/acp/relationship/doc_actor/delete/with_update_test.go
diff --git a/acp/README.md b/acp/README.md
index 4c2c73907a..4e8d5b7f5b 100644
--- a/acp/README.md
+++ b/acp/README.md
@@ -631,6 +631,70 @@ Result:
Error: document not found or not authorized to access
```
+### Revoking Access To Private Documents
+
+To revoke access to a document for an actor, we must delete the relationship between the
+actor and the document. In order to delete the relationship we require all of the following:
+
+1) Target DocID: The docID of the document we want to delete a relationship for.
+2) Collection Name: The name of the collection that has the Target DocID.
+3) Relation Name: The type of relation (name must be defined within the linked policy on collection).
+4) Target Identity: The identity of the actor the relationship is being deleted for.
+5) Requesting Identity: The identity of the actor that is making the request.
+
+Notes:
+ - ACP must be available (i.e. ACP can not be disabled).
+ - The target document must be registered with ACP already (policy & resource specified).
+ - The requesting identity MUST either be the owner OR the manager (manages the relation) of the resource.
+ - If the relationship record was not found, then it will be a no-op.
+
+Consider the same policy and added relationship from the previous example in the section above where we learnt
+how to share the document with other actors.
+
+We made the document accessible to an actor by adding a relationship:
+```sh
+defradb client acp relationship add \
+--collection Users \
+--docID bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c \
+--relation reader \
+--actor did:key:z7r8os2G88XXBNBTLj3kFR5rzUJ4VAesbX7PgsA68ak9B5RYcXF5EZEmjRzzinZndPSSwujXb4XKHG6vmKEFG6ZfsfcQn \
+--identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac
+```
+
+Result:
+```json
+{
+ "ExistedAlready": false
+}
+```
+
+Similarly, in order to revoke access to a document we run the following command to delete the relationship:
+```sh
+defradb client acp relationship delete \
+--collection Users \
+--docID bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c \
+--relation reader \
+--actor did:key:z7r8os2G88XXBNBTLj3kFR5rzUJ4VAesbX7PgsA68ak9B5RYcXF5EZEmjRzzinZndPSSwujXb4XKHG6vmKEFG6ZfsfcQn \
+--identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac
+```
+
+Result:
+```json
+{
+ "RecordFound": true
+}
+```
+
+**Note: If the same relationship is deleted again (or a record for a relationship does not exist) then the `RecordFound`
+would be false, indicating a no-op**
+
+Now the other actor can no longer read:
+```sh
+defradb client collection docIDs --identity 4d092126012ebaf56161716018a71630d99443d9d5217e9d8502bb5c5456f2c5
+```
+
+**Result is empty from the above command**
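+
+For embedded Go use, the same revocation is exposed on the client `DB` interface via
+`DeleteDocActorRelationship`. A minimal sketch (the `RevokeReader` helper is hypothetical,
+and the request context is assumed to already carry the requesting identity):
+
+```go
+import (
+	"context"
+
+	"github.com/sourcenetwork/defradb/client"
+)
+
+// RevokeReader removes the "reader" relation between a target actor and a document
+// in the Users collection, mirroring the CLI command shown above.
+func RevokeReader(ctx context.Context, db client.DB, docID, targetActorDID string) error {
+	result, err := db.DeleteDocActorRelationship(ctx, "Users", docID, "reader", targetActorDID)
+	if err != nil {
+		return err
+	}
+	if !result.RecordFound {
+		// No matching relationship existed, so the call was a no-op.
+	}
+	return nil
+}
+```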
+
 ## DAC Usage HTTP:
 ### Authentication
diff --git a/acp/acp.go b/acp/acp.go
index c7ae5936e6..d30c45d128 100644
--- a/acp/acp.go
+++ b/acp/acp.go
@@ -115,6 +115,23 @@ type ACP interface {
 targetActor string,
 ) (bool, error)
+
+ // DeleteDocActorRelationship deletes a relationship between a document and the target actor.
+ //
+ // If failure occurs, the result will return an error. Upon success the boolean value will
+ // be true if the relationship record was found and deleted. Upon success the boolean
+ // value will be false if the relationship record was not found (no-op).
+ //
+ // Note: The request actor must either be the owner or manager of the document.
+ DeleteDocActorRelationship(
+ ctx context.Context,
+ policyID string,
+ resourceName string,
+ docID string,
+ relation string,
+ requestActor identity.Identity,
+ targetActor string,
+ ) (bool, error)
+
 // SupportsP2P returns true if the implementation supports ACP across a peer network.
SupportsP2P() bool } diff --git a/acp/acp_local.go b/acp/acp_local.go index 6e85ac9313..a8a0d32290 100644 --- a/acp/acp_local.go +++ b/acp/acp_local.go @@ -267,3 +267,33 @@ func (l *ACPLocal) AddActorRelationship( return setRelationshipResponse.RecordExisted, nil } + +func (l *ACPLocal) DeleteActorRelationship( + ctx context.Context, + policyID string, + resourceName string, + objectID string, + relation string, + requester identity.Identity, + targetActor string, + creationTime *protoTypes.Timestamp, +) (bool, error) { + principal, err := auth.NewDIDPrincipal(requester.DID) + if err != nil { + return false, newErrInvalidActorID(err, requester.DID) + } + + ctx = auth.InjectPrincipal(ctx, principal) + + deleteRelationshipRequest := types.DeleteRelationshipRequest{ + PolicyId: policyID, + Relationship: types.NewActorRelationship(resourceName, objectID, relation, targetActor), + } + + deleteRelationshipResponse, err := l.engine.DeleteRelationship(ctx, &deleteRelationshipRequest) + if err != nil { + return false, err + } + + return deleteRelationshipResponse.RecordFound, nil +} diff --git a/acp/acp_local_test.go b/acp/acp_local_test.go index 7b30b44cbb..fce65e9974 100644 --- a/acp/acp_local_test.go +++ b/acp/acp_local_test.go @@ -854,6 +854,223 @@ func Test_LocalACP_PersistentMemory_AddDocActorRelationship_FalseIfExistsBeforeT require.Nil(t, errClose) } +func Test_LocalACP_InMemory_DeleteDocActorRelationship_TrueIfFoundAndDeletedFalseOtherwise(t *testing.T) { + ctx := context.Background() + localACP := NewLocalACP() + + localACP.Init(ctx, "") + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + // Register a document. + errRegisterDoc := localACP.RegisterDocObject( + ctx, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errRegisterDoc) + + // Grant other identity access. + exists, errAddDocActorRelationship := localACP.AddDocActorRelationship( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + "reader", + identity1, + identity2.DID, + ) + require.Nil(t, errAddDocActorRelationship) + require.False(t, exists) + + // Now the other identity has access. + hasAccess, errCheckDocAccess := localACP.CheckDocAccess( + ctx, + ReadPermission, + identity2.DID, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.True(t, hasAccess) + + // Delete other identity's access by removing their relationship. + foundRecord, errDeleteDocActorRelationship := localACP.DeleteDocActorRelationship( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + "reader", + identity1, + identity2.DID, + ) + require.Nil(t, errDeleteDocActorRelationship) + require.True(t, foundRecord) + + // Deleting same relationship again should be no-op. + foundRecord, errDeleteDocActorRelationship = localACP.DeleteDocActorRelationship( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + "reader", + identity1, + identity2.DID, + ) + require.Nil(t, errDeleteDocActorRelationship) + require.False(t, foundRecord) // Is a no-op + + // Other identity now has no access again. 
+ hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity2.DID, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.False(t, hasAccess) + + errClose := localACP.Close() + require.Nil(t, errClose) +} + +func Test_LocalACP_PersistentMemory_DeleteDocActorRelationship_TrueIfFoundAndDeletedFalseOtherwise(t *testing.T) { + acpPath := t.TempDir() + require.NotEqual(t, "", acpPath) + + ctx := context.Background() + localACP := NewLocalACP() + + localACP.Init(ctx, acpPath) + errStart := localACP.Start(ctx) + require.Nil(t, errStart) + + policyID, errAddPolicy := localACP.AddPolicy( + ctx, + identity1, + validPolicy, + ) + require.Nil(t, errAddPolicy) + require.Equal( + t, + validPolicyID, + policyID, + ) + + // Register a document. + errRegisterDoc := localACP.RegisterDocObject( + ctx, + identity1, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errRegisterDoc) + + // Grant other identity access. + exists, errAddDocActorRelationship := localACP.AddDocActorRelationship( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + "reader", + identity1, + identity2.DID, + ) + require.Nil(t, errAddDocActorRelationship) + require.False(t, exists) + + // Now the other identity has access. + hasAccess, errCheckDocAccess := localACP.CheckDocAccess( + ctx, + ReadPermission, + identity2.DID, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.True(t, hasAccess) + + // Delete other identity's access by removing their relationship. + foundRecord, errDeleteDocActorRelationship := localACP.DeleteDocActorRelationship( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + "reader", + identity1, + identity2.DID, + ) + require.Nil(t, errDeleteDocActorRelationship) + require.True(t, foundRecord) + + // Deleting same relationship again should be no-op. + foundRecord, errDeleteDocActorRelationship = localACP.DeleteDocActorRelationship( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + "reader", + identity1, + identity2.DID, + ) + require.Nil(t, errDeleteDocActorRelationship) + require.False(t, foundRecord) // Is a no-op + + // Other identity now has no access again. + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity2.DID, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.False(t, hasAccess) + + // Should continue having their correct behaviour and access even after a restart. + errClose := localACP.Close() + require.Nil(t, errClose) + + localACP.Init(ctx, acpPath) + errStart = localACP.Start(ctx) + require.Nil(t, errStart) + + // Now check again after the restart that the second identity still has no access. 
+ hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + ReadPermission, + identity2.DID, + validPolicyID, + "users", + "documentID_XYZ", + ) + require.Nil(t, errCheckDocAccess) + require.False(t, hasAccess) + + errClose = localACP.Close() + require.Nil(t, errClose) +} + func Test_LocalACP_InMemory_AddPolicy_InvalidCreatorIDReturnsError(t *testing.T) { ctx := context.Background() localACP := NewLocalACP() @@ -1024,3 +1241,82 @@ func Test_LocalACP_Persistent_AddDocActorRelationship_InvalidIdentitiesReturnErr err = localACP.Close() require.NoError(t, err) } + +func Test_LocalACP_InMemory_DeleteDocActorRelationship_InvalidIdentitiesReturnError(t *testing.T) { + ctx := context.Background() + localACP := NewLocalACP() + + localACP.Init(ctx, "") + err := localACP.Start(ctx) + require.Nil(t, err) + + // Invalid requesting identity. + exists, err := localACP.DeleteDocActorRelationship( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + "reader", + invalidIdentity, + identity2.DID, + ) + require.False(t, exists) + require.ErrorIs(t, err, ErrInvalidActorID) + + // Invalid target actor. + exists, err = localACP.DeleteDocActorRelationship( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + "reader", + identity1, + invalidIdentity.DID, + ) + require.False(t, exists) + require.ErrorIs(t, err, ErrFailedToDeleteDocActorRelationshipWithACP) + + err = localACP.Close() + require.NoError(t, err) +} + +func Test_LocalACP_Persistent_DeleteDocActorRelationship_InvalidIdentitiesReturnError(t *testing.T) { + acpPath := t.TempDir() + require.NotEqual(t, "", acpPath) + + ctx := context.Background() + localACP := NewLocalACP() + + localACP.Init(ctx, acpPath) + err := localACP.Start(ctx) + require.Nil(t, err) + + // Invalid requesting identity. + exists, err := localACP.DeleteDocActorRelationship( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + "reader", + invalidIdentity, + identity2.DID, + ) + require.False(t, exists) + require.ErrorIs(t, err, ErrInvalidActorID) + + // Invalid target actor. 
+ exists, err = localACP.DeleteDocActorRelationship( + ctx, + validPolicyID, + "users", + "documentID_XYZ", + "reader", + identity1, + invalidIdentity.DID, + ) + require.False(t, exists) + require.ErrorIs(t, err, ErrFailedToDeleteDocActorRelationshipWithACP) + + err = localACP.Close() + require.NoError(t, err) +} diff --git a/acp/acp_source_hub.go b/acp/acp_source_hub.go index d0c4fb6b89..edd6008b63 100644 --- a/acp/acp_source_hub.go +++ b/acp/acp_source_hub.go @@ -311,3 +311,56 @@ func (a *acpSourceHub) AddActorRelationship( return cmdResult.GetResult().GetSetRelationshipResult().RecordExisted, nil } + +func (a *acpSourceHub) DeleteActorRelationship( + ctx context.Context, + policyID string, + resourceName string, + objectID string, + relation string, + requester identity.Identity, + targetActor string, + creationTime *protoTypes.Timestamp, +) (bool, error) { + msgSet := sourcehub.MsgSet{} + cmdMapper := msgSet.WithBearerPolicyCmd(&acptypes.MsgBearerPolicyCmd{ + Creator: a.signer.GetAccAddress(), + BearerToken: requester.BearerToken, + PolicyId: policyID, + Cmd: acptypes.NewDeleteRelationshipCmd( + acptypes.NewActorRelationship( + resourceName, + objectID, + relation, + targetActor, + ), + ), + CreationTime: creationTime, + }) + + tx, err := a.txBuilder.Build(ctx, a.signer, &msgSet) + if err != nil { + return false, err + } + + resp, err := a.client.BroadcastTx(ctx, tx) + if err != nil { + return false, err + } + + result, err := a.client.AwaitTx(ctx, resp.TxHash) + if err != nil { + return false, err + } + + if result.Error() != nil { + return false, result.Error() + } + + cmdResult, err := cmdMapper.Map(result.TxPayload()) + if err != nil { + return false, err + } + + return cmdResult.GetResult().GetDeleteRelationshipResult().GetRecordFound(), nil +} diff --git a/acp/errors.go b/acp/errors.go index e0717f15dd..72fbc00b95 100644 --- a/acp/errors.go +++ b/acp/errors.go @@ -15,14 +15,16 @@ import ( ) const ( - errInitializationOfACPFailed = "initialization of acp failed" - errStartingACPInEmptyPath = "starting acp in an empty path" - errFailedToAddPolicyWithACP = "failed to add policy with acp" - errFailedToRegisterDocWithACP = "failed to register document with acp" - errFailedToCheckIfDocIsRegisteredWithACP = "failed to check if doc is registered with acp" - errFailedToVerifyDocAccessWithACP = "failed to verify doc access with acp" - errFailedToAddDocActorRelationshipWithACP = "failed to add document actor relationship with acp" - errMissingRequiredArgToAddDocActorRelationship = "missing a required argument needed to add doc actor relationship" + errInitializationOfACPFailed = "initialization of acp failed" + errStartingACPInEmptyPath = "starting acp in an empty path" + errFailedToAddPolicyWithACP = "failed to add policy with acp" + errFailedToRegisterDocWithACP = "failed to register document with acp" + errFailedToCheckIfDocIsRegisteredWithACP = "failed to check if doc is registered with acp" + errFailedToVerifyDocAccessWithACP = "failed to verify doc access with acp" + errFailedToAddDocActorRelationshipWithACP = "failed to add document actor relationship with acp" + errFailedToDeleteDocActorRelationshipWithACP = "failed to delete document actor relationship with acp" + errMissingReqArgToAddDocActorRelationship = "missing a required argument needed to add doc actor relationship" + errMissingReqArgToDeleteDocActorRelationship = "missing a required argument needed to delete doc actor relationship" errObjectDidNotRegister = "no-op while registering object (already exists or error) with acp" 
errNoPolicyArgs = "missing policy arguments, must have both id and resource" @@ -42,13 +44,14 @@ const ( ) var ( - ErrInitializationOfACPFailed = errors.New(errInitializationOfACPFailed) - ErrFailedToAddPolicyWithACP = errors.New(errFailedToAddPolicyWithACP) - ErrFailedToRegisterDocWithACP = errors.New(errFailedToRegisterDocWithACP) - ErrFailedToCheckIfDocIsRegisteredWithACP = errors.New(errFailedToCheckIfDocIsRegisteredWithACP) - ErrFailedToVerifyDocAccessWithACP = errors.New(errFailedToVerifyDocAccessWithACP) - ErrFailedToAddDocActorRelationshipWithACP = errors.New(errFailedToAddDocActorRelationshipWithACP) - ErrPolicyDoesNotExistWithACP = errors.New(errPolicyDoesNotExistWithACP) + ErrInitializationOfACPFailed = errors.New(errInitializationOfACPFailed) + ErrFailedToAddPolicyWithACP = errors.New(errFailedToAddPolicyWithACP) + ErrFailedToRegisterDocWithACP = errors.New(errFailedToRegisterDocWithACP) + ErrFailedToCheckIfDocIsRegisteredWithACP = errors.New(errFailedToCheckIfDocIsRegisteredWithACP) + ErrFailedToVerifyDocAccessWithACP = errors.New(errFailedToVerifyDocAccessWithACP) + ErrFailedToAddDocActorRelationshipWithACP = errors.New(errFailedToAddDocActorRelationshipWithACP) + ErrFailedToDeleteDocActorRelationshipWithACP = errors.New(errFailedToDeleteDocActorRelationshipWithACP) + ErrPolicyDoesNotExistWithACP = errors.New(errPolicyDoesNotExistWithACP) ErrResourceDoesNotExistOnTargetPolicy = errors.New(errResourceDoesNotExistOnTargetPolicy) @@ -165,6 +168,29 @@ func NewErrFailedToAddDocActorRelationshipWithACP( ) } +func NewErrFailedToDeleteDocActorRelationshipWithACP( + inner error, + Type string, + policyID string, + resourceName string, + docID string, + relation string, + requestActor string, + targetActor string, +) error { + return errors.Wrap( + errFailedToDeleteDocActorRelationshipWithACP, + inner, + errors.NewKV("Type", Type), + errors.NewKV("PolicyID", policyID), + errors.NewKV("ResourceName", resourceName), + errors.NewKV("DocID", docID), + errors.NewKV("Relation", relation), + errors.NewKV("RequestActor", requestActor), + errors.NewKV("TargetActor", targetActor), + ) +} + func newErrPolicyDoesNotExistWithACP( inner error, policyID string, @@ -244,7 +270,26 @@ func NewErrMissingRequiredArgToAddDocActorRelationship( targetActor string, ) error { return errors.New( - errMissingRequiredArgToAddDocActorRelationship, + errMissingReqArgToAddDocActorRelationship, + errors.NewKV("PolicyID", policyID), + errors.NewKV("ResourceName", resourceName), + errors.NewKV("DocID", docID), + errors.NewKV("Relation", relation), + errors.NewKV("RequestActor", requestActor), + errors.NewKV("TargetActor", targetActor), + ) +} + +func NewErrMissingRequiredArgToDeleteDocActorRelationship( + policyID string, + resourceName string, + docID string, + relation string, + requestActor string, + targetActor string, +) error { + return errors.New( + errMissingReqArgToDeleteDocActorRelationship, errors.NewKV("PolicyID", policyID), errors.NewKV("ResourceName", resourceName), errors.NewKV("DocID", docID), diff --git a/acp/source_hub_client.go b/acp/source_hub_client.go index 0bfbae72b1..b211214d9f 100644 --- a/acp/source_hub_client.go +++ b/acp/source_hub_client.go @@ -106,6 +106,27 @@ type sourceHubClient interface { creationTime *protoTypes.Timestamp, ) (bool, error) + // DeleteActorRelationship deletes a relationship within a policy which ties the target actor + // with the specified object, which means that the set of high level rules defined in the + // policy for that relation no-longer will apply to target 
actor.
+ //
+ // If failure occurs, the result will return an error. Upon success the boolean value will
+ // be true if the relationship record was found and deleted. Upon success the boolean value
+ // will be false if the relationship record was not found (no-op).
+ //
+ // Note: The requester identity must either be the owner of the object (being shared) or
+ // the manager (i.e. the relation has `manages` defined in the policy).
+ DeleteActorRelationship(
+ ctx context.Context,
+ policyID string,
+ resourceName string,
+ objectID string,
+ relation string,
+ requester identity.Identity,
+ targetActor string,
+ creationTime *protoTypes.Timestamp,
+ ) (bool, error)
+
 // Close closes any resources in use by acp.
 Close() error
 }
@@ -420,6 +441,70 @@ func (a *sourceHubBridge) AddDocActorRelationship(
 return exists, nil
 }
+// DeleteDocActorRelationship validates the given arguments, delegates the relationship
+// deletion to the underlying ACP client, and logs the outcome.
+func (a *sourceHubBridge) DeleteDocActorRelationship(
+ ctx context.Context,
+ policyID string,
+ resourceName string,
+ docID string,
+ relation string,
+ requestActor identity.Identity,
+ targetActor string,
+) (bool, error) {
+ if policyID == "" ||
+ resourceName == "" ||
+ docID == "" ||
+ relation == "" ||
+ requestActor == (identity.Identity{}) ||
+ targetActor == "" {
+ return false, NewErrMissingRequiredArgToDeleteDocActorRelationship(
+ policyID,
+ resourceName,
+ docID,
+ relation,
+ requestActor.DID,
+ targetActor,
+ )
+ }
+
+ recordFound, err := a.client.DeleteActorRelationship(
+ ctx,
+ policyID,
+ resourceName,
+ docID,
+ relation,
+ requestActor,
+ targetActor,
+ protoTypes.TimestampNow(),
+ )
+
+ if err != nil {
+ return false, NewErrFailedToDeleteDocActorRelationshipWithACP(
+ err,
+ "Local",
+ policyID,
+ resourceName,
+ docID,
+ relation,
+ requestActor.DID,
+ targetActor,
+ )
+ }
+
+ log.InfoContext(
+ ctx,
+ "Document and actor relationship delete",
+ corelog.Any("PolicyID", policyID),
+ corelog.Any("ResourceName", resourceName),
+ corelog.Any("DocID", docID),
+ corelog.Any("Relation", relation),
+ corelog.Any("RequestActor", requestActor.DID),
+ corelog.Any("TargetActor", targetActor),
+ corelog.Any("RecordFound", recordFound),
+ )
+
+ return recordFound, nil
+}
+
 func (a *sourceHubBridge) SupportsP2P() bool {
 _, ok := a.client.(*acpSourceHub)
 return ok
diff --git a/cli/acp_relationship_add.go b/cli/acp_relationship_add.go
index 9733732af8..59b5c3cd32 100644
--- a/cli/acp_relationship_add.go
+++ b/cli/acp_relationship_add.go
@@ -117,11 +117,10 @@ Example: Creating a dummy relationship does nothing (from database perspective):
 )
 _ = cmd.MarkFlagRequired(targetActorFlagLong)
- cmd.Flags().StringVarP(
+ cmd.Flags().StringVar(
 &docIDArg,
 docIDFlag,
 "",
- "",
 "Document Identifier (ObjectID) to make relationship for",
 )
 _ = cmd.MarkFlagRequired(docIDFlag)
diff --git a/cli/acp_relationship_delete.go b/cli/acp_relationship_delete.go
new file mode 100644
index 0000000000..7e0852e301
--- /dev/null
+++ b/cli/acp_relationship_delete.go
@@ -0,0 +1,121 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package cli
+
+import (
+	"github.com/spf13/cobra"
+)
+
+func MakeACPRelationshipDeleteCommand() *cobra.Command {
+	const (
+		collectionFlagLong  string = "collection"
+		collectionFlagShort string = "c"
+
+		relationFlagLong  string = "relation"
+		relationFlagShort string = "r"
+
+		targetActorFlagLong  string = "actor"
+		targetActorFlagShort string = "a"
+
+		docIDFlag string = "docID"
+	)
+
+	var (
+		collectionArg  string
+		relationArg    string
+		targetActorArg string
+		docIDArg       string
+	)
+
+	var cmd = &cobra.Command{
+		Use:   "delete [--docID] [-c --collection] [-r --relation] [-a --actor] [-i --identity]",
+		Short: "Delete relationship",
+		Long: `Delete relationship
+
+To revoke access to a document for an actor, we must delete the relationship between the
+actor and the document. In order to delete the relationship we require all of the following:
+
+1) Target DocID: The docID of the document we want to delete a relationship for.
+2) Collection Name: The name of the collection that has the Target DocID.
+3) Relation Name: The type of relation (name must be defined within the linked policy on collection).
+4) Target Identity: The identity of the actor the relationship is being deleted for.
+5) Requesting Identity: The identity of the actor that is making the request.
+
+Notes:
+  - ACP must be available (i.e. ACP can not be disabled).
+  - The target document must be registered with ACP already (policy & resource specified).
+  - The requesting identity MUST either be the owner OR the manager (manages the relation) of the resource.
+  - If the relationship record was not found, then it will be a no-op.
+  - Learn more about [ACP & DPI Rules](/acp/README.md)
+
+Example: Revoke read access for another actor (4d092126012ebaf56161716018a71630d99443d9d5217e9d8502bb5c5456f2c5) on a private document:
+  defradb client acp relationship delete \
+	--collection Users \
+	--docID bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c \
+	--relation reader \
+	--actor did:key:z7r8os2G88XXBNBTLj3kFR5rzUJ4VAesbX7PgsA68ak9B5RYcXF5EZEmjRzzinZndPSSwujXb4XKHG6vmKEFG6ZfsfcQn \
+	--identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac
+`,
+		RunE: func(cmd *cobra.Command, args []string) error {
+			db := mustGetContextDB(cmd)
+			deleteDocActorRelationshipResult, err := db.DeleteDocActorRelationship(
+				cmd.Context(),
+				collectionArg,
+				docIDArg,
+				relationArg,
+				targetActorArg,
+			)
+
+			if err != nil {
+				return err
+			}
+
+			return writeJSON(cmd, deleteDocActorRelationshipResult)
+		},
+	}
+
+	cmd.Flags().StringVarP(
+		&collectionArg,
+		collectionFlagLong,
+		collectionFlagShort,
+		"",
+		"Collection that has the resource and policy for object",
+	)
+	_ = cmd.MarkFlagRequired(collectionFlagLong)
+
+	cmd.Flags().StringVarP(
+		&relationArg,
+		relationFlagLong,
+		relationFlagShort,
+		"",
+		"Relation that needs to be deleted within the relationship",
+	)
+	_ = cmd.MarkFlagRequired(relationFlagLong)
+
+	cmd.Flags().StringVarP(
+		&targetActorArg,
+		targetActorFlagLong,
+		targetActorFlagShort,
+		"",
+		"Actor to delete relationship for",
+	)
+	_ = cmd.MarkFlagRequired(targetActorFlagLong)
+
+	cmd.Flags().StringVar(
+		&docIDArg,
+		docIDFlag,
+		"",
+		"Document Identifier (ObjectID) to delete relationship for",
+	)
+	_ = cmd.MarkFlagRequired(docIDFlag)
+
+	return cmd
+}
diff --git a/cli/cli.go b/cli/cli.go
index 61d1fd51cf..f6950225a6 100644
--- a/cli/cli.go
+++ b/cli/cli.go
@@ -70,6 +70,7 @@ func NewDefraCommand() *cobra.Command {
 	acp_relationship := MakeACPRelationshipCommand()
 	acp_relationship.AddCommand(
 		MakeACPRelationshipAddCommand(),
+		MakeACPRelationshipDeleteCommand(),
 	)
 
 	acp := MakeACPCommand()
diff --git a/client/acp.go b/client/acp.go
index 7795369c8f..19d330fe02 100644
--- a/client/acp.go
+++ b/client/acp.go
@@ -36,3 +36,10 @@ type AddDocActorRelationshipResult struct {
 	// it is false if a new relationship was created.
 	ExistedAlready bool
 }
+
+// DeleteDocActorRelationshipResult wraps the result of deleting a document-actor relationship.
+type DeleteDocActorRelationshipResult struct {
+	// RecordFound is true if the relationship record was found, and
+	// is false if the relationship record was not found (no-op).
+	RecordFound bool
+}
diff --git a/client/db.go b/client/db.go
index e28d21df02..4838773dde 100644
--- a/client/db.go
+++ b/client/db.go
@@ -120,6 +120,21 @@ type DB interface {
 		relation string,
 		targetActor string,
 	) (AddDocActorRelationshipResult, error)
+
+	// DeleteDocActorRelationship deletes a relationship between a document and the target actor.
+	//
+	// If failure occurs, the result will return an error. Upon success the boolean value will
+	// be true if the relationship record was found and deleted. Upon success the boolean value
+	// will be false if the relationship record was not found (no-op).
+	//
+	// Note: The request actor must either be the owner or manager of the document.
+	DeleteDocActorRelationship(
+		ctx context.Context,
+		collectionName string,
+		docID string,
+		relation string,
+		targetActor string,
+	) (DeleteDocActorRelationshipResult, error)
 }
 
 // Store contains the core DefraDB read-write operations.
diff --git a/client/mocks/db.go b/client/mocks/db.go
index 1297870e15..73cf4b3665 100644
--- a/client/mocks/db.go
+++ b/client/mocks/db.go
@@ -492,6 +492,66 @@ func (_c *DB_Close_Call) RunAndReturn(run func()) *DB_Close_Call {
 	return _c
 }
 
+// DeleteDocActorRelationship provides a mock function with given fields: ctx, collectionName, docID, relation, targetActor
+func (_m *DB) DeleteDocActorRelationship(ctx context.Context, collectionName string, docID string, relation string, targetActor string) (client.DeleteDocActorRelationshipResult, error) {
+	ret := _m.Called(ctx, collectionName, docID, relation, targetActor)
+
+	if len(ret) == 0 {
+		panic("no return value specified for DeleteDocActorRelationship")
+	}
+
+	var r0 client.DeleteDocActorRelationshipResult
+	var r1 error
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string) (client.DeleteDocActorRelationshipResult, error)); ok {
+		return rf(ctx, collectionName, docID, relation, targetActor)
+	}
+	if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string) client.DeleteDocActorRelationshipResult); ok {
+		r0 = rf(ctx, collectionName, docID, relation, targetActor)
+	} else {
+		r0 = ret.Get(0).(client.DeleteDocActorRelationshipResult)
+	}
+
+	if rf, ok := ret.Get(1).(func(context.Context, string, string, string, string) error); ok {
+		r1 = rf(ctx, collectionName, docID, relation, targetActor)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// DB_DeleteDocActorRelationship_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteDocActorRelationship'
+type DB_DeleteDocActorRelationship_Call struct {
+	*mock.Call
+}
+
+// DeleteDocActorRelationship is a helper method to define mock.On call
+//   - ctx context.Context
+//   - collectionName string
+//   - docID string
+//   - relation string
+//   - targetActor string
+func (_e *DB_Expecter) DeleteDocActorRelationship(ctx interface{}, collectionName interface{}, docID interface{}, relation interface{}, targetActor interface{}) *DB_DeleteDocActorRelationship_Call {
+	return &DB_DeleteDocActorRelationship_Call{Call: _e.mock.On("DeleteDocActorRelationship", ctx, collectionName, docID, relation, targetActor)}
+}
+
+func (_c *DB_DeleteDocActorRelationship_Call) Run(run func(ctx context.Context, collectionName string, docID string, relation string, targetActor string)) *DB_DeleteDocActorRelationship_Call {
+	_c.Call.Run(func(args mock.Arguments) {
+		run(args[0].(context.Context), args[1].(string), args[2].(string), args[3].(string), args[4].(string))
+	})
+	return _c
+}
+
+func (_c *DB_DeleteDocActorRelationship_Call) Return(_a0 client.DeleteDocActorRelationshipResult, _a1 error) *DB_DeleteDocActorRelationship_Call {
+	_c.Call.Return(_a0, _a1)
+	return _c
+}
+
+func (_c *DB_DeleteDocActorRelationship_Call) RunAndReturn(run func(context.Context, string, string, string, string) (client.DeleteDocActorRelationshipResult, error)) *DB_DeleteDocActorRelationship_Call {
+	_c.Call.Return(run)
+	return _c
+}
+
 // DeleteReplicator provides a mock function with given fields: ctx, rep
 func (_m *DB) DeleteReplicator(ctx context.Context, rep client.Replicator) error {
 	ret := _m.Called(ctx, rep)
diff --git a/docs/website/references/cli/defradb_client_acp_relationship.md b/docs/website/references/cli/defradb_client_acp_relationship.md
index 4c204d0ccd..2518f6c3ed 100644
--- a/docs/website/references/cli/defradb_client_acp_relationship.md
+++ b/docs/website/references/cli/defradb_client_acp_relationship.md
@@ -38,4 +38,5 @@ Interact with the acp relationship features of DefraDB instance
 * [defradb client acp](defradb_client_acp.md) - Interact with the access control system of a DefraDB node
 * [defradb client acp relationship add](defradb_client_acp_relationship_add.md) - Add new relationship
+* [defradb client acp relationship delete](defradb_client_acp_relationship_delete.md) - Delete relationship
 
diff --git a/docs/website/references/cli/defradb_client_acp_relationship_delete.md b/docs/website/references/cli/defradb_client_acp_relationship_delete.md
new file mode 100644
index 0000000000..501f5fb242
--- /dev/null
+++ b/docs/website/references/cli/defradb_client_acp_relationship_delete.md
@@ -0,0 +1,73 @@
+## defradb client acp relationship delete
+
+Delete relationship
+
+### Synopsis
+
+Delete relationship
+
+To revoke access to a document for an actor, we must delete the relationship between the
+actor and the document. In order to delete the relationship we require all of the following:
+
+1) Target DocID: The docID of the document we want to delete a relationship for.
+2) Collection Name: The name of the collection that has the Target DocID.
+3) Relation Name: The type of relation (name must be defined within the linked policy on collection).
+4) Target Identity: The identity of the actor the relationship is being deleted for.
+5) Requesting Identity: The identity of the actor that is making the request.
+
+Notes:
+  - ACP must be available (i.e. ACP can not be disabled).
+  - The target document must be registered with ACP already (policy & resource specified).
+  - The requesting identity MUST either be the owner OR the manager (manages the relation) of the resource.
+  - If the relationship record was not found, then it will be a no-op.
+  - Learn more about [ACP & DPI Rules](/acp/README.md)
+
+Example: Revoke read access for another actor (4d092126012ebaf56161716018a71630d99443d9d5217e9d8502bb5c5456f2c5) on a private document:
+  defradb client acp relationship delete \
+	--collection Users \
+	--docID bae-ff3ceb1c-b5c0-5e86-a024-dd1b16a4261c \
+	--relation reader \
+	--actor did:key:z7r8os2G88XXBNBTLj3kFR5rzUJ4VAesbX7PgsA68ak9B5RYcXF5EZEmjRzzinZndPSSwujXb4XKHG6vmKEFG6ZfsfcQn \
+	--identity e3b722906ee4e56368f581cd8b18ab0f48af1ea53e635e3f7b8acd076676f6ac
+
+
+```
+defradb client acp relationship delete [--docID] [-c --collection] [-r --relation] [-a --actor] [-i --identity] [flags]
+```
+
+### Options
+
+```
+  -a, --actor string        Actor to delete relationship for
+  -c, --collection string   Collection that has the resource and policy for object
+      --docID string        Document Identifier (ObjectID) to delete relationship for
+  -h, --help                help for delete
+  -r, --relation string     Relation that needs to be deleted within the relationship
+```
+
+### Options inherited from parent commands
+
+```
+  -i, --identity string             Hex formatted private key used to authenticate with ACP
+      --keyring-backend string      Keyring backend to use. Options are file or system (default "file")
+      --keyring-namespace string    Service name to use when using the system backend (default "defradb")
+      --keyring-path string         Path to store encrypted keys when using the file backend (default "keys")
+      --log-format string           Log format to use. Options are text or json (default "text")
+      --log-level string            Log level to use. Options are debug, info, error, fatal (default "info")
+      --log-output string           Log output path. Options are stderr or stdout. (default "stderr")
+      --log-overrides string        Logger config overrides. Format ,=,...;,...
+      --log-source                  Include source location in logs
+      --log-stacktrace              Include stacktrace in error and fatal logs
+      --no-keyring                  Disable the keyring and generate ephemeral keys
+      --no-log-color                Disable colored log output
+      --rootdir string              Directory for persistent data (default: $HOME/.defradb)
+      --secret-file string          Path to the file containing secrets (default ".env")
+      --source-hub-address string   The SourceHub address authorized by the client to make SourceHub transactions on behalf of the actor
+      --tx uint                     Transaction ID
+      --url string                  URL of HTTP endpoint to listen on or connect to (default "127.0.0.1:9181")
+```
+
+### SEE ALSO
+
+* [defradb client acp relationship](defradb_client_acp_relationship.md) - Interact with the acp relationship features of DefraDB instance
+
diff --git a/docs/website/references/http/openapi.json b/docs/website/references/http/openapi.json
index c0a7898364..77168f7e93 100644
--- a/docs/website/references/http/openapi.json
+++ b/docs/website/references/http/openapi.json
@@ -27,6 +27,64 @@
       }
     },
     "schemas": {
+      "acp_policy_add_result": {
+        "properties": {
+          "PolicyID": {
+            "type": "string"
+          }
+        },
+        "type": "object"
+      },
+      "acp_relationship_add_request": {
+        "properties": {
+          "CollectionName": {
+            "type": "string"
+          },
+          "DocID": {
+            "type": "string"
+          },
+          "Relation": {
+            "type": "string"
+          },
+          "TargetActor": {
+            "type": "string"
+          }
+        },
+        "type": "object"
+      },
+      "acp_relationship_add_result": {
+        "properties": {
+          "ExistedAlready": {
+            "type": "boolean"
+          }
+        },
+        "type": "object"
+      },
+      "acp_relationship_delete_request": {
+        "properties": {
+          "CollectionName": {
+            "type": "string"
+          },
+          "DocID": {
+            "type": "string"
+          },
+          "Relation": {
+            "type": "string"
+          },
+          "TargetActor": {
+            "type": "string"
+          }
+        },
+        "type": "object"
+      },
+
"acp_relationship_delete_result": { + "properties": { + "RecordFound": { + "type": "boolean" + } + }, + "type": "object" + }, "add_view_request": { "properties": { "Query": { @@ -574,7 +632,14 @@ }, "responses": { "200": { - "$ref": "#/components/responses/success" + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/acp_policy_add_result" + } + } + }, + "description": "Add acp policy result" }, "400": { "$ref": "#/components/responses/error" @@ -589,14 +654,49 @@ } }, "/acp/relationship": { + "delete": { + "description": "Delete an actor relationship using acp system", + "operationId": "delete relationship", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/acp_relationship_delete_request" + } + } + }, + "required": true + }, + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/acp_relationship_delete_result" + } + } + }, + "description": "Delete acp relationship result" + }, + "400": { + "$ref": "#/components/responses/error" + }, + "default": { + "description": "" + } + }, + "tags": [ + "acp_relationship" + ] + }, "post": { "description": "Add an actor relationship using acp system", "operationId": "add relationship", "requestBody": { "content": { - "text/plain": { + "application/json": { "schema": { - "type": "string" + "$ref": "#/components/schemas/acp_relationship_add_request" } } }, @@ -604,7 +704,14 @@ }, "responses": { "200": { - "$ref": "#/components/responses/success" + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/acp_relationship_add_result" + } + } + }, + "description": "Add acp relationship result" }, "400": { "$ref": "#/components/responses/error" diff --git a/http/client_acp.go b/http/client_acp.go index d4f1ed02e5..91a2f502b2 100644 --- a/http/client_acp.go +++ b/http/client_acp.go @@ -92,3 +92,51 @@ func (c *Client) AddDocActorRelationship( return addDocActorRelResult, nil } + +type deleteDocActorRelationshipRequest struct { + CollectionName string + DocID string + Relation string + TargetActor string +} + +func (c *Client) DeleteDocActorRelationship( + ctx context.Context, + collectionName string, + docID string, + relation string, + targetActor string, +) (client.DeleteDocActorRelationshipResult, error) { + methodURL := c.http.baseURL.JoinPath("acp", "relationship") + + body, err := json.Marshal( + deleteDocActorRelationshipRequest{ + CollectionName: collectionName, + DocID: docID, + Relation: relation, + TargetActor: targetActor, + }, + ) + + if err != nil { + return client.DeleteDocActorRelationshipResult{}, err + } + + req, err := http.NewRequestWithContext( + ctx, + http.MethodDelete, + methodURL.String(), + bytes.NewBuffer(body), + ) + + if err != nil { + return client.DeleteDocActorRelationshipResult{}, err + } + + var deleteDocActorRelResult client.DeleteDocActorRelationshipResult + if err := c.http.requestJson(req, &deleteDocActorRelResult); err != nil { + return client.DeleteDocActorRelationshipResult{}, err + } + + return deleteDocActorRelResult, nil +} diff --git a/http/handler_acp.go b/http/handler_acp.go index e9bdf2ce0e..d359d5085e 100644 --- a/http/handler_acp.go +++ b/http/handler_acp.go @@ -75,24 +75,70 @@ func (s *acpHandler) AddDocActorRelationship(rw http.ResponseWriter, req *http.R responseJSON(rw, http.StatusOK, addDocActorRelResult) } -func (h *acpHandler) bindRoutes(router *Router) { - successResponse := &openapi3.ResponseRef{ - Ref: 
"#/components/responses/success", +func (s *acpHandler) DeleteDocActorRelationship(rw http.ResponseWriter, req *http.Request) { + db, ok := req.Context().Value(dbContextKey).(client.DB) + if !ok { + responseJSON(rw, http.StatusBadRequest, errorResponse{NewErrFailedToGetContext("db")}) + return + } + + var message deleteDocActorRelationshipRequest + err := requestJSON(req, &message) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + + deleteDocActorRelResult, err := db.DeleteDocActorRelationship( + req.Context(), + message.CollectionName, + message.DocID, + message.Relation, + message.TargetActor, + ) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return } + + responseJSON(rw, http.StatusOK, deleteDocActorRelResult) +} + +func (h *acpHandler) bindRoutes(router *Router) { errorResponse := &openapi3.ResponseRef{ Ref: "#/components/responses/error", } + acpPolicyAddResultSchema := &openapi3.SchemaRef{ + Ref: "#/components/schemas/acp_policy_add_result", + } + + acpRelationshipAddRequestSchema := &openapi3.SchemaRef{ + Ref: "#/components/schemas/acp_relationship_add_request", + } + acpRelationshipAddResultSchema := &openapi3.SchemaRef{ + Ref: "#/components/schemas/acp_relationship_add_result", + } + + acpRelationshipDeleteRequestSchema := &openapi3.SchemaRef{ + Ref: "#/components/schemas/acp_relationship_delete_request", + } + acpRelationshipDeleteResultSchema := &openapi3.SchemaRef{ + Ref: "#/components/schemas/acp_relationship_delete_result", + } + acpAddPolicyRequest := openapi3.NewRequestBody(). WithRequired(true). WithContent(openapi3.NewContentWithSchema(openapi3.NewStringSchema(), []string{"text/plain"})) - + acpPolicyAddResult := openapi3.NewResponse(). + WithDescription("Add acp policy result"). + WithJSONSchemaRef(acpPolicyAddResultSchema) acpAddPolicy := openapi3.NewOperation() acpAddPolicy.OperationID = "add policy" acpAddPolicy.Description = "Add a policy using acp system" acpAddPolicy.Tags = []string{"acp_policy"} acpAddPolicy.Responses = openapi3.NewResponses() - acpAddPolicy.Responses.Set("200", successResponse) + acpAddPolicy.AddResponse(200, acpPolicyAddResult) acpAddPolicy.Responses.Set("400", errorResponse) acpAddPolicy.RequestBody = &openapi3.RequestBodyRef{ Value: acpAddPolicyRequest, @@ -100,19 +146,39 @@ func (h *acpHandler) bindRoutes(router *Router) { acpAddDocActorRelationshipRequest := openapi3.NewRequestBody(). WithRequired(true). - WithContent(openapi3.NewContentWithSchema(openapi3.NewStringSchema(), []string{"text/plain"})) - + WithContent(openapi3.NewContentWithJSONSchemaRef(acpRelationshipAddRequestSchema)) + acpAddDocActorRelationshipResult := openapi3.NewResponse(). + WithDescription("Add acp relationship result"). + WithJSONSchemaRef(acpRelationshipAddResultSchema) acpAddDocActorRelationship := openapi3.NewOperation() acpAddDocActorRelationship.OperationID = "add relationship" acpAddDocActorRelationship.Description = "Add an actor relationship using acp system" acpAddDocActorRelationship.Tags = []string{"acp_relationship"} acpAddDocActorRelationship.Responses = openapi3.NewResponses() - acpAddDocActorRelationship.Responses.Set("200", successResponse) + acpAddDocActorRelationship.AddResponse(200, acpAddDocActorRelationshipResult) acpAddDocActorRelationship.Responses.Set("400", errorResponse) acpAddDocActorRelationship.RequestBody = &openapi3.RequestBodyRef{ Value: acpAddDocActorRelationshipRequest, } + acpDeleteDocActorRelationshipRequest := openapi3.NewRequestBody(). 
+ WithRequired(true). + WithContent(openapi3.NewContentWithJSONSchemaRef(acpRelationshipDeleteRequestSchema)) + acpDeleteDocActorRelationshipResult := openapi3.NewResponse(). + WithDescription("Delete acp relationship result"). + WithJSONSchemaRef(acpRelationshipDeleteResultSchema) + acpDeleteDocActorRelationship := openapi3.NewOperation() + acpDeleteDocActorRelationship.OperationID = "delete relationship" + acpDeleteDocActorRelationship.Description = "Delete an actor relationship using acp system" + acpDeleteDocActorRelationship.Tags = []string{"acp_relationship"} + acpDeleteDocActorRelationship.Responses = openapi3.NewResponses() + acpDeleteDocActorRelationship.AddResponse(200, acpDeleteDocActorRelationshipResult) + acpDeleteDocActorRelationship.Responses.Set("400", errorResponse) + acpDeleteDocActorRelationship.RequestBody = &openapi3.RequestBodyRef{ + Value: acpDeleteDocActorRelationshipRequest, + } + router.AddRoute("/acp/policy", http.MethodPost, acpAddPolicy, h.AddPolicy) router.AddRoute("/acp/relationship", http.MethodPost, acpAddDocActorRelationship, h.AddDocActorRelationship) + router.AddRoute("/acp/relationship", http.MethodDelete, acpDeleteDocActorRelationship, h.DeleteDocActorRelationship) } diff --git a/http/openapi.go b/http/openapi.go index 0bb5f71743..0a62b6ebac 100644 --- a/http/openapi.go +++ b/http/openapi.go @@ -20,27 +20,32 @@ import ( // openApiSchemas is a mapping of types to auto generate schemas for. var openApiSchemas = map[string]any{ - "error": &errorResponse{}, - "create_tx": &CreateTxResponse{}, - "collection_update": &CollectionUpdateRequest{}, - "collection_delete": &CollectionDeleteRequest{}, - "peer_info": &peer.AddrInfo{}, - "graphql_request": &GraphQLRequest{}, - "backup_config": &client.BackupConfig{}, - "collection": &client.CollectionDescription{}, - "schema": &client.SchemaDescription{}, - "collection_definition": &client.CollectionDefinition{}, - "index": &client.IndexDescription{}, - "delete_result": &client.DeleteResult{}, - "update_result": &client.UpdateResult{}, - "lens_config": &client.LensConfig{}, - "replicator": &client.Replicator{}, - "ccip_request": &CCIPRequest{}, - "ccip_response": &CCIPResponse{}, - "patch_schema_request": &patchSchemaRequest{}, - "add_view_request": &addViewRequest{}, - "migrate_request": &migrateRequest{}, - "set_migration_request": &setMigrationRequest{}, + "error": &errorResponse{}, + "create_tx": &CreateTxResponse{}, + "collection_update": &CollectionUpdateRequest{}, + "collection_delete": &CollectionDeleteRequest{}, + "peer_info": &peer.AddrInfo{}, + "graphql_request": &GraphQLRequest{}, + "backup_config": &client.BackupConfig{}, + "collection": &client.CollectionDescription{}, + "schema": &client.SchemaDescription{}, + "collection_definition": &client.CollectionDefinition{}, + "index": &client.IndexDescription{}, + "delete_result": &client.DeleteResult{}, + "update_result": &client.UpdateResult{}, + "lens_config": &client.LensConfig{}, + "replicator": &client.Replicator{}, + "ccip_request": &CCIPRequest{}, + "ccip_response": &CCIPResponse{}, + "patch_schema_request": &patchSchemaRequest{}, + "add_view_request": &addViewRequest{}, + "migrate_request": &migrateRequest{}, + "set_migration_request": &setMigrationRequest{}, + "acp_policy_add_result": &client.AddPolicyResult{}, + "acp_relationship_add_request": &addDocActorRelationshipRequest{}, + "acp_relationship_add_result": &client.AddDocActorRelationshipResult{}, + "acp_relationship_delete_request": &deleteDocActorRelationshipRequest{}, + 
"acp_relationship_delete_result": &client.DeleteDocActorRelationshipResult{}, } func NewOpenAPISpec() (*openapi3.T, error) { diff --git a/internal/db/db.go b/internal/db/db.go index 73165c239a..d5872cef0c 100644 --- a/internal/db/db.go +++ b/internal/db/db.go @@ -248,6 +248,46 @@ func (db *db) AddDocActorRelationship( return client.AddDocActorRelationshipResult{ExistedAlready: exists}, nil } +func (db *db) DeleteDocActorRelationship( + ctx context.Context, + collectionName string, + docID string, + relation string, + targetActor string, +) (client.DeleteDocActorRelationshipResult, error) { + if !db.acp.HasValue() { + return client.DeleteDocActorRelationshipResult{}, client.ErrACPOperationButACPNotAvailable + } + + collection, err := db.GetCollectionByName(ctx, collectionName) + if err != nil { + return client.DeleteDocActorRelationshipResult{}, err + } + + policyID, resourceName, hasPolicy := permission.IsPermissioned(collection) + if !hasPolicy { + return client.DeleteDocActorRelationshipResult{}, client.ErrACPOperationButCollectionHasNoPolicy + } + + identity := GetContextIdentity(ctx) + + recordFound, err := db.acp.Value().DeleteDocActorRelationship( + ctx, + policyID, + resourceName, + docID, + relation, + identity.Value(), + targetActor, + ) + + if err != nil { + return client.DeleteDocActorRelationshipResult{}, err + } + + return client.DeleteDocActorRelationshipResult{RecordFound: recordFound}, nil +} + // Initialize is called when a database is first run and creates all the db global meta data // like Collection ID counters. func (db *db) initialize(ctx context.Context) error { diff --git a/tests/clients/cli/wrapper_acp.go b/tests/clients/cli/wrapper_acp.go index f76aad3cdf..06c356afaa 100644 --- a/tests/clients/cli/wrapper_acp.go +++ b/tests/clients/cli/wrapper_acp.go @@ -64,3 +64,31 @@ func (w *Wrapper) AddDocActorRelationship( return exists, err } + +func (w *Wrapper) DeleteDocActorRelationship( + ctx context.Context, + collectionName string, + docID string, + relation string, + targetActor string, +) (client.DeleteDocActorRelationshipResult, error) { + args := []string{ + "client", "acp", "relationship", "delete", + "--collection", collectionName, + "--docID", docID, + "--relation", relation, + "--actor", targetActor, + } + + data, err := w.cmd.execute(ctx, args) + if err != nil { + return client.DeleteDocActorRelationshipResult{}, err + } + + var exists client.DeleteDocActorRelationshipResult + if err := json.Unmarshal(data, &exists); err != nil { + return client.DeleteDocActorRelationshipResult{}, err + } + + return exists, err +} diff --git a/tests/clients/http/wrapper.go b/tests/clients/http/wrapper.go index 81ed74b095..ae6cd61529 100644 --- a/tests/clients/http/wrapper.go +++ b/tests/clients/http/wrapper.go @@ -121,6 +121,22 @@ func (w *Wrapper) AddDocActorRelationship( ) } +func (w *Wrapper) DeleteDocActorRelationship( + ctx context.Context, + collectionName string, + docID string, + relation string, + targetActor string, +) (client.DeleteDocActorRelationshipResult, error) { + return w.client.DeleteDocActorRelationship( + ctx, + collectionName, + docID, + relation, + targetActor, + ) +} + func (w *Wrapper) PatchSchema( ctx context.Context, patch string, diff --git a/tests/integration/acp.go b/tests/integration/acp.go index a8f41e5f41..d98fe08a3f 100644 --- a/tests/integration/acp.go +++ b/tests/integration/acp.go @@ -117,14 +117,14 @@ func addPolicyACP( ctx := db.SetContextIdentity(s.ctx, identity) policyResult, err := node.AddPolicy(ctx, action.Policy) - if err == nil { 
+ expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) + + if !expectedErrorRaised { require.Equal(s.t, action.ExpectedError, "") require.Equal(s.t, action.ExpectedPolicyID, policyResult.PolicyID) } - expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError) - assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) - // The policy should only be added to a SourceHub chain once - there is no need to loop through // the nodes. if acpType == SourceHubACPType { @@ -236,13 +236,13 @@ func addDocActorRelationshipACP( targetIdentity, ) - if err == nil { + expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) + + if !expectedErrorRaised { require.Equal(s.t, action.ExpectedError, "") require.Equal(s.t, action.ExpectedExistence, exists.ExistedAlready) } - - expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError) - assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) } else { for i, node := range getNodes(action.NodeID, s.nodes) { var collectionName string @@ -293,14 +293,192 @@ func addDocActorRelationshipACP( targetIdentity, ) - if err == nil { + expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) + + if !expectedErrorRaised { require.Equal(s.t, action.ExpectedError, "") require.Equal(s.t, action.ExpectedExistence, exists.ExistedAlready) } + // The relationship should only be added to a SourceHub chain once - there is no need to loop through + // the nodes. + if acpType == SourceHubACPType { + break + } + } + } +} + +// DeleteDocActorRelationship will attempt to delete a relationship between a document and an actor. +type DeleteDocActorRelationship struct { + // NodeID may hold the ID (index) of the node we want to delete doc actor relationship on. + // + // If a value is not provided the relationship will be deleted on all nodes, unless testing with + // sourcehub ACP, in which case the relationship will only be deleted once. + NodeID immutable.Option[int] + + // The collection in which the target document we want to delete relationship for exists. + // + // This is a required field. To test the invalid usage of not having this arg, use -1 index. + CollectionID int + + // The index-identifier of the document within the collection. This is based on + // the order in which it was created, not the ordering of the document within the + // database. + // + // This is a required field. To test the invalid usage of not having this arg, use -1 index. + DocID int + + // The name of the relation within the relationship we want to delete (should be defined in the policy). + // + // This is a required field. + Relation string + + // The target public identity, i.e. the identity of the actor with whom the relationship is with. + // + // This is a required field. To test the invalid usage of not having this arg, use -1 index. + TargetIdentity int + + // The requestor identity, i.e. identity of the actor deleting the relationship. + // Note: This identity must either own or have managing access defined in the policy. + // + // This is a required field. 
To test the invalid usage of not having this arg, use -1 index. + RequestorIdentity int + + // Result returns true if the relationship record was expected to be found and deleted, + // and returns false if no matching relationship record was found (no-op). + ExpectedRecordFound bool + + // Any error expected from the action. Optional. + // + // String can be a partial, and the test will pass if an error is returned that + // contains this string. + ExpectedError string +} + +func deleteDocActorRelationshipACP( + s *state, + action DeleteDocActorRelationship, +) { + if action.NodeID.HasValue() { + nodeID := action.NodeID.Value() + collections := s.collections[nodeID] + node := s.nodes[nodeID] + + var collectionName string + if action.CollectionID == -1 { + collectionName = "" + } else { + collection := collections[action.CollectionID] + if !collection.Description().Name.HasValue() { + require.Fail(s.t, "Expected non-empty collection name, but it was empty.", s.testCase.Description) + } + collectionName = collection.Description().Name.Value() + } + + var docID string + if action.DocID == -1 || action.CollectionID == -1 { + docID = "" + } else { + docID = s.docIDs[action.CollectionID][action.DocID].String() + } + + var targetIdentity string + if action.TargetIdentity == -1 { + targetIdentity = "" + } else { + optionalTargetIdentity := getIdentity(s, nodeID, immutable.Some(action.TargetIdentity)) + if !optionalTargetIdentity.HasValue() { + require.Fail(s.t, "Expected non-empty target identity, but it was empty.", s.testCase.Description) + } + targetIdentity = optionalTargetIdentity.Value().DID + } + + var requestorIdentity immutable.Option[acpIdentity.Identity] + if action.RequestorIdentity == -1 { + requestorIdentity = acpIdentity.None + } else { + requestorIdentity = getIdentity(s, nodeID, immutable.Some(action.RequestorIdentity)) + if !requestorIdentity.HasValue() { + require.Fail(s.t, "Expected non-empty requestor identity, but it was empty.", s.testCase.Description) + } + } + ctx := db.SetContextIdentity(s.ctx, requestorIdentity) + + deleteDocActorRelationshipResult, err := node.DeleteDocActorRelationship( + ctx, + collectionName, + docID, + action.Relation, + targetIdentity, + ) + + expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError) + assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) + + if !expectedErrorRaised { + require.Equal(s.t, action.ExpectedError, "") + require.Equal(s.t, action.ExpectedRecordFound, deleteDocActorRelationshipResult.RecordFound) + } + } else { + for i, node := range getNodes(action.NodeID, s.nodes) { + var collectionName string + if action.CollectionID == -1 { + collectionName = "" + } else { + collection := s.collections[i][action.CollectionID] + if !collection.Description().Name.HasValue() { + require.Fail(s.t, "Expected non-empty collection name, but it was empty.", s.testCase.Description) + } + collectionName = collection.Description().Name.Value() + } + + var docID string + if action.DocID == -1 || action.CollectionID == -1 { + docID = "" + } else { + docID = s.docIDs[action.CollectionID][action.DocID].String() + } + + var targetIdentity string + if action.TargetIdentity == -1 { + targetIdentity = "" + } else { + optionalTargetIdentity := getIdentity(s, i, immutable.Some(action.TargetIdentity)) + if !optionalTargetIdentity.HasValue() { + require.Fail(s.t, "Expected non-empty target identity, but it was empty.", s.testCase.Description) + } + targetIdentity = 
optionalTargetIdentity.Value().DID + } + + var requestorIdentity immutable.Option[acpIdentity.Identity] + if action.RequestorIdentity == -1 { + requestorIdentity = acpIdentity.None + } else { + requestorIdentity = getIdentity(s, i, immutable.Some(action.RequestorIdentity)) + if !requestorIdentity.HasValue() { + require.Fail(s.t, "Expected non-empty requestor identity, but it was empty.", s.testCase.Description) + } + } + ctx := db.SetContextIdentity(s.ctx, requestorIdentity) + + deleteDocActorRelationshipResult, err := node.DeleteDocActorRelationship( + ctx, + collectionName, + docID, + action.Relation, + targetIdentity, + ) + expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError) assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) + if !expectedErrorRaised { + require.Equal(s.t, action.ExpectedError, "") + require.Equal(s.t, action.ExpectedRecordFound, deleteDocActorRelationshipResult.RecordFound) + } + // The relationship should only be added to a SourceHub chain once - there is no need to loop through // the nodes. if acpType == SourceHubACPType { @@ -314,7 +492,10 @@ func setupSourceHub(s *state) ([]node.ACPOpt, error) { var isACPTest bool for _, a := range s.testCase.Actions { switch a.(type) { - case AddPolicy, AddDocActorRelationship: + case + AddPolicy, + AddDocActorRelationship, + DeleteDocActorRelationship: isACPTest = true } } diff --git a/tests/integration/acp/p2p/replicator_with_doc_actor_relationship_test.go b/tests/integration/acp/p2p/replicator_with_doc_actor_relationship_test.go index fe06e10061..cdefe70a46 100644 --- a/tests/integration/acp/p2p/replicator_with_doc_actor_relationship_test.go +++ b/tests/integration/acp/p2p/replicator_with_doc_actor_relationship_test.go @@ -212,6 +212,76 @@ func TestACP_P2PReplicatorWithPermissionedCollectionCreateDocActorRelationship_S }, }, }, + + testUtils.DeleteDocActorRelationship{ + NodeID: immutable.Some(1), + + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedRecordFound: true, + }, + + testUtils.DeleteDocActorRelationship{ + NodeID: immutable.Some(0), // Note: Different node than the previous + + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedRecordFound: false, // Making the same relation through any node should be a no-op + }, + + testUtils.Request{ + // Ensure that the document is now inaccessible on all nodes to the actor we revoked access from. + Identity: immutable.Some(2), + + Request: ` + query { + Users { + name + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + + testUtils.Request{ + // Ensure that the document is still accessible on all nodes to the owner. 
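+			// (Deleting the relationship only removes the target actor's "reader"
+			// record; the owner relation created when the document was registered
+			// with ACP is unaffected, so the owner retains access.)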
+ Identity: immutable.Some(1), + + Request: ` + query { + Users { + name + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, }, } diff --git a/tests/integration/acp/p2p/subscribe_with_doc_actor_relationship_test.go b/tests/integration/acp/p2p/subscribe_with_doc_actor_relationship_test.go index a55c5a333e..b9f3f8edd3 100644 --- a/tests/integration/acp/p2p/subscribe_with_doc_actor_relationship_test.go +++ b/tests/integration/acp/p2p/subscribe_with_doc_actor_relationship_test.go @@ -218,6 +218,76 @@ func TestACP_P2PSubscribeAddGetSingleWithPermissionedCollectionCreateDocActorRel }, }, }, + + testUtils.DeleteDocActorRelationship{ + NodeID: immutable.Some(1), + + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedRecordFound: true, + }, + + testUtils.DeleteDocActorRelationship{ + NodeID: immutable.Some(0), // Note: Different node than the previous + + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedRecordFound: false, // Making the same relation through any node should be a no-op + }, + + testUtils.Request{ + // Ensure that the document is now inaccessible on all nodes to the actor we revoked access from. + Identity: immutable.Some(2), + + Request: ` + query { + Users { + name + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + + testUtils.Request{ + // Ensure that the document is still accessible on all nodes to the owner. + Identity: immutable.Some(1), + + Request: ` + query { + Users { + name + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, }, } diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_invalid_test.go b/tests/integration/acp/relationship/doc_actor/add/invalid_test.go similarity index 99% rename from tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_invalid_test.go rename to tests/integration/acp/relationship/doc_actor/add/invalid_test.go index cc0e0dac69..d9f96d9c21 100644 --- a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_invalid_test.go +++ b/tests/integration/acp/relationship/doc_actor/add/invalid_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package test_acp_relationship_add_docactor +package test_acp_relationship_doc_actor_add import ( "fmt" diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_delete_test.go b/tests/integration/acp/relationship/doc_actor/add/with_delete_test.go similarity index 99% rename from tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_delete_test.go rename to tests/integration/acp/relationship/doc_actor/add/with_delete_test.go index 9be3ace27d..c87c3c0a8f 100644 --- a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_delete_test.go +++ b/tests/integration/acp/relationship/doc_actor/add/with_delete_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. 
-package test_acp_relationship_add_docactor +package test_acp_relationship_doc_actor_add import ( "fmt" diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_dummy_relation_test.go b/tests/integration/acp/relationship/doc_actor/add/with_dummy_relation_test.go similarity index 99% rename from tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_dummy_relation_test.go rename to tests/integration/acp/relationship/doc_actor/add/with_dummy_relation_test.go index 66e17ba00a..79cc4639e2 100644 --- a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_dummy_relation_test.go +++ b/tests/integration/acp/relationship/doc_actor/add/with_dummy_relation_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package test_acp_relationship_add_docactor +package test_acp_relationship_doc_actor_add import ( "fmt" diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_manager_gql_test.go b/tests/integration/acp/relationship/doc_actor/add/with_manager_gql_test.go similarity index 99% rename from tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_manager_gql_test.go rename to tests/integration/acp/relationship/doc_actor/add/with_manager_gql_test.go index 9c2280d6ce..1881979c32 100644 --- a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_manager_gql_test.go +++ b/tests/integration/acp/relationship/doc_actor/add/with_manager_gql_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package test_acp_relationship_add_docactor +package test_acp_relationship_doc_actor_add import ( "fmt" diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_manager_test.go b/tests/integration/acp/relationship/doc_actor/add/with_manager_test.go similarity index 99% rename from tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_manager_test.go rename to tests/integration/acp/relationship/doc_actor/add/with_manager_test.go index 4467aa1af9..f07971589c 100644 --- a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_manager_test.go +++ b/tests/integration/acp/relationship/doc_actor/add/with_manager_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package test_acp_relationship_add_docactor +package test_acp_relationship_doc_actor_add import ( "fmt" diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_collection_with_no_policy_test.go b/tests/integration/acp/relationship/doc_actor/add/with_no_policy_on_collection_test.go similarity index 96% rename from tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_collection_with_no_policy_test.go rename to tests/integration/acp/relationship/doc_actor/add/with_no_policy_on_collection_test.go index a614ef3ce9..a7ad53db41 100644 --- a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_collection_with_no_policy_test.go +++ b/tests/integration/acp/relationship/doc_actor/add/with_no_policy_on_collection_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. 
-package test_acp_relationship_add_docactor +package test_acp_relationship_doc_actor_add import ( "testing" diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_only_write_gql_test.go b/tests/integration/acp/relationship/doc_actor/add/with_only_write_gql_test.go similarity index 98% rename from tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_only_write_gql_test.go rename to tests/integration/acp/relationship/doc_actor/add/with_only_write_gql_test.go index e3f3e62050..36bf181478 100644 --- a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_only_write_gql_test.go +++ b/tests/integration/acp/relationship/doc_actor/add/with_only_write_gql_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package test_acp_relationship_add_docactor +package test_acp_relationship_doc_actor_add import ( "fmt" diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_only_write_test.go b/tests/integration/acp/relationship/doc_actor/add/with_only_write_test.go similarity index 99% rename from tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_only_write_test.go rename to tests/integration/acp/relationship/doc_actor/add/with_only_write_test.go index e052d19afd..09703f93aa 100644 --- a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_only_write_test.go +++ b/tests/integration/acp/relationship/doc_actor/add/with_only_write_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package test_acp_relationship_add_docactor +package test_acp_relationship_doc_actor_add import ( "fmt" diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_public_document_test.go b/tests/integration/acp/relationship/doc_actor/add/with_public_document_test.go similarity index 98% rename from tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_public_document_test.go rename to tests/integration/acp/relationship/doc_actor/add/with_public_document_test.go index e134a821e4..30c299e222 100644 --- a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_public_document_test.go +++ b/tests/integration/acp/relationship/doc_actor/add/with_public_document_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package test_acp_relationship_add_docactor +package test_acp_relationship_doc_actor_add import ( "fmt" diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_reader_gql_test.go b/tests/integration/acp/relationship/doc_actor/add/with_reader_gql_test.go similarity index 98% rename from tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_reader_gql_test.go rename to tests/integration/acp/relationship/doc_actor/add/with_reader_gql_test.go index 02a637833f..e40661cede 100644 --- a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_reader_gql_test.go +++ b/tests/integration/acp/relationship/doc_actor/add/with_reader_gql_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. 
-package test_acp_relationship_add_docactor +package test_acp_relationship_doc_actor_add import ( "fmt" diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_reader_test.go b/tests/integration/acp/relationship/doc_actor/add/with_reader_test.go similarity index 99% rename from tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_reader_test.go rename to tests/integration/acp/relationship/doc_actor/add/with_reader_test.go index 70a7676a96..bac553d553 100644 --- a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_reader_test.go +++ b/tests/integration/acp/relationship/doc_actor/add/with_reader_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package test_acp_relationship_add_docactor +package test_acp_relationship_doc_actor_add import ( "fmt" diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_update_gql_test.go b/tests/integration/acp/relationship/doc_actor/add/with_update_gql_test.go similarity index 99% rename from tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_update_gql_test.go rename to tests/integration/acp/relationship/doc_actor/add/with_update_gql_test.go index dcfda587e8..d265b448c3 100644 --- a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_update_gql_test.go +++ b/tests/integration/acp/relationship/doc_actor/add/with_update_gql_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package test_acp_relationship_add_docactor +package test_acp_relationship_doc_actor_add import ( "fmt" diff --git a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_update_test.go b/tests/integration/acp/relationship/doc_actor/add/with_update_test.go similarity index 99% rename from tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_update_test.go rename to tests/integration/acp/relationship/doc_actor/add/with_update_test.go index 79d727a690..de98f32b53 100644 --- a/tests/integration/acp/relationship/add_doc_actor_test/add_doc_actor_with_update_test.go +++ b/tests/integration/acp/relationship/doc_actor/add/with_update_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package test_acp_relationship_add_docactor +package test_acp_relationship_doc_actor_add import ( "fmt" diff --git a/tests/integration/acp/relationship/doc_actor/delete/invalid_test.go b/tests/integration/acp/relationship/doc_actor/delete/invalid_test.go new file mode 100644 index 0000000000..41cb6e4921 --- /dev/null +++ b/tests/integration/acp/relationship/doc_actor/delete/invalid_test.go @@ -0,0 +1,545 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
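Each error-path test in the new file below blanks out exactly one required argument. They rely on the convention set up in the `deleteDocActorRelationshipACP` harness helper earlier in this diff: a `-1` index stands for "argument absent" and is mapped to an empty value before the call reaches the ACP bridge. Roughly, as a simplified sketch of the helper's existing logic (not new behavior):

```go
// Simplified sketch of the -1 convention used by the integration harness.
// The resulting empty string is what trips the missing-argument guard
// in the ACP bridge.
var docID string
if action.DocID == -1 || action.CollectionID == -1 {
	docID = ""
} else {
	docID = s.docIDs[action.CollectionID][action.DocID].String()
}
```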
+ +package test_acp_relationship_doc_actor_delete + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_DeleteDocActorRelationshipMissingDocID_Error(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, delete doc actor relationship with docID missing, return error", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.DeleteDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: -1, + + Relation: "reader", + + ExpectedError: "missing a required argument needed to delete doc actor relationship.", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_DeleteDocActorRelationshipMissingCollection_Error(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, delete doc actor relationship with collection missing, return error", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.DeleteDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: -1, + + DocID: 0, + + Relation: "reader", + + ExpectedError: "collection name can't be empty", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_DeleteDocActorRelationshipMissingRelationName_Error(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, delete doc actor relationship with relation name missing, return error", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + 
read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.DeleteDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "", + + ExpectedError: "missing a required argument needed to delete doc actor relationship.", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_DeleteDocActorRelationshipMissingTargetActorName_Error(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, delete doc actor relationship with target actor missing, return error", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.DeleteDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: -1, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedError: "missing a required argument needed to delete doc actor relationship.", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_DeleteDocActorRelationshipMissingReqestingIdentityName_Error(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, delete doc actor relationship with requesting identity missing, return error", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + 
"age": 28 + } + `, + }, + + testUtils.DeleteDocActorRelationship{ + RequestorIdentity: -1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedError: "missing a required argument needed to delete doc actor relationship.", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/doc_actor/delete/with_delete_test.go b/tests/integration/acp/relationship/doc_actor/delete/with_delete_test.go new file mode 100644 index 0000000000..d931a7049b --- /dev/null +++ b/tests/integration/acp/relationship/doc_actor/delete/with_delete_test.go @@ -0,0 +1,252 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_relationship_doc_actor_delete + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_OwnerRevokesDeleteWriteAccess_OtherActorCanNoLongerDelete(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner revokes write(delete) access from another actor, they can not delete anymore", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + // Creating two documents because need one to do the test on after one is deleted. + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad Lone", + "age": 28 + } + `, + }, + + // Give access to the other actor to delete and read both documents. + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: false, + }, + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 1, + + Relation: "writer", + + ExpectedExistence: false, + }, + + // Now the other identity can read both and delete both of those documents + testUtils.Request{ + Identity: immutable.Some(2), // This identity can read. 
+ + Request: ` + query { + Users { + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "age": int64(28), + }, + { + "name": "Shahzad Lone", + "age": int64(28), + }, + }, + }, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can also delete. + + DocID: 1, + }, + + testUtils.DeleteDocActorRelationship{ // Revoke access from being able to delete (and read) the document. + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedRecordFound: true, + }, + + // The other identity can neither delete nor read the other document anymore. + testUtils.Request{ + Identity: immutable.Some(2), + + Request: ` + query { + Users { + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't read the document anymore + }, + }, + + testUtils.DeleteDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), + + DocID: 0, + + ExpectedError: "document not found or not authorized to access", // Can't delete the document anymore. + }, + + // Ensure document was not accidentally deleted using owner identity. + testUtils.Request{ + Identity: immutable.Some(1), + + Request: ` + query { + Users { + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/doc_actor/delete/with_dummy_relation_test.go b/tests/integration/acp/relationship/doc_actor/delete/with_dummy_relation_test.go new file mode 100644 index 0000000000..190850dfdd --- /dev/null +++ b/tests/integration/acp/relationship/doc_actor/delete/with_dummy_relation_test.go @@ -0,0 +1,302 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
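+
+// The tests below exercise deleting relationships for the "dummy" relation,
+// which is defined on the policy but grants no permissions, and for a
+// relation that is not defined on the policy at all.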
+ +package test_acp_relationship_doc_actor_delete + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_DeleteDocActorRelationshipWithDummyRelationDefinedOnPolicy_NothingChanges(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, delete doc actor relationship with a dummy relation defined on policy, nothing happens", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read yet. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents + }, + }, + + testUtils.DeleteDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "dummy", // Doesn't mean anything to the database. + + ExpectedRecordFound: false, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can still not read. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_DeleteDocActorRelationshipWithDummyRelationNotDefinedOnPolicy_Error(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, delete doc actor relationship with an invalid relation (not defined on policy), error", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read yet. 
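+				// Read visibility is checked both before and after the failed delete below.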
+ + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents + }, + }, + + testUtils.DeleteDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "NotOnPolicy", // Doesn't mean anything to the database and not on policy either. + + ExpectedError: "failed to delete document actor relationship with acp", + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can still not read. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/doc_actor/delete/with_manager_test.go b/tests/integration/acp/relationship/doc_actor/delete/with_manager_test.go new file mode 100644 index 0000000000..fd841c562a --- /dev/null +++ b/tests/integration/acp/relationship/doc_actor/delete/with_manager_test.go @@ -0,0 +1,534 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_relationship_doc_actor_delete + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_ManagerRevokesReadAccess_OtherActorCanNoLongerRead(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, manager revokes read access, other actor that can read before no longer read.", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.AddDocActorRelationship{ // Owner makes admin / manager + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "admin", + + ExpectedExistence: false, + }, + + testUtils.AddDocActorRelationship{ // Owner gives an actor read access + RequestorIdentity: 1, + + TargetIdentity: 3, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + }, + + testUtils.Request{ + Identity: immutable.Some(3), // The other actor can read + + Request: ` + query { + Users { + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + + 
			testUtils.DeleteDocActorRelationship{ // Admin revokes access of the other actor that could read.
+				RequestorIdentity: 2,
+
+				TargetIdentity: 3,
+
+				CollectionID: 0,
+
+				DocID: 0,
+
+				Relation: "reader",
+
+				ExpectedRecordFound: true,
+			},
+
+			// The other actor can no longer read.
+			testUtils.Request{
+				Identity: immutable.Some(3),
+
+				Request: `
+					query {
+						Users {
+							name
+							age
+						}
+					}
+				`,
+
+				Results: map[string]any{
+					"Users": []map[string]any{},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_OwnerRevokesManagersAccess_ManagerCanNoLongerManageOthers(t *testing.T) {
+	expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4"
+
+	test := testUtils.TestCase{
+
+		Description: "Test acp, owner revokes manager's access, manager can no longer manage others.",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+
+				Identity: immutable.Some(1),
+
+				Policy: `
+					name: Test Policy
+
+					description: A Policy
+
+					actor:
+					  name: actor
+
+					resources:
+					  users:
+					    permissions:
+					      read:
+					        expr: owner + reader + writer
+
+					      write:
+					        expr: owner + writer
+
+					      nothing:
+					        expr: dummy
+
+					    relations:
+					      owner:
+					        types:
+					          - actor
+
+					      reader:
+					        types:
+					          - actor
+
+					      writer:
+					        types:
+					          - actor
+
+					      admin:
+					        manages:
+					          - reader
+					        types:
+					          - actor
+
+					      dummy:
+					        types:
+					          - actor
+				`,
+
+				ExpectedPolicyID: expectedPolicyID,
+			},
+
+			testUtils.SchemaUpdate{
+				Schema: fmt.Sprintf(`
+					type Users @policy(
+						id: "%s",
+						resource: "users"
+					) {
+						name: String
+						age: Int
+					}
+				`,
+					expectedPolicyID,
+				),
+			},
+
+			testUtils.CreateDoc{
+				Identity: immutable.Some(1),
+
+				CollectionID: 0,
+
+				Doc: `
+					{
+						"name": "Shahzad",
+						"age": 28
+					}
+				`,
+			},
+
+			testUtils.AddDocActorRelationship{ // Owner makes admin / manager
+				RequestorIdentity: 1,
+
+				TargetIdentity: 2,
+
+				CollectionID: 0,
+
+				DocID: 0,
+
+				Relation: "admin",
+
+				ExpectedExistence: false,
+			},
+
+			testUtils.AddDocActorRelationship{ // Manager gives an actor read access
+				RequestorIdentity: 2,
+
+				TargetIdentity: 3,
+
+				CollectionID: 0,
+
+				DocID: 0,
+
+				Relation: "reader",
+
+				ExpectedExistence: false,
+			},
+
+			testUtils.Request{
+				Identity: immutable.Some(3), // The other actor can read
+
+				Request: `
+					query {
+						Users {
+							name
+							age
+						}
+					}
+				`,
+
+				Results: map[string]any{
+					"Users": []map[string]any{
+						{
+							"name": "Shahzad",
+							"age": int64(28),
+						},
+					},
+				},
+			},
+
+			testUtils.DeleteDocActorRelationship{ // Owner revokes access of the admin.
+				RequestorIdentity: 1,
+
+				TargetIdentity: 2,
+
+				CollectionID: 0,
+
+				DocID: 0,
+
+				Relation: "admin",
+
+				ExpectedRecordFound: true,
+			},
+
+			testUtils.AddDocActorRelationship{ // Manager can no longer grant read access.
+				RequestorIdentity: 2,
+
+				TargetIdentity: 4, // This identity has no access previously.
+
+				CollectionID: 0,
+
+				DocID: 0,
+
+				Relation: "reader",
+
+				ExpectedError: "failed to add document actor relationship with acp",
+			},
+
+			testUtils.Request{
+				Identity: immutable.Some(4), // The other actor of course still can not read.
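+				// Identity 4 was never granted anything, so the read still returns no results.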
+ + Request: ` + query { + Users { + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_AdminTriesToRevokeOwnersAccess_NotAllowedError(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, admin tries to revoke owner's access, not allowed error.", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.AddDocActorRelationship{ // Owner makes admin / manager + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "admin", + + ExpectedExistence: false, + }, + + testUtils.DeleteDocActorRelationship{ // Admin tries to revoke owners `owner` relation. + RequestorIdentity: 2, + + TargetIdentity: 1, + + CollectionID: 0, + + DocID: 0, + + Relation: "owner", + + ExpectedError: "cannot delete an owner relationship", + }, + + testUtils.DeleteDocActorRelationship{ // Owner can still perform owner operations, like restrict admin. + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "admin", + + ExpectedRecordFound: true, + }, + + testUtils.Request{ + Identity: immutable.Some(1), // The owner can still read + + Request: ` + query { + Users { + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/doc_actor/delete/with_no_policy_on_collection_test.go b/tests/integration/acp/relationship/doc_actor/delete/with_no_policy_on_collection_test.go new file mode 100644 index 0000000000..3039d32e5f --- /dev/null +++ b/tests/integration/acp/relationship/doc_actor/delete/with_no_policy_on_collection_test.go @@ -0,0 +1,66 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
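+
+// Relationship operations require the target collection to have a policy;
+// the test below confirms the request is rejected when none exists.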
+ +package test_acp_relationship_doc_actor_delete + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_DeleteDocActorRelationshipWithCollectionThatHasNoPolicy_NotAllowedError(t *testing.T) { + test := testUtils.TestCase{ + + Description: "Test acp, delete doc actor relationship on a collection with no policy, not allowed error", + + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + age: Int + } + `, + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.DeleteDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedError: "operation requires ACP, but collection has no policy", // Everything is public anyway + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/doc_actor/delete/with_public_document_test.go b/tests/integration/acp/relationship/doc_actor/delete/with_public_document_test.go new file mode 100644 index 0000000000..fa071c6806 --- /dev/null +++ b/tests/integration/acp/relationship/doc_actor/delete/with_public_document_test.go @@ -0,0 +1,147 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_relationship_doc_actor_delete + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_DeleteDocActorRelationshipWithPublicDocument_CanAlreadyAccess_Error(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, delete doc actor relationship on a public document, return error", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ // Note: Is a public document (without an identity). 
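+				// With no owner identity, the document is public and has no ACP
+				// relationships that could be deleted later.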
+ CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // Can read as it is a public document + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + + testUtils.DeleteDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedError: "failed to delete document actor relationship with acp", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/doc_actor/delete/with_reader_test.go b/tests/integration/acp/relationship/doc_actor/delete/with_reader_test.go new file mode 100644 index 0000000000..58b74e4dc1 --- /dev/null +++ b/tests/integration/acp/relationship/doc_actor/delete/with_reader_test.go @@ -0,0 +1,314 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_relationship_doc_actor_delete + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_OwnerRevokesReadAccessTwice_ShowThatTheRecordWasNotFoundSecondTime(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner revokes read access twice, second time is no-op", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + }, + + testUtils.DeleteDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedRecordFound: true, + }, + + testUtils.DeleteDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedRecordFound: false, // is a no-op + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_OwnerRevokesGivenReadAccess_OtherActorCanNoLongerRead(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + 
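+	// Grant read access, verify it works, revoke it, then verify access is gone.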
+ test := testUtils.TestCase{ + + Description: "Test acp, owner revokes read access from another actor, they can not read anymore", + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can read. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad", + "age": int64(28), + }, + }, + }, + }, + + testUtils.DeleteDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedRecordFound: true, + }, + + testUtils.Request{ + Identity: immutable.Some(2), // This identity can not read anymore. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't see the documents now + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/doc_actor/delete/with_self_test.go b/tests/integration/acp/relationship/doc_actor/delete/with_self_test.go new file mode 100644 index 0000000000..563359fcd4 --- /dev/null +++ b/tests/integration/acp/relationship/doc_actor/delete/with_self_test.go @@ -0,0 +1,272 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
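+
+// The tests below verify that neither an admin nor an owner can remove
+// their own relationship through a self-targeted delete.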
+
+package test_acp_relationship_doc_actor_delete
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/sourcenetwork/immutable"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestACP_AdminTriesToRevokeItsOwnAccess_NotAllowedError(t *testing.T) {
+	expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4"
+
+	test := testUtils.TestCase{
+
+		Description: "Test acp, admin tries to revoke its own access, not allowed error.",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+
+				Identity: immutable.Some(1),
+
+				Policy: `
+					name: Test Policy
+
+					description: A Policy
+
+					actor:
+					  name: actor
+
+					resources:
+					  users:
+					    permissions:
+					      read:
+					        expr: owner + reader + writer
+
+					      write:
+					        expr: owner + writer
+
+					      nothing:
+					        expr: dummy
+
+					    relations:
+					      owner:
+					        types:
+					          - actor
+
+					      reader:
+					        types:
+					          - actor
+
+					      writer:
+					        types:
+					          - actor
+
+					      admin:
+					        manages:
+					          - reader
+					        types:
+					          - actor
+
+					      dummy:
+					        types:
+					          - actor
+				`,
+
+				ExpectedPolicyID: expectedPolicyID,
+			},
+
+			testUtils.SchemaUpdate{
+				Schema: fmt.Sprintf(`
+					type Users @policy(
+						id: "%s",
+						resource: "users"
+					) {
+						name: String
+						age: Int
+					}
+				`,
+					expectedPolicyID,
+				),
+			},
+
+			testUtils.CreateDoc{
+				Identity: immutable.Some(1),
+
+				CollectionID: 0,
+
+				Doc: `
+					{
+						"name": "Shahzad",
+						"age": 28
+					}
+				`,
+			},
+
+			testUtils.AddDocActorRelationship{ // Owner makes admin / manager
+				RequestorIdentity: 1,
+
+				TargetIdentity: 2,
+
+				CollectionID: 0,
+
+				DocID: 0,
+
+				Relation: "admin",
+
+				ExpectedExistence: false,
+			},
+
+			testUtils.DeleteDocActorRelationship{ // Admin tries to revoke its own relation.
+				RequestorIdentity: 2,
+
+				TargetIdentity: 2,
+
+				CollectionID: 0,
+
+				DocID: 0,
+
+				Relation: "admin",
+
+				ExpectedError: "failed to delete document actor relationship with acp",
+			},
+
+			testUtils.AddDocActorRelationship{ // Admin can still perform admin operations.
+				RequestorIdentity: 2,
+
+				TargetIdentity: 3,
+
+				CollectionID: 0,
+
+				DocID: 0,
+
+				Relation: "reader",
+
+				ExpectedExistence: false,
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
+
+func TestACP_OwnerTriesToRevokeItsOwnAccess_NotAllowedError(t *testing.T) {
+	expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4"
+
+	test := testUtils.TestCase{
+
+		Description: "Test acp, owner tries to revoke its own access, not allowed error.",
+
+		Actions: []any{
+			testUtils.AddPolicy{
+
+				Identity: immutable.Some(1),
+
+				Policy: `
+					name: Test Policy
+
+					description: A Policy
+
+					actor:
+					  name: actor
+
+					resources:
+					  users:
+					    permissions:
+					      read:
+					        expr: owner + reader + writer
+
+					      write:
+					        expr: owner + writer
+
+					      nothing:
+					        expr: dummy
+
+					    relations:
+					      owner:
+					        types:
+					          - actor
+
+					      reader:
+					        types:
+					          - actor
+
+					      writer:
+					        types:
+					          - actor
+
+					      admin:
+					        manages:
+					          - reader
+					        types:
+					          - actor
+
+					      dummy:
+					        types:
+					          - actor
+				`,
+
+				ExpectedPolicyID: expectedPolicyID,
+			},
+
+			testUtils.SchemaUpdate{
+				Schema: fmt.Sprintf(`
+					type Users @policy(
+						id: "%s",
+						resource: "users"
+					) {
+						name: String
+						age: Int
+					}
+				`,
+					expectedPolicyID,
+				),
+			},
+
+			testUtils.CreateDoc{
+				Identity: immutable.Some(1),
+
+				CollectionID: 0,
+
+				Doc: `
+					{
+						"name": "Shahzad",
+						"age": 28
+					}
+				`,
+			},
+
+			testUtils.DeleteDocActorRelationship{ // Owner tries to revoke its own relation.
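+				// Self-revocation of the owner relation is rejected by ACP.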
+ RequestorIdentity: 1, + + TargetIdentity: 1, + + CollectionID: 0, + + DocID: 0, + + Relation: "owner", + + ExpectedError: "failed to delete document actor relationship with acp", + }, + + testUtils.AddDocActorRelationship{ // Owner can still perform admin operations. + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "reader", + + ExpectedExistence: false, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/acp/relationship/doc_actor/delete/with_update_test.go b/tests/integration/acp/relationship/doc_actor/delete/with_update_test.go new file mode 100644 index 0000000000..e51edc22ca --- /dev/null +++ b/tests/integration/acp/relationship/doc_actor/delete/with_update_test.go @@ -0,0 +1,458 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package test_acp_relationship_doc_actor_delete + +import ( + "fmt" + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestACP_OwnerRevokesUpdateWriteAccess_OtherActorCanNoLongerUpdate(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner revokes write(update) access from another actor, they can not update anymore", + + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + // Give access to the other actor to update and read the document. + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: false, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can update. + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + }, + + // Ensure the other identity can read and update the document. + testUtils.Request{ + Identity: immutable.Some(2), // This identity can also read. 
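+				// Confirms the update above took effect before access is revoked.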
+ + Request: ` + query { + Users { + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad Lone", + "age": int64(28), + }, + }, + }, + }, + + testUtils.DeleteDocActorRelationship{ // Revoke access from being able to update (and read) the document. + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedRecordFound: true, + }, + + // The other identity can neither update nor read the other document anymore. + testUtils.Request{ + Identity: immutable.Some(2), + + Request: ` + query { + Users { + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't read the document anymore + }, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Update Not Possible" + } + `, + + ExpectedError: "document not found or not authorized to access", // Can't update the document anymore. + }, + + // Ensure document was not accidentally updated using owner identity. + testUtils.Request{ + Identity: immutable.Some(1), + + Request: ` + query { + Users { + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad Lone", + "age": int64(28), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestACP_OwnerRevokesUpdateWriteAccess_GQL_OtherActorCanNoLongerUpdate(t *testing.T) { + expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4" + + test := testUtils.TestCase{ + + Description: "Test acp, owner revokes write(update) access from another actor, they can not update anymore (gql)", + + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return no error. + testUtils.GQLRequestMutationType, + }), + + Actions: []any{ + testUtils.AddPolicy{ + + Identity: immutable.Some(1), + + Policy: ` + name: Test Policy + + description: A Policy + + actor: + name: actor + + resources: + users: + permissions: + read: + expr: owner + reader + writer + + write: + expr: owner + writer + + nothing: + expr: dummy + + relations: + owner: + types: + - actor + + reader: + types: + - actor + + writer: + types: + - actor + + admin: + manages: + - reader + types: + - actor + + dummy: + types: + - actor + `, + + ExpectedPolicyID: expectedPolicyID, + }, + + testUtils.SchemaUpdate{ + Schema: fmt.Sprintf(` + type Users @policy( + id: "%s", + resource: "users" + ) { + name: String + age: Int + } + `, + expectedPolicyID, + ), + }, + + testUtils.CreateDoc{ + Identity: immutable.Some(1), + + CollectionID: 0, + + Doc: ` + { + "name": "Shahzad", + "age": 28 + } + `, + }, + + // Give access to the other actor to update and read the document. + testUtils.AddDocActorRelationship{ + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedExistence: false, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), // This identity can update. + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Lone" + } + `, + }, + + // Ensure the other identity can read and update the document. + testUtils.Request{ + Identity: immutable.Some(2), // This identity can also read. 
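+				// Same pre-revocation visibility check as the non-GQL variant above.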
+ + Request: ` + query { + Users { + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad Lone", + "age": int64(28), + }, + }, + }, + }, + + testUtils.DeleteDocActorRelationship{ // Revoke access from being able to update (and read) the document. + RequestorIdentity: 1, + + TargetIdentity: 2, + + CollectionID: 0, + + DocID: 0, + + Relation: "writer", + + ExpectedRecordFound: true, + }, + + // The other identity can neither update nor read the other document anymore. + testUtils.Request{ + Identity: immutable.Some(2), + + Request: ` + query { + Users { + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, // Can't read the document anymore + }, + }, + + testUtils.UpdateDoc{ + CollectionID: 0, + + Identity: immutable.Some(2), + + DocID: 0, + + Doc: ` + { + "name": "Shahzad Update Not Possible" + } + `, + + SkipLocalUpdateEvent: true, + }, + + // Ensure document was not accidentally updated using owner identity. + testUtils.Request{ + Identity: immutable.Some(1), + + Request: ` + query { + Users { + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad Lone", + "age": int64(28), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/utils.go b/tests/integration/utils.go index eb0128ab00..6aac10e5e4 100644 --- a/tests/integration/utils.go +++ b/tests/integration/utils.go @@ -343,6 +343,9 @@ func performAction( case AddDocActorRelationship: addDocActorRelationshipACP(s, action) + case DeleteDocActorRelationship: + deleteDocActorRelationshipACP(s, action) + case CreateDoc: createDoc(s, action) From 4c708b3808003ccc20bb9bee5e8d6e82df672834 Mon Sep 17 00:00:00 2001 From: Shahzad Lone Date: Mon, 7 Oct 2024 04:10:32 -0400 Subject: [PATCH 54/71] chore(i): Bump linter (`golangci-lint`) to v1.61 (#3105) ## Relevant issue(s) Resolves #3104 ## Description - Bump linter - Resolve deprecated stuff - Resolve new linter warning improvements --- .github/workflows/lint.yml | 2 +- Makefile | 2 +- cmd/genopenapi/main.go | 5 ++++- http/client_collection.go | 3 +-- http/handler_collection.go | 5 ++++- http/handler_store.go | 5 ++++- tests/clients/cli/wrapper_collection.go | 3 +-- tools/configs/golangci.yaml | 10 ++++------ 8 files changed, 20 insertions(+), 15 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 20f9128f59..111586ca38 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -50,7 +50,7 @@ jobs: # Required: the version of golangci-lint is required. # Note: The version should not pick the patch version as the latest patch # version is what will always be used. - version: v1.54 + version: v1.61 # Optional: working directory, useful for monorepos or if we wanted to run this # on a non-root directory. 
diff --git a/Makefile b/Makefile index b5fb0ea43a..2010e3e8c7 100644 --- a/Makefile +++ b/Makefile @@ -124,7 +124,7 @@ client\:add-schema: .PHONY: deps\:lint-go deps\:lint-go: - go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.54 + go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.61 .PHONY: deps\:lint-yaml deps\:lint-yaml: diff --git a/cmd/genopenapi/main.go b/cmd/genopenapi/main.go index ed655eb932..5a28dcab41 100644 --- a/cmd/genopenapi/main.go +++ b/cmd/genopenapi/main.go @@ -29,5 +29,8 @@ func main() { if err != nil { panic(err) } - fmt.Fprint(os.Stdout, string(json)) + _, err = fmt.Fprint(os.Stdout, string(json)) + if err != nil { + panic(err) + } } diff --git a/http/client_collection.go b/http/client_collection.go index 54167de222..3abfa61002 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -15,7 +15,6 @@ import ( "context" "encoding/json" "errors" - "fmt" "net/http" "net/url" "strings" @@ -369,7 +368,7 @@ func (c *Collection) GetAllDocIDs( ID: docID, } if res.Error != "" { - docIDResult.Err = fmt.Errorf(res.Error) + docIDResult.Err = errors.New(res.Error) } docIDCh <- docIDResult } diff --git a/http/handler_collection.go b/http/handler_collection.go index 57202a4b9f..8f45a7948f 100644 --- a/http/handler_collection.go +++ b/http/handler_collection.go @@ -243,7 +243,10 @@ func (s *collectionHandler) GetAllDocIDs(rw http.ResponseWriter, req *http.Reque if err != nil { return } - fmt.Fprintf(rw, "data: %s\n\n", data) + _, err = fmt.Fprintf(rw, "data: %s\n\n", data) + if err != nil { + return + } flusher.Flush() } } diff --git a/http/handler_store.go b/http/handler_store.go index 2f98cda0ff..e08f2aa9cf 100644 --- a/http/handler_store.go +++ b/http/handler_store.go @@ -333,7 +333,10 @@ func (s *storeHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) { if err != nil { return } - fmt.Fprintf(rw, "data: %s\n\n", data) + _, err = fmt.Fprintf(rw, "data: %s\n\n", data) + if err != nil { + return + } flusher.Flush() } } diff --git a/tests/clients/cli/wrapper_collection.go b/tests/clients/cli/wrapper_collection.go index d03c23532f..cfa49b9e8e 100644 --- a/tests/clients/cli/wrapper_collection.go +++ b/tests/clients/cli/wrapper_collection.go @@ -13,7 +13,6 @@ package cli import ( "context" "encoding/json" - "fmt" "strings" "github.com/sourcenetwork/immutable" @@ -322,7 +321,7 @@ func (c *Collection) GetAllDocIDs( ID: docID, } if res.Error != "" { - docIDResult.Err = fmt.Errorf(res.Error) + docIDResult.Err = errors.New(res.Error) } docIDCh <- docIDResult } diff --git a/tools/configs/golangci.yaml b/tools/configs/golangci.yaml index 0b92bd2d50..d8162783df 100644 --- a/tools/configs/golangci.yaml +++ b/tools/configs/golangci.yaml @@ -61,8 +61,10 @@ run: #=====================================================================================[ Output Configuration Options ] output: - # colored-line-number|line-number|json|tab|checkstyle|code-climate|junit-xml|github-actions - format: colored-line-number + formats: + # colored-line-number|line-number|json|tab|checkstyle|code-climate|junit-xml|github-actions + - format: colored-line-number + path: stdout # print lines of code with issue. print-issued-lines: true @@ -258,8 +260,6 @@ linters-settings: local-prefixes: github.com/sourcenetwork/defradb gosimple: - # Select the Go version to target. 
- go: "1.22" # https://staticcheck.io/docs/options#checks checks: ["all", "-S1038"] # Turn on all except (these are disabled): @@ -351,8 +351,6 @@ linters-settings: - must not end in punctuation staticcheck: - # Select the Go version to target. - go: "1.22" # https://staticcheck.io/docs/options#checks checks: ["all"] From 33e91c96e87d7e4e6a3a1676f9babd9b9a03b554 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 7 Oct 2024 10:11:52 -0700 Subject: [PATCH 55/71] feat: JSON type coercion (#3098) ## Relevant issue(s) Resolves #3097 ## Description This PR fixes JSON type coercion and makes it much simpler to filter, create, and update JSON fields. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? Added / updated unit and integration tests. Specify the platform(s) on which this was tested: - MacOS --- client/document.go | 74 ++++- client/document_test.go | 30 +- client/errors.go | 2 +- client/normal_new.go | 2 + client/normal_scalar.go | 24 ++ client/normal_value.go | 3 + client/normal_value_test.go | 59 ++++ client/normal_void.go | 4 + .../i3097-json-type-coercion.md | 3 + go.mod | 2 +- go.sum | 4 +- internal/connor/connor.go | 73 +++-- internal/connor/eq.go | 26 +- internal/core/encoding.go | 42 +++ internal/planner/mapper/mapper.go | 5 + internal/request/graphql/schema/collection.go | 4 +- .../request/graphql/schema/types/scalars.go | 79 ++--- .../graphql/schema/types/scalars_test.go | 103 ++----- .../field_kinds/field_kind_json_test.go | 287 ++++++++++++++++-- .../mutation/create/with_variables_test.go | 36 +++ .../mutation/update/field_kinds/json_test.go | 2 +- .../simple/with_filter/with_eq_json_test.go | 125 +++++++- .../simple/with_filter/with_in_json_test.go | 25 +- .../updates/add/field/kind/json_test.go | 8 +- 24 files changed, 826 insertions(+), 196 deletions(-) create mode 100644 docs/data_format_changes/i3097-json-type-coercion.md diff --git a/client/document.go b/client/document.go index a49a60307b..cc15d45673 100644 --- a/client/document.go +++ b/client/document.go @@ -341,7 +341,7 @@ func validateFieldSchema(val any, field FieldDefinition) (NormalValue, error) { if err != nil { return nil, err } - return NewNormalString(v), nil + return NewNormalJSON(&JSON{v}), nil } return nil, NewErrUnhandledType("FieldKind", field.Kind) @@ -417,16 +417,70 @@ func getDateTime(v any) (time.Time, error) { return time.Parse(time.RFC3339, s) } -func getJSON(v any) (string, error) { - s, err := getString(v) - if err != nil { - return "", err - } - val, err := fastjson.Parse(s) - if err != nil { - return "", NewErrInvalidJSONPaylaod(s) +// getJSON converts the given value to a valid JSON value. +// +// If the value is of type *fastjson.Value it needs to be +// manually parsed. All other values are valid JSON. 
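+//
+// For example, a fastjson value parsed from `{"n": 1}` becomes
+// map[string]any{"n": int64(1)}; numbers are tried as int64 before float64.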
+func getJSON(v any) (any, error) { + val, ok := v.(*fastjson.Value) + if !ok { + return v, nil + } + switch val.Type() { + case fastjson.TypeArray: + arr, err := val.Array() + if err != nil { + return nil, err + } + out := make([]any, len(arr)) + for i, v := range arr { + c, err := getJSON(v) + if err != nil { + return nil, err + } + out[i] = c + } + return out, nil + + case fastjson.TypeObject: + obj, err := val.Object() + if err != nil { + return nil, err + } + out := make(map[string]any) + obj.Visit(func(key []byte, v *fastjson.Value) { + c, e := getJSON(v) + out[string(key)] = c + err = errors.Join(err, e) + }) + return out, err + + case fastjson.TypeFalse: + return false, nil + + case fastjson.TypeTrue: + return true, nil + + case fastjson.TypeNumber: + out, err := val.Int64() + if err == nil { + return out, nil + } + return val.Float64() + + case fastjson.TypeString: + out, err := val.StringBytes() + if err != nil { + return nil, err + } + return string(out), nil + + case fastjson.TypeNull: + return nil, nil + + default: + return nil, NewErrInvalidJSONPayload(v) } - return val.String(), nil } func getArray[T any]( diff --git a/client/document_test.go b/client/document_test.go index a11a6a67c8..b74af54b27 100644 --- a/client/document_test.go +++ b/client/document_test.go @@ -161,7 +161,16 @@ func TestNewFromJSON_WithValidJSONFieldValue_NoError(t *testing.T) { objWithJSONField := []byte(`{ "Name": "John", "Age": 26, - "Custom": "{\"tree\":\"maple\", \"age\": 260}" + "Custom": { + "string": "maple", + "int": 260, + "float": 3.14, + "false": false, + "true": true, + "null": null, + "array": ["one", 1], + "object": {"one": 1} + } }`) doc, err := NewDocFromJSON(objWithJSONField, def) if err != nil { @@ -183,7 +192,16 @@ func TestNewFromJSON_WithValidJSONFieldValue_NoError(t *testing.T) { assert.Equal(t, doc.values[doc.fields["Name"]].IsDocument(), false) assert.Equal(t, doc.values[doc.fields["Age"]].Value(), int64(26)) assert.Equal(t, doc.values[doc.fields["Age"]].IsDocument(), false) - assert.Equal(t, doc.values[doc.fields["Custom"]].Value(), "{\"tree\":\"maple\",\"age\":260}") + assert.Equal(t, doc.values[doc.fields["Custom"]].Value(), map[string]any{ + "string": "maple", + "int": int64(260), + "float": float64(3.14), + "false": false, + "true": true, + "null": nil, + "array": []any{"one", int64(1)}, + "object": map[string]any{"one": int64(1)}, + }) assert.Equal(t, doc.values[doc.fields["Custom"]].IsDocument(), false) } @@ -191,20 +209,20 @@ func TestNewFromJSON_WithInvalidJSONFieldValue_Error(t *testing.T) { objWithJSONField := []byte(`{ "Name": "John", "Age": 26, - "Custom": "{\"tree\":\"maple, \"age\": 260}" + "Custom": {"tree":"maple, "age": 260} }`) _, err := NewDocFromJSON(objWithJSONField, def) - require.ErrorContains(t, err, "invalid JSON payload. Payload: {\"tree\":\"maple, \"age\": 260}") + require.ErrorContains(t, err, "cannot parse JSON") } -func TestNewFromJSON_WithInvalidJSONFieldValueSimpleString_Error(t *testing.T) { +func TestNewFromJSON_WithJSONFieldValueSimpleString_Succeed(t *testing.T) { objWithJSONField := []byte(`{ "Name": "John", "Age": 26, "Custom": "blah" }`) _, err := NewDocFromJSON(objWithJSONField, def) - require.ErrorContains(t, err, "invalid JSON payload. 
Payload: blah") + require.NoError(t, err) } func TestIsJSONArray(t *testing.T) { diff --git a/client/errors.go b/client/errors.go index ceb526b35e..81ebf2e3f5 100644 --- a/client/errors.go +++ b/client/errors.go @@ -166,7 +166,7 @@ func NewErrCRDTKindMismatch(cType, kind string) error { return errors.New(fmt.Sprintf(errCRDTKindMismatch, cType, kind)) } -func NewErrInvalidJSONPaylaod(payload string) error { +func NewErrInvalidJSONPayload(payload any) error { return errors.New(errInvalidJSONPayload, errors.NewKV("Payload", payload)) } diff --git a/client/normal_new.go b/client/normal_new.go index 55ac46ce73..bcd0f00929 100644 --- a/client/normal_new.go +++ b/client/normal_new.go @@ -64,6 +64,8 @@ func NewNormalValue(val any) (NormalValue, error) { return NewNormalTime(v), nil case *Document: return NewNormalDocument(v), nil + case *JSON: + return NewNormalJSON(v), nil case immutable.Option[bool]: return NewNormalNillableBool(v), nil diff --git a/client/normal_scalar.go b/client/normal_scalar.go index f30eca78d7..ae92fbe3a6 100644 --- a/client/normal_scalar.go +++ b/client/normal_scalar.go @@ -17,6 +17,13 @@ import ( "golang.org/x/exp/constraints" ) +// JSON contains a valid JSON value. +// +// The inner type can be any valid normal value or normal value array. +type JSON struct { + inner any +} + // NormalValue is dummy implementation of NormalValue to be embedded in other types. type baseNormalValue[T any] struct { NormalVoid @@ -118,6 +125,18 @@ func (v normalDocument) Document() (*Document, bool) { return v.val, true } +type normalJSON struct { + baseNormalValue[*JSON] +} + +func (v normalJSON) JSON() (*JSON, bool) { + return v.val, true +} + +func (v normalJSON) Unwrap() any { + return v.val.inner +} + func newNormalInt(val int64) NormalValue { return normalInt{newBaseNormalValue(val)} } @@ -161,6 +180,11 @@ func NewNormalDocument(val *Document) NormalValue { return normalDocument{baseNormalValue[*Document]{val: val}} } +// NewNormalJSON creates a new NormalValue that represents a `JSON` value. +func NewNormalJSON(val *JSON) NormalValue { + return normalJSON{baseNormalValue[*JSON]{val: val}} +} + func areNormalScalarsEqual[T comparable](val T, f func() (T, bool)) bool { if otherVal, ok := f(); ok { return val == otherVal diff --git a/client/normal_value.go b/client/normal_value.go index 18bdd74ff0..081814ffe2 100644 --- a/client/normal_value.go +++ b/client/normal_value.go @@ -62,6 +62,9 @@ type NormalValue interface { // Document returns the value as a [*Document]. The second return flag is true if the value is a [*Document]. // Otherwise it will return nil and false. Document() (*Document, bool) + // JSON returns the value as JSON. The second return flag is true if the value is JSON. + // Otherwise it will return nil and false. + JSON() (*JSON, bool) // NillableBool returns the value as a nillable bool. // The second return flag is true if the value is [immutable.Option[bool]]. diff --git a/client/normal_value_test.go b/client/normal_value_test.go index c368a300e3..bcea59e046 100644 --- a/client/normal_value_test.go +++ b/client/normal_value_test.go @@ -30,6 +30,7 @@ const ( BytesType nType = "Bytes" TimeType nType = "Time" DocumentType nType = "Document" + JSONType nType = "JSON" NillableBoolType nType = "NillableBool" NillableIntType nType = "NillableInt" @@ -76,6 +77,11 @@ const ( // If it is and contains a value, it returns the contained value. // Otherwise, it returns the input itself. 
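+// For example, immutable.Some(true) yields true, and a *JSON wrapping a
+// string yields the unwrapped string.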
func extractValue(input any) any { + // unwrap JSON inner values + if v, ok := input.(*JSON); ok { + return v.inner + } + inputVal := reflect.ValueOf(input) // Check if the type is Option[T] by seeing if it has the HasValue and Value methods. @@ -112,6 +118,7 @@ func TestNormalValue_NewValueAndTypeAssertion(t *testing.T) { BytesType: func(v NormalValue) (any, bool) { return v.Bytes() }, TimeType: func(v NormalValue) (any, bool) { return v.Time() }, DocumentType: func(v NormalValue) (any, bool) { return v.Document() }, + JSONType: func(v NormalValue) (any, bool) { return v.JSON() }, NillableBoolType: func(v NormalValue) (any, bool) { return v.NillableBool() }, NillableIntType: func(v NormalValue) (any, bool) { return v.NillableInt() }, @@ -164,6 +171,7 @@ func TestNormalValue_NewValueAndTypeAssertion(t *testing.T) { BytesType: func(v any) NormalValue { return NewNormalBytes(v.([]byte)) }, TimeType: func(v any) NormalValue { return NewNormalTime(v.(time.Time)) }, DocumentType: func(v any) NormalValue { return NewNormalDocument(v.(*Document)) }, + JSONType: func(v any) NormalValue { return NewNormalJSON(v.(*JSON)) }, NillableBoolType: func(v any) NormalValue { return NewNormalNillableBool(v.(immutable.Option[bool])) }, NillableIntType: func(v any) NormalValue { return NewNormalNillableInt(v.(immutable.Option[int64])) }, @@ -283,6 +291,10 @@ func TestNormalValue_NewValueAndTypeAssertion(t *testing.T) { nType: DocumentType, input: &Document{}, }, + { + nType: JSONType, + input: &JSON{nil}, + }, { nType: NillableBoolType, input: immutable.Some(true), @@ -830,6 +842,53 @@ func TestNormalValue_NewNormalValueFromAnyArray(t *testing.T) { } } +func TestNormalValue_NewNormalJSON(t *testing.T) { + var expect *JSON + var actual *JSON + + expect = &JSON{nil} + normal := NewNormalJSON(expect) + + actual, _ = normal.JSON() + assert.Equal(t, expect, actual) + + expect = &JSON{"hello"} + normal = NewNormalJSON(expect) + + actual, _ = normal.JSON() + assert.Equal(t, expect, actual) + + expect = &JSON{true} + normal = NewNormalJSON(expect) + + actual, _ = normal.JSON() + assert.Equal(t, expect, actual) + + expect = &JSON{int64(10)} + normal = NewNormalJSON(expect) + + actual, _ = normal.JSON() + assert.Equal(t, expect, actual) + + expect = &JSON{float64(3.14)} + normal = NewNormalJSON(expect) + + actual, _ = normal.JSON() + assert.Equal(t, expect, actual) + + expect = &JSON{map[string]any{"one": 1}} + normal = NewNormalJSON(expect) + + actual, _ = normal.JSON() + assert.Equal(t, expect, actual) + + expect = &JSON{[]any{1, "two"}} + normal = NewNormalJSON(expect) + + actual, _ = normal.JSON() + assert.Equal(t, expect, actual) +} + func TestNormalValue_NewNormalInt(t *testing.T) { i64 := int64(2) v := NewNormalInt(i64) diff --git a/client/normal_void.go b/client/normal_void.go index 3e13fe489d..3238a25ad2 100644 --- a/client/normal_void.go +++ b/client/normal_void.go @@ -65,6 +65,10 @@ func (NormalVoid) Document() (*Document, bool) { return nil, false } +func (NormalVoid) JSON() (*JSON, bool) { + return nil, false +} + func (NormalVoid) NillableBool() (immutable.Option[bool], bool) { return immutable.None[bool](), false } diff --git a/docs/data_format_changes/i3097-json-type-coercion.md b/docs/data_format_changes/i3097-json-type-coercion.md new file mode 100644 index 0000000000..f1978c4e57 --- /dev/null +++ b/docs/data_format_changes/i3097-json-type-coercion.md @@ -0,0 +1,3 @@ +# JSON type coercion + +JSON types are now stored as parsed values instead of strings. 
\ No newline at end of file diff --git a/go.mod b/go.mod index 43acdd72ca..708e2889c8 100644 --- a/go.mod +++ b/go.mod @@ -48,7 +48,7 @@ require ( github.com/sourcenetwork/badger/v4 v4.2.1-0.20231113215945-a63444ca5276 github.com/sourcenetwork/corelog v0.0.8 github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.14 - github.com/sourcenetwork/graphql-go v0.7.10-0.20240924172903-a4088313b20d + github.com/sourcenetwork/graphql-go v0.7.10-0.20241003221550-224346887b4a github.com/sourcenetwork/immutable v0.3.0 github.com/sourcenetwork/sourcehub v0.2.1-0.20240704194128-f43f5e427274 github.com/spf13/cobra v1.8.1 diff --git a/go.sum b/go.sum index 83aa313637..6151cbdb1e 100644 --- a/go.sum +++ b/go.sum @@ -1398,8 +1398,8 @@ github.com/sourcenetwork/corelog v0.0.8 h1:jCo0mFBpWrfhUCGzzN3uUtPGyQv3jnITdPO1s github.com/sourcenetwork/corelog v0.0.8/go.mod h1:cMabHgs3kARgYTQeQYSOmaGGP8XMU6sZrHd8LFrL3zA= github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.14 h1:620zKV4rOn7U5j/WsPkk4SFj0z9/pVV4bBx0BpZQgro= github.com/sourcenetwork/go-libp2p-pubsub-rpc v0.0.14/go.mod h1:jUoQv592uUX1u7QBjAY4C+l24X9ArhPfifOqXpDHz4U= -github.com/sourcenetwork/graphql-go v0.7.10-0.20240924172903-a4088313b20d h1:P5y4g1ONf8HK36L86/8zDYjY7rRLM7AaqlQDRHOBMH8= -github.com/sourcenetwork/graphql-go v0.7.10-0.20240924172903-a4088313b20d/go.mod h1:rkahXkgRH/3vZErN1Bx+qt1+w+CV5fgaJyKKWgISe4U= +github.com/sourcenetwork/graphql-go v0.7.10-0.20241003221550-224346887b4a h1:wF7VhX0XKuNqhbbDI7RjoIBKFvdKQKucpZlNr/BGE40= +github.com/sourcenetwork/graphql-go v0.7.10-0.20241003221550-224346887b4a/go.mod h1:rkahXkgRH/3vZErN1Bx+qt1+w+CV5fgaJyKKWgISe4U= github.com/sourcenetwork/immutable v0.3.0 h1:gHPtGvLrTBTK5YpDAhMU+u+S8v1F6iYmc3nbZLryMdc= github.com/sourcenetwork/immutable v0.3.0/go.mod h1:GD7ceuh/HD7z6cdIwzKK2ctzgZ1qqYFJpsFp+8qYnbI= github.com/sourcenetwork/raccoondb v0.2.1-0.20240606193653-1e91e9be9234 h1:8dA9bVC1A0ChJygtsUfNsek3oR0GnwpLoYpmEo4t2mk= diff --git a/internal/connor/connor.go b/internal/connor/connor.go index 086ba0cd49..a7a8290dbe 100644 --- a/internal/connor/connor.go +++ b/internal/connor/connor.go @@ -5,6 +5,43 @@ It is derived from https://github.com/SierraSoftworks/connor. */ package connor +const ( + AndOp = "_and" + OrOp = "_or" + NotOp = "_not" + + AnyOp = "_any" + AllOp = "_all" + NoneOp = "_none" + + EqualOp = "_eq" + GreaterOrEqualOp = "_ge" + GreaterOp = "_gt" + InOp = "_in" + LesserOrEqualOp = "_le" + LesserOp = "_lt" + NotEqualOp = "_ne" + NotInOp = "_nin" + LikeOp = "_like" + NotLikeOp = "_nlike" + CaseInsensitiveLikeOp = "_ilike" + CaseInsensitiveNotLikeOp = "_nilike" +) + +// IsOpSimple returns true if the given operator is simple (not compound). +// +// This is useful for checking if a filter operator requires further expansion. +func IsOpSimple(op string) bool { + switch op { + case EqualOp, GreaterOrEqualOp, GreaterOp, InOp, + LesserOrEqualOp, LesserOp, NotEqualOp, NotInOp, + LikeOp, NotLikeOp, CaseInsensitiveLikeOp, CaseInsensitiveNotLikeOp: + return true + default: + return false + } +} + // Match is the default method used in Connor to match some data to a // set of conditions. func Match(conditions map[FilterKey]any, data any) (bool, error) { @@ -16,41 +53,41 @@ func Match(conditions map[FilterKey]any, data any) (bool, error) { // if you wish to override the behavior of another operator. 
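Before continuing into matchWith below, here is a rough usage sketch of the new operator constants and IsOpSimple. Note that internal/connor is an internal package, so this only compiles from within the defradb module itself; treat it as an illustration of the semantics, not a public API.

```go
package main

import (
	"fmt"

	"github.com/sourcenetwork/defradb/internal/connor"
)

func main() {
	// Simple operators (_eq, _like, ...) compare a value directly,
	// while compound operators (_and, _not, ...) wrap further
	// conditions and need expansion in the planner's mapper.
	for _, op := range []string{connor.EqualOp, connor.LikeOp, connor.AndOp, connor.NotOp} {
		fmt.Printf("%s simple=%v\n", op, connor.IsOpSimple(op))
	}
}
```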
func matchWith(op string, conditions, data any) (bool, error) { switch op { - case "_and": + case AndOp: return and(conditions, data) - case "_any": + case AnyOp: return anyOp(conditions, data) - case "_all": + case AllOp: return all(conditions, data) - case "_eq": + case EqualOp: return eq(conditions, data) - case "_ge": + case GreaterOrEqualOp: return ge(conditions, data) - case "_gt": + case GreaterOp: return gt(conditions, data) - case "_in": + case InOp: return in(conditions, data) - case "_le": + case LesserOrEqualOp: return le(conditions, data) - case "_lt": + case LesserOp: return lt(conditions, data) - case "_ne": + case NotEqualOp: return ne(conditions, data) - case "_nin": + case NotInOp: return nin(conditions, data) - case "_or": + case OrOp: return or(conditions, data) - case "_like": + case LikeOp: return like(conditions, data) - case "_nlike": + case NotLikeOp: return nlike(conditions, data) - case "_ilike": + case CaseInsensitiveLikeOp: return ilike(conditions, data) - case "_nilike": + case CaseInsensitiveNotLikeOp: return nilike(conditions, data) - case "_none": + case NoneOp: return none(conditions, data) - case "_not": + case NotOp: return not(conditions, data) default: return false, NewErrUnknownOperator(op) diff --git a/internal/connor/eq.go b/internal/connor/eq.go index 3f849348b8..6b9f56293a 100644 --- a/internal/connor/eq.go +++ b/internal/connor/eq.go @@ -34,7 +34,7 @@ func eq(condition, data any) (bool, error) { switch cn := condition.(type) { case map[FilterKey]any: for prop, cond := range cn { - m, err := matchWith(prop.GetOperatorOrDefault("_eq"), cond, prop.GetProp(data)) + m, err := matchWith(prop.GetOperatorOrDefault(EqualOp), cond, prop.GetProp(data)) if err != nil { return false, err } else if !m { @@ -43,6 +43,9 @@ func eq(condition, data any) (bool, error) { } return true, nil + case map[string]any: + return objectsEqual(cn, data) + case string: if d, ok := data.(string); ok { return d == cn, nil @@ -66,6 +69,27 @@ func eq(condition, data any) (bool, error) { } } +// objectsEqual returns true if the given condition and data +// contain equal key value pairs. +func objectsEqual(condition map[string]any, data any) (bool, error) { + if data == nil { + return condition == nil, nil + } + d := data.(map[string]any) + if len(d) != len(condition) { + return false, nil + } + for k, v := range d { + m, err := eq(condition[k], v) + if err != nil { + return false, err + } else if !m { + return false, nil + } + } + return true, nil +} + func immutableValueOrNil[T any](data immutable.Option[T]) any { if data.HasValue() { return data.Value() diff --git a/internal/core/encoding.go b/internal/core/encoding.go index 6e2e0e0dcb..8c7930d6b9 100644 --- a/internal/core/encoding.go +++ b/internal/core/encoding.go @@ -94,6 +94,9 @@ func NormalizeFieldValue(fieldDesc client.FieldDefinition, val any) (any, error) if err != nil { return nil, err } + + case client.FieldKind_NILLABLE_JSON: + return convertToJSON(fieldDesc.Name, val) } } else { // CBOR often encodes values typed as floats as ints switch fieldDesc.Kind { @@ -136,6 +139,8 @@ func NormalizeFieldValue(fieldDesc client.FieldDefinition, val any) (any, error) case []byte: return string(v), nil } + case client.FieldKind_NILLABLE_JSON: + return convertToJSON(fieldDesc.Name, val) } } @@ -191,6 +196,43 @@ func convertToInt(propertyName string, untypedValue any) (int64, error) { } } +// convertToJSON converts the given value to a valid JSON value. 
+// +// When maps are decoded, they are of type map[any]any, and need to +// be converted to map[string]any. All other values are valid JSON. +func convertToJSON(propertyName string, untypedValue any) (any, error) { + switch t := untypedValue.(type) { + case map[any]any: + resultValue := make(map[string]any) + for k, v := range t { + key, ok := k.(string) + if !ok { + return nil, client.NewErrUnexpectedType[string](propertyName, k) + } + val, err := convertToJSON(fmt.Sprintf("%s.%s", propertyName, key), v) + if err != nil { + return nil, err + } + resultValue[key] = val + } + return resultValue, nil + + case []any: + resultValue := make([]any, len(t)) + for i, v := range t { + val, err := convertToJSON(fmt.Sprintf("%s[%d]", propertyName, i), v) + if err != nil { + return nil, err + } + resultValue[i] = val + } + return resultValue, nil + + default: + return untypedValue, nil + } +} + // DecodeIndexDataStoreKey decodes a IndexDataStoreKey from bytes. // It expects the input bytes is in the following format: // diff --git a/internal/planner/mapper/mapper.go b/internal/planner/mapper/mapper.go index dfadd2f06c..da8390e293 100644 --- a/internal/planner/mapper/mapper.go +++ b/internal/planner/mapper/mapper.go @@ -1309,6 +1309,11 @@ func toFilterMap( key := &Operator{ Operation: sourceKey, } + // if the operator is simple (not compound) then + // it does not require further expansion + if connor.IsOpSimple(sourceKey) { + return key, sourceClause + } switch typedClause := sourceClause.(type) { case []any: // If the clause is an array then we need to convert any inner maps. diff --git a/internal/request/graphql/schema/collection.go b/internal/request/graphql/schema/collection.go index 5c196f1f0f..81d5182366 100644 --- a/internal/request/graphql/schema/collection.go +++ b/internal/request/graphql/schema/collection.go @@ -434,9 +434,9 @@ func defaultFromAST( } switch t := arg.Value.(type) { case *ast.IntValue: - value = gql.Int.ParseLiteral(arg.Value) + value = gql.Int.ParseLiteral(arg.Value, nil) case *ast.FloatValue: - value = gql.Float.ParseLiteral(arg.Value) + value = gql.Float.ParseLiteral(arg.Value, nil) case *ast.BooleanValue: value = t.Value case *ast.StringValue: diff --git a/internal/request/graphql/schema/types/scalars.go b/internal/request/graphql/schema/types/scalars.go index b86c744607..20fd3d13b6 100644 --- a/internal/request/graphql/schema/types/scalars.go +++ b/internal/request/graphql/schema/types/scalars.go @@ -16,7 +16,6 @@ import ( "github.com/sourcenetwork/graphql-go" "github.com/sourcenetwork/graphql-go/language/ast" - "github.com/valyala/fastjson" ) // BlobPattern is a regex for validating blob hex strings @@ -55,7 +54,7 @@ func BlobScalarType() *graphql.Scalar { // ParseValue converts the value to a hex string ParseValue: coerceBlob, // ParseLiteral converts the ast value to a hex string - ParseLiteral: func(valueAST ast.Value) any { + ParseLiteral: func(valueAST ast.Value, variables map[string]any) any { switch valueAST := valueAST.(type) { case *ast.StringValue: return coerceBlob(valueAST.Value) @@ -67,33 +66,39 @@ func BlobScalarType() *graphql.Scalar { }) } -// coerceJSON converts the given value into a valid json string. -// If the value cannot be converted nil is returned. 
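The coerceJSON helper removed below validated JSON strings, while the new convertToJSON above normalizes already-decoded values instead. Here is a stripped-down, standalone sketch of that normalization (the function name is mine; the real code also threads the property name through and returns a typed error for non-string keys):

```go
package main

import "fmt"

// normalize converts decoder output into valid JSON material:
// CBOR decoders commonly yield map[any]any, which must become
// map[string]any; arrays are walked recursively; scalars pass through.
func normalize(v any) any {
	switch t := v.(type) {
	case map[any]any:
		out := make(map[string]any, len(t))
		for k, val := range t {
			if key, ok := k.(string); ok {
				out[key] = normalize(val)
			} // the real convertToJSON errors on non-string keys
		}
		return out
	case []any:
		out := make([]any, len(t))
		for i, val := range t {
			out[i] = normalize(val)
		}
		return out
	default:
		return v
	}
}

func main() {
	decoded := map[any]any{"tree": "maple", "ages": []any{250, 450}}
	fmt.Printf("%#v\n", normalize(decoded))
}
```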
-func coerceJSON(value any) any { - switch value := value.(type) { - case []byte: - err := fastjson.ValidateBytes(value) - if err != nil { - // ignore this error because the value - // cannot be converted to a json string - return nil +func parseJSON(valueAST ast.Value, variables map[string]any) any { + switch valueAST := valueAST.(type) { + case *ast.ObjectValue: + out := make(map[string]any) + for _, f := range valueAST.Fields { + out[f.Name.Value] = parseJSON(f.Value, variables) } - return string(value) - - case *[]byte: - return coerceJSON(*value) + return out - case string: - err := fastjson.Validate(value) - if err != nil { - // ignore this error because the value - // cannot be converted to a json string - return nil + case *ast.ListValue: + out := make([]any, len(valueAST.Values)) + for i, v := range valueAST.Values { + out[i] = parseJSON(v, variables) } - return value + return out - case *string: - return coerceJSON(*value) + case *ast.BooleanValue: + return graphql.Boolean.ParseLiteral(valueAST, variables) + + case *ast.FloatValue: + return graphql.Float.ParseLiteral(valueAST, variables) + + case *ast.IntValue: + return graphql.Int.ParseLiteral(valueAST, variables) + + case *ast.StringValue: + return graphql.String.ParseLiteral(valueAST, variables) + + case *ast.EnumValue: + return valueAST.Value + + case *ast.Variable: + return variables[valueAST.Name.Value] default: return nil @@ -103,20 +108,16 @@ func coerceJSON(value any) any { func JSONScalarType() *graphql.Scalar { return graphql.NewScalar(graphql.ScalarConfig{ Name: "JSON", - Description: "The `JSON` scalar type represents a JSON string.", - // Serialize converts the value to a json string - Serialize: coerceJSON, - // ParseValue converts the value to a json string - ParseValue: coerceJSON, - // ParseLiteral converts the ast value to a json string - ParseLiteral: func(valueAST ast.Value) any { - switch valueAST := valueAST.(type) { - case *ast.StringValue: - return coerceJSON(valueAST.Value) - default: - // return nil if the value cannot be parsed - return nil - } + Description: "The `JSON` scalar type represents a JSON value.", + // Serialize converts the value to json value + Serialize: func(value any) any { + return value + }, + // ParseValue converts the value to json value + ParseValue: func(value any) any { + return value }, + // ParseLiteral converts the ast value to a json value + ParseLiteral: parseJSON, }) } diff --git a/internal/request/graphql/schema/types/scalars_test.go b/internal/request/graphql/schema/types/scalars_test.go index fba94ce67b..a0b977aee0 100644 --- a/internal/request/graphql/schema/types/scalars_test.go +++ b/internal/request/graphql/schema/types/scalars_test.go @@ -82,90 +82,49 @@ func TestBlobScalarTypeParseLiteral(t *testing.T) { {&ast.ObjectValue{}, nil}, } for _, c := range cases { - result := BlobScalarType().ParseLiteral(c.input) + result := BlobScalarType().ParseLiteral(c.input, nil) assert.Equal(t, c.expect, result) } } -func TestJSONScalarTypeParseAndSerialize(t *testing.T) { - validString := `"hello"` - validBytes := []byte(`"hello"`) - - boolString := "true" - boolBytes := []byte("true") - - intString := "0" - intBytes := []byte("0") - - floatString := "3.14" - floatBytes := []byte("3.14") - - objectString := `{"name": "Bob"}` - objectBytes := []byte(`{"name": "Bob"}`) - - invalidString := "invalid" - invalidBytes := []byte("invalid") - - cases := []struct { - input any - expect any - }{ - {validString, `"hello"`}, - {&validString, `"hello"`}, - {validBytes, `"hello"`}, - 
{&validBytes, `"hello"`}, - {boolString, "true"}, - {&boolString, "true"}, - {boolBytes, "true"}, - {&boolBytes, "true"}, - {[]byte("true"), "true"}, - {[]byte("false"), "false"}, - {intString, "0"}, - {&intString, "0"}, - {intBytes, "0"}, - {&intBytes, "0"}, - {floatString, "3.14"}, - {&floatString, "3.14"}, - {floatBytes, "3.14"}, - {&floatBytes, "3.14"}, - {invalidString, nil}, - {&invalidString, nil}, - {invalidBytes, nil}, - {&invalidBytes, nil}, - {objectString, `{"name": "Bob"}`}, - {&objectString, `{"name": "Bob"}`}, - {objectBytes, `{"name": "Bob"}`}, - {&objectBytes, `{"name": "Bob"}`}, - {nil, nil}, - {0, nil}, - {false, nil}, - } - for _, c := range cases { - parsed := JSONScalarType().ParseValue(c.input) - assert.Equal(t, c.expect, parsed) - - serialized := JSONScalarType().Serialize(c.input) - assert.Equal(t, c.expect, serialized) - } -} - func TestJSONScalarTypeParseLiteral(t *testing.T) { cases := []struct { input ast.Value expect any }{ - {&ast.StringValue{Value: "0"}, "0"}, - {&ast.StringValue{Value: "invalid"}, nil}, - {&ast.IntValue{}, nil}, - {&ast.BooleanValue{}, nil}, + {&ast.StringValue{Value: "hello"}, "hello"}, + {&ast.IntValue{Value: "10"}, int32(10)}, + {&ast.BooleanValue{Value: true}, true}, {&ast.NullValue{}, nil}, - {&ast.EnumValue{}, nil}, - {&ast.FloatValue{}, nil}, - {&ast.ListValue{}, nil}, - {&ast.ObjectValue{}, nil}, + {&ast.EnumValue{Value: "DESC"}, "DESC"}, + {&ast.Variable{Name: &ast.Name{Value: "message"}}, "hello"}, + {&ast.Variable{Name: &ast.Name{Value: "invalid"}}, nil}, + {&ast.FloatValue{Value: "3.14"}, 3.14}, + {&ast.ListValue{Values: []ast.Value{ + &ast.StringValue{Value: "hello"}, + &ast.IntValue{Value: "10"}, + }}, []any{"hello", int32(10)}}, + {&ast.ObjectValue{ + Fields: []*ast.ObjectField{ + { + Name: &ast.Name{Value: "int"}, + Value: &ast.IntValue{Value: "10"}, + }, + { + Name: &ast.Name{Value: "string"}, + Value: &ast.StringValue{Value: "hello"}, + }, + }, + }, map[string]any{ + "int": int32(10), + "string": "hello", + }}, + } + variables := map[string]any{ + "message": "hello", } for _, c := range cases { - result := JSONScalarType().ParseLiteral(c.input) + result := JSONScalarType().ParseLiteral(c.input, variables) assert.Equal(t, c.expect, result) } } diff --git a/tests/integration/mutation/create/field_kinds/field_kind_json_test.go b/tests/integration/mutation/create/field_kinds/field_kind_json_test.go index cc97bd162f..b578bf3928 100644 --- a/tests/integration/mutation/create/field_kinds/field_kind_json_test.go +++ b/tests/integration/mutation/create/field_kinds/field_kind_json_test.go @@ -14,11 +14,13 @@ import ( "testing" testUtils "github.com/sourcenetwork/defradb/tests/integration" + + "github.com/sourcenetwork/immutable" ) -func TestMutationCreate_WithJSONFieldGivenValidJSON_NoError(t *testing.T) { +func TestMutationCreate_WithJSONFieldGivenObjectValue_Succeeds(t *testing.T) { test := testUtils.TestCase{ - Description: "Create mutation with JSON field given a valid JSON string.", + Description: "Create mutation with JSON field given an object value.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -28,19 +30,70 @@ func TestMutationCreate_WithJSONFieldGivenValidJSON_NoError(t *testing.T) { } `, }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "custom": {"tree": "maple", "age": 250} + }`, + }, testUtils.Request{ - Request: `mutation { - create_Users(input: {name: "John", custom: "{\"tree\": \"maple\", \"age\": 250}"}) { + Request: `query { + Users { _docID name custom } }`, Results: map[string]any{ - 
"create_Users": []map[string]any{ + "Users": []map[string]any{ { - "_docID": "bae-84ae4ef8-ca0c-5f32-bc85-cee97e731bc0", - "custom": "{\"tree\":\"maple\",\"age\":250}", + "_docID": "bae-a948a3b2-3e89-5654-b0f0-71685a66b4d7", + "custom": map[string]any{ + "tree": "maple", + "age": uint64(250), + }, + "name": "John", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationCreate_WithJSONFieldGivenListOfScalarsValue_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Create mutation with JSON field given a list of scalars value.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "custom": ["maple", 250] + }`, + }, + testUtils.Request{ + Request: `query { + Users { + _docID + name + custom + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-90fd8b1b-bd11-56b5-a78c-2fb6f7b4dca0", + "custom": []any{"maple", uint64(250)}, "name": "John", }, }, @@ -52,9 +105,9 @@ func TestMutationCreate_WithJSONFieldGivenValidJSON_NoError(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestMutationCreate_WithJSONFieldGivenInvalidJSON_Error(t *testing.T) { +func TestMutationCreate_WithJSONFieldGivenListOfObjectsValue_Succeeds(t *testing.T) { test := testUtils.TestCase{ - Description: "Create mutation with JSON field given a valid JSON string.", + Description: "Create mutation with JSON field given a list of objects value.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -64,16 +117,35 @@ func TestMutationCreate_WithJSONFieldGivenInvalidJSON_Error(t *testing.T) { } `, }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "custom": [ + {"tree": "maple"}, + {"tree": "oak"} + ] + }`, + }, testUtils.Request{ - Request: `mutation { - create_Users(input: {name: "John", custom: "{\"tree\": \"maple, \"age\": 250}"}) { + Request: `query { + Users { _docID name custom } }`, - ExpectedError: `Argument "input" has invalid value {name: "John", custom: "{\"tree\": \"maple, \"age\": 250}"}. 
-In field "custom": Expected type "JSON", found "{\"tree\": \"maple, \"age\": 250}".`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-dd7c12f5-a7c5-55c6-8b35-ece853ae7f9e", + "custom": []any{ + map[string]any{"tree": "maple"}, + map[string]any{"tree": "oak"}, + }, + "name": "John", + }, + }, + }, }, }, } @@ -81,9 +153,9 @@ In field "custom": Expected type "JSON", found "{\"tree\": \"maple, \"age\": 250 testUtils.ExecuteTestCase(t, test) } -func TestMutationCreate_WithJSONFieldGivenSimpleString_Error(t *testing.T) { +func TestMutationCreate_WithJSONFieldGivenIntValue_Succeeds(t *testing.T) { test := testUtils.TestCase{ - Description: "Create mutation with JSON field given a valid JSON string.", + Description: "Create mutation with JSON field given a int value.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -93,16 +165,193 @@ func TestMutationCreate_WithJSONFieldGivenSimpleString_Error(t *testing.T) { } `, }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "custom": 250 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + _docID + name + custom + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-59731737-8793-5794-a9a5-0ed0ad696d5c", + "custom": uint64(250), + "name": "John", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationCreate_WithJSONFieldGivenStringValue_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Create mutation with JSON field given a string value.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "custom": "hello" + }`, + }, testUtils.Request{ - Request: `mutation { - create_Users(input: {name: "John", custom: "blah"}) { + Request: `query { + Users { _docID name custom } }`, - ExpectedError: `Argument "input" has invalid value {name: "John", custom: "blah"}. 
-In field "custom": Expected type "JSON", found "blah".`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-608582c3-979e-5f34-80f8-a70fce875d05", + "custom": "hello", + "name": "John", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationCreate_WithJSONFieldGivenBooleanValue_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Create mutation with JSON field given a boolean value.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "custom": true + }`, + }, + testUtils.Request{ + Request: `query { + Users { + _docID + name + custom + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-0c4b39cf-433c-5a9c-9bed-1e2796c35d14", + "custom": true, + "name": "John", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestMutationCreate_WithJSONFieldGivenNullValue_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Create mutation with JSON field given a null value.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "custom": null + }`, + }, + testUtils.Request{ + Request: `query { + Users { + _docID + name + custom + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "_docID": "bae-f405f600-56d9-5de4-8d02-75fdced35e3b", + "custom": nil, + "name": "John", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +// This test confirms that our JSON value encoding is determinstic. +func TestMutationCreate_WithDuplicateJSONField_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Create mutation with duplicate JSON field errors.", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // Save will not produce an error on duplicate + // because it will just update the previous doc + testUtils.GQLRequestMutationType, + testUtils.CollectionNamedMutationType, + }), + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "custom": {"one": 1, "two": 2, "three": [0, 1, 2]} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "custom": {"three": [0, 1, 2], "two": 2, "one": 1} + }`, + ExpectedError: `a document with the given ID already exists`, }, }, } diff --git a/tests/integration/mutation/create/with_variables_test.go b/tests/integration/mutation/create/with_variables_test.go index 5bd4814b16..84b75f6407 100644 --- a/tests/integration/mutation/create/with_variables_test.go +++ b/tests/integration/mutation/create/with_variables_test.go @@ -84,3 +84,39 @@ func TestMutationCreateWithDefaultVariable(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestMutationCreate_WithJSONVariable_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple create mutation with JSON variable input.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + embed: JSON + } + `, + }, + testUtils.Request{ + Variables: immutable.Some(map[string]any{ + "message": "hello", + }), + Request: `mutation($message: String) { + create_Users(input: {embed: {message: $message}}) { + embed + } + }`, + Results: map[string]any{ + "create_Users": []map[string]any{ + { + "embed": map[string]any{ + "message": "hello", 
+ }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/update/field_kinds/json_test.go b/tests/integration/mutation/update/field_kinds/json_test.go index 868e0c5a7d..fcf32948fe 100644 --- a/tests/integration/mutation/update/field_kinds/json_test.go +++ b/tests/integration/mutation/update/field_kinds/json_test.go @@ -29,7 +29,7 @@ func TestMutationUpdate_IfJSONFieldSetToNull_ShouldBeNil(t *testing.T) { }, testUtils.CreateDoc{ Doc: `{ - "custom": "{\"foo\": \"bar\"}" + "custom": {"foo": "bar"} }`, }, testUtils.UpdateDoc{ diff --git a/tests/integration/query/simple/with_filter/with_eq_json_test.go b/tests/integration/query/simple/with_filter/with_eq_json_test.go index da0c03ed4b..3f85a033cb 100644 --- a/tests/integration/query/simple/with_filter/with_eq_json_test.go +++ b/tests/integration/query/simple/with_filter/with_eq_json_test.go @@ -16,7 +16,7 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQuerySimple_WithEqOpOnJSONField_ShouldFilter(t *testing.T) { +func TestQuerySimple_WithEqOpOnJSONFieldWithObject_ShouldFilter(t *testing.T) { test := testUtils.TestCase{ Actions: []any{ testUtils.SchemaUpdate{ @@ -28,27 +28,132 @@ func TestQuerySimple_WithEqOpOnJSONField_ShouldFilter(t *testing.T) { `, }, testUtils.CreateDoc{ - DocMap: map[string]any{ - "name": "John", - "custom": "{\"tree\": \"maple\", \"age\": 250}", + Doc: `{ + "name": "John", + "custom": { + "tree": "maple", + "age": 250 + } + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "custom": { + "tree": "oak", + "age": 450 + } + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "custom": null + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {custom: {_eq: {tree:"oak",age:450}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + {"name": "Andy"}, + }, }, }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQuerySimple_WithEqOpOnJSONFieldWithNestedObjects_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + custom: JSON + } + `, + }, testUtils.CreateDoc{ - DocMap: map[string]any{ - "name": "Andy", - "custom": "{\"tree\": \"oak\", \"age\": 450}", + Doc: `{ + "name": "John", + "custom": { + "level_1": { + "level_2": { + "level_3": [true, false] + } + } + } + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "custom": { + "level_1": { + "level_2": { + "level_3": [false, true] + } + } + } + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {custom: {_eq: {level_1: {level_2: {level_3: [true, false]}}}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + {"name": "John"}, + }, }, }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQuerySimple_WithEqOpOnJSONFieldWithNullValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "custom": null + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "custom": {} + }`, + }, testUtils.Request{ - // the filtered-by JSON has no spaces, because this is now it's stored. 
Request: `query { - Users(filter: {custom: {_eq: "{\"tree\":\"oak\",\"age\":450}"}}) { + Users(filter: {custom: {_eq: null}}) { name } }`, Results: map[string]any{ "Users": []map[string]any{ - {"name": "Andy"}, + {"name": "John"}, }, }, }, diff --git a/tests/integration/query/simple/with_filter/with_in_json_test.go b/tests/integration/query/simple/with_filter/with_in_json_test.go index b9bab035f4..568862ee52 100644 --- a/tests/integration/query/simple/with_filter/with_in_json_test.go +++ b/tests/integration/query/simple/with_filter/with_in_json_test.go @@ -28,21 +28,26 @@ func TestQuerySimple_WithInOpOnJSONField_ShouldFilter(t *testing.T) { `, }, testUtils.CreateDoc{ - DocMap: map[string]any{ - "name": "John", - "custom": "{\"tree\": \"maple\", \"age\": 250}", - }, + Doc: `{ + "name": "John", + "custom": { + "tree": "maple", + "age": 250 + } + }`, }, testUtils.CreateDoc{ - DocMap: map[string]any{ - "name": "Andy", - "custom": "{\"tree\": \"oak\", \"age\": 450}", - }, + Doc: `{ + "name": "Andy", + "custom": { + "tree": "oak", + "age": 450 + } + }`, }, testUtils.Request{ - // the filtered-by JSON has no spaces, because this is now it's stored. Request: `query { - Users(filter: {custom: {_in: ["{\"tree\":\"oak\",\"age\":450}"]}}) { + Users(filter: {custom: {_in: [{tree:"oak",age:450}]}}) { name } }`, diff --git a/tests/integration/schema/updates/add/field/kind/json_test.go b/tests/integration/schema/updates/add/field/kind/json_test.go index 371e12d074..faecdb1977 100644 --- a/tests/integration/schema/updates/add/field/kind/json_test.go +++ b/tests/integration/schema/updates/add/field/kind/json_test.go @@ -72,7 +72,7 @@ func TestSchemaUpdatesAddFieldKindJSONWithCreate(t *testing.T) { CollectionID: 0, Doc: `{ "name": "John", - "foo": "{}" + "foo": {} }`, }, testUtils.Request{ @@ -86,7 +86,7 @@ func TestSchemaUpdatesAddFieldKindJSONWithCreate(t *testing.T) { "Users": []map[string]any{ { "name": "John", - "foo": "{}", + "foo": map[string]any{}, }, }, }, @@ -118,7 +118,7 @@ func TestSchemaUpdatesAddFieldKindJSONSubstitutionWithCreate(t *testing.T) { CollectionID: 0, Doc: `{ "name": "John", - "foo": "{}" + "foo": {} }`, }, testUtils.Request{ @@ -132,7 +132,7 @@ func TestSchemaUpdatesAddFieldKindJSONSubstitutionWithCreate(t *testing.T) { "Users": []map[string]any{ { "name": "John", - "foo": "{}", + "foo": map[string]any{}, }, }, }, From c60588a93115faa39e99ee193dee13d426a8ba29 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 00:48:15 -0400 Subject: [PATCH 56/71] bot: Update dependencies (bulk dependabot PRs) 07-10-2024 (#3118) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by combining the following PRs: #3117 bot: Bump @types/react from 18.3.10 to 18.3.11 in /playground #3116 bot: Bump @typescript-eslint/parser from 8.7.0 to 8.8.0 in /playground #3114 bot: Bump eslint from 9.11.1 to 9.12.0 in /playground #3113 bot: Bump golang.org/x/crypto from 0.27.0 to 0.28.0 #3112 bot: Bump google.golang.org/grpc from 1.67.0 to 1.67.1 #3111 bot: Bump github.com/ipfs/boxo from 0.23.0 to 0.24.0 #3110 bot: Bump github.com/libp2p/go-libp2p-kad-dht from 0.26.1 to 0.27.0 #3109 bot: Bump github.com/getkin/kin-openapi from 0.127.0 to 0.128.0 ⚠️ The following PR was resolved manually due to merge conflicts: #3115 bot: Bump @typescript-eslint/eslint-plugin from 8.7.0 to 8.8.0 in /playground --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] 
<49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- go.mod | 22 +- go.sum | 48 ++-- playground/package-lock.json | 438 +++++++++++++++++++++++++++-------- playground/package.json | 8 +- 4 files changed, 374 insertions(+), 142 deletions(-) diff --git a/go.mod b/go.mod index 708e2889c8..483980affd 100644 --- a/go.mod +++ b/go.mod @@ -13,13 +13,13 @@ require ( github.com/decred/dcrd/dcrec/secp256k1/v4 v4.3.0 github.com/evanphx/json-patch/v5 v5.9.0 github.com/fxamacker/cbor/v2 v2.7.0 - github.com/getkin/kin-openapi v0.127.0 + github.com/getkin/kin-openapi v0.128.0 github.com/go-chi/chi/v5 v5.1.0 github.com/go-chi/cors v1.2.1 github.com/go-errors/errors v1.5.1 github.com/gofrs/uuid/v5 v5.3.0 github.com/iancoleman/strcase v0.3.0 - github.com/ipfs/boxo v0.23.0 + github.com/ipfs/boxo v0.24.0 github.com/ipfs/go-block-format v0.2.0 github.com/ipfs/go-cid v0.4.1 github.com/ipfs/go-datastore v0.6.0 @@ -32,9 +32,9 @@ require ( github.com/joho/godotenv v1.5.1 github.com/lens-vm/lens/host-go v0.0.0-20231127204031-8d858ed2926c github.com/lestrrat-go/jwx/v2 v2.1.1 - github.com/libp2p/go-libp2p v0.36.3 + github.com/libp2p/go-libp2p v0.36.4 github.com/libp2p/go-libp2p-gostream v0.6.0 - github.com/libp2p/go-libp2p-kad-dht v0.26.1 + github.com/libp2p/go-libp2p-kad-dht v0.27.0 github.com/libp2p/go-libp2p-pubsub v0.12.0 github.com/libp2p/go-libp2p-record v0.2.0 github.com/mr-tron/base58 v1.2.0 @@ -62,9 +62,9 @@ require ( go.opentelemetry.io/otel/metric v1.30.0 go.opentelemetry.io/otel/sdk/metric v1.30.0 go.uber.org/zap v1.27.0 - golang.org/x/crypto v0.27.0 + golang.org/x/crypto v0.28.0 golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa - google.golang.org/grpc v1.67.0 + google.golang.org/grpc v1.67.1 ) require ( @@ -249,7 +249,7 @@ require ( github.com/libp2p/go-cidranger v1.1.0 // indirect github.com/libp2p/go-flow-metrics v0.1.0 // indirect github.com/libp2p/go-libp2p-asn-util v0.4.1 // indirect - github.com/libp2p/go-libp2p-kbucket v0.6.3 // indirect + github.com/libp2p/go-libp2p-kbucket v0.6.4 // indirect github.com/libp2p/go-libp2p-routing-helpers v0.7.4 // indirect github.com/libp2p/go-msgio v0.3.0 // indirect github.com/libp2p/go-nat v0.2.0 // indirect @@ -276,7 +276,7 @@ require ( github.com/mtibben/percent v0.2.1 // indirect github.com/multiformats/go-base32 v0.1.0 // indirect github.com/multiformats/go-base36 v0.2.0 // indirect - github.com/multiformats/go-multiaddr-dns v0.3.1 // indirect + github.com/multiformats/go-multiaddr-dns v0.4.0 // indirect github.com/multiformats/go-multiaddr-fmt v0.1.0 // indirect github.com/multiformats/go-multistream v0.5.0 // indirect github.com/multiformats/go-varint v0.0.7 // indirect @@ -363,9 +363,9 @@ require ( golang.org/x/net v0.28.0 // indirect golang.org/x/oauth2 v0.22.0 // indirect golang.org/x/sync v0.8.0 // indirect - golang.org/x/sys v0.25.0 // indirect - golang.org/x/term v0.24.0 // indirect - golang.org/x/text v0.18.0 // indirect + golang.org/x/sys v0.26.0 // indirect + golang.org/x/term v0.25.0 // indirect + golang.org/x/text v0.19.0 // indirect golang.org/x/time v0.5.0 // indirect golang.org/x/tools v0.24.0 // indirect gonum.org/v1/gonum v0.15.0 // indirect diff --git a/go.sum b/go.sum index 6151cbdb1e..6c9746ad1b 100644 --- a/go.sum +++ b/go.sum @@ -512,8 +512,8 @@ github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv github.com/fxamacker/cbor/v2 v2.7.0/go.mod 
h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ= github.com/gabriel-vasile/mimetype v1.4.4 h1:QjV6pZ7/XZ7ryI2KuyeEDE8wnh7fHP9YnQy+R0LnH8I= github.com/gabriel-vasile/mimetype v1.4.4/go.mod h1:JwLei5XPtWdGiMFB5Pjle1oEeoSeEuJfJE+TtfvdB/s= -github.com/getkin/kin-openapi v0.127.0 h1:Mghqi3Dhryf3F8vR370nN67pAERW+3a95vomb3MAREY= -github.com/getkin/kin-openapi v0.127.0/go.mod h1:OZrfXzUfGrNbsKj+xmFBx6E5c6yH3At/tAKSc2UszXM= +github.com/getkin/kin-openapi v0.128.0 h1:jqq3D9vC9pPq1dGcOCv7yOp1DaEe7c/T1vzcLbITSp4= +github.com/getkin/kin-openapi v0.128.0/go.mod h1:OZrfXzUfGrNbsKj+xmFBx6E5c6yH3At/tAKSc2UszXM= github.com/getsentry/sentry-go v0.27.0 h1:Pv98CIbtB3LkMWmXi4Joa5OOcwbmnX88sF5qbK3r3Ps= github.com/getsentry/sentry-go v0.27.0/go.mod h1:lc76E2QywIyW8WuBnwl8Lc4bkmQH4+w1gwTf25trprY= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= @@ -854,8 +854,8 @@ github.com/invopop/yaml v0.3.1 h1:f0+ZpmhfBSS4MhG+4HYseMdJhoeeopbSKbq5Rpeelso= github.com/invopop/yaml v0.3.1/go.mod h1:PMOp3nn4/12yEZUFfmOuNHJsZToEEOwoWsT+D81KkeA= github.com/ipfs/bbloom v0.0.4 h1:Gi+8EGJ2y5qiD5FbsbpX/TMNcJw8gSqr7eyjHa4Fhvs= github.com/ipfs/bbloom v0.0.4/go.mod h1:cS9YprKXpoZ9lT0n/Mw/a6/aFV6DTjTLYHeA+gyqMG0= -github.com/ipfs/boxo v0.23.0 h1:dY1PpcvPJ//VuUQ1TUd5TZvmaGuzxJ8dOP6mXaw+ke8= -github.com/ipfs/boxo v0.23.0/go.mod h1:ulu5I6avTmgGmvjuCaBRKwsaOOKjBfQw1EiOOQp8M6E= +github.com/ipfs/boxo v0.24.0 h1:D9gTU3QdxyjPMlJ6QfqhHTG3TIJPplKzjXLO2J30h9U= +github.com/ipfs/boxo v0.24.0/go.mod h1:iP7xUPpHq2QAmVAjwtQvsNBTxTwLpFuy6ZpiRFwmzDA= github.com/ipfs/go-bitfield v1.1.0 h1:fh7FIo8bSwaJEh6DdTWbCeZ1eqOaOkKFI74SCnsWbGA= github.com/ipfs/go-bitfield v1.1.0/go.mod h1:paqf1wjq/D2BBmzfTVFlJQ9IlFOZpg422HL0HqsGWHU= github.com/ipfs/go-block-format v0.2.0 h1:ZqrkxBA2ICbDRbK8KJs/u0O3dlp6gmAuuXUJNiW1Ycs= @@ -993,16 +993,16 @@ github.com/libp2p/go-cidranger v1.1.0 h1:ewPN8EZ0dd1LSnrtuwd4709PXVcITVeuwbag38y github.com/libp2p/go-cidranger v1.1.0/go.mod h1:KWZTfSr+r9qEo9OkI9/SIEeAtw+NNoU0dXIXt15Okic= github.com/libp2p/go-flow-metrics v0.1.0 h1:0iPhMI8PskQwzh57jB9WxIuIOQ0r+15PChFGkx3Q3WM= github.com/libp2p/go-flow-metrics v0.1.0/go.mod h1:4Xi8MX8wj5aWNDAZttg6UPmc0ZrnFNsMtpsYUClFtro= -github.com/libp2p/go-libp2p v0.36.3 h1:NHz30+G7D8Y8YmznrVZZla0ofVANrvBl2c+oARfMeDQ= -github.com/libp2p/go-libp2p v0.36.3/go.mod h1:4Y5vFyCUiJuluEPmpnKYf6WFx5ViKPUYs/ixe9ANFZ8= +github.com/libp2p/go-libp2p v0.36.4 h1:ZaKyKSHBFbzs6CnAYMhaMc5QgV1UoCN+9WXrg8SEwI4= +github.com/libp2p/go-libp2p v0.36.4/go.mod h1:4Y5vFyCUiJuluEPmpnKYf6WFx5ViKPUYs/ixe9ANFZ8= github.com/libp2p/go-libp2p-asn-util v0.4.1 h1:xqL7++IKD9TBFMgnLPZR6/6iYhawHKHl950SO9L6n94= github.com/libp2p/go-libp2p-asn-util v0.4.1/go.mod h1:d/NI6XZ9qxw67b4e+NgpQexCIiFYJjErASrYW4PFDN8= github.com/libp2p/go-libp2p-gostream v0.6.0 h1:QfAiWeQRce6pqnYfmIVWJFXNdDyfiR/qkCnjyaZUPYU= github.com/libp2p/go-libp2p-gostream v0.6.0/go.mod h1:Nywu0gYZwfj7Jc91PQvbGU8dIpqbQQkjWgDuOrFaRdA= -github.com/libp2p/go-libp2p-kad-dht v0.26.1 h1:AazV3LCImYVkDUGAHx5lIEgZ9iUI2QQKH5GMRQU8uEA= -github.com/libp2p/go-libp2p-kad-dht v0.26.1/go.mod h1:mqRUGJ/+7ziQ3XknU2kKHfsbbgb9xL65DXjPOJwmZF8= -github.com/libp2p/go-libp2p-kbucket v0.6.3 h1:p507271wWzpy2f1XxPzCQG9NiN6R6lHL9GiSErbQQo0= -github.com/libp2p/go-libp2p-kbucket v0.6.3/go.mod h1:RCseT7AH6eJWxxk2ol03xtP9pEHetYSPXOaJnOiD8i0= +github.com/libp2p/go-libp2p-kad-dht v0.27.0 h1:1Ea32tVTPiAfaLpPMbaBWFJgbsi/JpMqC2YBuFdf32o= +github.com/libp2p/go-libp2p-kad-dht v0.27.0/go.mod h1:ixhjLuzaXSGtWsKsXTj7erySNuVC4UP7NO015cRrF14= +github.com/libp2p/go-libp2p-kbucket v0.6.4 
h1:OjfiYxU42TKQSB8t8WYd8MKhYhMJeO2If+NiuKfb6iQ= +github.com/libp2p/go-libp2p-kbucket v0.6.4/go.mod h1:jp6w82sczYaBsAypt5ayACcRJi0lgsba7o4TzJKEfWA= github.com/libp2p/go-libp2p-pubsub v0.12.0 h1:PENNZjSfk8KYxANRlpipdS7+BfLmOl3L2E/6vSNjbdI= github.com/libp2p/go-libp2p-pubsub v0.12.0/go.mod h1:Oi0zw9aw8/Y5GC99zt+Ef2gYAl+0nZlwdJonDyOz/sE= github.com/libp2p/go-libp2p-record v0.2.0 h1:oiNUOCWno2BFuxt3my4i1frNrt7PerzB3queqa1NkQ0= @@ -1059,7 +1059,6 @@ github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzp github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/microcosm-cc/bluemonday v1.0.1/go.mod h1:hsXNsILzKxV+sX77C5b8FSuKF00vh2OMYv+xgHpAMF4= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= -github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= github.com/miekg/dns v1.1.62 h1:cN8OuEF1/x5Rq6Np+h1epln8OiyPWV+lROx9LxcGgIQ= github.com/miekg/dns v1.1.62/go.mod h1:mvDlcItzm+br7MToIKqkglaGhlFMHJ9DTNNWONWXbNQ= github.com/mikioh/tcp v0.0.0-20190314235350-803a9b46060c h1:bzE/A84HN25pxAuk9Eej1Kz9OUelF97nAc82bDquQI8= @@ -1106,11 +1105,10 @@ github.com/multiformats/go-base32 v0.1.0/go.mod h1:Kj3tFY6zNr+ABYMqeUNeGvkIC/UYg github.com/multiformats/go-base36 v0.2.0 h1:lFsAbNOGeKtuKozrtBsAkSVhv1p9D0/qedU9rQyccr0= github.com/multiformats/go-base36 v0.2.0/go.mod h1:qvnKE++v+2MWCfePClUEjE78Z7P2a1UV0xHgWc0hkp4= github.com/multiformats/go-multiaddr v0.1.1/go.mod h1:aMKBKNEYmzmDmxfX88/vz+J5IU55txyt0p4aiWVohjo= -github.com/multiformats/go-multiaddr v0.2.0/go.mod h1:0nO36NvPpyV4QzvTLi/lafl2y95ncPj0vFwVF6k6wJ4= github.com/multiformats/go-multiaddr v0.13.0 h1:BCBzs61E3AGHcYYTv8dqRH43ZfyrqM8RXVPT8t13tLQ= github.com/multiformats/go-multiaddr v0.13.0/go.mod h1:sBXrNzucqkFJhvKOiwwLyqamGa/P5EIXNPLovyhQCII= -github.com/multiformats/go-multiaddr-dns v0.3.1 h1:QgQgR+LQVt3NPTjbrLLpsaT2ufAA2y0Mkk+QRVJbW3A= -github.com/multiformats/go-multiaddr-dns v0.3.1/go.mod h1:G/245BRQ6FJGmryJCrOuTdB37AMA5AMOVuO6NY3JwTk= +github.com/multiformats/go-multiaddr-dns v0.4.0 h1:P76EJ3qzBXpUXZ3twdCDx/kvagMsNo0LMFXpyms/zgU= +github.com/multiformats/go-multiaddr-dns v0.4.0/go.mod h1:7hfthtB4E4pQwirrz+J0CcDUfbWzTqEzVyYKKIKpgkc= github.com/multiformats/go-multiaddr-fmt v0.1.0 h1:WLEFClPycPkp4fnIzoFoV9FVd49/eQsuaL3/CWe167E= github.com/multiformats/go-multiaddr-fmt v0.1.0/go.mod h1:hGtDIW4PU4BqJ50gW2quDuPVjyWNZxToGUh/HwTZYJo= github.com/multiformats/go-multibase v0.2.0 h1:isdYCVLvksgWlMW9OZRYJEa9pZETFivncJHmHnnd87g= @@ -1122,7 +1120,6 @@ github.com/multiformats/go-multihash v0.2.3 h1:7Lyc8XfX/IY2jWb/gI7JP+o7JEq9hOa7B github.com/multiformats/go-multihash v0.2.3/go.mod h1:dXgKXCXjBzdscBLk9JkjINiEsCKRVch90MdaGiKsvSM= github.com/multiformats/go-multistream v0.5.0 h1:5htLSLl7lvJk3xx3qT/8Zm9J4K8vEOf/QGkvOGQAyiE= github.com/multiformats/go-multistream v0.5.0/go.mod h1:n6tMZiwiP2wUsR8DgfDWw1dydlEqV3l6N3/GBsX6ILA= -github.com/multiformats/go-varint v0.0.1/go.mod h1:3Ls8CIEsrijN6+B7PbrXRPxHRPuXSrVKRY101jdMZYE= github.com/multiformats/go-varint v0.0.7 h1:sWSGR+f/eu5ABZA2ZpYKBILXTTs9JWpdEM/nEGOHFS8= github.com/multiformats/go-varint v0.0.7/go.mod h1:r8PUYw/fD/SjBCiKOoDlGF6QawOELpZAu9eioSos/OU= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= @@ -1602,8 +1599,8 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y golang.org/x/crypto v0.8.0/go.mod h1:mRqEX+O9/h5TFCrQhkgjo2yKi0yYA+9ecGkdQoHrywE= golang.org/x/crypto 
v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= -golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A= -golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70= +golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw= +golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -1824,7 +1821,6 @@ golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1872,8 +1868,8 @@ golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34= -golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= +golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -1882,8 +1878,8 @@ golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= -golang.org/x/term v0.24.0 h1:Mh5cbb+Zk2hqqXNO7S1iTjEphVL+jb8ZWaqh/g+JWkM= -golang.org/x/term v0.24.0/go.mod h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8= +golang.org/x/term v0.25.0 h1:WtHI/ltw4NvSUig5KARz9h521QvRC8RmF/cuYqifU24= +golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ 
-1898,8 +1894,8 @@ golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224= -golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM= +golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -2206,8 +2202,8 @@ google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACu google.golang.org/grpc v1.49.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= google.golang.org/grpc v1.50.0/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= google.golang.org/grpc v1.50.1/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI= -google.golang.org/grpc v1.67.0 h1:IdH9y6PF5MPSdAntIcpjQ+tXO41pcQsfZV2RxtQgVcw= -google.golang.org/grpc v1.67.0/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA= +google.golang.org/grpc v1.67.1 h1:zWnc1Vrcno+lHZCOofnIMvycFcc0QRGIzm9dhnDX68E= +google.golang.org/grpc v1.67.1/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= diff --git a/playground/package-lock.json b/playground/package-lock.json index a08f776df2..2dedfb6b4a 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -15,13 +15,13 @@ "swagger-ui-react": "^5.17.14" }, "devDependencies": { - "@types/react": "^18.3.10", + "@types/react": "^18.3.11", "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^8.7.0", - "@typescript-eslint/parser": "^8.7.0", + "@typescript-eslint/eslint-plugin": "^8.8.0", + "@typescript-eslint/parser": "^8.8.0", "@vitejs/plugin-react-swc": "^3.7.1", - "eslint": "^9.11.1", + "eslint": "^9.12.0", "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-react-refresh": "^0.4.12", "typescript": "^5.6.2", @@ -616,9 +616,9 @@ } }, "node_modules/@eslint/js": { - "version": "9.11.1", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.11.1.tgz", - "integrity": "sha512-/qu+TWz8WwPWc7/HcIJKi+c+MOm46GdVaSlTTQcaqaL53+GsoA6MxWp5PtTx48qbSP7ylM1Kn7nhvkugfJvRSA==", + "version": "9.12.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.12.0.tgz", + "integrity": "sha512-eohesHH8WFRUprDNyEREgqP6beG6htMeUYeCpkEgBCieCMme5r9zFWjzAJp//9S+Kub4rqE+jXe9Cp1a7IYIIA==", "dev": true, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -741,6 +741,28 @@ "react-dom": "^16 || ^17 || ^18" } }, + "node_modules/@humanfs/core": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.0.tgz", + "integrity": 
"sha512-2cbWIHbZVEweE853g8jymffCA+NCMiuqeECeBBLm8dg2oFdjuGJhgN4UAbI+6v0CKbbhvtXA4qV8YR5Ji86nmw==", + "dev": true, + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanfs/node": { + "version": "0.16.5", + "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.5.tgz", + "integrity": "sha512-KSPA4umqSG4LHYRodq31VDwKAvaTF4xmVlzM8Aeh4PlU1JQ3IG0wiA8C25d3RQ9nJyM3mBHyI53K06VVL/oFFg==", + "dev": true, + "dependencies": { + "@humanfs/core": "^0.19.0", + "@humanwhocodes/retry": "^0.3.0" + }, + "engines": { + "node": ">=18.18.0" + } + }, "node_modules/@humanwhocodes/module-importer": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", @@ -756,11 +778,10 @@ } }, "node_modules/@humanwhocodes/retry": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.0.tgz", - "integrity": "sha512-d2CGZR2o7fS6sWB7DG/3a95bGKQyHMACZ5aW8qGkkqQpUoZV6C0X7Pc7l4ZNMZkfNBf4VWNe9E1jRsf0G146Ew==", + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", + "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", "dev": true, - "license": "Apache-2.0", "engines": { "node": ">=18.18" }, @@ -2454,9 +2475,9 @@ } }, "node_modules/@types/react": { - "version": "18.3.10", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.10.tgz", - "integrity": "sha512-02sAAlBnP39JgXwkAq3PeU9DVaaGpZyF3MGcC0MKgQVkZor5IiiDAipVaxQHtDJAmO4GIy/rVBy/LzVj76Cyqg==", + "version": "18.3.11", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.11.tgz", + "integrity": "sha512-r6QZ069rFTjrEYgFdOck1gK7FLVsgJE7tTz0pQBczlBNUhBNk0MQH4UbnFSwjpQLMkLzgqvBBa+qGpLje16eTQ==", "devOptional": true, "dependencies": { "@types/prop-types": "*", @@ -2504,16 +2525,16 @@ "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.7.0.tgz", - "integrity": "sha512-RIHOoznhA3CCfSTFiB6kBGLQtB/sox+pJ6jeFu6FxJvqL8qRxq/FfGO/UhsGgQM9oGdXkV4xUgli+dt26biB6A==", + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.8.0.tgz", + "integrity": "sha512-wORFWjU30B2WJ/aXBfOm1LX9v9nyt9D3jsSOxC3cCaTQGCW5k4jNpmjFv3U7p/7s4yvdjHzwtv2Sd2dOyhjS0A==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.7.0", - "@typescript-eslint/type-utils": "8.7.0", - "@typescript-eslint/utils": "8.7.0", - "@typescript-eslint/visitor-keys": "8.7.0", + "@typescript-eslint/scope-manager": "8.8.0", + "@typescript-eslint/type-utils": "8.8.0", + "@typescript-eslint/utils": "8.8.0", + "@typescript-eslint/visitor-keys": "8.8.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", @@ -2536,16 +2557,63 @@ } } }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.8.0.tgz", + "integrity": "sha512-EL8eaGC6gx3jDd8GwEFEV091210U97J0jeEHrAYvIYosmEGet4wJ+g0SYmLu+oRiAwbSA5AVrt6DxLHfdd+bUg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.8.0", + "@typescript-eslint/visitor-keys": "8.8.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.8.0.tgz", + "integrity": "sha512-QJwc50hRCgBd/k12sTykOJbESe1RrzmX6COk8Y525C9l7oweZ+1lw9JiU56im7Amm8swlz00DRIlxMYLizr2Vw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.8.0.tgz", + "integrity": "sha512-8mq51Lx6Hpmd7HnA2fcHQo3YgfX1qbccxQOgZcb4tvasu//zXRaA1j5ZRFeCw/VRAdFi4mRM9DnZw0Nu0Q2d1g==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.8.0", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/parser": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.7.0.tgz", - "integrity": "sha512-lN0btVpj2unxHlNYLI//BQ7nzbMJYBVQX5+pbNXvGYazdlgYonMn4AhhHifQ+J4fGRYA/m1DjaQjx+fDetqBOQ==", + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.8.0.tgz", + "integrity": "sha512-uEFUsgR+tl8GmzmLjRqz+VrDv4eoaMqMXW7ruXfgThaAShO9JTciKpEsB+TvnfFfbg5IpujgMXVV36gOJRLtZg==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "8.7.0", - "@typescript-eslint/types": "8.7.0", - "@typescript-eslint/typescript-estree": "8.7.0", - "@typescript-eslint/visitor-keys": "8.7.0", + "@typescript-eslint/scope-manager": "8.8.0", + "@typescript-eslint/types": "8.8.0", + "@typescript-eslint/typescript-estree": "8.8.0", + "@typescript-eslint/visitor-keys": "8.8.0", "debug": "^4.3.4" }, "engines": { @@ -2564,6 +2632,81 @@ } } }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.8.0.tgz", + "integrity": "sha512-EL8eaGC6gx3jDd8GwEFEV091210U97J0jeEHrAYvIYosmEGet4wJ+g0SYmLu+oRiAwbSA5AVrt6DxLHfdd+bUg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.8.0", + "@typescript-eslint/visitor-keys": "8.8.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.8.0.tgz", + "integrity": "sha512-QJwc50hRCgBd/k12sTykOJbESe1RrzmX6COk8Y525C9l7oweZ+1lw9JiU56im7Amm8swlz00DRIlxMYLizr2Vw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.8.0.tgz", + "integrity": 
"sha512-ZaMJwc/0ckLz5DaAZ+pNLmHv8AMVGtfWxZe/x2JVEkD5LnmhWiQMMcYT7IY7gkdJuzJ9P14fRy28lUrlDSWYdw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.8.0", + "@typescript-eslint/visitor-keys": "8.8.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.8.0.tgz", + "integrity": "sha512-8mq51Lx6Hpmd7HnA2fcHQo3YgfX1qbccxQOgZcb4tvasu//zXRaA1j5ZRFeCw/VRAdFi4mRM9DnZw0Nu0Q2d1g==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.8.0", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/scope-manager": { "version": "8.7.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.7.0.tgz", @@ -2582,13 +2725,13 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.7.0.tgz", - "integrity": "sha512-tl0N0Mj3hMSkEYhLkjREp54OSb/FI6qyCzfiiclvJvOqre6hsZTGSnHtmFLDU8TIM62G7ygEa1bI08lcuRwEnQ==", + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.8.0.tgz", + "integrity": "sha512-IKwJSS7bCqyCeG4NVGxnOP6lLT9Okc3Zj8hLO96bpMkJab+10HIfJbMouLrlpyOr3yrQ1cA413YPFiGd1mW9/Q==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "8.7.0", - "@typescript-eslint/utils": "8.7.0", + "@typescript-eslint/typescript-estree": "8.8.0", + "@typescript-eslint/utils": "8.8.0", "debug": "^4.3.4", "ts-api-utils": "^1.3.0" }, @@ -2605,6 +2748,64 @@ } } }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.8.0.tgz", + "integrity": "sha512-QJwc50hRCgBd/k12sTykOJbESe1RrzmX6COk8Y525C9l7oweZ+1lw9JiU56im7Amm8swlz00DRIlxMYLizr2Vw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.8.0.tgz", + "integrity": "sha512-ZaMJwc/0ckLz5DaAZ+pNLmHv8AMVGtfWxZe/x2JVEkD5LnmhWiQMMcYT7IY7gkdJuzJ9P14fRy28lUrlDSWYdw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.8.0", + "@typescript-eslint/visitor-keys": "8.8.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": 
{ + "optional": true + } + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.8.0.tgz", + "integrity": "sha512-8mq51Lx6Hpmd7HnA2fcHQo3YgfX1qbccxQOgZcb4tvasu//zXRaA1j5ZRFeCw/VRAdFi4mRM9DnZw0Nu0Q2d1g==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.8.0", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/types": { "version": "8.7.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.7.0.tgz", @@ -2647,15 +2848,15 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.7.0.tgz", - "integrity": "sha512-ZbdUdwsl2X/s3CiyAu3gOlfQzpbuG3nTWKPoIvAu1pu5r8viiJvv2NPN2AqArL35NCYtw/lrPPfM4gxrMLNLPw==", + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.8.0.tgz", + "integrity": "sha512-QE2MgfOTem00qrlPgyByaCHay9yb1+9BjnMFnSFkUKQfu7adBXDTnCAivURnuPPAG/qiB+kzKkZKmKfaMT0zVg==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", - "@typescript-eslint/scope-manager": "8.7.0", - "@typescript-eslint/types": "8.7.0", - "@typescript-eslint/typescript-estree": "8.7.0" + "@typescript-eslint/scope-manager": "8.8.0", + "@typescript-eslint/types": "8.8.0", + "@typescript-eslint/typescript-estree": "8.8.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2668,6 +2869,81 @@ "eslint": "^8.57.0 || ^9.0.0" } }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.8.0.tgz", + "integrity": "sha512-EL8eaGC6gx3jDd8GwEFEV091210U97J0jeEHrAYvIYosmEGet4wJ+g0SYmLu+oRiAwbSA5AVrt6DxLHfdd+bUg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.8.0", + "@typescript-eslint/visitor-keys": "8.8.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.8.0.tgz", + "integrity": "sha512-QJwc50hRCgBd/k12sTykOJbESe1RrzmX6COk8Y525C9l7oweZ+1lw9JiU56im7Amm8swlz00DRIlxMYLizr2Vw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.8.0.tgz", + "integrity": "sha512-ZaMJwc/0ckLz5DaAZ+pNLmHv8AMVGtfWxZe/x2JVEkD5LnmhWiQMMcYT7IY7gkdJuzJ9P14fRy28lUrlDSWYdw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.8.0", + "@typescript-eslint/visitor-keys": "8.8.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 
|| >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.8.0.tgz", + "integrity": "sha512-8mq51Lx6Hpmd7HnA2fcHQo3YgfX1qbccxQOgZcb4tvasu//zXRaA1j5ZRFeCw/VRAdFi4mRM9DnZw0Nu0Q2d1g==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.8.0", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/visitor-keys": { "version": "8.7.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.7.0.tgz", @@ -2737,16 +3013,6 @@ "url": "https://github.com/sponsors/epoberezkin" } }, - "node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -3290,9 +3556,9 @@ } }, "node_modules/eslint": { - "version": "9.11.1", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.11.1.tgz", - "integrity": "sha512-MobhYKIoAO1s1e4VUrgx1l1Sk2JBR/Gqjjgw8+mfgoLE2xwsHur4gdfTxyTgShrhvdVFTaJSgMiQBl1jv/AWxg==", + "version": "9.12.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.12.0.tgz", + "integrity": "sha512-UVIOlTEWxwIopRL1wgSQYdnVDcEvs2wyaO6DGo5mXqe3r16IoCNWkR29iHhyaP4cICWjbgbmFUGAhh0GJRuGZw==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", @@ -3300,11 +3566,11 @@ "@eslint/config-array": "^0.18.0", "@eslint/core": "^0.6.0", "@eslint/eslintrc": "^3.1.0", - "@eslint/js": "9.11.1", + "@eslint/js": "9.12.0", "@eslint/plugin-kit": "^0.2.0", + "@humanfs/node": "^0.16.5", "@humanwhocodes/module-importer": "^1.0.1", - "@humanwhocodes/retry": "^0.3.0", - "@nodelib/fs.walk": "^1.2.8", + "@humanwhocodes/retry": "^0.3.1", "@types/estree": "^1.0.6", "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", @@ -3312,9 +3578,9 @@ "cross-spawn": "^7.0.2", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", - "eslint-scope": "^8.0.2", - "eslint-visitor-keys": "^4.0.0", - "espree": "^10.1.0", + "eslint-scope": "^8.1.0", + "eslint-visitor-keys": "^4.1.0", + "espree": "^10.2.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", @@ -3324,13 +3590,11 @@ "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", - "is-path-inside": "^3.0.3", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3", - "strip-ansi": "^6.0.1", "text-table": "^0.2.0" }, "bin": { @@ -3374,11 +3638,10 @@ } }, "node_modules/eslint-scope": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.0.2.tgz", - "integrity": "sha512-6E4xmrTw5wtxnLA5wYL3WDfhZ/1bUBGOXV0zQvVRDOtrR8D0p6W7fs3JweNYhwRYeGvd/1CKX2se0/2s7Q/nJA==", + "version": "8.1.0", + "resolved": 
"https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.1.0.tgz", + "integrity": "sha512-14dSvlhaVhKKsa9Fx1l8A17s7ah7Ef7wCakJ10LYk6+GYmP9yDti2oq2SEwcyndt6knfcZyhyxwY3i9yL78EQw==", "dev": true, - "license": "BSD-2-Clause", "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^5.2.0" @@ -3421,11 +3684,10 @@ } }, "node_modules/eslint/node_modules/eslint-visitor-keys": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.0.0.tgz", - "integrity": "sha512-OtIRv/2GyiF6o/d8K7MYKKbXrOUBIK6SfkIRM4Z0dY3w+LiQ0vy3F57m0Z71bjbyeiWFiHJ8brqnmE6H6/jEuw==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.1.0.tgz", + "integrity": "sha512-Q7lok0mqMUSf5a/AdAZkA5a/gHcO6snwQClVNNvFKCAVlxXucdU8pKydU5ZVZjBx5xr37vGbFFWtLQYreLzrZg==", "dev": true, - "license": "Apache-2.0", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, @@ -3447,15 +3709,14 @@ } }, "node_modules/espree": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/espree/-/espree-10.1.0.tgz", - "integrity": "sha512-M1M6CpiE6ffoigIOWYO9UDP8TMUw9kqb21tf+08IgDYjCsOvCuDt4jQcZmoYxx+w7zlKw9/N0KXfto+I8/FrXA==", + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.2.0.tgz", + "integrity": "sha512-upbkBJbckcCNBDBDXEbuhjbP68n+scUd3k/U2EkyM9nw+I/jPiL4cLF/Al06CF96wRltFda16sxDFrxsI1v0/g==", "dev": true, - "license": "BSD-2-Clause", "dependencies": { "acorn": "^8.12.0", "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^4.0.0" + "eslint-visitor-keys": "^4.1.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -3465,11 +3726,10 @@ } }, "node_modules/espree/node_modules/eslint-visitor-keys": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.0.0.tgz", - "integrity": "sha512-OtIRv/2GyiF6o/d8K7MYKKbXrOUBIK6SfkIRM4Z0dY3w+LiQ0vy3F57m0Z71bjbyeiWFiHJ8brqnmE6H6/jEuw==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.1.0.tgz", + "integrity": "sha512-Q7lok0mqMUSf5a/AdAZkA5a/gHcO6snwQClVNNvFKCAVlxXucdU8pKydU5ZVZjBx5xr37vGbFFWtLQYreLzrZg==", "dev": true, - "license": "Apache-2.0", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, @@ -3495,7 +3755,6 @@ "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", "dev": true, - "license": "BSD-2-Clause", "dependencies": { "estraverse": "^5.2.0" }, @@ -4082,16 +4341,6 @@ "node": ">=0.12.0" } }, - "node_modules/is-path-inside": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", - "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/is-plain-object": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", @@ -5527,19 +5776,6 @@ "safe-buffer": "~5.2.0" } }, - "node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, 
"node_modules/strip-json-comments": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", diff --git a/playground/package.json b/playground/package.json index 658c97c8d2..c895a0496b 100644 --- a/playground/package.json +++ b/playground/package.json @@ -17,13 +17,13 @@ "swagger-ui-react": "^5.17.14" }, "devDependencies": { - "@types/react": "^18.3.10", + "@types/react": "^18.3.11", "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^8.7.0", - "@typescript-eslint/parser": "^8.7.0", + "@typescript-eslint/eslint-plugin": "^8.8.0", + "@typescript-eslint/parser": "^8.8.0", "@vitejs/plugin-react-swc": "^3.7.1", - "eslint": "^9.11.1", + "eslint": "^9.12.0", "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-react-refresh": "^0.4.12", "typescript": "^5.6.2", From 8edd48ae44fa0cd00973612bf068bc7b198afd83 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 01:15:17 -0400 Subject: [PATCH 57/71] bot: Bump @typescript-eslint/parser from 8.8.0 to 8.8.1 in /playground (#3120) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [@typescript-eslint/parser](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/parser) from 8.8.0 to 8.8.1.
Release notes

Sourced from @​typescript-eslint/parser's releases.

v8.8.1

8.8.1 (2024-10-07)

🩹 Fixes

  • eslint-plugin: stop warning on @​ts-nocheck comments which aren't at the beginning of the file (#10046)
  • typescript-estree: fix crash when running from a node --eval script (#10098)
  • typescript-estree: ensure mjs/mts files are always parsed as ESM (#10011)

❤️ Thank You

You can read about our versioning strategy and releases on our website.

Changelog

Sourced from @​typescript-eslint/parser's changelog.

8.8.1 (2024-10-07)

This was a version bump only for parser to align it with other projects; there were no code changes.

You can read about our versioning strategy and releases on our website.

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@typescript-eslint/parser&package-manager=npm_and_yarn&previous-version=8.8.0&new-version=8.8.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.

[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)

---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 125 +++++++---------------------------- playground/package.json | 2 +- 2 files changed, 26 insertions(+), 101 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 2dedfb6b4a..f618fc978e 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -19,7 +19,7 @@ "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^8.8.0", - "@typescript-eslint/parser": "^8.8.0", + "@typescript-eslint/parser": "^8.8.1", "@vitejs/plugin-react-swc": "^3.7.1", "eslint": "^9.12.0", "eslint-plugin-react-hooks": "^4.6.2", @@ -2605,15 +2605,15 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.8.0.tgz", - "integrity": "sha512-uEFUsgR+tl8GmzmLjRqz+VrDv4eoaMqMXW7ruXfgThaAShO9JTciKpEsB+TvnfFfbg5IpujgMXVV36gOJRLtZg==", + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.8.1.tgz", + "integrity": "sha512-hQUVn2Lij2NAxVFEdvIGxT9gP1tq2yM83m+by3whWFsWC+1y8pxxxHUFE1UqDu2VsGi2i6RLcv4QvouM84U+ow==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "8.8.0", - "@typescript-eslint/types": "8.8.0", - "@typescript-eslint/typescript-estree": "8.8.0", - "@typescript-eslint/visitor-keys": "8.8.0", + "@typescript-eslint/scope-manager": "8.8.1", + "@typescript-eslint/types": "8.8.1", + "@typescript-eslint/typescript-estree": "8.8.1", + "@typescript-eslint/visitor-keys": "8.8.1", "debug": "^4.3.4" }, "engines": { @@ -2632,89 +2632,14 @@ } } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.8.0.tgz", - "integrity": "sha512-EL8eaGC6gx3jDd8GwEFEV091210U97J0jeEHrAYvIYosmEGet4wJ+g0SYmLu+oRiAwbSA5AVrt6DxLHfdd+bUg==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.8.0", - "@typescript-eslint/visitor-keys": "8.8.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.8.0.tgz", - "integrity": "sha512-QJwc50hRCgBd/k12sTykOJbESe1RrzmX6COk8Y525C9l7oweZ+1lw9JiU56im7Amm8swlz00DRIlxMYLizr2Vw==", - "dev": true, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.8.0.tgz", - "integrity": "sha512-ZaMJwc/0ckLz5DaAZ+pNLmHv8AMVGtfWxZe/x2JVEkD5LnmhWiQMMcYT7IY7gkdJuzJ9P14fRy28lUrlDSWYdw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.8.0", - "@typescript-eslint/visitor-keys": "8.8.0", - "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^1.3.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - 
"funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.8.0.tgz", - "integrity": "sha512-8mq51Lx6Hpmd7HnA2fcHQo3YgfX1qbccxQOgZcb4tvasu//zXRaA1j5ZRFeCw/VRAdFi4mRM9DnZw0Nu0Q2d1g==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.8.0", - "eslint-visitor-keys": "^3.4.3" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/scope-manager": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.7.0.tgz", - "integrity": "sha512-87rC0k3ZlDOuz82zzXRtQ7Akv3GKhHs0ti4YcbAJtaomllXoSO8hi7Ix3ccEvCd824dy9aIX+j3d2UMAfCtVpg==", + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.8.1.tgz", + "integrity": "sha512-X4JdU+66Mazev/J0gfXlcC/dV6JI37h+93W9BRYXrSn0hrE64IoWgVkO9MSJgEzoWkxONgaQpICWg8vAN74wlA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "8.7.0", - "@typescript-eslint/visitor-keys": "8.7.0" + "@typescript-eslint/types": "8.8.1", + "@typescript-eslint/visitor-keys": "8.8.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2807,9 +2732,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.7.0.tgz", - "integrity": "sha512-LLt4BLHFwSfASHSF2K29SZ+ZCsbQOM+LuarPjRUuHm+Qd09hSe3GCeaQbcCr+Mik+0QFRmep/FyZBO6fJ64U3w==", + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.8.1.tgz", + "integrity": "sha512-WCcTP4SDXzMd23N27u66zTKMuEevH4uzU8C9jf0RO4E04yVHgQgW+r+TeVTNnO1KIfrL8ebgVVYYMMO3+jC55Q==", "dev": true, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2820,13 +2745,13 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.7.0.tgz", - "integrity": "sha512-MC8nmcGHsmfAKxwnluTQpNqceniT8SteVwd2voYlmiSWGOtjvGXdPl17dYu2797GVscK30Z04WRM28CrKS9WOg==", + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.8.1.tgz", + "integrity": "sha512-A5d1R9p+X+1js4JogdNilDuuq+EHZdsH9MjTVxXOdVFfTJXunKJR/v+fNNyO4TnoOn5HqobzfRlc70NC6HTcdg==", "dev": true, "dependencies": { - "@typescript-eslint/types": "8.7.0", - "@typescript-eslint/visitor-keys": "8.7.0", + "@typescript-eslint/types": "8.8.1", + "@typescript-eslint/visitor-keys": "8.8.1", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", @@ -2945,12 +2870,12 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.7.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.7.0.tgz", - "integrity": "sha512-b1tx0orFCCh/THWPQa2ZwWzvOeyzzp36vkJYOpVg0u8UVOIsfVrnuC9FqAw9gRKn+rG2VmWQ/zDJZzkxUnj/XQ==", + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.8.1.tgz", + "integrity": 
"sha512-0/TdC3aeRAsW7MDvYRwEc1Uwm0TIBfzjPFgg60UU2Haj5qsCs9cc3zNgY71edqE3LbWfF/WoZQd3lJoDXFQpag==", "dev": true, "dependencies": { - "@typescript-eslint/types": "8.7.0", + "@typescript-eslint/types": "8.8.1", "eslint-visitor-keys": "^3.4.3" }, "engines": { diff --git a/playground/package.json b/playground/package.json index c895a0496b..2ace966a0f 100644 --- a/playground/package.json +++ b/playground/package.json @@ -21,7 +21,7 @@ "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^8.8.0", - "@typescript-eslint/parser": "^8.8.0", + "@typescript-eslint/parser": "^8.8.1", "@vitejs/plugin-react-swc": "^3.7.1", "eslint": "^9.12.0", "eslint-plugin-react-hooks": "^4.6.2", From 655e1a43bf10fd5f187ca437b689e4e923cd7080 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 04:28:27 -0400 Subject: [PATCH 58/71] bot: Bump @typescript-eslint/eslint-plugin from 8.8.0 to 8.8.1 in /playground (#3121) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [@typescript-eslint/eslint-plugin](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/eslint-plugin) from 8.8.0 to 8.8.1.
Release notes

Sourced from @​typescript-eslint/eslint-plugin's releases.

v8.8.1

8.8.1 (2024-10-07)

🩹 Fixes

  • eslint-plugin: stop warning on @​ts-nocheck comments which aren't at the beginning of the file (#10046)
  • typescript-estree: fix crash when running from a node --eval script (#10098)
  • typescript-estree: ensure mjs/mts files are always parsed as ESM (#10011)

❤️ Thank You

You can read about our versioning strategy and releases on our website.

Changelog

Sourced from @​typescript-eslint/eslint-plugin's changelog.

8.8.1 (2024-10-07)

🩹 Fixes

  • eslint-plugin: stop warning on @​ts-nocheck comments which aren't at the beginning of the file

❤️ Thank You

  • Brad Zacher
  • Ronen Amiel
  • WhitePiano

You can read about our versioning strategy and releases on our website.

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@typescript-eslint/eslint-plugin&package-manager=npm_and_yarn&previous-version=8.8.0&new-version=8.8.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.

[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)

---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 218 +++-------------------------------- playground/package.json | 2 +- 2 files changed, 20 insertions(+), 200 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index f618fc978e..af40f82bc2 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -18,7 +18,7 @@ "@types/react": "^18.3.11", "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^8.8.0", + "@typescript-eslint/eslint-plugin": "^8.8.1", "@typescript-eslint/parser": "^8.8.1", "@vitejs/plugin-react-swc": "^3.7.1", "eslint": "^9.12.0", @@ -2525,16 +2525,16 @@ "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.8.0.tgz", - "integrity": "sha512-wORFWjU30B2WJ/aXBfOm1LX9v9nyt9D3jsSOxC3cCaTQGCW5k4jNpmjFv3U7p/7s4yvdjHzwtv2Sd2dOyhjS0A==", + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.8.1.tgz", + "integrity": "sha512-xfvdgA8AP/vxHgtgU310+WBnLB4uJQ9XdyP17RebG26rLtDrQJV3ZYrcopX91GrHmMoH8bdSwMRh2a//TiJ1jQ==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.8.0", - "@typescript-eslint/type-utils": "8.8.0", - "@typescript-eslint/utils": "8.8.0", - "@typescript-eslint/visitor-keys": "8.8.0", + "@typescript-eslint/scope-manager": "8.8.1", + "@typescript-eslint/type-utils": "8.8.1", + "@typescript-eslint/utils": "8.8.1", + "@typescript-eslint/visitor-keys": "8.8.1", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", @@ -2557,53 +2557,6 @@ } } }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.8.0.tgz", - "integrity": "sha512-EL8eaGC6gx3jDd8GwEFEV091210U97J0jeEHrAYvIYosmEGet4wJ+g0SYmLu+oRiAwbSA5AVrt6DxLHfdd+bUg==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.8.0", - "@typescript-eslint/visitor-keys": "8.8.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.8.0.tgz", - "integrity": "sha512-QJwc50hRCgBd/k12sTykOJbESe1RrzmX6COk8Y525C9l7oweZ+1lw9JiU56im7Amm8swlz00DRIlxMYLizr2Vw==", - "dev": true, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.8.0.tgz", - "integrity": "sha512-8mq51Lx6Hpmd7HnA2fcHQo3YgfX1qbccxQOgZcb4tvasu//zXRaA1j5ZRFeCw/VRAdFi4mRM9DnZw0Nu0Q2d1g==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.8.0", - "eslint-visitor-keys": "^3.4.3" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - 
"url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/parser": { "version": "8.8.1", "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.8.1.tgz", @@ -2650,55 +2603,14 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.8.0.tgz", - "integrity": "sha512-IKwJSS7bCqyCeG4NVGxnOP6lLT9Okc3Zj8hLO96bpMkJab+10HIfJbMouLrlpyOr3yrQ1cA413YPFiGd1mW9/Q==", - "dev": true, - "dependencies": { - "@typescript-eslint/typescript-estree": "8.8.0", - "@typescript-eslint/utils": "8.8.0", - "debug": "^4.3.4", - "ts-api-utils": "^1.3.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.8.0.tgz", - "integrity": "sha512-QJwc50hRCgBd/k12sTykOJbESe1RrzmX6COk8Y525C9l7oweZ+1lw9JiU56im7Amm8swlz00DRIlxMYLizr2Vw==", - "dev": true, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.8.0.tgz", - "integrity": "sha512-ZaMJwc/0ckLz5DaAZ+pNLmHv8AMVGtfWxZe/x2JVEkD5LnmhWiQMMcYT7IY7gkdJuzJ9P14fRy28lUrlDSWYdw==", + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.8.1.tgz", + "integrity": "sha512-qSVnpcbLP8CALORf0za+vjLYj1Wp8HSoiI8zYU5tHxRVj30702Z1Yw4cLwfNKhTPWp5+P+k1pjmD5Zd1nhxiZA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "8.8.0", - "@typescript-eslint/visitor-keys": "8.8.0", + "@typescript-eslint/typescript-estree": "8.8.1", + "@typescript-eslint/utils": "8.8.1", "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", "ts-api-utils": "^1.3.0" }, "engines": { @@ -2714,23 +2626,6 @@ } } }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.8.0.tgz", - "integrity": "sha512-8mq51Lx6Hpmd7HnA2fcHQo3YgfX1qbccxQOgZcb4tvasu//zXRaA1j5ZRFeCw/VRAdFi4mRM9DnZw0Nu0Q2d1g==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.8.0", - "eslint-visitor-keys": "^3.4.3" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/types": { "version": "8.8.1", "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.8.1.tgz", @@ -2773,15 +2668,15 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.8.0.tgz", - "integrity": "sha512-QE2MgfOTem00qrlPgyByaCHay9yb1+9BjnMFnSFkUKQfu7adBXDTnCAivURnuPPAG/qiB+kzKkZKmKfaMT0zVg==", + "version": "8.8.1", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.8.1.tgz", + "integrity": "sha512-/QkNJDbV0bdL7H7d0/y0qBbV2HTtf0TIyjSDTvvmQEzeVx8jEImEbLuOA4EsvE8gIgqMitns0ifb5uQhMj8d9w==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", - "@typescript-eslint/scope-manager": "8.8.0", - "@typescript-eslint/types": "8.8.0", - "@typescript-eslint/typescript-estree": "8.8.0" + "@typescript-eslint/scope-manager": "8.8.1", + "@typescript-eslint/types": "8.8.1", + "@typescript-eslint/typescript-estree": "8.8.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2794,81 +2689,6 @@ "eslint": "^8.57.0 || ^9.0.0" } }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.8.0.tgz", - "integrity": "sha512-EL8eaGC6gx3jDd8GwEFEV091210U97J0jeEHrAYvIYosmEGet4wJ+g0SYmLu+oRiAwbSA5AVrt6DxLHfdd+bUg==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.8.0", - "@typescript-eslint/visitor-keys": "8.8.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.8.0.tgz", - "integrity": "sha512-QJwc50hRCgBd/k12sTykOJbESe1RrzmX6COk8Y525C9l7oweZ+1lw9JiU56im7Amm8swlz00DRIlxMYLizr2Vw==", - "dev": true, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.8.0.tgz", - "integrity": "sha512-ZaMJwc/0ckLz5DaAZ+pNLmHv8AMVGtfWxZe/x2JVEkD5LnmhWiQMMcYT7IY7gkdJuzJ9P14fRy28lUrlDSWYdw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.8.0", - "@typescript-eslint/visitor-keys": "8.8.0", - "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^1.3.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.8.0.tgz", - "integrity": "sha512-8mq51Lx6Hpmd7HnA2fcHQo3YgfX1qbccxQOgZcb4tvasu//zXRaA1j5ZRFeCw/VRAdFi4mRM9DnZw0Nu0Q2d1g==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "8.8.0", - "eslint-visitor-keys": "^3.4.3" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/visitor-keys": { "version": "8.8.1", "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.8.1.tgz", diff --git a/playground/package.json b/playground/package.json index 2ace966a0f..a1bd34ea88 100644 --- a/playground/package.json +++ 
b/playground/package.json @@ -20,7 +20,7 @@ "@types/react": "^18.3.11", "@types/react-dom": "^18.3.0", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^8.8.0", + "@typescript-eslint/eslint-plugin": "^8.8.1", "@typescript-eslint/parser": "^8.8.1", "@vitejs/plugin-react-swc": "^3.7.1", "eslint": "^9.12.0", From bc68f57c258cd39fea86d041adfb7a0c77095cf4 Mon Sep 17 00:00:00 2001 From: Shahzad Lone Date: Tue, 8 Oct 2024 13:58:56 -0400 Subject: [PATCH 59/71] feat: Inherit `read` permission if only `write` access (#3108) ## Relevant issue(s) Resolves #2992 ## Description An actor granted a write permission still couldn't write unless also given `read` permission Example Policy where reader can strictly only read and writer can strictly only write: ```yaml name: Test Policy description: A Policy actor: name: actor resources: users: permissions: read: expr: owner + reader write: expr: owner + writer relations: owner: types: - actor reader: types: - actor writer: types: - actor ``` Then the policy above (assume `XYZ` is resulting `policyID`) is linked in a schema that is loaded: ```gql type Users @policy(id: XYZ, resource: "users") { name: String age: Int } ``` Now if the `owner` (index `1`) makes a relationship giving `write` access to the `second` actor (index `2`) in our testing frame work like syntax: ```go testUtils.AddDocActorRelationship{ DocID: 0, RequestorIdentity: 1, TargetIdentity: 2, Relation: "writer", } ``` The identity `2` still could not mutate due to lack of read permission. ```go testUtils.UpdateDoc{ Identity: immutable.Some(2), // This identity can still not update. DocID: 0, Doc: ` { "name": "Shahzad Lone" } `, ExpectedError: "document not found or not authorized to access", } ``` Some existing tests that documented this have now been updated with the new behavior: - `TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_OtherActorCantUpdate` -> `TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_OtherActorCanUpdate` ## How has this been tested? - CI & Fixed the asserted test that documented this behavior --- acp/acp_local_test.go | 30 +++++- acp/dpi.go | 8 ++ acp/errors.go | 2 + acp/source_hub_client.go | 94 ++++++++++++++----- .../doc_actor/add/with_only_write_gql_test.go | 18 ++-- .../doc_actor/add/with_only_write_test.go | 52 +++++++--- 6 files changed, 159 insertions(+), 45 deletions(-) diff --git a/acp/acp_local_test.go b/acp/acp_local_test.go index fce65e9974..b3763da2a9 100644 --- a/acp/acp_local_test.go +++ b/acp/acp_local_test.go @@ -482,7 +482,7 @@ func Test_LocalACP_InMemory_CheckDocAccess_TrueIfHaveAccessFalseIfNotErrorOtherw policyID, ) - // Invalid empty arguments such that we can't check doc access. + // Invalid empty arguments such that we can't check doc access (read). hasAccess, errCheckDocAccess := localACP.CheckDocAccess( ctx, ReadPermission, @@ -495,6 +495,19 @@ func Test_LocalACP_InMemory_CheckDocAccess_TrueIfHaveAccessFalseIfNotErrorOtherw require.ErrorIs(t, errCheckDocAccess, ErrFailedToVerifyDocAccessWithACP) require.False(t, hasAccess) + // Invalid empty arguments such that we can't check doc access (write). + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + WritePermission, + identity1.DID, + validPolicyID, + "", + "", + ) + require.Error(t, errCheckDocAccess) + require.ErrorIs(t, errCheckDocAccess, ErrFailedToVerifyDocAccessWithACP) + require.False(t, hasAccess) + // Check document accesss for a document that does not exist. 
hasAccess, errCheckDocAccess = localACP.CheckDocAccess( ctx, @@ -568,7 +581,7 @@ func Test_LocalACP_PersistentMemory_CheckDocAccess_TrueIfHaveAccessFalseIfNotErr policyID, ) - // Invalid empty arguments such that we can't check doc access. + // Invalid empty arguments such that we can't check doc access (read). hasAccess, errCheckDocAccess := localACP.CheckDocAccess( ctx, ReadPermission, @@ -581,6 +594,19 @@ func Test_LocalACP_PersistentMemory_CheckDocAccess_TrueIfHaveAccessFalseIfNotErr require.ErrorIs(t, errCheckDocAccess, ErrFailedToVerifyDocAccessWithACP) require.False(t, hasAccess) + // Invalid empty arguments such that we can't check doc access (write). + hasAccess, errCheckDocAccess = localACP.CheckDocAccess( + ctx, + WritePermission, + identity1.DID, + validPolicyID, + "", + "", + ) + require.Error(t, errCheckDocAccess) + require.ErrorIs(t, errCheckDocAccess, ErrFailedToVerifyDocAccessWithACP) + require.False(t, hasAccess) + // Check document accesss for a document that does not exist. hasAccess, errCheckDocAccess = localACP.CheckDocAccess( ctx, diff --git a/acp/dpi.go b/acp/dpi.go index 85da972131..c2c18b245a 100644 --- a/acp/dpi.go +++ b/acp/dpi.go @@ -22,6 +22,14 @@ const ( WritePermission ) +// permissionsThatImplyRead is a list of any permissions that if we have, we assume that the user can read. +// This is because for DefraDB's purposes if an identity has access to the write permission, then they don't +// need to explicitly have read permission inorder to read, we can just imply that they have read access. +var permissionsThatImplyRead = []DPIPermission{ + ReadPermission, + WritePermission, +} + // List of all valid DPI permissions, the order of permissions in this list must match // the above defined ordering such that iota matches the index position within the list. var dpiRequiredPermissions = []string{ diff --git a/acp/errors.go b/acp/errors.go index 72fbc00b95..c0e1b9bbec 100644 --- a/acp/errors.go +++ b/acp/errors.go @@ -129,6 +129,7 @@ func NewErrFailedToCheckIfDocIsRegisteredWithACP( func NewErrFailedToVerifyDocAccessWithACP( inner error, Type string, + permission string, policyID string, actorID string, resourceName string, @@ -138,6 +139,7 @@ func NewErrFailedToVerifyDocAccessWithACP( errFailedToVerifyDocAccessWithACP, inner, errors.NewKV("Type", Type), + errors.NewKV("Permission", permission), errors.NewKV("PolicyID", policyID), errors.NewKV("ActorID", actorID), errors.NewKV("ResourceName", resourceName), diff --git a/acp/source_hub_client.go b/acp/source_hub_client.go index b211214d9f..78a0a38925 100644 --- a/acp/source_hub_client.go +++ b/acp/source_hub_client.go @@ -12,6 +12,7 @@ package acp import ( "context" + "strconv" protoTypes "github.com/cosmos/gogoproto/types" "github.com/sourcenetwork/corelog" @@ -342,7 +343,55 @@ func (a *sourceHubBridge) CheckDocAccess( resourceName string, docID string, ) (bool, error) { - isValid, err := a.client.VerifyAccessRequest( + // We grant "read" access even if the identity does not explicitly have the "read" permission, + // as long as they have any of the permissions that imply read access. 
+ if permission == ReadPermission { + var canRead bool = false + var withPermission string + var err error + + for _, permissionThatImpliesRead := range permissionsThatImplyRead { + canRead, err = a.client.VerifyAccessRequest( + ctx, + permissionThatImpliesRead, + actorID, + policyID, + resourceName, + docID, + ) + + if err != nil { + return false, NewErrFailedToVerifyDocAccessWithACP( + err, + "Local", + permissionThatImpliesRead.String(), + policyID, + actorID, + resourceName, + docID, + ) + } + + if canRead { + withPermission = permissionThatImpliesRead.String() + break + } + } + + log.InfoContext( + ctx, + "Document readable="+strconv.FormatBool(canRead), + corelog.Any("Permission", withPermission), + corelog.Any("PolicyID", policyID), + corelog.Any("Resource", resourceName), + corelog.Any("ActorID", actorID), + corelog.Any("DocID", docID), + ) + + return canRead, nil + } + + hasAccess, err := a.client.VerifyAccessRequest( ctx, permission, actorID, @@ -350,31 +399,30 @@ func (a *sourceHubBridge) CheckDocAccess( resourceName, docID, ) - if err != nil { - return false, NewErrFailedToVerifyDocAccessWithACP(err, "Local", policyID, actorID, resourceName, docID) - } - if isValid { - log.InfoContext( - ctx, - "Document accessible", - corelog.Any("PolicyID", policyID), - corelog.Any("ActorID", actorID), - corelog.Any("Resource", resourceName), - corelog.Any("DocID", docID), - ) - return true, nil - } else { - log.InfoContext( - ctx, - "Document inaccessible", - corelog.Any("PolicyID", policyID), - corelog.Any("ActorID", actorID), - corelog.Any("Resource", resourceName), - corelog.Any("DocID", docID), + if err != nil { + return false, NewErrFailedToVerifyDocAccessWithACP( + err, + "Local", + permission.String(), + policyID, + actorID, + resourceName, + docID, ) - return false, nil } + + log.InfoContext( + ctx, + "Document accessible="+strconv.FormatBool(hasAccess), + corelog.Any("Permission", permission), + corelog.Any("PolicyID", policyID), + corelog.Any("Resource", resourceName), + corelog.Any("ActorID", actorID), + corelog.Any("DocID", docID), + ) + + return hasAccess, nil } func (a *sourceHubBridge) AddDocActorRelationship( diff --git a/tests/integration/acp/relationship/doc_actor/add/with_only_write_gql_test.go b/tests/integration/acp/relationship/doc_actor/add/with_only_write_gql_test.go index 36bf181478..9c6649c2c1 100644 --- a/tests/integration/acp/relationship/doc_actor/add/with_only_write_gql_test.go +++ b/tests/integration/acp/relationship/doc_actor/add/with_only_write_gql_test.go @@ -19,12 +19,12 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_GQL_OtherActorCantUpdate(t *testing.T) { +func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_GQL_OtherActorCanUpdate(t *testing.T) { expectedPolicyID := "0a243b1e61f990bccde41db7e81a915ffa1507c1403ae19727ce764d3b08846b" test := testUtils.TestCase{ - Description: "Test acp, owner gives write(update) access to another actor, without explicit read permission", + Description: "Test acp, owner gives write(update) access without explicit read permission, can still update", SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ // GQL mutation will return no error when wrong identity is used so test that separately. 
@@ -161,7 +161,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_GQ testUtils.UpdateDoc{ CollectionID: 0, - Identity: immutable.Some(2), // This identity can still not update. + Identity: immutable.Some(2), // This identity can now update. DocID: 0, @@ -170,12 +170,10 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_GQ "name": "Shahzad Lone" } `, - - SkipLocalUpdateEvent: true, }, testUtils.Request{ - Identity: immutable.Some(2), // This identity can still not read. + Identity: immutable.Some(2), // This identity can now also read. Request: ` query { @@ -188,7 +186,13 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_GQ `, Results: map[string]any{ - "Users": []map[string]any{}, + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad Lone", // Note: updated name + "age": int64(28), + }, + }, }, }, }, diff --git a/tests/integration/acp/relationship/doc_actor/add/with_only_write_test.go b/tests/integration/acp/relationship/doc_actor/add/with_only_write_test.go index 09703f93aa..8333790f3d 100644 --- a/tests/integration/acp/relationship/doc_actor/add/with_only_write_test.go +++ b/tests/integration/acp/relationship/doc_actor/add/with_only_write_test.go @@ -19,12 +19,12 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_OtherActorCantUpdate(t *testing.T) { +func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_OtherActorCanUpdate(t *testing.T) { expectedPolicyID := "0a243b1e61f990bccde41db7e81a915ffa1507c1403ae19727ce764d3b08846b" test := testUtils.TestCase{ - Description: "Test acp, owner gives write(update) access to another actor, without explicit read permission", + Description: "Test acp, owner gives write(update) access without explicit read permission, can still update", SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ testUtils.CollectionNamedMutationType, @@ -161,7 +161,7 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot testUtils.UpdateDoc{ CollectionID: 0, - Identity: immutable.Some(2), // This identity can still not update. + Identity: immutable.Some(2), // This identity can now update. DocID: 0, @@ -170,12 +170,10 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot "name": "Shahzad Lone" } `, - - ExpectedError: "document not found or not authorized to access", }, testUtils.Request{ - Identity: immutable.Some(2), // This identity can still not read. + Identity: immutable.Some(2), // This identity can now also read. 
Request: ` query { @@ -188,7 +186,13 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot `, Results: map[string]any{ - "Users": []map[string]any{}, + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad Lone", // Note: updated name + "age": int64(28), + }, + }, }, }, }, @@ -197,12 +201,12 @@ func TestACP_OwnerGivesUpdateWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot testUtils.ExecuteTestCase(t, test) } -func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorWithoutExplicitReadPerm_OtherActorCantDelete(t *testing.T) { +func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorWithoutExplicitReadPerm_OtherActorCanDelete(t *testing.T) { expectedPolicyID := "0a243b1e61f990bccde41db7e81a915ffa1507c1403ae19727ce764d3b08846b" test := testUtils.TestCase{ - Description: "Test acp, owner gives write(delete) access to another actor, without explicit read permission", + Description: "Test acp, owner gives write(delete) access without explicit read permission, can still delete", Actions: []any{ testUtils.AddPolicy{ @@ -326,7 +330,7 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot }, testUtils.Request{ - Identity: immutable.Some(2), // This identity can still not read. + Identity: immutable.Some(2), // This identity can now read. Request: ` query { @@ -339,18 +343,40 @@ func TestACP_OwnerGivesDeleteWriteAccessToAnotherActorWithoutExplicitReadPerm_Ot `, Results: map[string]any{ - "Users": []map[string]any{}, + "Users": []map[string]any{ + { + "_docID": "bae-9d443d0c-52f6-568b-8f74-e8ff0825697b", + "name": "Shahzad", + "age": int64(28), + }, + }, }, }, testUtils.DeleteDoc{ CollectionID: 0, - Identity: immutable.Some(2), // This identity can still not delete. + Identity: immutable.Some(2), // This identity can now delete. DocID: 0, + }, - ExpectedError: "document not found or not authorized to access", + testUtils.Request{ + Identity: immutable.Some(2), // Check if actually deleted. + + Request: ` + query { + Users { + _docID + name + age + } + } + `, + + Results: map[string]any{ + "Users": []map[string]any{}, + }, }, }, } From 24a479f18255b3be4f1ff4b574539468eae60a1c Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Fri, 11 Oct 2024 13:17:23 -0400 Subject: [PATCH 60/71] fix: Prevent mutations from secondary side of relation (#3124) ## Relevant issue(s) Resolves #3102 ## Description Prevents mutations from secondary side of relation. Also validates that values given to relation fields are actually valid docIDs - previously it only validated this on the secondary side, and it was as easy to introduce it to the primary side as it was to correct the tests expecting the failure, so I've added this here. As IDs set via secondary affected the secondary docID the order of results in some tests have changed. Also changed are tests that accidentally created docs via the secondary side that didn't test the secondary mutation (e.g. a lot of tests testing queries from the secondary side also created the test docs from this direction). 
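To make the new restriction concrete, here is a minimal, self-contained Go sketch of the guard this change introduces. It is an illustration only: the `FieldDef` struct, the `relatedObjectID` suffix constant, and the `author`/`book` field names are hypothetical stand-ins, not the actual DefraDB types. The rule it demonstrates is that a relation field, or its generated `<name>_id` companion field, may only be written from the primary side of the relation.

```go
package main

import (
	"errors"
	"fmt"
	"strings"
)

// Suffix used for generated relation-id fields in this sketch.
const relatedObjectID = "_id"

// FieldDef is a simplified stand-in for a collection field definition.
type FieldDef struct {
	Name              string
	IsPrimaryRelation bool
	IsObject          bool
}

// checkSettable mirrors the rule added by this patch: reject writes to a
// relation field (or its <name>_id companion) unless the field is hosted
// on the primary side of the relation.
func checkSettable(fields map[string]FieldDef, name string) error {
	if strings.HasSuffix(name, relatedObjectID) {
		objName := strings.TrimSuffix(name, relatedObjectID)
		if fd, ok := fields[objName]; ok && !fd.IsPrimaryRelation {
			return errors.New("cannot set relation from secondary side: " + name)
		}
		return nil
	}
	if fd, ok := fields[name]; ok && fd.IsObject && !fd.IsPrimaryRelation {
		return errors.New("cannot set relation from secondary side: " + name)
	}
	return nil
}

func main() {
	fields := map[string]FieldDef{
		"author": {Name: "author", IsPrimaryRelation: true, IsObject: true},
		"book":   {Name: "book", IsPrimaryRelation: false, IsObject: true},
	}
	fmt.Println(checkSettable(fields, "author_id")) // <nil>: primary side may be set
	fmt.Println(checkSettable(fields, "book_id"))   // error: secondary side rejected
}
```

In the patch itself the same rule is enforced at two layers: `Document.Set` in `client/document.go` rejects secondary-side writes on the document, and `buildMutationInputTypes` in `internal/request/graphql/schema/generate.go` omits secondary-side relation fields from the generated mutation input types.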
--- client/document.go | 34 ++- client/errors.go | 5 + .../i3102-no-change-tests-updated.md | 3 + internal/db/collection.go | 25 -- internal/db/collection_update.go | 87 ------- internal/request/graphql/schema/generate.go | 14 ++ tests/integration/events.go | 73 +----- .../one_to_many/with_alias_test.go | 29 --- .../field_kinds/one_to_one/with_alias_test.go | 83 +++---- .../one_to_one/with_null_value_test.go | 77 +----- .../one_to_one/with_simple_test.go | 116 +++------ .../field_kinds/one_to_many/simple_test.go | 39 +-- .../one_to_many/with_alias_test.go | 116 +++++---- .../field_kinds/one_to_one/with_alias_test.go | 165 +++++-------- .../one_to_one/with_self_ref_test.go | 91 ------- .../one_to_one/with_simple_test.go | 222 +++++++----------- .../with_group_related_id_alias_test.go | 40 ++-- .../one_to_one/with_group_related_id_test.go | 36 +-- .../query/one_to_one_multiple/simple_test.go | 96 ++++---- .../query/one_to_one_to_many/simple_test.go | 28 +-- .../query/one_to_one_to_one/simple_test.go | 100 ++++---- .../one_to_one_to_one/with_order_test.go | 36 +-- .../schema/create_one_one_data_test.go | 142 +++++++++++ .../updates/add/field/kind/doc_id_test.go | 8 +- tests/predefined/gen_predefined_test.go | 49 ---- 25 files changed, 629 insertions(+), 1085 deletions(-) create mode 100644 docs/data_format_changes/i3102-no-change-tests-updated.md create mode 100644 tests/integration/schema/create_one_one_data_test.go diff --git a/client/document.go b/client/document.go index cc15d45673..4abadcac52 100644 --- a/client/document.go +++ b/client/document.go @@ -241,11 +241,32 @@ func validateFieldSchema(val any, field FieldDefinition) (NormalValue, error) { if err != nil { return nil, err } + + // Validate that the given value is a valid docID + _, err = NewDocIDFromString(v) + if err != nil { + return nil, err + } + return NewNormalString(v), nil } switch field.Kind { - case FieldKind_DocID, FieldKind_NILLABLE_STRING, FieldKind_NILLABLE_BLOB: + case FieldKind_DocID: + v, err := getString(val) + if err != nil { + return nil, err + } + + // Validate that the given value is a valid docID + _, err = NewDocIDFromString(v) + if err != nil { + return nil, err + } + + return NewNormalString(v), nil + + case FieldKind_NILLABLE_STRING, FieldKind_NILLABLE_BLOB: v, err := getString(val) if err != nil { return nil, err @@ -692,6 +713,17 @@ func (doc *Document) Set(field string, value any) error { if !exists { return NewErrFieldNotExist(field) } + + if fd.Kind == FieldKind_DocID && strings.HasSuffix(field, request.RelatedObjectID) { + objFieldName := strings.TrimSuffix(field, request.RelatedObjectID) + ofd, exists := doc.collectionDefinition.GetFieldByName(objFieldName) + if exists && !ofd.IsPrimaryRelation { + return NewErrCannotSetRelationFromSecondarySide(field) + } + } else if fd.Kind.IsObject() && !fd.IsPrimaryRelation { + return NewErrCannotSetRelationFromSecondarySide(field) + } + if fd.Kind.IsObject() && !fd.Kind.IsArray() { if !strings.HasSuffix(field, request.RelatedObjectID) { field = field + request.RelatedObjectID diff --git a/client/errors.go b/client/errors.go index 81ebf2e3f5..caf2fc5c10 100644 --- a/client/errors.go +++ b/client/errors.go @@ -34,6 +34,7 @@ const ( errCanNotTurnNormalValueIntoArray string = "can not turn normal value into array" errCanNotMakeNormalNilFromFieldKind string = "can not make normal nil from field kind" errFailedToParseKind string = "failed to parse kind" + errCannotSetRelationFromSecondarySide string = "cannot set relation from secondary side" ) // Errors 
returnable from this package. @@ -190,3 +191,7 @@ func ReviveError(message string) error { return fmt.Errorf("%s", message) } } + +func NewErrCannotSetRelationFromSecondarySide(name string) error { + return errors.New(errCannotSetRelationFromSecondarySide, errors.NewKV("Name", name)) +} diff --git a/docs/data_format_changes/i3102-no-change-tests-updated.md b/docs/data_format_changes/i3102-no-change-tests-updated.md new file mode 100644 index 0000000000..fdbcd8e4a7 --- /dev/null +++ b/docs/data_format_changes/i3102-no-change-tests-updated.md @@ -0,0 +1,3 @@ +# Prevent mutations from secondary side of relation + +The docIDs in some tests that relied on the removed behaviour have changed. diff --git a/internal/db/collection.go b/internal/db/collection.go index 8f78e51429..b746226767 100644 --- a/internal/db/collection.go +++ b/internal/db/collection.go @@ -655,31 +655,6 @@ func (c *collection) save( // that it's set to the same as the field description CRDT type. val.SetType(fieldDescription.Typ) - relationFieldDescription, isSecondaryRelationID := fieldDescription.GetSecondaryRelationField(c.Definition()) - if isSecondaryRelationID { - if val.Value() == nil { - // If the value (relation) is nil, we don't need to check for any documents already linked to it - continue - } - - primaryId := val.Value().(string) - - err = c.patchPrimaryDoc( - ctx, - c.Name().Value(), - relationFieldDescription, - primaryKey.DocID, - primaryId, - ) - if err != nil { - return cid.Undef, err - } - - // If this field was a secondary relation ID the related document will have been - // updated instead and we should discard this value - continue - } - err = c.validateOneToOneLinkDoesntAlreadyExist( ctx, doc.ID().String(), diff --git a/internal/db/collection_update.go b/internal/db/collection_update.go index 9b44f217b1..2348095500 100644 --- a/internal/db/collection_update.go +++ b/internal/db/collection_update.go @@ -13,13 +13,11 @@ package db import ( "context" - ds "github.com/ipfs/go-datastore" "github.com/sourcenetwork/immutable" "github.com/valyala/fastjson" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" - "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/internal/planner" ) @@ -133,91 +131,6 @@ func (c *collection) updateWithFilter( return results, nil } -// patchPrimaryDoc patches the (primary) document linked to from the document of the given DocID via the -// given (secondary) relationship field description (hosted on the collection of the document matching the -// given DocID). -// -// The given field value should be the string representation of the DocID of the primary document to be -// patched. 
-func (c *collection) patchPrimaryDoc( - ctx context.Context, - secondaryCollectionName string, - relationFieldDescription client.FieldDefinition, - docID string, - fieldValue string, -) error { - primaryDocID, err := client.NewDocIDFromString(fieldValue) - if err != nil { - return err - } - - primaryDef, _, err := client.GetDefinitionFromStore(ctx, c.db, c.Definition(), relationFieldDescription.Kind) - if err != nil { - return err - } - - primaryField, ok := primaryDef.Description.GetFieldByRelation( - relationFieldDescription.RelationName, - secondaryCollectionName, - relationFieldDescription.Name, - ) - if !ok { - return client.NewErrFieldNotExist(relationFieldDescription.RelationName) - } - - primaryIDField, ok := primaryDef.GetFieldByName(primaryField.Name + request.RelatedObjectID) - if !ok { - return client.NewErrFieldNotExist(primaryField.Name + request.RelatedObjectID) - } - - primaryCol := c.db.newCollection(primaryDef.Description, primaryDef.Schema) - doc, err := primaryCol.Get( - ctx, - primaryDocID, - false, - ) - - if err != nil && !errors.Is(err, ds.ErrNotFound) { - return err - } - - // If the document doesn't exist then there is nothing to update. - if doc == nil { - return nil - } - - err = primaryCol.validateOneToOneLinkDoesntAlreadyExist( - ctx, - primaryDocID.String(), - primaryIDField, - docID, - ) - if err != nil { - return err - } - - existingVal, err := doc.GetValue(primaryIDField.Name) - if err != nil && !errors.Is(err, client.ErrFieldNotExist) { - return err - } - - if existingVal != nil && existingVal.Value() != "" && existingVal.Value() != docID { - return NewErrOneOneAlreadyLinked(docID, fieldValue, relationFieldDescription.RelationName) - } - - err = doc.Set(primaryIDField.Name, docID) - if err != nil { - return err - } - - err = primaryCol.Update(ctx, doc) - if err != nil { - return err - } - - return nil -} - // makeSelectionPlan constructs a simple read-only plan of the collection using the given filter. // currently it doesn't support any other operations other than filters. // (IE: No limit, order, etc) diff --git a/internal/request/graphql/schema/generate.go b/internal/request/graphql/schema/generate.go index f326a8232a..c1cf92e4dc 100644 --- a/internal/request/graphql/schema/generate.go +++ b/internal/request/graphql/schema/generate.go @@ -566,6 +566,20 @@ func (g *Generator) buildMutationInputTypes(collections []client.CollectionDefin continue } + if field.Kind == client.FieldKind_DocID && strings.HasSuffix(field.Name, request.RelatedObjectID) { + objFieldName := strings.TrimSuffix(field.Name, request.RelatedObjectID) + ofd, exists := collection.GetFieldByName(objFieldName) + if exists && !ofd.IsPrimaryRelation { + // We do not allow the mutation of relations from the secondary side, + // they must not be included in the input type(s) + continue + } + } else if field.Kind.IsObject() && !field.IsPrimaryRelation { + // We do not allow the mutation of relations from the secondary side, + // they must not be included in the input type(s) + continue + } + var ttype gql.Type if field.Kind.IsObject() { if field.Kind.IsArray() { diff --git a/tests/integration/events.go b/tests/integration/events.go index 5e57a97294..6129d600ee 100644 --- a/tests/integration/events.go +++ b/tests/integration/events.go @@ -275,47 +275,20 @@ func updateNetworkState(s *state, nodeID int, evt event.Update) { // getEventsForUpdateDoc returns a map of docIDs that should be // published to the local event bus after an UpdateDoc action. 
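The `generate.go` change above also has a user-visible GraphQL consequence: secondary relation fields and their `_id` aliases are omitted from the generated `*MutationInputArg` types, so mutations that try to set them fail schema validation before any execution happens. A hedged sketch of what a client now sees (the mutation shape follows the tests in this patch and may differ slightly across versions; the docID is illustrative):

```go
// "author_id" is no longer part of BookMutationInputArg, so this request
// is rejected during GraphQL validation with:
//   Argument "input" has invalid value
const createBookFromSecondarySide = `mutation {
	create_Book(input: {
		name: "Painted House",
		author_id: "bae-c058cfd4-259f-5b08-975d-106f13a143d5"
	}) {
		name
	}
}`
```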
-// -// This will take into account any primary documents that are patched as a result -// of the create or update. func getEventsForUpdateDoc(s *state, action UpdateDoc) map[string]struct{} { - var collection client.Collection - if action.NodeID.HasValue() { - collection = s.collections[action.NodeID.Value()][action.CollectionID] - } else { - collection = s.collections[0][action.CollectionID] - } - docID := s.docIDs[action.CollectionID][action.DocID] - def := collection.Definition() docMap := make(map[string]any) err := json.Unmarshal([]byte(action.Doc), &docMap) require.NoError(s.t, err) - expect := make(map[string]struct{}) - expect[docID.String()] = struct{}{} - - // check for any secondary relation fields that could publish an event - for name, value := range docMap { - field, ok := def.GetFieldByName(name) - if !ok { - continue // ignore unknown field - } - _, ok = field.GetSecondaryRelationField(def) - if ok { - expect[value.(string)] = struct{}{} - } + return map[string]struct{}{ + docID.String(): {}, } - - return expect } // getEventsForCreateDoc returns a map of docIDs that should be // published to the local event bus after a CreateDoc action. -// -// This will take into account any primary documents that are patched as a result -// of the create or update. func getEventsForCreateDoc(s *state, action CreateDoc) map[string]struct{} { var collection client.Collection if action.NodeID.HasValue() { @@ -327,29 +300,10 @@ func getEventsForCreateDoc(s *state, action CreateDoc) map[string]struct{} { docs, err := parseCreateDocs(action, collection) require.NoError(s.t, err) - def := collection.Definition() expect := make(map[string]struct{}) for _, doc := range docs { expect[doc.ID().String()] = struct{}{} - - // check for any secondary relation fields that could publish an event - for f, v := range doc.Values() { - if v.Value() == nil { - // If the new relation value is nil there will be no related document - // to get an event for - continue - } - - field, ok := def.GetFieldByName(f.Name()) - if !ok { - continue // ignore unknown field - } - _, ok = field.GetSecondaryRelationField(def) - if ok { - expect[v.Value().(string)] = struct{}{} - } - } } return expect @@ -361,42 +315,19 @@ func waitForSync(s *state) { // getEventsForUpdateWithFilter returns a map of docIDs that should be // published to the local event bus after a UpdateWithFilter action. -// -// This will take into account any primary documents that are patched as a result -// of the create or update. 
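Since creates and updates no longer patch the primary document as a side effect, the framework's expected-event bookkeeping collapses to one update event per written document, which is all the simplified helpers above compute. A sketch of the shared invariant, for clarity:

```go
// expectedEvents sketches the rule the simplified helpers now share:
// exactly one local update event per written document, keyed by its
// own docID, with no secondary-relation fan-out.
func expectedEvents(docs []*client.Document) map[string]struct{} {
	expect := make(map[string]struct{}, len(docs))
	for _, doc := range docs {
		expect[doc.ID().String()] = struct{}{}
	}
	return expect
}
```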
func getEventsForUpdateWithFilter( s *state, action UpdateWithFilter, result *client.UpdateResult, ) map[string]struct{} { - var collection client.Collection - if action.NodeID.HasValue() { - collection = s.collections[action.NodeID.Value()][action.CollectionID] - } else { - collection = s.collections[0][action.CollectionID] - } - var docPatch map[string]any err := json.Unmarshal([]byte(action.Updater), &docPatch) require.NoError(s.t, err) - def := collection.Definition() expect := make(map[string]struct{}) for _, docID := range result.DocIDs { expect[docID] = struct{}{} - - // check for any secondary relation fields that could publish an event - for name, value := range docPatch { - field, ok := def.GetFieldByName(name) - if !ok { - continue // ignore unknown field - } - _, ok = field.GetSecondaryRelationField(def) - if ok { - expect[value.(string)] = struct{}{} - } - } } return expect diff --git a/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go b/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go index fe66f58d98..7b43fe8499 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go @@ -94,35 +94,6 @@ func TestMutationCreateOneToMany_AliasedRelationNameNonExistingRelationManySide_ } executeTestCase(t, test) } -func TestMutationCreateOneToMany_AliasedRelationNameInvalidIDManySide_CreatedDoc(t *testing.T) { - test := testUtils.TestCase{ - Description: "One to many create mutation, invalid id, from the many side, with alias", - Actions: []any{ - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Painted House", - "author": "ValueDoesntMatter" - }`, - }, - testUtils.Request{ - Request: `query { - Book { - name - } - }`, - Results: map[string]any{ - "Book": []map[string]any{ - { - "name": "Painted House", - }, - }, - }, - }, - }, - } - executeTestCase(t, test) -} func TestMutationCreateOneToMany_AliasedRelationNameToLinkFromManySide(t *testing.T) { test := testUtils.TestCase{ diff --git a/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go b/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go index 8c7dddb1c2..7dc5aaf8f9 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go @@ -73,23 +73,6 @@ func TestMutationCreateOneToOne_UseAliasWithNonExistingRelationPrimarySide_Creat executeTestCase(t, test) } -func TestMutationCreateOneToOne_UseAliasWithNonExistingRelationSecondarySide_Error(t *testing.T) { - test := testUtils.TestCase{ - Description: "One to one create mutation, alias relation, from the secondary side", - Actions: []any{ - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Painted House", - "author": "bae-be6d8024-4953-5a92-84b4-f042d25230c6" - }`, - ExpectedError: "document not found or not authorized to access", - }, - }, - } - executeTestCase(t, test) -} - func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromPrimarySide(t *testing.T) { test := testUtils.TestCase{ Description: "One to one create mutation with an alias relation.", @@ -153,9 +136,13 @@ func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromPrimarySid executeTestCase(t, test) } -func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromSecondarySide(t *testing.T) { +func 
TestMutationCreateOneToOne_UseAliasedRelationNameToLink_CollectionAPI_Errors(t *testing.T) { test := testUtils.TestCase{ Description: "One to one create mutation from secondary side with alias relation.", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.CollectionSaveMutationType, + testUtils.CollectionNamedMutationType, + }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, @@ -169,46 +156,34 @@ func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromSecondaryS "name": "Painted House", "author": testUtils.NewDocIndex(1, 0), }, + ExpectedError: "cannot set relation from secondary side", }, - testUtils.Request{ - Request: `query { - Author { - name - published { - name - } - } + }, + } + + executeTestCase(t, test) +} + +func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_GQL_Errors(t *testing.T) { + test := testUtils.TestCase{ + Description: "One to one create mutation from secondary side with alias relation.", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.GQLRequestMutationType, + }), + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" }`, - Results: map[string]any{ - "Author": []map[string]any{ - { - "name": "John Grisham", - "published": map[string]any{ - "name": "Painted House", - }, - }, - }, - }, }, - testUtils.Request{ - Request: `query { - Book { - name - author { - name - } - } - }`, - Results: map[string]any{ - "Book": []map[string]any{ - { - "name": "Painted House", - "author": map[string]any{ - "name": "John Grisham", - }, - }, - }, + testUtils.CreateDoc{ + CollectionID: 0, + DocMap: map[string]any{ + "name": "Painted House", + "author": testUtils.NewDocIndex(1, 0), }, + ExpectedError: "Argument \"input\" has invalid value", }, }, } diff --git a/tests/integration/mutation/create/field_kinds/one_to_one/with_null_value_test.go b/tests/integration/mutation/create/field_kinds/one_to_one/with_null_value_test.go index a6421aec5c..d82b515dc7 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one/with_null_value_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one/with_null_value_test.go @@ -32,77 +32,17 @@ func TestMutationCreateOneToOne_WithExplicitNullOnPrimarySide(t *testing.T) { } `, }, - testUtils.CreateDoc{ - Doc: `{ - "name": "How to Be a Canadian", - "author": null - }`, - }, - testUtils.CreateDoc{ - Doc: `{ - "name": "Secrets at Maple Syrup Farm", - "author": null - }`, - }, testUtils.CreateDoc{ CollectionID: 1, DocMap: map[string]any{ - "name": "Will Ferguson", - "published": testUtils.NewDocIndex(0, 0), + "name": "Will Ferguson", }, }, - testUtils.Request{ - Request: ` - query { - Book { - name - author { - name - } - } - }`, - Results: map[string]any{ - "Book": []map[string]any{ - { - "name": "Secrets at Maple Syrup Farm", - "author": nil, - }, - { - "name": "How to Be a Canadian", - "author": map[string]any{ - "name": "Will Ferguson", - }, - }, - }, - }, - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} - -func TestMutationCreateOneToOne_WithExplicitNullOnSecondarySide(t *testing.T) { - test := testUtils.TestCase{ - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Book { - name: String - author: Author - } - - type Author { - name: String - published: Book @primary - } - `, - }, testUtils.CreateDoc{ - Doc: `{ - "name": "How to Be a Canadian", - "author": null - }`, + DocMap: map[string]any{ + "name": "How to Be a Canadian", + "author": testUtils.NewDocIndex(1, 0), + }, }, 
testUtils.CreateDoc{ Doc: `{ @@ -110,13 +50,6 @@ func TestMutationCreateOneToOne_WithExplicitNullOnSecondarySide(t *testing.T) { "author": null }`, }, - testUtils.CreateDoc{ - CollectionID: 1, - DocMap: map[string]any{ - "name": "Will Ferguson", - "published": testUtils.NewDocIndex(0, 0), - }, - }, testUtils.Request{ Request: ` query { diff --git a/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go b/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go index fabced3505..653364621b 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go @@ -73,23 +73,6 @@ func TestMutationCreateOneToOneNoChild(t *testing.T) { executeTestCase(t, test) } -func TestMutationCreateOneToOne_NonExistingRelationSecondarySide_Error(t *testing.T) { - test := testUtils.TestCase{ - Description: "One to one create mutation, from the secondary side", - Actions: []any{ - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Painted House", - "author_id": "bae-be6d8024-4953-5a92-84b4-f042d25230c6" - }`, - ExpectedError: "document not found or not authorized to access", - }, - }, - } - executeTestCase(t, test) -} - func TestMutationCreateOneToOne(t *testing.T) { test := testUtils.TestCase{ Description: "One to one create mutation", @@ -155,9 +138,13 @@ func TestMutationCreateOneToOne(t *testing.T) { executeTestCase(t, test) } -func TestMutationCreateOneToOneSecondarySide(t *testing.T) { +func TestMutationCreateOneToOneSecondarySide_CollectionApi(t *testing.T) { test := testUtils.TestCase{ Description: "One to one create mutation from secondary side", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.CollectionSaveMutationType, + testUtils.CollectionNamedMutationType, + }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, @@ -168,51 +155,10 @@ func TestMutationCreateOneToOneSecondarySide(t *testing.T) { testUtils.CreateDoc{ CollectionID: 0, DocMap: map[string]any{ - "name": "Painted House", - "author_id": testUtils.NewDocIndex(1, 0), - }, - }, - testUtils.Request{ - Request: ` - query { - Author { - name - published { - name - } - } - }`, - Results: map[string]any{ - "Author": []map[string]any{ - { - "name": "John Grisham", - "published": map[string]any{ - "name": "Painted House", - }, - }, - }, - }, - }, - testUtils.Request{ - Request: ` - query { - Book { - name - author { - name - } - } - }`, - Results: map[string]any{ - "Book": []map[string]any{ - { - "name": "Painted House", - "author": map[string]any{ - "name": "John Grisham", - }, - }, - }, + "name": "Painted House", + "author": testUtils.NewDocIndex(1, 0), }, + ExpectedError: "cannot set relation from secondary side", }, }, } @@ -220,30 +166,26 @@ func TestMutationCreateOneToOneSecondarySide(t *testing.T) { executeTestCase(t, test) } -func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaPrimary(t *testing.T) { +func TestMutationCreateOneToOneSecondarySide_GQL(t *testing.T) { test := testUtils.TestCase{ - Description: "One to one create mutation, errors due to link already existing, primary side", + Description: "One to one create mutation from secondary side", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.GQLRequestMutationType, + }), Actions: []any{ testUtils.CreateDoc{ - CollectionID: 0, + CollectionID: 1, Doc: `{ - "name": "Painted House" + "name": "John Grisham" }`, }, testUtils.CreateDoc{ - CollectionID: 1, 
- DocMap: map[string]any{ - "name": "John Grisham", - "published_id": testUtils.NewDocIndex(0, 0), - }, - }, - testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 0, DocMap: map[string]any{ - "name": "Saadi Shirazi", - "published_id": testUtils.NewDocIndex(0, 0), + "name": "Painted House", + "author": testUtils.NewDocIndex(1, 0), }, - ExpectedError: "target document is already linked to another document.", + ExpectedError: "Argument \"input\" has invalid value", }, }, } @@ -251,28 +193,28 @@ func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaPrimary( executeTestCase(t, test) } -func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaSecondary(t *testing.T) { +func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaPrimary(t *testing.T) { test := testUtils.TestCase{ - Description: "One to one create mutation, errors due to link already existing, secondary side", + Description: "One to one create mutation, errors due to link already existing, primary side", Actions: []any{ testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 0, Doc: `{ - "name": "John Grisham" + "name": "Painted House" }`, }, testUtils.CreateDoc{ - CollectionID: 0, + CollectionID: 1, DocMap: map[string]any{ - "name": "Painted House", - "author_id": testUtils.NewDocIndex(1, 0), + "name": "John Grisham", + "published_id": testUtils.NewDocIndex(0, 0), }, }, testUtils.CreateDoc{ - CollectionID: 0, + CollectionID: 1, DocMap: map[string]any{ - "name": "Golestan", - "author_id": testUtils.NewDocIndex(1, 0), + "name": "Saadi Shirazi", + "published_id": testUtils.NewDocIndex(0, 0), }, ExpectedError: "target document is already linked to another document.", }, diff --git a/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go b/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go index e8ac15e4da..7c6d33af56 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go @@ -72,8 +72,6 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromSingleSide_Error(t *testing executeTestCase(t, test) } -// Note: This test should probably not pass, as it contains a -// reference to a document that doesnt exist. func TestMutationUpdateOneToMany_InvalidRelationIDToLinkFromManySide(t *testing.T) { author1ID := "bae-a47f80ab-1c30-53b3-9dac-04a4a3fda77e" invalidAuthorID := "bae-35953ca-518d-9e6b-9ce6cd00eff5" @@ -106,42 +104,7 @@ func TestMutationUpdateOneToMany_InvalidRelationIDToLinkFromManySide(t *testing. 
}`, invalidAuthorID, ), - }, - testUtils.Request{ - Request: `query { - Author { - name - published { - name - } - } - }`, - Results: map[string]any{ - "Author": []map[string]any{ - { - "name": "John Grisham", - "published": []map[string]any{}, - }, - }, - }, - }, - testUtils.Request{ - Request: `query { - Book { - name - author { - name - } - } - }`, - Results: map[string]any{ - "Book": []map[string]any{ - { - "name": "Painted House", - "author": nil, // Linked to incorrect id - }, - }, - }, + ExpectedError: "uuid: incorrect UUID length 30 in string", }, }, } diff --git a/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go b/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go index 1f0375e6ba..a4c78fea67 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go @@ -19,12 +19,16 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_Collection(t *testing.T) { +func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_CollectionApi(t *testing.T) { author1ID := "bae-a47f80ab-1c30-53b3-9dac-04a4a3fda77e" bookID := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" test := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from single side (wrong)", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.CollectionSaveMutationType, + testUtils.CollectionNamedMutationType, + }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, @@ -51,14 +55,62 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_Collectio testUtils.UpdateDoc{ CollectionID: 1, DocID: 1, - // NOTE: There is no `published` on book. Doc: fmt.Sprintf( `{ "published": "%s" }`, bookID, ), - ExpectedError: "The given field does not exist. 
Name: published", + ExpectedError: "cannot set relation from secondary side", + }, + }, + } + + executeTestCase(t, test) +} + +func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_GQL(t *testing.T) { + author1ID := "bae-a47f80ab-1c30-53b3-9dac-04a4a3fda77e" + bookID := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" + + test := testUtils.TestCase{ + Description: "One to many update mutation using relation alias name from single side (wrong)", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.GQLRequestMutationType, + }), + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "New Shahzad" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: fmt.Sprintf( + `{ + "name": "Painted House", + "author": "%s" + }`, + author1ID, + ), + }, + testUtils.UpdateDoc{ + CollectionID: 1, + DocID: 1, + Doc: fmt.Sprintf( + `{ + "published": "%s" + }`, + bookID, + ), + ExpectedError: "Argument \"input\" has invalid value", }, }, } @@ -100,42 +152,7 @@ func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_GQL( }`, invalidAuthorID, ), - }, - testUtils.Request{ - Request: `query { - Author { - name - published { - name - } - } - }`, - Results: map[string]any{ - "Author": []map[string]any{ - { - "name": "John Grisham", - "published": []map[string]any{}, - }, - }, - }, - }, - testUtils.Request{ - Request: `query { - Book { - name - author { - name - } - } - }`, - Results: map[string]any{ - "Book": []map[string]any{ - { - "name": "Painted House", - "author": nil, // Linked to incorrect id - }, - }, - }, + ExpectedError: "uuid: incorrect UUID length 30 in string", }, }, } @@ -143,8 +160,6 @@ func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_GQL( executeTestCase(t, test) } -// Note: This test should probably not pass, as it contains a -// reference to a document that doesnt exist. 
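A note on the swapped expectations here: relation `_id` values are now parsed as docIDs at write time, so a malformed ID fails immediately instead of being stored as a dangling link that queries later surface as `author: nil`. The check is the one added to `validateFieldSchema`; a small sketch:

```go
package example

import "github.com/sourcenetwork/defradb/client"

// validateRelationID sketches the docID validation now applied to
// relation _id values before they are written.
func validateRelationID(v string) error {
	_, err := client.NewDocIDFromString(v)
	// For "bae-35953ca-518d-9e6b-9ce6cd00eff5" this yields:
	//   uuid: incorrect UUID length 30 in string "35953ca-518d-9e6b-9ce6cd00eff5"
	return err
}
```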
func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Collection(t *testing.T) { author1ID := "bae-a47f80ab-1c30-53b3-9dac-04a4a3fda77e" invalidAuthorID := "bae-35953ca-518d-9e6b-9ce6cd00eff5" @@ -177,24 +192,7 @@ func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Coll }`, invalidAuthorID, ), - }, - testUtils.Request{ - Request: `query { - Book { - name - author { - name - } - } - }`, - Results: map[string]any{ - "Book": []map[string]any{ - { - "name": "Painted House", - "author": nil, - }, - }, - }, + ExpectedError: "uuid: incorrect UUID length 30 in string", }, }, } diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go index eda176bbcc..f631ef8cab 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go @@ -20,34 +20,30 @@ import ( ) func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromPrimarySide(t *testing.T) { - author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" - bookID := "bae-89d64ba1-44e3-5d75-a610-7226077ece48" + bookID := "bae-dafb74e9-2bf1-5f12-aea9-967814592bad" test := testUtils.TestCase{ Description: "One to one update mutation using alias relation id from single side", Actions: []any{ testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 0, Doc: `{ - "name": "John Grisham" + "name": "Painted House" }`, }, + testUtils.CreateDoc{ + CollectionID: 1, + DocMap: map[string]any{ + "name": "John Grisham", + "published": testUtils.NewDocIndex(0, 0), + }, + }, testUtils.CreateDoc{ CollectionID: 1, Doc: `{ "name": "New Shahzad" }`, }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf( - `{ - "name": "Painted House", - "author": "%s" - }`, - author1ID, - ), - }, testUtils.UpdateDoc{ CollectionID: 1, DocID: 1, @@ -65,35 +61,35 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromPrimarySide(t *testin executeTestCase(t, test) } -func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySide(t *testing.T) { - author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" +func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySide_CollectionApi(t *testing.T) { author2ID := "bae-c058cfd4-259f-5b08-975d-106f13a143d5" test := testUtils.TestCase{ Description: "One to one update mutation using alias relation id from secondary side", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.CollectionSaveMutationType, + testUtils.CollectionNamedMutationType, + }), Actions: []any{ testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 0, Doc: `{ - "name": "John Grisham" + "name": "Painted House" }`, }, + testUtils.CreateDoc{ + CollectionID: 1, + DocMap: map[string]any{ + "name": "John Grisham", + "published": testUtils.NewDocIndex(0, 0), + }, + }, testUtils.CreateDoc{ CollectionID: 1, Doc: `{ "name": "New Shahzad" }`, }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf( - `{ - "name": "Painted House", - "author": "%s" - }`, - author1ID, - ), - }, testUtils.UpdateDoc{ CollectionID: 0, DocID: 0, @@ -103,7 +99,7 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySide(t *test }`, author2ID, ), - ExpectedError: "target document is already linked to another document.", + ExpectedError: "cannot set relation from secondary side", }, }, } @@ -111,70 +107,34 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySide(t 
*test executeTestCase(t, test) } -func TestMutationUpdateOneToOne_AliasWithInvalidLengthRelationIDToLink_Error(t *testing.T) { - author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" - invalidLenSubID := "35953ca-518d-9e6b-9ce6cd00eff5" - invalidAuthorID := "bae-" + invalidLenSubID +func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySide_GQL(t *testing.T) { + author2ID := "bae-c058cfd4-259f-5b08-975d-106f13a143d5" test := testUtils.TestCase{ - Description: "One to one update mutation using invalid alias relation id", + Description: "One to one update mutation using alias relation id from secondary side", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.GQLRequestMutationType, + }), Actions: []any{ testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 0, Doc: `{ - "name": "John Grisham" + "name": "Painted House" }`, }, testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf( - `{ - "name": "Painted House", - "author": "%s" - }`, - author1ID, - ), - }, - testUtils.UpdateDoc{ - CollectionID: 0, - DocID: 0, - Doc: fmt.Sprintf( - `{ - "author": "%s" - }`, - invalidAuthorID, - ), - ExpectedError: "uuid: incorrect UUID length 30 in string \"" + invalidLenSubID + "\"", + CollectionID: 1, + DocMap: map[string]any{ + "name": "John Grisham", + "published": testUtils.NewDocIndex(0, 0), + }, }, - }, - } - - executeTestCase(t, test) -} - -func TestMutationUpdateOneToOne_InvalidAliasRelationNameToLinkFromSecondarySide_Error(t *testing.T) { - author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" - invalidAuthorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ee" - - test := testUtils.TestCase{ - Description: "One to one update mutation using alias relation id from secondary side", - Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, Doc: `{ - "name": "John Grisham" + "name": "New Shahzad" }`, }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf( - `{ - "name": "Painted House", - "author": "%s" - }`, - author1ID, - ), - }, testUtils.UpdateDoc{ CollectionID: 0, DocID: 0, @@ -182,9 +142,9 @@ func TestMutationUpdateOneToOne_InvalidAliasRelationNameToLinkFromSecondarySide_ `{ "author": "%s" }`, - invalidAuthorID, + author2ID, ), - ExpectedError: "document not found or not authorized to access", + ExpectedError: "Argument \"input\" has invalid value", }, }, } @@ -192,51 +152,36 @@ func TestMutationUpdateOneToOne_InvalidAliasRelationNameToLinkFromSecondarySide_ executeTestCase(t, test) } -func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySideWithWrongField_Error(t *testing.T) { - author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" - author2ID := "bae-c058cfd4-259f-5b08-975d-106f13a143d5" +func TestMutationUpdateOneToOne_AliasWithInvalidLengthRelationIDToLink_Error(t *testing.T) { + invalidLenSubID := "35953ca-518d-9e6b-9ce6cd00eff5" + invalidBookID := "bae-" + invalidLenSubID test := testUtils.TestCase{ - Description: "One to one update mutation using relation alias name from secondary side, with a wrong field.", - // This restiction is temporary due to a bug in the collection api, see - // https://github.com/sourcenetwork/defradb/issues/1703 for more info. 
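The `_CollectionApi`/`_GQL` test split that recurs through this patch exists because the same illegal write is caught at different layers: the collection API reaches `Document.Set` and gets the typed error, while in GraphQL the secondary field never makes it into the input type, so argument validation rejects the request first. A condensed sketch of the pattern (test name and data are illustrative; imports as in the surrounding files):

```go
// Collection API variant: the write reaches Document.Set.
func TestSecondarySideLink_CollectionApi_Errors(t *testing.T) {
	executeTestCase(t, testUtils.TestCase{
		SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
			testUtils.CollectionSaveMutationType,
			testUtils.CollectionNamedMutationType,
		}),
		Actions: []any{
			testUtils.UpdateDoc{
				CollectionID:  0,
				DocID:         0,
				Doc:           `{"author_id": "bae-c058cfd4-259f-5b08-975d-106f13a143d5"}`,
				ExpectedError: "cannot set relation from secondary side",
			},
		},
	})
}

// The GQL variant is identical except for:
//   SupportedMutationTypes: immutable.Some([]testUtils.MutationType{
//       testUtils.GQLRequestMutationType,
//   }),
//   ExpectedError: "Argument \"input\" has invalid value"
```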
- SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ - testUtils.GQLRequestMutationType, - }), + Description: "One to one update mutation using invalid alias relation id", Actions: []any{ testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 0, Doc: `{ - "name": "John Grisham" + "name": "Painted House" }`, }, testUtils.CreateDoc{ CollectionID: 1, - Doc: `{ - "name": "New Shahzad" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf( - `{ - "name": "Painted House", - "author": "%s" - }`, - author1ID, - ), + DocMap: map[string]any{ + "name": "John Grisham", + "published": testUtils.NewDocIndex(0, 0), + }, }, testUtils.UpdateDoc{ - CollectionID: 0, + CollectionID: 1, DocID: 0, Doc: fmt.Sprintf( `{ - "notName": "Unpainted Condo", - "author": "%s" + "published": "%s" }`, - author2ID, + invalidBookID, ), - ExpectedError: "Unknown field.", + ExpectedError: "uuid: incorrect UUID length 30 in string \"" + invalidLenSubID + "\"", }, }, } diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_self_ref_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_self_ref_test.go index dd630c0721..77087f2186 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_one/with_self_ref_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_self_ref_test.go @@ -106,94 +106,3 @@ func TestMutationUpdateOneToOne_SelfReferencingFromPrimary(t *testing.T) { testUtils.ExecuteTestCase(t, test) } - -func TestMutationUpdateOneToOne_SelfReferencingFromSecondary(t *testing.T) { - user1ID := "bae-a86ab69e-a2be-54b9-b66e-4e30d6778ffe" - - test := testUtils.TestCase{ - Description: "One to one update mutation, self referencing from secondary", - - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type User { - name: String - boss: User - underling: User @primary - } - `, - }, - testUtils.CreateDoc{ - Doc: `{ - "name": "John" - }`, - }, - testUtils.CreateDoc{ - Doc: `{ - "name": "Fred" - }`, - }, - testUtils.UpdateDoc{ - DocID: 1, - Doc: fmt.Sprintf( - `{ - "boss_id": "%s" - }`, - user1ID, - ), - }, - testUtils.Request{ - Request: ` - query { - User { - name - boss { - name - } - } - }`, - Results: map[string]any{ - "User": []map[string]any{ - { - "name": "Fred", - "boss": map[string]any{ - "name": "John", - }, - }, - { - "name": "John", - "boss": nil, - }, - }, - }, - }, - testUtils.Request{ - Request: ` - query { - User { - name - underling { - name - } - } - }`, - Results: map[string]any{ - "User": []map[string]any{ - { - "name": "Fred", - "underling": nil, - }, - { - "name": "John", - "underling": map[string]any{ - "name": "Fred", - }, - }, - }, - }, - }, - }, - } - - testUtils.ExecuteTestCase(t, test) -} diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go index b2b3859d2d..9aa2a4ff3e 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go @@ -138,11 +138,15 @@ func TestMutationUpdateOneToOne(t *testing.T) { executeTestCase(t, test) } -func TestMutationUpdateOneToOneSecondarySide(t *testing.T) { +func TestMutationUpdateOneToOneSecondarySide_CollectionApi(t *testing.T) { authorID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" test := testUtils.TestCase{ Description: "One to one create mutation, from the secondary side", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + 
testUtils.CollectionSaveMutationType, + testUtils.CollectionNamedMutationType, + }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 0, @@ -161,141 +165,87 @@ func TestMutationUpdateOneToOneSecondarySide(t *testing.T) { DocID: 0, Doc: fmt.Sprintf( `{ - "author_id": "%s" + "author": "%s" }`, authorID, ), - }, - testUtils.Request{ - Request: ` - query { - Book { - name - author { - name - } - } - }`, - Results: map[string]any{ - "Book": []map[string]any{ - { - "name": "Painted House", - "author": map[string]any{ - "name": "John Grisham", - }, - }, - }, - }, - }, - testUtils.Request{ - Request: ` - query { - Author { - name - published { - name - } - } - }`, - Results: map[string]any{ - "Author": []map[string]any{ - { - "name": "John Grisham", - "published": map[string]any{ - "name": "Painted House", - }, - }, - }, - }, + ExpectedError: "cannot set relation from secondary side", }, }, } executeTestCase(t, test) } -func TestMutationUpdateOneToOne_RelationIDToLinkFromPrimarySide(t *testing.T) { - author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" - bookID := "bae-89d64ba1-44e3-5d75-a610-7226077ece48" +func TestMutationUpdateOneToOneSecondarySide_GQL(t *testing.T) { + authorID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" test := testUtils.TestCase{ - Description: "One to one update mutation using relation id from single side (wrong)", + Description: "One to one create mutation, from the secondary side", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.GQLRequestMutationType, + }), Actions: []any{ testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 0, Doc: `{ - "name": "John Grisham" + "name": "Painted House" }`, }, testUtils.CreateDoc{ CollectionID: 1, Doc: `{ - "name": "New Shahzad" + "name": "John Grisham" }`, }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf( - `{ - "name": "Painted House", - "author_id": "%s" - }`, - author1ID, - ), - }, testUtils.UpdateDoc{ - CollectionID: 1, - DocID: 1, + CollectionID: 0, + DocID: 0, Doc: fmt.Sprintf( `{ - "published_id": "%s" + "author": "%s" }`, - bookID, + authorID, ), - ExpectedError: "target document is already linked to another document.", + ExpectedError: "Argument \"input\" has invalid value", }, }, } - executeTestCase(t, test) } -func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySide(t *testing.T) { - author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" - author2ID := "bae-c058cfd4-259f-5b08-975d-106f13a143d5" +func TestMutationUpdateOneToOne_RelationIDToLinkFromPrimarySide(t *testing.T) { + bookID := "bae-dafb74e9-2bf1-5f12-aea9-967814592bad" test := testUtils.TestCase{ - Description: "One to one update mutation using relation id from secondary side", + Description: "One to one update mutation using relation id from single side (wrong)", Actions: []any{ testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 0, Doc: `{ - "name": "John Grisham" + "name": "Painted House" }`, }, + testUtils.CreateDoc{ + CollectionID: 1, + DocMap: map[string]any{ + "name": "John Grisham", + "published_id": testUtils.NewDocIndex(0, 0), + }, + }, testUtils.CreateDoc{ CollectionID: 1, Doc: `{ "name": "New Shahzad" }`, }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf( - `{ - "name": "Painted House", - "author_id": "%s" - }`, - author1ID, - ), - }, testUtils.UpdateDoc{ - CollectionID: 0, - DocID: 0, + CollectionID: 1, + DocID: 1, Doc: fmt.Sprintf( `{ - "author_id": "%s" + "published_id": "%s" }`, - author2ID, + bookID, ), ExpectedError: "target document is already linked to another 
document.", }, @@ -305,13 +255,15 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySide(t *testing.T) executeTestCase(t, test) } -func TestMutationUpdateOneToOne_InvalidLengthRelationIDToLink_Error(t *testing.T) { - author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" - invalidLenSubID := "35953ca-518d-9e6b-9ce6cd00eff5" - invalidAuthorID := "bae-" + invalidLenSubID +func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySide_CollectionApi(t *testing.T) { + author2ID := "bae-c058cfd4-259f-5b08-975d-106f13a143d5" test := testUtils.TestCase{ - Description: "One to one update mutation using invalid relation id", + Description: "One to one update mutation using relation id from secondary side", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.CollectionSaveMutationType, + testUtils.CollectionNamedMutationType, + }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, @@ -319,15 +271,17 @@ func TestMutationUpdateOneToOne_InvalidLengthRelationIDToLink_Error(t *testing.T "name": "John Grisham" }`, }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "New Shahzad" + }`, + }, testUtils.CreateDoc{ CollectionID: 0, - Doc: fmt.Sprintf( - `{ - "name": "Painted House", - "author_id": "%s" - }`, - author1ID, - ), + Doc: `{ + "name": "Painted House" + }`, }, testUtils.UpdateDoc{ CollectionID: 0, @@ -336,9 +290,9 @@ func TestMutationUpdateOneToOne_InvalidLengthRelationIDToLink_Error(t *testing.T `{ "author_id": "%s" }`, - invalidAuthorID, + author2ID, ), - ExpectedError: "uuid: incorrect UUID length 30 in string \"" + invalidLenSubID + "\"", + ExpectedError: "cannot set relation from secondary side", }, }, } @@ -346,12 +300,14 @@ func TestMutationUpdateOneToOne_InvalidLengthRelationIDToLink_Error(t *testing.T executeTestCase(t, test) } -func TestMutationUpdateOneToOne_InvalidRelationIDToLinkFromSecondarySide_Error(t *testing.T) { - author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" - invalidAuthorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ee" +func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySide_GQL(t *testing.T) { + author2ID := "bae-c058cfd4-259f-5b08-975d-106f13a143d5" test := testUtils.TestCase{ Description: "One to one update mutation using relation id from secondary side", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + testUtils.GQLRequestMutationType, + }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, @@ -359,15 +315,17 @@ func TestMutationUpdateOneToOne_InvalidRelationIDToLinkFromSecondarySide_Error(t "name": "John Grisham" }`, }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "New Shahzad" + }`, + }, testUtils.CreateDoc{ CollectionID: 0, - Doc: fmt.Sprintf( - `{ - "name": "Painted House", - "author_id": "%s" - }`, - author1ID, - ), + Doc: `{ + "name": "Painted House" + }`, }, testUtils.UpdateDoc{ CollectionID: 0, @@ -376,9 +334,9 @@ func TestMutationUpdateOneToOne_InvalidRelationIDToLinkFromSecondarySide_Error(t `{ "author_id": "%s" }`, - invalidAuthorID, + author2ID, ), - ExpectedError: "document not found or not authorized to access", + ExpectedError: "Argument \"input\" has invalid value", }, }, } @@ -386,51 +344,35 @@ func TestMutationUpdateOneToOne_InvalidRelationIDToLinkFromSecondarySide_Error(t executeTestCase(t, test) } -func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySideWithWrongField_Error(t *testing.T) { - author1ID := "bae-53eff350-ad8e-532c-b72d-f95c4f47909c" - author2ID := "bae-c058cfd4-259f-5b08-975d-106f13a143d5" +func 
TestMutationUpdateOneToOne_InvalidLengthRelationIDToLink_Error(t *testing.T) { + invalidLenSubID := "35953ca-518d-9e6b-9ce6cd00eff5" + invalidBookID := "bae-" + invalidLenSubID test := testUtils.TestCase{ - Description: "One to one update mutation using relation id from secondary side, with a wrong field.", - // This restiction is temporary due to a bug in the collection api, see - // https://github.com/sourcenetwork/defradb/issues/1852 for more info. - SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ - testUtils.GQLRequestMutationType, - }), + Description: "One to one update mutation using invalid relation id", Actions: []any{ testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 0, Doc: `{ - "name": "John Grisham" + "name": "Painted House" }`, }, testUtils.CreateDoc{ CollectionID: 1, Doc: `{ - "name": "New Shahzad" + "name": "John Grisham" }`, }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: fmt.Sprintf( - `{ - "name": "Painted House", - "author_id": "%s" - }`, - author1ID, - ), - }, testUtils.UpdateDoc{ - CollectionID: 0, + CollectionID: 1, DocID: 0, Doc: fmt.Sprintf( `{ - "notName": "Unpainted Condo", - "author_id": "%s" + "published_id": "%s" }`, - author2ID, + invalidBookID, ), - ExpectedError: "In field \"notName\": Unknown field.", + ExpectedError: "uuid: incorrect UUID length 30 in string \"" + invalidLenSubID + "\"", }, }, } diff --git a/tests/integration/query/one_to_one/with_group_related_id_alias_test.go b/tests/integration/query/one_to_one/with_group_related_id_alias_test.go index 0ae9548536..4581914473 100644 --- a/tests/integration/query/one_to_one/with_group_related_id_alias_test.go +++ b/tests/integration/query/one_to_one/with_group_related_id_alias_test.go @@ -34,29 +34,29 @@ func TestQueryOneToOneWithGroupRelatedIDAlias(t *testing.T) { `, }, testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Painted House" - }`, + CollectionID: 1, + DocMap: map[string]any{ + "name": "John Grisham", + }, }, testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Go Guide for Rust developers" - }`, + CollectionID: 1, + DocMap: map[string]any{ + "name": "Andrew Lone", + }, }, testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 0, DocMap: map[string]any{ - "name": "John Grisham", - "published_id": testUtils.NewDocIndex(0, 0), + "name": "Painted House", + "author_id": testUtils.NewDocIndex(1, 0), }, }, testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 0, DocMap: map[string]any{ - "name": "Andrew Lone", - "published_id": testUtils.NewDocIndex(0, 1), + "name": "Go Guide for Rust developers", + "author_id": testUtils.NewDocIndex(1, 1), }, }, testUtils.Request{ @@ -74,24 +74,24 @@ func TestQueryOneToOneWithGroupRelatedIDAlias(t *testing.T) { Results: map[string]any{ "Book": []map[string]any{ { - "author_id": "bae-fc7bf08d-9117-5acd-8b49-bc7431b1b238", + "author_id": "bae-547eb3d8-7fc8-5c21-bcef-590813451e55", "author": map[string]any{ - "name": "John Grisham", + "name": "Andrew Lone", }, "_group": []map[string]any{ { - "name": "Painted House", + "name": "Go Guide for Rust developers", }, }, }, { - "author_id": "bae-fcb12812-4c38-574e-bc8b-91b37ee6cd9b", + "author_id": "bae-ee5973cf-73c3-558f-8aec-8b590b8e77cf", "author": map[string]any{ - "name": "Andrew Lone", + "name": "John Grisham", }, "_group": []map[string]any{ { - "name": "Go Guide for Rust developers", + "name": "Painted House", }, }, }, diff --git a/tests/integration/query/one_to_one/with_group_related_id_test.go b/tests/integration/query/one_to_one/with_group_related_id_test.go index 
5b1aa09dce..aa3c0e9c17 100644 --- a/tests/integration/query/one_to_one/with_group_related_id_test.go +++ b/tests/integration/query/one_to_one/with_group_related_id_test.go @@ -34,29 +34,29 @@ func TestQueryOneToOneWithGroupRelatedID(t *testing.T) { `, }, testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Painted House" - }`, + CollectionID: 1, + DocMap: map[string]any{ + "name": "John Grisham", + }, }, testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Go Guide for Rust developers" - }`, + CollectionID: 1, + DocMap: map[string]any{ + "name": "Andrew Lone", + }, }, testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 0, DocMap: map[string]any{ - "name": "John Grisham", - "published_id": testUtils.NewDocIndex(0, 0), + "name": "Painted House", + "author_id": testUtils.NewDocIndex(1, 0), }, }, testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 0, DocMap: map[string]any{ - "name": "John Grisham", - "published_id": testUtils.NewDocIndex(0, 1), + "name": "Go Guide for Rust developers", + "author_id": testUtils.NewDocIndex(1, 1), }, }, testUtils.Request{ @@ -71,18 +71,18 @@ func TestQueryOneToOneWithGroupRelatedID(t *testing.T) { Results: map[string]any{ "Book": []map[string]any{ { - "author_id": "bae-fc7bf08d-9117-5acd-8b49-bc7431b1b238", + "author_id": "bae-547eb3d8-7fc8-5c21-bcef-590813451e55", "_group": []map[string]any{ { - "name": "Painted House", + "name": "Go Guide for Rust developers", }, }, }, { - "author_id": "bae-f2dcf043-d24d-5885-9a0a-60196094c782", + "author_id": "bae-ee5973cf-73c3-558f-8aec-8b590b8e77cf", "_group": []map[string]any{ { - "name": "Go Guide for Rust developers", + "name": "Painted House", }, }, }, diff --git a/tests/integration/query/one_to_one_multiple/simple_test.go b/tests/integration/query/one_to_one_multiple/simple_test.go index 4696db5dcf..5ab21e7543 100644 --- a/tests/integration/query/one_to_one_multiple/simple_test.go +++ b/tests/integration/query/one_to_one_multiple/simple_test.go @@ -143,18 +143,6 @@ func TestQueryOneToOneMultiple_FromMixedPrimaryAndSecondary(t *testing.T) { } `, }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Old Publisher" - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "New Publisher" - }`, - }, testUtils.CreateDoc{ CollectionID: 1, Doc: `{ @@ -170,17 +158,29 @@ func TestQueryOneToOneMultiple_FromMixedPrimaryAndSecondary(t *testing.T) { testUtils.CreateDoc{ CollectionID: 2, DocMap: map[string]any{ - "name": "Painted House", - "publisher_id": testUtils.NewDocIndex(0, 0), - "author_id": testUtils.NewDocIndex(1, 0), + "name": "Painted House", + "author_id": testUtils.NewDocIndex(1, 0), }, }, testUtils.CreateDoc{ CollectionID: 2, DocMap: map[string]any{ - "name": "Theif Lord", - "publisher_id": testUtils.NewDocIndex(0, 1), - "author_id": testUtils.NewDocIndex(1, 1), + "name": "Theif Lord", + "author_id": testUtils.NewDocIndex(1, 1), + }, + }, + testUtils.CreateDoc{ + CollectionID: 0, + DocMap: map[string]any{ + "name": "Old Publisher", + "printed_id": testUtils.NewDocIndex(2, 0), + }, + }, + testUtils.CreateDoc{ + CollectionID: 0, + DocMap: map[string]any{ + "name": "New Publisher", + "printed_id": testUtils.NewDocIndex(2, 1), }, }, testUtils.Request{ @@ -248,43 +248,43 @@ func TestQueryOneToOneMultiple_FromSecondary(t *testing.T) { `, }, testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Old Publisher" - }`, + CollectionID: 2, + DocMap: map[string]any{ + "name": "Painted House", + }, }, testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "New Publisher" 
- }`, + CollectionID: 2, + DocMap: map[string]any{ + "name": "Theif Lord", + }, }, testUtils.CreateDoc{ - CollectionID: 1, - Doc: `{ - "name": "John Grisham" - }`, + CollectionID: 0, + DocMap: map[string]any{ + "name": "Old Publisher", + "printed_id": testUtils.NewDocIndex(2, 0), + }, }, testUtils.CreateDoc{ - CollectionID: 1, - Doc: `{ - "name": "Cornelia Funke" - }`, + CollectionID: 0, + DocMap: map[string]any{ + "name": "New Publisher", + "printed_id": testUtils.NewDocIndex(2, 1), + }, }, testUtils.CreateDoc{ - CollectionID: 2, + CollectionID: 1, DocMap: map[string]any{ - "name": "Painted House", - "publisher_id": testUtils.NewDocIndex(0, 0), - "author_id": testUtils.NewDocIndex(1, 0), + "name": "John Grisham", + "published_id": testUtils.NewDocIndex(2, 0), }, }, testUtils.CreateDoc{ - CollectionID: 2, + CollectionID: 1, DocMap: map[string]any{ - "name": "Theif Lord", - "publisher_id": testUtils.NewDocIndex(0, 1), - "author_id": testUtils.NewDocIndex(1, 1), + "name": "Cornelia Funke", + "published_id": testUtils.NewDocIndex(2, 1), }, }, testUtils.Request{ @@ -302,21 +302,21 @@ func TestQueryOneToOneMultiple_FromSecondary(t *testing.T) { Results: map[string]any{ "Book": []map[string]any{ { - "name": "Theif Lord", + "name": "Painted House", "publisher": map[string]any{ - "name": "New Publisher", + "name": "Old Publisher", }, "author": map[string]any{ - "name": "Cornelia Funke", + "name": "John Grisham", }, }, { - "name": "Painted House", + "name": "Theif Lord", "publisher": map[string]any{ - "name": "Old Publisher", + "name": "New Publisher", }, "author": map[string]any{ - "name": "John Grisham", + "name": "Cornelia Funke", }, }, }, diff --git a/tests/integration/query/one_to_one_to_many/simple_test.go b/tests/integration/query/one_to_one_to_many/simple_test.go index 3e65193cc7..e360d7a698 100644 --- a/tests/integration/query/one_to_one_to_many/simple_test.go +++ b/tests/integration/query/one_to_one_to_many/simple_test.go @@ -115,16 +115,16 @@ func TestQueryOneToOneToManyFromSecondaryOnOneToMany(t *testing.T) { `, }, testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Indicator1" - }`, + CollectionID: 1, + DocMap: map[string]any{ + "name": "Observable1", + }, }, testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 0, DocMap: map[string]any{ - "name": "Observable1", - "indicator_id": testUtils.NewDocIndex(0, 0), + "name": "Indicator1", + "observable_id": testUtils.NewDocIndex(1, 0), }, }, testUtils.CreateDoc{ @@ -192,16 +192,16 @@ func TestQueryOneToOneToManyFromSecondaryOnOneToOne(t *testing.T) { `, }, testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Indicator1" - }`, + CollectionID: 1, + DocMap: map[string]any{ + "name": "Observable1", + }, }, testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 0, DocMap: map[string]any{ - "name": "Observable1", - "indicator_id": testUtils.NewDocIndex(0, 0), + "name": "Indicator1", + "observable_id": testUtils.NewDocIndex(1, 0), }, }, testUtils.CreateDoc{ diff --git a/tests/integration/query/one_to_one_to_one/simple_test.go b/tests/integration/query/one_to_one_to_one/simple_test.go index 0486d3db77..e338d0f47e 100644 --- a/tests/integration/query/one_to_one_to_one/simple_test.go +++ b/tests/integration/query/one_to_one_to_one/simple_test.go @@ -40,43 +40,43 @@ func TestQueryOneToOneToOne(t *testing.T) { `, }, testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Old Publisher" - }`, + CollectionID: 2, + DocMap: map[string]any{ + "name": "John Grisham", + }, }, testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": 
"New Publisher" - }`, + CollectionID: 2, + DocMap: map[string]any{ + "name": "Cornelia Funke", + }, }, testUtils.CreateDoc{ CollectionID: 1, DocMap: map[string]any{ - "name": "Painted House", - "publisher_id": testUtils.NewDocIndex(0, 0), + "name": "Painted House", + "author_id": testUtils.NewDocIndex(2, 0), }, }, testUtils.CreateDoc{ CollectionID: 1, DocMap: map[string]any{ - "name": "Theif Lord", - "publisher_id": testUtils.NewDocIndex(0, 1), + "name": "Theif Lord", + "author_id": testUtils.NewDocIndex(2, 1), }, }, testUtils.CreateDoc{ - CollectionID: 2, + CollectionID: 0, DocMap: map[string]any{ - "name": "John Grisham", - "published_id": testUtils.NewDocIndex(1, 0), + "name": "Old Publisher", + "printed_id": testUtils.NewDocIndex(1, 0), }, }, testUtils.CreateDoc{ - CollectionID: 2, + CollectionID: 0, DocMap: map[string]any{ - "name": "Cornelia Funke", - "published_id": testUtils.NewDocIndex(1, 1), + "name": "New Publisher", + "printed_id": testUtils.NewDocIndex(1, 1), }, }, testUtils.Request{ @@ -156,31 +156,31 @@ func TestQueryOneToOneToOneSecondaryThenPrimary(t *testing.T) { }`, }, testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 2, DocMap: map[string]any{ - "name": "Painted House", - "publisher_id": testUtils.NewDocIndex(0, 0), + "name": "John Grisham", }, }, testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 2, DocMap: map[string]any{ - "name": "Theif Lord", - "publisher_id": testUtils.NewDocIndex(0, 1), + "name": "Cornelia Funke", }, }, testUtils.CreateDoc{ - CollectionID: 2, + CollectionID: 1, DocMap: map[string]any{ - "name": "John Grisham", - "published_id": testUtils.NewDocIndex(1, 0), + "name": "Painted House", + "publisher_id": testUtils.NewDocIndex(0, 0), + "author_id": testUtils.NewDocIndex(2, 0), }, }, testUtils.CreateDoc{ - CollectionID: 2, + CollectionID: 1, DocMap: map[string]any{ - "name": "Cornelia Funke", - "published_id": testUtils.NewDocIndex(1, 1), + "name": "Theif Lord", + "publisher_id": testUtils.NewDocIndex(0, 1), + "author_id": testUtils.NewDocIndex(2, 1), }, }, testUtils.Request{ @@ -248,29 +248,29 @@ func TestQueryOneToOneToOnePrimaryThenSecondary(t *testing.T) { `, }, testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Old Publisher" - }`, + CollectionID: 1, + DocMap: map[string]any{ + "name": "Painted House", + }, }, testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "New Publisher" - }`, + CollectionID: 1, + DocMap: map[string]any{ + "name": "Theif Lord", + }, }, testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 0, DocMap: map[string]any{ - "name": "Painted House", - "publisher_id": testUtils.NewDocIndex(0, 0), + "name": "Old Publisher", + "printed_id": testUtils.NewDocIndex(1, 0), }, }, testUtils.CreateDoc{ - CollectionID: 1, + CollectionID: 0, DocMap: map[string]any{ - "name": "Theif Lord", - "publisher_id": testUtils.NewDocIndex(0, 1), + "name": "New Publisher", + "printed_id": testUtils.NewDocIndex(1, 1), }, }, testUtils.CreateDoc{ @@ -302,20 +302,20 @@ func TestQueryOneToOneToOnePrimaryThenSecondary(t *testing.T) { Results: map[string]any{ "Publisher": []map[string]any{ { - "name": "Old Publisher", + "name": "New Publisher", "printed": map[string]any{ - "name": "Painted House", + "name": "Theif Lord", "author": map[string]any{ - "name": "John Grisham", + "name": "Cornelia Funke", }, }, }, { - "name": "New Publisher", + "name": "Old Publisher", "printed": map[string]any{ - "name": "Theif Lord", + "name": "Painted House", "author": map[string]any{ - "name": "Cornelia Funke", + "name": "John Grisham", }, }, }, diff 
--git a/tests/integration/query/one_to_one_to_one/with_order_test.go b/tests/integration/query/one_to_one_to_one/with_order_test.go index 4c241cc281..a5835c8527 100644 --- a/tests/integration/query/one_to_one_to_one/with_order_test.go +++ b/tests/integration/query/one_to_one_to_one/with_order_test.go @@ -40,43 +40,43 @@ func TestQueryOneToOneToOneWithNestedOrder(t *testing.T) { `, }, testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Old Publisher" - }`, + CollectionID: 2, + DocMap: map[string]any{ + "name": "John Grisham", + }, }, testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "New Publisher" - }`, + CollectionID: 2, + DocMap: map[string]any{ + "name": "Cornelia Funke", + }, }, testUtils.CreateDoc{ CollectionID: 1, DocMap: map[string]any{ - "name": "Painted House", - "publisher_id": testUtils.NewDocIndex(0, 0), + "name": "Painted House", + "author_id": testUtils.NewDocIndex(2, 0), }, }, testUtils.CreateDoc{ CollectionID: 1, DocMap: map[string]any{ - "name": "Theif Lord", - "publisher_id": testUtils.NewDocIndex(0, 1), + "name": "Theif Lord", + "author_id": testUtils.NewDocIndex(2, 1), }, }, testUtils.CreateDoc{ - CollectionID: 2, + CollectionID: 0, DocMap: map[string]any{ - "name": "John Grisham", - "published_id": testUtils.NewDocIndex(1, 0), + "name": "Old Publisher", + "printed_id": testUtils.NewDocIndex(1, 0), }, }, testUtils.CreateDoc{ - CollectionID: 2, + CollectionID: 0, DocMap: map[string]any{ - "name": "Cornelia Funke", - "published_id": testUtils.NewDocIndex(1, 1), + "name": "New Publisher", + "printed_id": testUtils.NewDocIndex(1, 1), }, }, testUtils.Request{ diff --git a/tests/integration/schema/create_one_one_data_test.go b/tests/integration/schema/create_one_one_data_test.go new file mode 100644 index 0000000000..59ead8f5db --- /dev/null +++ b/tests/integration/schema/create_one_one_data_test.go @@ -0,0 +1,142 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
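One more pattern worth flagging in the query-test churn above: fixtures now create documents on whichever side hosts the relation key (the primary side), and the hard-coded docIDs in expectations shift accordingly. That is expected, and is what the data-format-changes note earlier in this patch records: docIDs are derived from document content, so moving the relation `_id` onto the primary document changes that document's ID. Illustrative sketch only; the real derivation is CID-based:

```go
// docIDs are content-derived, so adding the relation key to the primary
// document's content changes its docID. Placeholder values throughout.
func docIDChangeSketch() {
	withoutLink := map[string]any{"name": "John Grisham"}
	withLink := map[string]any{
		"name":         "John Grisham",
		"published_id": "bae-0000-placeholder", // relation key on the primary side
	}
	_ = withoutLink // hashes to one docID
	_ = withLink    // hashes to a different docID
}
```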
+ +package schema + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestCreateOneToOne_Input_PrimaryObject(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + author: Author + } + + type Author { + name: String + wrote: Book @primary + } + `, + }, + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "AuthorMutationInputArg") { + name + inputFields { + name + type { + name + ofType { + name + kind + } + } + } + } + } + `, + ContainsData: map[string]any{ + "__type": map[string]any{ + "name": "AuthorMutationInputArg", + "inputFields": []any{ + map[string]any{ + "name": "name", + "type": map[string]any{ + "name": "String", + "ofType": nil, + }, + }, + map[string]any{ + "name": "wrote", + "type": map[string]any{ + "name": "ID", + "ofType": nil, + }, + }, + map[string]any{ + "name": "wrote", + "type": map[string]any{ + "name": "ID", + "ofType": nil, + }, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestCreateOneToOne_Input_SecondaryObject(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + author: Author + } + + type Author { + name: String + wrote: Book @primary + } + `, + }, + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "BookMutationInputArg") { + name + inputFields { + name + type { + name + ofType { + name + kind + } + } + } + } + } + `, + ContainsData: map[string]any{ + "__type": map[string]any{ + "name": "BookMutationInputArg", + // Note: the secondary relation fields should not be here! + "inputFields": []any{ + map[string]any{ + "name": "name", + "type": map[string]any{ + "name": "String", + "ofType": nil, + }, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/updates/add/field/kind/doc_id_test.go b/tests/integration/schema/updates/add/field/kind/doc_id_test.go index be62192a1f..45b0471a81 100644 --- a/tests/integration/schema/updates/add/field/kind/doc_id_test.go +++ b/tests/integration/schema/updates/add/field/kind/doc_id_test.go @@ -72,7 +72,7 @@ func TestSchemaUpdatesAddFieldKindDocIDWithCreate(t *testing.T) { CollectionID: 0, Doc: `{ "name": "John", - "foo": "nhgfdsfd" + "foo": "bae-547eb3d8-7fc8-5c21-bcef-590813451e55" }`, }, testUtils.Request{ @@ -86,7 +86,7 @@ func TestSchemaUpdatesAddFieldKindDocIDWithCreate(t *testing.T) { "Users": []map[string]any{ { "name": "John", - "foo": "nhgfdsfd", + "foo": "bae-547eb3d8-7fc8-5c21-bcef-590813451e55", }, }, }, @@ -118,7 +118,7 @@ func TestSchemaUpdatesAddFieldKindDocIDSubstitutionWithCreate(t *testing.T) { CollectionID: 0, Doc: `{ "name": "John", - "foo": "nhgfdsfd" + "foo": "bae-547eb3d8-7fc8-5c21-bcef-590813451e55" }`, }, testUtils.Request{ @@ -132,7 +132,7 @@ func TestSchemaUpdatesAddFieldKindDocIDSubstitutionWithCreate(t *testing.T) { "Users": []map[string]any{ { "name": "John", - "foo": "nhgfdsfd", + "foo": "bae-547eb3d8-7fc8-5c21-bcef-590813451e55", }, }, }, diff --git a/tests/predefined/gen_predefined_test.go b/tests/predefined/gen_predefined_test.go index a32c261ce7..fc68681b4e 100644 --- a/tests/predefined/gen_predefined_test.go +++ b/tests/predefined/gen_predefined_test.go @@ -233,55 +233,6 @@ func TestGeneratePredefinedFromSchema_OneToOneToOnePrimary(t *testing.T) { } } -func TestGeneratePredefinedFromSchema_TwoPrimaryToOneMiddle(t *testing.T) { - schema := ` - type User 
{ - name: String - device: Device - } - type Device { - model: String - owner: User @primary - specs: Specs @primary - } - type Specs { - OS: String - device: Device - }` - - docs, err := CreateFromSDL(schema, DocsList{ - ColName: "User", - Docs: []map[string]any{ - { - "name": "John", - "device": map[string]any{ - "model": "iPhone", - "specs": map[string]any{ - "OS": "iOS", - }, - }, - }, - }, - }) - assert.NoError(t, err) - - colDefMap, err := gen.ParseSDL(schema) - require.NoError(t, err) - - specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}, colDefMap["Specs"]) - userDoc := mustAddDocIDToDoc(map[string]any{"name": "John"}, colDefMap["User"]) - deviceDoc := mustAddDocIDToDoc(map[string]any{ - "model": "iPhone", - "specs_id": specsDoc[request.DocIDFieldName], - "owner_id": userDoc[request.DocIDFieldName], - }, colDefMap["Device"]) - - errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs) - if errorMsg != "" { - t.Error(errorMsg) - } -} - func TestGeneratePredefinedFromSchema_OneToTwoPrimary(t *testing.T) { schema := ` type User { From 858f4f1253721ee34c2e7d9ae8eec40c335d6452 Mon Sep 17 00:00:00 2001 From: Fred Carle Date: Fri, 11 Oct 2024 13:42:38 -0400 Subject: [PATCH 61/71] feat: Add replicator retry (#3107) ## Relevant issue(s) Resolves #3072 ## Description This PR adds a replication retry functionality to the database. It uses an exponential backoff until 32 minutes is reached and then it will continuously retry every 32 minutes until the inactive peer is removed from the list of replicators. --- client/db.go | 2 +- client/mocks/db.go | 12 +- client/replicator.go | 22 +- datastore/mocks/txn.go | 12 +- datastore/multi.go | 7 +- datastore/store.go | 7 +- docs/website/references/http/openapi.json | 9 + event/event.go | 18 +- http/client.go | 2 +- http/client_tx.go | 2 +- internal/core/key.go | 81 +++- internal/db/collection.go | 1 - internal/db/config.go | 29 +- internal/db/config_test.go | 7 + internal/db/db.go | 44 +- internal/db/errors.go | 3 + internal/db/messages.go | 6 + internal/db/p2p_replicator.go | 446 +++++++++++++++++- net/client.go | 17 +- net/client_test.go | 2 +- net/peer.go | 103 +--- net/peer_test.go | 151 +----- net/server.go | 21 +- tests/bench/query/planner/utils.go | 2 +- tests/clients/cli/wrapper.go | 2 +- tests/clients/cli/wrapper_tx.go | 2 +- tests/clients/http/wrapper.go | 2 +- tests/clients/http/wrapper_tx.go | 2 +- tests/integration/db.go | 12 +- .../net/simple/replicator/with_create_test.go | 70 +++ .../net/simple/replicator/with_update_test.go | 161 +++++++ tests/integration/test_case.go | 23 + tests/integration/utils.go | 89 +++- 33 files changed, 1045 insertions(+), 324 deletions(-) diff --git a/client/db.go b/client/db.go index 4838773dde..30f123d286 100644 --- a/client/db.go +++ b/client/db.go @@ -60,7 +60,7 @@ type DB interface { // Peerstore returns the peerstore where known host information is stored. // // It sits within the rootstore returned by [Root]. - Peerstore() datastore.DSBatching + Peerstore() datastore.DSReaderWriter // Headstore returns the headstore where the current heads of the database are stored. 
// diff --git a/client/mocks/db.go b/client/mocks/db.go index 73cf4b3665..024d2ea31c 100644 --- a/client/mocks/db.go +++ b/client/mocks/db.go @@ -1564,19 +1564,19 @@ func (_c *DB_PeerInfo_Call) RunAndReturn(run func() peer.AddrInfo) *DB_PeerInfo_ } // Peerstore provides a mock function with given fields: -func (_m *DB) Peerstore() datastore.DSBatching { +func (_m *DB) Peerstore() datastore.DSReaderWriter { ret := _m.Called() if len(ret) == 0 { panic("no return value specified for Peerstore") } - var r0 datastore.DSBatching - if rf, ok := ret.Get(0).(func() datastore.DSBatching); ok { + var r0 datastore.DSReaderWriter + if rf, ok := ret.Get(0).(func() datastore.DSReaderWriter); ok { r0 = rf() } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(datastore.DSBatching) + r0 = ret.Get(0).(datastore.DSReaderWriter) } } @@ -1600,12 +1600,12 @@ func (_c *DB_Peerstore_Call) Run(run func()) *DB_Peerstore_Call { return _c } -func (_c *DB_Peerstore_Call) Return(_a0 datastore.DSBatching) *DB_Peerstore_Call { +func (_c *DB_Peerstore_Call) Return(_a0 datastore.DSReaderWriter) *DB_Peerstore_Call { _c.Call.Return(_a0) return _c } -func (_c *DB_Peerstore_Call) RunAndReturn(run func() datastore.DSBatching) *DB_Peerstore_Call { +func (_c *DB_Peerstore_Call) RunAndReturn(run func() datastore.DSReaderWriter) *DB_Peerstore_Call { _c.Call.Return(run) return _c } diff --git a/client/replicator.go b/client/replicator.go index 8df204906f..730d3e2609 100644 --- a/client/replicator.go +++ b/client/replicator.go @@ -10,10 +10,26 @@ package client -import "github.com/libp2p/go-libp2p/core/peer" +import ( + "time" + + "github.com/libp2p/go-libp2p/core/peer" +) // Replicator is a peer that a set of local collections are replicated to. type Replicator struct { - Info peer.AddrInfo - Schemas []string + Info peer.AddrInfo + Schemas []string + Status ReplicatorStatus + LastStatusChange time.Time } + +// ReplicatorStatus is the status of a Replicator. +type ReplicatorStatus uint8 + +const ( + // ReplicatorStatusActive is the status of a Replicator that is actively replicating. + ReplicatorStatusActive ReplicatorStatus = iota + // ReplicatorStatusInactive is the status of a Replicator that is inactive/offline. 
+ ReplicatorStatusInactive +) diff --git a/datastore/mocks/txn.go b/datastore/mocks/txn.go index ea923d5de4..a7bd1c6fd2 100644 --- a/datastore/mocks/txn.go +++ b/datastore/mocks/txn.go @@ -533,19 +533,19 @@ func (_c *Txn_OnSuccessAsync_Call) RunAndReturn(run func(func())) *Txn_OnSuccess } // Peerstore provides a mock function with given fields: -func (_m *Txn) Peerstore() datastore.DSBatching { +func (_m *Txn) Peerstore() datastore.DSReaderWriter { ret := _m.Called() if len(ret) == 0 { panic("no return value specified for Peerstore") } - var r0 datastore.DSBatching - if rf, ok := ret.Get(0).(func() datastore.DSBatching); ok { + var r0 datastore.DSReaderWriter + if rf, ok := ret.Get(0).(func() datastore.DSReaderWriter); ok { r0 = rf() } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(datastore.DSBatching) + r0 = ret.Get(0).(datastore.DSReaderWriter) } } @@ -569,12 +569,12 @@ func (_c *Txn_Peerstore_Call) Run(run func()) *Txn_Peerstore_Call { return _c } -func (_c *Txn_Peerstore_Call) Return(_a0 datastore.DSBatching) *Txn_Peerstore_Call { +func (_c *Txn_Peerstore_Call) Return(_a0 datastore.DSReaderWriter) *Txn_Peerstore_Call { _c.Call.Return(_a0) return _c } -func (_c *Txn_Peerstore_Call) RunAndReturn(run func() datastore.DSBatching) *Txn_Peerstore_Call { +func (_c *Txn_Peerstore_Call) RunAndReturn(run func() datastore.DSReaderWriter) *Txn_Peerstore_Call { _c.Call.Return(run) return _c } diff --git a/datastore/multi.go b/datastore/multi.go index cbbf80e23f..5a2c934852 100644 --- a/datastore/multi.go +++ b/datastore/multi.go @@ -12,7 +12,6 @@ package datastore import ( ds "github.com/ipfs/go-datastore" - "github.com/ipfs/go-datastore/namespace" ) var ( @@ -31,7 +30,7 @@ type multistore struct { data DSReaderWriter enc Blockstore head DSReaderWriter - peer DSBatching + peer DSReaderWriter system DSReaderWriter dag Blockstore } @@ -46,7 +45,7 @@ func MultiStoreFrom(rootstore ds.Datastore) MultiStore { data: prefix(rootRW, dataStoreKey), enc: newBlockstore(prefix(rootRW, encStoreKey)), head: prefix(rootRW, headStoreKey), - peer: namespace.Wrap(rootstore, peerStoreKey), + peer: prefix(rootRW, peerStoreKey), system: prefix(rootRW, systemStoreKey), dag: newBlockstore(prefix(rootRW, blockStoreKey)), } @@ -70,7 +69,7 @@ func (ms multistore) Headstore() DSReaderWriter { } // Peerstore implements MultiStore. 
-func (ms multistore) Peerstore() DSBatching { +func (ms multistore) Peerstore() DSReaderWriter { return ms.peer } diff --git a/datastore/store.go b/datastore/store.go index 641cd10b1a..7f4048ec90 100644 --- a/datastore/store.go +++ b/datastore/store.go @@ -47,7 +47,7 @@ type MultiStore interface { // Peerstore is a wrapped root DSReaderWriter as a ds.Batching, embedded into a DSBatching // under the /peers namespace - Peerstore() DSBatching + Peerstore() DSReaderWriter // Blockstore is a wrapped root DSReaderWriter as a Blockstore, embedded into a Blockstore // under the /blocks namespace @@ -81,8 +81,3 @@ type IPLDStorage interface { storage.ReadableStorage storage.WritableStorage } - -// DSBatching wraps the Batching interface from go-datastore -type DSBatching interface { - ds.Batching -} diff --git a/docs/website/references/http/openapi.json b/docs/website/references/http/openapi.json index 77168f7e93..056d53085d 100644 --- a/docs/website/references/http/openapi.json +++ b/docs/website/references/http/openapi.json @@ -509,11 +509,20 @@ }, "type": "object" }, + "LastStatusChange": { + "format": "date-time", + "type": "string" + }, "Schemas": { "items": { "type": "string" }, "type": "array" + }, + "Status": { + "maximum": 255, + "minimum": 0, + "type": "integer" } }, "type": "object" diff --git a/event/event.go b/event/event.go index 698cb8dc90..5ae882c6bb 100644 --- a/event/event.go +++ b/event/event.go @@ -35,6 +35,8 @@ const ( PeerInfoName = Name("peer-info") // ReplicatorName is the name of the replicator event. ReplicatorName = Name("replicator") + // ReplicatorFailureName is the name of the replicator failure event. + ReplicatorFailureName = Name("replicator-failure") // P2PTopicCompletedName is the name of the network p2p topic update completed event. P2PTopicCompletedName = Name("p2p-topic-completed") // ReplicatorCompletedName is the name of the replicator completed event. @@ -68,8 +70,12 @@ type Update struct { // also formed this update. Block []byte - // IsCreate is true if this update is the creation of a new document. - IsCreate bool + // IsRetry is true if this update is a retry of a previously failed update. + IsRetry bool + + // Success is a channel that will receive a boolean value indicating if the update was successful. + // It is used during retries. + Success chan bool } // Merge is a notification that a merge can be performed up to the provided CID. @@ -137,3 +143,11 @@ type Replicator struct { // and those collections have documents to be replicated. Docs <-chan Update } + +// ReplicatorFailure is an event that is published when a replicator fails to replicate a document. +type ReplicatorFailure struct { + // PeerID is the id of the peer that failed to replicate the document. + PeerID peer.ID + // DocID is the unique immutable identifier of the document that failed to replicate. 
+ DocID string +} diff --git a/http/client.go b/http/client.go index 777cf4a733..ca43181c3c 100644 --- a/http/client.go +++ b/http/client.go @@ -493,7 +493,7 @@ func (c *Client) Encstore() datastore.Blockstore { panic("client side database") } -func (c *Client) Peerstore() datastore.DSBatching { +func (c *Client) Peerstore() datastore.DSReaderWriter { panic("client side database") } diff --git a/http/client_tx.go b/http/client_tx.go index daacb4128e..4a993d66d9 100644 --- a/http/client_tx.go +++ b/http/client_tx.go @@ -99,7 +99,7 @@ func (c *Transaction) Headstore() datastore.DSReaderWriter { panic("client side transaction") } -func (c *Transaction) Peerstore() datastore.DSBatching { +func (c *Transaction) Peerstore() datastore.DSReaderWriter { panic("client side transaction") } diff --git a/internal/core/key.go b/internal/core/key.go index 60601795b2..3e0bde87cb 100644 --- a/internal/core/key.go +++ b/internal/core/key.go @@ -58,8 +58,10 @@ const ( FIELD_ID_SEQ = "/seq/field" PRIMARY_KEY = "/pk" DATASTORE_DOC_VERSION_FIELD_ID = "v" - REPLICATOR = "/replicator/id" P2P_COLLECTION = "/p2p/collection" + REPLICATOR = "/rep/id" + REPLICATOR_RETRY_ID = "/rep/retry/id" + REPLICATOR_RETRY_DOC = "/rep/retry/doc" ) // Key is an interface that represents a key in the database. @@ -946,3 +948,80 @@ func bytesPrefixEnd(b []byte) []byte { // maximal byte string (i.e. already \xff...). return b } + +type ReplicatorRetryIDKey struct { + PeerID string +} + +var _ Key = (*ReplicatorRetryIDKey)(nil) + +func NewReplicatorRetryIDKey(peerID string) ReplicatorRetryIDKey { + return ReplicatorRetryIDKey{ + PeerID: peerID, + } +} + +// NewReplicatorRetryIDKeyFromString creates a new [ReplicatorRetryIDKey] from a string. +// +// It expects the input string to be in the format `/rep/retry/id/[PeerID]`. +func NewReplicatorRetryIDKeyFromString(key string) (ReplicatorRetryIDKey, error) { + peerID := strings.TrimPrefix(key, REPLICATOR_RETRY_ID+"/") + if peerID == "" { + return ReplicatorRetryIDKey{}, errors.WithStack(ErrInvalidKey, errors.NewKV("Key", key)) + } + return NewReplicatorRetryIDKey(peerID), nil +} + +func (k ReplicatorRetryIDKey) ToString() string { + return REPLICATOR_RETRY_ID + "/" + k.PeerID +} + +func (k ReplicatorRetryIDKey) Bytes() []byte { + return []byte(k.ToString()) +} + +func (k ReplicatorRetryIDKey) ToDS() ds.Key { + return ds.NewKey(k.ToString()) +} + +type ReplicatorRetryDocIDKey struct { + PeerID string + DocID string +} + +var _ Key = (*ReplicatorRetryDocIDKey)(nil) + +func NewReplicatorRetryDocIDKey(peerID, docID string) ReplicatorRetryDocIDKey { + return ReplicatorRetryDocIDKey{ + PeerID: peerID, + DocID: docID, + } +} + +// NewReplicatorRetryDocIDKeyFromString creates a new [ReplicatorRetryDocIDKey] from a string. +// +// It expects the input string to be in the format `/rep/retry/doc/[PeerID]/[DocID]`. 
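A round-trip sketch of this key layout may help; it is illustrative only (not part of the patch), the peer and doc IDs are made-up placeholders, and `internal/core` is importable only from within the defradb module:

```go
package main

import (
	"fmt"

	"github.com/sourcenetwork/defradb/internal/core"
)

func main() {
	// Hypothetical IDs, used only to show the `/rep/retry/doc/[PeerID]/[DocID]` layout.
	key := core.NewReplicatorRetryDocIDKey("12D3KooWExamplePeer", "bae-example-doc")
	fmt.Println(key.ToString()) // /rep/retry/doc/12D3KooWExamplePeer/bae-example-doc

	// Parsing trims the prefix, splits on "/", and errors unless exactly
	// two components (peer ID and doc ID) remain.
	parsed, err := core.NewReplicatorRetryDocIDKeyFromString(key.ToString())
	fmt.Println(parsed.PeerID, parsed.DocID, err) // 12D3KooWExamplePeer bae-example-doc <nil>
}
```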
+func NewReplicatorRetryDocIDKeyFromString(key string) (ReplicatorRetryDocIDKey, error) { + trimmedKey := strings.TrimPrefix(key, REPLICATOR_RETRY_DOC+"/") + keyArr := strings.Split(trimmedKey, "/") + if len(keyArr) != 2 { + return ReplicatorRetryDocIDKey{}, errors.WithStack(ErrInvalidKey, errors.NewKV("Key", key)) + } + return NewReplicatorRetryDocIDKey(keyArr[0], keyArr[1]), nil +} + +func (k ReplicatorRetryDocIDKey) ToString() string { + keyString := REPLICATOR_RETRY_DOC + "/" + k.PeerID + if k.DocID != "" { + keyString += "/" + k.DocID + } + return keyString +} + +func (k ReplicatorRetryDocIDKey) Bytes() []byte { + return []byte(k.ToString()) +} + +func (k ReplicatorRetryDocIDKey) ToDS() ds.Key { + return ds.NewKey(k.ToString()) +} diff --git a/internal/db/collection.go b/internal/db/collection.go index b746226767..39e8757598 100644 --- a/internal/db/collection.go +++ b/internal/db/collection.go @@ -702,7 +702,6 @@ func (c *collection) save( Cid: link.Cid, SchemaRoot: c.Schema().Root, Block: headNode, - IsCreate: isCreate, } txn.OnSuccess(func() { c.db.events.Publish(event.NewMessage(event.UpdateName, updateEvent)) diff --git a/internal/db/config.go b/internal/db/config.go index 3d69e833c4..f2fc942ae2 100644 --- a/internal/db/config.go +++ b/internal/db/config.go @@ -11,6 +11,8 @@ package db import ( + "time" + "github.com/sourcenetwork/immutable" ) @@ -20,7 +22,24 @@ const ( ) type dbOptions struct { - maxTxnRetries immutable.Option[int] + maxTxnRetries immutable.Option[int] + RetryIntervals []time.Duration +} + +// defaultOptions returns the default db options. +func defaultOptions() *dbOptions { + return &dbOptions{ + RetryIntervals: []time.Duration{ + // exponential backoff retry intervals + time.Second * 30, + time.Minute, + time.Minute * 2, + time.Minute * 4, + time.Minute * 8, + time.Minute * 16, + time.Minute * 32, + }, + } } // Option is a funtion that sets a config value on the db. @@ -32,3 +51,11 @@ func WithMaxRetries(num int) Option { opts.maxTxnRetries = immutable.Some(num) } } + +func WithRetryInterval(interval []time.Duration) Option { + return func(opt *dbOptions) { + if len(interval) > 0 { + opt.RetryIntervals = interval + } + } +} diff --git a/internal/db/config_test.go b/internal/db/config_test.go index a52d494a21..7f73c2e755 100644 --- a/internal/db/config_test.go +++ b/internal/db/config_test.go @@ -12,6 +12,7 @@ package db import ( "testing" + "time" "github.com/stretchr/testify/assert" ) @@ -22,3 +23,9 @@ func TestWithMaxRetries(t *testing.T) { assert.True(t, d.maxTxnRetries.HasValue()) assert.Equal(t, 10, d.maxTxnRetries.Value()) } + +func TestWithRetryInterval(t *testing.T) { + d := dbOptions{} + WithRetryInterval([]time.Duration{time.Minute, time.Hour})(&d) + assert.Equal(t, []time.Duration{time.Minute, time.Hour}, d.RetryIntervals) +} diff --git a/internal/db/db.go b/internal/db/db.go index d5872cef0c..1e52b16437 100644 --- a/internal/db/db.go +++ b/internal/db/db.go @@ -18,6 +18,7 @@ import ( "context" "sync" "sync/atomic" + "time" ds "github.com/ipfs/go-datastore" dsq "github.com/ipfs/go-datastore/query" @@ -80,6 +81,15 @@ type db struct { // The peer ID and network address information for the current node // if network is enabled. The `atomic.Value` should hold a `peer.AddrInfo` struct. peerInfo atomic.Value + + // To be able to close the context passed to NewDB on DB close, + // we need to keep a reference to the cancel function. Otherwise, + // some goroutines might leak. 
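As a usage sketch for the new `WithRetryInterval` option, assuming the in-memory datastore used by the tests later in this patch (illustrative only, not part of the patch; error handling is minimal):

```go
package main

import (
	"context"
	"time"

	"github.com/sourcenetwork/defradb/acp"
	"github.com/sourcenetwork/defradb/datastore/memory"
	"github.com/sourcenetwork/defradb/internal/db"
	"github.com/sourcenetwork/immutable"
)

func main() {
	ctx := context.Background()

	// Replace the default schedule: back off 10s, then 1m, then every 5m
	// (the last interval repeats once the schedule is exhausted).
	d, err := db.NewDB(
		ctx,
		memory.NewDatastore(ctx),
		immutable.None[acp.ACP](),
		nil,
		db.WithRetryInterval([]time.Duration{
			10 * time.Second,
			time.Minute,
			5 * time.Minute,
		}),
	)
	if err != nil {
		panic(err)
	}
	defer d.Close()
}
```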
+ ctxCancel context.CancelFunc + + // The intervals at which to retry replicator failures. + // For example, this can define an exponential backoff strategy. + retryIntervals []time.Duration } // NewDB creates a new instance of the DB using the given options. @@ -107,20 +117,23 @@ func newDB( return nil, err } - db := &db{ - rootstore: rootstore, - multistore: multistore, - acp: acp, - lensRegistry: lens, - parser: parser, - options: options, - events: event.NewBus(commandBufferSize, eventBufferSize), + opts := defaultOptions() + for _, opt := range options { + opt(opts) } - // apply options - var opts dbOptions - for _, opt := range options { - opt(&opts) + ctx, cancel := context.WithCancel(ctx) + + db := &db{ + rootstore: rootstore, + multistore: multistore, + acp: acp, + lensRegistry: lens, + parser: parser, + options: options, + events: event.NewBus(commandBufferSize, eventBufferSize), + ctxCancel: cancel, + retryIntervals: opts.RetryIntervals, } if opts.maxTxnRetries.HasValue() { @@ -136,11 +149,12 @@ func newDB( return nil, err } - sub, err := db.events.Subscribe(event.MergeName, event.PeerInfoName) + sub, err := db.events.Subscribe(event.MergeName, event.PeerInfoName, event.ReplicatorFailureName) if err != nil { return nil, err } go db.handleMessages(ctx, sub) + go db.handleReplicatorRetries(ctx) return db, nil } @@ -173,7 +187,7 @@ func (db *db) Encstore() datastore.Blockstore { } // Peerstore returns the internal DAG store which contains IPLD blocks. -func (db *db) Peerstore() datastore.DSBatching { +func (db *db) Peerstore() datastore.DSReaderWriter { return db.multistore.Peerstore() } @@ -370,6 +384,8 @@ func (db *db) PrintDump(ctx context.Context) error { func (db *db) Close() { log.Info("Closing DefraDB process...") + db.ctxCancel() + db.events.Close() err := db.rootstore.Close() diff --git a/internal/db/errors.go b/internal/db/errors.go index 612d5ddb40..d210860501 100644 --- a/internal/db/errors.go +++ b/internal/db/errors.go @@ -148,6 +148,9 @@ var ( ErrSelfReferenceWithoutSelf = errors.New(errSelfReferenceWithoutSelf) ErrColNotMaterialized = errors.New(errColNotMaterialized) ErrMaterializedViewAndACPNotSupported = errors.New(errMaterializedViewAndACPNotSupported) + ErrContextDone = errors.New("context done") + ErrFailedToRetryDoc = errors.New("failed to retry doc") + ErrTimeoutDocRetry = errors.New("timeout while retrying doc") ) // NewErrFailedToGetHeads returns a new error indicating that the heads of a document diff --git a/internal/db/messages.go b/internal/db/messages.go index 04b81cd210..51efba982e 100644 --- a/internal/db/messages.go +++ b/internal/db/messages.go @@ -78,6 +78,12 @@ func (db *db) handleMessages(ctx context.Context, sub *event.Subscription) { log.ErrorContextE(ctx, "Failed to load replicators", err) } }) + case event.ReplicatorFailure: + // ReplicatorFailure is a notification that a replicator has failed to replicate a document. 
+ err := db.handleReplicatorFailure(ctx, evt.PeerID.String(), evt.DocID) + if err != nil { + log.ErrorContextE(ctx, "Failed to handle replicator failure", err) + } } } } diff --git a/internal/db/p2p_replicator.go b/internal/db/p2p_replicator.go index b66ab4f2cf..2d81f123c8 100644 --- a/internal/db/p2p_replicator.go +++ b/internal/db/p2p_replicator.go @@ -13,19 +13,31 @@ package db import ( "context" "encoding/json" + "time" - dsq "github.com/ipfs/go-datastore/query" + "github.com/fxamacker/cbor/v2" + "github.com/ipfs/go-datastore/query" "github.com/libp2p/go-libp2p/core/peer" "github.com/sourcenetwork/corelog" "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/event" "github.com/sourcenetwork/defradb/internal/core" + coreblock "github.com/sourcenetwork/defradb/internal/core/block" "github.com/sourcenetwork/defradb/internal/merkle/clock" ) +const ( + // retryLoopInterval is the interval at which the retry handler checks for + // replicators that are due for a retry. + retryLoopInterval = 2 * time.Second + // retryTimeout is the timeout for a single doc retry. + retryTimeout = 10 * time.Second +) + func (db *db) SetReplicator(ctx context.Context, rep client.Replicator) error { txn, err := db.NewTxn(ctx, false) if err != nil { @@ -50,12 +62,12 @@ func (db *db) SetReplicator(ctx context.Context, rep client.Replicator) error { storedRep := client.Replicator{} storedSchemas := make(map[string]struct{}) repKey := core.NewReplicatorKey(rep.Info.ID.String()) - hasOldRep, err := txn.Systemstore().Has(ctx, repKey.ToDS()) + hasOldRep, err := txn.Peerstore().Has(ctx, repKey.ToDS()) if err != nil { return err } if hasOldRep { - repBytes, err := txn.Systemstore().Get(ctx, repKey.ToDS()) + repBytes, err := txn.Peerstore().Get(ctx, repKey.ToDS()) if err != nil { return err } @@ -68,6 +80,7 @@ func (db *db) SetReplicator(ctx context.Context, rep client.Replicator) error { } } else { storedRep.Info = rep.Info + storedRep.LastStatusChange = time.Now() } var collections []client.Collection @@ -113,7 +126,7 @@ func (db *db) SetReplicator(ctx context.Context, rep client.Replicator) error { return err } - err = txn.Systemstore().Put(ctx, repKey.ToDS(), newRepBytes) + err = txn.Peerstore().Put(ctx, repKey.ToDS(), newRepBytes) if err != nil { return err } @@ -214,14 +227,14 @@ func (db *db) DeleteReplicator(ctx context.Context, rep client.Replicator) error storedRep := client.Replicator{} storedSchemas := make(map[string]struct{}) repKey := core.NewReplicatorKey(rep.Info.ID.String()) - hasOldRep, err := txn.Systemstore().Has(ctx, repKey.ToDS()) + hasOldRep, err := txn.Peerstore().Has(ctx, repKey.ToDS()) if err != nil { return err } if !hasOldRep { return ErrReplicatorNotFound } - repBytes, err := txn.Systemstore().Get(ctx, repKey.ToDS()) + repBytes, err := txn.Peerstore().Get(ctx, repKey.ToDS()) if err != nil { return err } @@ -245,7 +258,7 @@ func (db *db) DeleteReplicator(ctx context.Context, rep client.Replicator) error } // make sure the replicator exists in the datastore key := core.NewReplicatorKey(rep.Info.ID.String()) - _, err = txn.Systemstore().Get(ctx, key.ToDS()) + _, err = txn.Peerstore().Get(ctx, key.ToDS()) if err != nil { return err } @@ -265,7 +278,7 @@ func (db *db) DeleteReplicator(ctx context.Context, rep client.Replicator) error // Persist the replicator to the store, deleting it if no schemas remain key := core.NewReplicatorKey(rep.Info.ID.String()) if len(rep.Schemas) == 0 { - err 
:= txn.Systemstore().Delete(ctx, key.ToDS()) + err := txn.Peerstore().Delete(ctx, key.ToDS()) if err != nil { return err } @@ -274,7 +287,7 @@ func (db *db) DeleteReplicator(ctx context.Context, rep client.Replicator) error if err != nil { return err } - err = txn.Systemstore().Put(ctx, key.ToDS(), repBytes) + err = txn.Peerstore().Put(ctx, key.ToDS(), repBytes) if err != nil { return err } @@ -298,10 +311,10 @@ func (db *db) GetAllReplicators(ctx context.Context) ([]client.Replicator, error defer txn.Discard(ctx) // create collection system prefix query - query := dsq.Query{ + query := query.Query{ Prefix: core.NewReplicatorKey("").ToString(), } - results, err := txn.Systemstore().Query(ctx, query) + results, err := txn.Peerstore().Query(ctx, query) if err != nil { return nil, err } @@ -335,3 +348,414 @@ func (db *db) loadAndPublishReplicators(ctx context.Context) error { } return nil } + +// handleReplicatorRetries manages retries for failed replication attempts. +func (db *db) handleReplicatorRetries(ctx context.Context) { + for { + select { + case <-ctx.Done(): + return + + case <-time.After(retryLoopInterval): + db.retryReplicators(ctx) + } + } +} + +func (db *db) handleReplicatorFailure(ctx context.Context, peerID, docID string) error { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + return err + } + defer txn.Discard(ctx) + err = updateReplicatorStatus(ctx, txn, peerID, false) + if err != nil { + return err + } + err = createIfNotExistsReplicatorRetry(ctx, txn, peerID, db.retryIntervals) + if err != nil { + return err + } + docIDKey := core.NewReplicatorRetryDocIDKey(peerID, docID) + err = txn.Peerstore().Put(ctx, docIDKey.ToDS(), []byte{}) + if err != nil { + return err + } + return txn.Commit(ctx) +} + +func (db *db) handleCompletedReplicatorRetry(ctx context.Context, peerID string, success bool) error { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + return err + } + defer txn.Discard(ctx) + var done bool + if success { + done, err = deleteReplicatorRetryIfNoMoreDocs(ctx, txn, peerID) + if err != nil { + return err + } + if done { + err := updateReplicatorStatus(ctx, txn, peerID, true) + if err != nil { + return err + } + } else { + // If there are more docs to retry, set the next retry time to be immediate. + err := setReplicatorNextRetry(ctx, txn, peerID, []time.Duration{0}) + if err != nil { + return err + } + } + } else { + err := setReplicatorNextRetry(ctx, txn, peerID, db.retryIntervals) + if err != nil { + return err + } + } + return txn.Commit(ctx) +} + +// updateReplicatorStatus updates the status of a replicator in the peerstore. 
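The helper defined next only stamps `LastStatusChange` when the status actually flips. A standalone sketch of that rule, using the public `client.Replicator` type (the `markInactive` helper is illustrative, not part of the patch):

```go
package main

import (
	"fmt"
	"time"

	"github.com/sourcenetwork/defradb/client"
)

// markInactive mirrors the inactive branch of updateReplicatorStatus:
// the timestamp only moves on an actual Active -> Inactive transition,
// so repeated failures do not keep pushing it forward.
func markInactive(rep *client.Replicator) {
	if rep.Status == client.ReplicatorStatusActive {
		rep.LastStatusChange = time.Now()
	}
	rep.Status = client.ReplicatorStatusInactive
}

func main() {
	rep := client.Replicator{Status: client.ReplicatorStatusActive}

	markInactive(&rep)
	first := rep.LastStatusChange

	markInactive(&rep) // already inactive: the timestamp is untouched
	fmt.Println(first.Equal(rep.LastStatusChange)) // true
}
```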
+func updateReplicatorStatus( + ctx context.Context, + txn datastore.Txn, + peerID string, + active bool, +) error { + key := core.NewReplicatorKey(peerID) + repBytes, err := txn.Peerstore().Get(ctx, key.ToDS()) + if err != nil { + return err + } + rep := client.Replicator{} + err = json.Unmarshal(repBytes, &rep) + if err != nil { + return err + } + switch active { + case true: + if rep.Status == client.ReplicatorStatusInactive { + rep.LastStatusChange = time.Time{} + } + rep.Status = client.ReplicatorStatusActive + case false: + if rep.Status == client.ReplicatorStatusActive { + rep.LastStatusChange = time.Now() + } + rep.Status = client.ReplicatorStatusInactive + } + b, err := json.Marshal(rep) + if err != nil { + return err + } + return txn.Peerstore().Put(ctx, key.ToDS(), b) +} + +type retryInfo struct { + NextRetry time.Time + NumRetries int + Retrying bool +} + +func createIfNotExistsReplicatorRetry( + ctx context.Context, + txn datastore.Txn, + peerID string, + retryIntervals []time.Duration, +) error { + key := core.NewReplicatorRetryIDKey(peerID) + exists, err := txn.Peerstore().Has(ctx, key.ToDS()) + if err != nil { + return err + } + if exists { + return nil + } + r := retryInfo{ + NextRetry: time.Now().Add(retryIntervals[0]), + NumRetries: 0, + } + b, err := cbor.Marshal(r) + if err != nil { + return err + } + err = txn.Peerstore().Put(ctx, key.ToDS(), b) + if err != nil { + return err + } + return nil +} + +func (db *db) retryReplicators(ctx context.Context) { + q := query.Query{ + Prefix: core.REPLICATOR_RETRY_ID, + } + results, err := db.Peerstore().Query(ctx, q) + if err != nil { + log.ErrorContextE(ctx, "Failed to query replicator retries", err) + return + } + defer closeQueryResults(results) + now := time.Now() + for result := range results.Next() { + key, err := core.NewReplicatorRetryIDKeyFromString(result.Key) + if err != nil { + log.ErrorContextE(ctx, "Failed to parse replicator retry ID key", err) + continue + } + rInfo := retryInfo{} + err = cbor.Unmarshal(result.Value, &rInfo) + if err != nil { + log.ErrorContextE(ctx, "Failed to unmarshal replicator retry info", err) + // If we can't unmarshal the retry info, we delete the retry key and all related retry docs. + err = db.deleteReplicatorRetryAndDocs(ctx, key.PeerID) + if err != nil { + log.ErrorContextE(ctx, "Failed to delete replicator retry and docs", err) + } + continue + } + // If the next retry time has passed and the replicator is not already retrying. + if now.After(rInfo.NextRetry) && !rInfo.Retrying { + // The replicator might have been deleted by the time we reach this point. + // If it no longer exists, we delete the retry key and all retry docs. 
+			exists, err := db.Peerstore().Has(ctx, core.NewReplicatorKey(key.PeerID).ToDS())
+			if err != nil {
+				log.ErrorContextE(ctx, "Failed to check if replicator exists", err)
+				continue
+			}
+			if !exists {
+				err = db.deleteReplicatorRetryAndDocs(ctx, key.PeerID)
+				if err != nil {
+					log.ErrorContextE(ctx, "Failed to delete replicator retry and docs", err)
+				}
+				continue
+			}
+
+			err = db.setReplicatorAsRetrying(ctx, key, rInfo)
+			if err != nil {
+				log.ErrorContextE(ctx, "Failed to set replicator as retrying", err)
+				continue
+			}
+			go db.retryReplicator(ctx, key.PeerID)
+		}
+	}
+}
+
+func (db *db) setReplicatorAsRetrying(ctx context.Context, key core.ReplicatorRetryIDKey, rInfo retryInfo) error {
+	rInfo.Retrying = true
+	rInfo.NumRetries++
+	b, err := cbor.Marshal(rInfo)
+	if err != nil {
+		return err
+	}
+	return db.Peerstore().Put(ctx, key.ToDS(), b)
+}
+
+func setReplicatorNextRetry(
+	ctx context.Context,
+	txn datastore.Txn,
+	peerID string,
+	retryIntervals []time.Duration,
+) error {
+	key := core.NewReplicatorRetryIDKey(peerID)
+	b, err := txn.Peerstore().Get(ctx, key.ToDS())
+	if err != nil {
+		return err
+	}
+	rInfo := retryInfo{}
+	err = cbor.Unmarshal(b, &rInfo)
+	if err != nil {
+		return err
+	}
+	if rInfo.NumRetries >= len(retryIntervals) {
+		rInfo.NextRetry = time.Now().Add(retryIntervals[len(retryIntervals)-1])
+	} else {
+		rInfo.NextRetry = time.Now().Add(retryIntervals[rInfo.NumRetries])
+	}
+	rInfo.Retrying = false
+	b, err = cbor.Marshal(rInfo)
+	if err != nil {
+		return err
+	}
+	return txn.Peerstore().Put(ctx, key.ToDS(), b)
+}
+
+// retryReplicator retries all unsynced docs for a replicator.
+//
+// The retry process is as follows:
+// 1. Query the retry docs for the replicator.
+// 2. For each doc, retry the doc.
+// 3. If the doc is successfully retried, delete the retry doc.
+// 4. If the doc fails to retry, stop retrying the rest of the docs and wait for the next retry.
+// 5. If all docs are successfully retried, delete the replicator retry.
+// 6. If there are more docs to retry, set the next retry time to be immediate.
+//
+// All actions within this function are done outside a transaction to always get the most recent data
+// and post updates as soon as possible. Because of the asynchronous nature of the retryDoc step, there
+// would be a high chance of unnecessary transaction conflicts.
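As an aside before the retry loop itself: the clamping rule in `setReplicatorNextRetry` above indexes the schedule by retry count and repeats the last interval once the schedule is exhausted, which yields the 32-minute steady state described in the commit message. A minimal sketch (not part of the patch) against the default intervals from `internal/db/config.go`:

```go
package main

import (
	"fmt"
	"time"
)

// nextInterval mirrors the index clamp in setReplicatorNextRetry: the retry
// count indexes into the schedule, and past the end the last interval repeats.
func nextInterval(retryIntervals []time.Duration, numRetries int) time.Duration {
	if numRetries >= len(retryIntervals) {
		return retryIntervals[len(retryIntervals)-1]
	}
	return retryIntervals[numRetries]
}

func main() {
	intervals := []time.Duration{
		30 * time.Second,
		time.Minute,
		2 * time.Minute,
		4 * time.Minute,
		8 * time.Minute,
		16 * time.Minute,
		32 * time.Minute,
	}
	for n := 0; n < 9; n++ {
		fmt.Printf("after failure %d wait %v\n", n+1, nextInterval(intervals, n))
	}
	// failures beyond the schedule keep waiting 32m0s
}
```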
+func (db *db) retryReplicator(ctx context.Context, peerID string) { + log.InfoContext(ctx, "Retrying replicator", corelog.String("PeerID", peerID)) + key := core.NewReplicatorRetryDocIDKey(peerID, "") + q := query.Query{ + Prefix: key.ToString(), + } + results, err := db.Peerstore().Query(ctx, q) + if err != nil { + log.ErrorContextE(ctx, "Failed to query retry docs", err) + return + } + defer closeQueryResults(results) + for result := range results.Next() { + select { + case <-ctx.Done(): + return + default: + } + key, err := core.NewReplicatorRetryDocIDKeyFromString(result.Key) + if err != nil { + log.ErrorContextE(ctx, "Failed to parse retry doc key", err) + continue + } + err = db.retryDoc(ctx, key.DocID) + if err != nil { + log.ErrorContextE(ctx, "Failed to retry doc", err) + err = db.handleCompletedReplicatorRetry(ctx, peerID, false) + if err != nil { + log.ErrorContextE(ctx, "Failed to handle completed replicator retry", err) + } + // if one doc fails, stop retrying the rest and just wait for the next retry + return + } + err = db.Peerstore().Delete(ctx, key.ToDS()) + if err != nil { + log.ErrorContextE(ctx, "Failed to delete retry docID", err) + } + } + err = db.handleCompletedReplicatorRetry(ctx, peerID, true) + if err != nil { + log.ErrorContextE(ctx, "Failed to handle completed replicator retry", err) + } +} + +func (db *db) retryDoc(ctx context.Context, docID string) error { + ctx, txn, err := ensureContextTxn(ctx, db, false) + if err != nil { + return err + } + defer txn.Discard(ctx) + headStoreKey := core.HeadStoreKey{ + DocID: docID, + FieldID: core.COMPOSITE_NAMESPACE, + } + headset := clock.NewHeadSet(txn.Headstore(), headStoreKey) + cids, _, err := headset.List(ctx) + if err != nil { + return err + } + + for _, c := range cids { + select { + case <-ctx.Done(): + return ErrContextDone + default: + } + rawblk, err := txn.Blockstore().Get(ctx, c) + if err != nil { + return err + } + blk, err := coreblock.GetFromBytes(rawblk.RawData()) + if err != nil { + return err + } + schema, err := db.getSchemaByVersionID(ctx, blk.Delta.GetSchemaVersionID()) + if err != nil { + return err + } + successChan := make(chan bool) + defer close(successChan) + updateEvent := event.Update{ + DocID: docID, + Cid: c, + SchemaRoot: schema.Root, + Block: rawblk.RawData(), + IsRetry: true, + // Because the retry is done in a separate goroutine but the retry handling process should be synchronous, + // we use a channel to block while waiting for the success status of the retry. + Success: successChan, + } + db.events.Publish(event.NewMessage(event.UpdateName, updateEvent)) + + select { + case success := <-successChan: + if !success { + return ErrFailedToRetryDoc + } + case <-time.After(retryTimeout): + return ErrTimeoutDocRetry + } + } + return nil +} + +// deleteReplicatorRetryIfNoMoreDocs deletes the replicator retry key if there are no more docs to retry. +// It returns true if there are no more docs to retry, false otherwise. 
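The `Success` channel used by `retryDoc` above is a plain publish-then-block handshake. A self-contained sketch of the same shape, with a stub publisher standing in for the event bus (illustrative only, not part of the patch):

```go
package main

import (
	"errors"
	"fmt"
	"time"
)

var errTimeout = errors.New("timeout while waiting for retry result")

// waitForAck publishes work to a handler and blocks until the handler
// reports success or failure, or the timeout elapses — the same shape as
// the Success channel on event.Update. As in the patch, the deferred close
// assumes the subscriber no longer sends after the timeout path is taken.
func waitForAck(publish func(chan bool), timeout time.Duration) error {
	success := make(chan bool)
	defer close(success)
	publish(success)
	select {
	case ok := <-success:
		if !ok {
			return errors.New("retry failed")
		}
		return nil
	case <-time.After(timeout):
		return errTimeout
	}
}

func main() {
	err := waitForAck(func(ack chan bool) {
		go func() { ack <- true }() // stub subscriber acknowledging success
	}, time.Second)
	fmt.Println(err) // <nil>
}
```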
+func deleteReplicatorRetryIfNoMoreDocs( + ctx context.Context, + txn datastore.Txn, + peerID string, +) (bool, error) { + key := core.NewReplicatorRetryDocIDKey(peerID, "") + q := query.Query{ + Prefix: key.ToString(), + KeysOnly: true, + } + results, err := txn.Peerstore().Query(ctx, q) + if err != nil { + return false, err + } + defer closeQueryResults(results) + entries, err := results.Rest() + if err != nil { + return false, err + } + if len(entries) == 0 { + key := core.NewReplicatorRetryIDKey(peerID) + return true, txn.Peerstore().Delete(ctx, key.ToDS()) + } + return false, nil +} + +// deleteReplicatorRetryAndDocs deletes the replicator retry and all retry docs. +func (db *db) deleteReplicatorRetryAndDocs(ctx context.Context, peerID string) error { + key := core.NewReplicatorRetryIDKey(peerID) + err := db.Peerstore().Delete(ctx, key.ToDS()) + if err != nil { + return err + } + docKey := core.NewReplicatorRetryDocIDKey(peerID, "") + q := query.Query{ + Prefix: docKey.ToString(), + KeysOnly: true, + } + results, err := db.Peerstore().Query(ctx, q) + if err != nil { + return err + } + defer closeQueryResults(results) + for result := range results.Next() { + err = db.Peerstore().Delete(ctx, core.NewReplicatorRetryDocIDKey(peerID, result.Key).ToDS()) + if err != nil { + return err + } + } + return nil +} + +func closeQueryResults(results query.Results) { + err := results.Close() + if err != nil { + log.ErrorE("Failed to close query results", err) + } +} diff --git a/net/client.go b/net/client.go index 35c1de139d..d5276f292b 100644 --- a/net/client.go +++ b/net/client.go @@ -30,7 +30,22 @@ var ( // pushLog creates a pushLog request and sends it to another node // over libp2p grpc connection -func (s *server) pushLog(evt event.Update, pid peer.ID) error { +func (s *server) pushLog(evt event.Update, pid peer.ID) (err error) { + defer func() { + // When the event is a retry, we don't need to republish the failure as + // it is already being handled by the retry mechanism through the success channel. 
+ if err != nil && !evt.IsRetry { + s.peer.bus.Publish(event.NewMessage(event.ReplicatorFailureName, event.ReplicatorFailure{ + DocID: evt.DocID, + PeerID: pid, + })) + } + // Success is not nil when the pushLog is called from a retry + if evt.Success != nil { + evt.Success <- err == nil + } + }() + client, err := s.dial(pid) // grpc dial over P2P stream if err != nil { return NewErrPushLog(err) diff --git a/net/client_test.go b/net/client_test.go index 629b176605..410ced82a7 100644 --- a/net/client_test.go +++ b/net/client_test.go @@ -95,7 +95,7 @@ func TestPushlogWithInvalidPeerID(t *testing.T) { require.Contains(t, err.Error(), "failed to parse peer ID") } -func TestPushlogW_WithValidPeerID_NoError(t *testing.T) { +func TestPushlog_WithValidPeerID_NoError(t *testing.T) { ctx := context.Background() db1, p1 := newTestPeer(ctx, t) defer db1.Close() diff --git a/net/peer.go b/net/peer.go index 7b855a1ca2..e4ebfe8573 100644 --- a/net/peer.go +++ b/net/peer.go @@ -22,7 +22,6 @@ import ( "github.com/ipfs/boxo/blockservice" "github.com/ipfs/boxo/bootstrap" blocks "github.com/ipfs/go-block-format" - "github.com/ipfs/go-cid" gostream "github.com/libp2p/go-libp2p-gostream" pubsub "github.com/libp2p/go-libp2p-pubsub" "github.com/libp2p/go-libp2p/core/host" @@ -237,13 +236,7 @@ func (p *Peer) handleMessageLoop() { switch evt := msg.Data.(type) { case event.Update: - var err error - if evt.IsCreate { - err = p.handleDocCreateLog(evt) - } else { - err = p.handleDocUpdateLog(evt) - } - + err := p.handleLog(evt) if err != nil { log.ErrorE("Error while handling broadcast log", err) } @@ -253,6 +246,7 @@ func (p *Peer) handleMessageLoop() { case event.Replicator: p.server.updateReplicators(evt) + default: // ignore other events continue @@ -260,77 +254,32 @@ func (p *Peer) handleMessageLoop() { } } -// RegisterNewDocument registers a new document with the peer node. -func (p *Peer) RegisterNewDocument( - ctx context.Context, - docID client.DocID, - c cid.Cid, - rawBlock []byte, - schemaRoot string, -) error { - // register topic - err := p.server.addPubSubTopic(docID.String(), !p.server.hasPubSubTopic(schemaRoot), nil) - if err != nil { - log.ErrorE( - "Failed to create new pubsub topic", - err, - corelog.String("DocID", docID.String()), - ) - return err - } - - req := &pushLogRequest{ - DocID: docID.String(), - CID: c.Bytes(), - SchemaRoot: schemaRoot, - Creator: p.host.ID().String(), - Block: rawBlock, - } - - return p.server.publishLog(ctx, schemaRoot, req) -} - -func (p *Peer) handleDocCreateLog(evt event.Update) error { - docID, err := client.NewDocIDFromString(evt.DocID) +func (p *Peer) handleLog(evt event.Update) error { + _, err := client.NewDocIDFromString(evt.DocID) if err != nil { return NewErrFailedToGetDocID(err) } - // We need to register the document before pushing to the replicators if we want to - // ensure that we have subscribed to the topic. 
-	err = p.RegisterNewDocument(p.ctx, docID, evt.Cid, evt.Block, evt.SchemaRoot)
-	if err != nil {
-		return err
-	}
-
-	// push to each peer (replicator)
-	p.pushLogToReplicators(evt)
-
-	return nil
-}
-
-func (p *Peer) handleDocUpdateLog(evt event.Update) error {
 	// push to each peer (replicator)
 	p.pushLogToReplicators(evt)
 
-	_, err := client.NewDocIDFromString(evt.DocID)
-	if err != nil {
-		return NewErrFailedToGetDocID(err)
-	}
-
-	req := &pushLogRequest{
-		DocID:      evt.DocID,
-		CID:        evt.Cid.Bytes(),
-		SchemaRoot: evt.SchemaRoot,
-		Creator:    p.host.ID().String(),
-		Block:      evt.Block,
-	}
+	// Retries are for replicators only and should not pollute the pubsub network.
+	if !evt.IsRetry {
+		req := &pushLogRequest{
+			DocID:      evt.DocID,
+			CID:        evt.Cid.Bytes(),
+			SchemaRoot: evt.SchemaRoot,
+			Creator:    p.host.ID().String(),
+			Block:      evt.Block,
+		}
 
-	if err := p.server.publishLog(p.ctx, evt.DocID, req); err != nil {
-		return NewErrPublishingToDocIDTopic(err, evt.Cid.String(), evt.DocID)
-	}
+		if err := p.server.publishLog(p.ctx, evt.DocID, req); err != nil {
+			return NewErrPublishingToDocIDTopic(err, evt.Cid.String(), evt.DocID)
+		}
 
-	if err := p.server.publishLog(p.ctx, evt.SchemaRoot, req); err != nil {
-		return NewErrPublishingToSchemaTopic(err, evt.Cid.String(), evt.SchemaRoot)
+		if err := p.server.publishLog(p.ctx, evt.SchemaRoot, req); err != nil {
+			return NewErrPublishingToSchemaTopic(err, evt.Cid.String(), evt.SchemaRoot)
+		}
 	}
 
 	return nil
@@ -344,26 +293,12 @@ func (p *Peer) pushLogToReplicators(lg event.Update) {
 		log.ErrorE("Failed to notify new blocks", err)
 	}
 
-	// push to each peer (replicator)
-	peers := make(map[string]struct{})
-	for _, peer := range p.ps.ListPeers(lg.DocID) {
-		peers[peer.String()] = struct{}{}
-	}
-	for _, peer := range p.ps.ListPeers(lg.SchemaRoot) {
-		peers[peer.String()] = struct{}{}
-	}
-
 	p.server.mu.Lock()
 	reps, exists := p.server.replicators[lg.SchemaRoot]
 	p.server.mu.Unlock()
 
 	if exists {
 		for pid := range reps {
-			// Don't push if pid is in the list of peers for the topic.
-			// It will be handled by the pubsub system.
- if _, ok := peers[pid.String()]; ok { - continue - } go func(peerID peer.ID) { if err := p.server.pushLog(lg, peerID); err != nil { log.ErrorE( diff --git a/net/peer_test.go b/net/peer_test.go index 10af3a3ab4..40249192ea 100644 --- a/net/peer_test.go +++ b/net/peer_test.go @@ -13,6 +13,7 @@ package net import ( "context" "testing" + "time" "github.com/ipfs/go-cid" mh "github.com/multiformats/go-multihash" @@ -67,7 +68,13 @@ func newTestPeer(ctx context.Context, t *testing.T) (client.DB, *Peer) { store := memory.NewDatastore(ctx) acpLocal := acp.NewLocalACP() acpLocal.Init(context.Background(), "") - db, err := db.NewDB(ctx, store, immutable.Some[acp.ACP](acpLocal), nil) + db, err := db.NewDB( + ctx, + store, + immutable.Some[acp.ACP](acpLocal), + nil, + db.WithRetryInterval([]time.Duration{time.Second}), + ) require.NoError(t, err) n, err := NewPeer( @@ -134,60 +141,7 @@ func TestStart_WithKnownPeer_NoError(t *testing.T) { require.NoError(t, err) } -func TestRegisterNewDocument_NoError(t *testing.T) { - ctx := context.Background() - db, p := newTestPeer(ctx, t) - defer db.Close() - defer p.Close() - - _, err := db.AddSchema(ctx, `type User { - name: String - age: Int - }`) - require.NoError(t, err) - - col, err := db.GetCollectionByName(ctx, "User") - require.NoError(t, err) - - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) - require.NoError(t, err) - - cid, err := createCID(doc) - require.NoError(t, err) - - err = p.RegisterNewDocument(ctx, doc.ID(), cid, emptyBlock(), col.SchemaRoot()) - require.NoError(t, err) -} - -func TestRegisterNewDocument_RPCTopicAlreadyRegisteredError(t *testing.T) { - ctx := context.Background() - db, p := newTestPeer(ctx, t) - defer db.Close() - defer p.Close() - - _, err := db.AddSchema(ctx, `type User { - name: String - age: Int - }`) - require.NoError(t, err) - - col, err := db.GetCollectionByName(ctx, "User") - require.NoError(t, err) - - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) - require.NoError(t, err) - - _, err = rpc.NewTopic(ctx, p.ps, p.host.ID(), doc.ID().String(), true) - require.NoError(t, err) - - cid, err := createCID(doc) - require.NoError(t, err) - - err = p.RegisterNewDocument(ctx, doc.ID(), cid, emptyBlock(), col.SchemaRoot()) - require.Equal(t, err.Error(), "creating topic: joining topic: topic already exists") -} - -func TestHandleDocCreateLog_NoError(t *testing.T) { +func TestHandleLog_NoError(t *testing.T) { ctx := context.Background() db, p := newTestPeer(ctx, t) defer db.Close() @@ -214,7 +168,7 @@ func TestHandleDocCreateLog_NoError(t *testing.T) { b, err := db.Blockstore().AsIPLDStorage().Get(ctx, headCID.KeyString()) require.NoError(t, err) - err = p.handleDocCreateLog(event.Update{ + err = p.handleLog(event.Update{ DocID: doc.ID().String(), Cid: headCID, SchemaRoot: col.SchemaRoot(), @@ -223,88 +177,19 @@ func TestHandleDocCreateLog_NoError(t *testing.T) { require.NoError(t, err) } -func TestHandleDocCreateLog_WithInvalidDocID_NoError(t *testing.T) { - ctx := context.Background() - db, p := newTestPeer(ctx, t) - defer db.Close() - defer p.Close() - - err := p.handleDocCreateLog(event.Update{ - DocID: "some-invalid-key", - }) - require.ErrorContains(t, err, "failed to get DocID from broadcast message: selected encoding not supported") -} - -func TestHandleDocCreateLog_WithExistingTopic_TopicExistsError(t *testing.T) { - ctx := context.Background() - db, p := newTestPeer(ctx, t) - defer db.Close() - defer p.Close() - - _, err := 
db.AddSchema(ctx, `type User { - name: String - age: Int - }`) - require.NoError(t, err) - - col, err := db.GetCollectionByName(ctx, "User") - require.NoError(t, err) - - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) - require.NoError(t, err) - - _, err = rpc.NewTopic(ctx, p.ps, p.host.ID(), "bae-7fca96a2-5f01-5558-a81f-09b47587f26d", true) - require.NoError(t, err) - - err = p.handleDocCreateLog(event.Update{ - DocID: doc.ID().String(), - SchemaRoot: col.SchemaRoot(), - }) - require.ErrorContains(t, err, "topic already exists") -} - -func TestHandleDocUpdateLog_NoError(t *testing.T) { - ctx := context.Background() - db, p := newTestPeer(ctx, t) - defer db.Close() - defer p.Close() - - _, err := db.AddSchema(ctx, `type User { - name: String - age: Int - }`) - require.NoError(t, err) - - col, err := db.GetCollectionByName(ctx, "User") - require.NoError(t, err) - - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) - require.NoError(t, err) - - cid, err := createCID(doc) - require.NoError(t, err) - - err = p.handleDocUpdateLog(event.Update{ - DocID: doc.ID().String(), - Cid: cid, - SchemaRoot: col.SchemaRoot(), - }) - require.NoError(t, err) -} - -func TestHandleDoUpdateLog_WithInvalidDocID_NoError(t *testing.T) { +func TestHandleLog_WithInvalidDocID_NoError(t *testing.T) { ctx := context.Background() db, p := newTestPeer(ctx, t) defer db.Close() defer p.Close() - err := p.handleDocUpdateLog(event.Update{ + err := p.handleLog(event.Update{ DocID: "some-invalid-key", }) require.ErrorContains(t, err, "failed to get DocID from broadcast message: selected encoding not supported") } -func TestHandleDocUpdateLog_WithExistingDocIDTopic_TopicExistsError(t *testing.T) { +func TestHandleLog_WithExistingTopic_TopicExistsError(t *testing.T) { ctx := context.Background() db, p := newTestPeer(ctx, t) defer db.Close() @@ -322,21 +207,17 @@ func TestHandleDocUpdateLog_WithExistingDocIDTopic_TopicExistsError(t *testing.T doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Definition()) require.NoError(t, err) - cid, err := createCID(doc) - require.NoError(t, err) - _, err = rpc.NewTopic(ctx, p.ps, p.host.ID(), "bae-7fca96a2-5f01-5558-a81f-09b47587f26d", true) require.NoError(t, err) - err = p.handleDocUpdateLog(event.Update{ + err = p.handleLog(event.Update{ DocID: doc.ID().String(), - Cid: cid, SchemaRoot: col.SchemaRoot(), }) require.ErrorContains(t, err, "topic already exists") } -func TestHandleDocUpdateLog_WithExistingSchemaTopic_TopicExistsError(t *testing.T) { +func TestHandleLog_WithExistingSchemaTopic_TopicExistsError(t *testing.T) { ctx := context.Background() db, p := newTestPeer(ctx, t) defer db.Close() @@ -360,7 +241,7 @@ func TestHandleDocUpdateLog_WithExistingSchemaTopic_TopicExistsError(t *testing. _, err = rpc.NewTopic(ctx, p.ps, p.host.ID(), "bafkreia7ljiy5oief4dp5xsk7t7zlgfjzqh3537hw7rtttjzchybfxtn4u", true) require.NoError(t, err) - err = p.handleDocUpdateLog(event.Update{ + err = p.handleLog(event.Update{ DocID: doc.ID().String(), Cid: cid, SchemaRoot: col.SchemaRoot(), diff --git a/net/server.go b/net/server.go index 2e4939c77f..c83ba3f6be 100644 --- a/net/server.go +++ b/net/server.go @@ -143,7 +143,7 @@ func (s *server) PushLog(ctx context.Context, req *pushLogRequest) (*pushLogRepl // Once processed, subscribe to the DocID topic on the pubsub network unless we already // subscribed to the collection. 
- if !s.hasPubSubTopic(req.SchemaRoot) { + if !s.hasPubSubTopicAndSubscribed(req.SchemaRoot) { err = s.addPubSubTopic(docID.String(), true, nil) if err != nil { return nil, err @@ -217,12 +217,12 @@ func (s *server) AddPubSubTopic(topicName string, handler rpc.MessageHandler) er return s.addPubSubTopic(topicName, true, handler) } -// hasPubSubTopic checks if we are subscribed to a topic. -func (s *server) hasPubSubTopic(topic string) bool { +// hasPubSubTopicAndSubscribed checks if we are subscribed to a topic. +func (s *server) hasPubSubTopicAndSubscribed(topic string) bool { s.mu.Lock() defer s.mu.Unlock() - _, ok := s.topics[topic] - return ok + t, ok := s.topics[topic] + return ok && t.subscribed } // removePubSubTopic unsubscribes to a topic @@ -266,10 +266,6 @@ func (s *server) removeAllPubsubTopics() error { // publishLog publishes the given PushLogRequest object on the PubSub network via the // corresponding topic func (s *server) publishLog(ctx context.Context, topic string, req *pushLogRequest) error { - log.InfoContext(ctx, "Publish log", - corelog.String("PeerID", s.peer.PeerID().String()), - corelog.String("Topic", topic)) - if s.peer.ps == nil { // skip if we aren't running with a pubsub net return nil } @@ -277,13 +273,18 @@ func (s *server) publishLog(ctx context.Context, topic string, req *pushLogReque t, ok := s.topics[topic] s.mu.Unlock() if !ok { - err := s.addPubSubTopic(topic, false, nil) + subscribe := topic != req.SchemaRoot && !s.hasPubSubTopicAndSubscribed(req.SchemaRoot) + err := s.addPubSubTopic(topic, subscribe, nil) if err != nil { return errors.Wrap(fmt.Sprintf("failed to created single use topic %s", topic), err) } return s.publishLog(ctx, topic, req) } + log.InfoContext(ctx, "Publish log", + corelog.String("PeerID", s.peer.PeerID().String()), + corelog.String("Topic", topic)) + data, err := cbor.Marshal(req) if err != nil { return errors.Wrap("failed to marshal pubsub message", err) diff --git a/tests/bench/query/planner/utils.go b/tests/bench/query/planner/utils.go index b9e077867b..0ab739ac20 100644 --- a/tests/bench/query/planner/utils.go +++ b/tests/bench/query/planner/utils.go @@ -137,7 +137,7 @@ func (*dummyTxn) Rootstore() datastore.DSReaderWriter { return nil } func (*dummyTxn) Datastore() datastore.DSReaderWriter { return nil } func (*dummyTxn) Encstore() datastore.Blockstore { return nil } func (*dummyTxn) Headstore() datastore.DSReaderWriter { return nil } -func (*dummyTxn) Peerstore() datastore.DSBatching { return nil } +func (*dummyTxn) Peerstore() datastore.DSReaderWriter { return nil } func (*dummyTxn) Blockstore() datastore.Blockstore { return nil } func (*dummyTxn) Systemstore() datastore.DSReaderWriter { return nil } func (*dummyTxn) Commit(ctx context.Context) error { return nil } diff --git a/tests/clients/cli/wrapper.go b/tests/clients/cli/wrapper.go index b3261f09a8..6983aa1797 100644 --- a/tests/clients/cli/wrapper.go +++ b/tests/clients/cli/wrapper.go @@ -531,7 +531,7 @@ func (w *Wrapper) Headstore() ds.Read { return w.node.DB.Headstore() } -func (w *Wrapper) Peerstore() datastore.DSBatching { +func (w *Wrapper) Peerstore() datastore.DSReaderWriter { return w.node.DB.Peerstore() } diff --git a/tests/clients/cli/wrapper_tx.go b/tests/clients/cli/wrapper_tx.go index e3bf41d818..d4b0f244a5 100644 --- a/tests/clients/cli/wrapper_tx.go +++ b/tests/clients/cli/wrapper_tx.go @@ -83,7 +83,7 @@ func (w *Transaction) Headstore() datastore.DSReaderWriter { return w.tx.Headstore() } -func (w *Transaction) Peerstore() datastore.DSBatching { +func 
(w *Transaction) Peerstore() datastore.DSReaderWriter { return w.tx.Peerstore() } diff --git a/tests/clients/http/wrapper.go b/tests/clients/http/wrapper.go index ae6cd61529..35a386e18a 100644 --- a/tests/clients/http/wrapper.go +++ b/tests/clients/http/wrapper.go @@ -252,7 +252,7 @@ func (w *Wrapper) Headstore() ds.Read { return w.node.DB.Headstore() } -func (w *Wrapper) Peerstore() datastore.DSBatching { +func (w *Wrapper) Peerstore() datastore.DSReaderWriter { return w.node.DB.Peerstore() } diff --git a/tests/clients/http/wrapper_tx.go b/tests/clients/http/wrapper_tx.go index baf841871a..8778dcdd7d 100644 --- a/tests/clients/http/wrapper_tx.go +++ b/tests/clients/http/wrapper_tx.go @@ -77,7 +77,7 @@ func (w *TxWrapper) Headstore() datastore.DSReaderWriter { return w.server.Headstore() } -func (w *TxWrapper) Peerstore() datastore.DSBatching { +func (w *TxWrapper) Peerstore() datastore.DSReaderWriter { return w.server.Peerstore() } diff --git a/tests/integration/db.go b/tests/integration/db.go index b9c1b3791d..784ff6952f 100644 --- a/tests/integration/db.go +++ b/tests/integration/db.go @@ -37,9 +37,9 @@ const ( ) const ( - badgerIMType DatabaseType = "badger-in-memory" - defraIMType DatabaseType = "defra-memory-datastore" - badgerFileType DatabaseType = "badger-file-system" + BadgerIMType DatabaseType = "badger-in-memory" + DefraIMType DatabaseType = "defra-memory-datastore" + BadgerFileType DatabaseType = "badger-file-system" ) var ( @@ -165,10 +165,10 @@ func setupNode(s *state, opts ...node.Option) (*node.Node, string, error) { var path string switch s.dbt { - case badgerIMType: + case BadgerIMType: opts = append(opts, node.WithBadgerInMemory(true)) - case badgerFileType: + case BadgerFileType: switch { case databaseDir != "": // restarting database @@ -185,7 +185,7 @@ func setupNode(s *state, opts ...node.Option) (*node.Node, string, error) { opts = append(opts, node.WithStorePath(path), node.WithACPPath(path)) - case defraIMType: + case DefraIMType: opts = append(opts, node.WithStoreType(node.MemoryStore)) default: diff --git a/tests/integration/net/simple/replicator/with_create_test.go b/tests/integration/net/simple/replicator/with_create_test.go index 1eab640b83..d8bad791b6 100644 --- a/tests/integration/net/simple/replicator/with_create_test.go +++ b/tests/integration/net/simple/replicator/with_create_test.go @@ -585,3 +585,73 @@ func TestP2POneToOneReplicatorOrderIndependentDirectCreate(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestP2POneToOneReplicator_ManyDocsWithTargetNodeTemporarilyOffline_ShouldSucceed(t *testing.T) { + test := testUtils.TestCase{ + SupportedDatabaseTypes: immutable.Some( + []testUtils.DatabaseType{ + // This test only supports file type databases since it requires the ability to + // stop and start a node without losing data. 
+ testUtils.BadgerFileType, + }, + ), + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Age: Int + } + `, + }, + testUtils.ConfigureReplicator{ + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.Close{ + NodeID: immutable.Some(1), + }, + testUtils.CreateDoc{ + // Create John on the first (source) node only, and allow the value to sync + NodeID: immutable.Some(0), + Doc: `{ + "Name": "John", + "Age": 21 + }`, + }, + testUtils.CreateDoc{ + // Create Fred on the first (source) node only, and allow the value to sync + NodeID: immutable.Some(0), + Doc: `{ + "Name": "Fred", + "Age": 22 + }`, + }, + testUtils.Start{ + NodeID: immutable.Some(1), + }, + testUtils.WaitForSync{}, + testUtils.Request{ + Request: `query { + Users { + Age + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Age": int64(22), + }, + { + "Age": int64(21), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/net/simple/replicator/with_update_test.go b/tests/integration/net/simple/replicator/with_update_test.go index 32175dac3b..f7486c484e 100644 --- a/tests/integration/net/simple/replicator/with_update_test.go +++ b/tests/integration/net/simple/replicator/with_update_test.go @@ -125,3 +125,164 @@ func TestP2POneToOneReplicatorUpdatesDocCreatedBeforeReplicatorConfigWithNodesIn testUtils.ExecuteTestCase(t, test) } + +func TestP2POneToOneReplicator_ManyDocsUpdateWithTargetNodeTemporarilyOffline_ShouldSucceed(t *testing.T) { + test := testUtils.TestCase{ + SupportedDatabaseTypes: immutable.Some( + []testUtils.DatabaseType{ + // This test only supports file type databases since it requires the ability to + // stop and start a node without losing data. + testUtils.BadgerFileType, + }, + ), + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Age: Int + } + `, + }, + testUtils.ConfigureReplicator{ + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.Close{ + NodeID: immutable.Some(1), + }, + testUtils.CreateDoc{ + // Create John on the first (source) node only, and allow the value to sync + NodeID: immutable.Some(0), + Doc: `{ + "Name": "John", + "Age": 21 + }`, + }, + testUtils.CreateDoc{ + // Create Fred on the first (source) node only, and allow the value to sync + NodeID: immutable.Some(0), + Doc: `{ + "Name": "Fred", + "Age": 22 + }`, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + DocID: 0, + Doc: `{"Age": 22}`, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + DocID: 1, + Doc: `{"Age": 23}`, + }, + testUtils.Start{ + NodeID: immutable.Some(1), + }, + testUtils.WaitForSync{}, + testUtils.Request{ + Request: `query { + Users { + Age + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Age": int64(23), + }, + { + "Age": int64(22), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestP2POneToOneReplicator_ManyDocsUpdateWithTargetNodeTemporarilyOfflineAfterCreate_ShouldSucceed(t *testing.T) { + test := testUtils.TestCase{ + SupportedDatabaseTypes: immutable.Some( + []testUtils.DatabaseType{ + // This test only supports file type databases since it requires the ability to + // stop and start a node without losing data. 
+ testUtils.BadgerFileType, + }, + ), + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Age: Int + } + `, + }, + testUtils.ConfigureReplicator{ + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.CreateDoc{ + // Create John on the first (source) node only, and allow the value to sync + NodeID: immutable.Some(0), + Doc: `{ + "Name": "John", + "Age": 21 + }`, + }, + testUtils.CreateDoc{ + // Create Fred on the first (source) node only, and allow the value to sync + NodeID: immutable.Some(0), + Doc: `{ + "Name": "Fred", + "Age": 22 + }`, + }, + testUtils.WaitForSync{}, + testUtils.Close{ + NodeID: immutable.Some(1), + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + DocID: 0, + Doc: `{"Age": 22}`, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + DocID: 1, + Doc: `{"Age": 23}`, + }, + testUtils.Start{ + NodeID: immutable.Some(1), + }, + testUtils.WaitForSync{}, + testUtils.Request{ + Request: `query { + Users { + Age + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Age": int64(23), + }, + { + "Age": int64(22), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go index f102294e97..3103d674ca 100644 --- a/tests/integration/test_case.go +++ b/tests/integration/test_case.go @@ -60,6 +60,13 @@ type TestCase struct { // differences between view types, or we need to temporarily document a bug. SupportedViewTypes immutable.Option[[]ViewType] + // If provided a value, SupportedDatabaseTypes will cause this test to be skipped + // if the active database type is not within the given set. + // + // This is to only be used in the very rare cases where we really do want behavioural + // differences between database types, or we need to temporarily document a bug. + SupportedDatabaseTypes immutable.Option[[]DatabaseType] + // Configuration for KMS to be used in the test KMS KMS } @@ -93,6 +100,22 @@ type ConfigureNode func() []net.NodeOpt // Restart is an action that will close and then start all nodes. type Restart struct{} +// Close is an action that will close a node. +type Close struct { + // NodeID may hold the ID (index) of a node to close. + // + // If a value is not provided the close will be applied to all nodes. + NodeID immutable.Option[int] +} + +// Start is an action that will start a node that has been previously closed. +type Start struct { + // NodeID may hold the ID (index) of a node to start. + // + // If a value is not provided the start will be applied to all nodes. + NodeID immutable.Option[int] +} + // SchemaUpdate is an action that will update the database schema. // // WARNING: getCollectionNames will not work with schemas ending in `type`, e.g. 
`user_type`
diff --git a/tests/integration/utils.go b/tests/integration/utils.go
index 6aac10e5e4..744f874423 100644
--- a/tests/integration/utils.go
+++ b/tests/integration/utils.go
@@ -178,13 +178,13 @@ func ExecuteTestCase(
 var databases []DatabaseType
 if badgerInMemory {
- databases = append(databases, badgerIMType)
+ databases = append(databases, BadgerIMType)
 }
 if badgerFile {
- databases = append(databases, badgerFileType)
+ databases = append(databases, BadgerFileType)
 }
 if inMemoryStore {
- databases = append(databases, defraIMType)
+ databases = append(databases, DefraIMType)
 }

 var kmsList []KMSType
@@ -203,6 +203,7 @@ func ExecuteTestCase(
 require.NotEmpty(t, databases)
 require.NotEmpty(t, clients)

+ databases = skipIfDatabaseTypeUnsupported(t, databases, testCase.SupportedDatabaseTypes)
 clients = skipIfClientTypeUnsupported(t, clients, testCase.SupportedClientTypes)

 ctx := context.Background()
@@ -251,7 +252,7 @@ func executeTestCase(

 // It is very important that the databases are always closed, otherwise resources will leak
 // as tests run. This is particularly important for file based datastores.
- defer closeNodes(s)
+ defer closeNodes(s, Close{})

 // Documents and Collections may already exist in the database if actions have been split
 // by the change detector so we should fetch them here at the start too (if they exist).
@@ -292,6 +293,12 @@ func performAction(
 case Restart:
 restartNodes(s)

+ case Close:
+ closeNodes(s, action)
+
+ case Start:
+ startNodes(s, action)
+
 case ConnectPeers:
 connectPeers(s, action)

@@ -458,7 +465,7 @@ func benchmarkAction(
 actionIndex int,
 bench Benchmark,
 ) {
- if s.dbt == defraIMType {
+ if s.dbt == DefraIMType {
 // Benchmarking makes no sense for in-memory test storage
 return
 }
@@ -557,8 +564,9 @@ func getCollectionNamesFromSchema(result map[string]int, schema string, nextInde
 // closeNodes closes all the given nodes, ensuring that resources are properly released.
 func closeNodes(
 s *state,
+ action Close,
 ) {
- for _, node := range s.nodes {
+ for _, node := range getNodes(action.NodeID, s.nodes) {
 node.Close()
 }
 }
@@ -723,20 +731,18 @@ func setStartingNodes(
 }
 }

-func restartNodes(
- s *state,
-) {
- if s.dbt == badgerIMType || s.dbt == defraIMType {
- return
- }
- closeNodes(s)
-
+func startNodes(s *state, action Start) {
+ nodes := getNodes(action.NodeID, s.nodes)
 // We need to restart the nodes in reverse order, to avoid dial backoff issues.
- for i := len(s.nodes) - 1; i >= 0; i-- {
+ for i := len(nodes) - 1; i >= 0; i-- {
+ nodeIndex := i
+ if action.NodeID.HasValue() {
+ nodeIndex = action.NodeID.Value()
+ }
 originalPath := databaseDir
- databaseDir = s.dbPaths[i]
+ databaseDir = s.dbPaths[nodeIndex]
 node, _, err := setupNode(s)
- require.Nil(s.t, err)
+ require.NoError(s.t, err)
 databaseDir = originalPath

 if len(s.nodeConfigs) == 0 {
@@ -744,22 +750,22 @@ func restartNodes(
 // basic (i.e. no P2P stuff) and can be yielded now.
 c, err := setupClient(s, node)
 require.NoError(s.t, err)
- s.nodes[i] = c
+ s.nodes[nodeIndex] = c

 eventState, err := newEventState(c.Events())
 require.NoError(s.t, err)
- s.nodeEvents[i] = eventState
+ s.nodeEvents[nodeIndex] = eventState
 continue
 }

 // We need to make sure the node is configured with its old address, otherwise
 // a new one may be selected and reconnection to it will fail.
 var addresses []string
- for _, addr := range s.nodeAddresses[i].Addrs {
+ for _, addr := range s.nodeAddresses[nodeIndex].Addrs {
 addresses = append(addresses, addr.String())
 }

- nodeOpts := s.nodeConfigs[i]
+ nodeOpts := s.nodeConfigs[nodeIndex]
 nodeOpts = append(nodeOpts, net.WithListenAddresses(addresses...))

 node.Peer, err = net.NewPeer(s.ctx, node.DB.Blockstore(), node.DB.Encstore(), node.DB.Events(), nodeOpts...)
@@ -767,11 +773,11 @@

 c, err := setupClient(s, node)
 require.NoError(s.t, err)
- s.nodes[i] = c
+ s.nodes[nodeIndex] = c

 eventState, err := newEventState(c.Events())
 require.NoError(s.t, err)
- s.nodeEvents[i] = eventState
+ s.nodeEvents[nodeIndex] = eventState

 waitForNetworkSetupEvents(s, i)
 }
@@ -780,6 +786,16 @@
 // will reference the old (closed) database instances.
 refreshCollections(s)
 refreshIndexes(s)
+}
+
+func restartNodes(
+ s *state,
+) {
+ if s.dbt == BadgerIMType || s.dbt == DefraIMType {
+ return
+ }
+ closeNodes(s, Close{})
+ startNodes(s, Start{})

 reconnectPeers(s)
 }
@@ -840,7 +856,7 @@ func configureNode(
 netNodeOpts := action()
 netNodeOpts = append(netNodeOpts, net.WithPrivateKey(privateKey))

- nodeOpts := []node.Option{node.WithDisableP2P(false)}
+ nodeOpts := []node.Option{node.WithDisableP2P(false), db.WithRetryInterval([]time.Duration{time.Millisecond * 1})}
 for _, opt := range netNodeOpts {
 nodeOpts = append(nodeOpts, opt)
 }
@@ -2456,6 +2472,31 @@ func skipIfACPTypeUnsupported(t testing.TB, supporteACPTypes immutable.Option[[]
 }
 }

+func skipIfDatabaseTypeUnsupported(
+ t testing.TB,
+ databases []DatabaseType,
+ supportedDatabaseTypes immutable.Option[[]DatabaseType],
+) []DatabaseType {
+ if !supportedDatabaseTypes.HasValue() {
+ return databases
+ }
+ filteredDatabases := []DatabaseType{}
+ for _, supportedType := range supportedDatabaseTypes.Value() {
+ for _, database := range databases {
+ if supportedType == database {
+ filteredDatabases = append(filteredDatabases, database)
+ break
+ }
+ }
+ }
+
+ if len(filteredDatabases) == 0 {
+ t.Skipf("test does not support any of the given database types. Supported: %v", supportedDatabaseTypes.Value())
+ }
+
+ return filteredDatabases
+}
+
 // skipIfNetworkTest skips the current test if the given actions
 // contain network actions and skipNetworkTests is true.
 func skipIfNetworkTest(t testing.TB, actions []any) {

From aa6f235d2c920ef29f3754318440b3bd961c690b Mon Sep 17 00:00:00 2001
From: Keenan Nemetz
Date: Fri, 11 Oct 2024 13:40:34 -0700
Subject: [PATCH 62/71] feat: JSON type filter (#3122)

## Relevant issue(s)

Resolves #3106

## Description

This PR enables filtering on JSON field types.

## Tasks

- [x] I made sure the code is well commented, particularly hard-to-understand areas.
- [x] I made sure the repository-held documentation is changed accordingly.
- [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)).
- [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ...

## How has this been tested?

Added integration tests.
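For illustration, a sketch of the kind of query the new tests exercise — filtering documents on a JSON field, including nested properties (this assumes a `Users` type with a `custom: JSON` field, mirroring the integration tests below; it is not taken verbatim from the PR):

```graphql
query {
  Users(filter: {custom: {age: {_gt: 20}}}) {
    name
  }
}
```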
Specify the platform(s) on which this was tested: - MacOS --- internal/connor/all.go | 3 + internal/connor/any.go | 3 + internal/planner/mapper/mapper.go | 139 ++--- internal/planner/mapper/targetable.go | 37 ++ internal/request/graphql/schema/generate.go | 72 +-- internal/request/graphql/schema/manager.go | 4 - internal/request/graphql/schema/types/base.go | 84 --- .../explain/default/with_filter_test.go | 49 ++ tests/integration/query/json/with_all_test.go | 59 +++ tests/integration/query/json/with_any_test.go | 59 +++ .../with_eq_test.go} | 8 +- tests/integration/query/json/with_ge_test.go | 499 ++++++++++++++++++ tests/integration/query/json/with_gt_test.go | 497 +++++++++++++++++ .../with_in_test.go} | 4 +- tests/integration/query/json/with_le_test.go | 493 +++++++++++++++++ .../integration/query/json/with_like_test.go | 77 +++ tests/integration/query/json/with_lt_test.go | 489 +++++++++++++++++ tests/integration/query/json/with_ne_test.go | 165 ++++++ .../with_nin_test.go} | 30 +- .../integration/query/json/with_nlike_test.go | 89 ++++ .../integration/query/json/with_none_test.go | 59 +++ tests/integration/schema/filter_test.go | 123 +++++ tests/integration/schema/simple_test.go | 46 ++ 23 files changed, 2886 insertions(+), 202 deletions(-) create mode 100644 tests/integration/query/json/with_all_test.go create mode 100644 tests/integration/query/json/with_any_test.go rename tests/integration/query/{simple/with_filter/with_eq_json_test.go => json/with_eq_test.go} (91%) create mode 100644 tests/integration/query/json/with_ge_test.go create mode 100644 tests/integration/query/json/with_gt_test.go rename tests/integration/query/{simple/with_filter/with_in_json_test.go => json/with_in_test.go} (93%) create mode 100644 tests/integration/query/json/with_le_test.go create mode 100644 tests/integration/query/json/with_like_test.go create mode 100644 tests/integration/query/json/with_lt_test.go create mode 100644 tests/integration/query/json/with_ne_test.go rename tests/integration/query/{simple/with_filter/with_like_json_test.go => json/with_nin_test.go} (70%) create mode 100644 tests/integration/query/json/with_nlike_test.go create mode 100644 tests/integration/query/json/with_none_test.go diff --git a/internal/connor/all.go b/internal/connor/all.go index 0b9800de89..ce2557d25b 100644 --- a/internal/connor/all.go +++ b/internal/connor/all.go @@ -11,6 +11,9 @@ import ( // matching if all of them match. func all(condition, data any) (bool, error) { switch t := data.(type) { + case []any: + return allSlice(condition, t) + case []string: return allSlice(condition, t) diff --git a/internal/connor/any.go b/internal/connor/any.go index a9c02b1369..7eea2a7bce 100644 --- a/internal/connor/any.go +++ b/internal/connor/any.go @@ -11,6 +11,9 @@ import ( // matching if any of them match. 
func anyOp(condition, data any) (bool, error) { switch t := data.(type) { + case []any: + return anySlice(condition, t) + case []string: return anySlice(condition, t) diff --git a/internal/planner/mapper/mapper.go b/internal/planner/mapper/mapper.go index da8390e293..9845e93d13 100644 --- a/internal/planner/mapper/mapper.go +++ b/internal/planner/mapper/mapper.go @@ -1286,7 +1286,7 @@ func ToFilter(source request.Filter, mapping *core.DocumentMapping) *Filter { conditions := make(map[connor.FilterKey]any, len(source.Conditions)) for sourceKey, sourceClause := range source.Conditions { - key, clause := toFilterMap(sourceKey, sourceClause, mapping) + key, clause := toFilterKeyValue(sourceKey, sourceClause, mapping) conditions[key] = clause } @@ -1296,87 +1296,102 @@ func ToFilter(source request.Filter, mapping *core.DocumentMapping) *Filter { } } -// toFilterMap converts a consumer-defined filter key-value into a filter clause -// keyed by field index. +// toFilterKeyValue converts a consumer-defined filter key-value into a filter clause +// keyed by connor.FilterKey. // -// Return key will either be an int (field index), or a string (operator). -func toFilterMap( +// The returned key will be one of the following: +// - Operator: if the sourceKey is one of the defined filter operators +// - PropertyIndex: if the sourceKey exists in the document mapping +// - ObjectProperty: if the sourceKey does not match one of the above +func toFilterKeyValue( sourceKey string, sourceClause any, mapping *core.DocumentMapping, ) (connor.FilterKey, any) { + var returnKey connor.FilterKey if strings.HasPrefix(sourceKey, "_") && sourceKey != request.DocIDFieldName { - key := &Operator{ + returnKey = &Operator{ Operation: sourceKey, } // if the operator is simple (not compound) then // it does not require further expansion if connor.IsOpSimple(sourceKey) { - return key, sourceClause - } - switch typedClause := sourceClause.(type) { - case []any: - // If the clause is an array then we need to convert any inner maps. - returnClauses := []any{} - for _, innerSourceClause := range typedClause { - var returnClause any - switch typedInnerSourceClause := innerSourceClause.(type) { - case map[string]any: - innerMapClause := map[connor.FilterKey]any{} - for innerSourceKey, innerSourceValue := range typedInnerSourceClause { - rKey, rValue := toFilterMap(innerSourceKey, innerSourceValue, mapping) - innerMapClause[rKey] = rValue - } - returnClause = innerMapClause - default: - returnClause = innerSourceClause - } - returnClauses = append(returnClauses, returnClause) - } - return key, returnClauses - case map[string]any: - innerMapClause := map[connor.FilterKey]any{} - for innerSourceKey, innerSourceValue := range typedClause { - rKey, rValue := toFilterMap(innerSourceKey, innerSourceValue, mapping) - innerMapClause[rKey] = rValue - } - return key, innerMapClause - default: - return key, typedClause + return returnKey, sourceClause } - } else { + } else if mapping != nil && len(mapping.IndexesByName[sourceKey]) > 0 { // If there are multiple properties of the same name we can just take the first as // we have no other reasonable way of identifying which property they mean if multiple // consumer specified requestables are available. Aggregate dependencies should not // impact this as they are added after selects. 
- index := mapping.FirstIndexOfName(sourceKey) - key := &PropertyIndex{ - Index: index, + returnKey = &PropertyIndex{ + Index: mapping.FirstIndexOfName(sourceKey), } - switch typedClause := sourceClause.(type) { - case map[string]any: - returnClause := map[connor.FilterKey]any{} - for innerSourceKey, innerSourceValue := range typedClause { - var innerMapping *core.DocumentMapping - // innerSourceValue may refer to a child mapping or - // an inline array if we don't have a child mapping - _, ok := innerSourceValue.(map[string]any) - if ok && index < len(mapping.ChildMappings) { - // If the innerSourceValue is also a map, then we should parse the nested clause - // using the child mapping, as this key must refer to a host property in a join - // and deeper keys must refer to properties on the child items. - innerMapping = mapping.ChildMappings[index] - } else { - innerMapping = mapping - } - rKey, rValue := toFilterMap(innerSourceKey, innerSourceValue, innerMapping) - returnClause[rKey] = rValue + } else { + returnKey = &ObjectProperty{ + Name: sourceKey, + } + } + + switch typedClause := sourceClause.(type) { + case []any: + return returnKey, toFilterList(typedClause, mapping) + + case map[string]any: + return returnKey, toFilterMap(returnKey, typedClause, mapping) + + default: + return returnKey, typedClause + } +} + +func toFilterMap( + sourceKey connor.FilterKey, + sourceClause map[string]any, + mapping *core.DocumentMapping, +) map[connor.FilterKey]any { + innerMapClause := make(map[connor.FilterKey]any) + for innerSourceKey, innerSourceValue := range sourceClause { + var innerMapping *core.DocumentMapping + switch t := sourceKey.(type) { + case *PropertyIndex: + _, ok := innerSourceValue.(map[string]any) + if ok && mapping != nil && t.Index < len(mapping.ChildMappings) { + // If the innerSourceValue is also a map, then we should parse the nested clause + // using the child mapping, as this key must refer to a host property in a join + // and deeper keys must refer to properties on the child items. + innerMapping = mapping.ChildMappings[t.Index] + } else { + innerMapping = mapping } - return key, returnClause - default: - return key, sourceClause + case *ObjectProperty: + // Object properties can never refer to mapped document fields. + // Set the mapping to null for any nested filter values so + // that we don't filter any fields outside of this object. 
+ innerMapping = nil + case *Operator: + innerMapping = mapping + } + rKey, rValue := toFilterKeyValue(innerSourceKey, innerSourceValue, innerMapping) + innerMapClause[rKey] = rValue + } + return innerMapClause +} + +func toFilterList(sourceClause []any, mapping *core.DocumentMapping) []any { + returnClauses := make([]any, len(sourceClause)) + for i, innerSourceClause := range sourceClause { + // innerSourceClause must be a map because only compound + // operators (_and, _or) can reach this function and should + // have already passed GQL type validation + typedInnerSourceClause := innerSourceClause.(map[string]any) + innerMapClause := make(map[connor.FilterKey]any) + for innerSourceKey, innerSourceValue := range typedInnerSourceClause { + rKey, rValue := toFilterKeyValue(innerSourceKey, innerSourceValue, mapping) + innerMapClause[rKey] = rValue } + returnClauses[i] = innerMapClause } + return returnClauses } func toLimit(limit immutable.Option[uint64], offset immutable.Option[uint64]) *Limit { diff --git a/internal/planner/mapper/targetable.go b/internal/planner/mapper/targetable.go index f85e6c8016..2611d297dc 100644 --- a/internal/planner/mapper/targetable.go +++ b/internal/planner/mapper/targetable.go @@ -21,6 +21,7 @@ import ( var ( _ connor.FilterKey = (*PropertyIndex)(nil) _ connor.FilterKey = (*Operator)(nil) + _ connor.FilterKey = (*ObjectProperty)(nil) ) // PropertyIndex is a FilterKey that represents a property in a document. @@ -71,6 +72,34 @@ func (k *Operator) Equal(other connor.FilterKey) bool { return false } +// ObjectProperty is a FilterKey that represents a property in an object. +// +// This is used to target properties of an object when the fields +// are not explicitly mapped, such as with JSON. +type ObjectProperty struct { + // The name of the property on object. + Name string +} + +func (k *ObjectProperty) GetProp(data any) any { + if data == nil { + return nil + } + object := data.(map[string]any) + return object[k.Name] +} + +func (k *ObjectProperty) GetOperatorOrDefault(defaultOp string) string { + return defaultOp +} + +func (k *ObjectProperty) Equal(other connor.FilterKey) bool { + if otherKey, isOk := other.(*ObjectProperty); isOk && *k == *otherKey { + return true + } + return false +} + // Filter represents a series of conditions that may reduce the number of // records that a request returns. 
type Filter struct { @@ -144,6 +173,14 @@ func filterObjectToMap(mapping *core.DocumentMapping, obj map[connor.FilterKey]a default: outmap[keyType.Operation] = v } + + case *ObjectProperty: + switch subObj := v.(type) { + case map[connor.FilterKey]any: + outmap[keyType.Name] = filterObjectToMap(mapping, subObj) + case nil: + outmap[keyType.Name] = nil + } } } return outmap diff --git a/internal/request/graphql/schema/generate.go b/internal/request/graphql/schema/generate.go index c1cf92e4dc..254fae6e7d 100644 --- a/internal/request/graphql/schema/generate.go +++ b/internal/request/graphql/schema/generate.go @@ -1202,43 +1202,18 @@ func (g *Generator) genTypeFilterArgInput(obj *gql.Object) *gql.InputObject { } // generate basic filter operator blocks - // @todo: Extract object field loop into its own utility func for f, field := range obj.Fields() { - if _, ok := request.ReservedFields[f]; ok && f != request.DocIDFieldName { + _, ok := request.ReservedFields[f] + if ok && f != request.DocIDFieldName { continue } - // scalars (leafs) - if gql.IsLeafType(field.Type) { - var operatorName string - if list, isList := field.Type.(*gql.List); isList { - if notNull, isNotNull := list.OfType.(*gql.NonNull); isNotNull { - operatorName = "NotNull" + notNull.OfType.Name() + "ListOperatorBlock" - } else { - operatorName = list.OfType.Name() + "ListOperatorBlock" - } - } else { - operatorName = field.Type.Name() + "OperatorBlock" - } - operatorType, isFilterable := g.manager.schema.TypeMap()[operatorName] - if !isFilterable { - continue - } - fields[field.Name] = &gql.InputObjectFieldConfig{ - Type: operatorType, - } - } else { // objects (relations) - fieldType := field.Type - if l, isList := field.Type.(*gql.List); isList { - // We want the FilterArg for the object, not the list of objects. - fieldType = l.OfType - } - filterType, isFilterable := g.manager.schema.TypeMap()[genTypeName(fieldType, filterInputNameSuffix)] - if !isFilterable { - filterType = &gql.InputObjectField{} - } - fields[field.Name] = &gql.InputObjectFieldConfig{ - Type: filterType, - } + operatorName := genFilterOperatorName(field.Type) + filterType, isFilterable := g.manager.schema.TypeMap()[operatorName] + if !isFilterable { + continue + } + fields[field.Name] = &gql.InputObjectFieldConfig{ + Type: filterType, } } @@ -1408,6 +1383,35 @@ func isNumericArray(list *gql.List) bool { list.OfType == gql.Float } +func genFilterOperatorName(fieldType gql.Type) string { + list, isList := fieldType.(*gql.List) + if isList { + fieldType = list.OfType + } + if !gql.IsLeafType(fieldType) { + return genTypeName(fieldType, filterInputNameSuffix) + } + notNull, isNotNull := fieldType.(*gql.NonNull) + if isNotNull { + fieldType = notNull.OfType + } + switch { + case fieldType.Name() == "JSON": + return fieldType.Name() + + case isList && isNotNull: + // todo: There's a potential to have a name clash + // https://github.com/sourcenetwork/defradb/issues/3123 + return "NotNull" + fieldType.Name() + "ListOperatorBlock" + + case isList: + return fieldType.Name() + "ListOperatorBlock" + + default: + return fieldType.Name() + "OperatorBlock" + } +} + /* Example typeDefs := ` ... 
` diff --git a/internal/request/graphql/schema/manager.go b/internal/request/graphql/schema/manager.go index 792535fda0..881c94c144 100644 --- a/internal/request/graphql/schema/manager.go +++ b/internal/request/graphql/schema/manager.go @@ -193,7 +193,6 @@ func defaultTypes( floatOpBlock := schemaTypes.FloatOperatorBlock() booleanOpBlock := schemaTypes.BooleanOperatorBlock() stringOpBlock := schemaTypes.StringOperatorBlock() - jsonOpBlock := schemaTypes.JSONOperatorBlock(jsonScalarType) blobOpBlock := schemaTypes.BlobOperatorBlock(blobScalarType) dateTimeOpBlock := schemaTypes.DateTimeOperatorBlock() @@ -201,7 +200,6 @@ func defaultTypes( notNullFloatOpBlock := schemaTypes.NotNullFloatOperatorBlock() notNullBooleanOpBlock := schemaTypes.NotNullBooleanOperatorBlock() notNullStringOpBlock := schemaTypes.NotNullStringOperatorBlock() - notNullJSONOpBlock := schemaTypes.NotNullJSONOperatorBlock(jsonScalarType) notNullBlobOpBlock := schemaTypes.NotNullBlobOperatorBlock(blobScalarType) return []gql.Type{ @@ -228,7 +226,6 @@ func defaultTypes( floatOpBlock, booleanOpBlock, stringOpBlock, - jsonOpBlock, blobOpBlock, dateTimeOpBlock, @@ -237,7 +234,6 @@ func defaultTypes( notNullFloatOpBlock, notNullBooleanOpBlock, notNullStringOpBlock, - notNullJSONOpBlock, notNullBlobOpBlock, // Filter scalar list blocks diff --git a/internal/request/graphql/schema/types/base.go b/internal/request/graphql/schema/types/base.go index 4675169989..8dc3b35717 100644 --- a/internal/request/graphql/schema/types/base.go +++ b/internal/request/graphql/schema/types/base.go @@ -536,90 +536,6 @@ func NotNullStringListOperatorBlock(op *gql.InputObject) *gql.InputObject { }) } -// JSONOperatorBlock filter block for string types. -func JSONOperatorBlock(jsonScalarType *gql.Scalar) *gql.InputObject { - return gql.NewInputObject(gql.InputObjectConfig{ - Name: "JSONOperatorBlock", - Description: stringOperatorBlockDescription, - Fields: gql.InputObjectConfigFieldMap{ - "_eq": &gql.InputObjectFieldConfig{ - Description: eqOperatorDescription, - Type: jsonScalarType, - }, - "_ne": &gql.InputObjectFieldConfig{ - Description: neOperatorDescription, - Type: jsonScalarType, - }, - "_in": &gql.InputObjectFieldConfig{ - Description: inOperatorDescription, - Type: gql.NewList(jsonScalarType), - }, - "_nin": &gql.InputObjectFieldConfig{ - Description: ninOperatorDescription, - Type: gql.NewList(jsonScalarType), - }, - "_like": &gql.InputObjectFieldConfig{ - Description: likeStringOperatorDescription, - Type: gql.String, - }, - "_nlike": &gql.InputObjectFieldConfig{ - Description: nlikeStringOperatorDescription, - Type: gql.String, - }, - "_ilike": &gql.InputObjectFieldConfig{ - Description: ilikeStringOperatorDescription, - Type: gql.String, - }, - "_nilike": &gql.InputObjectFieldConfig{ - Description: nilikeStringOperatorDescription, - Type: gql.String, - }, - }, - }) -} - -// NotNullJSONOperatorBlock filter block for string! types. 
-func NotNullJSONOperatorBlock(jsonScalarType *gql.Scalar) *gql.InputObject { - return gql.NewInputObject(gql.InputObjectConfig{ - Name: "NotNullJSONOperatorBlock", - Description: notNullStringOperatorBlockDescription, - Fields: gql.InputObjectConfigFieldMap{ - "_eq": &gql.InputObjectFieldConfig{ - Description: eqOperatorDescription, - Type: jsonScalarType, - }, - "_ne": &gql.InputObjectFieldConfig{ - Description: neOperatorDescription, - Type: jsonScalarType, - }, - "_in": &gql.InputObjectFieldConfig{ - Description: inOperatorDescription, - Type: gql.NewList(gql.NewNonNull(jsonScalarType)), - }, - "_nin": &gql.InputObjectFieldConfig{ - Description: ninOperatorDescription, - Type: gql.NewList(gql.NewNonNull(jsonScalarType)), - }, - "_like": &gql.InputObjectFieldConfig{ - Description: likeStringOperatorDescription, - Type: gql.String, - }, - "_nlike": &gql.InputObjectFieldConfig{ - Description: nlikeStringOperatorDescription, - Type: gql.String, - }, - "_ilike": &gql.InputObjectFieldConfig{ - Description: ilikeStringOperatorDescription, - Type: gql.String, - }, - "_nilike": &gql.InputObjectFieldConfig{ - Description: nilikeStringOperatorDescription, - Type: gql.String, - }, - }, - }) -} - func BlobOperatorBlock(blobScalarType *gql.Scalar) *gql.InputObject { return gql.NewInputObject(gql.InputObjectConfig{ Name: "BlobOperatorBlock", diff --git a/tests/integration/explain/default/with_filter_test.go b/tests/integration/explain/default/with_filter_test.go index 2d3751f562..96e99e19ac 100644 --- a/tests/integration/explain/default/with_filter_test.go +++ b/tests/integration/explain/default/with_filter_test.go @@ -320,3 +320,52 @@ func TestDefaultExplainRequestWithMatchInsideList(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } + +func TestDefaultExplainRequest_WithJSONEqualFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Explain (default) request with JSON equal (_eq) filter.", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: `type Users { + name: String + custom: JSON + }`, + }, + testUtils.ExplainRequest{ + Request: `query @explain { + Users(filter: {custom: {_eq: {one: {two: 3}}}}) { + name + } + }`, + ExpectedPatterns: basicPattern, + ExpectedTargets: []testUtils.PlanNodeTargetCase{ + { + TargetNodeName: "scanNode", + IncludeChildNodes: true, // should be last node, so will have no child nodes. + ExpectedAttributes: dataMap{ + "collectionID": "1", + "collectionName": "Users", + "filter": dataMap{ + "custom": dataMap{ + "_eq": dataMap{ + "one": dataMap{ + "two": int32(3), + }, + }, + }, + }, + "spans": []dataMap{ + { + "start": "/1", + "end": "/2", + }, + }, + }, + }, + }, + }, + }, + } + + explainUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/json/with_all_test.go b/tests/integration/query/json/with_all_test.go new file mode 100644 index 0000000000..918c51e5bb --- /dev/null +++ b/tests/integration/query/json/with_all_test.go @@ -0,0 +1,59 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package json + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryJSON_WithAllFilter_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple JSON array, filtered all of string array", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: `type Users { + name: String + custom: JSON + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "custom": [1, false, "second", {"one": 1}, [1, 2]] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "custom": [null, false, "second", {"one": 1}, [1, 2]] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {custom: {_all: {_ne: null}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/json/with_any_test.go b/tests/integration/query/json/with_any_test.go new file mode 100644 index 0000000000..d38d3e83e8 --- /dev/null +++ b/tests/integration/query/json/with_any_test.go @@ -0,0 +1,59 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package json + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryJSON_WithAnyFilter_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple JSON array, filtered any of string array", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: `type Users { + name: String + custom: JSON + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "custom": [1, false, "second", {"one": 1}, [1, 2]] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "custom": [null, false, "second", {"one": 1}, [1, 2]] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {custom: {_any: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Fred", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_filter/with_eq_json_test.go b/tests/integration/query/json/with_eq_test.go similarity index 91% rename from tests/integration/query/simple/with_filter/with_eq_json_test.go rename to tests/integration/query/json/with_eq_test.go index 3f85a033cb..4434412a1a 100644 --- a/tests/integration/query/simple/with_filter/with_eq_json_test.go +++ b/tests/integration/query/json/with_eq_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. 
-package simple +package json import ( "testing" @@ -16,7 +16,7 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQuerySimple_WithEqOpOnJSONFieldWithObject_ShouldFilter(t *testing.T) { +func TestQueryJSON_WithEqualFilterWithObject_ShouldFilter(t *testing.T) { test := testUtils.TestCase{ Actions: []any{ testUtils.SchemaUpdate{ @@ -69,7 +69,7 @@ func TestQuerySimple_WithEqOpOnJSONFieldWithObject_ShouldFilter(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQuerySimple_WithEqOpOnJSONFieldWithNestedObjects_ShouldFilter(t *testing.T) { +func TestQueryJSON_WithEqualFilterWithNestedObjects_ShouldFilter(t *testing.T) { test := testUtils.TestCase{ Actions: []any{ testUtils.SchemaUpdate{ @@ -122,7 +122,7 @@ func TestQuerySimple_WithEqOpOnJSONFieldWithNestedObjects_ShouldFilter(t *testin testUtils.ExecuteTestCase(t, test) } -func TestQuerySimple_WithEqOpOnJSONFieldWithNullValue_ShouldFilter(t *testing.T) { +func TestQueryJSON_WithEqualFilterWithNullValue_ShouldFilter(t *testing.T) { test := testUtils.TestCase{ Actions: []any{ testUtils.SchemaUpdate{ diff --git a/tests/integration/query/json/with_ge_test.go b/tests/integration/query/json/with_ge_test.go new file mode 100644 index 0000000000..bfb574170e --- /dev/null +++ b/tests/integration/query/json/with_ge_test.go @@ -0,0 +1,499 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package json + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryJSON_WithGreaterEqualFilterWithEqualValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _ge filter equal value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_ge: 32}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "David", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterEqualFilterWithGreaterValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _ge filter greater value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_ge: 31}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "David", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterEqualFilterWithNullValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _ge filter null value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + 
testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David" + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_ge: null}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "David", + }, + { + "Name": "John", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterEqualFilterWithNestedEqualValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _ge filter nested equal value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": {"age": 21} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": {"age": 32} + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {age: {_ge: 32}}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "David", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterEqualFilterWithNestedGreaterValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _ge nested filter nested greater value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": {"age": 21} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": {"age": 32} + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {age: {_ge: 31}}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "David", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterEqualFilterWithNestedNullValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _ge filter nested null value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": {"age": 21} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David" + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {age: {_ge: null}}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + { + "Name": "David", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterEqualFilterWithBoolValue_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _ge filter bool value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_ge: true}}) { + Name + } + }`, + ExpectedError: `unexpected type. 
Property: condition, Actual: bool`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterEqualFilterWithStringValue_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _ge filter string value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_ge: ""}}) { + Name + } + }`, + ExpectedError: `unexpected type. Property: condition, Actual: string`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterEqualFilterWithObjectValue_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _ge filter object value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_ge: {one: 1}}}) { + Name + } + }`, + ExpectedError: `unexpected type. Property: condition, Actual: map[string]interface {}`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterEqualFilterWithArrayValue_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _ge filter array value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_ge: [1, 2]}}) { + Name + } + }`, + ExpectedError: `unexpected type. Property: condition, Actual: []interface {}`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterEqualFilterWithAllTypes_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _ge filter all types", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Shahzad", + "Custom": "32" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Andy", + "Custom": [1, 2] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Fred", + "Custom": {"one": 1} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": false + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_ge: 32}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "David", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/json/with_gt_test.go b/tests/integration/query/json/with_gt_test.go new file mode 100644 index 0000000000..3a2972320b --- /dev/null +++ b/tests/integration/query/json/with_gt_test.go @@ -0,0 +1,497 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package json + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryJSON_WithGreaterThanFilterBlockWithGreaterValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic filter(custom), greater than", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_gt: 20}}) { + Name + Custom + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "Custom": int64(21), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterThanFilterBlockWithLesserValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic filter(custom), greater than", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_gt: 22}}) { + Name + Custom + } + }`, + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterThanFilterBlockWithNullFilterValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic JSON greater than filter, with null filter value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David" + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_gt: null}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterThanFilterBlockWithNestedGreaterValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic filter(custom), nested greater than", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": {"age": 21} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": {"age": 19} + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {age: {_gt: 20}}}) { + Name + Custom + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + "Custom": map[string]any{ + "age": uint64(21), + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterThanFilterBlockWithNestedLesserValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic filter(custom), nested greater than", + Actions: []any{ + testUtils.SchemaUpdate{ + 
Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": {"age": 21} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": {"age": 19} + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {age: {_gt: 22}}}) { + Name + Custom + } + }`, + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterThanFilterBlockWithNestedNullFilterValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic JSON greater than filter, with nested null filter value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": {"age": 21} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David" + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {age: {_gt: null}}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterThanFilterBlockWithBoolValue_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic filter(custom), greater than", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_gt: false}}) { + Name + Custom + } + }`, + ExpectedError: `unexpected type. Property: condition, Actual: bool`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterThanFilterBlockWithStringValue_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic filter(custom), greater than", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_gt: ""}}) { + Name + Custom + } + }`, + ExpectedError: `unexpected type. Property: condition, Actual: string`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterThanFilterBlockWithObjectValue_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic filter(custom), greater than", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_gt: {one: 1}}}) { + Name + Custom + } + }`, + ExpectedError: `unexpected type. 
Property: condition, Actual: map[string]interface {}`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterThanFilterBlockWithArrayValue_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic filter(custom), greater than", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_gt: [1,2]}}) { + Name + Custom + } + }`, + ExpectedError: `unexpected type. Property: condition, Actual: []interface {}`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithGreaterThanFilterWithAllTypes_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _gt filter all types", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Shahzad", + "Custom": "32" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Andy", + "Custom": [1, 2] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Fred", + "Custom": {"one": 1} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": false + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_gt: 30}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "David", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_filter/with_in_json_test.go b/tests/integration/query/json/with_in_test.go similarity index 93% rename from tests/integration/query/simple/with_filter/with_in_json_test.go rename to tests/integration/query/json/with_in_test.go index 568862ee52..335a81d4ae 100644 --- a/tests/integration/query/simple/with_filter/with_in_json_test.go +++ b/tests/integration/query/json/with_in_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package simple +package json import ( "testing" @@ -16,7 +16,7 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQuerySimple_WithInOpOnJSONField_ShouldFilter(t *testing.T) { +func TestQueryJSON_WithInFilter_ShouldFilter(t *testing.T) { test := testUtils.TestCase{ Actions: []any{ testUtils.SchemaUpdate{ diff --git a/tests/integration/query/json/with_le_test.go b/tests/integration/query/json/with_le_test.go new file mode 100644 index 0000000000..49ae99e51b --- /dev/null +++ b/tests/integration/query/json/with_le_test.go @@ -0,0 +1,493 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package json + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryJSON_WithLesserEqualFilterWithEqualValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _le filter equal value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_le: 21}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserEqualFilterWithLesserValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _le filter lesser value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_le: 31}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserEqualFilterWithNullValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _le filter null value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David" + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_le: null}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "David", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserEqualFilterWithNestedEqualValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _le filter nested equal value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": {"age": 21} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": {"age": 32} + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {age: {_le: 21}}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserEqualFilterWithNestedLesserValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _le nested filter nested lesser value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": {"age": 21} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": {"age": 32} + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {age: {_le: 31}}}) { + Name + } + }`, + Results: map[string]any{ + 
"Users": []map[string]any{ + { + "Name": "John", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserEqualFilterWithNestedNullValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _le filter nested null value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": {"age": 21} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David" + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {age: {_le: null}}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "David", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserEqualFilterWithBoolValue_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _le filter bool value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_le: true}}) { + Name + } + }`, + ExpectedError: `unexpected type. Property: condition, Actual: bool`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserEqualFilterWithStringValue_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _le filter string value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_le: ""}}) { + Name + } + }`, + ExpectedError: `unexpected type. Property: condition, Actual: string`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserEqualFilterWithObjectValue_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _le filter object value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_le: {one: 1}}}) { + Name + } + }`, + ExpectedError: `unexpected type. Property: condition, Actual: map[string]interface {}`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserEqualFilterWithArrayValue_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _le filter array value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_le: [1, 2]}}) { + Name + } + }`, + ExpectedError: `unexpected type. 
Property: condition, Actual: []interface {}`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserEqualFilterWithAllTypes_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _le filter all types", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Shahzad", + "Custom": "32" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Andy", + "Custom": [1, 2] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Fred", + "Custom": {"one": 1} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": false + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_le: 32}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "David", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/json/with_like_test.go b/tests/integration/query/json/with_like_test.go new file mode 100644 index 0000000000..bb7ed62507 --- /dev/null +++ b/tests/integration/query/json/with_like_test.go @@ -0,0 +1,77 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package json + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryJSON_WithLikeFilter_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + custom: JSON + } + `, + }, + testUtils.CreateDoc{ + DocMap: map[string]any{ + "custom": "Daenerys Stormborn of House Targaryen, the First of Her Name", + }, + }, + testUtils.CreateDoc{ + DocMap: map[string]any{ + "custom": "Viserys I Targaryen, King of the Andals", + }, + }, + testUtils.CreateDoc{ + Doc: `{ + "custom": [1, 2] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "custom": {"one": 1} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "custom": false + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {custom: {_like: "Daenerys%Name"}}) { + custom + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "custom": "Daenerys Stormborn of House Targaryen, the First of Her Name", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/json/with_lt_test.go b/tests/integration/query/json/with_lt_test.go new file mode 100644 index 0000000000..14a422d5ad --- /dev/null +++ b/tests/integration/query/json/with_lt_test.go @@ -0,0 +1,489 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package json + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryJSON_WithLesserThanFilterBlockWithGreaterValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic filter(custom), lesser than", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Custom": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_lt: 20}}) { + Name + Custom + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "Bob", + "Custom": int64(19), + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserThanFilterBlockWithLesserValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic filter(custom), lesser than", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Custom": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_lt: 19}}) { + Name + Custom + } + }`, + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserThanFilterBlockWithNullFilterValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic JSON lesser than filter, with null filter value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob" + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_lt: null}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserThanFilterBlockWithNestedGreaterValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic filter(custom), nested lesser than", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": {"age": 21} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Custom": {"age": 19} + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {age: {_lt: 20}}}) { + Name + Custom + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "Bob", + "Custom": map[string]any{ + "age": uint64(19), + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserThanFilterBlockWithNestedLesserValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic filter(custom), nested lesser than", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": {"age": 21} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Custom": {"age": 19} + }`, + }, + testUtils.Request{ + Request: 
`query { + Users(filter: {Custom: {age: {_lt: 19}}}) { + Name + Custom + } + }`, + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserThanFilterBlockWithNestedNullFilterValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic JSON lesser than filter, with nested null filter value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": {"age": 21} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob" + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {age: {_lt: null}}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserThanFilterBlockWithBoolValue_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic filter(custom), lesser than", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Custom": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_lt: false}}) { + Name + Custom + } + }`, + ExpectedError: `unexpected type. Property: condition, Actual: bool`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserThanFilterBlockWithStringValue_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic filter(custom), lesser than", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Custom": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_lt: ""}}) { + Name + Custom + } + }`, + ExpectedError: `unexpected type. Property: condition, Actual: string`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserThanFilterBlockWithObjectValue_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic filter(custom), lesser than", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Custom": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_lt: {one: 1}}}) { + Name + Custom + } + }`, + ExpectedError: `unexpected type. 
Property: condition, Actual: map[string]interface {}`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserThanFilterBlockWithArrayValue_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with basic filter(custom), lesser than", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": 21 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Bob", + "Custom": 19 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_lt: [1,2]}}) { + Name + Custom + } + }`, + ExpectedError: `unexpected type. Property: condition, Actual: []interface {}`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithLesserThanFilterWithAllTypes_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _lt filter all types", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Shahzad", + "Custom": "32" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Andy", + "Custom": [1, 2] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Fred", + "Custom": {"one": 1} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": false + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_lt: 33}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "David", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/json/with_ne_test.go b/tests/integration/query/json/with_ne_test.go new file mode 100644 index 0000000000..6a4d619552 --- /dev/null +++ b/tests/integration/query/json/with_ne_test.go @@ -0,0 +1,165 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package json + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryJSON_WithNotEqualFilterWithObject_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "custom": { + "tree": "maple", + "age": 250 + } + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "custom": { + "tree": "oak", + "age": 450 + } + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "custom": null + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {custom: {_ne: {tree:"oak",age:450}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + {"name": "John"}, + {"name": "Shahzad"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithNotEqualFilterWithNestedObjects_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "custom": { + "level_1": { + "level_2": { + "level_3": [true, false] + } + } + } + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "custom": { + "level_1": { + "level_2": { + "level_3": [false, true] + } + } + } + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {custom: {_ne: {level_1: {level_2: {level_3: [true, false]}}}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + {"name": "Andy"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryJSON_WithNotEqualFilterWithNullValue_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "custom": null + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "custom": {} + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {custom: {_ne: null}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + {"name": "Andy"}, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_filter/with_like_json_test.go b/tests/integration/query/json/with_nin_test.go similarity index 70% rename from tests/integration/query/simple/with_filter/with_like_json_test.go rename to tests/integration/query/json/with_nin_test.go index f77041ada0..12cfb4d650 100644 --- a/tests/integration/query/simple/with_filter/with_like_json_test.go +++ b/tests/integration/query/json/with_nin_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. 
-package simple +package json import ( "testing" @@ -16,7 +16,7 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQuerySimple_WithLikeOpOnJSONField_ShouldFilter(t *testing.T) { +func TestQueryJSON_WithNotInFilter_ShouldFilter(t *testing.T) { test := testUtils.TestCase{ Actions: []any{ testUtils.SchemaUpdate{ @@ -28,26 +28,32 @@ func TestQuerySimple_WithLikeOpOnJSONField_ShouldFilter(t *testing.T) { `, }, testUtils.CreateDoc{ - DocMap: map[string]any{ - "name": "John", - "custom": "{\"tree\": \"maple\", \"age\": 250}", - }, + Doc: `{ + "name": "John", + "custom": { + "tree": "maple", + "age": 250 + } + }`, }, testUtils.CreateDoc{ - DocMap: map[string]any{ - "name": "Andy", - "custom": "{\"tree\": \"oak\", \"age\": 450}", - }, + Doc: `{ + "name": "Andy", + "custom": { + "tree": "oak", + "age": 450 + } + }`, }, testUtils.Request{ Request: `query { - Users(filter: {custom: {_like: "%oak%"}}) { + Users(filter: {custom: {_nin: [{tree:"oak",age:450}]}}) { name } }`, Results: map[string]any{ "Users": []map[string]any{ - {"name": "Andy"}, + {"name": "John"}, }, }, }, diff --git a/tests/integration/query/json/with_nlike_test.go b/tests/integration/query/json/with_nlike_test.go new file mode 100644 index 0000000000..db0615b2ca --- /dev/null +++ b/tests/integration/query/json/with_nlike_test.go @@ -0,0 +1,89 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package json + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryJSON_WithNotLikeFilter_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "custom": "Daenerys Stormborn of House Targaryen, the First of Her Name" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "custom": "Viserys I Targaryen, King of the Andals" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "custom": [1, 2] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "custom": {"one": 1} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "custom": false + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {custom: {_nlike: "%Stormborn%"}}) { + custom + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "custom": uint64(32), + }, + { + "custom": "Viserys I Targaryen, King of the Andals", + }, + { + "custom": map[string]any{"one": uint64(1)}, + }, + { + "custom": []any{uint64(1), uint64(2)}, + }, + { + "custom": false, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/json/with_none_test.go b/tests/integration/query/json/with_none_test.go new file mode 100644 index 0000000000..2355810423 --- /dev/null +++ b/tests/integration/query/json/with_none_test.go @@ -0,0 +1,59 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package json + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryJSON_WithNoneFilter_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple JSON array, filtered none of string array", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: `type Users { + name: String + custom: JSON + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "custom": [1, false, "second", {"one": 1}, [1, 2]] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "custom": [null, false, "second", {"one": 1}, [1, 2]] + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {custom: {_none: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "name": "Shahzad", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/filter_test.go b/tests/integration/schema/filter_test.go index c3dc47d668..a48ac2e296 100644 --- a/tests/integration/schema/filter_test.go +++ b/tests/integration/schema/filter_test.go @@ -287,3 +287,126 @@ var defaultBookArgsWithoutFilter = trimFields( }, testFilterForOneToOneSchemaArgProps, ) + +func TestSchemaFilterInputs_WithJSONField_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + custom: JSON + } + `, + }, + testUtils.IntrospectionRequest{ + Request: ` + query { + __schema { + queryType { + fields { + name + args { + name + type { + name + inputFields { + name + type { + name + ofType { + name + } + } + } + } + } + } + } + } + } + `, + ContainsData: map[string]any{ + "__schema": map[string]any{ + "queryType": map[string]any{ + "fields": []any{ + map[string]any{ + "name": "Users", + "args": append( + // default args without filter + trimFields( + fields{ + cidArg, + docIDArg, + showDeletedArg, + groupByArg, + limitArg, + offsetArg, + buildOrderArg("Users"), + }, + map[string]any{ + "name": struct{}{}, + "type": map[string]any{ + "name": struct{}{}, + "inputFields": struct{}{}, + }, + }, + ), + map[string]any{ + "name": "filter", + "type": map[string]any{ + "name": "UsersFilterArg", + "inputFields": []any{ + map[string]any{ + "name": "_and", + "type": map[string]any{ + "name": nil, + "ofType": map[string]any{ + "name": nil, + }, + }, + }, + map[string]any{ + "name": "_docID", + "type": map[string]any{ + "name": "IDOperatorBlock", + "ofType": nil, + }, + }, + map[string]any{ + "name": "_not", + "type": map[string]any{ + "name": "UsersFilterArg", + "ofType": nil, + }, + }, + map[string]any{ + "name": "_or", + "type": map[string]any{ + "name": nil, + "ofType": map[string]any{ + "name": nil, + }, + }, + }, + map[string]any{ + "name": "custom", + "type": map[string]any{ + "name": "JSON", + "ofType": nil, + }, + }, + }, + }, + }, + ).Tidy(), + }, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/simple_test.go b/tests/integration/schema/simple_test.go index c3603b4b11..739e7da49a 100644 --- a/tests/integration/schema/simple_test.go +++ b/tests/integration/schema/simple_test.go @@ -329,3 +329,49 @@ func TestSchemaSimpleCreatesSchemaGivenTypeWithBlobField(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func 
TestSchemaSimple_WithJSONField_CreatesSchemaGivenType(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + data: JSON + } + `, + }, + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Users", + "fields": DefaultFields.Append( + Field{ + "name": "data", + "type": map[string]any{ + "kind": "SCALAR", + "name": "JSON", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From 4990ff53a1f819f39d8613b53a4ba74515be7c1e Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 14 Oct 2024 11:15:14 -0400 Subject: [PATCH 63/71] bot: Update dependencies (bulk dependabot PRs) 14-10-2024 (#3131) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by combining the following PRs: #3130 bot: Bump go.opentelemetry.io/otel/sdk/metric from 1.30.0 to 1.31.0 #3128 bot: Bump typescript from 5.6.2 to 5.6.3 in /playground #3127 bot: Bump @types/react-dom from 18.3.0 to 18.3.1 in /playground #3126 bot: Bump eslint-plugin-react-hooks from 4.6.2 to 5.0.0 in /playground ⚠️ The following PRs were resolved manually due to merge conflicts: #3129 bot: Bump go.opentelemetry.io/otel/metric from 1.30.0 to 1.31.0 --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- go.mod | 10 +++++----- go.sum | 20 ++++++++++---------- playground/package-lock.json | 28 +++++++++++++--------------- playground/package.json | 6 +++--- 4 files changed, 31 insertions(+), 33 deletions(-) diff --git a/go.mod b/go.mod index 483980affd..805c27b80c 100644 --- a/go.mod +++ b/go.mod @@ -59,8 +59,8 @@ require ( github.com/valyala/fastjson v1.6.4 github.com/vito/go-sse v1.1.2 github.com/zalando/go-keyring v0.2.5 - go.opentelemetry.io/otel/metric v1.30.0 - go.opentelemetry.io/otel/sdk/metric v1.30.0 + go.opentelemetry.io/otel/metric v1.31.0 + go.opentelemetry.io/otel/sdk/metric v1.31.0 go.uber.org/zap v1.27.0 golang.org/x/crypto v0.28.0 golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa @@ -352,9 +352,9 @@ require ( go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.52.0 // indirect - go.opentelemetry.io/otel v1.30.0 // indirect - go.opentelemetry.io/otel/sdk v1.30.0 // indirect - go.opentelemetry.io/otel/trace v1.30.0 // indirect + go.opentelemetry.io/otel v1.31.0 // indirect + go.opentelemetry.io/otel/sdk v1.31.0 // indirect + go.opentelemetry.io/otel/trace v1.31.0 // indirect go.uber.org/dig v1.18.0 // indirect go.uber.org/fx v1.22.2 // indirect go.uber.org/mock v0.4.0 // indirect diff --git a/go.sum b/go.sum index 6c9746ad1b..99786417bc 100644 --- a/go.sum +++ b/go.sum @@ -1536,16 +1536,16 @@ go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.4 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.52.0 h1:9l89oX4ba9kHbBol3Xin3leYJ+252h0zszDtBwyKe2A= 
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.52.0/go.mod h1:XLZfZboOJWHNKUv7eH0inh0E9VV6eWDFB/9yJyTLPp0= -go.opentelemetry.io/otel v1.30.0 h1:F2t8sK4qf1fAmY9ua4ohFS/K+FUuOPemHUIXHtktrts= -go.opentelemetry.io/otel v1.30.0/go.mod h1:tFw4Br9b7fOS+uEao81PJjVMjW/5fvNCbpsDIXqP0pc= -go.opentelemetry.io/otel/metric v1.30.0 h1:4xNulvn9gjzo4hjg+wzIKG7iNFEaBMX00Qd4QIZs7+w= -go.opentelemetry.io/otel/metric v1.30.0/go.mod h1:aXTfST94tswhWEb+5QjlSqG+cZlmyXy/u8jFpor3WqQ= -go.opentelemetry.io/otel/sdk v1.30.0 h1:cHdik6irO49R5IysVhdn8oaiR9m8XluDaJAs4DfOrYE= -go.opentelemetry.io/otel/sdk v1.30.0/go.mod h1:p14X4Ok8S+sygzblytT1nqG98QG2KYKv++HE0LY/mhg= -go.opentelemetry.io/otel/sdk/metric v1.30.0 h1:QJLT8Pe11jyHBHfSAgYH7kEmT24eX792jZO1bo4BXkM= -go.opentelemetry.io/otel/sdk/metric v1.30.0/go.mod h1:waS6P3YqFNzeP01kuo/MBBYqaoBJl7efRQHOaydhy1Y= -go.opentelemetry.io/otel/trace v1.30.0 h1:7UBkkYzeg3C7kQX8VAidWh2biiQbtAKjyIML8dQ9wmc= -go.opentelemetry.io/otel/trace v1.30.0/go.mod h1:5EyKqTzzmyqB9bwtCCq6pDLktPK6fmGf/Dph+8VI02o= +go.opentelemetry.io/otel v1.31.0 h1:NsJcKPIW0D0H3NgzPDHmo0WW6SptzPdqg/L1zsIm2hY= +go.opentelemetry.io/otel v1.31.0/go.mod h1:O0C14Yl9FgkjqcCZAsE053C13OaddMYr/hz6clDkEJE= +go.opentelemetry.io/otel/metric v1.31.0 h1:FSErL0ATQAmYHUIzSezZibnyVlft1ybhy4ozRPcF2fE= +go.opentelemetry.io/otel/metric v1.31.0/go.mod h1:C3dEloVbLuYoX41KpmAhOqNriGbA+qqH6PQ5E5mUfnY= +go.opentelemetry.io/otel/sdk v1.31.0 h1:xLY3abVHYZ5HSfOg3l2E5LUj2Cwva5Y7yGxnSW9H5Gk= +go.opentelemetry.io/otel/sdk v1.31.0/go.mod h1:TfRbMdhvxIIr/B2N2LQW2S5v9m3gOQ/08KsbbO5BPT0= +go.opentelemetry.io/otel/sdk/metric v1.31.0 h1:i9hxxLJF/9kkvfHppyLL55aW7iIJz4JjxTeYusH7zMc= +go.opentelemetry.io/otel/sdk/metric v1.31.0/go.mod h1:CRInTMVvNhUKgSAMbKyTMxqOBC0zgyxzW55lZzX43Y8= +go.opentelemetry.io/otel/trace v1.31.0 h1:ffjsj1aRouKewfr85U2aGagJ46+MvodynlQ1HYdmJys= +go.opentelemetry.io/otel/trace v1.31.0/go.mod h1:TXZkRk7SM2ZQLtR6eoAWQFIHPvzQ06FJAsO1tJg480A= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= diff --git a/playground/package-lock.json b/playground/package-lock.json index af40f82bc2..5f1f350d92 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -16,15 +16,15 @@ }, "devDependencies": { "@types/react": "^18.3.11", - "@types/react-dom": "^18.3.0", + "@types/react-dom": "^18.3.1", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^8.8.1", "@typescript-eslint/parser": "^8.8.1", "@vitejs/plugin-react-swc": "^3.7.1", "eslint": "^9.12.0", - "eslint-plugin-react-hooks": "^4.6.2", + "eslint-plugin-react-hooks": "^5.0.0", "eslint-plugin-react-refresh": "^0.4.12", - "typescript": "^5.6.2", + "typescript": "^5.6.3", "vite": "^5.4.8" } }, @@ -2485,11 +2485,10 @@ } }, "node_modules/@types/react-dom": { - "version": "18.3.0", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.0.tgz", - "integrity": "sha512-EhwApuTmMBmXuFOikhQLIBUn6uFg81SwLMOAUgodJF14SOBOCMdU04gDoYi0WOJJHD144TL32z4yDqCW3dnkQg==", + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-qW1Mfv8taImTthu4KoXgDfLuk4bydU6Q/TkADnDWWHwi4NX4BR+LWfTp2sVmTqRrsHvyDDTelgelxJ+SsejKKQ==", "devOptional": true, - "license": "MIT", "dependencies": { "@types/react": "*" } @@ -3361,16 +3360,15 @@ } }, 
"node_modules/eslint-plugin-react-hooks": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.2.tgz", - "integrity": "sha512-QzliNJq4GinDBcD8gPB5v0wh6g8q3SUi6EFF0x8N/BL9PoVs0atuGc47ozMRyOWAKdwaZ5OnbOEa3WR+dSGKuQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-5.0.0.tgz", + "integrity": "sha512-hIOwI+5hYGpJEc4uPRmz2ulCjAGD/N13Lukkh8cLV0i2IRk/bdZDYjgLVHj+U9Z704kLIdIO6iueGvxNur0sgw==", "dev": true, - "license": "MIT", "engines": { "node": ">=10" }, "peerDependencies": { - "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0" + "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0" } }, "node_modules/eslint-plugin-react-refresh": { @@ -5800,9 +5798,9 @@ } }, "node_modules/typescript": { - "version": "5.6.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.2.tgz", - "integrity": "sha512-NW8ByodCSNCwZeghjN3o+JX5OFH0Ojg6sadjEKY4huZ52TqbJTJnDo5+Tw98lSy63NZvi4n+ez5m2u5d4PkZyw==", + "version": "5.6.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.3.tgz", + "integrity": "sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==", "dev": true, "bin": { "tsc": "bin/tsc", diff --git a/playground/package.json b/playground/package.json index a1bd34ea88..1175a6a788 100644 --- a/playground/package.json +++ b/playground/package.json @@ -18,15 +18,15 @@ }, "devDependencies": { "@types/react": "^18.3.11", - "@types/react-dom": "^18.3.0", + "@types/react-dom": "^18.3.1", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^8.8.1", "@typescript-eslint/parser": "^8.8.1", "@vitejs/plugin-react-swc": "^3.7.1", "eslint": "^9.12.0", - "eslint-plugin-react-hooks": "^4.6.2", + "eslint-plugin-react-hooks": "^5.0.0", "eslint-plugin-react-refresh": "^0.4.12", - "typescript": "^5.6.2", + "typescript": "^5.6.3", "vite": "^5.4.8" } } From 6cc39a7418fd8d1f3246357f5665302e48817305 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 15 Oct 2024 15:36:54 -0700 Subject: [PATCH 64/71] fix(i): Default field value validation (#3137) ## Relevant issue(s) Resolves #3133 ## Description This PR fixes a bug where default field value types were not properly validated and would cause a panic when creating a new document. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? Added an integration test. 
Specify the platform(s) on which this was tested: - *(modify the list accordingly*) - Arch Linux - Debian Linux - MacOS - Windows --- .../i3137-default-value-fix.md | 3 ++ internal/db/definition_validation.go | 18 ++++++++ internal/db/errors.go | 9 ++++ internal/request/graphql/schema/collection.go | 44 ++++++++++++------- internal/request/graphql/schema/errors.go | 21 ++++++++- .../with_default_fields_test.go | 21 ++++++++- 6 files changed, 96 insertions(+), 20 deletions(-) create mode 100644 docs/data_format_changes/i3137-default-value-fix.md diff --git a/docs/data_format_changes/i3137-default-value-fix.md b/docs/data_format_changes/i3137-default-value-fix.md new file mode 100644 index 0000000000..038caf8425 --- /dev/null +++ b/docs/data_format_changes/i3137-default-value-fix.md @@ -0,0 +1,3 @@ +# Default Value Fix + +Default value parsing has changed slightly and causes the change detector to fail. diff --git a/internal/db/definition_validation.go b/internal/db/definition_validation.go index 2d178f1e07..7afef5dd13 100644 --- a/internal/db/definition_validation.go +++ b/internal/db/definition_validation.go @@ -166,6 +166,7 @@ var globalValidators = []definitionValidator{ validateSelfReferences, validateCollectionMaterialized, validateMaterializedHasNoPolicy, + validateCollectionFieldDefaultValue, } var createValidators = append( @@ -1018,3 +1019,20 @@ func validateMaterializedHasNoPolicy( return nil } + +func validateCollectionFieldDefaultValue( + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, +) error { + for name, col := range newState.definitionsByName { + // default values are set when a doc is first created + _, err := client.NewDocFromMap(map[string]any{}, col) + if err != nil { + return NewErrDefaultFieldValueInvalid(name, err) + } + } + + return nil +} diff --git a/internal/db/errors.go b/internal/db/errors.go index d210860501..bd38cf052e 100644 --- a/internal/db/errors.go +++ b/internal/db/errors.go @@ -105,6 +105,7 @@ const ( errSelfReferenceWithoutSelf string = "must specify 'Self' kind for self referencing relations" errColNotMaterialized string = "non-materialized collections are not supported" errMaterializedViewAndACPNotSupported string = "materialized views do not support ACP" + errInvalidDefaultFieldValue string = "default field value is invalid" ) var ( @@ -681,3 +682,11 @@ func NewErrMaterializedViewAndACPNotSupported(collection string) error { errors.NewKV("Collection", collection), ) } + +func NewErrDefaultFieldValueInvalid(collection string, inner error) error { + return errors.New( + errInvalidDefaultFieldValue, + errors.NewKV("Collection", collection), + errors.NewKV("Inner", inner), + ) +} diff --git a/internal/request/graphql/schema/collection.go b/internal/request/graphql/schema/collection.go index 81d5182366..df450fb986 100644 --- a/internal/request/graphql/schema/collection.go +++ b/internal/request/graphql/schema/collection.go @@ -427,23 +427,35 @@ func defaultFromAST( if !ok { return nil, NewErrDefaultValueNotAllowed(field.Name.Value, astNamed.Name.Value) } + if len(directive.Arguments) != 1 { + return nil, NewErrDefaultValueOneArg(field.Name.Value) + } + arg := directive.Arguments[0] + if propName != arg.Name.Value { + return nil, NewErrDefaultValueType(field.Name.Value, propName, arg.Name.Value) + } var value any - for _, arg := range directive.Arguments { - if propName != arg.Name.Value { - return nil, NewErrDefaultValueInvalid(field.Name.Value, propName, arg.Name.Value) - } - switch t := arg.Value.(type) { - 
case *ast.IntValue: - value = gql.Int.ParseLiteral(arg.Value, nil) - case *ast.FloatValue: - value = gql.Float.ParseLiteral(arg.Value, nil) - case *ast.BooleanValue: - value = t.Value - case *ast.StringValue: - value = t.Value - default: - value = arg.Value.GetValue() - } + switch propName { + case types.DefaultDirectivePropInt: + value = gql.Int.ParseLiteral(arg.Value, nil) + case types.DefaultDirectivePropFloat: + value = gql.Float.ParseLiteral(arg.Value, nil) + case types.DefaultDirectivePropBool: + value = gql.Boolean.ParseLiteral(arg.Value, nil) + case types.DefaultDirectivePropString: + value = gql.String.ParseLiteral(arg.Value, nil) + case types.DefaultDirectivePropDateTime: + value = gql.DateTime.ParseLiteral(arg.Value, nil) + case types.DefaultDirectivePropJSON: + value = types.JSONScalarType().ParseLiteral(arg.Value, nil) + case types.DefaultDirectivePropBlob: + value = types.BlobScalarType().ParseLiteral(arg.Value, nil) + } + // If the value is nil, then parsing has failed, or a nil value was provided. + // Since setting a default value to nil is the same as not providing one, + // it is safer to return an error to let the user know something is wrong. + if value == nil { + return nil, NewErrDefaultValueInvalid(field.Name.Value, propName) } return value, nil } diff --git a/internal/request/graphql/schema/errors.go b/internal/request/graphql/schema/errors.go index 41f17bf373..a19a940ebf 100644 --- a/internal/request/graphql/schema/errors.go +++ b/internal/request/graphql/schema/errors.go @@ -30,8 +30,10 @@ const ( errPolicyUnknownArgument string = "policy with unknown argument" errPolicyInvalidIDProp string = "policy directive with invalid id property" errPolicyInvalidResourceProp string = "policy directive with invalid resource property" - errDefaultValueInvalid string = "default value type must match field type" + errDefaultValueType string = "default value type must match field type" errDefaultValueNotAllowed string = "default value is not allowed for this field type" + errDefaultValueInvalid string = "default value is invalid" + errDefaultValueOneArg string = "default value must specify one argument" errFieldTypeNotSpecified string = "field type not specified" ) @@ -141,9 +143,24 @@ func NewErrRelationNotFound(relationName string) error { ) } -func NewErrDefaultValueInvalid(name string, expected string, actual string) error { +func NewErrDefaultValueOneArg(field string) error { + return errors.New( + errDefaultValueOneArg, + errors.NewKV("Field", field), + ) +} + +func NewErrDefaultValueInvalid(field string, arg string) error { return errors.New( errDefaultValueInvalid, + errors.NewKV("Field", field), + errors.NewKV("Arg", arg), + ) +} + +func NewErrDefaultValueType(name string, expected string, actual string) error { + return errors.New( + errDefaultValueType, errors.NewKV("Name", name), errors.NewKV("Expected", expected), errors.NewKV("Actual", actual), diff --git a/tests/integration/collection_description/with_default_fields_test.go b/tests/integration/collection_description/with_default_fields_test.go index 4a0f86af77..dca776532d 100644 --- a/tests/integration/collection_description/with_default_fields_test.go +++ b/tests/integration/collection_description/with_default_fields_test.go @@ -58,7 +58,7 @@ func TestCollectionDescription_WithDefaultFieldValues(t *testing.T) { { ID: 3, Name: "created", - DefaultValue: "2000-07-23T03:00:00-00:00", + DefaultValue: "2000-07-23T03:00:00Z", }, { ID: 4, @@ -90,6 +90,23 @@ func TestCollectionDescription_WithDefaultFieldValues(t 
*testing.T) { testUtils.ExecuteTestCase(t, test) } +func TestCollectionDescription_WithInvalidDefaultFieldValueType_ReturnsError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + active: Boolean @default(bool: invalid) + } + `, + ExpectedError: "default value is invalid. Field: active, Arg: bool", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + func TestCollectionDescription_WithIncorrectDefaultFieldValueType_ReturnsError(t *testing.T) { test := testUtils.TestCase{ Actions: []any{ @@ -116,7 +133,7 @@ func TestCollectionDescription_WithMultipleDefaultFieldValueTypes_ReturnsError(t name: String @default(string: "Bob", int: 10, bool: true, float: 10) } `, - ExpectedError: "default value type must match field type. Name: name, Expected: string, Actual: int", + ExpectedError: "default value must specify one argument. Field: name", }, }, } From c549570aeb03ecd2d37b6e498a5982af72f53423 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Wed, 16 Oct 2024 11:55:14 -0400 Subject: [PATCH 65/71] test: Add bug bash tests for gql fragments (#3136) ## Relevant issue(s) Resolves #3135 ## Description Adds bug bash tests for gql fragments with inner objects. No bugs found. --- .../query/one_to_one/with_fragments_test.go | 142 ++++++++++++++++++ 1 file changed, 142 insertions(+) create mode 100644 tests/integration/query/one_to_one/with_fragments_test.go diff --git a/tests/integration/query/one_to_one/with_fragments_test.go b/tests/integration/query/one_to_one/with_fragments_test.go new file mode 100644 index 0000000000..8f43f2629a --- /dev/null +++ b/tests/integration/query/one_to_one/with_fragments_test.go @@ -0,0 +1,142 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package one_to_one + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryOneToOne_WithFragment(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + author: Author + } + + type Author { + name: String + age: Int + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham", + "age": 65 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + DocMap: map[string]any{ + "name": "Painted House", + "author": testUtils.NewDocIndex(1, 0), + }, + }, + testUtils.Request{ + Request: `query { + Book { + name + ...BookAuthorInfo + } + } + fragment BookAuthorInfo on Book { + author { + name + age + } + }`, + Results: map[string]any{ + "Book": []map[string]any{ + { + "name": "Painted House", + "author": map[string]any{ + "name": "John Grisham", + "age": int64(65), + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryOneToOne_WithFragmentWithObjectWithFragment(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + author: Author + } + + type Author { + name: String + age: Int + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham", + "age": 65 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + DocMap: map[string]any{ + "name": "Painted House", + "author": testUtils.NewDocIndex(1, 0), + }, + }, + testUtils.Request{ + Request: `query { + Book { + name + ...BookAuthorInfo + } + } + fragment BookAuthorInfo on Book { + author { + ...BookInfo + } + } + fragment BookInfo on Author { + name + age + }`, + Results: map[string]any{ + "Book": []map[string]any{ + { + "name": "Painted House", + "author": map[string]any{ + "name": "John Grisham", + "age": int64(65), + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From ba2df4a68b9edbfe85daccc1c1e709fc3843fff3 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 17 Oct 2024 09:51:33 -0700 Subject: [PATCH 66/71] fix(i): Index directive field arg name (#3138) ## Relevant issue(s) Resolves #3132 ## Description This PR fixes a mistake I made when refactoring the index directive. The `name` argument of the `includes` fields has been changed to `field`. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? Updated unit and integration tests. 
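For reference, a minimal sketch of the directive usage after this rename (the type and index name here are illustrative; the directive-level `name` argument, which names the index itself, is unchanged, and only the per-field entries inside `includes` switch from `name` to `field`):

```go
package index

import (
	"testing"

	testUtils "github.com/sourcenetwork/defradb/tests/integration"
)

func TestCompositeIndexCreate_UsingRenamedFieldArg_Succeeds(t *testing.T) {
	test := testUtils.TestCase{
		Actions: []any{
			testUtils.SchemaUpdate{
				// Before this patch the entries were written as
				// includes: [{name: "name"}, {name: "age"}].
				Schema: `
					type User @index(name: "name_age_idx", includes: [{field: "name"}, {field: "age"}]) {
						name: String
						age: Int
					}
				`,
			},
		},
	}

	testUtils.ExecuteTestCase(t, test)
}
```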
Specify the platform(s) on which this was tested: - MacOS --- internal/request/graphql/schema/collection.go | 4 +- .../graphql/schema/index_parse_test.go | 36 +++++++------- .../request/graphql/schema/types/types.go | 8 ++-- .../integration/index/array_composite_test.go | 16 +++---- .../index/array_unique_composite_test.go | 8 ++-- .../index/create_composite_test.go | 12 ++--- tests/integration/index/create_get_test.go | 2 +- .../index/create_unique_composite_test.go | 2 +- ...y_with_composite_index_field_order_test.go | 20 ++++---- ...y_with_composite_index_only_filter_test.go | 38 +++++++-------- ...y_with_composite_inxed_on_relation_test.go | 2 +- ...with_unique_composite_index_filter_test.go | 48 +++++++++---------- ...th_unique_index_on_relation_filter_test.go | 2 +- .../index/update_unique_composite_test.go | 2 +- .../mutation/upsert/simple_test.go | 2 +- 15 files changed, 101 insertions(+), 101 deletions(-) diff --git a/internal/request/graphql/schema/collection.go b/internal/request/graphql/schema/collection.go index df450fb986..e2bf4a2455 100644 --- a/internal/request/graphql/schema/collection.go +++ b/internal/request/graphql/schema/collection.go @@ -381,14 +381,14 @@ func indexFieldFromAST(value ast.Value, defaultDirection *ast.EnumValue) (client for _, field := range argTypeObject.Fields { switch field.Name.Value { - case types.IndexFieldInputName: + case types.IncludesPropField: nameVal, ok := field.Value.(*ast.StringValue) if !ok { return client.IndexedFieldDescription{}, ErrIndexWithInvalidArg } name = nameVal.Value - case types.IndexFieldInputDirection: + case types.IncludesPropDirection: directionVal, ok := field.Value.(*ast.EnumValue) if !ok { return client.IndexedFieldDescription{}, ErrIndexWithInvalidArg diff --git a/internal/request/graphql/schema/index_parse_test.go b/internal/request/graphql/schema/index_parse_test.go index 0c8413ec85..e10394f38d 100644 --- a/internal/request/graphql/schema/index_parse_test.go +++ b/internal/request/graphql/schema/index_parse_test.go @@ -24,7 +24,7 @@ func TestParseIndexOnStruct(t *testing.T) { cases := []indexTestCase{ { description: "Index with a single field", - sdl: `type user @index(includes: [{name: "name"}]) {}`, + sdl: `type user @index(includes: [{field: "name"}]) {}`, targetDescriptions: []client.IndexDescription{ { Name: "", @@ -37,7 +37,7 @@ func TestParseIndexOnStruct(t *testing.T) { }, { description: "Index with a name", - sdl: `type user @index(name: "userIndex", includes: [{name: "name"}]) {}`, + sdl: `type user @index(name: "userIndex", includes: [{field: "name"}]) {}`, targetDescriptions: []client.IndexDescription{ { Name: "userIndex", @@ -49,7 +49,7 @@ func TestParseIndexOnStruct(t *testing.T) { }, { description: "Unique index", - sdl: `type user @index(includes: [{name: "name"}], unique: true) {}`, + sdl: `type user @index(includes: [{field: "name"}], unique: true) {}`, targetDescriptions: []client.IndexDescription{ { Fields: []client.IndexedFieldDescription{ @@ -61,7 +61,7 @@ func TestParseIndexOnStruct(t *testing.T) { }, { description: "Index explicitly not unique", - sdl: `type user @index(includes: [{name: "name"}], unique: false) {}`, + sdl: `type user @index(includes: [{field: "name"}], unique: false) {}`, targetDescriptions: []client.IndexDescription{ { Fields: []client.IndexedFieldDescription{ @@ -73,7 +73,7 @@ func TestParseIndexOnStruct(t *testing.T) { }, { description: "Index with explicit ascending field", - sdl: `type user @index(includes: [{name: "name", direction: ASC}]) {}`, + sdl: `type user 
@index(includes: [{field: "name", direction: ASC}]) {}`, targetDescriptions: []client.IndexDescription{ { Fields: []client.IndexedFieldDescription{ @@ -83,7 +83,7 @@ func TestParseIndexOnStruct(t *testing.T) { }, { description: "Index with descending field", - sdl: `type user @index(includes: [{name: "name", direction: DESC}]) {}`, + sdl: `type user @index(includes: [{field: "name", direction: DESC}]) {}`, targetDescriptions: []client.IndexDescription{ { Fields: []client.IndexedFieldDescription{ @@ -93,7 +93,7 @@ func TestParseIndexOnStruct(t *testing.T) { }, { description: "Index with 2 fields", - sdl: `type user @index(includes: [{name: "name"}, {name: "age"}]) {}`, + sdl: `type user @index(includes: [{field: "name"}, {field: "age"}]) {}`, targetDescriptions: []client.IndexDescription{ { Fields: []client.IndexedFieldDescription{ @@ -105,7 +105,7 @@ func TestParseIndexOnStruct(t *testing.T) { }, { description: "Index with 2 fields and 2 directions", - sdl: `type user @index(includes: [{name: "name", direction: ASC}, {name: "age", direction: DESC}]) {}`, + sdl: `type user @index(includes: [{field: "name", direction: ASC}, {field: "age", direction: DESC}]) {}`, targetDescriptions: []client.IndexDescription{ { Fields: []client.IndexedFieldDescription{ @@ -131,37 +131,37 @@ func TestParseInvalidIndexOnStruct(t *testing.T) { }, { description: "unknown argument", - sdl: `type user @index(unknown: "something", includes: [{name: "name"}]) {}`, + sdl: `type user @index(unknown: "something", includes: [{field: "name"}]) {}`, expectedErr: errIndexUnknownArgument, }, { description: "invalid index name type", - sdl: `type user @index(name: 1, includes: [{name: "name"}]) {}`, + sdl: `type user @index(name: 1, includes: [{field: "name"}]) {}`, expectedErr: errIndexInvalidArgument, }, { description: "index name starts with a number", - sdl: `type user @index(name: "1_user_name", includes: [{name: "name"}]) {}`, + sdl: `type user @index(name: "1_user_name", includes: [{field: "name"}]) {}`, expectedErr: errIndexInvalidName, }, { description: "index with empty name", - sdl: `type user @index(name: "", includes: [{name: "name"}]) {}`, + sdl: `type user @index(name: "", includes: [{field: "name"}]) {}`, expectedErr: errIndexInvalidName, }, { description: "index name with spaces", - sdl: `type user @index(name: "user name", includes: [{name: "name"}]) {}`, + sdl: `type user @index(name: "user name", includes: [{field: "name"}]) {}`, expectedErr: errIndexInvalidName, }, { description: "index name with special symbols", - sdl: `type user @index(name: "user!name", includes: [{name: "name"}]) {}`, + sdl: `type user @index(name: "user!name", includes: [{field: "name"}]) {}`, expectedErr: errIndexInvalidName, }, { description: "invalid 'unique' value type", - sdl: `type user @index(includes: [{name: "name"}], unique: "true") {}`, + sdl: `type user @index(includes: [{field: "name"}], unique: "true") {}`, expectedErr: errIndexInvalidArgument, }, { @@ -272,7 +272,7 @@ func TestParseIndexOnField(t *testing.T) { { description: "composite field index with implicit include and implicit ordering", sdl: `type user { - name: String @index(direction: DESC, includes: [{name: "age"}]) + name: String @index(direction: DESC, includes: [{field: "age"}]) age: Int }`, targetDescriptions: []client.IndexDescription{ @@ -289,7 +289,7 @@ func TestParseIndexOnField(t *testing.T) { { description: "composite field index with implicit include and explicit ordering", sdl: `type user { - name: String @index(direction: DESC, includes: [{name: 
"age", direction: ASC}]) + name: String @index(direction: DESC, includes: [{field: "age", direction: ASC}]) age: Int }`, targetDescriptions: []client.IndexDescription{ @@ -306,7 +306,7 @@ func TestParseIndexOnField(t *testing.T) { { description: "composite field index with explicit includes", sdl: `type user { - name: String @index(includes: [{name: "age"}, {name: "name"}]) + name: String @index(includes: [{field: "age"}, {field: "name"}]) age: Int }`, targetDescriptions: []client.IndexDescription{ diff --git a/internal/request/graphql/schema/types/types.go b/internal/request/graphql/schema/types/types.go index e5ab3c5277..8b02c59056 100644 --- a/internal/request/graphql/schema/types/types.go +++ b/internal/request/graphql/schema/types/types.go @@ -39,8 +39,8 @@ const ( IndexDirectivePropDirection = "direction" IndexDirectivePropIncludes = "includes" - IndexFieldInputName = "name" - IndexFieldInputDirection = "direction" + IncludesPropField = "field" + IncludesPropDirection = "direction" DefaultDirectiveLabel = "default" DefaultDirectivePropString = "string" @@ -175,10 +175,10 @@ func IndexFieldInputObject(orderingEnum *gql.Enum) *gql.InputObject { Name: "IndexField", Description: "Used to create an index from a field.", Fields: gql.InputObjectConfigFieldMap{ - IndexFieldInputName: &gql.InputObjectFieldConfig{ + IncludesPropField: &gql.InputObjectFieldConfig{ Type: gql.String, }, - IndexFieldInputDirection: &gql.InputObjectFieldConfig{ + IncludesPropDirection: &gql.InputObjectFieldConfig{ Type: orderingEnum, }, }, diff --git a/tests/integration/index/array_composite_test.go b/tests/integration/index/array_composite_test.go index 94ccc9d659..ede73283c9 100644 --- a/tests/integration/index/array_composite_test.go +++ b/tests/integration/index/array_composite_test.go @@ -27,7 +27,7 @@ func TestArrayCompositeIndex_WithFilterOnIndexedArrayUsingAny_ShouldUseIndex(t * Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "numbers"}, {name: "age"}]) { + type User @index(includes: [{field: "name"}, {field: "numbers"}, {field: "age"}]) { name: String numbers: [Int!] age: Int @@ -93,7 +93,7 @@ func TestArrayCompositeIndex_WithFilterOnIndexedArrayUsingAll_ShouldUseIndex(t * Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "numbers"}, {name: "age"}]) { + type User @index(includes: [{field: "name"}, {field: "numbers"}, {field: "age"}]) { name: String numbers: [Int!] age: Int @@ -160,7 +160,7 @@ func TestArrayCompositeIndex_WithFilterOnIndexedArrayUsingNone_ShouldUseIndex(t Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "numbers"}, {name: "age"}]) { + type User @index(includes: [{field: "name"}, {field: "numbers"}, {field: "age"}]) { name: String numbers: [Int!] age: Int @@ -228,7 +228,7 @@ func TestArrayCompositeIndex_With2ConsecutiveArrayFields_Succeed(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "numbers"}, {name: "hobbies"}, {name: "age"}]) { + type User @index(includes: [{field: "name"}, {field: "numbers"}, {field: "hobbies"}, {field: "age"}]) { name: String numbers: [Int!] hobbies: [String!] 
@@ -302,7 +302,7 @@ func TestArrayCompositeIndex_With2SeparateArrayFields_Succeed(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "numbers"}, {name: "name"}, {name: "age"}, {name: "hobbies"}]) { + type User @index(includes: [{field: "numbers"}, {field: "name"}, {field: "age"}, {field: "hobbies"}]) { name: String numbers: [Int!] hobbies: [String!] @@ -377,7 +377,7 @@ func TestArrayCompositeIndex_WithAnyNoneAll_Succeed(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "numbers1"}, {name: "numbers2"}, {name: "numbers3"}]) { + type User @index(includes: [{field: "numbers1"}, {field: "numbers2"}, {field: "numbers3"}]) { name: String numbers1: [Int!] numbers2: [Int!] @@ -444,7 +444,7 @@ func TestArrayCompositeIndexUpdate_With2ArrayFields_Succeed(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "numbers"}, {name: "hobbies"}]) { + type User @index(includes: [{field: "name"}, {field: "numbers"}, {field: "hobbies"}]) { name: String numbers: [Int!] hobbies: [String!] @@ -549,7 +549,7 @@ func TestArrayCompositeIndexDelete_With2ConsecutiveArrayFields_Succeed(t *testin Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "numbers"}, {name: "hobbies"}]) { + type User @index(includes: [{field: "name"}, {field: "numbers"}, {field: "hobbies"}]) { name: String numbers: [Int!] hobbies: [String!] diff --git a/tests/integration/index/array_unique_composite_test.go b/tests/integration/index/array_unique_composite_test.go index ec1b10ee0f..57b21344ea 100644 --- a/tests/integration/index/array_unique_composite_test.go +++ b/tests/integration/index/array_unique_composite_test.go @@ -28,7 +28,7 @@ func TestArrayUniqueCompositeIndex_WithUniqueCombinations_Succeed(t *testing.T) Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "nfts1"}, {name: "nfts2"}]) { + type User @index(unique: true, includes: [{field: "nfts1"}, {field: "nfts2"}]) { name: String nfts1: [Int!] nfts2: [Int!] @@ -78,7 +78,7 @@ func TestArrayUniqueCompositeIndex_IfDocIsCreatedThatViolatesUniqueness_Error(t Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "nfts1"}, {name: "nfts2"}]) { + type User @index(unique: true, includes: [{field: "nfts1"}, {field: "nfts2"}]) { name: String nfts1: [Int!] nfts2: [Int!] @@ -122,7 +122,7 @@ func TestArrayUniqueCompositeIndex_IfDocIsUpdatedThatViolatesUniqueness_Error(t Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "nfts1"}, {name: "nfts2"}]) { + type User @index(unique: true, includes: [{field: "nfts1"}, {field: "nfts2"}]) { name: String nfts1: [Int!] nfts2: [Int!] 
@@ -164,7 +164,7 @@ func TestArrayUniqueCompositeIndex_IfDocsHaveNilValues_Succeed(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "nfts1"}, {name: "nfts2"}]) { + type User @index(unique: true, includes: [{field: "nfts1"}, {field: "nfts2"}]) { name: String nfts1: [Int] nfts2: [Int] diff --git a/tests/integration/index/create_composite_test.go b/tests/integration/index/create_composite_test.go index 6c1fe6c058..6f8bc75cc5 100644 --- a/tests/integration/index/create_composite_test.go +++ b/tests/integration/index/create_composite_test.go @@ -79,7 +79,7 @@ func TestCompositeIndexCreate_UsingObjectDirective_SetsDefaultDirection(t *testi Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(direction: DESC, includes: [{name: "name"}, {name: "age"}]) { + type User @index(direction: DESC, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int } @@ -117,7 +117,7 @@ func TestCompositeIndexCreate_UsingObjectDirective_OverridesDefaultDirection(t * Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(direction: DESC, includes: [{name: "name"}, {name: "age", direction: ASC}]) { + type User @index(direction: DESC, includes: [{field: "name"}, {field: "age", direction: ASC}]) { name: String age: Int } @@ -156,7 +156,7 @@ func TestCompositeIndexCreate_UsingFieldDirective_ImplicitlyAddsField(t *testing testUtils.SchemaUpdate{ Schema: ` type User { - name: String @index(includes: [{name: "age"}]) + name: String @index(includes: [{field: "age"}]) age: Int } `, @@ -191,7 +191,7 @@ func TestCompositeIndexCreate_UsingFieldDirective_SetsDefaultDirection(t *testin testUtils.SchemaUpdate{ Schema: ` type User { - name: String @index(direction: DESC, includes: [{name: "age"}]) + name: String @index(direction: DESC, includes: [{field: "age"}]) age: Int } `, @@ -229,7 +229,7 @@ func TestCompositeIndexCreate_UsingFieldDirective_OverridesDefaultDirection(t *t testUtils.SchemaUpdate{ Schema: ` type User { - name: String @index(direction: DESC, includes: [{name: "age", direction: ASC}]) + name: String @index(direction: DESC, includes: [{field: "age", direction: ASC}]) age: Int } `, @@ -267,7 +267,7 @@ func TestCompositeIndexCreate_UsingFieldDirective_WithExplicitIncludes_RespectsO testUtils.SchemaUpdate{ Schema: ` type User { - name: String @index(includes: [{name: "age"}, {name: "name"}]) + name: String @index(includes: [{field: "age"}, {field: "name"}]) age: Int } `, diff --git a/tests/integration/index/create_get_test.go b/tests/integration/index/create_get_test.go index 76b63980c4..346175e2bf 100644 --- a/tests/integration/index/create_get_test.go +++ b/tests/integration/index/create_get_test.go @@ -23,7 +23,7 @@ func TestIndexGet_ShouldReturnListOfExistingIndexes(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(name: "age_index", includes: [{name: "age"}]) { + type User @index(name: "age_index", includes: [{field: "age"}]) { name: String @index(name: "name_index") age: Int } diff --git a/tests/integration/index/create_unique_composite_test.go b/tests/integration/index/create_unique_composite_test.go index 44123eaefe..7a7e9fc5e0 100644 --- a/tests/integration/index/create_unique_composite_test.go +++ b/tests/integration/index/create_unique_composite_test.go @@ -75,7 +75,7 @@ func TestUniqueCompositeIndexCreate_UponAddingDocWithExistingFieldValue_ReturnEr Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: 
"age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String diff --git a/tests/integration/index/query_with_composite_index_field_order_test.go b/tests/integration/index/query_with_composite_index_field_order_test.go index f53fbc3312..339deea11a 100644 --- a/tests/integration/index/query_with_composite_index_field_order_test.go +++ b/tests/integration/index/query_with_composite_index_field_order_test.go @@ -22,7 +22,7 @@ func TestQueryWithCompositeIndex_WithDefaultOrder_ShouldFetchInDefaultOrder(t *t Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "age"}]) { + type User @index(includes: [{field: "name"}, {field: "age"}]) { name: String age: Int }`, @@ -100,7 +100,7 @@ func TestQueryWithCompositeIndex_WithDefaultOrderCaseInsensitive_ShouldFetchInDe Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "age"}]) { + type User @index(includes: [{field: "name"}, {field: "age"}]) { name: String age: Int }`, @@ -178,7 +178,7 @@ func TestQueryWithCompositeIndex_WithRevertedOrderOnFirstField_ShouldFetchInReve Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name", direction: DESC}, {name: "age", direction: ASC}]) { + type User @index(includes: [{field: "name", direction: DESC}, {field: "age", direction: ASC}]) { name: String age: Int }`, @@ -268,7 +268,7 @@ func TestQueryWithCompositeIndex_WithRevertedOrderOnFirstFieldCaseInsensitive_Sh Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name", direction: DESC}, {name: "age", direction: ASC}]) { + type User @index(includes: [{field: "name", direction: DESC}, {field: "age", direction: ASC}]) { name: String age: Int }`, @@ -358,7 +358,7 @@ func TestQueryWithCompositeIndex_WithRevertedOrderOnSecondField_ShouldFetchInRev Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name", direction: ASC}, {name: "age", direction: DESC}]) { + type User @index(includes: [{field: "name", direction: ASC}, {field: "age", direction: DESC}]) { name: String age: Int }`, @@ -438,7 +438,7 @@ func TestQueryWithCompositeIndex_WithRevertedOrderOnSecondFieldCaseInsensitive_S Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name", direction: ASC}, {name: "age", direction: DESC}]) { + type User @index(includes: [{field: "name", direction: ASC}, {field: "age", direction: DESC}]) { name: String age: Int }`, @@ -516,7 +516,7 @@ func TestQueryWithCompositeIndex_IfExactMatchWithRevertedOrderOnFirstField_Shoul Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name", direction: DESC}, {name: "age", direction: ASC}]) { + type User @index(includes: [{field: "name", direction: DESC}, {field: "age", direction: ASC}]) { name: String age: Int }`, @@ -574,7 +574,7 @@ func TestQueryWithCompositeIndex_IfExactMatchWithRevertedOrderOnSecondField_Shou Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name", direction: ASC}, {name: "age", direction: DESC}]) { + type User @index(includes: [{field: "name", direction: ASC}, {field: "age", direction: DESC}]) { name: String age: Int }`, @@ -632,7 +632,7 @@ func TestQueryWithCompositeIndex_WithInFilterOnFirstFieldWithRevertedOrder_Shoul Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name", direction: DESC}, {name: "age", 
direction: ASC}]) { + type User @index(includes: [{field: "name", direction: DESC}, {field: "age", direction: ASC}]) { name: String age: Int email: String @@ -667,7 +667,7 @@ func TestQueryWithCompositeIndex_WithInFilterOnSecondFieldWithRevertedOrder_Shou Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name", direction: ASC}, {name: "age", direction: DESC}]) { + type User @index(includes: [{field: "name", direction: ASC}, {field: "age", direction: DESC}]) { name: String age: Int email: String diff --git a/tests/integration/index/query_with_composite_index_only_filter_test.go b/tests/integration/index/query_with_composite_index_only_filter_test.go index adaef0d481..eb4871ecfc 100644 --- a/tests/integration/index/query_with_composite_index_only_filter_test.go +++ b/tests/integration/index/query_with_composite_index_only_filter_test.go @@ -40,7 +40,7 @@ func TestQueryWithCompositeIndex_WithEqualFilter_ShouldFetch(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "age"}]) { + type User @index(includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -96,7 +96,7 @@ func TestQueryWithCompositeIndex_WithGreaterThanFilterOnFirstField_ShouldFetch(t Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "age"}, {name: "name"}]) { + type User @index(includes: [{field: "age"}, {field: "name"}]) { name: String age: Int email: String @@ -134,7 +134,7 @@ func TestQueryWithCompositeIndex_WithGreaterThanFilterOnSecondField_ShouldFetch( Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "age"}]) { + type User @index(includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -172,7 +172,7 @@ func TestQueryWithCompositeIndex_WithGreaterOrEqualFilterOnFirstField_ShouldFetc Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "age"}, {name: "name"}]) { + type User @index(includes: [{field: "age"}, {field: "name"}]) { name: String age: Int email: String @@ -211,7 +211,7 @@ func TestQueryWithCompositeIndex_WithGreaterOrEqualFilterOnSecondField_ShouldFet Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "age"}]) { + type User @index(includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -250,7 +250,7 @@ func TestQueryWithCompositeIndex_WithLessThanFilterOnFirstField_ShouldFetch(t *t Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "age"}, {name: "name"}]) { + type User @index(includes: [{field: "age"}, {field: "name"}]) { name: String age: Int email: String @@ -288,7 +288,7 @@ func TestQueryWithCompositeIndex_WithLessThanFilterOnSecondField_ShouldFetch(t * Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "age"}]) { + type User @index(includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -326,7 +326,7 @@ func TestQueryWithCompositeIndex_WithLessOrEqualFilterOnFirstField_ShouldFetch(t Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "age"}, {name: "name"}]) { + type User @index(includes: [{field: "age"}, {field: "name"}]) { name: String age: Int email: String @@ -365,7 +365,7 @@ func TestQueryWithCompositeIndex_WithLessOrEqualFilterOnSecondField_ShouldFetch( Actions: []any{ testUtils.SchemaUpdate{ 
Schema: ` - type User @index(includes: [{name: "name"}, {name: "age"}]) { + type User @index(includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -404,7 +404,7 @@ func TestQueryWithCompositeIndex_WithNotEqualFilter_ShouldFetch(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "age"}]) { + type User @index(includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -449,7 +449,7 @@ func TestQueryWithCompositeIndex_WithInFilter_ShouldFetch(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "age"}]) { + type User @index(includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -488,7 +488,7 @@ func TestQueryWithCompositeIndex_WithNotInFilter_ShouldFetch(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "age"}]) { + type User @index(includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -558,7 +558,7 @@ func TestQueryWithCompositeIndex_WithLikeFilter_ShouldFetch(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "email"}]) { + type User @index(includes: [{field: "name"}, {field: "email"}]) { name: String email: String }`, @@ -655,7 +655,7 @@ func TestQueryWithCompositeIndex_WithNotLikeFilter_ShouldFetch(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "email"}]) { + type User @index(includes: [{field: "name"}, {field: "email"}]) { name: String email: String }`, @@ -690,7 +690,7 @@ func TestQueryWithCompositeIndex_IfFirstFieldIsNotInFilter_ShouldNotUseIndex(t * Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "age"}]) { + type User @index(includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -719,7 +719,7 @@ func TestQueryWithCompositeIndex_WithEqualFilterOnNilValueOnFirst_ShouldFetch(t Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "age"}]) { + type User @index(includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -766,7 +766,7 @@ func TestQueryWithCompositeIndex_WithEqualFilterOnNilValueOnSecond_ShouldFetch(t Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "age"}]) { + type User @index(includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -823,7 +823,7 @@ func TestQueryWithCompositeIndex_IfMiddleFieldIsNotInFilter_ShouldIgnoreValue(t Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "email"}, {name: "age"}]) { + type User @index(includes: [{field: "name"}, {field: "email"}, {field: "age"}]) { name: String email: String age: Int @@ -898,7 +898,7 @@ func TestQueryWithCompositeIndex_IfConsecutiveEqOps_ShouldUseAllToOptimizeQuery( Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(includes: [{name: "name"}, {name: "age"}, {name: "numChildren"}]) { + type User @index(includes: [{field: "name"}, {field: "age"}, {field: "numChildren"}]) { name: String age: Int numChildren: Int diff --git a/tests/integration/index/query_with_composite_inxed_on_relation_test.go 
b/tests/integration/index/query_with_composite_inxed_on_relation_test.go index aab19f2d07..c3492f0b6c 100644 --- a/tests/integration/index/query_with_composite_inxed_on_relation_test.go +++ b/tests/integration/index/query_with_composite_inxed_on_relation_test.go @@ -31,7 +31,7 @@ func TestQueryWithCompositeIndexOnManyToOne_WithMultipleIndexedChildNodes_Should devices: [Device] } - type Device @index(includes: [{name: "owner_id"}, {name: "manufacturer_id"}]) { + type Device @index(includes: [{field: "owner_id"}, {field: "manufacturer_id"}]) { model: String owner: User manufacturer: Manufacturer diff --git a/tests/integration/index/query_with_unique_composite_index_filter_test.go b/tests/integration/index/query_with_unique_composite_index_filter_test.go index 190bfca53a..6c1222e069 100644 --- a/tests/integration/index/query_with_unique_composite_index_filter_test.go +++ b/tests/integration/index/query_with_unique_composite_index_filter_test.go @@ -40,7 +40,7 @@ func TestQueryWithUniqueCompositeIndex_WithEqualFilter_ShouldFetch(t *testing.T) Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -114,7 +114,7 @@ func TestQueryWithUniqueCompositeIndex_WithGreaterThanFilterOnFirstField_ShouldF Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "age"}, {name: "name"}]) { + type User @index(unique: true, includes: [{field: "age"}, {field: "name"}]) { name: String age: Int email: String @@ -152,7 +152,7 @@ func TestQueryWithUniqueCompositeIndex_WithGreaterThanFilterOnSecondField_Should Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -190,7 +190,7 @@ func TestQueryWithUniqueCompositeIndex_WithGreaterOrEqualFilterOnFirstField_Shou Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "age"}, {name: "name"}]) { + type User @index(unique: true, includes: [{field: "age"}, {field: "name"}]) { name: String age: Int email: String @@ -229,7 +229,7 @@ func TestQueryWithUniqueCompositeIndex_WithGreaterOrEqualFilterOnSecondField_Sho Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -268,7 +268,7 @@ func TestQueryWithUniqueCompositeIndex_WithLessThanFilterOnFirstField_ShouldFetc Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "age"}, {name: "name"}]) { + type User @index(unique: true, includes: [{field: "age"}, {field: "name"}]) { name: String age: Int email: String @@ -306,7 +306,7 @@ func TestQueryWithUniqueCompositeIndex_WithLessThanFilterOnSecondField_ShouldFet Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -344,7 +344,7 @@ func TestQueryWithUniqueCompositeIndex_WithLessOrEqualFilterOnFirstField_ShouldF Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, 
includes: [{name: "age"}, {name: "name"}]) { + type User @index(unique: true, includes: [{field: "age"}, {field: "name"}]) { name: String age: Int email: String @@ -383,7 +383,7 @@ func TestQueryWithUniqueCompositeIndex_WithLessOrEqualFilterOnSecondField_Should Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -422,7 +422,7 @@ func TestQueryWithUniqueCompositeIndex_WithNotEqualFilter_ShouldFetch(t *testing Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -468,7 +468,7 @@ func TestQueryWithUniqueCompositeIndex_WithInForFirstAndEqForRest_ShouldFetchEff Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -544,7 +544,7 @@ func TestQueryWithUniqueCompositeIndex_WithInFilter_ShouldFetch(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -599,7 +599,7 @@ func TestQueryWithUniqueCompositeIndex_WithNotInFilter_ShouldFetch(t *testing.T) Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -669,7 +669,7 @@ func TestQueryWithUniqueCompositeIndex_WithLikeFilter_ShouldFetch(t *testing.T) Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "email"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "email"}]) { name: String email: String }`, @@ -766,7 +766,7 @@ func TestQueryWithUniqueCompositeIndex_WithNotLikeFilter_ShouldFetch(t *testing. 
Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "email"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "email"}]) { name: String email: String }`, @@ -806,7 +806,7 @@ func TestQueryWithUniqueCompositeIndex_WithNotCaseInsensitiveLikeFilter_ShouldFe Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "email"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "email"}]) { name: String email: String }`, @@ -842,7 +842,7 @@ func TestQueryWithUniqueCompositeIndex_IfFirstFieldIsNotInFilter_ShouldNotUseInd Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -871,7 +871,7 @@ func TestQueryWithUniqueCompositeIndex_WithEqualFilterOnNilValueOnFirst_ShouldFe Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -925,7 +925,7 @@ func TestQueryWithUniqueCompositeIndex_WithMultipleNilOnFirstFieldAndNilFilter_S Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -981,7 +981,7 @@ func TestQueryWithUniqueCompositeIndex_WithEqualFilterOnNilValueOnSecond_ShouldF Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int about: String @@ -1048,7 +1048,7 @@ func TestQueryWithUniqueCompositeIndex_WithMultipleNilOnSecondFieldsAndNilFilter Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String @@ -1113,7 +1113,7 @@ func TestQueryWithUniqueCompositeIndex_WithMultipleNilOnBothFieldsAndNilFilter_S Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int about: String @@ -1214,7 +1214,7 @@ func TestQueryWithUniqueCompositeIndex_AfterUpdateOnNilFields_ShouldFetch(t *tes Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int about: String @@ -1355,7 +1355,7 @@ func TestQueryWithUniqueCompositeIndex_IfMiddleFieldIsNotInFilter_ShouldIgnoreVa Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "email"}, {name: "age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "email"}, {field: "age"}]) { name: String email: String age: Int diff --git a/tests/integration/index/query_with_unique_index_on_relation_filter_test.go b/tests/integration/index/query_with_unique_index_on_relation_filter_test.go 
index 05c4b05395..8c5fa75d1c 100644 --- a/tests/integration/index/query_with_unique_index_on_relation_filter_test.go +++ b/tests/integration/index/query_with_unique_index_on_relation_filter_test.go @@ -28,7 +28,7 @@ func TestQueryWithUniqueCompositeIndex_WithFilterOnIndexedRelation_ShouldFilter( type Device { manufacturer: String - owner: User @index(unique: true, includes: [{name: "manufacturer"}]) + owner: User @index(unique: true, includes: [{field: "manufacturer"}]) } `, }, diff --git a/tests/integration/index/update_unique_composite_test.go b/tests/integration/index/update_unique_composite_test.go index 17a831369c..c37d44c254 100644 --- a/tests/integration/index/update_unique_composite_test.go +++ b/tests/integration/index/update_unique_composite_test.go @@ -22,7 +22,7 @@ func TestUniqueCompositeIndexUpdate_UponUpdatingDocWithExistingFieldValue_Should Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type User @index(unique: true, includes: [{name: "name"}, {name: "age"}]) { + type User @index(unique: true, includes: [{field: "name"}, {field: "age"}]) { name: String age: Int email: String diff --git a/tests/integration/mutation/upsert/simple_test.go b/tests/integration/mutation/upsert/simple_test.go index ba2ac7fe58..82370df4d4 100644 --- a/tests/integration/mutation/upsert/simple_test.go +++ b/tests/integration/mutation/upsert/simple_test.go @@ -292,7 +292,7 @@ func TestMutationUpsertSimple_WithUniqueCompositeIndexAndDuplicateUpdate_Returns Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type Users @index(includes: [{name: "name"}, {name: "age"}], unique: true) { + type Users @index(includes: [{field: "name"}, {field: "age"}], unique: true) { name: String age: Int } From f528a6b39c546bc23a7e98ed45ef2fba0b8bf719 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 17 Oct 2024 10:38:54 -0700 Subject: [PATCH 67/71] fix(i): Panic in object equal filter (#3143) ## Relevant issue(s) Resolves #3141 ## Description This PR fixes a bug where the object equality check would panic when comparing with a non object type. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? Added integration test. Specify the platform(s) on which this was tested: - MacOS --- internal/connor/connor.go | 13 ++-- internal/connor/eq.go | 4 +- tests/integration/query/json/with_eq_test.go | 62 ++++++++++++++++++++ 3 files changed, 72 insertions(+), 7 deletions(-) diff --git a/internal/connor/connor.go b/internal/connor/connor.go index a7a8290dbe..da4f7f5b4d 100644 --- a/internal/connor/connor.go +++ b/internal/connor/connor.go @@ -1,8 +1,11 @@ -/* -Package connor provides a domain-specific language to express conditions against data. - -It is derived from https://github.com/SierraSoftworks/connor. -*/ +// Package connor provides a domain-specific language to express conditions against data. +// +// It is derived from https://github.com/SierraSoftworks/connor. +// +// Note to developers: +// Never assume that the data given to an operator is of a certain type. 
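+// For example, a value decoded from JSON may arrive as a
+// map[string]any, []any, string, float64, bool, or nil.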
+// Operators should work with any data type so that untyped data +// such as JSON can be filtered as expected. package connor const ( diff --git a/internal/connor/eq.go b/internal/connor/eq.go index 6b9f56293a..65c17356f0 100644 --- a/internal/connor/eq.go +++ b/internal/connor/eq.go @@ -75,8 +75,8 @@ func objectsEqual(condition map[string]any, data any) (bool, error) { if data == nil { return condition == nil, nil } - d := data.(map[string]any) - if len(d) != len(condition) { + d, ok := data.(map[string]any) + if !ok || len(d) != len(condition) { return false, nil } for k, v := range d { diff --git a/tests/integration/query/json/with_eq_test.go b/tests/integration/query/json/with_eq_test.go index 4434412a1a..d319148915 100644 --- a/tests/integration/query/json/with_eq_test.go +++ b/tests/integration/query/json/with_eq_test.go @@ -162,3 +162,65 @@ func TestQueryJSON_WithEqualFilterWithNullValue_ShouldFilter(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestQueryJSON_WithEqualFilterWithAllTypes_ShouldFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with JSON _eq filter all types", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Custom: JSON + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Shahzad", + "Custom": "32" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Andy", + "Custom": [1, 2] + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "Fred", + "Custom": {"one": 1} + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "John", + "Custom": false + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "Name": "David", + "Custom": 32 + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {Custom: {_eq: {one: 1}}}) { + Name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{ + { + "Name": "Fred", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From e81133e6085078cb851031676ec2ecd4cdf99d94 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 17 Oct 2024 15:56:39 -0700 Subject: [PATCH 68/71] fix(i): Aggregate json filter (#3150) ## Relevant issue(s) Resolves #3149 ## Description This PR fixes an issue with aggregate JSON filtering. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? Added integration test. 
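For reference, a minimal sketch of the request shape this fixes, mirroring the new integration test below (a `Users` collection with a JSON `custom` field):

```go
// Before this fix, the JSON sub-fields in the aggregate filter could be
// interpreted as relation joins (see removeJSONSubFields in mapper.go below).
testUtils.Request{
	Request: `query {
		_count(Users: {filter: {custom: {tree: {_eq: "oak"}}}})
	}`,
	Results: map[string]any{
		"_count": 1,
	},
},
```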
Specify the platform(s) on which this was tested: - MacOS --- internal/planner/mapper/mapper.go | 27 ++++++ .../query/json/with_aggregate_test.go | 65 +++++++++++++++ .../one_to_many/with_count_filter_test.go | 83 +++++++++++++++++++ 3 files changed, 175 insertions(+) create mode 100644 tests/integration/query/json/with_aggregate_test.go diff --git a/internal/planner/mapper/mapper.go b/internal/planner/mapper/mapper.go index 9845e93d13..4717b7cba0 100644 --- a/internal/planner/mapper/mapper.go +++ b/internal/planner/mapper/mapper.go @@ -439,6 +439,7 @@ func resolveAggregates( if err != nil { return nil, err } + removeJSONSubFields(childMapping, hostSelectRequest) childFields, _, err := getRequestables( ctx, @@ -569,6 +570,32 @@ func resolveAggregates( return fields, nil } +// removeJSONSubFields ensures that selections of +// JSON objects are not interpreted as joins. +// +// This can happen when an aggregate contains a filter +// on a JSON object, but we can't tell if it is a relation +// until the child mapping is created. +func removeJSONSubFields( + mapping *core.DocumentMapping, + hostSelectRequest *request.Select, +) { + var fields []request.Selection + for _, field := range hostSelectRequest.Fields { + switch f := field.(type) { + case *request.Select: + _, isMapped := mapping.IndexesByName[f.Name] + if !isMapped { + fields = append(fields, field) + } + + default: + fields = append(fields, field) + } + } + hostSelectRequest.Fields = fields +} + func mapAggregateNestedTargets( target *aggregateRequestTarget, hostSelectRequest *request.Select, diff --git a/tests/integration/query/json/with_aggregate_test.go b/tests/integration/query/json/with_aggregate_test.go new file mode 100644 index 0000000000..4d7e4bf87d --- /dev/null +++ b/tests/integration/query/json/with_aggregate_test.go @@ -0,0 +1,65 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package json + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryJSON_WithAggregateFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple JSON, aggregate with filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: `type Users { + name: String + custom: JSON + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "custom": { + "tree": "maple", + "age": 250 + } + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "custom": { + "tree": "oak", + "age": 450 + } + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Shahzad", + "custom": null + }`, + }, + testUtils.Request{ + Request: `query { + _count(Users: {filter: {custom: {tree: {_eq: "oak"}}}}) + }`, + Results: map[string]any{ + "_count": 1, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/one_to_many/with_count_filter_test.go b/tests/integration/query/one_to_many/with_count_filter_test.go index 74d4cd6e29..9bc8f672e8 100644 --- a/tests/integration/query/one_to_many/with_count_filter_test.go +++ b/tests/integration/query/one_to_many/with_count_filter_test.go @@ -178,3 +178,86 @@ func TestQueryOneToManyWithCountWithFilterAndChildFilter(t *testing.T) { executeTestCase(t, test) } + +func TestQueryOneToMany_WithCountWithJSONFilterAndChildFilter_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "One-to-many relation query from many side with count with JSON filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Book { + name: String + rating: Float + author: Author + } + + type Author { + name: String + age: Int + verified: Boolean + published: [Book] + metadata: JSON + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "John Grisham", + "age": 65, + "verified": true, + "metadata": { + "yearOfBirth": 1955 + } + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Cornelia Funke", + "age": 62, + "verified": false, + "metadata": { + "yearOfBirth": 1958 + } + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + DocMap: map[string]any{ + "name": "Painted House", + "rating": 4.9, + "author_id": testUtils.NewDocIndex(1, 0), + }, + }, + testUtils.CreateDoc{ + CollectionID: 0, + DocMap: map[string]any{ + "name": "A Time for Mercy", + "rating": 4.5, + "author_id": testUtils.NewDocIndex(1, 0), + }, + }, + testUtils.CreateDoc{ + CollectionID: 0, + DocMap: map[string]any{ + "name": "Theif Lord", + "rating": 4.8, + "author_id": testUtils.NewDocIndex(1, 1), + }, + }, + testUtils.Request{ + Request: `query { + _count(Author: {filter: { + metadata: {yearOfBirth: {_eq: 1958}}, + published: {name: {_ilike: "%lord%"}} + }}) + }`, + Results: map[string]any{ + "_count": 1, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From 8181a15500e95e1f7da073b336c6967b630b7e5c Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 17 Oct 2024 17:43:46 -0700 Subject: [PATCH 69/71] fix: Validate GraphQL schemas (#3152) ## Relevant issue(s) Resolves #3151 ## Description This PR fixes an issue where GraphQL schemas were never validated. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). 
- [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? Updated integration tests. Specify the platform(s) on which this was tested: - MacOS --- examples/schema/user.graphql | 2 +- internal/core/parser.go | 2 +- internal/db/schema.go | 2 +- internal/db/view.go | 2 +- internal/request/graphql/parser.go | 14 +- internal/request/graphql/schema/collection.go | 24 -- .../graphql/schema/index_parse_test.go | 27 +-- internal/request/graphql/schema/manager.go | 214 +++--------------- internal/request/graphql/schema/schema.go | 206 +++++++++++++++++ .../request/graphql/schema/types/types.go | 4 +- tests/bench/query/planner/utils.go | 3 +- .../reject_invalid_arg_type_on_schema_test.go | 4 +- .../with_default_fields_test.go | 2 +- tests/integration/encryption/commit_test.go | 4 +- .../integration/encryption/peer_share_test.go | 4 +- tests/integration/encryption/query_test.go | 2 +- tests/integration/encryption/utils.go | 2 +- tests/integration/issues/2566_test.go | 4 +- tests/integration/issues/2569_test.go | 6 +- .../mutation/create/crdt/pcounter_test.go | 2 +- .../mutation/create/crdt/pncounter_test.go | 2 +- .../mutation/update/crdt/pcounter_test.go | 10 +- .../mutation/update/crdt/pncounter_test.go | 12 +- .../net/simple/peer/crdt/pcounter_test.go | 4 +- .../net/simple/peer/crdt/pncounter_test.go | 4 +- .../peer_replicator/crdt/pcounter_test.go | 4 +- .../peer_replicator/crdt/pncounter_test.go | 4 +- .../simple/replicator/crdt/pcounter_test.go | 2 +- .../simple/replicator/crdt/pncounter_test.go | 2 +- .../query/simple/with_cid_doc_id_test.go | 8 +- tests/integration/schema/crdt_type_test.go | 14 +- tests/integration/utils.go | 2 +- 32 files changed, 306 insertions(+), 292 deletions(-) create mode 100644 internal/request/graphql/schema/schema.go diff --git a/examples/schema/user.graphql b/examples/schema/user.graphql index 9390a28f64..fbdea6a418 100644 --- a/examples/schema/user.graphql +++ b/examples/schema/user.graphql @@ -2,5 +2,5 @@ type User { name: String age: Int verified: Boolean - points: Int @crdt(type: "pncounter") + points: Int @crdt(type: pncounter) } diff --git a/internal/core/parser.go b/internal/core/parser.go index 8955314e26..accb192373 100644 --- a/internal/core/parser.go +++ b/internal/core/parser.go @@ -55,7 +55,7 @@ type Parser interface { // The parsing should validate the syntax, but not validate what that syntax expresses // is valid or not, i.e. we don't want the parser to make remote calls to verify the // policy description is valid or not (that is the callers responsiblity). - ParseSDL(ctx context.Context, schemaString string) ([]client.CollectionDefinition, error) + ParseSDL(sdl string) ([]client.CollectionDefinition, error) // Adds the given schema to this parser's model. 
// diff --git a/internal/db/schema.go b/internal/db/schema.go index f1906feb54..b5e2ff4e61 100644 --- a/internal/db/schema.go +++ b/internal/db/schema.go @@ -38,7 +38,7 @@ func (db *db) addSchema( ctx context.Context, schemaString string, ) ([]client.CollectionDescription, error) { - newDefinitions, err := db.parser.ParseSDL(ctx, schemaString) + newDefinitions, err := db.parser.ParseSDL(schemaString) if err != nil { return nil, err } diff --git a/internal/db/view.go b/internal/db/view.go index 23bb3cad42..8fb54ccb24 100644 --- a/internal/db/view.go +++ b/internal/db/view.go @@ -38,7 +38,7 @@ func (db *db) addView( // with the all calls to the parser appart from `ParseSDL` when we implement the DQL stuff. query := fmt.Sprintf(`query { %s }`, inputQuery) - newDefinitions, err := db.parser.ParseSDL(ctx, sdl) + newDefinitions, err := db.parser.ParseSDL(sdl) if err != nil { return nil, err } diff --git a/internal/request/graphql/parser.go b/internal/request/graphql/parser.go index 26e09230ad..7ff23394fa 100644 --- a/internal/request/graphql/parser.go +++ b/internal/request/graphql/parser.go @@ -94,19 +94,11 @@ func (p *parser) Parse(ast *ast.Document, options *client.GQLOptions) (*request. return nil, errors } - query, parsingErrors := defrap.ParseRequest(*schema, ast, options) - if len(parsingErrors) > 0 { - return nil, parsingErrors - } - - return query, nil + return defrap.ParseRequest(*schema, ast, options) } -func (p *parser) ParseSDL(ctx context.Context, schemaString string) ( - []client.CollectionDefinition, - error, -) { - return schema.FromString(ctx, schemaString) +func (p *parser) ParseSDL(sdl string) ([]client.CollectionDefinition, error) { + return p.schemaManager.ParseSDL(sdl) } func (p *parser) SetSchema(ctx context.Context, txn datastore.Txn, collections []client.CollectionDefinition) error { diff --git a/internal/request/graphql/schema/collection.go b/internal/request/graphql/schema/collection.go index e2bf4a2455..7085ba5c97 100644 --- a/internal/request/graphql/schema/collection.go +++ b/internal/request/graphql/schema/collection.go @@ -11,15 +11,12 @@ package schema import ( - "context" "fmt" "sort" "strings" gql "github.com/sourcenetwork/graphql-go" "github.com/sourcenetwork/graphql-go/language/ast" - gqlp "github.com/sourcenetwork/graphql-go/language/parser" - "github.com/sourcenetwork/graphql-go/language/source" "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/defradb/client" @@ -50,27 +47,6 @@ var TypeToDefaultPropName = map[string]string{ typeBlob: types.DefaultDirectivePropBlob, } -// FromString parses a GQL SDL string into a set of collection descriptions. -func FromString(ctx context.Context, schemaString string) ( - []client.CollectionDefinition, - error, -) { - source := source.NewSource(&source.Source{ - Body: []byte(schemaString), - }) - - doc, err := gqlp.Parse( - gqlp.ParseParams{ - Source: source, - }, - ) - if err != nil { - return nil, err - } - - return fromAst(doc) -} - // fromAst parses a GQL AST into a set of collection descriptions. 
func fromAst(doc *ast.Document) ( []client.CollectionDefinition, diff --git a/internal/request/graphql/schema/index_parse_test.go b/internal/request/graphql/schema/index_parse_test.go index e10394f38d..df813f14e2 100644 --- a/internal/request/graphql/schema/index_parse_test.go +++ b/internal/request/graphql/schema/index_parse_test.go @@ -11,7 +11,6 @@ package schema import ( - "context" "testing" "github.com/stretchr/testify/assert" @@ -132,12 +131,12 @@ func TestParseInvalidIndexOnStruct(t *testing.T) { { description: "unknown argument", sdl: `type user @index(unknown: "something", includes: [{field: "name"}]) {}`, - expectedErr: errIndexUnknownArgument, + expectedErr: `Unknown argument "unknown" on directive "@index".`, }, { description: "invalid index name type", sdl: `type user @index(name: 1, includes: [{field: "name"}]) {}`, - expectedErr: errIndexInvalidArgument, + expectedErr: `Argument "name" has invalid value 1`, }, { description: "index name starts with a number", @@ -162,17 +161,17 @@ func TestParseInvalidIndexOnStruct(t *testing.T) { { description: "invalid 'unique' value type", sdl: `type user @index(includes: [{field: "name"}], unique: "true") {}`, - expectedErr: errIndexInvalidArgument, + expectedErr: `Argument "unique" has invalid value "true"`, }, { description: "invalid 'includes' value type (not a list)", sdl: `type user @index(includes: "name") {}`, - expectedErr: errIndexInvalidArgument, + expectedErr: `Argument "includes" has invalid value "name"`, }, { description: "invalid 'includes' value type (not an object list)", sdl: `type user @index(includes: [1]) {}`, - expectedErr: errIndexInvalidArgument, + expectedErr: `Argument "includes" has invalid value [1]`, }, } @@ -334,14 +333,14 @@ func TestParseInvalidIndexOnField(t *testing.T) { sdl: `type user { name: String @index(field: "name") }`, - expectedErr: errIndexUnknownArgument, + expectedErr: `Unknown argument "field" on directive "@index`, }, { description: "invalid field index name type", sdl: `type user { name: String @index(name: 1) }`, - expectedErr: errIndexInvalidArgument, + expectedErr: `Argument "name" has invalid value 1`, }, { description: "field index name starts with a number", @@ -376,7 +375,7 @@ func TestParseInvalidIndexOnField(t *testing.T) { sdl: `type user { name: String @index(unique: "true") }`, - expectedErr: errIndexInvalidArgument, + expectedErr: `Argument "unique" has invalid value "true"`, }, } @@ -386,9 +385,10 @@ func TestParseInvalidIndexOnField(t *testing.T) { } func parseIndexAndTest(t *testing.T, testCase indexTestCase) { - ctx := context.Background() + schemaManager, err := NewSchemaManager() + require.NoError(t, err) - cols, err := FromString(ctx, testCase.sdl) + cols, err := schemaManager.ParseSDL(testCase.sdl) require.NoError(t, err, testCase.description) require.Equal(t, len(cols), 1, testCase.description) @@ -400,9 +400,10 @@ func parseIndexAndTest(t *testing.T, testCase indexTestCase) { } func parseInvalidIndexAndTest(t *testing.T, testCase invalidIndexTestCase) { - ctx := context.Background() + schemaManager, err := NewSchemaManager() + require.NoError(t, err) - _, err := FromString(ctx, testCase.sdl) + _, err = schemaManager.ParseSDL(testCase.sdl) assert.ErrorContains(t, err, testCase.expectedErr, testCase.description) } diff --git a/internal/request/graphql/schema/manager.go b/internal/request/graphql/schema/manager.go index 881c94c144..1a3f923ddc 100644 --- a/internal/request/graphql/schema/manager.go +++ b/internal/request/graphql/schema/manager.go @@ -11,9 +11,13 @@ 
package schema import ( - gql "github.com/sourcenetwork/graphql-go" + "errors" + + "github.com/sourcenetwork/defradb/client" - schemaTypes "github.com/sourcenetwork/defradb/internal/request/graphql/schema/types" + gql "github.com/sourcenetwork/graphql-go" + gqlp "github.com/sourcenetwork/graphql-go/language/parser" + "github.com/sourcenetwork/graphql-go/language/source" ) // SchemaManager creates an instanced management point @@ -26,40 +30,14 @@ type SchemaManager struct { // NewSchemaManager returns a new instance of a SchemaManager // with a new default type map func NewSchemaManager() (*SchemaManager, error) { - sm := &SchemaManager{} - - orderEnum := schemaTypes.OrderingEnum() - crdtEnum := schemaTypes.CRDTEnum() - explainEnum := schemaTypes.ExplainEnum() - - commitLinkObject := schemaTypes.CommitLinkObject() - commitObject := schemaTypes.CommitObject(commitLinkObject) - commitsOrderArg := schemaTypes.CommitsOrderArg(orderEnum) - - indexFieldInput := schemaTypes.IndexFieldInputObject(orderEnum) - - schema, err := gql.NewSchema(gql.SchemaConfig{ - Types: defaultTypes( - commitObject, - commitLinkObject, - commitsOrderArg, - orderEnum, - crdtEnum, - explainEnum, - indexFieldInput, - ), - Query: defaultQueryType(commitObject, commitsOrderArg), - Mutation: defaultMutationType(), - Directives: defaultDirectivesType(crdtEnum, explainEnum, orderEnum, indexFieldInput), - Subscription: defaultSubscriptionType(), - }) + schema, err := defaultSchema() if err != nil { - return sm, err + return nil, err + } + sm := &SchemaManager{ + schema: schema, } - sm.schema = schema - sm.NewGenerator() - return sm, nil } @@ -99,162 +77,24 @@ func (s *SchemaManager) ResolveTypes() error { return s.schema.AppendType(query) } -// @todo: Use a better default Query type -func defaultQueryType(commitObject *gql.Object, commitsOrderArg *gql.InputObject) *gql.Object { - queryCommits := schemaTypes.QueryCommits(commitObject, commitsOrderArg) - queryLatestCommits := schemaTypes.QueryLatestCommits(commitObject) - - return gql.NewObject(gql.ObjectConfig{ - Name: "Query", - Fields: gql.Fields{ - "_": &gql.Field{ - Name: "_", - Type: gql.Boolean, - }, - - // database API queries - queryCommits.Name: queryCommits, - queryLatestCommits.Name: queryLatestCommits, - }, - }) -} - -func defaultMutationType() *gql.Object { - return gql.NewObject(gql.ObjectConfig{ - Name: "Mutation", - Fields: gql.Fields{ - "_": &gql.Field{ - Name: "_", - Type: gql.Boolean, - }, - }, +func (s *SchemaManager) ParseSDL(sdl string) ([]client.CollectionDefinition, error) { + src := source.NewSource(&source.Source{ + Body: []byte(sdl), }) -} - -func defaultSubscriptionType() *gql.Object { - return gql.NewObject(gql.ObjectConfig{ - Name: "Subscription", - Fields: gql.Fields{ - "_": &gql.Field{ - Name: "_", - Type: gql.Boolean, - }, - }, + doc, err := gqlp.Parse(gqlp.ParseParams{ + Source: src, }) -} - -// default directives type. 
-func defaultDirectivesType( - crdtEnum *gql.Enum, - explainEnum *gql.Enum, - orderEnum *gql.Enum, - indexFieldInput *gql.InputObject, -) []*gql.Directive { - return []*gql.Directive{ - schemaTypes.CRDTFieldDirective(crdtEnum), - schemaTypes.DefaultDirective(), - schemaTypes.ExplainDirective(explainEnum), - schemaTypes.PolicyDirective(), - schemaTypes.IndexDirective(orderEnum, indexFieldInput), - schemaTypes.PrimaryDirective(), - schemaTypes.RelationDirective(), - schemaTypes.MaterializedDirective(), - } -} - -func inlineArrayTypes() []gql.Type { - return []gql.Type{ - gql.Boolean, - gql.Float, - gql.Int, - gql.String, - gql.NewNonNull(gql.Boolean), - gql.NewNonNull(gql.Float), - gql.NewNonNull(gql.Int), - gql.NewNonNull(gql.String), + if err != nil { + return nil, err } -} - -// default type map includes all the native scalar types -func defaultTypes( - commitObject *gql.Object, - commitLinkObject *gql.Object, - commitsOrderArg *gql.InputObject, - orderEnum *gql.Enum, - crdtEnum *gql.Enum, - explainEnum *gql.Enum, - indexFieldInput *gql.InputObject, -) []gql.Type { - blobScalarType := schemaTypes.BlobScalarType() - jsonScalarType := schemaTypes.JSONScalarType() - - idOpBlock := schemaTypes.IDOperatorBlock() - intOpBlock := schemaTypes.IntOperatorBlock() - floatOpBlock := schemaTypes.FloatOperatorBlock() - booleanOpBlock := schemaTypes.BooleanOperatorBlock() - stringOpBlock := schemaTypes.StringOperatorBlock() - blobOpBlock := schemaTypes.BlobOperatorBlock(blobScalarType) - dateTimeOpBlock := schemaTypes.DateTimeOperatorBlock() - - notNullIntOpBlock := schemaTypes.NotNullIntOperatorBlock() - notNullFloatOpBlock := schemaTypes.NotNullFloatOperatorBlock() - notNullBooleanOpBlock := schemaTypes.NotNullBooleanOperatorBlock() - notNullStringOpBlock := schemaTypes.NotNullStringOperatorBlock() - notNullBlobOpBlock := schemaTypes.NotNullBlobOperatorBlock(blobScalarType) - - return []gql.Type{ - // Base Scalar types - gql.Boolean, - gql.DateTime, - gql.Float, - gql.ID, - gql.Int, - gql.String, - - // Custom Scalar types - blobScalarType, - jsonScalarType, - - // Base Query types - - // Sort/Order enum - orderEnum, - - // Filter scalar blocks - idOpBlock, - intOpBlock, - floatOpBlock, - booleanOpBlock, - stringOpBlock, - blobOpBlock, - dateTimeOpBlock, - - // Filter non null scalar blocks - notNullIntOpBlock, - notNullFloatOpBlock, - notNullBooleanOpBlock, - notNullStringOpBlock, - notNullBlobOpBlock, - - // Filter scalar list blocks - schemaTypes.IntListOperatorBlock(intOpBlock), - schemaTypes.FloatListOperatorBlock(floatOpBlock), - schemaTypes.BooleanListOperatorBlock(booleanOpBlock), - schemaTypes.StringListOperatorBlock(stringOpBlock), - - // Filter non null scalar list blocks - schemaTypes.NotNullIntListOperatorBlock(notNullIntOpBlock), - schemaTypes.NotNullFloatListOperatorBlock(notNullFloatOpBlock), - schemaTypes.NotNullBooleanListOperatorBlock(notNullBooleanOpBlock), - schemaTypes.NotNullStringListOperatorBlock(notNullStringOpBlock), - - commitsOrderArg, - commitLinkObject, - commitObject, - - crdtEnum, - explainEnum, - - indexFieldInput, + // The user provided SDL must be validated using the latest generated schema + // so that relations to other user defined types do not return an error. 
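+	// All validation errors are joined and returned together so the
+	// caller sees every problem with the provided SDL at once.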
+ validation := gql.ValidateDocument(&s.schema, doc, gql.SpecifiedRules) + if !validation.IsValid { + for _, e := range validation.Errors { + err = errors.Join(err, e) + } + return nil, err } + return fromAst(doc) } diff --git a/internal/request/graphql/schema/schema.go b/internal/request/graphql/schema/schema.go new file mode 100644 index 0000000000..d18911c929 --- /dev/null +++ b/internal/request/graphql/schema/schema.go @@ -0,0 +1,206 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package schema + +import ( + "github.com/sourcenetwork/defradb/internal/request/graphql/schema/types" + + gql "github.com/sourcenetwork/graphql-go" +) + +// defaultSchema returns a new gql.Schema containing the default type definitions. +func defaultSchema() (gql.Schema, error) { + orderEnum := types.OrderingEnum() + crdtEnum := types.CRDTEnum() + explainEnum := types.ExplainEnum() + + commitLinkObject := types.CommitLinkObject() + commitObject := types.CommitObject(commitLinkObject) + commitsOrderArg := types.CommitsOrderArg(orderEnum) + + indexFieldInput := types.IndexFieldInputObject(orderEnum) + + return gql.NewSchema(gql.SchemaConfig{ + Types: defaultTypes( + commitObject, + commitLinkObject, + commitsOrderArg, + orderEnum, + crdtEnum, + explainEnum, + indexFieldInput, + ), + Query: defaultQueryType(commitObject, commitsOrderArg), + Mutation: defaultMutationType(), + Directives: defaultDirectivesType(crdtEnum, explainEnum, orderEnum, indexFieldInput), + Subscription: defaultSubscriptionType(), + }) +} + +// @todo: Use a better default Query type +func defaultQueryType(commitObject *gql.Object, commitsOrderArg *gql.InputObject) *gql.Object { + queryCommits := types.QueryCommits(commitObject, commitsOrderArg) + queryLatestCommits := types.QueryLatestCommits(commitObject) + + return gql.NewObject(gql.ObjectConfig{ + Name: "Query", + Fields: gql.Fields{ + "_": &gql.Field{ + Name: "_", + Type: gql.Boolean, + }, + + // database API queries + queryCommits.Name: queryCommits, + queryLatestCommits.Name: queryLatestCommits, + }, + }) +} + +func defaultMutationType() *gql.Object { + return gql.NewObject(gql.ObjectConfig{ + Name: "Mutation", + Fields: gql.Fields{ + "_": &gql.Field{ + Name: "_", + Type: gql.Boolean, + }, + }, + }) +} + +func defaultSubscriptionType() *gql.Object { + return gql.NewObject(gql.ObjectConfig{ + Name: "Subscription", + Fields: gql.Fields{ + "_": &gql.Field{ + Name: "_", + Type: gql.Boolean, + }, + }, + }) +} + +// default directives type. 
+func defaultDirectivesType( + crdtEnum *gql.Enum, + explainEnum *gql.Enum, + orderEnum *gql.Enum, + indexFieldInput *gql.InputObject, +) []*gql.Directive { + return []*gql.Directive{ + types.CRDTFieldDirective(crdtEnum), + types.DefaultDirective(), + types.ExplainDirective(explainEnum), + types.PolicyDirective(), + types.IndexDirective(orderEnum, indexFieldInput), + types.PrimaryDirective(), + types.RelationDirective(), + types.MaterializedDirective(), + } +} + +func inlineArrayTypes() []gql.Type { + return []gql.Type{ + gql.Boolean, + gql.Float, + gql.Int, + gql.String, + gql.NewNonNull(gql.Boolean), + gql.NewNonNull(gql.Float), + gql.NewNonNull(gql.Int), + gql.NewNonNull(gql.String), + } +} + +// default type map includes all the native scalar types +func defaultTypes( + commitObject *gql.Object, + commitLinkObject *gql.Object, + commitsOrderArg *gql.InputObject, + orderEnum *gql.Enum, + crdtEnum *gql.Enum, + explainEnum *gql.Enum, + indexFieldInput *gql.InputObject, +) []gql.Type { + blobScalarType := types.BlobScalarType() + jsonScalarType := types.JSONScalarType() + + idOpBlock := types.IDOperatorBlock() + intOpBlock := types.IntOperatorBlock() + floatOpBlock := types.FloatOperatorBlock() + booleanOpBlock := types.BooleanOperatorBlock() + stringOpBlock := types.StringOperatorBlock() + blobOpBlock := types.BlobOperatorBlock(blobScalarType) + dateTimeOpBlock := types.DateTimeOperatorBlock() + + notNullIntOpBlock := types.NotNullIntOperatorBlock() + notNullFloatOpBlock := types.NotNullFloatOperatorBlock() + notNullBooleanOpBlock := types.NotNullBooleanOperatorBlock() + notNullStringOpBlock := types.NotNullStringOperatorBlock() + notNullBlobOpBlock := types.NotNullBlobOperatorBlock(blobScalarType) + + return []gql.Type{ + // Base Scalar types + gql.Boolean, + gql.DateTime, + gql.Float, + gql.ID, + gql.Int, + gql.String, + + // Custom Scalar types + blobScalarType, + jsonScalarType, + + // Base Query types + + // Sort/Order enum + orderEnum, + + // Filter scalar blocks + idOpBlock, + intOpBlock, + floatOpBlock, + booleanOpBlock, + stringOpBlock, + blobOpBlock, + dateTimeOpBlock, + + // Filter non null scalar blocks + notNullIntOpBlock, + notNullFloatOpBlock, + notNullBooleanOpBlock, + notNullStringOpBlock, + notNullBlobOpBlock, + + // Filter scalar list blocks + types.IntListOperatorBlock(intOpBlock), + types.FloatListOperatorBlock(floatOpBlock), + types.BooleanListOperatorBlock(booleanOpBlock), + types.StringListOperatorBlock(stringOpBlock), + + // Filter non null scalar list blocks + types.NotNullIntListOperatorBlock(notNullIntOpBlock), + types.NotNullFloatListOperatorBlock(notNullFloatOpBlock), + types.NotNullBooleanListOperatorBlock(notNullBooleanOpBlock), + types.NotNullStringListOperatorBlock(notNullStringOpBlock), + + commitsOrderArg, + commitLinkObject, + commitObject, + + crdtEnum, + explainEnum, + + indexFieldInput, + } +} diff --git a/internal/request/graphql/schema/types/types.go b/internal/request/graphql/schema/types/types.go index 8b02c59056..5770b4b579 100644 --- a/internal/request/graphql/schema/types/types.go +++ b/internal/request/graphql/schema/types/types.go @@ -232,7 +232,7 @@ func MaterializedDirective() *gql.Directive { }, }, Locations: []string{ - gql.DirectiveLocationSchema, + gql.DirectiveLocationObject, }, }) } @@ -277,7 +277,7 @@ func CRDTFieldDirective(crdtEnum *gql.Enum) *gql.Directive { }, }, Locations: []string{ - gql.DirectiveLocationField, + gql.DirectiveLocationFieldDefinition, }, }) } diff --git a/tests/bench/query/planner/utils.go 
b/tests/bench/query/planner/utils.go index 0ab739ac20..a4fc69e091 100644 --- a/tests/bench/query/planner/utils.go +++ b/tests/bench/query/planner/utils.go @@ -23,7 +23,6 @@ import ( "github.com/sourcenetwork/defradb/internal/core" "github.com/sourcenetwork/defradb/internal/planner" "github.com/sourcenetwork/defradb/internal/request/graphql" - gqlSchema "github.com/sourcenetwork/defradb/internal/request/graphql/schema" benchutils "github.com/sourcenetwork/defradb/tests/bench" "github.com/sourcenetwork/defradb/tests/bench/fixtures" ) @@ -116,7 +115,7 @@ func buildParser( return nil, err } - collectionDescriptions, err := gqlSchema.FromString(ctx, schema) + collectionDescriptions, err := parser.ParseSDL(schema) if err != nil { return nil, err } diff --git a/tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go b/tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go index 5aaa44bc56..efb05fca7b 100644 --- a/tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go +++ b/tests/integration/acp/schema/add_dpi/reject_invalid_arg_type_on_schema_test.go @@ -66,7 +66,7 @@ func TestACP_AddDPISchema_InvalidPolicyIDArgTypeWasSpecifiedOnSchema_SchemaRejec age: Int } `, - ExpectedError: "policy directive with invalid id property", + ExpectedError: `Argument "id" has invalid value 123`, }, testUtils.IntrospectionRequest{ @@ -144,7 +144,7 @@ func TestACP_AddDPISchema_InvalidResourceArgTypeWasSpecifiedOnSchema_SchemaRejec policyIDOfValidDPI, ), - ExpectedError: "policy directive with invalid resource property", + ExpectedError: `Argument "resource" has invalid value 123`, }, testUtils.IntrospectionRequest{ diff --git a/tests/integration/collection_description/with_default_fields_test.go b/tests/integration/collection_description/with_default_fields_test.go index dca776532d..0d35254ca9 100644 --- a/tests/integration/collection_description/with_default_fields_test.go +++ b/tests/integration/collection_description/with_default_fields_test.go @@ -99,7 +99,7 @@ func TestCollectionDescription_WithInvalidDefaultFieldValueType_ReturnsError(t * active: Boolean @default(bool: invalid) } `, - ExpectedError: "default value is invalid. 
Field: active, Arg: bool", + ExpectedError: "Argument \"bool\" has invalid value invalid", }, }, } diff --git a/tests/integration/encryption/commit_test.go b/tests/integration/encryption/commit_test.go index 592f1e0501..fbe94ea53b 100644 --- a/tests/integration/encryption/commit_test.go +++ b/tests/integration/encryption/commit_test.go @@ -200,7 +200,7 @@ func TestDocEncryption_WithEncryptionOnCounterCRDT_ShouldStoreCommitsDeltaEncryp testUtils.SchemaUpdate{ Schema: ` type Users { - points: Int @crdt(type: "pcounter") + points: Int @crdt(type: pcounter) } `}, testUtils.CreateDoc{ @@ -243,7 +243,7 @@ func TestDocEncryption_UponUpdateOnCounterCRDT_ShouldEncryptedCommitDelta(t *tes testUtils.SchemaUpdate{ Schema: ` type Users { - points: Int @crdt(type: "pcounter") + points: Int @crdt(type: pcounter) } `}, testUtils.CreateDoc{ diff --git a/tests/integration/encryption/peer_share_test.go b/tests/integration/encryption/peer_share_test.go index c04d204a84..25e1cbd7cf 100644 --- a/tests/integration/encryption/peer_share_test.go +++ b/tests/integration/encryption/peer_share_test.go @@ -283,7 +283,7 @@ func TestDocEncryptionPeer_WithUpdatesOnEncryptedDeltaBasedCRDTField_ShouldDecry Schema: ` type User { name: String - age: Int @crdt(type: "pcounter") + age: Int @crdt(type: pcounter) } `, }, @@ -342,7 +342,7 @@ func TestDocEncryptionPeer_WithUpdatesOnDeltaBasedCRDTFieldOfEncryptedDoc_Should Schema: ` type User { name: String - age: Int @crdt(type: "pcounter") + age: Int @crdt(type: pcounter) } `, }, diff --git a/tests/integration/encryption/query_test.go b/tests/integration/encryption/query_test.go index a949081120..286192f66d 100644 --- a/tests/integration/encryption/query_test.go +++ b/tests/integration/encryption/query_test.go @@ -70,7 +70,7 @@ func TestDocEncryption_WithEncryptionOnCounterCRDT_ShouldFetchDecrypted(t *testi Schema: ` type Users { name: String - points: Int @crdt(type: "pcounter") + points: Int @crdt(type: pcounter) } `}, testUtils.CreateDoc{ diff --git a/tests/integration/encryption/utils.go b/tests/integration/encryption/utils.go index 685a2567fb..3686fae5ac 100644 --- a/tests/integration/encryption/utils.go +++ b/tests/integration/encryption/utils.go @@ -20,7 +20,7 @@ import ( const userCollectionGQLSchema = (` type Users { name: String - age: Int @crdt(type: "lww") + age: Int @crdt(type: lww) verified: Boolean } `) diff --git a/tests/integration/issues/2566_test.go b/tests/integration/issues/2566_test.go index eecf0d20b4..189ee17fd4 100644 --- a/tests/integration/issues/2566_test.go +++ b/tests/integration/issues/2566_test.go @@ -37,7 +37,7 @@ func TestP2PUpdate_WithPNCounterSimultaneousOverflowIncrement_DoesNotReachConsit Schema: ` type Users { Name: String - Age: Float @crdt(type: "pncounter") + Age: Float @crdt(type: pncounter) } `, }, @@ -133,7 +133,7 @@ func TestP2PUpdate_WithPNCounterSimultaneousOverflowDecrement_DoesNotReachConsit Schema: ` type Users { Name: String - Age: Float @crdt(type: "pncounter") + Age: Float @crdt(type: pncounter) } `, }, diff --git a/tests/integration/issues/2569_test.go b/tests/integration/issues/2569_test.go index 2d942177d6..1d7727fd26 100644 --- a/tests/integration/issues/2569_test.go +++ b/tests/integration/issues/2569_test.go @@ -36,7 +36,7 @@ func TestP2PUpdate_WithPNCounterFloatOverflowIncrement_PreventsQuerying(t *testi Schema: ` type Users { name: String - points: Float @crdt(type: "pncounter") + points: Float @crdt(type: pncounter) } `, }, @@ -81,7 +81,7 @@ func TestP2PUpdate_WithPNCounterFloatOverflowDecrement_PreventsQuerying(t 
*testi Schema: ` type Users { name: String - points: Float @crdt(type: "pncounter") + points: Float @crdt(type: pncounter) } `, }, @@ -135,7 +135,7 @@ func TestP2PUpdate_WithPNCounterFloatOverflow_PreventsCollectionGet(t *testing.T Schema: ` type Users { name: String - points: Float @crdt(type: "pncounter") + points: Float @crdt(type: pncounter) } `, }, diff --git a/tests/integration/mutation/create/crdt/pcounter_test.go b/tests/integration/mutation/create/crdt/pcounter_test.go index 724b157006..ef8f1c5688 100644 --- a/tests/integration/mutation/create/crdt/pcounter_test.go +++ b/tests/integration/mutation/create/crdt/pcounter_test.go @@ -24,7 +24,7 @@ func TestPCounterCreate_IntKindWithPositiveValue_NoError(t *testing.T) { Schema: ` type Users { name: String - points: Int @crdt(type: "pcounter") + points: Int @crdt(type: pcounter) } `, }, diff --git a/tests/integration/mutation/create/crdt/pncounter_test.go b/tests/integration/mutation/create/crdt/pncounter_test.go index c16a0c5bb6..a224946471 100644 --- a/tests/integration/mutation/create/crdt/pncounter_test.go +++ b/tests/integration/mutation/create/crdt/pncounter_test.go @@ -24,7 +24,7 @@ func TestPNCounterCreate_IntKindWithPositiveValue_NoError(t *testing.T) { Schema: ` type Users { name: String - points: Int @crdt(type: "pncounter") + points: Int @crdt(type: pncounter) } `, }, diff --git a/tests/integration/mutation/update/crdt/pcounter_test.go b/tests/integration/mutation/update/crdt/pcounter_test.go index 784bf9f2c4..a174b009eb 100644 --- a/tests/integration/mutation/update/crdt/pcounter_test.go +++ b/tests/integration/mutation/update/crdt/pcounter_test.go @@ -28,7 +28,7 @@ func TestPCounterUpdate_IntKindWithNegativeIncrement_ShouldError(t *testing.T) { Schema: ` type Users { name: String - points: Int @crdt(type: "pcounter") + points: Int @crdt(type: pcounter) } `, }, @@ -75,7 +75,7 @@ func TestPCounterUpdate_IntKindWithPositiveIncrement_ShouldIncrement(t *testing. 
Schema: ` type Users { name: String - points: Int @crdt(type: "pcounter") + points: Int @crdt(type: pcounter) } `, }, @@ -135,7 +135,7 @@ func TestPCounterUpdate_IntKindWithPositiveIncrementOverflow_RollsOverToMinInt64 Schema: ` type Users { name: String - points: Int @crdt(type: "pcounter") + points: Int @crdt(type: pcounter) } `, }, @@ -181,7 +181,7 @@ func TestPCounterUpdate_FloatKindWithPositiveIncrement_ShouldIncrement(t *testin Schema: ` type Users { name: String - points: Float @crdt(type: "pcounter") + points: Float @crdt(type: pcounter) } `, }, @@ -236,7 +236,7 @@ func TestPCounterUpdate_FloatKindWithPositiveIncrementOverflow_NoOp(t *testing.T Schema: ` type Users { name: String - points: Float @crdt(type: "pcounter") + points: Float @crdt(type: pcounter) } `, }, diff --git a/tests/integration/mutation/update/crdt/pncounter_test.go b/tests/integration/mutation/update/crdt/pncounter_test.go index 11efc0452b..ad9f0541a9 100644 --- a/tests/integration/mutation/update/crdt/pncounter_test.go +++ b/tests/integration/mutation/update/crdt/pncounter_test.go @@ -28,7 +28,7 @@ func TestPNCounterUpdate_IntKindWithPositiveIncrement_ShouldIncrement(t *testing Schema: ` type Users { name: String - points: Int @crdt(type: "pncounter") + points: Int @crdt(type: pncounter) } `, }, @@ -88,7 +88,7 @@ func TestPNCounterUpdate_IntKindWithPositiveIncrementOverflow_RollsOverToMinInt6 Schema: ` type Users { name: String - points: Int @crdt(type: "pncounter") + points: Int @crdt(type: pncounter) } `, }, @@ -134,7 +134,7 @@ func TestPNCounterUpdate_FloatKindWithPositiveIncrement_ShouldIncrement(t *testi Schema: ` type Users { name: String - points: Float @crdt(type: "pncounter") + points: Float @crdt(type: pncounter) } `, }, @@ -195,7 +195,7 @@ func TestPNCounterUpdate_FloatKindWithPositiveIncrementOverflow_PositiveInf(t *t Schema: ` type Users { name: String - points: Float @crdt(type: "pncounter") + points: Float @crdt(type: pncounter) } `, }, @@ -249,7 +249,7 @@ func TestPNCounterUpdate_FloatKindWithDecrementOverflow_NegativeInf(t *testing.T Schema: ` type Users { name: String - points: Float @crdt(type: "pncounter") + points: Float @crdt(type: pncounter) } `, }, @@ -295,7 +295,7 @@ func TestPNCounterUpdate_FloatKindWithPositiveIncrementInsignificantValue_DoesNo Schema: ` type Users { name: String - points: Float @crdt(type: "pncounter") + points: Float @crdt(type: pncounter) } `, }, diff --git a/tests/integration/net/simple/peer/crdt/pcounter_test.go b/tests/integration/net/simple/peer/crdt/pcounter_test.go index 569e4bb153..f379ab8d46 100644 --- a/tests/integration/net/simple/peer/crdt/pcounter_test.go +++ b/tests/integration/net/simple/peer/crdt/pcounter_test.go @@ -27,7 +27,7 @@ func TestP2PUpdate_WithPCounter_NoError(t *testing.T) { Schema: ` type Users { name: String - points: Int @crdt(type: "pcounter") + points: Int @crdt(type: pcounter) } `, }, @@ -79,7 +79,7 @@ func TestP2PUpdate_WithPCounterSimultaneousUpdate_NoError(t *testing.T) { Schema: ` type Users { Name: String - Age: Int @crdt(type: "pcounter") + Age: Int @crdt(type: pcounter) } `, }, diff --git a/tests/integration/net/simple/peer/crdt/pncounter_test.go b/tests/integration/net/simple/peer/crdt/pncounter_test.go index de0c67fc2c..4b4d21eec4 100644 --- a/tests/integration/net/simple/peer/crdt/pncounter_test.go +++ b/tests/integration/net/simple/peer/crdt/pncounter_test.go @@ -27,7 +27,7 @@ func TestP2PUpdate_WithPNCounter_NoError(t *testing.T) { Schema: ` type Users { name: String - points: Int @crdt(type: "pncounter") + points: Int 
@crdt(type: pncounter) } `, }, @@ -79,7 +79,7 @@ func TestP2PUpdate_WithPNCounterSimultaneousUpdate_NoError(t *testing.T) { Schema: ` type Users { Name: String - Age: Int @crdt(type: "pncounter") + Age: Int @crdt(type: pncounter) } `, }, diff --git a/tests/integration/net/simple/peer_replicator/crdt/pcounter_test.go b/tests/integration/net/simple/peer_replicator/crdt/pcounter_test.go index 914f8daba3..612095be45 100644 --- a/tests/integration/net/simple/peer_replicator/crdt/pcounter_test.go +++ b/tests/integration/net/simple/peer_replicator/crdt/pcounter_test.go @@ -28,7 +28,7 @@ func TestP2PPeerReplicatorWithCreate_PCounter_NoError(t *testing.T) { Schema: ` type Users { name: String - points: Int @crdt(type: "pcounter") + points: Int @crdt(type: pcounter) } `, }, @@ -121,7 +121,7 @@ func TestP2PPeerReplicatorWithUpdate_PCounter_NoError(t *testing.T) { Schema: ` type Users { name: String - points: Int @crdt(type: "pcounter") + points: Int @crdt(type: pcounter) } `, }, diff --git a/tests/integration/net/simple/peer_replicator/crdt/pncounter_test.go b/tests/integration/net/simple/peer_replicator/crdt/pncounter_test.go index b990631880..fdbfef9360 100644 --- a/tests/integration/net/simple/peer_replicator/crdt/pncounter_test.go +++ b/tests/integration/net/simple/peer_replicator/crdt/pncounter_test.go @@ -28,7 +28,7 @@ func TestP2PPeerReplicatorWithCreate_PNCounter_NoError(t *testing.T) { Schema: ` type Users { name: String - points: Int @crdt(type: "pncounter") + points: Int @crdt(type: pncounter) } `, }, @@ -121,7 +121,7 @@ func TestP2PPeerReplicatorWithUpdate_PNCounter_NoError(t *testing.T) { Schema: ` type Users { name: String - points: Int @crdt(type: "pncounter") + points: Int @crdt(type: pncounter) } `, }, diff --git a/tests/integration/net/simple/replicator/crdt/pcounter_test.go b/tests/integration/net/simple/replicator/crdt/pcounter_test.go index 2062b912df..f546025a4f 100644 --- a/tests/integration/net/simple/replicator/crdt/pcounter_test.go +++ b/tests/integration/net/simple/replicator/crdt/pcounter_test.go @@ -27,7 +27,7 @@ func TestP2POneToOneReplicatorUpdate_PCounter_NoError(t *testing.T) { Schema: ` type Users { name: String - points: Int @crdt(type: "pcounter") + points: Int @crdt(type: pcounter) } `, }, diff --git a/tests/integration/net/simple/replicator/crdt/pncounter_test.go b/tests/integration/net/simple/replicator/crdt/pncounter_test.go index 48d1783504..db57744ee3 100644 --- a/tests/integration/net/simple/replicator/crdt/pncounter_test.go +++ b/tests/integration/net/simple/replicator/crdt/pncounter_test.go @@ -27,7 +27,7 @@ func TestP2POneToOneReplicatorUpdate_PNCounter_NoError(t *testing.T) { Schema: ` type Users { name: String - points: Int @crdt(type: "pncounter") + points: Int @crdt(type: pncounter) } `, }, diff --git a/tests/integration/query/simple/with_cid_doc_id_test.go b/tests/integration/query/simple/with_cid_doc_id_test.go index 0536099128..8c6476b1e5 100644 --- a/tests/integration/query/simple/with_cid_doc_id_test.go +++ b/tests/integration/query/simple/with_cid_doc_id_test.go @@ -311,7 +311,7 @@ func TestCidAndDocIDQuery_ContainsPNCounterWithIntKind_NoError(t *testing.T) { Schema: ` type Users { name: String - points: Int @crdt(type: "pncounter") + points: Int @crdt(type: pncounter) } `, }, @@ -365,7 +365,7 @@ func TestCidAndDocIDQuery_ContainsPNCounterWithFloatKind_NoError(t *testing.T) { Schema: ` type Users { name: String - points: Float @crdt(type: "pncounter") + points: Float @crdt(type: pncounter) } `, }, @@ -419,7 +419,7 @@ func 
TestCidAndDocIDQuery_ContainsPCounterWithIntKind_NoError(t *testing.T) { Schema: ` type Users { name: String - points: Int @crdt(type: "pcounter") + points: Int @crdt(type: pcounter) } `, },
@@ -468,7 +468,7 @@ func TestCidAndDocIDQuery_ContainsPCounterWithFloatKind_NoError(t *testing.T) { Schema: ` type Users { name: String - points: Float @crdt(type: "pcounter") + points: Float @crdt(type: pcounter) } `, },
diff --git a/tests/integration/schema/crdt_type_test.go b/tests/integration/schema/crdt_type_test.go index 2a321ef751..afc7a6f539 100644 --- a/tests/integration/schema/crdt_type_test.go +++ b/tests/integration/schema/crdt_type_test.go
@@ -27,7 +27,7 @@ func TestSchemaCreate_ContainsPNCounterTypeWithIntKind_NoError(t *testing.T) { testUtils.SchemaUpdate{ Schema: ` type Users { - points: Int @crdt(type: "pncounter") + points: Int @crdt(type: pncounter) } `, },
@@ -66,7 +66,7 @@ func TestSchemaCreate_ContainsPNCounterTypeWithFloatKind_NoError(t *testing.T) { testUtils.SchemaUpdate{ Schema: ` type Users { - points: Float @crdt(type: "pncounter") + points: Float @crdt(type: pncounter) } `, },
@@ -103,7 +103,7 @@ func TestSchemaCreate_ContainsPNCounterTypeWithWrongKind_Error(t *testing.T) { testUtils.SchemaUpdate{ Schema: ` type Users { - points: String @crdt(type: "pncounter") + points: String @crdt(type: pncounter) } `, ExpectedError: "CRDT type pncounter can't be assigned to field kind String",
@@ -123,7 +123,7 @@ func TestSchemaCreate_ContainsPNCounterWithInvalidType_Error(t *testing.T) { points: Int @crdt(type: "invalid") } `, - ExpectedError: "CRDT type not supported. Name: points, CRDTType: invalid", + ExpectedError: `Argument "type" has invalid value "invalid"`, }, }, }
@@ -139,7 +139,7 @@ func TestSchemaCreate_ContainsPCounterTypeWithIntKind_NoError(t *testing.T) { testUtils.SchemaUpdate{ Schema: ` type Users { - points: Int @crdt(type: "pcounter") + points: Int @crdt(type: pcounter) } `, },
@@ -178,7 +178,7 @@ func TestSchemaCreate_ContainsPCounterTypeWithFloatKind_NoError(t *testing.T) { testUtils.SchemaUpdate{ Schema: ` type Users { - points: Float @crdt(type: "pcounter") + points: Float @crdt(type: pcounter) } `, },
@@ -215,7 +215,7 @@ func TestSchemaCreate_ContainsPCounterTypeWithWrongKind_Error(t *testing.T) { testUtils.SchemaUpdate{ Schema: ` type Users { - points: String @crdt(type: "pcounter") + points: String @crdt(type: pcounter) } `, ExpectedError: "CRDT type pcounter can't be assigned to field kind String",
diff --git a/tests/integration/utils.go b/tests/integration/utils.go index 744f874423..8d12d03b29 100644 --- a/tests/integration/utils.go +++ b/tests/integration/utils.go
@@ -2517,7 +2517,7 @@ func ParseSDL(gqlSDL string) (map[string]client.CollectionDefinition, error) { if err != nil { return nil, err } - cols, err := parser.ParseSDL(context.Background(), gqlSDL) + cols, err := parser.ParseSDL(gqlSDL) if err != nil { return nil, err }
From 76f5c8a0af7d284468d4637b4f03b2fa5eaa5453 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 18 Oct 2024 09:27:12 -0700 Subject: [PATCH 70/71] fix(i): Inline array filter types (#3145)
## Relevant issue(s) Resolves #3142
## Description This PR fixes an issue with inline array filters where a non-array value would return an error.
## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly.
- [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? Updated / added integration tests. Specify the platform(s) on which this was tested: - MacOS --- internal/connor/all.go | 4 +-- internal/connor/any.go | 4 +-- .../inline_array/with_filter_all_test.go | 26 +++++++++++++++++ .../inline_array/with_filter_any_test.go | 26 +++++++++++++++++ tests/integration/query/json/with_all_test.go | 28 +++++++++++++++++-- tests/integration/query/json/with_any_test.go | 28 +++++++++++++++++-- 6 files changed, 106 insertions(+), 10 deletions(-) diff --git a/internal/connor/all.go b/internal/connor/all.go index ce2557d25b..bf607b583b 100644 --- a/internal/connor/all.go +++ b/internal/connor/all.go @@ -1,8 +1,6 @@ package connor import ( - "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/immutable" ) @@ -39,7 +37,7 @@ func all(condition, data any) (bool, error) { return allSlice(condition, t) default: - return false, client.NewErrUnhandledType("data", data) + return false, nil } } diff --git a/internal/connor/any.go b/internal/connor/any.go index 7eea2a7bce..ecd16ce992 100644 --- a/internal/connor/any.go +++ b/internal/connor/any.go @@ -1,8 +1,6 @@ package connor import ( - "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/immutable" ) @@ -39,7 +37,7 @@ func anyOp(condition, data any) (bool, error) { return anySlice(condition, t) default: - return false, client.NewErrUnhandledType("data", data) + return false, nil } } diff --git a/tests/integration/query/inline_array/with_filter_all_test.go b/tests/integration/query/inline_array/with_filter_all_test.go index 1661c54731..a558a70a23 100644 --- a/tests/integration/query/inline_array/with_filter_all_test.go +++ b/tests/integration/query/inline_array/with_filter_all_test.go @@ -303,3 +303,29 @@ func TestQueryInlineNotNullBooleanArray_WithAllFilter_Succeeds(t *testing.T) { executeTestCase(t, test) } + +func TestQueryInlineStringArray_WithAllFilterAndNullValue_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered all of string array with null", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Islam", + "pageHeaders": null + }`, + }, + testUtils.Request{ + Request: `query { + Users(filter: {pageHeaders: {_all: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/inline_array/with_filter_any_test.go b/tests/integration/query/inline_array/with_filter_any_test.go index 0dfc815595..15d3f701e4 100644 --- a/tests/integration/query/inline_array/with_filter_any_test.go +++ b/tests/integration/query/inline_array/with_filter_any_test.go @@ -303,3 +303,29 @@ func TestQueryInlineNotNullBooleanArray_WithAnyFilter_Succeeds(t *testing.T) { executeTestCase(t, test) } + +func TestQueryInlineStringArray_WithAnyFilterAndNullValue_Succeeds(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple inline array, filtered any of string array with null", + Actions: []any{ + testUtils.CreateDoc{ + Doc: `{ + "name": "Islam", + "pageHeaders": null + }`, + }, + testUtils.Request{ + Request: `query { + 
Users(filter: {pageHeaders: {_any: {_eq: null}}}) { + name + } + }`, + Results: map[string]any{ + "Users": []map[string]any{}, + }, + }, + }, + } + + executeTestCase(t, test) +} diff --git a/tests/integration/query/json/with_all_test.go b/tests/integration/query/json/with_all_test.go index 918c51e5bb..b0d12765ae 100644 --- a/tests/integration/query/json/with_all_test.go +++ b/tests/integration/query/json/with_all_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryJSON_WithAllFilter_ShouldFilter(t *testing.T) { +func TestQueryJSON_WithAllFilterWithAllTypes_ShouldFilter(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple JSON array, filtered all of string array", + Description: "Simple JSON array, filtered all of all types array", Actions: []any{ testUtils.SchemaUpdate{ Schema: `type Users { @@ -38,6 +38,30 @@ func TestQueryJSON_WithAllFilter_ShouldFilter(t *testing.T) { "custom": [null, false, "second", {"one": 1}, [1, 2]] }`, }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Islam", + "custom": null + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Keenan", + "custom": 0 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "custom": "" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "custom": true + }`, + }, testUtils.Request{ Request: `query { Users(filter: {custom: {_all: {_ne: null}}}) { diff --git a/tests/integration/query/json/with_any_test.go b/tests/integration/query/json/with_any_test.go index d38d3e83e8..e79e90946b 100644 --- a/tests/integration/query/json/with_any_test.go +++ b/tests/integration/query/json/with_any_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryJSON_WithAnyFilter_ShouldFilter(t *testing.T) { +func TestQueryJSON_WithAnyFilterWithAllTypes_ShouldFilter(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple JSON array, filtered any of string array", + Description: "Simple JSON array, filtered any of all types array", Actions: []any{ testUtils.SchemaUpdate{ Schema: `type Users { @@ -38,6 +38,30 @@ func TestQueryJSON_WithAnyFilter_ShouldFilter(t *testing.T) { "custom": [null, false, "second", {"one": 1}, [1, 2]] }`, }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Islam", + "custom": null + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Keenan", + "custom": 0 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Andy", + "custom": "" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "custom": true + }`, + }, testUtils.Request{ Request: `query { Users(filter: {custom: {_any: {_eq: null}}}) { From 8a1a02321a345ee1ad7fb8b3d1fdb97255aaadd5 Mon Sep 17 00:00:00 2001 From: Fred Carle Date: Fri, 18 Oct 2024 23:13:35 -0400 Subject: [PATCH 71/71] Release v0.14.0 --- CHANGELOG.md | 86 ++++++++++++++++++++++++++++++++++++++++++++++++ licenses/BSL.txt | 4 +-- 2 files changed, 88 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2e8fa26f17..b7c252a22e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,89 @@ + +## [v0.14.0](https://github.com/sourcenetwork/defradb/compare/v0.13.0...v0.14.0) + +> 2024-10-18 + +DefraDB v0.14 is a major pre-production release. Until the stable version 1.0 is reached, the SemVer minor patch number will denote notable releases, which will give the project freedom to experiment and explore potentially breaking changes. + +To get a full outline of the changes, we invite you to review the official changelog below. 
This release does include a Breaking Change to existing v0.13.x databases. If you need help migrating an existing deployment, reach out at [hello@source.network](mailto:hello@source.network) or join our Discord at https://discord.gg/w7jYQVJ/. + +### Features + +* JSON type filter ([#3122](https://github.com/sourcenetwork/defradb/issues/3122)) +* Add replicator retry ([#3107](https://github.com/sourcenetwork/defradb/issues/3107)) +* Inherit `read` permission if only `write` access ([#3108](https://github.com/sourcenetwork/defradb/issues/3108)) +* JSON type coercion ([#3098](https://github.com/sourcenetwork/defradb/issues/3098)) +* Ability to unrelate private documents from actors ([#3099](https://github.com/sourcenetwork/defradb/issues/3099)) +* Enable Indexing of array fields ([#3092](https://github.com/sourcenetwork/defradb/issues/3092)) +* Min and max numerical aggregates ([#3078](https://github.com/sourcenetwork/defradb/issues/3078)) +* Ability to relate private documents to actors ([#2907](https://github.com/sourcenetwork/defradb/issues/2907)) +* GraphQL upsert mutation ([#3075](https://github.com/sourcenetwork/defradb/issues/3075)) +* GraphQL fragments ([#3066](https://github.com/sourcenetwork/defradb/issues/3066)) +* Secure document encryption key exchange ([#2891](https://github.com/sourcenetwork/defradb/issues/2891)) +* Inline array filters ([#3028](https://github.com/sourcenetwork/defradb/issues/3028)) +* CLI purge command ([#2998](https://github.com/sourcenetwork/defradb/issues/2998)) +* Add support for one sided relations ([#3021](https://github.com/sourcenetwork/defradb/issues/3021)) +* Add materialized views ([#3000](https://github.com/sourcenetwork/defradb/issues/3000)) +* Default scalar field values ([#2997](https://github.com/sourcenetwork/defradb/issues/2997)) +* GQL variables and operation name ([#2993](https://github.com/sourcenetwork/defradb/issues/2993)) + +### Fixes + +* Make GraphQL errors spec compliant ([#3040](https://github.com/sourcenetwork/defradb/issues/3040)) +* Ignore badger path if in-memory ([#2967](https://github.com/sourcenetwork/defradb/issues/2967)) +* Rework relation field kinds ([#2961](https://github.com/sourcenetwork/defradb/issues/2961)) +* Panic with filter on unique composite index on relation ([#3020](https://github.com/sourcenetwork/defradb/issues/3020)) +* Handle missing type in an SDL ([#3023](https://github.com/sourcenetwork/defradb/issues/3023)) +* GraphQL null argument parsing ([#3013](https://github.com/sourcenetwork/defradb/issues/3013)) +* Prevent mutations from secondary side of relation ([#3124](https://github.com/sourcenetwork/defradb/issues/3124)) +* Treat explicitly set nil values like omitted values ([#3101](https://github.com/sourcenetwork/defradb/issues/3101)) +* Remove duplication of block heads on delete ([#3096](https://github.com/sourcenetwork/defradb/issues/3096)) +* Log GQL endpoint correctly on node start ([#3037](https://github.com/sourcenetwork/defradb/issues/3037)) +* Panic with different composite-indexed child objects ([#2947](https://github.com/sourcenetwork/defradb/issues/2947)) +* Validate GraphQL schemas ([#3152](https://github.com/sourcenetwork/defradb/issues/3152)) +* Queries with filter on 2 rel fields of composite index ([#3035](https://github.com/sourcenetwork/defradb/issues/3035)) + +### Documentation + +* Rename _key to _docID in docs ([#2989](https://github.com/sourcenetwork/defradb/issues/2989)) + +### Refactoring + +* Change from protobuf to cbor for gRPC 
([#3061](https://github.com/sourcenetwork/defradb/issues/3061)) +* GraphQL order input ([#3044](https://github.com/sourcenetwork/defradb/issues/3044)) +* Merge duplicate input args ([#3046](https://github.com/sourcenetwork/defradb/issues/3046)) +* Index field directive ([#2994](https://github.com/sourcenetwork/defradb/issues/2994)) +* Make SourceHub dep internal-only ([#2963](https://github.com/sourcenetwork/defradb/issues/2963)) + +### Testing + +* Add bug bash tests for gql fragments ([#3136](https://github.com/sourcenetwork/defradb/issues/3136)) + +### Chore + +* Make keyring non-interactive ([#3026](https://github.com/sourcenetwork/defradb/issues/3026)) +* Change from ipld traversal to direct link access ([#2931](https://github.com/sourcenetwork/defradb/issues/2931)) +* Bump to GoLang v1.22 ([#2913](https://github.com/sourcenetwork/defradb/issues/2913)) + +### Bot + +* Update dependencies (bulk dependabot PRs) 14-10-2024 ([#3131](https://github.com/sourcenetwork/defradb/issues/3131)) +* Bump [@typescript](https://github.com/typescript)-eslint/eslint-plugin from 8.8.0 to 8.8.1 in /playground ([#3121](https://github.com/sourcenetwork/defradb/issues/3121)) +* Bump [@typescript](https://github.com/typescript)-eslint/parser from 8.8.0 to 8.8.1 in /playground ([#3120](https://github.com/sourcenetwork/defradb/issues/3120)) +* Update dependencies (bulk dependabot PRs) 07-10-2024 ([#3118](https://github.com/sourcenetwork/defradb/issues/3118)) +* Update dependencies (bulk dependabot PRs) 30-09-2024 ([#3088](https://github.com/sourcenetwork/defradb/issues/3088)) +* Bump [@typescript](https://github.com/typescript)-eslint/parser from 8.6.0 to 8.7.0 in /playground ([#3060](https://github.com/sourcenetwork/defradb/issues/3060)) +* Bump rollup from 4.21.0 to 4.22.4 in /playground ([#3058](https://github.com/sourcenetwork/defradb/issues/3058)) +* Bump eslint from 9.11.0 to 9.11.1 in /playground ([#3059](https://github.com/sourcenetwork/defradb/issues/3059)) +* Update dependencies (bulk dependabot PRs) 23-09-2024 ([#3055](https://github.com/sourcenetwork/defradb/issues/3055)) +* Update dependencies (bulk dependabot PRs) 17-09-2024 ([#3019](https://github.com/sourcenetwork/defradb/issues/3019)) +* Update dependencies (bulk dependabot PRs) 16-09-2024 ([#3011](https://github.com/sourcenetwork/defradb/issues/3011)) +* Bump [@typescript](https://github.com/typescript)-eslint/parser from 8.4.0 to 8.5.0 in /playground ([#2991](https://github.com/sourcenetwork/defradb/issues/2991)) +* Update dependencies (bulk dependabot PRs) 09-09-2024 ([#2990](https://github.com/sourcenetwork/defradb/issues/2990)) +* Update dependencies (bulk dependabot PRs) 02-09-2024 ([#2975](https://github.com/sourcenetwork/defradb/issues/2975)) +* Update dependencies (bulk dependabot PRs) 27-08-2024 ([#2966](https://github.com/sourcenetwork/defradb/issues/2966)) + + ## [v0.13.0](https://github.com/sourcenetwork/defradb/compare/v0.12.0...v0.13.0) diff --git a/licenses/BSL.txt b/licenses/BSL.txt index 47d08e8a1e..773ce8dc09 100644 --- a/licenses/BSL.txt +++ b/licenses/BSL.txt @@ -7,7 +7,7 @@ Parameters Licensor: Democratized Data (D2) Foundation -Licensed Work: DefraDB v0.13.0 +Licensed Work: DefraDB v0.14.0 The Licensed Work is (c) 2023 D2 Foundation. @@ -28,7 +28,7 @@ Additional Use Grant: You may only use the Licensed Work for the -Change Date: 2028-08-23 +Change Date: 2028-10-18 Change License: Apache License, Version 2.0
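
A note on [PATCH 70/71] above: the change to `internal/connor/all.go` and `internal/connor/any.go` drops the `client.NewErrUnhandledType` error and instead has the operator dispatch fall through to `(false, nil)` when the operand is not a recognized slice type. The sketch below illustrates that dispatch shape only; the function name, the single `[]string` case, and the example values are illustrative stand-ins rather than the actual connor implementation.

```go
package main

import "fmt"

// allSketch mirrors the post-fix shape of connor's all(): recognized slice
// kinds are checked element-wise, while every other operand shape simply
// fails the predicate with (false, nil) instead of an unhandled-type error.
func allSketch(condition, data any) (bool, error) {
	switch t := data.(type) {
	case []string:
		for _, v := range t {
			if v != condition {
				return false, nil
			}
		}
		return true, nil
	default:
		// nil, scalars, and objects: no match, but no error either.
		return false, nil
	}
}

func main() {
	ok, err := allSketch("a", []string{"a", "a"})
	fmt.Println(ok, err) // true <nil>

	ok, err = allSketch("a", nil) // non-array operand
	fmt.Println(ok, err) // false <nil>, previously an unhandled-type error
}
```

This is why the new integration tests in that patch expect empty result sets rather than errors when `_all`/`_any` filters run against documents whose field holds `null` or a scalar value.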