chore: prep for switching golint -> go vet (googleapis#10223)
Fix all the things.

Updates: googleapis#9784
codyoss committed May 20, 2024
1 parent cb965ed commit 21cba14
Showing 47 changed files with 261 additions and 234 deletions.
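
Most of the changes below share one shape: composite literals of generated protobuf types (oneof wrappers such as btapb.RestoreTableRequest_Backup or btpb.RowFilter_BlockAllFilter) gain explicit field names. go vet's composites check reports "composite literal uses unkeyed fields" for unkeyed literals of struct types imported from another package, which golint never enforced, so keying the literals is presumably the prep work the commit title refers to. A minimal runnable sketch of the pattern, using a local stand-in type rather than the real generated ones:

```go
package main

import "fmt"

// Backup is a local stand-in for a generated oneof wrapper such as
// btapb.RestoreTableRequest_Backup; the real types live in the generated
// proto packages, and vet only flags unkeyed literals of imported structs.
type Backup struct {
	Backup string
}

func main() {
	// Unkeyed literal: relies on field order, and for an imported struct type
	// go vet (composites) reports "composite literal uses unkeyed fields".
	unkeyed := &Backup{"projects/p/instances/i/clusters/c/backups/b"}

	// Keyed literal, as used throughout this commit: explicit field names
	// keep compiling even if the generated struct gains or reorders fields.
	keyed := &Backup{Backup: "projects/p/instances/i/clusters/c/backups/b"}

	fmt.Println(unkeyed.Backup == keyed.Backup) // true
}
```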
2 changes: 1 addition & 1 deletion bigtable/admin.go
@@ -2103,7 +2103,7 @@ func (ac *AdminClient) RestoreTableFrom(ctx context.Context, sourceInstance, tab
req := &btapb.RestoreTableRequest{
Parent: parent,
TableId: table,
Source: &btapb.RestoreTableRequest_Backup{sourceBackupPath},
Source: &btapb.RestoreTableRequest_Backup{Backup: sourceBackupPath},
}
op, err := ac.tClient.RestoreTable(ctx, req)
if err != nil {
124 changes: 63 additions & 61 deletions bigtable/bttest/inmem_test.go
@@ -1691,7 +1691,7 @@ func TestServer_ReadModifyWriteRow(t *testing.T) {
func populateTable(ctx context.Context, s *server) (*btapb.Table, error) {
newTbl := btapb.Table{
ColumnFamilies: map[string]*btapb.ColumnFamily{
"cf0": {GcRule: &btapb.GcRule{Rule: &btapb.GcRule_MaxNumVersions{1}}},
"cf0": {GcRule: &btapb.GcRule{Rule: &btapb.GcRule_MaxNumVersions{MaxNumVersions: 1}}},
},
}
tblInfo, err := s.CreateTable(ctx, &btapb.CreateTableRequest{Parent: "cluster", TableId: "t", Table: &newTbl})
@@ -1704,7 +1704,7 @@ func populateTable(ctx context.Context, s *server) (*btapb.Table, error) {
Name: tblInfo.Name,
Modifications: []*btapb.ModifyColumnFamiliesRequest_Modification{{
Id: "cf" + strconv.Itoa(i),
Mod: &btapb.ModifyColumnFamiliesRequest_Modification_Create{&btapb.ColumnFamily{}},
Mod: &btapb.ModifyColumnFamiliesRequest_Modification_Create{Create: &btapb.ColumnFamily{}},
}},
}
}
@@ -1722,12 +1722,13 @@ func populateTable(ctx context.Context, s *server) (*btapb.Table, error) {
TableName: tblInfo.Name,
RowKey: []byte("row"),
Mutations: []*btpb.Mutation{{
Mutation: &btpb.Mutation_SetCell_{&btpb.Mutation_SetCell{
FamilyName: "cf" + strconv.Itoa(fc),
ColumnQualifier: []byte("col" + strconv.Itoa(cc)),
TimestampMicros: int64((tc + 1) * 1000),
Value: []byte{},
}},
Mutation: &btpb.Mutation_SetCell_{
SetCell: &btpb.Mutation_SetCell{
FamilyName: "cf" + strconv.Itoa(fc),
ColumnQualifier: []byte("col" + strconv.Itoa(cc)),
TimestampMicros: int64((tc + 1) * 1000),
Value: []byte{},
}},
}},
}
if _, err := s.MutateRow(ctx, req); err != nil {
@@ -1746,10 +1747,10 @@ func TestFilters(t *testing.T) {
code codes.Code
out int
}{
{in: &btpb.RowFilter{Filter: &btpb.RowFilter_BlockAllFilter{true}}, out: 0},
{in: &btpb.RowFilter{Filter: &btpb.RowFilter_BlockAllFilter{false}}, code: codes.InvalidArgument},
{in: &btpb.RowFilter{Filter: &btpb.RowFilter_PassAllFilter{true}}, out: 1},
{in: &btpb.RowFilter{Filter: &btpb.RowFilter_PassAllFilter{false}}, code: codes.InvalidArgument},
{in: &btpb.RowFilter{Filter: &btpb.RowFilter_BlockAllFilter{BlockAllFilter: true}}, out: 0},
{in: &btpb.RowFilter{Filter: &btpb.RowFilter_BlockAllFilter{BlockAllFilter: false}}, code: codes.InvalidArgument},
{in: &btpb.RowFilter{Filter: &btpb.RowFilter_PassAllFilter{PassAllFilter: true}}, out: 1},
{in: &btpb.RowFilter{Filter: &btpb.RowFilter_PassAllFilter{PassAllFilter: false}}, code: codes.InvalidArgument},
}

ctx := context.Background()
@@ -1809,20 +1810,21 @@ func TestMutateRowsAggregate(t *testing.T) {
Name: tblInfo.Name,
Modifications: []*btapb.ModifyColumnFamiliesRequest_Modification{{
Id: "sum",
Mod: &btapb.ModifyColumnFamiliesRequest_Modification_Create{&btapb.ColumnFamily{
ValueType: &btapb.Type{
Kind: &btapb.Type_AggregateType{
AggregateType: &btapb.Type_Aggregate{
InputType: &btapb.Type{
Kind: &btapb.Type_Int64Type{},
},
Aggregator: &btapb.Type_Aggregate_Sum_{
Sum: &btapb.Type_Aggregate_Sum{},
Mod: &btapb.ModifyColumnFamiliesRequest_Modification_Create{
Create: &btapb.ColumnFamily{
ValueType: &btapb.Type{
Kind: &btapb.Type_AggregateType{
AggregateType: &btapb.Type_Aggregate{
InputType: &btapb.Type{
Kind: &btapb.Type_Int64Type{},
},
Aggregator: &btapb.Type_Aggregate_Sum_{
Sum: &btapb.Type_Aggregate_Sum{},
},
},
},
},
},
},
}},
}})

@@ -1997,30 +1999,30 @@ func TestFilterRow(t *testing.T) {
want bool
}{
// The regexp-based filters perform whole-string, case-sensitive matches.
{&btpb.RowFilter{Filter: &btpb.RowFilter_RowKeyRegexFilter{[]byte("row")}}, true},
{&btpb.RowFilter{Filter: &btpb.RowFilter_RowKeyRegexFilter{[]byte("ro")}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_RowKeyRegexFilter{[]byte("ROW")}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_RowKeyRegexFilter{[]byte("moo")}}, false},

{&btpb.RowFilter{Filter: &btpb.RowFilter_FamilyNameRegexFilter{"fam"}}, true},
{&btpb.RowFilter{Filter: &btpb.RowFilter_FamilyNameRegexFilter{"f.*"}}, true},
{&btpb.RowFilter{Filter: &btpb.RowFilter_FamilyNameRegexFilter{"[fam]+"}}, true},
{&btpb.RowFilter{Filter: &btpb.RowFilter_FamilyNameRegexFilter{"fa"}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_FamilyNameRegexFilter{"FAM"}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_FamilyNameRegexFilter{"moo"}}, false},

{&btpb.RowFilter{Filter: &btpb.RowFilter_ColumnQualifierRegexFilter{[]byte("col")}}, true},
{&btpb.RowFilter{Filter: &btpb.RowFilter_ColumnQualifierRegexFilter{[]byte("co")}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_ColumnQualifierRegexFilter{[]byte("COL")}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_ColumnQualifierRegexFilter{[]byte("moo")}}, false},

{&btpb.RowFilter{Filter: &btpb.RowFilter_ValueRegexFilter{[]byte("val")}}, true},
{&btpb.RowFilter{Filter: &btpb.RowFilter_ValueRegexFilter{[]byte("va")}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_ValueRegexFilter{[]byte("VAL")}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_ValueRegexFilter{[]byte("moo")}}, false},

{&btpb.RowFilter{Filter: &btpb.RowFilter_TimestampRangeFilter{&btpb.TimestampRange{StartTimestampMicros: int64(0), EndTimestampMicros: int64(1000)}}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_TimestampRangeFilter{&btpb.TimestampRange{StartTimestampMicros: int64(1000), EndTimestampMicros: int64(2001)}}}, true},
{&btpb.RowFilter{Filter: &btpb.RowFilter_RowKeyRegexFilter{RowKeyRegexFilter: []byte("row")}}, true},
{&btpb.RowFilter{Filter: &btpb.RowFilter_RowKeyRegexFilter{RowKeyRegexFilter: []byte("ro")}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_RowKeyRegexFilter{RowKeyRegexFilter: []byte("ROW")}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_RowKeyRegexFilter{RowKeyRegexFilter: []byte("moo")}}, false},

{&btpb.RowFilter{Filter: &btpb.RowFilter_FamilyNameRegexFilter{FamilyNameRegexFilter: "fam"}}, true},
{&btpb.RowFilter{Filter: &btpb.RowFilter_FamilyNameRegexFilter{FamilyNameRegexFilter: "f.*"}}, true},
{&btpb.RowFilter{Filter: &btpb.RowFilter_FamilyNameRegexFilter{FamilyNameRegexFilter: "[fam]+"}}, true},
{&btpb.RowFilter{Filter: &btpb.RowFilter_FamilyNameRegexFilter{FamilyNameRegexFilter: "fa"}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_FamilyNameRegexFilter{FamilyNameRegexFilter: "FAM"}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_FamilyNameRegexFilter{FamilyNameRegexFilter: "moo"}}, false},

{&btpb.RowFilter{Filter: &btpb.RowFilter_ColumnQualifierRegexFilter{ColumnQualifierRegexFilter: []byte("col")}}, true},
{&btpb.RowFilter{Filter: &btpb.RowFilter_ColumnQualifierRegexFilter{ColumnQualifierRegexFilter: []byte("co")}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_ColumnQualifierRegexFilter{ColumnQualifierRegexFilter: []byte("COL")}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_ColumnQualifierRegexFilter{ColumnQualifierRegexFilter: []byte("moo")}}, false},

{&btpb.RowFilter{Filter: &btpb.RowFilter_ValueRegexFilter{ValueRegexFilter: []byte("val")}}, true},
{&btpb.RowFilter{Filter: &btpb.RowFilter_ValueRegexFilter{ValueRegexFilter: []byte("va")}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_ValueRegexFilter{ValueRegexFilter: []byte("VAL")}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_ValueRegexFilter{ValueRegexFilter: []byte("moo")}}, false},

{&btpb.RowFilter{Filter: &btpb.RowFilter_TimestampRangeFilter{TimestampRangeFilter: &btpb.TimestampRange{StartTimestampMicros: int64(0), EndTimestampMicros: int64(1000)}}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_TimestampRangeFilter{TimestampRangeFilter: &btpb.TimestampRange{StartTimestampMicros: int64(1000), EndTimestampMicros: int64(2001)}}}, true},
} {
got, err := filterRow(test.filter, row.copy())
if err != nil {
@@ -2047,25 +2049,25 @@ func TestFilterRowWithErrors(t *testing.T) {
for _, test := range []struct {
badRegex *btpb.RowFilter
}{
{&btpb.RowFilter{Filter: &btpb.RowFilter_RowKeyRegexFilter{[]byte("[")}}},
{&btpb.RowFilter{Filter: &btpb.RowFilter_FamilyNameRegexFilter{"["}}},
{&btpb.RowFilter{Filter: &btpb.RowFilter_ColumnQualifierRegexFilter{[]byte("[")}}},
{&btpb.RowFilter{Filter: &btpb.RowFilter_ValueRegexFilter{[]byte("[")}}},
{&btpb.RowFilter{Filter: &btpb.RowFilter_RowKeyRegexFilter{RowKeyRegexFilter: []byte("[")}}},
{&btpb.RowFilter{Filter: &btpb.RowFilter_FamilyNameRegexFilter{FamilyNameRegexFilter: "["}}},
{&btpb.RowFilter{Filter: &btpb.RowFilter_ColumnQualifierRegexFilter{ColumnQualifierRegexFilter: []byte("[")}}},
{&btpb.RowFilter{Filter: &btpb.RowFilter_ValueRegexFilter{ValueRegexFilter: []byte("[")}}},
{&btpb.RowFilter{Filter: &btpb.RowFilter_Chain_{
Chain: &btpb.RowFilter_Chain{
Filters: []*btpb.RowFilter{
{Filter: &btpb.RowFilter_ValueRegexFilter{[]byte("[")}},
{Filter: &btpb.RowFilter_ValueRegexFilter{ValueRegexFilter: []byte("[")}},
},
},
}}},
{&btpb.RowFilter{Filter: &btpb.RowFilter_Condition_{
Condition: &btpb.RowFilter_Condition{
PredicateFilter: &btpb.RowFilter{Filter: &btpb.RowFilter_ValueRegexFilter{[]byte("[")}},
PredicateFilter: &btpb.RowFilter{Filter: &btpb.RowFilter_ValueRegexFilter{ValueRegexFilter: []byte("[")}},
},
}}},

{&btpb.RowFilter{Filter: &btpb.RowFilter_RowSampleFilter{0.0}}}, // 0.0 is invalid.
{&btpb.RowFilter{Filter: &btpb.RowFilter_RowSampleFilter{1.0}}}, // 1.0 is invalid.
{&btpb.RowFilter{Filter: &btpb.RowFilter_RowSampleFilter{RowSampleFilter: 0.0}}}, // 0.0 is invalid.
{&btpb.RowFilter{Filter: &btpb.RowFilter_RowSampleFilter{RowSampleFilter: 1.0}}}, // 1.0 is invalid.
} {
got, err := filterRow(test.badRegex, row.copy())
if got != false {
@@ -2089,7 +2091,7 @@ func TestFilterRowWithRowSampleFilter(t *testing.T) {
{0.5, false}, // Equal to random float. Return no rows.
{0.9, true}, // Greater than random float. Return all rows.
} {
got, err := filterRow(&btpb.RowFilter{Filter: &btpb.RowFilter_RowSampleFilter{test.p}}, &row{})
got, err := filterRow(&btpb.RowFilter{Filter: &btpb.RowFilter_RowSampleFilter{RowSampleFilter: test.p}}, &row{})
if err != nil {
t.Fatalf("%f: %v", test.p, err)
}
@@ -2123,7 +2125,7 @@ func TestFilterRowWithBinaryColumnQualifier(t *testing.T) {
{`[\x7f\x80]{2}`, true}, // succeeds: exactly two of either 127 or 128
{`\C{2}`, true}, // succeeds: two bytes
} {
got, _ := filterRow(&btpb.RowFilter{Filter: &btpb.RowFilter_ColumnQualifierRegexFilter{[]byte(test.filter)}}, row.copy())
got, _ := filterRow(&btpb.RowFilter{Filter: &btpb.RowFilter_ColumnQualifierRegexFilter{ColumnQualifierRegexFilter: []byte(test.filter)}}, row.copy())
if got != test.want {
t.Errorf("%v: got %t, want %t", test.filter, got, test.want)
}
@@ -2162,7 +2164,7 @@ func TestFilterRowWithUnicodeColumnQualifier(t *testing.T) {
{`a\C{2}b`, true}, // succeeds: § is two bytes
{`\C{4}`, true}, // succeeds: four bytes
} {
got, _ := filterRow(&btpb.RowFilter{Filter: &btpb.RowFilter_ColumnQualifierRegexFilter{[]byte(test.filter)}}, row.copy())
got, _ := filterRow(&btpb.RowFilter{Filter: &btpb.RowFilter_ColumnQualifierRegexFilter{ColumnQualifierRegexFilter: []byte(test.filter)}}, row.copy())
if got != test.want {
t.Errorf("%v: got %t, want %t", test.filter, got, test.want)
}
@@ -2446,10 +2448,10 @@ func TestFilterRowCellsPerRowLimitFilterTruthiness(t *testing.T) {
want bool
}{
// The regexp-based filters perform whole-string, case-sensitive matches.
{&btpb.RowFilter{Filter: &btpb.RowFilter_CellsPerRowOffsetFilter{1}}, true},
{&btpb.RowFilter{Filter: &btpb.RowFilter_CellsPerRowOffsetFilter{2}}, true},
{&btpb.RowFilter{Filter: &btpb.RowFilter_CellsPerRowOffsetFilter{3}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_CellsPerRowOffsetFilter{4}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_CellsPerRowOffsetFilter{CellsPerRowOffsetFilter: 1}}, true},
{&btpb.RowFilter{Filter: &btpb.RowFilter_CellsPerRowOffsetFilter{CellsPerRowOffsetFilter: 2}}, true},
{&btpb.RowFilter{Filter: &btpb.RowFilter_CellsPerRowOffsetFilter{CellsPerRowOffsetFilter: 3}}, false},
{&btpb.RowFilter{Filter: &btpb.RowFilter_CellsPerRowOffsetFilter{CellsPerRowOffsetFilter: 4}}, false},
} {
got, err := filterRow(test.filter, row.copy())
if err != nil {
6 changes: 4 additions & 2 deletions bigtable/export_test.go
@@ -213,7 +213,8 @@ func (e *EmulatedEnv) AdminClientOptions() (context.Context, []option.ClientOpti
headersInterceptor.UnaryInterceptors())...)

timeout := 20 * time.Second
ctx, _ := context.WithTimeout(context.Background(), timeout)
ctx, cancel := context.WithTimeout(context.Background(), timeout)
_ = cancel // ignore for test

o = append(o, option.WithGRPCDialOption(grpc.WithBlock()))
conn, err := gtransport.DialInsecure(ctx, o...)
@@ -252,7 +253,8 @@ func (e *EmulatedEnv) NewClient() (*Client, error) {
headersInterceptor.UnaryInterceptors())...)

timeout := 20 * time.Second
ctx, _ := context.WithTimeout(context.Background(), timeout)
ctx, cancel := context.WithTimeout(context.Background(), timeout)
_ = cancel // ignore for test

o = append(o, option.WithGRPCDialOption(grpc.WithBlock()))
o = append(o, option.WithGRPCDialOption(grpc.WithDefaultCallOptions(
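
The context changes in this file, and in the integration tests below, target go vet's lostcancel check, which warns when the cancel function returned by context.WithTimeout or context.WithCancel is discarded ("the cancel function ... should be called, not discarded, to avoid a context leak"). In these test helpers the cancel func is kept but deliberately ignored, which silences the check while preserving the previous behavior; elsewhere in the commit it is deferred. A small sketch of both idioms, under the assumption that letting the timer reclaim the context is acceptable in the second case:

```go
package main

import (
	"context"
	"fmt"
	"time"
)

// Before: `ctx, _ := context.WithTimeout(...)` is what go vet (lostcancel)
// flags, because nothing can ever release the context early.

// scopedWork releases the context as soon as the function returns; this is
// the idiom used for deleteTable and the datastore tests in this commit.
func scopedWork() {
	ctx, cancel := context.WithTimeout(context.Background(), 20*time.Second)
	defer cancel()
	fmt.Println(ctx.Err()) // <nil> while the context is still live
}

// longLivedCtx mirrors the `_ = cancel // ignore for test` pattern above:
// the context has to stay usable after the helper returns, so the cancel
// func is assigned (satisfying lostcancel) but intentionally never called.
func longLivedCtx() context.Context {
	ctx, cancel := context.WithTimeout(context.Background(), 20*time.Second)
	_ = cancel // resources are reclaimed when the 20s timer fires
	return ctx
}

func main() {
	scopedWork()
	_ = longLivedCtx()
}
```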
6 changes: 4 additions & 2 deletions bigtable/integration_test.go
@@ -3000,7 +3000,7 @@ func TestIntegration_Granularity(t *testing.T) {
Name: prefix + "/tables/" + myTableName,
Modifications: []*btapb.ModifyColumnFamiliesRequest_Modification{{
Id: "cf",
Mod: &btapb.ModifyColumnFamiliesRequest_Modification_Create{&btapb.ColumnFamily{}},
Mod: &btapb.ModifyColumnFamiliesRequest_Modification_Create{Create: &btapb.ColumnFamily{}},
}},
}
table, err := adminClient.tClient.ModifyColumnFamilies(ctx, req)
@@ -4069,6 +4069,7 @@ func setupIntegration(ctx context.Context, t *testing.T) (_ IntegrationEnv, _ *C
t.Logf("bttest.Server running on %s", testEnv.Config().AdminEndpoint)
}
ctx, cancel := context.WithTimeout(ctx, timeout)
_ = cancel // ignore for test

client, err := testEnv.NewClient()
if err != nil {
@@ -4143,7 +4144,8 @@ func deleteTable(ctx context.Context, t *testing.T, ac *AdminClient, name string
Max: 2 * time.Second,
Multiplier: 1.2,
}
ctx, _ = context.WithTimeout(ctx, time.Second*60)
ctx, cancel := context.WithTimeout(ctx, time.Second*60)
defer cancel()

err := internal.Retry(ctx, bo, func() (bool, error) {
err := ac.DeleteTable(ctx, name)
6 changes: 4 additions & 2 deletions datastore/integration_test.go
@@ -233,7 +233,8 @@ func TestIntegration_NewClient(t *testing.T) {
}

func TestIntegration_Basics(t *testing.T) {
ctx, _ := context.WithTimeout(context.Background(), time.Second*20)
ctx, cancel := context.WithTimeout(context.Background(), time.Second*20)
defer cancel()
client := newTestClient(ctx, t)
defer client.Close()

@@ -309,7 +310,8 @@ func TestIntegration_GetWithReadTime(t *testing.T) {
}

func TestIntegration_TopLevelKeyLoaded(t *testing.T) {
ctx, _ := context.WithTimeout(context.Background(), time.Second*20)
ctx, cancel := context.WithTimeout(context.Background(), time.Second*20)
defer cancel()
client := newTestClient(ctx, t)
defer client.Close()

21 changes: 10 additions & 11 deletions firestore/client_test.go
@@ -22,7 +22,6 @@ import (
pb "cloud.google.com/go/firestore/apiv1/firestorepb"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
"google.golang.org/protobuf/types/known/timestamppb"
tspb "google.golang.org/protobuf/types/known/timestamppb"
)
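
This file previously imported google.golang.org/protobuf/types/known/timestamppb twice, once unaliased and once as tspb; the hunk drops the unaliased line, and later hunks switch the remaining timestamppb.Timestamp literals to the existing tspb alias. A tiny sketch of the consolidated form (test scaffolding omitted):

```go
package main

import (
	"fmt"
	"time"

	tspb "google.golang.org/protobuf/types/known/timestamppb"
)

func main() {
	// One aliased import now covers every use that previously went through
	// the duplicate unaliased timestamppb import.
	ts := &tspb.Timestamp{Seconds: time.Now().Unix()}
	fmt.Println(ts.Seconds)
}
```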

@@ -189,15 +188,15 @@ func testGetAll(t *testing.T, c *Client, srv *mockServer, dbPath string, getAll
[]interface{}{
// deliberately put these out of order
&pb.BatchGetDocumentsResponse{
Result: &pb.BatchGetDocumentsResponse_Found{wantPBDocs[2]},
Result: &pb.BatchGetDocumentsResponse_Found{Found: wantPBDocs[2]},
ReadTime: aTimestamp3,
},
&pb.BatchGetDocumentsResponse{
Result: &pb.BatchGetDocumentsResponse_Found{wantPBDocs[0]},
Result: &pb.BatchGetDocumentsResponse_Found{Found: wantPBDocs[0]},
ReadTime: aTimestamp,
},
&pb.BatchGetDocumentsResponse{
Result: &pb.BatchGetDocumentsResponse_Missing{dbPath + "/documents/C/b"},
Result: &pb.BatchGetDocumentsResponse_Missing{Missing: dbPath + "/documents/C/b"},
ReadTime: aTimestamp2,
},
},
@@ -267,15 +266,15 @@ func testGetAllWithEqualRefs(t *testing.T, c *Client, srv *mockServer, dbPath st
[]interface{}{
// deliberately put these out of order
&pb.BatchGetDocumentsResponse{
Result: &pb.BatchGetDocumentsResponse_Found{wantPBDocs[1]},
Result: &pb.BatchGetDocumentsResponse_Found{Found: wantPBDocs[1]},
ReadTime: aTimestamp3,
},
&pb.BatchGetDocumentsResponse{
Result: &pb.BatchGetDocumentsResponse_Found{wantPBDocs[0]},
Result: &pb.BatchGetDocumentsResponse_Found{Found: wantPBDocs[0]},
ReadTime: aTimestamp,
},
&pb.BatchGetDocumentsResponse{
Result: &pb.BatchGetDocumentsResponse_Missing{dbPath + "/documents/C/b"},
Result: &pb.BatchGetDocumentsResponse_Missing{Missing: dbPath + "/documents/C/b"},
ReadTime: aTimestamp2,
},
},
@@ -340,11 +339,11 @@ func TestGetAllErrors(t *testing.T) {
},
[]interface{}{
&pb.BatchGetDocumentsResponse{
Result: &pb.BatchGetDocumentsResponse_Found{&pb.Document{Name: docPath}},
Result: &pb.BatchGetDocumentsResponse_Found{Found: &pb.Document{Name: docPath}},
ReadTime: aTimestamp,
},
&pb.BatchGetDocumentsResponse{
Result: &pb.BatchGetDocumentsResponse_Missing{docPath},
Result: &pb.BatchGetDocumentsResponse_Missing{Missing: docPath},
ReadTime: aTimestamp,
},
},
@@ -375,11 +374,11 @@ func TestClient_WithReadOptions(t *testing.T) {
Database: dbPath,
Documents: []string{docPath},
ConsistencySelector: &pb.BatchGetDocumentsRequest_ReadTime{
ReadTime: &timestamppb.Timestamp{Seconds: tm.Unix()},
ReadTime: &tspb.Timestamp{Seconds: tm.Unix()},
},
}, []interface{}{
&pb.BatchGetDocumentsResponse{
ReadTime: &timestamppb.Timestamp{Seconds: tm.Unix()},
ReadTime: &tspb.Timestamp{Seconds: tm.Unix()},
Result: &pb.BatchGetDocumentsResponse_Found{
Found: &pb.Document{},
},
2 changes: 1 addition & 1 deletion firestore/collref_test.go
@@ -64,7 +64,7 @@ func TestAdd(t *testing.T) {
wantReq := commitRequestForSet()
w := wantReq.Writes[0]
w.CurrentDocument = &pb.Precondition{
ConditionType: &pb.Precondition_Exists{false},
ConditionType: &pb.Precondition_Exists{Exists: false},
}
srv.addRPCAdjust(wantReq, commitResponseForSet, func(gotReq proto.Message) {
// We can't know the doc ID before Add is called, so we take it from