Skip to content

Commit aa17af9

Browse files
committed
add proto backwards compatibility tests
The generated files in this change were generated using (generated) protofiles pre-proto upgrades. The tests verify that the newly generated proto files will result in same json objects, as well as unmarshal correctly into the same proto go structs. Signed-off-by: Humair Khan <[email protected]>
1 parent e751ecb commit aa17af9

21 files changed

+598
-12
lines changed

backend/src/apiserver/main.go

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@ import (
1919
"errors"
2020
"flag"
2121
"fmt"
22-
"google.golang.org/protobuf/encoding/protojson"
2322
"io"
2423
"math"
2524
"net"
@@ -28,6 +27,8 @@ import (
2827
"strings"
2928
"sync"
3029

30+
"google.golang.org/protobuf/encoding/protojson"
31+
3132
"github.com/fsnotify/fsnotify"
3233
"github.com/golang/glog"
3334
"github.com/gorilla/mux"

backend/src/apiserver/server/api_converter.go

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -470,7 +470,7 @@ func toApiPipelineVersion(pv *model.PipelineVersion) *apiv2beta1.PipelineVersion
470470
}
471471

472472
// Convert pipeline spec
473-
spec, err := yamlStringToPipelineSpecStruct(pv.PipelineSpec)
473+
spec, err := YamlStringToPipelineSpecStruct(pv.PipelineSpec)
474474
if err != nil {
475475
return &apiv2beta1.PipelineVersion{
476476
PipelineVersionId: pv.UUID,
@@ -1506,7 +1506,7 @@ func toApiRun(r *model.Run) *apiv2beta1.Run {
15061506
}
15071507
return apiRunV2
15081508
} else if r.PipelineSpec.PipelineSpecManifest != "" {
1509-
spec, err1 := yamlStringToPipelineSpecStruct(r.PipelineSpec.PipelineSpecManifest)
1509+
spec, err1 := YamlStringToPipelineSpecStruct(r.PipelineSpec.PipelineSpecManifest)
15101510
if err1 == nil {
15111511
apiRunV2.PipelineSource = &apiv2beta1.Run_PipelineSpec{
15121512
PipelineSpec: spec,
@@ -1515,7 +1515,7 @@ func toApiRun(r *model.Run) *apiv2beta1.Run {
15151515
}
15161516
err = util.Wrap(err1, err.Error()).(*util.UserError)
15171517
} else if r.PipelineSpec.WorkflowSpecManifest != "" {
1518-
spec, err1 := yamlStringToPipelineSpecStruct(r.PipelineSpec.WorkflowSpecManifest)
1518+
spec, err1 := YamlStringToPipelineSpecStruct(r.PipelineSpec.WorkflowSpecManifest)
15191519
if err1 == nil {
15201520
apiRunV2.PipelineSource = &apiv2beta1.Run_PipelineSpec{
15211521
PipelineSpec: spec,
@@ -2209,7 +2209,7 @@ func toApiRecurringRun(j *model.Job) *apiv2beta1.RecurringRun {
22092209
}
22102210

22112211
if j.PipelineSpec.PipelineId == "" && j.PipelineSpec.PipelineVersionId == "" {
2212-
spec, err := yamlStringToPipelineSpecStruct(j.PipelineSpec.PipelineSpecManifest)
2212+
spec, err := YamlStringToPipelineSpecStruct(j.PipelineSpec.PipelineSpecManifest)
22132213
if err != nil {
22142214
return &apiv2beta1.RecurringRun{
22152215
RecurringRunId: j.UUID,

backend/src/apiserver/server/api_util.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -166,7 +166,7 @@ func GetResourceReferenceFromRunInterface(r interface{}) ([]*apiv1beta1.Resource
166166
// and there may be a second doc for platform specific spec.
167167
// If platform spec is empty, then return pipeline spec directly. Else, return a struct with
168168
// pipeline spec and platform spec as subfields.
169-
func yamlStringToPipelineSpecStruct(s string) (*structpb.Struct, error) {
169+
func YamlStringToPipelineSpecStruct(s string) (*structpb.Struct, error) {
170170
if s == "" {
171171
return nil, nil
172172
}

backend/src/apiserver/server/api_util_test.go

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -384,7 +384,7 @@ func loadYaml(t *testing.T, path string) string {
384384
return string(res)
385385
}
386386

387-
// Tests both yamlStringToPipelineSpecStruct and pipelineSpecStructToYamlString.
387+
// Tests both YamlStringToPipelineSpecStruct and pipelineSpecStructToYamlString.
388388
func TestPipelineSpecStructToYamlString_DirectSpec(t *testing.T) {
389389
template := loadYaml(t, "test/pipeline_with_volume.yaml")
390390

@@ -397,7 +397,7 @@ func TestPipelineSpecStructToYamlString_DirectSpec(t *testing.T) {
397397
actualTemplate, err := pipelineSpecStructToYamlString(&pipeline)
398398
assert.Nil(t, err)
399399

400-
actualPipeline, err := yamlStringToPipelineSpecStruct(actualTemplate)
400+
actualPipeline, err := YamlStringToPipelineSpecStruct(actualTemplate)
401401
assert.Nil(t, err)
402402

403403
// Compare the marshalled JSON due to flakiness of structpb values
@@ -407,7 +407,7 @@ func TestPipelineSpecStructToYamlString_DirectSpec(t *testing.T) {
407407
assert.Equal(t, j1, j2)
408408
}
409409

410-
// Tests both yamlStringToPipelineSpecStruct and pipelineSpecStructToYamlString.
410+
// Tests both YamlStringToPipelineSpecStruct and pipelineSpecStructToYamlString.
411411
func TestPipelineSpecStructToYamlString_WithPlatform(t *testing.T) {
412412
template := loadYaml(t, "test/pipeline_with_volume.yaml")
413413

@@ -433,7 +433,7 @@ func TestPipelineSpecStructToYamlString_WithPlatform(t *testing.T) {
433433
actualTemplate, err := pipelineSpecStructToYamlString(&pipeline)
434434
assert.Nil(t, err)
435435

436-
actualPipeline, err := yamlStringToPipelineSpecStruct(actualTemplate)
436+
actualPipeline, err := YamlStringToPipelineSpecStruct(actualTemplate)
437437
assert.Nil(t, err)
438438

439439
// Compare the marshalled JSON due to flakiness of structpb values
@@ -443,7 +443,7 @@ func TestPipelineSpecStructToYamlString_WithPlatform(t *testing.T) {
443443
assert.Equal(t, j1, j2)
444444
}
445445

446-
// Tests both yamlStringToPipelineSpecStruct and pipelineSpecStructToYamlString.
446+
// Tests both YamlStringToPipelineSpecStruct and pipelineSpecStructToYamlString.
447447
// In this case although the received pipeline spec is nested, because platform spec is empty,
448448
// we return the pipeline spec directly.
449449
func TestPipelineSpecStructToYamlString_NestedPipelineSpec(t *testing.T) {
@@ -464,7 +464,7 @@ func TestPipelineSpecStructToYamlString_NestedPipelineSpec(t *testing.T) {
464464
actualTemplate, err := pipelineSpecStructToYamlString(&pipeline)
465465
assert.Nil(t, err)
466466

467-
actualPipeline, err := yamlStringToPipelineSpecStruct(actualTemplate)
467+
actualPipeline, err := YamlStringToPipelineSpecStruct(actualTemplate)
468468
assert.Nil(t, err)
469469

470470
// Compare the marshalled JSON due to flakiness of structpb values

backend/test/proto_tests/README.md

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
# Proto backwards compatibility testing
2+
3+
This folder contains tests that verify that newly generated proto files do not break backwards compatibility with proto binary message translation and JSON unmarshalling.
4+
5+
The structure is as follows:
6+
7+
## objects.go
8+
These contain the Go Structs that use the latest proto generated Go Code.
9+
10+
## testdata/generated-<commit>
11+
These files contain the proto binary messages and the JSON unmarshalled data using proto generated files found in the commit: `<commit>`.
12+
13+
This allows us to have a ground truth to compare the newly generated code against. The tests will unmarshal the proto binaries from `<commit>` and compare them with the structs created using the newly generated code.
14+
15+
In much the same way, we also test JSON unmarshalling. You will notice that we adjust the JSON unmarshalling options to match the grpc-gateway unmarshalling configuration used by the gRPC server.
16+
17+
## Updating generated code
18+
19+
Generated code can be updated by running the test code with the `UPDATE_EXPECTED=true` environment variable set on your command line. For example:
20+
21+
```shell
22+
cd backend/test/proto_tests
23+
export UPDATE_EXPECTED=true
24+
go test . # update generated code
25+
export UPDATE_EXPECTED=false
26+
go test . # verify your changes
27+
```
28+
29+
Note that it is very unlikely you should need to update this code; if you do, it is a good sign that you are introducing breaking changes, so use it wisely.

backend/test/proto_tests/objects.go

Lines changed: 139 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,139 @@
1+
package proto_tests
2+
3+
import (
4+
"os"
5+
"path/filepath"
6+
"time"
7+
8+
"github.com/golang/glog"
9+
pb "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
10+
"github.com/kubeflow/pipelines/backend/src/apiserver/server"
11+
"google.golang.org/genproto/googleapis/rpc/status"
12+
"google.golang.org/protobuf/types/known/structpb"
13+
"google.golang.org/protobuf/types/known/timestamppb"
14+
)
15+
16+
const mockPipelineID = "9b187b86-7c0a-42ae-a0bc-2a746b6eb7a3"
17+
const mockPipelineVersionID = "e15dc3ec-b45e-4cc7-bb07-e76b5dbce99a"
18+
const pipelineSpecYamlPath = "pipelinespec.yaml"
19+
20+
func mockPipelineSpec() *structpb.Struct {
21+
yamlContent, err := os.ReadFile(filepath.Join("testdata", pipelineSpecYamlPath))
22+
if err != nil {
23+
glog.Fatal(err)
24+
}
25+
spec, err := server.YamlStringToPipelineSpecStruct(string(yamlContent))
26+
if err != nil {
27+
glog.Fatal(err)
28+
}
29+
return spec
30+
}
31+
32+
func fixedTimestamp() *timestamppb.Timestamp {
33+
return timestamppb.New(time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC))
34+
}
35+
36+
var completedRun = &pb.Run{
37+
RunId: "completed-run-123",
38+
DisplayName: "Production Pipeline Run",
39+
ExperimentId: "exp-456",
40+
StorageState: pb.Run_AVAILABLE,
41+
Description: "Production pipeline execution for data processing",
42+
ServiceAccount: "sa1",
43+
CreatedAt: fixedTimestamp(),
44+
ScheduledAt: fixedTimestamp(),
45+
FinishedAt: fixedTimestamp(),
46+
RecurringRunId: "recurring-schedule-001",
47+
State: pb.RuntimeState_SUCCEEDED,
48+
PipelineSource: &pb.Run_PipelineVersionReference{
49+
PipelineVersionReference: &pb.PipelineVersionReference{
50+
PipelineId: mockPipelineID,
51+
PipelineVersionId: mockPipelineVersionID,
52+
},
53+
},
54+
RuntimeConfig: &pb.RuntimeConfig{
55+
Parameters: map[string]*structpb.Value{
56+
"batch_size": structpb.NewNumberValue(1000),
57+
"learning_rate": structpb.NewStringValue("foo"),
58+
},
59+
},
60+
}
61+
62+
var completedRunWithPipelineSpec = &pb.Run{
63+
RunId: "completed-run-123",
64+
DisplayName: "Production Pipeline Run",
65+
ExperimentId: "exp-456",
66+
StorageState: pb.Run_AVAILABLE,
67+
Description: "Production pipeline execution for data processing",
68+
ServiceAccount: "sa1",
69+
CreatedAt: fixedTimestamp(),
70+
ScheduledAt: fixedTimestamp(),
71+
FinishedAt: fixedTimestamp(),
72+
RecurringRunId: "recurring-schedule-001",
73+
State: pb.RuntimeState_SUCCEEDED,
74+
PipelineSource: &pb.Run_PipelineSpec{
75+
PipelineSpec: mockPipelineSpec(),
76+
},
77+
RuntimeConfig: &pb.RuntimeConfig{
78+
Parameters: map[string]*structpb.Value{
79+
"batch_size": structpb.NewNumberValue(1000),
80+
"learning_rate": structpb.NewStringValue("foo"),
81+
},
82+
},
83+
}
84+
85+
var failedRun = &pb.Run{
86+
RunId: "failed-run-456",
87+
DisplayName: "Data Processing Pipeline",
88+
ExperimentId: "exp-789",
89+
StorageState: pb.Run_AVAILABLE,
90+
Description: "Failed attempt to process customer data",
91+
ServiceAccount: "sa2",
92+
CreatedAt: fixedTimestamp(),
93+
ScheduledAt: fixedTimestamp(),
94+
FinishedAt: fixedTimestamp(),
95+
State: pb.RuntimeState_FAILED,
96+
Error: &status.Status{
97+
Code: 1,
98+
Message: "This was a Failed Run.",
99+
},
100+
}
101+
102+
var pipeline = &pb.Pipeline{
103+
PipelineId: mockPipelineID,
104+
DisplayName: "Production Data Processing Pipeline",
105+
Name: "pipeline1",
106+
Description: "Pipeline for processing and analyzing production data",
107+
CreatedAt: fixedTimestamp(),
108+
Namespace: "namespace1",
109+
Error: &status.Status{
110+
Code: 0,
111+
Message: "This a successful pipeline.",
112+
},
113+
}
114+
115+
var pipelineVersion = &pb.PipelineVersion{
116+
PipelineVersionId: mockPipelineVersionID,
117+
PipelineId: mockPipelineID,
118+
DisplayName: "v1.0.0 Production Data Processing Pipeline",
119+
Name: "pipelineversion1",
120+
Description: "First stable version of the production data processing pipeline",
121+
CreatedAt: fixedTimestamp(),
122+
PipelineSpec: mockPipelineSpec(),
123+
PackageUrl: &pb.Url{PipelineUrl: "gs://my-bucket/pipelines/pipeline1-v1.0.0.yaml"},
124+
CodeSourceUrl: "https://github.com/org/repo/pipeline1/tree/v1.0.0",
125+
Error: &status.Status{
126+
Code: 0,
127+
Message: "This is a successful pipeline version.",
128+
},
129+
}
130+
131+
var experiment = &pb.Experiment{
132+
ExperimentId: "exp-456",
133+
DisplayName: "Production Data Processing Experiment",
134+
Description: "Experiment for testing production data processing pipeline",
135+
CreatedAt: fixedTimestamp(),
136+
Namespace: "namespace1",
137+
StorageState: pb.Experiment_AVAILABLE,
138+
LastRunCreatedAt: fixedTimestamp(),
139+
}
Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
1+
package proto_tests
2+
3+
import (
4+
"fmt"
5+
"path/filepath"
6+
"testing"
7+
8+
pb "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
9+
)
10+
11+
// This is the commit that contains the proto generated files
12+
// that were used to generate the test data.
13+
const commit = "1791485"
14+
15+
func generatePath(path string) string {
16+
return filepath.Join(fmt.Sprintf("generated-%s", commit), path)
17+
}
18+
19+
func TestRuns(t *testing.T) {
20+
testOBJ(t, caseOpts[*pb.Run]{
21+
message: completedRun,
22+
expectedPBPath: generatePath("run_completed.pb"),
23+
expectedJSONPath: generatePath("run_completed.json"),
24+
})
25+
26+
testOBJ(t, caseOpts[*pb.Run]{
27+
message: completedRunWithPipelineSpec,
28+
expectedPBPath: generatePath("run_completed_with_spec.pb"),
29+
expectedJSONPath: generatePath("run_completed_with_spec.json"),
30+
})
31+
32+
testOBJ(t, caseOpts[*pb.Run]{
33+
message: failedRun,
34+
expectedPBPath: generatePath("run_failed.pb"),
35+
expectedJSONPath: generatePath("run_failed.json"),
36+
})
37+
38+
testOBJ(t, caseOpts[*pb.Pipeline]{
39+
message: pipeline,
40+
expectedPBPath: generatePath("pipeline.pb"),
41+
expectedJSONPath: generatePath("pipeline.json"),
42+
})
43+
testOBJ(t, caseOpts[*pb.PipelineVersion]{
44+
message: pipelineVersion,
45+
expectedPBPath: generatePath("pipeline_version.pb"),
46+
expectedJSONPath: generatePath("pipeline_version.json"),
47+
})
48+
testOBJ(t, caseOpts[*pb.Experiment]{
49+
message: experiment,
50+
expectedPBPath: generatePath("experiment.pb"),
51+
expectedJSONPath: generatePath("experiment.json"),
52+
})
53+
}
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
{
2+
"experiment_id": "exp-456",
3+
"display_name": "Production Data Processing Experiment",
4+
"description": "Experiment for testing production data processing pipeline",
5+
"created_at": "2024-01-01T12:00:00Z",
6+
"namespace": "namespace1",
7+
"storage_state": "AVAILABLE",
8+
"last_run_created_at": "2024-01-01T12:00:00Z"
9+
}
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
2+
exp-456%Production Data Processing Experiment:Experiment for testing production data processing pipeline"��ʬ*
3+
namespace10:��ʬ
Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
{
2+
"pipeline_id": "9b187b86-7c0a-42ae-a0bc-2a746b6eb7a3",
3+
"display_name": "Production Data Processing Pipeline",
4+
"name": "pipeline1",
5+
"description": "Pipeline for processing and analyzing production data",
6+
"created_at": "2024-01-01T12:00:00Z",
7+
"namespace": "namespace1",
8+
"error": {
9+
"code": 0,
10+
"message": "This a successful pipeline.",
11+
"details": []
12+
}
13+
}

0 commit comments

Comments
 (0)