fix: import with lock sql file
Signed-off-by: thxCode <[email protected]>
thxCode committed Jun 12, 2023
1 parent 82c6fe9 commit 17e4e77
Showing 28 changed files with 643 additions and 185 deletions.
11 changes: 0 additions & 11 deletions byteset/provider_test.go
@@ -4,8 +4,6 @@ import (
"context"
"html/template"
"io"
"os"
"path/filepath"
"strings"
"testing"

@@ -60,15 +58,6 @@ func renderConfigTemplate(ct string, keyValuePairs ...any) string {
return s.String()
}

func testdataPath() string {
dir, err := os.Getwd()
if err != nil {
panic(err)
}

return filepath.Join(dir, "testdata")
}

type dockerContainer struct {
Name string
Image string
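The local testdataPath helper removed above is replaced in the tests below by testx.AbsolutePath from the utils/testx package. A minimal sketch of such a helper, assuming it resolves a relative path against the working directory exactly as the deleted function did; the real utils/testx implementation is not part of this diff:

package testx

import (
	"os"
	"path/filepath"
)

// AbsolutePath resolves rel against the current working directory and panics
// on failure, so a misconfigured test fails fast (assumed behaviour).
func AbsolutePath(rel string) string {
	dir, err := os.Getwd()
	if err != nil {
		panic(err)
	}

	return filepath.Join(dir, rel)
}
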
56 changes: 32 additions & 24 deletions byteset/resource_pipeline.go
@@ -5,10 +5,12 @@ import (
"strings"
"time"

"github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/int64default"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"

@@ -41,7 +43,6 @@ type ResourcePipelineDestination struct {
Address types.String `tfsdk:"address"`
ConnMaxOpen types.Int64 `tfsdk:"conn_max_open"`
ConnMaxIdle types.Int64 `tfsdk:"conn_max_idle"`
ConnMaxLife types.Int64 `tfsdk:"conn_max_life"`
Salt types.String `tfsdk:"salt"`
}

@@ -51,9 +52,6 @@ func (r ResourcePipelineDestination) Reflect(ctx context.Context) (pipeline.Dest
r.Address.ValueString(),
pipeline.WithConnMaxOpen(int(r.ConnMaxOpen.ValueInt64())),
pipeline.WithConnMaxIdle(int(r.ConnMaxIdle.ValueInt64())),
pipeline.WithConnMaxLife(
time.Duration(r.ConnMaxLife.ValueInt64())*time.Second,
),
)
}

@@ -118,21 +116,27 @@ choose from local/remote SQL file or database.
"conn_max_open": schema.Int64Attribute{
Optional: true,
Computed: true,
Default: int64default.StaticInt64(15),
Default: int64default.StaticInt64(5),
Description: `The maximum opening connectors of source database.`,
Validators: []validator.Int64{
int64validator.AtLeast(1),
},
},
"conn_max_idle": schema.Int64Attribute{
Optional: true,
Computed: true,
Default: int64default.StaticInt64(5),
Description: `The maximum idling connections of source database.`,
Validators: []validator.Int64{
int64validator.AtLeast(1),
int64validator.AtMostSumOf(
path.MatchRelative().AtParent().AtName("conn_max_open")),
},
},
"conn_max_life": schema.Int64Attribute{
Optional: true,
Computed: true,
Default: int64default.StaticInt64(
5 * 60,
),
Optional: true,
Computed: true,
Default: int64default.StaticInt64(5 * 60),
Description: `The maximum lifetime in seconds of source database.`,
},
},
@@ -153,24 +157,28 @@ choose from local/remote SQL file or database.
- mssql://[username:[password]@][address][:port][/instance][?database=dbname&param1=value1&...]`,
},
"conn_max_open": schema.Int64Attribute{
Optional: true,
Computed: true,
Default: int64default.StaticInt64(15),
Description: `The maximum opening connectors of destination database.`,
Optional: true,
Computed: true,
Default: int64default.StaticInt64(5),
Description: `The maximum opening connectors of destination database,
if the given SQL file is using single transaction, should turn down the "conn_max_open" to 1.
`,
Validators: []validator.Int64{
int64validator.AtLeast(1),
},
},
"conn_max_idle": schema.Int64Attribute{
Optional: true,
Computed: true,
Default: int64default.StaticInt64(5),
Description: `The maximum idling connections of destination database.`,
},
"conn_max_life": schema.Int64Attribute{
Optional: true,
Computed: true,
Default: int64default.StaticInt64(
5 * 60,
),
Description: `The maximum lifetime in seconds of destination database.`,
Default: int64default.StaticInt64(5),
Description: `The maximum idling connections of destination database,
if the given SQL file is using single transaction, should turn down the "conn_max_idle" to 1.
`,
Validators: []validator.Int64{
int64validator.AtLeast(1),
int64validator.AtMostSumOf(
path.MatchRelative().AtParent().AtName("conn_max_open")),
},
},
"salt": schema.StringAttribute{
Optional: true,
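The conn_max_open, conn_max_idle and conn_max_life attributes correspond to the standard database/sql pool settings. A minimal sketch of that mapping, assuming the pipeline options ultimately configure a *sql.DB (the actual wiring lives in the pipeline package and is not shown in this commit); it also illustrates why a single-transaction SQL file needs the pool turned down to 1:

package example

import (
	"database/sql"
	"time"
)

// configurePool maps the three schema attributes onto database/sql's pool.
// When a dump runs as one transaction, every statement must reuse the same
// connection, so maxOpen and maxIdle should both be set to 1.
func configurePool(db *sql.DB, maxOpen, maxIdle int, maxLifeSeconds int64) {
	db.SetMaxOpenConns(maxOpen)
	db.SetMaxIdleConns(maxIdle)
	db.SetConnMaxLifetime(time.Duration(maxLifeSeconds) * time.Second)
}
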
68 changes: 46 additions & 22 deletions byteset/resource_pipeline_test.go
@@ -8,17 +8,20 @@ import (
"github.com/hashicorp/terraform-plugin-testing/helper/resource"

"github.com/seal-io/terraform-provider-byteset/utils/strx"
"github.com/seal-io/terraform-provider-byteset/utils/testx"
)

func TestAccResourcePipeline_sqlite(t *testing.T) {
func TestAccResourcePipeline_file_to_sqlite(t *testing.T) {
// Test pipeline.
var (
testdataPath = testx.AbsolutePath("testdata")

resourceName = "byteset_pipeline.test"

basicSrc = fmt.Sprintf("file://%s/sqlite.sql", testdataPath())
basicSrc = fmt.Sprintf("file://%s/sqlite.sql", testdataPath)
basicDst = "sqlite:///tmp/sqlite.db"

fkSrc = fmt.Sprintf("file://%s/sqlite-fk.sql", testdataPath())
fkSrc = fmt.Sprintf("file://%s/sqlite-fk.sql", testdataPath)
fkDst = "sqlite:///tmp/sqlite.db?_pragma=foreign_keys(1)"
)

@@ -28,28 +31,29 @@ func TestAccResourcePipeline_sqlite(t *testing.T) {
Steps: []resource.TestStep{
// Basic.
{
Config: testConfig(basicSrc, basicDst),
Config: testConfigOfSourceFile(basicSrc, basicDst, 1, 1),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(resourceName, "source.address", basicSrc),
resource.TestCheckResourceAttr(resourceName, "destination.address", basicDst),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_open", "15"),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_idle", "5"),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_life", "300"),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_open", "1"),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_idle", "1"),
),
},
// Foreign Key.
{
Config: testConfig(fkSrc, fkDst),
Config: testConfigOfSourceFile(fkSrc, fkDst, 1, 1),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(resourceName, "source.address", fkSrc),
resource.TestCheckResourceAttr(resourceName, "destination.address", fkDst),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_open", "1"),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_idle", "1"),
),
},
},
})
}

func TestAccResourcePipeline_mysql(t *testing.T) {
func TestAccResourcePipeline_file_to_mysql(t *testing.T) {
// Start Database.
var (
database = "byteset"
@@ -78,12 +82,13 @@ func TestAccResourcePipeline_mysql(t *testing.T) {

// Test pipeline.
var (
testdataPath = testx.AbsolutePath("testdata")
resourceName = "byteset_pipeline.test"

basicSrc = fmt.Sprintf("file://%s/mysql.sql", testdataPath())
basicSrc = fmt.Sprintf("file://%s/mysql.sql", testdataPath)
basicDst = fmt.Sprintf("mysql://root:%s@tcp(127.0.0.1:3306)/%s", password, database)

fkSrc = fmt.Sprintf("file://%s/mysql-fk.sql", testdataPath())
fkSrc = fmt.Sprintf("file://%s/mysql-fk.sql", testdataPath)
fkDst = fmt.Sprintf("mysql://root:%s@tcp(127.0.0.1)/%s", password, database)

largeSrc = "https://raw.githubusercontent.com/seal-io/terraform-provider-byteset/main/byteset/testdata/mysql-lg.sql"
@@ -95,31 +100,37 @@ func TestAccResourcePipeline_mysql(t *testing.T) {
ProtoV6ProviderFactories: testAccProviderFactories,
Steps: []resource.TestStep{
{
Config: testConfig(basicSrc, basicDst),
Config: testConfigOfSourceFile(basicSrc, basicDst, 5, 5),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(resourceName, "source.address", basicSrc),
resource.TestCheckResourceAttr(resourceName, "destination.address", basicDst),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_open", "5"),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_idle", "5"),
),
},
{
Config: testConfig(fkSrc, fkDst),
Config: testConfigOfSourceFile(fkSrc, fkDst, 5, 5),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(resourceName, "source.address", fkSrc),
resource.TestCheckResourceAttr(resourceName, "destination.address", fkDst),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_open", "5"),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_idle", "5"),
),
},
{
Config: testConfig(largeSrc, largeDst),
Config: testConfigOfSourceFile(largeSrc, largeDst, 5, 5),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(resourceName, "source.address", largeSrc),
resource.TestCheckResourceAttr(resourceName, "destination.address", largeDst),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_open", "5"),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_idle", "5"),
),
},
},
})
}

func TestAccResourcePipeline_postgres(t *testing.T) {
func TestAccResourcePipeline_file_to_postgres(t *testing.T) {
// Start Database.
var (
database = "byteset"
@@ -149,12 +160,13 @@ func TestAccResourcePipeline_postgres(t *testing.T) {

// Test pipeline.
var (
testdataPath = testx.AbsolutePath("testdata")
resourceName = "byteset_pipeline.test"

basicSrc = fmt.Sprintf("file://%s/postgres.sql", testdataPath())
basicSrc = fmt.Sprintf("file://%s/postgres.sql", testdataPath)
basicDst = fmt.Sprintf("postgres://root:%[email protected]:5432/%s?sslmode=disable", password, database)

fkSrc = fmt.Sprintf("file://%s/postgres-fk.sql", testdataPath())
fkSrc = fmt.Sprintf("file://%s/postgres-fk.sql", testdataPath)
fkDst = fmt.Sprintf("postgres://root:%[email protected]/%s?sslmode=disable", password, database)

largeSrc = "https://raw.githubusercontent.com/seal-io/terraform-provider-byteset/main/byteset/testdata/postgres-lg.sql"
@@ -166,40 +178,52 @@ func TestAccResourcePipeline_postgres(t *testing.T) {
ProtoV6ProviderFactories: testAccProviderFactories,
Steps: []resource.TestStep{
{
Config: testConfig(basicSrc, basicDst),
Config: testConfigOfSourceFile(basicSrc, basicDst, 5, 5),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(resourceName, "source.address", basicSrc),
resource.TestCheckResourceAttr(resourceName, "destination.address", basicDst),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_open", "5"),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_idle", "5"),
),
},
{
Config: testConfig(fkSrc, fkDst),
Config: testConfigOfSourceFile(fkSrc, fkDst, 5, 5),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(resourceName, "source.address", fkSrc),
resource.TestCheckResourceAttr(resourceName, "destination.address", fkDst),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_open", "5"),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_idle", "5"),
),
},
{
Config: testConfig(largeSrc, largeDst),
Config: testConfigOfSourceFile(largeSrc, largeDst, 5, 5),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr(resourceName, "source.address", largeSrc),
resource.TestCheckResourceAttr(resourceName, "destination.address", largeDst),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_open", "5"),
resource.TestCheckResourceAttr(resourceName, "destination.conn_max_idle", "5"),
),
},
},
})
}

func testConfig(src, dst string) string {
func testConfigOfSourceFile(src, dst string, dstMaxOpen, dstMaxIdle int) string {
const tmpl = `
resource "byteset_pipeline" "test" {
source = {
address = "{{ .Src }}"
}
destination = {
address = "{{ .Dst }}"
conn_max_open = {{ .DstMaxOpen }}
conn_max_idle = {{ .DstMaxIdle }}
}
}`

return renderConfigTemplate(tmpl, "Src", src, "Dst", dst)
return renderConfigTemplate(tmpl,
"Src", src,
"Dst", dst,
"DstMaxOpen", dstMaxOpen,
"DstMaxIdle", dstMaxIdle)
}
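
A hypothetical acceptance-test step built with the new helper; the literal values are illustrative only and mirror the SQLite case above, where a locked (single-transaction) dump is imported through a pool of one connection:

// Sketch only: lives in the same test package as testConfigOfSourceFile.
step := resource.TestStep{
	Config: testConfigOfSourceFile(
		"file:///path/to/testdata/sqlite.sql", // src (illustrative path)
		"sqlite:///tmp/sqlite.db",             // dst
		1,                                     // dstMaxOpen
		1,                                     // dstMaxIdle
	),
	Check: resource.TestCheckResourceAttr(
		"byteset_pipeline.test", "destination.conn_max_open", "1"),
}
_ = step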
4 changes: 0 additions & 4 deletions byteset/testdata/mysql-fk.sql
@@ -21,8 +21,6 @@ CREATE TABLE members
);

-- members data
;;
;;
INSERT INTO members (id, last_name, first_name, team_id)
VALUES (1, 'Lucy', 'Li', 1);
INSERT INTO members (id, last_name, first_name, team_id)
@@ -33,8 +31,6 @@ INSERT INTO members (id, last_name, first_name, team_id)
VALUES (4, 'Frank', NULL, 2);

-- teams data
;;
;;
INSERT INTO teams (id, name)
VALUES (1, 'Finance');
INSERT INTO teams (id, name)
4 changes: 1 addition & 3 deletions byteset/testdata/mysql.sql
@@ -7,12 +7,10 @@ CREATE TABLE company
age INT NOT NULL,
address CHAR(50),
salary NUMERIC
);;
);


-- company data
;;
;;
INSERT INTO company (name, age, address, salary)
VALUES ('Paul', 32, 'California', 20000.00);
INSERT INTO company (name, age, address, salary)
4 changes: 0 additions & 4 deletions byteset/testdata/postgres-fk.sql
@@ -22,8 +22,6 @@ CREATE TABLE members


-- members data
;;
;;
INSERT INTO members (id, last_name, first_name, team_id)
VALUES (1, 'Lucy', 'Li', 1);
INSERT INTO members (id, last_name, first_name, team_id)
@@ -34,8 +32,6 @@ INSERT INTO members (id, last_name, first_name, team_id)
VALUES (4, 'Frank', NULL, 2);

-- teams data
;;
;;
INSERT INTO teams (id, name)
VALUES (1, 'Finance');
INSERT INTO teams (id, name)
4 changes: 1 addition & 3 deletions byteset/testdata/postgres.sql
@@ -7,12 +7,10 @@ CREATE TABLE company
age INT NOT NULL,
address CHAR(50),
salary REAL
);;
);


-- company data
;;
;;
INSERT INTO company (name, age, address, salary)
VALUES ('Paul', 32, 'California', 20000.00);
INSERT INTO company (name, age, address, salary)
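The fixture edits above drop the stray ";;" tokens and the ");;" terminators. A naive splitter shows why they matter: splitting a dump on ';' turns each ";;" into an empty statement, which a loader would then try to execute and may fail on. This is only a sketch under that assumption; the provider's actual SQL loader is not part of this diff:

package example

import "strings"

// splitStatements naively splits a dump on ';'. Stray ";;" tokens produce
// empty fragments, which are filtered out here; a loader without such a
// filter would attempt to execute them.
func splitStatements(dump string) []string {
	var stmts []string
	for _, frag := range strings.Split(dump, ";") {
		if s := strings.TrimSpace(frag); s != "" {
			stmts = append(stmts, s)
		}
	}

	return stmts
}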