You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@devlake.apache.org by wa...@apache.org on 2022/06/15 07:56:27 UTC

[incubator-devlake] branch main updated (f3b9ab01 -> 0c273670)

This is an automated email from the ASF dual-hosted git repository.

warren pushed a change to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-devlake.git


    from f3b9ab01 fix: migration table name error (#2198)
     new 33c44090 add TableName for domain layer
     new ccb526f6 add some helper for e2e test
     new d7feca9d use updated e2e test helper to update gitlab e2e test
     new bf18029d fix some bug from the difference in pg & mysql
     new f2255fd4 change name for flush name
     new a0e4a352 append
     new fe6d26ab change a name
     new 45089f27 fix for lint
     new 7269ca2c add E2E_DB_URL
     new 29da8a89 fix default test db url name
     new 0af33d30 fix linter
     new 0c273670 replace hack code

The 12 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .env.example                                       |   1 +
 .github/workflows/test-e2e.yml                     |   2 +
 e2e/database.go                                    |  22 ++-
 helpers/e2ehelper/data_flow_tester.go              | 156 +++++++++++++++++----
 ...{csv_file_iterator_test.go => csv_file_test.go} |  22 ++-
 .../{csv_file_iterator.go => csv_file_writer.go}   |  55 +++-----
 models/domainlayer/code/commit.go                  |   8 ++
 models/domainlayer/code/commit_parent.go           |   4 +
 models/domainlayer/code/note.go                    |   4 +
 models/domainlayer/code/pull_request.go            |   4 +
 models/domainlayer/code/pull_request_comment.go    |   4 +
 models/domainlayer/code/pull_request_commit.go     |   4 +
 models/domainlayer/code/pull_request_labels.go     |   4 +
 models/domainlayer/code/ref.go                     |   4 +
 models/domainlayer/code/refs_commits_diff.go       |   4 +
 models/domainlayer/code/refs_pr_cherry_pick.go     |   4 +
 models/domainlayer/code/repo.go                    |   8 ++
 models/domainlayer/code/repo_commit.go             |   4 +
 models/domainlayer/crossdomain/board_repo.go       |   4 +
 models/domainlayer/crossdomain/issue_commit.go     |   4 +
 .../domainlayer/crossdomain/issue_repo_commits.go  |   4 +
 .../domainlayer/crossdomain/pull_request_issue.go  |   4 +
 models/domainlayer/crossdomain/refs_issues_diff.go |   4 +
 models/domainlayer/devops/build.go                 |   4 +
 models/domainlayer/devops/job.go                   |   4 +
 models/domainlayer/ticket/board.go                 |   8 ++
 models/domainlayer/ticket/board_issue.go           |   4 +
 models/domainlayer/ticket/changelog.go             |   4 +
 models/domainlayer/ticket/issue.go                 |   4 +
 models/domainlayer/ticket/issue_comment.go         |   4 +
 models/domainlayer/ticket/issue_label.go           |   4 +
 models/domainlayer/ticket/sprint.go                |   8 ++
 models/domainlayer/ticket/worklog.go               |   4 +
 plugins/gitlab/e2e/project_test.go                 |  17 ++-
 .../gitlab/e2e/tables/_raw_gitlab_api_projects.csv |  16 ---
 .../gitlab/e2e/tables/_tool_gitlab_projects.csv    |  20 +--
 plugins/gitlab/e2e/tables/repos.csv                |  20 +--
 plugins/gitlab/tasks/project_convertor.go          |   2 +-
 plugins/helper/default_task_context.go             |   5 +-
 39 files changed, 337 insertions(+), 125 deletions(-)
 rename helpers/pluginhelper/{csv_file_iterator_test.go => csv_file_test.go} (58%)
 copy helpers/pluginhelper/{csv_file_iterator.go => csv_file_writer.go} (54%)


[incubator-devlake] 03/12: use updated e2e test helper to update gitlab e2e test

Posted by wa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

warren pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-devlake.git

commit d7feca9d81580fd20c227ded4057f96c31da54d9
Author: linyh <ya...@meri.co>
AuthorDate: Tue Jun 14 22:42:29 2022 +0800

    use updated e2e test helper to update gitlab e2e test
---
 helpers/e2ehelper/data_flow_tester.go                |  1 -
 plugins/gitlab/e2e/project_test.go                   | 20 ++++++++++++--------
 .../gitlab/e2e/tables/_raw_gitlab_api_projects.csv   | 16 ----------------
 plugins/gitlab/e2e/tables/_tool_gitlab_projects.csv  | 20 ++------------------
 plugins/gitlab/e2e/tables/repos.csv                  | 20 ++------------------
 plugins/gitlab/tasks/project_convertor.go            |  2 +-
 6 files changed, 17 insertions(+), 62 deletions(-)

diff --git a/helpers/e2ehelper/data_flow_tester.go b/helpers/e2ehelper/data_flow_tester.go
index 20625013..e5f1cbf0 100644
--- a/helpers/e2ehelper/data_flow_tester.go
+++ b/helpers/e2ehelper/data_flow_tester.go
@@ -246,7 +246,6 @@ func (t *DataFlowTester) VerifyTable(dst schema.Tabler, csvRelPath string, pkfie
 		for _, field := range targetfields {
 			actualValue := ""
 			switch actual[field].(type) {
-			// TODO: ensure testing database is in UTC timezone
 			case time.Time:
 				if actual[field] != nil {
 					actualValue = actual[field].(time.Time).In(location).Format("2006-01-02T15:04:05.000-07:00")
diff --git a/plugins/gitlab/e2e/project_test.go b/plugins/gitlab/e2e/project_test.go
index 32a7f02b..7e12c03e 100644
--- a/plugins/gitlab/e2e/project_test.go
+++ b/plugins/gitlab/e2e/project_test.go
@@ -21,7 +21,9 @@ import (
 	"testing"
 
 	"github.com/apache/incubator-devlake/helpers/e2ehelper"
+	"github.com/apache/incubator-devlake/models/domainlayer/code"
 	"github.com/apache/incubator-devlake/plugins/gitlab/impl"
+	"github.com/apache/incubator-devlake/plugins/gitlab/models"
 	"github.com/apache/incubator-devlake/plugins/gitlab/tasks"
 )
 
@@ -32,20 +34,22 @@ func TestGitlabDataFlow(t *testing.T) {
 
 	taskData := &tasks.GitlabTaskData{
 		Options: &tasks.GitlabOptions{
-			ProjectId: 3472737,
+			ConnectionId: 1,
+			ProjectId:    3472737,
 		},
 	}
 
 	// import raw data table
+	dataflowTester.MigrateRawTableAndFlush("_raw_gitlab_api_project")
 	dataflowTester.ImportCsv("./tables/_raw_gitlab_api_projects.csv", "_raw_gitlab_api_project")
 
 	// verify extraction
-	dataflowTester.FlushTable("_tool_gitlab_projects")
+	dataflowTester.MigrateTableAndFlush(&models.GitlabProject{})
 	dataflowTester.Subtask(tasks.ExtractProjectMeta, taskData)
-	dataflowTester.VerifyTable(
-		"_tool_gitlab_projects",
+	dataflowTester.CreateSnapshotOrVerify(
+		models.GitlabProject{},
 		"tables/_tool_gitlab_projects.csv",
-		[]string{"gitlab_id"},
+		[]string{"connection_id", "gitlab_id"},
 		[]string{
 			"name",
 			"description",
@@ -68,10 +72,10 @@ func TestGitlabDataFlow(t *testing.T) {
 	)
 
 	// verify conversion
-	dataflowTester.FlushTable("repos")
+	dataflowTester.MigrateTableAndFlush(&code.Repo{})
 	dataflowTester.Subtask(tasks.ConvertProjectMeta, taskData)
-	dataflowTester.VerifyTable(
-		"repos",
+	dataflowTester.CreateSnapshotOrVerify(
+		code.Repo{},
 		"tables/repos.csv",
 		[]string{"id"},
 		[]string{
diff --git a/plugins/gitlab/e2e/tables/_raw_gitlab_api_projects.csv b/plugins/gitlab/e2e/tables/_raw_gitlab_api_projects.csv
index b9fef1bb..9b5559c3 100644
--- a/plugins/gitlab/e2e/tables/_raw_gitlab_api_projects.csv
+++ b/plugins/gitlab/e2e/tables/_raw_gitlab_api_projects.csv
@@ -1,18 +1,2 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
 "id","params","data","url","input","created_at"
 2,"{""ProjectId"":3472737}","{""id"": 3472737, ""name"": ""inkscape"", ""path"": ""inkscape"", ""_links"": {""self"": ""https://gitlab.com/api/v4/projects/3472737"", ""events"": ""https://gitlab.com/api/v4/projects/3472737/events"", ""issues"": ""https://gitlab.com/api/v4/projects/3472737/issues"", ""labels"": ""https://gitlab.com/api/v4/projects/3472737/labels"", ""members"": ""https://gitlab.com/api/v4/projects/3472737/members"", ""repo_branches"": ""https://gitlab.com/api/v4/projects/ [...]
diff --git a/plugins/gitlab/e2e/tables/_tool_gitlab_projects.csv b/plugins/gitlab/e2e/tables/_tool_gitlab_projects.csv
index fd30a417..4c8690a9 100644
--- a/plugins/gitlab/e2e/tables/_tool_gitlab_projects.csv
+++ b/plugins/gitlab/e2e/tables/_tool_gitlab_projects.csv
@@ -1,18 +1,2 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"gitlab_id","name","description","default_branch","path_with_namespace","web_url","creator_id","visibility","open_issues_count","star_count","forked_from_project_id","forked_from_project_web_url","created_date","updated_date","created_at","updated_at","_raw_data_params","_raw_data_table","_raw_data_id","_raw_data_remark"
-3472737,inkscape,Inkscape vector image editor,master,inkscape/inkscape,https://gitlab.com/inkscape/inkscape,0,public,1472,2627,0,"","2017-06-09 14:16:35.615000000","2022-05-05 08:26:20.527000000","2022-05-05 09:38:23.184000000","2022-05-05 09:38:23.184000000","{""ProjectId"":3472737}",_raw_gitlab_api_project,2,""
+connection_id,gitlab_id,name,description,default_branch,path_with_namespace,web_url,creator_id,visibility,open_issues_count,star_count,forked_from_project_id,forked_from_project_web_url,created_date,updated_date,_raw_data_params,_raw_data_table,_raw_data_id,_raw_data_remark
+0,3472737,inkscape,Inkscape vector image editor,master,inkscape/inkscape,https://gitlab.com/inkscape/inkscape,0,public,1472,2627,0,,2017-06-09T14:16:35.615+00:00,2022-05-05T08:26:20.527+00:00,"{""ProjectId"":3472737}",_raw_gitlab_api_project,2,
diff --git a/plugins/gitlab/e2e/tables/repos.csv b/plugins/gitlab/e2e/tables/repos.csv
index f392403d..0ec8467f 100644
--- a/plugins/gitlab/e2e/tables/repos.csv
+++ b/plugins/gitlab/e2e/tables/repos.csv
@@ -1,18 +1,2 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"id","created_at","updated_at","_raw_data_params","_raw_data_table","_raw_data_id","_raw_data_remark","name","url","description","owner_id","language","forked_from","created_date","updated_date","deleted"
-gitlab:GitlabProject:3472737,"2022-05-05 09:56:43.438000000","2022-05-05 09:56:43.438000000","{""ProjectId"":3472737}",_raw_gitlab_api_project,2,"",inkscape,https://gitlab.com/inkscape/inkscape,Inkscape vector image editor,"","","","2017-06-09 14:16:35.615000000","2022-05-05 08:26:20.527000000",0
+id,_raw_data_params,_raw_data_table,_raw_data_id,_raw_data_remark,name,url,description,owner_id,language,forked_from,created_date,updated_date,deleted
+gitlab:GitlabProject:0:3472737,"{""ProjectId"":3472737}",_raw_gitlab_api_project,2,,inkscape,https://gitlab.com/inkscape/inkscape,Inkscape vector image editor,,,,2017-06-09T14:16:35.615+00:00,2022-05-05T08:26:20.527+00:00,0
diff --git a/plugins/gitlab/tasks/project_convertor.go b/plugins/gitlab/tasks/project_convertor.go
index f191ea3c..2b32bbf5 100644
--- a/plugins/gitlab/tasks/project_convertor.go
+++ b/plugins/gitlab/tasks/project_convertor.go
@@ -98,7 +98,7 @@ func convertProject(gitlabApiProject *GitlabApiProject) *models.GitlabProject {
 func convertToRepositoryModel(project *models.GitlabProject) *code.Repo {
 	domainRepository := &code.Repo{
 		DomainEntity: domainlayer.DomainEntity{
-			Id: didgen.NewDomainIdGenerator(project).Generate(project.GitlabId),
+			Id: didgen.NewDomainIdGenerator(project).Generate(project.ConnectionId, project.GitlabId),
 		},
 		Name:        project.Name,
 		Url:         project.WebUrl,


[incubator-devlake] 01/12: add TableName for domain layer

Posted by wa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

warren pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-devlake.git

commit 33c44090584bd5b95c39b838f126f5b0e696965c
Author: linyh <ya...@meri.co>
AuthorDate: Tue Jun 14 22:14:28 2022 +0800

    add TableName for domain layer
---
 models/domainlayer/code/commit.go                    | 8 ++++++++
 models/domainlayer/code/commit_parent.go             | 4 ++++
 models/domainlayer/code/note.go                      | 4 ++++
 models/domainlayer/code/pull_request.go              | 4 ++++
 models/domainlayer/code/pull_request_comment.go      | 4 ++++
 models/domainlayer/code/pull_request_commit.go       | 4 ++++
 models/domainlayer/code/pull_request_labels.go       | 4 ++++
 models/domainlayer/code/ref.go                       | 4 ++++
 models/domainlayer/code/refs_commits_diff.go         | 4 ++++
 models/domainlayer/code/refs_pr_cherry_pick.go       | 4 ++++
 models/domainlayer/code/repo.go                      | 8 ++++++++
 models/domainlayer/code/repo_commit.go               | 4 ++++
 models/domainlayer/crossdomain/board_repo.go         | 4 ++++
 models/domainlayer/crossdomain/issue_commit.go       | 4 ++++
 models/domainlayer/crossdomain/issue_repo_commits.go | 4 ++++
 models/domainlayer/crossdomain/pull_request_issue.go | 4 ++++
 models/domainlayer/crossdomain/refs_issues_diff.go   | 4 ++++
 models/domainlayer/devops/build.go                   | 4 ++++
 models/domainlayer/devops/job.go                     | 4 ++++
 models/domainlayer/ticket/board.go                   | 8 ++++++++
 models/domainlayer/ticket/board_issue.go             | 4 ++++
 models/domainlayer/ticket/changelog.go               | 4 ++++
 models/domainlayer/ticket/issue.go                   | 4 ++++
 models/domainlayer/ticket/issue_comment.go           | 4 ++++
 models/domainlayer/ticket/issue_label.go             | 4 ++++
 models/domainlayer/ticket/sprint.go                  | 8 ++++++++
 models/domainlayer/ticket/worklog.go                 | 4 ++++
 27 files changed, 124 insertions(+)

diff --git a/models/domainlayer/code/commit.go b/models/domainlayer/code/commit.go
index 61c4d072..551e88aa 100644
--- a/models/domainlayer/code/commit.go
+++ b/models/domainlayer/code/commit.go
@@ -40,6 +40,10 @@ type Commit struct {
 	CommitterId    string `gorm:"index;type:varchar(255)"`
 }
 
+func (Commit) TableName() string {
+	return "commits"
+}
+
 type CommitFile struct {
 	common.NoPKModel
 	CommitSha string `gorm:"primaryKey;type:varchar(40)"`
@@ -47,3 +51,7 @@ type CommitFile struct {
 	Additions int
 	Deletions int
 }
+
+func (CommitFile) TableName() string {
+	return "commit_files"
+}
diff --git a/models/domainlayer/code/commit_parent.go b/models/domainlayer/code/commit_parent.go
index ff9f490f..610e2854 100644
--- a/models/domainlayer/code/commit_parent.go
+++ b/models/domainlayer/code/commit_parent.go
@@ -21,3 +21,7 @@ type CommitParent struct {
 	CommitSha       string `json:"commitSha" gorm:"primaryKey;type:varchar(40);comment:commit hash"`
 	ParentCommitSha string `json:"parentCommitSha" gorm:"primaryKey;type:varchar(40);comment:parent commit hash"`
 }
+
+func (CommitParent) TableName() string {
+	return "commit_parents"
+}
diff --git a/models/domainlayer/code/note.go b/models/domainlayer/code/note.go
index 2b4d0f71..b3c628cf 100644
--- a/models/domainlayer/code/note.go
+++ b/models/domainlayer/code/note.go
@@ -33,3 +33,7 @@ type Note struct {
 	IsSystem    bool `gorm:"comment:Is or is not auto-generated vs. human generated"`
 	CreatedDate time.Time
 }
+
+func (Note) TableName() string {
+	return "notes"
+}
diff --git a/models/domainlayer/code/pull_request.go b/models/domainlayer/code/pull_request.go
index 70a84bce..7196036b 100644
--- a/models/domainlayer/code/pull_request.go
+++ b/models/domainlayer/code/pull_request.go
@@ -46,3 +46,7 @@ type PullRequest struct {
 	BaseCommitSha  string `gorm:"type:varchar(40)"`
 	HeadCommitSha  string `gorm:"type:varchar(40)"`
 }
+
+func (PullRequest) TableName() string {
+	return "pull_requests"
+}
diff --git a/models/domainlayer/code/pull_request_comment.go b/models/domainlayer/code/pull_request_comment.go
index 55f9ee93..ba00ecb0 100644
--- a/models/domainlayer/code/pull_request_comment.go
+++ b/models/domainlayer/code/pull_request_comment.go
@@ -31,3 +31,7 @@ type PullRequestComment struct {
 	CommitSha     string `gorm:"type:varchar(255)"`
 	Position      int
 }
+
+func (PullRequestComment) TableName() string {
+	return "pull_request_comments"
+}
diff --git a/models/domainlayer/code/pull_request_commit.go b/models/domainlayer/code/pull_request_commit.go
index 02f47905..a2b2c939 100644
--- a/models/domainlayer/code/pull_request_commit.go
+++ b/models/domainlayer/code/pull_request_commit.go
@@ -24,3 +24,7 @@ type PullRequestCommit struct {
 	PullRequestId string `json:"id" gorm:"primaryKey;type:varchar(255);comment:This key is generated based on details from the original plugin"` // format: <Plugin>:<Entity>:<PK0>:<PK1>
 	common.NoPKModel
 }
+
+func (PullRequestCommit) TableName() string {
+	return "pull_request_commits"
+}
diff --git a/models/domainlayer/code/pull_request_labels.go b/models/domainlayer/code/pull_request_labels.go
index 118b1abd..3a4910e8 100644
--- a/models/domainlayer/code/pull_request_labels.go
+++ b/models/domainlayer/code/pull_request_labels.go
@@ -29,3 +29,7 @@ type PullRequestLabel struct {
 	LabelName     string `gorm:"primaryKey;type:varchar(255)"`
 	common.NoPKModel
 }
+
+func (PullRequestLabel) TableName() string {
+	return "pull_request_labels"
+}
diff --git a/models/domainlayer/code/ref.go b/models/domainlayer/code/ref.go
index 7567d5fc..1de44882 100644
--- a/models/domainlayer/code/ref.go
+++ b/models/domainlayer/code/ref.go
@@ -32,3 +32,7 @@ type Ref struct {
 	RefType     string `gorm:"type:varchar(255)"`
 	CreatedDate *time.Time
 }
+
+func (Ref) TableName() string {
+	return "refs"
+}
diff --git a/models/domainlayer/code/refs_commits_diff.go b/models/domainlayer/code/refs_commits_diff.go
index 009b53b0..178c507d 100644
--- a/models/domainlayer/code/refs_commits_diff.go
+++ b/models/domainlayer/code/refs_commits_diff.go
@@ -25,3 +25,7 @@ type RefsCommitsDiff struct {
 	OldRefCommitSha string `gorm:"type:varchar(40)"`
 	SortingIndex    int
 }
+
+func (RefsCommitsDiff) TableName() string {
+	return "refs_commits_diffs"
+}
diff --git a/models/domainlayer/code/refs_pr_cherry_pick.go b/models/domainlayer/code/refs_pr_cherry_pick.go
index dab32d76..1a3166b3 100644
--- a/models/domainlayer/code/refs_pr_cherry_pick.go
+++ b/models/domainlayer/code/refs_pr_cherry_pick.go
@@ -29,3 +29,7 @@ type RefsPrCherrypick struct {
 	ParentPrId             string `json:"parent_pr_id" gorm:"primaryKey;type:varchar(255);comment:This key is generated based on details from the original plugin"` // format: <Plugin>:<Entity>:<PK0>:<PK1>
 	common.NoPKModel
 }
+
+func (RefsPrCherrypick) TableName() string {
+	return "refs_pr_cherrypicks"
+}
diff --git a/models/domainlayer/code/repo.go b/models/domainlayer/code/repo.go
index 077b126a..106259b0 100644
--- a/models/domainlayer/code/repo.go
+++ b/models/domainlayer/code/repo.go
@@ -36,8 +36,16 @@ type Repo struct {
 	Deleted     bool       `json:"deleted"`
 }
 
+func (Repo) TableName() string {
+	return "repos"
+}
+
 type RepoLanguage struct {
 	RepoId   string `json:"repoId" gorm:"index;type:varchar(255)"`
 	Language string `json:"language" gorm:"type:varchar(255)"`
 	Bytes    int
 }
+
+func (RepoLanguage) TableName() string {
+	return "repo_languages"
+}
diff --git a/models/domainlayer/code/repo_commit.go b/models/domainlayer/code/repo_commit.go
index 50ae4a3a..744d9961 100644
--- a/models/domainlayer/code/repo_commit.go
+++ b/models/domainlayer/code/repo_commit.go
@@ -24,3 +24,7 @@ type RepoCommit struct {
 	CommitSha string `json:"commitSha" gorm:"primaryKey;type:varchar(40)"`
 	common.NoPKModel
 }
+
+func (RepoCommit) TableName() string {
+	return "repo_commits"
+}
diff --git a/models/domainlayer/crossdomain/board_repo.go b/models/domainlayer/crossdomain/board_repo.go
index 17008775..003ca7ee 100644
--- a/models/domainlayer/crossdomain/board_repo.go
+++ b/models/domainlayer/crossdomain/board_repo.go
@@ -21,3 +21,7 @@ type BoardRepo struct {
 	BoardId string `gorm:"primaryKey;type:varchar(255)"`
 	RepoId  string `gorm:"primaryKey;type:varchar(255)"`
 }
+
+func (BoardRepo) TableName() string {
+	return "board_repos"
+}
diff --git a/models/domainlayer/crossdomain/issue_commit.go b/models/domainlayer/crossdomain/issue_commit.go
index d2559e02..ff70e6f1 100644
--- a/models/domainlayer/crossdomain/issue_commit.go
+++ b/models/domainlayer/crossdomain/issue_commit.go
@@ -24,3 +24,7 @@ type IssueCommit struct {
 	IssueId   string `gorm:"primaryKey;type:varchar(255)"`
 	CommitSha string `gorm:"primaryKey;type:varchar(255)"`
 }
+
+func (IssueCommit) TableName() string {
+	return "issue_commits"
+}
diff --git a/models/domainlayer/crossdomain/issue_repo_commits.go b/models/domainlayer/crossdomain/issue_repo_commits.go
index cb12044a..c6868338 100644
--- a/models/domainlayer/crossdomain/issue_repo_commits.go
+++ b/models/domainlayer/crossdomain/issue_repo_commits.go
@@ -25,3 +25,7 @@ type IssueRepoCommit struct {
 	RepoUrl   string `gorm:"primaryKey;type:varchar(255)"`
 	CommitSha string `gorm:"primaryKey;type:varchar(255)"`
 }
+
+func (IssueRepoCommit) TableName() string {
+	return "issue_repo_commits"
+}
diff --git a/models/domainlayer/crossdomain/pull_request_issue.go b/models/domainlayer/crossdomain/pull_request_issue.go
index 97d6623a..35569e76 100644
--- a/models/domainlayer/crossdomain/pull_request_issue.go
+++ b/models/domainlayer/crossdomain/pull_request_issue.go
@@ -26,3 +26,7 @@ type PullRequestIssue struct {
 	IssueNumber       int
 	common.NoPKModel
 }
+
+func (PullRequestIssue) TableName() string {
+	return "pull_request_issues"
+}
diff --git a/models/domainlayer/crossdomain/refs_issues_diff.go b/models/domainlayer/crossdomain/refs_issues_diff.go
index 3dd4e652..a0f7c9bf 100644
--- a/models/domainlayer/crossdomain/refs_issues_diff.go
+++ b/models/domainlayer/crossdomain/refs_issues_diff.go
@@ -28,3 +28,7 @@ type RefsIssuesDiffs struct {
 	IssueId         string `gorm:"primaryKey;type:varchar(255)"`
 	common.NoPKModel
 }
+
+func (RefsIssuesDiffs) TableName() string {
+	return "refs_issues_diffs"
+}
diff --git a/models/domainlayer/devops/build.go b/models/domainlayer/devops/build.go
index 8d289ad8..76886f95 100644
--- a/models/domainlayer/devops/build.go
+++ b/models/domainlayer/devops/build.go
@@ -31,3 +31,7 @@ type Build struct {
 	Status      string `gorm:"type:varchar(100)"`
 	StartedDate time.Time
 }
+
+func (Build) TableName() string {
+	return "builds"
+}
diff --git a/models/domainlayer/devops/job.go b/models/domainlayer/devops/job.go
index 255ae606..9a008c66 100644
--- a/models/domainlayer/devops/job.go
+++ b/models/domainlayer/devops/job.go
@@ -25,3 +25,7 @@ type Job struct {
 	Name string `gorm:"type:varchar(255)"`
 	domainlayer.DomainEntity
 }
+
+func (Job) TableName() string {
+	return "jobs"
+}
diff --git a/models/domainlayer/ticket/board.go b/models/domainlayer/ticket/board.go
index e1b9c1d2..bd2ba21b 100644
--- a/models/domainlayer/ticket/board.go
+++ b/models/domainlayer/ticket/board.go
@@ -32,8 +32,16 @@ type Board struct {
 	CreatedDate *time.Time
 }
 
+func (Board) TableName() string {
+	return "boards"
+}
+
 type BoardSprint struct {
 	common.NoPKModel
 	BoardId  string `gorm:"primaryKey;type:varchar(255)"`
 	SprintId string `gorm:"primaryKey;type:varchar(255)"`
 }
+
+func (BoardSprint) TableName() string {
+	return "board_sprints"
+}
diff --git a/models/domainlayer/ticket/board_issue.go b/models/domainlayer/ticket/board_issue.go
index 4772cfc8..0d30422a 100644
--- a/models/domainlayer/ticket/board_issue.go
+++ b/models/domainlayer/ticket/board_issue.go
@@ -24,3 +24,7 @@ type BoardIssue struct {
 	IssueId string `gorm:"primaryKey;type:varchar(255)"`
 	common.NoPKModel
 }
+
+func (BoardIssue) TableName() string {
+	return "board_issues"
+}
diff --git a/models/domainlayer/ticket/changelog.go b/models/domainlayer/ticket/changelog.go
index 70499ac3..f16eba1d 100644
--- a/models/domainlayer/ticket/changelog.go
+++ b/models/domainlayer/ticket/changelog.go
@@ -38,3 +38,7 @@ type Changelog struct {
 	ToValue           string
 	CreatedDate       time.Time
 }
+
+func (Changelog) TableName() string {
+	return "changelogs"
+}
diff --git a/models/domainlayer/ticket/issue.go b/models/domainlayer/ticket/issue.go
index 589f61d1..fcea3c29 100644
--- a/models/domainlayer/ticket/issue.go
+++ b/models/domainlayer/ticket/issue.go
@@ -52,6 +52,10 @@ type Issue struct {
 	Component               string `gorm:"type:varchar(255)"`
 }
 
+func (Issue) TableName() string {
+	return "issues"
+}
+
 const (
 	BUG         = "BUG"
 	REQUIREMENT = "REQUIREMENT"
diff --git a/models/domainlayer/ticket/issue_comment.go b/models/domainlayer/ticket/issue_comment.go
index 8150e42a..f8c8888e 100644
--- a/models/domainlayer/ticket/issue_comment.go
+++ b/models/domainlayer/ticket/issue_comment.go
@@ -29,3 +29,7 @@ type IssueComment struct {
 	UserId      string `gorm:"type:varchar(255)"`
 	CreatedDate time.Time
 }
+
+func (IssueComment) TableName() string {
+	return "issue_comments"
+}
diff --git a/models/domainlayer/ticket/issue_label.go b/models/domainlayer/ticket/issue_label.go
index 79be6363..0e661ce0 100644
--- a/models/domainlayer/ticket/issue_label.go
+++ b/models/domainlayer/ticket/issue_label.go
@@ -27,3 +27,7 @@ type IssueLabel struct {
 	LabelName string `gorm:"primaryKey;type:varchar(255)"`
 	common.NoPKModel
 }
+
+func (IssueLabel) TableName() string {
+	return "issue_labels"
+}
diff --git a/models/domainlayer/ticket/sprint.go b/models/domainlayer/ticket/sprint.go
index bd89d6e7..f9355fcf 100644
--- a/models/domainlayer/ticket/sprint.go
+++ b/models/domainlayer/ticket/sprint.go
@@ -41,8 +41,16 @@ type Sprint struct {
 	OriginalBoardID string `gorm:"type:varchar(255)"`
 }
 
+func (Sprint) TableName() string {
+	return "sprints"
+}
+
 type SprintIssue struct {
 	common.NoPKModel
 	SprintId string `gorm:"primaryKey;type:varchar(255)"`
 	IssueId  string `gorm:"primaryKey;type:varchar(255)"`
 }
+
+func (SprintIssue) TableName() string {
+	return "sprint_issues"
+}
diff --git a/models/domainlayer/ticket/worklog.go b/models/domainlayer/ticket/worklog.go
index e0d4da9c..30c82f7d 100644
--- a/models/domainlayer/ticket/worklog.go
+++ b/models/domainlayer/ticket/worklog.go
@@ -32,3 +32,7 @@ type IssueWorklog struct {
 	StartedDate      *time.Time
 	IssueId          string `gorm:"index;type:varchar(255)"`
 }
+
+func (IssueWorklog) TableName() string {
+	return "issue_worklogs"
+}


[incubator-devlake] 05/12: change name for flush name

Posted by wa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

warren pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-devlake.git

commit f2255fd49028b93984f44411e9080b90bd7f0021
Author: linyh <ya...@meri.co>
AuthorDate: Wed Jun 15 12:20:39 2022 +0800

    change name for flush name
---
 helpers/e2ehelper/data_flow_tester.go | 38 +++++++++++++++--------------------
 plugins/gitlab/e2e/project_test.go    |  7 +++----
 2 files changed, 19 insertions(+), 26 deletions(-)

diff --git a/helpers/e2ehelper/data_flow_tester.go b/helpers/e2ehelper/data_flow_tester.go
index ab85a7ad..ccd32fb9 100644
--- a/helpers/e2ehelper/data_flow_tester.go
+++ b/helpers/e2ehelper/data_flow_tester.go
@@ -92,16 +92,11 @@ func NewDataFlowTester(t *testing.T, pluginName string, pluginMeta core.PluginMe
 	}
 }
 
-// ImportCsv imports records from specified csv file into target table, note that existing data would be deleted first.
-func (t *DataFlowTester) ImportCsv(csvRelPath string, tableName string) {
+// ImportCsvIntoRawTable imports records from specified csv file into target table, note that existing data would be deleted first.
+func (t *DataFlowTester) ImportCsvIntoRawTable(csvRelPath string, tableName string) {
 	csvIter := pluginhelper.NewCsvFileIterator(csvRelPath)
 	defer csvIter.Close()
-	// create table if not exists
-	err := t.Db.Table(tableName).AutoMigrate(&helper.RawData{})
-	if err != nil {
-		panic(err)
-	}
-	t.MigrateRawTableAndFlush(tableName)
+	t.FlushRawTable(tableName)
 	// load rows and insert into target table
 	for csvIter.HasNext() {
 		toInsertValues := csvIter.Fetch()
@@ -117,36 +112,35 @@ func (t *DataFlowTester) ImportCsv(csvRelPath string, tableName string) {
 	}
 }
 
-// MigrateTableAndFlush migrate table and deletes all records from specified table
-func (t *DataFlowTester) MigrateRawTableAndFlush(rawRableName string) {
+// MigrateRawTableAndFlush migrate table and deletes all records from specified table
+func (t *DataFlowTester) FlushRawTable(rawTableName string) {
 	// flush target table
-	err := t.Db.Table(rawRableName).AutoMigrate(&helper.RawData{})
+	err := t.Db.Migrator().DropTable(rawTableName)
+	if err != nil {
+		panic(err)
+	}
+	err = t.Db.Table(rawTableName).AutoMigrate(&helper.RawData{})
 	if err != nil {
 		panic(err)
 	}
-	err = t.Db.Exec(fmt.Sprintf("DELETE FROM %s", rawRableName)).Error
+	err = t.Db.Exec(fmt.Sprintf("DELETE FROM %s", rawTableName)).Error
 	if err != nil {
 		panic(err)
 	}
 }
 
-// MigrateTableAndFlush migrate table and deletes all records from specified table
-func (t *DataFlowTester) MigrateTableAndFlush(dst schema.Tabler) {
+// FlushTabler migrate table and deletes all records from specified table
+func (t *DataFlowTester) FlushTabler(dst schema.Tabler) {
 	// flush target table
-	err := t.Db.AutoMigrate(dst)
+	err := t.Db.Migrator().DropTable(dst)
 	if err != nil {
 		panic(err)
 	}
-	err = t.Db.Delete(dst, `true`).Error
+	err = t.Db.AutoMigrate(dst)
 	if err != nil {
 		panic(err)
 	}
-}
-
-// FlushTable deletes all records from specified table
-func (t *DataFlowTester) FlushTable(tableName string) {
-	// flush target table
-	err := t.Db.Exec(fmt.Sprintf("DELETE FROM %s", tableName)).Error
+	err = t.Db.Delete(dst, `true`).Error
 	if err != nil {
 		panic(err)
 	}
diff --git a/plugins/gitlab/e2e/project_test.go b/plugins/gitlab/e2e/project_test.go
index 7e12c03e..e25461be 100644
--- a/plugins/gitlab/e2e/project_test.go
+++ b/plugins/gitlab/e2e/project_test.go
@@ -40,11 +40,10 @@ func TestGitlabDataFlow(t *testing.T) {
 	}
 
 	// import raw data table
-	dataflowTester.MigrateRawTableAndFlush("_raw_gitlab_api_project")
-	dataflowTester.ImportCsv("./tables/_raw_gitlab_api_projects.csv", "_raw_gitlab_api_project")
+	dataflowTester.ImportCsvIntoRawTable("./tables/_raw_gitlab_api_projects.csv", "_raw_gitlab_api_project")
 
 	// verify extraction
-	dataflowTester.MigrateTableAndFlush(&models.GitlabProject{})
+	dataflowTester.FlushTabler(&models.GitlabProject{})
 	dataflowTester.Subtask(tasks.ExtractProjectMeta, taskData)
 	dataflowTester.CreateSnapshotOrVerify(
 		models.GitlabProject{},
@@ -72,7 +71,7 @@ func TestGitlabDataFlow(t *testing.T) {
 	)
 
 	// verify conversion
-	dataflowTester.MigrateTableAndFlush(&code.Repo{})
+	dataflowTester.FlushTabler(&code.Repo{})
 	dataflowTester.Subtask(tasks.ConvertProjectMeta, taskData)
 	dataflowTester.CreateSnapshotOrVerify(
 		code.Repo{},


[incubator-devlake] 04/12: fix some bug from the difference in pg & mysql

Posted by wa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

warren pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-devlake.git

commit bf18029d5d2a4d966a2d2e07022a8bf4e7685400
Author: linyh <ya...@meri.co>
AuthorDate: Wed Jun 15 00:38:55 2022 +0800

    fix some bug from the difference in pg & mysql
---
 helpers/e2ehelper/data_flow_tester.go | 27 +++++++++++++++++++++++----
 1 file changed, 23 insertions(+), 4 deletions(-)

diff --git a/helpers/e2ehelper/data_flow_tester.go b/helpers/e2ehelper/data_flow_tester.go
index e5f1cbf0..ab85a7ad 100644
--- a/helpers/e2ehelper/data_flow_tester.go
+++ b/helpers/e2ehelper/data_flow_tester.go
@@ -104,8 +104,12 @@ func (t *DataFlowTester) ImportCsv(csvRelPath string, tableName string) {
 	t.MigrateRawTableAndFlush(tableName)
 	// load rows and insert into target table
 	for csvIter.HasNext() {
-		// make sure
-		result := t.Db.Table(tableName).Create(csvIter.Fetch())
+		toInsertValues := csvIter.Fetch()
+		// FIXME Hack code
+		if t.Db.Dialector.Name() == `postgres` {
+			toInsertValues[`data`] = strings.Replace(toInsertValues[`data`].(string), `\`, `\\`, -1)
+		}
+		result := t.Db.Table(tableName).Create(toInsertValues)
 		if result.Error != nil {
 			panic(result.Error)
 		}
@@ -173,6 +177,7 @@ func (t *DataFlowTester) CreateSnapshotOrVerify(dst schema.Tabler, csvRelPath st
 	dbCursor, err := t.Dal.Cursor(
 		dal.Select(strings.Join(allFields, `,`)),
 		dal.From(dst.TableName()),
+		dal.Orderby(strings.Join(pkfields, `,`)),
 	)
 	if err != nil {
 		panic(err)
@@ -191,6 +196,8 @@ func (t *DataFlowTester) CreateSnapshotOrVerify(dst schema.Tabler, csvRelPath st
 	for i, columnType := range columnTypes {
 		if columnType.ScanType().Name() == `Time` || columnType.ScanType().Name() == `NullTime` {
 			forScanValues[i] = new(sql.NullTime)
+		} else if columnType.ScanType().Name() == `bool` {
+			forScanValues[i] = new(bool)
 		} else {
 			forScanValues[i] = new(string)
 		}
@@ -211,7 +218,13 @@ func (t *DataFlowTester) CreateSnapshotOrVerify(dst schema.Tabler, csvRelPath st
 				} else {
 					values[i] = ``
 				}
-			default:
+			case *bool:
+				if *forScanValues[i].(*bool) {
+					values[i] = `1`
+				} else {
+					values[i] = `0`
+				}
+			case *string:
 				values[i] = fmt.Sprint(*forScanValues[i].(*string))
 			}
 		}
@@ -250,6 +263,12 @@ func (t *DataFlowTester) VerifyTable(dst schema.Tabler, csvRelPath string, pkfie
 				if actual[field] != nil {
 					actualValue = actual[field].(time.Time).In(location).Format("2006-01-02T15:04:05.000-07:00")
 				}
+			case bool:
+				if actual[field].(bool) {
+					actualValue = `1`
+				} else {
+					actualValue = `0`
+				}
 			default:
 				if actual[field] != nil {
 					actualValue = fmt.Sprint(actual[field])
@@ -265,5 +284,5 @@ func (t *DataFlowTester) VerifyTable(dst schema.Tabler, csvRelPath string, pkfie
 	if err != nil {
 		panic(err)
 	}
-	assert.Equal(t.T, expectedTotal, actualTotal)
+	assert.Equal(t.T, expectedTotal, actualTotal, fmt.Sprintf(`%s count not match`, dst.TableName()))
 }


[incubator-devlake] 07/12: change a name

Posted by wa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

warren pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-devlake.git

commit fe6d26ab8f0df252e395a81fda6091a475fb77ca
Author: linyh <ya...@meri.co>
AuthorDate: Wed Jun 15 12:27:55 2022 +0800

    change a name
---
 helpers/e2ehelper/data_flow_tester.go | 20 +++++++++-----------
 plugins/gitlab/e2e/project_test.go    |  4 ++--
 2 files changed, 11 insertions(+), 13 deletions(-)

diff --git a/helpers/e2ehelper/data_flow_tester.go b/helpers/e2ehelper/data_flow_tester.go
index d8fc8585..1c49bbcb 100644
--- a/helpers/e2ehelper/data_flow_tester.go
+++ b/helpers/e2ehelper/data_flow_tester.go
@@ -147,16 +147,8 @@ func (t *DataFlowTester) Subtask(subtaskMeta core.SubTaskMeta, taskData interfac
 	}
 }
 
-// VerifyTable reads rows from csv file and compare with records from database one by one. You must specified the
-// Primary Key Fields with `pkfields` so DataFlowTester could select the exact record from database, as well as which
-// fields to compare with by specifying `targetfields` parameter.
-func (t *DataFlowTester) CreateSnapshotOrVerify(dst schema.Tabler, csvRelPath string, pkfields []string, targetfields []string) {
-	_, err := os.Stat(csvRelPath)
-	if err == nil {
-		t.VerifyTable(dst, csvRelPath, pkfields, targetfields)
-		return
-	}
-
+// CreateSnapshot reads rows from the database and writes them into a .csv file.
+func (t *DataFlowTester) CreateSnapshot(dst schema.Tabler, csvRelPath string, pkfields []string, targetfields []string) {
 	location, _ := time.LoadLocation(`UTC`)
 	allFields := []string{}
 	allFields = append(pkfields, targetfields...)
@@ -222,6 +214,12 @@ func (t *DataFlowTester) CreateSnapshotOrVerify(dst schema.Tabler, csvRelPath st
 // Primary Key Fields with `pkfields` so DataFlowTester could select the exact record from database, as well as which
 // fields to compare with by specifying `targetfields` parameter.
 func (t *DataFlowTester) VerifyTable(dst schema.Tabler, csvRelPath string, pkfields []string, targetfields []string) {
+	_, err := os.Stat(csvRelPath)
+	if os.IsNotExist(err) {
+		t.CreateSnapshot(dst, csvRelPath, pkfields, targetfields)
+		return
+	}
+
 	csvIter := pluginhelper.NewCsvFileIterator(csvRelPath)
 	location, _ := time.LoadLocation(`UTC`)
 	defer csvIter.Close()
@@ -266,7 +264,7 @@ func (t *DataFlowTester) VerifyTable(dst schema.Tabler, csvRelPath string, pkfie
 	}
 
 	var actualTotal int64
-	err := t.Db.Table(dst.TableName()).Count(&actualTotal).Error
+	err = t.Db.Table(dst.TableName()).Count(&actualTotal).Error
 	if err != nil {
 		panic(err)
 	}
diff --git a/plugins/gitlab/e2e/project_test.go b/plugins/gitlab/e2e/project_test.go
index e25461be..c7342ef1 100644
--- a/plugins/gitlab/e2e/project_test.go
+++ b/plugins/gitlab/e2e/project_test.go
@@ -45,7 +45,7 @@ func TestGitlabDataFlow(t *testing.T) {
 	// verify extraction
 	dataflowTester.FlushTabler(&models.GitlabProject{})
 	dataflowTester.Subtask(tasks.ExtractProjectMeta, taskData)
-	dataflowTester.CreateSnapshotOrVerify(
+	dataflowTester.VerifyTable(
 		models.GitlabProject{},
 		"tables/_tool_gitlab_projects.csv",
 		[]string{"connection_id", "gitlab_id"},
@@ -73,7 +73,7 @@ func TestGitlabDataFlow(t *testing.T) {
 	// verify conversion
 	dataflowTester.FlushTabler(&code.Repo{})
 	dataflowTester.Subtask(tasks.ConvertProjectMeta, taskData)
-	dataflowTester.CreateSnapshotOrVerify(
+	dataflowTester.VerifyTable(
 		code.Repo{},
 		"tables/repos.csv",
 		[]string{"id"},


[incubator-devlake] 06/12: append

Posted by wa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

warren pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-devlake.git

commit a0e4a352a1a277eaddb34a62b53ddaea4f3da96c
Author: linyh <ya...@meri.co>
AuthorDate: Wed Jun 15 12:25:04 2022 +0800

    append
---
 helpers/e2ehelper/data_flow_tester.go | 8 --------
 1 file changed, 8 deletions(-)

diff --git a/helpers/e2ehelper/data_flow_tester.go b/helpers/e2ehelper/data_flow_tester.go
index ccd32fb9..d8fc8585 100644
--- a/helpers/e2ehelper/data_flow_tester.go
+++ b/helpers/e2ehelper/data_flow_tester.go
@@ -123,10 +123,6 @@ func (t *DataFlowTester) FlushRawTable(rawTableName string) {
 	if err != nil {
 		panic(err)
 	}
-	err = t.Db.Exec(fmt.Sprintf("DELETE FROM %s", rawTableName)).Error
-	if err != nil {
-		panic(err)
-	}
 }
 
 // FlushTabler migrate table and deletes all records from specified table
@@ -140,10 +136,6 @@ func (t *DataFlowTester) FlushTabler(dst schema.Tabler) {
 	if err != nil {
 		panic(err)
 	}
-	err = t.Db.Delete(dst, `true`).Error
-	if err != nil {
-		panic(err)
-	}
 }
 
 // Subtask executes specified subtasks


[incubator-devlake] 12/12: replace hack code

Posted by wa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

warren pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-devlake.git

commit 0c273670f7fdff2b9995b84cc0f4b9343642bbc5
Author: linyh <ya...@meri.co>
AuthorDate: Wed Jun 15 15:42:41 2022 +0800

    replace hack code
---
 helpers/e2ehelper/data_flow_tester.go | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/helpers/e2ehelper/data_flow_tester.go b/helpers/e2ehelper/data_flow_tester.go
index 6fb97580..4f30f681 100644
--- a/helpers/e2ehelper/data_flow_tester.go
+++ b/helpers/e2ehelper/data_flow_tester.go
@@ -20,6 +20,7 @@ package e2ehelper
 import (
 	"context"
 	"database/sql"
+	"encoding/json"
 	"fmt"
 	"github.com/apache/incubator-devlake/config"
 	"github.com/apache/incubator-devlake/helpers/pluginhelper"
@@ -105,10 +106,7 @@ func (t *DataFlowTester) ImportCsvIntoRawTable(csvRelPath string, tableName stri
 	// load rows and insert into target table
 	for csvIter.HasNext() {
 		toInsertValues := csvIter.Fetch()
-		// FIXME Hack code
-		if t.Db.Dialector.Name() == `postgres` {
-			toInsertValues[`data`] = strings.Replace(toInsertValues[`data`].(string), `\`, `\\`, -1)
-		}
+		toInsertValues[`data`] = json.RawMessage(toInsertValues[`data`].(string))
 		result := t.Db.Table(tableName).Create(toInsertValues)
 		if result.Error != nil {
 			panic(result.Error)


[incubator-devlake] 02/12: add some helper for e2e test

Posted by wa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

warren pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-devlake.git

commit ccb526f6bb3dd8966c3cbe5f63ab0ab38411f42e
Author: linyh <ya...@meri.co>
AuthorDate: Tue Jun 14 22:27:37 2022 +0800

    add some helper for e2e test
---
 .github/workflows/test-e2e.yml                     |   1 +
 e2e/database.go                                    |  22 +++-
 helpers/e2ehelper/data_flow_tester.go              | 124 +++++++++++++++++++--
 ...{csv_file_iterator_test.go => csv_file_test.go} |  22 +++-
 helpers/pluginhelper/csv_file_writer.go            |  82 ++++++++++++++
 plugins/helper/default_task_context.go             |   5 +-
 6 files changed, 237 insertions(+), 19 deletions(-)

diff --git a/.github/workflows/test-e2e.yml b/.github/workflows/test-e2e.yml
index 1a083811..1e42cff0 100644
--- a/.github/workflows/test-e2e.yml
+++ b/.github/workflows/test-e2e.yml
@@ -43,3 +43,4 @@ jobs:
         run: |
           cp .env.example .env
           make e2e-test
+          make e2e-plugins
diff --git a/e2e/database.go b/e2e/database.go
index 42aecb29..c69db039 100644
--- a/e2e/database.go
+++ b/e2e/database.go
@@ -22,8 +22,10 @@ import (
 	"fmt"
 	"log"
 	"net/url"
+	"strings"
 
 	mysqlGorm "gorm.io/driver/mysql"
+	postgresGorm "gorm.io/driver/postgres"
 	"gorm.io/gorm"
 )
 
@@ -34,10 +36,17 @@ func InitializeDb() (*sql.DB, error) {
 	if err != nil {
 		return nil, err
 	}
-	if u.Scheme == "mysql" {
+
+	var db *sql.DB
+	switch strings.ToLower(u.Scheme) {
+	case "mysql":
 		dbUrl = fmt.Sprintf(("%s@tcp(%s)%s?%s"), u.User.String(), u.Host, u.Path, u.RawQuery)
+		db, err = sql.Open(u.Scheme, dbUrl)
+	case "postgresql", "postgres", "pg":
+		db, err = sql.Open(`pgx`, dbUrl)
+	default:
+		return nil, fmt.Errorf("invalid DB_URL:%s", dbUrl)
 	}
-	db, err := sql.Open("mysql", dbUrl)
 	if err != nil {
 		return nil, err
 	}
@@ -58,3 +67,12 @@ func InitializeGormDb() (*gorm.DB, error) {
 	}
 	return db, nil
 }
+
+func InitializeGormDb2() (*gorm.DB, error) {
+	connectionString := "merico:merico@tcp(localhost:3306)/lake"
+	db, err := gorm.Open(postgresGorm.Open(connectionString))
+	if err != nil {
+		return nil, err
+	}
+	return db, nil
+}
diff --git a/helpers/e2ehelper/data_flow_tester.go b/helpers/e2ehelper/data_flow_tester.go
index df9950b4..20625013 100644
--- a/helpers/e2ehelper/data_flow_tester.go
+++ b/helpers/e2ehelper/data_flow_tester.go
@@ -19,19 +19,24 @@ package e2ehelper
 
 import (
 	"context"
+	"database/sql"
 	"fmt"
-	"testing"
-	"time"
-
 	"github.com/apache/incubator-devlake/config"
 	"github.com/apache/incubator-devlake/helpers/pluginhelper"
+	"github.com/apache/incubator-devlake/impl/dalgorm"
 	"github.com/apache/incubator-devlake/logger"
 	"github.com/apache/incubator-devlake/plugins/core"
+	"github.com/apache/incubator-devlake/plugins/core/dal"
 	"github.com/apache/incubator-devlake/plugins/helper"
 	"github.com/apache/incubator-devlake/runner"
 	"github.com/spf13/viper"
 	"github.com/stretchr/testify/assert"
 	"gorm.io/gorm"
+	"gorm.io/gorm/schema"
+	"os"
+	"strings"
+	"testing"
+	"time"
 )
 
 // DataFlowTester provides a universal data integrity validation facility to help `Plugin` verifying records between
@@ -58,6 +63,7 @@ import (
 type DataFlowTester struct {
 	Cfg    *viper.Viper
 	Db     *gorm.DB
+	Dal    dal.Dal
 	T      *testing.T
 	Name   string
 	Plugin core.PluginMeta
@@ -78,6 +84,7 @@ func NewDataFlowTester(t *testing.T, pluginName string, pluginMeta core.PluginMe
 	return &DataFlowTester{
 		Cfg:    cfg,
 		Db:     db,
+		Dal:    dalgorm.NewDalgorm(db),
 		T:      t,
 		Name:   pluginName,
 		Plugin: pluginMeta,
@@ -94,7 +101,7 @@ func (t *DataFlowTester) ImportCsv(csvRelPath string, tableName string) {
 	if err != nil {
 		panic(err)
 	}
-	t.FlushTable(tableName)
+	t.MigrateRawTableAndFlush(tableName)
 	// load rows and insert into target table
 	for csvIter.HasNext() {
 		// make sure
@@ -106,6 +113,32 @@ func (t *DataFlowTester) ImportCsv(csvRelPath string, tableName string) {
 	}
 }
 
+// MigrateTableAndFlush migrate table and deletes all records from specified table
+func (t *DataFlowTester) MigrateRawTableAndFlush(rawRableName string) {
+	// flush target table
+	err := t.Db.Table(rawRableName).AutoMigrate(&helper.RawData{})
+	if err != nil {
+		panic(err)
+	}
+	err = t.Db.Exec(fmt.Sprintf("DELETE FROM %s", rawRableName)).Error
+	if err != nil {
+		panic(err)
+	}
+}
+
+// MigrateTableAndFlush migrate table and deletes all records from specified table
+func (t *DataFlowTester) MigrateTableAndFlush(dst schema.Tabler) {
+	// flush target table
+	err := t.Db.AutoMigrate(dst)
+	if err != nil {
+		panic(err)
+	}
+	err = t.Db.Delete(dst, `true`).Error
+	if err != nil {
+		panic(err)
+	}
+}
+
 // FlushTable deletes all records from specified table
 func (t *DataFlowTester) FlushTable(tableName string) {
 	// flush target table
@@ -127,8 +160,71 @@ func (t *DataFlowTester) Subtask(subtaskMeta core.SubTaskMeta, taskData interfac
+// VerifyTable reads rows from the csv file and compares them with database records one by one. You must specify the
 // Primary Key Fields with `pkfields` so DataFlowTester could select the exact record from database, as well as which
 // fields to compare with by specifying `targetfields` parameter.
-func (t *DataFlowTester) VerifyTable(tableName string, csvRelPath string, pkfields []string, targetfields []string) {
+func (t *DataFlowTester) CreateSnapshotOrVerify(dst schema.Tabler, csvRelPath string, pkfields []string, targetfields []string) {
+	_, err := os.Stat(csvRelPath)
+	if err == nil {
+		t.VerifyTable(dst, csvRelPath, pkfields, targetfields)
+		return
+	}
+
+	location, _ := time.LoadLocation(`UTC`)
+	allFields := []string{}
+	allFields = append(pkfields, targetfields...)
+	dbCursor, err := t.Dal.Cursor(
+		dal.Select(strings.Join(allFields, `,`)),
+		dal.From(dst.TableName()),
+	)
+	if err != nil {
+		panic(err)
+	}
+
+	columns, err := dbCursor.Columns()
+	if err != nil {
+		panic(err)
+	}
+	csvWriter := pluginhelper.NewCsvFileWriter(csvRelPath, columns)
+	defer csvWriter.Close()
+
+	// define how to scan value
+	columnTypes, _ := dbCursor.ColumnTypes()
+	forScanValues := make([]interface{}, len(allFields))
+	for i, columnType := range columnTypes {
+		if columnType.ScanType().Name() == `Time` || columnType.ScanType().Name() == `NullTime` {
+			forScanValues[i] = new(sql.NullTime)
+		} else {
+			forScanValues[i] = new(string)
+		}
+	}
+
+	for dbCursor.Next() {
+		err = dbCursor.Scan(forScanValues...)
+		if err != nil {
+			panic(err)
+		}
+		values := make([]string, len(allFields))
+		for i := range forScanValues {
+			switch forScanValues[i].(type) {
+			case *sql.NullTime:
+				value := forScanValues[i].(*sql.NullTime)
+				if value.Valid {
+					values[i] = value.Time.In(location).Format("2006-01-02T15:04:05.000-07:00")
+				} else {
+					values[i] = ``
+				}
+			default:
+				values[i] = fmt.Sprint(*forScanValues[i].(*string))
+			}
+		}
+		csvWriter.Write(values)
+	}
+}
+
+// VerifyTable reads rows from the csv file and compares them with database records one by one. You must specify the
+// Primary Key Fields with `pkfields` so DataFlowTester could select the exact record from database, as well as which
+// fields to compare with by specifying `targetfields` parameter.
+func (t *DataFlowTester) VerifyTable(dst schema.Tabler, csvRelPath string, pkfields []string, targetfields []string) {
 	csvIter := pluginhelper.NewCsvFileIterator(csvRelPath)
+	location, _ := time.LoadLocation(`UTC`)
 	defer csvIter.Close()
 
 	var expectedTotal int64
@@ -139,11 +235,11 @@ func (t *DataFlowTester) VerifyTable(tableName string, csvRelPath string, pkfiel
 			pkvalues = append(pkvalues, expected[pkf])
 		}
 		actual := make(map[string]interface{})
-		where := ""
+		where := []string{}
 		for _, field := range pkfields {
-			where += fmt.Sprintf(" %s = ?", field)
+			where = append(where, fmt.Sprintf(" %s = ?", field))
 		}
-		err := t.Db.Table(tableName).Where(where, pkvalues...).Find(actual).Error
+		err := t.Db.Table(dst.TableName()).Where(strings.Join(where, ` AND `), pkvalues...).Find(actual).Error
 		if err != nil {
 			panic(err)
 		}
@@ -152,17 +248,21 @@ func (t *DataFlowTester) VerifyTable(tableName string, csvRelPath string, pkfiel
 			switch actual[field].(type) {
 			// TODO: ensure testing database is in UTC timezone
 			case time.Time:
-				actualValue = actual[field].(time.Time).Format("2006-01-02 15:04:05.000000000")
+				if actual[field] != nil {
+					actualValue = actual[field].(time.Time).In(location).Format("2006-01-02T15:04:05.000-07:00")
+				}
 			default:
-				actualValue = fmt.Sprint(actual[field])
+				if actual[field] != nil {
+					actualValue = fmt.Sprint(actual[field])
+				}
 			}
-			assert.Equal(t.T, expected[field], actualValue)
+			assert.Equal(t.T, expected[field], actualValue, fmt.Sprintf(`%s.%s not match`, dst.TableName(), field))
 		}
 		expectedTotal++
 	}
 
 	var actualTotal int64
-	err := t.Db.Table(tableName).Count(&actualTotal).Error
+	err := t.Db.Table(dst.TableName()).Count(&actualTotal).Error
 	if err != nil {
 		panic(err)
 	}
diff --git a/helpers/pluginhelper/csv_file_iterator_test.go b/helpers/pluginhelper/csv_file_test.go
similarity index 58%
rename from helpers/pluginhelper/csv_file_iterator_test.go
rename to helpers/pluginhelper/csv_file_test.go
index 9c6c6394..644c88b3 100644
--- a/helpers/pluginhelper/csv_file_iterator_test.go
+++ b/helpers/pluginhelper/csv_file_test.go
@@ -17,12 +17,26 @@ limitations under the License.
 
 package pluginhelper
 
-func ExampleCsvFileIterator() {
-	iter := NewCsvFileIterator("/path/to/foobar.csv")
+import (
+	"fmt"
+	"github.com/magiconair/properties/assert"
+	"testing"
+)
+
+func TestExampleCsvFile(t *testing.T) {
+	tmpPath := t.TempDir()
+	filename := fmt.Sprintf(`%s/foobar.csv`, tmpPath)
+	println(filename)
+
+	writer := NewCsvFileWriter(filename, []string{"id", "name", "json", "created_at"})
+	writer.Write([]string{"123", "foobar", `{"url": "https://example.com"}`, "2022-05-05 09:56:43.438000000"})
+	writer.Close()
+
+	iter := NewCsvFileIterator(filename)
 	defer iter.Close()
 	for iter.HasNext() {
 		row := iter.Fetch()
-		println(row["name"]) // foobar
-		println(row["json"]) // {"url": "https://example.com"}
+		assert.Equal(t, row["name"], "foobar", "name not equal")
+		assert.Equal(t, row["json"], `{"url": "https://example.com"}`, "json not equal")
 	}
 }
diff --git a/helpers/pluginhelper/csv_file_writer.go b/helpers/pluginhelper/csv_file_writer.go
new file mode 100644
index 00000000..c413c5ad
--- /dev/null
+++ b/helpers/pluginhelper/csv_file_writer.go
@@ -0,0 +1,82 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package pluginhelper
+
+import (
+	"encoding/csv"
+	"os"
+)
+
+// CsvFileWriter makes saving csv files easier; it writes rows (tuples) to a csv file
+//
+// Example CSV format (exported by dbeaver):
+//
+//   "id","name","json","created_at"
+//   123,"foobar","{""url"": ""https://example.com""}","2022-05-05 09:56:43.438000000"
+//
+type CsvFileWriter struct {
+	file   *os.File
+	writer *csv.Writer
+	fields []string
+}
+
+// NewCsvFileWriter creates a `*CsvFileWriter` for the given csv file path
+func NewCsvFileWriter(csvPath string, fields []string) *CsvFileWriter {
+	// open csv file
+	csvFile, err := os.Create(csvPath)
+	if err != nil {
+		panic(err)
+	}
+	csvWriter := csv.NewWriter(csvFile)
+	// write field names
+	err = csvWriter.Write(fields)
+	if err != nil {
+		panic(err)
+	}
+	csvWriter.Flush()
+	if err != nil {
+		panic(err)
+	}
+	return &CsvFileWriter{
+		file:   csvFile,
+		writer: csvWriter,
+		fields: fields,
+	}
+}
+
+// Close releases resource
+func (ci *CsvFileWriter) Close() {
+	ci.writer.Flush()
+	err := ci.file.Close()
+	if err != nil {
+		panic(err)
+	}
+}
+
+// Write the values into csv
+func (ci *CsvFileWriter) Write(values []string) {
+	err := ci.writer.Write(values)
+	if err != nil {
+		panic(err)
+	}
+}
+
+// Flush physically writes the buffered data to the underlying file
+func (ci *CsvFileWriter) Flush() {
+	ci.writer.Flush()
+}
diff --git a/plugins/helper/default_task_context.go b/plugins/helper/default_task_context.go
index 9f4b6747..dcccb49d 100644
--- a/plugins/helper/default_task_context.go
+++ b/plugins/helper/default_task_context.go
@@ -239,7 +239,7 @@ func (c *DefaultTaskContext) SubTaskContext(subtask string) (core.SubTaskContext
 	return nil, fmt.Errorf("subtask %s doesn't exist", subtask)
 }
 
-// This returns a stand-alone core.SubTaskContext,
+// NewStandaloneSubTaskContext returns a stand-alone core.SubTaskContext,
 // not attached to any core.TaskContext.
 // Use this if you need to run/debug a subtask without
 // going through the usual workflow.
@@ -265,6 +265,9 @@ func (c *DefaultTaskContext) SetData(data interface{}) {
 var _ core.TaskContext = (*DefaultTaskContext)(nil)
 
 func (c *DefaultSubTaskContext) TaskContext() core.TaskContext {
+	if c.taskCtx == nil {
+		return nil
+	}
 	return c.taskCtx
 }
 


[incubator-devlake] 08/12: fix for lint

Posted by wa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

warren pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-devlake.git

commit 45089f27f2fa76d39859973d16f2cab0c201fc76
Author: linyh <ya...@meri.co>
AuthorDate: Wed Jun 15 12:33:14 2022 +0800

    fix for lint
---
 helpers/e2ehelper/data_flow_tester.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/helpers/e2ehelper/data_flow_tester.go b/helpers/e2ehelper/data_flow_tester.go
index 1c49bbcb..7b585c96 100644
--- a/helpers/e2ehelper/data_flow_tester.go
+++ b/helpers/e2ehelper/data_flow_tester.go
@@ -150,7 +150,7 @@ func (t *DataFlowTester) Subtask(subtaskMeta core.SubTaskMeta, taskData interfac
 // CreateSnapshot reads rows from database and write them into .csv file.
 func (t *DataFlowTester) CreateSnapshot(dst schema.Tabler, csvRelPath string, pkfields []string, targetfields []string) {
 	location, _ := time.LoadLocation(`UTC`)
-	allFields := []string{}
+	var allFields []string
 	allFields = append(pkfields, targetfields...)
 	dbCursor, err := t.Dal.Cursor(
 		dal.Select(strings.Join(allFields, `,`)),


[incubator-devlake] 10/12: fix default test db url name

Posted by wa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

warren pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-devlake.git

commit 29da8a89706311410875701364b2fc85397f8ef3
Author: linyh <ya...@meri.co>
AuthorDate: Wed Jun 15 15:09:57 2022 +0800

    fix default test db url name
---
 .env.example | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.env.example b/.env.example
index 818331dd..0ceca13e 100644
--- a/.env.example
+++ b/.env.example
@@ -7,7 +7,7 @@ PLUGIN_DIR=bin/plugins
 
 # Lake Database Connection String
 DB_URL=mysql://merico:merico@mysql:3306/lake?charset=utf8mb4&parseTime=True
-E2E_DB_URL=mysql://merico:merico@mysql:3306/lake?charset=utf8mb4&parseTime=True
+E2E_DB_URL=mysql://merico:merico@mysql:3306/lake_test?charset=utf8mb4&parseTime=True
 # Silent Error Warn Info
 DB_LOGGING_LEVEL=Error
 


[incubator-devlake] 09/12: add E2E_DB_URL

Posted by wa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

warren pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-devlake.git

commit 7269ca2ce994458fb678c0663a289e25d0eaf784
Author: linyh <ya...@meri.co>
AuthorDate: Wed Jun 15 13:00:06 2022 +0800

    add E2E_DB_URL
---
 .env.example                          | 1 +
 .github/workflows/test-e2e.yml        | 1 +
 helpers/e2ehelper/data_flow_tester.go | 5 +++++
 3 files changed, 7 insertions(+)

diff --git a/.env.example b/.env.example
index a7a161d8..818331dd 100644
--- a/.env.example
+++ b/.env.example
@@ -7,6 +7,7 @@ PLUGIN_DIR=bin/plugins
 
 # Lake Database Connection String
 DB_URL=mysql://merico:merico@mysql:3306/lake?charset=utf8mb4&parseTime=True
+E2E_DB_URL=mysql://merico:merico@mysql:3306/lake?charset=utf8mb4&parseTime=True
 # Silent Error Warn Info
 DB_LOGGING_LEVEL=Error
 
diff --git a/.github/workflows/test-e2e.yml b/.github/workflows/test-e2e.yml
index 1e42cff0..e6420c44 100644
--- a/.github/workflows/test-e2e.yml
+++ b/.github/workflows/test-e2e.yml
@@ -40,6 +40,7 @@ jobs:
       - name: Test
         env:
           DB_URL: mysql://root:root@db:3306/lake?charset=utf8mb4&loc=Asia%2fShanghai&parseTime=True
+          E2E_DB_URL: mysql://root:root@db:3306/lake?charset=utf8mb4&loc=Asia%2fShanghai&parseTime=True
         run: |
           cp .env.example .env
           make e2e-test
diff --git a/helpers/e2ehelper/data_flow_tester.go b/helpers/e2ehelper/data_flow_tester.go
index 7b585c96..a006d56f 100644
--- a/helpers/e2ehelper/data_flow_tester.go
+++ b/helpers/e2ehelper/data_flow_tester.go
@@ -77,6 +77,11 @@ func NewDataFlowTester(t *testing.T, pluginName string, pluginMeta core.PluginMe
 		panic(err)
 	}
 	cfg := config.GetConfig()
+	e2eDbUrl := cfg.GetString(`E2E_DB_URL`)
+	if e2eDbUrl == `` {
+		panic(fmt.Errorf(`e2e can only run with E2E_DB_URL, please set it in .env`))
+	}
+	cfg.Set(`DB_URL`, cfg.GetString(`E2E_DB_URL`))
 	db, err := runner.NewGormDb(cfg, logger.Global)
 	if err != nil {
 		panic(err)


[incubator-devlake] 11/12: fix linter

Posted by wa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

warren pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-devlake.git

commit 0af33d30f083bd579a54b380ed7977f286326e2e
Author: linyh <ya...@meri.co>
AuthorDate: Wed Jun 15 15:31:36 2022 +0800

    fix linter
---
 helpers/e2ehelper/data_flow_tester.go | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/helpers/e2ehelper/data_flow_tester.go b/helpers/e2ehelper/data_flow_tester.go
index a006d56f..6fb97580 100644
--- a/helpers/e2ehelper/data_flow_tester.go
+++ b/helpers/e2ehelper/data_flow_tester.go
@@ -155,8 +155,7 @@ func (t *DataFlowTester) Subtask(subtaskMeta core.SubTaskMeta, taskData interfac
 // CreateSnapshot reads rows from database and write them into .csv file.
 func (t *DataFlowTester) CreateSnapshot(dst schema.Tabler, csvRelPath string, pkfields []string, targetfields []string) {
 	location, _ := time.LoadLocation(`UTC`)
-	var allFields []string
-	allFields = append(pkfields, targetfields...)
+	allFields := append(pkfields, targetfields...)
 	dbCursor, err := t.Dal.Cursor(
 		dal.Select(strings.Join(allFields, `,`)),
 		dal.From(dst.TableName()),