Posted to commits@devlake.apache.org by zh...@apache.org on 2023/02/24 04:55:59 UTC

[incubator-devlake] branch main updated: feat(sonarqube): add more metrics (#4509)

This is an automated email from the ASF dual-hosted git repository.

zhangliang2022 pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-devlake.git


The following commit(s) were added to refs/heads/main by this push:
     new c65bb92d1 feat(sonarqube): add more metrics (#4509)
c65bb92d1 is described below

commit c65bb92d1baa76b78e0aeb039c988760ea7430b4
Author: Warren Chen <yi...@merico.dev>
AuthorDate: Fri Feb 24 12:55:52 2023 +0800

    feat(sonarqube): add more metrics (#4509)
    
    * feat(sonarqube): add more metrics
    
    * feat(sonarqube): add e2e
    
    * fix(sonarqube): delete unused struct
---
 .../domainlayer/codequality/cq_file_metrics.go     |  43 ++++---
 .../codequality/cq_issue_code_blocks.go            |  12 +-
 .../models/domainlayer/codequality/cq_issues.go    |  30 ++---
 .../models/domainlayer/codequality/cq_projects.go  |  10 +-
 .../migrationscripts/20230208_add_code_quality.go  |   2 +-
 .../migrationscripts/archived/cq_file_metrics.go   |  43 ++++---
 .../archived/cq_issue_code_blocks.go               |  12 +-
 .../models/migrationscripts/archived/cq_issues.go  |  30 ++---
 .../migrationscripts/archived/cq_projects.go       |  10 +-
 backend/plugins/gitlab/models/project.go           |   2 +-
 backend/plugins/sonarqube/e2e/account_test.go      |   4 +-
 backend/plugins/sonarqube/e2e/filemetrics_test.go  |  20 +--
 backend/plugins/sonarqube/e2e/hotspot_test.go      |   4 +-
 backend/plugins/sonarqube/e2e/issue_test.go        |   4 +-
 ...ccounts.csv => _raw_sonarqube_api_accounts.csv} |   0
 .../raw_tables/_raw_sonarqube_api_filemetrics.csv  |   7 ++
 .../_raw_sonarqube_api_filemetrics_additional.csv  |   7 ++
 ...otspots.csv => _raw_sonarqube_api_hotspots.csv} |   0
 ...be_issues.csv => _raw_sonarqube_api_issues.csv} |   0
 .../e2e/raw_tables/_raw_sonarqube_filemetrics.csv  |  11 --
 .../_tool_sonarqube_filemetrics.csv                |  14 +--
 .../sonarqube/e2e/snapshot_tables/filemetrics.csv  |  11 +-
 backend/plugins/sonarqube/impl/impl.go             |   2 +
 .../migrationscripts/20230111_add_init_tables.go   |   2 +-
 .../models/migrationscripts/archived/connection.go |  19 ---
 .../migrationscripts/archived/sonarqube_account.go |  10 +-
 .../archived/sonarqube_file_metrics.go             |  18 ++-
 .../migrationscripts/archived/sonarqube_hotspot.go |  12 +-
 .../migrationscripts/archived/sonarqube_issue.go   |  14 +--
 .../archived/sonarqube_issue_code_block.go         |   2 +-
 .../plugins/sonarqube/models/sonarqube_account.go  |   4 +-
 .../sonarqube/models/sonarqube_file_metrics.go     |  51 +++++++-
 .../plugins/sonarqube/models/sonarqube_hotspot.go  |  12 +-
 .../plugins/sonarqube/models/sonarqube_issue.go    |  16 +--
 .../plugins/sonarqube/tasks/accounts_collector.go  |   2 +-
 ...ctor.go => filemetrics_additional_collector.go} |  24 ++--
 ...ctor.go => filemetrics_additional_extractor.go} |  58 ++++-----
 .../sonarqube/tasks/filemetrics_collector.go       |   2 +-
 .../sonarqube/tasks/filemetrics_convertor.go       |  55 +++++----
 .../sonarqube/tasks/filemetrics_extractor.go       |  96 +--------------
 .../plugins/sonarqube/tasks/hotspots_collector.go  |   2 +-
 .../plugins/sonarqube/tasks/hotspots_convertor.go  |   2 +-
 .../plugins/sonarqube/tasks/issues_collector.go    |   2 +-
 .../plugins/sonarqube/tasks/issues_convertor.go    |   2 +-
 .../plugins/sonarqube/tasks/projects_convertor.go  |   2 +
 backend/plugins/sonarqube/tasks/shared.go          | 136 ++++++++++++++++++++-
 backend/plugins/sonarqube/tasks/shared_test.go     |  50 +++++++-
 47 files changed, 498 insertions(+), 373 deletions(-)

diff --git a/backend/core/models/domainlayer/codequality/cq_file_metrics.go b/backend/core/models/domainlayer/codequality/cq_file_metrics.go
index 97416c17b..8aa25bc87 100644
--- a/backend/core/models/domainlayer/codequality/cq_file_metrics.go
+++ b/backend/core/models/domainlayer/codequality/cq_file_metrics.go
@@ -23,25 +23,30 @@ import (
 
 type CqFileMetrics struct {
 	domainlayer.DomainEntity
-	ProjectKey               string `gorm:"index;type:varchar(255)"` //domain project key
-	FileName                 string `json:"file_name"`
-	FilePath                 string `json:"file_path"`
-	FileLanguage             string `json:"file_language"`
-	CodeSmells               int    `json:"code_smells"`
-	SqaleIndex               int
-	SqaleRating              float64
-	Bugs                     int     `json:"bugs"`
-	ReliabilityRating        string  `json:"reliability_rating"`
-	Vulnerabilities          int     `json:"vulnerabilities"`
-	SecurityRating           string  `json:"security_rating"`
-	SecurityHotspots         int     `json:"security_hotspots"`
-	SecurityHotspotsReviewed float64 `json:"security_hotspots_reviewed"`
-	SecurityReviewRating     string  `json:"security_review_rating"`
-	Ncloc                    int     `json:"ncloc"`
-	Coverage                 float64 `json:"coverage"`
-	LinesToCover             int     `json:"lines_to_cover"`
-	DuplicatedLinesDensity   float64 `json:"duplicated_lines_density"`
-	DuplicatedBlocks         int     `json:"duplicated_blocks"`
+	ProjectKey                          string `gorm:"index;type:varchar(255)"` //domain project key
+	FileName                            string `gorm:"type:varchar(255)"`
+	FilePath                            string
+	FileLanguage                        string `gorm:"type:varchar(20)"`
+	CodeSmells                          int
+	SqaleIndex                          int
+	SqaleRating                         float64
+	Bugs                                int
+	ReliabilityRating                   string `gorm:"type:varchar(20)"`
+	Vulnerabilities                     int
+	SecurityRating                      string `gorm:"type:varchar(20)"`
+	SecurityHotspots                    int
+	SecurityHotspotsReviewed            float64
+	SecurityReviewRating                string `gorm:"type:varchar(20)"`
+	Ncloc                               int    `json:"ncloc"`
+	UnoveredLines                       int
+	LinesToCover                        int     `json:"lines_to_cover"`
+	DuplicatedLinesDensity              float64 `json:"duplicated_lines_density"`
+	DuplicatedBlocks                    int     `json:"duplicated_blocks"`
+	DuplicatedFiles                     int
+	DuplicatedLines                     int
+	EffortToReachMaintainabilityRatingA int
+	Complexity                          int
+	CognitiveComplexity                 int
 }
 
 func (CqFileMetrics) TableName() string {
diff --git a/backend/core/models/domainlayer/codequality/cq_issue_code_blocks.go b/backend/core/models/domainlayer/codequality/cq_issue_code_blocks.go
index 8d6a20198..294bc5d3c 100644
--- a/backend/core/models/domainlayer/codequality/cq_issue_code_blocks.go
+++ b/backend/core/models/domainlayer/codequality/cq_issue_code_blocks.go
@@ -22,12 +22,12 @@ import "github.com/apache/incubator-devlake/core/models/domainlayer"
 type CqIssueCodeBlock struct {
 	domainlayer.DomainEntity
 	IssueKey    string `json:"key" gorm:"index"`
-	Component   string `json:"component" gorm:"index"`
-	StartLine   int    `json:"startLine" `
-	EndLine     int    `json:"endLine" `
-	StartOffset int    `json:"startOffset" `
-	EndOffset   int    `json:"endOffset" `
-	Msg         string `json:"msg" `
+	Component   string `gorm:"index"`
+	StartLine   int
+	EndLine     int
+	StartOffset int
+	EndOffset   int
+	Msg         string
 }
 
 func (CqIssueCodeBlock) TableName() string {
diff --git a/backend/core/models/domainlayer/codequality/cq_issues.go b/backend/core/models/domainlayer/codequality/cq_issues.go
index bdec07115..dbbbb9e0a 100644
--- a/backend/core/models/domainlayer/codequality/cq_issues.go
+++ b/backend/core/models/domainlayer/codequality/cq_issues.go
@@ -24,21 +24,21 @@ import (
 
 type CqIssue struct {
 	domainlayer.DomainEntity
-	Rule                     string           `json:"rule" gorm:"type:varchar(255)"`
-	Severity                 string           `json:"severity" gorm:"type:varchar(255)"`
-	Component                string           `json:"component" gorm:"type:varchar(255)"`
-	ProjectKey               string           `gorm:"index;type:varchar(255)"` //domain project key
-	Line                     int              `json:"line"`
-	Status                   string           `json:"status" gorm:"type:varchar(255)"`
-	Message                  string           `json:"message"`
-	Debt                     int              `json:"debt"`
-	Effort                   int              `json:"effort"`
-	CommitAuthorEmail        string           `json:"author" gorm:"type:varchar(255)"`
-	Assignee                 string           `json:"assignee" gorm:"type:varchar(255)"`
-	Hash                     string           `json:"hash" gorm:"type:varchar(255)"`
-	Tags                     string           `json:"tags" gorm:"type:varchar(255)"`
-	Type                     string           `json:"type" gorm:"type:varchar(255)"`
-	Scope                    string           `json:"scope" gorm:"type:varchar(255)"`
+	Rule                     string `gorm:"type:varchar(255)"`
+	Severity                 string `gorm:"type:varchar(100)"`
+	Component                string `gorm:"type:varchar(255)"`
+	ProjectKey               string `gorm:"index;type:varchar(100)"` //domain project key
+	Line                     int
+	Status                   string `gorm:"type:varchar(20)"`
+	Message                  string
+	Debt                     int
+	Effort                   int
+	CommitAuthorEmail        string `json:"author" gorm:"type:varchar(255)"`
+	Assignee                 string `json:"assignee" gorm:"type:varchar(255)"`
+	Hash                     string `gorm:"type:varchar(100)"`
+	Tags                     string
+	Type                     string           `gorm:"type:varchar(100)"`
+	Scope                    string           `gorm:"type:varchar(255)"`
 	StartLine                int              `json:"startLine"`
 	EndLine                  int              `json:"endLine"`
 	StartOffset              int              `json:"startOffset"`
diff --git a/backend/core/models/domainlayer/codequality/cq_projects.go b/backend/core/models/domainlayer/codequality/cq_projects.go
index c354bea45..b53a062a4 100644
--- a/backend/core/models/domainlayer/codequality/cq_projects.go
+++ b/backend/core/models/domainlayer/codequality/cq_projects.go
@@ -27,11 +27,11 @@ var _ plugin.Scope = (*CqProject)(nil)
 
 type CqProject struct {
 	domainlayer.DomainEntity
-	Name             string           `json:"name" gorm:"type:varchar(255)"`
-	Qualifier        string           `json:"qualifier" gorm:"type:varchar(255)"`
-	Visibility       string           `json:"visibility" gorm:"type:varchar(64)"`
-	LastAnalysisDate *api.Iso8601Time `json:"lastAnalysisDate"`
-	CommitSha        string           `json:"revision" gorm:"type:varchar(128)"`
+	Name             string `gorm:"type:varchar(255)"`
+	Qualifier        string `gorm:"type:varchar(255)"`
+	Visibility       string `gorm:"type:varchar(64)"`
+	LastAnalysisDate *api.Iso8601Time
+	CommitSha        string `gorm:"type:varchar(128)"`
 }
 
 func (CqProject) TableName() string {
diff --git a/backend/core/models/migrationscripts/20230208_add_code_quality.go b/backend/core/models/migrationscripts/20230208_add_code_quality.go
index a60e5bb42..3b5b71626 100644
--- a/backend/core/models/migrationscripts/20230208_add_code_quality.go
+++ b/backend/core/models/migrationscripts/20230208_add_code_quality.go
@@ -46,7 +46,7 @@ func (u *addCodeQuality) Up(basicRes context.BasicRes) errors.Error {
 }
 
 func (*addCodeQuality) Version() uint64 {
-	return 20230221000022
+	return 20230221000035
 }
 
 func (*addCodeQuality) Name() string {
diff --git a/backend/core/models/migrationscripts/archived/cq_file_metrics.go b/backend/core/models/migrationscripts/archived/cq_file_metrics.go
index 399311502..8b716960e 100644
--- a/backend/core/models/migrationscripts/archived/cq_file_metrics.go
+++ b/backend/core/models/migrationscripts/archived/cq_file_metrics.go
@@ -19,25 +19,30 @@ package archived
 
 type CqFileMetrics struct {
 	DomainEntity
-	ProjectKey               string `gorm:"index;type:varchar(255)"` //domain project key
-	FileName                 string `json:"file_name"`
-	FilePath                 string `json:"file_path"`
-	FileLanguage             string `json:"file_language"`
-	CodeSmells               int    `json:"code_smells"`
-	SqaleIndex               int
-	SqaleRating              float64
-	Bugs                     int     `json:"bugs"`
-	ReliabilityRating        string  `json:"reliability_rating"`
-	Vulnerabilities          int     `json:"vulnerabilities"`
-	SecurityRating           string  `json:"security_rating"`
-	SecurityHotspots         int     `json:"security_hotspots"`
-	SecurityHotspotsReviewed float64 `json:"security_hotspots_reviewed"`
-	SecurityReviewRating     string  `json:"security_review_rating"`
-	Ncloc                    int     `json:"ncloc"`
-	Coverage                 float64 `json:"coverage"`
-	LinesToCover             int     `json:"lines_to_cover"`
-	DuplicatedLinesDensity   float64 `json:"duplicated_lines_density"`
-	DuplicatedBlocks         int     `json:"duplicated_blocks"`
+	ProjectKey                          string `gorm:"index;type:varchar(255)"` //domain project key
+	FileName                            string `gorm:"type:varchar(255)"`
+	FilePath                            string
+	FileLanguage                        string `gorm:"type:varchar(20)"`
+	CodeSmells                          int
+	SqaleIndex                          int
+	SqaleRating                         float64
+	Bugs                                int
+	ReliabilityRating                   string `gorm:"type:varchar(20)"`
+	Vulnerabilities                     int
+	SecurityRating                      string `gorm:"type:varchar(20)"`
+	SecurityHotspots                    int
+	SecurityHotspotsReviewed            float64
+	SecurityReviewRating                string `gorm:"type:varchar(20)"`
+	Ncloc                               int    `json:"ncloc"`
+	UnoveredLines                       int
+	LinesToCover                        int     `json:"lines_to_cover"`
+	DuplicatedLinesDensity              float64 `json:"duplicated_lines_density"`
+	DuplicatedBlocks                    int     `json:"duplicated_blocks"`
+	DuplicatedFiles                     int
+	DuplicatedLines                     int
+	EffortToReachMaintainabilityRatingA int
+	Complexity                          int
+	CognitiveComplexity                 int
 }
 
 func (CqFileMetrics) TableName() string {
diff --git a/backend/core/models/migrationscripts/archived/cq_issue_code_blocks.go b/backend/core/models/migrationscripts/archived/cq_issue_code_blocks.go
index 8a33eeb27..326ebb947 100644
--- a/backend/core/models/migrationscripts/archived/cq_issue_code_blocks.go
+++ b/backend/core/models/migrationscripts/archived/cq_issue_code_blocks.go
@@ -20,12 +20,12 @@ package archived
 type CqIssueCodeBlock struct {
 	DomainEntity
 	IssueKey    string `json:"key" gorm:"index"`
-	Component   string `json:"component" gorm:"index"`
-	StartLine   int    `json:"startLine" `
-	EndLine     int    `json:"endLine" `
-	StartOffset int    `json:"startOffset" `
-	EndOffset   int    `json:"endOffset" `
-	Msg         string `json:"msg" `
+	Component   string `gorm:"index"`
+	StartLine   int
+	EndLine     int
+	StartOffset int
+	EndOffset   int
+	Msg         string
 }
 
 func (CqIssueCodeBlock) TableName() string {
diff --git a/backend/core/models/migrationscripts/archived/cq_issues.go b/backend/core/models/migrationscripts/archived/cq_issues.go
index 8652137eb..b17917211 100644
--- a/backend/core/models/migrationscripts/archived/cq_issues.go
+++ b/backend/core/models/migrationscripts/archived/cq_issues.go
@@ -23,21 +23,21 @@ import (
 
 type CqIssue struct {
 	DomainEntity
-	Rule                     string           `json:"rule" gorm:"type:varchar(255)"`
-	Severity                 string           `json:"severity" gorm:"type:varchar(255)"`
-	Component                string           `json:"component" gorm:"type:varchar(255)"`
-	ProjectKey               string           `gorm:"index;type:varchar(255)"` //domain project key
-	Line                     int              `json:"line"`
-	Status                   string           `json:"status" gorm:"type:varchar(255)"`
-	Message                  string           `json:"message"`
-	Debt                     int              `json:"debt"`
-	Effort                   int              `json:"effort"`
-	CommitAuthorEmail        string           `json:"author" gorm:"type:varchar(255)"`
-	Assignee                 string           `json:"assignee" gorm:"type:varchar(255)"`
-	Hash                     string           `json:"hash" gorm:"type:varchar(255)"`
-	Tags                     string           `json:"tags" gorm:"type:varchar(255)"`
-	Type                     string           `json:"type" gorm:"type:varchar(255)"`
-	Scope                    string           `json:"scope" gorm:"type:varchar(255)"`
+	Rule                     string `gorm:"type:varchar(255)"`
+	Severity                 string `gorm:"type:varchar(100)"`
+	Component                string `gorm:"type:varchar(255)"`
+	ProjectKey               string `gorm:"index;type:varchar(100)"` //domain project key
+	Line                     int
+	Status                   string `gorm:"type:varchar(20)"`
+	Message                  string
+	Debt                     int
+	Effort                   int
+	CommitAuthorEmail        string `json:"author" gorm:"type:varchar(255)"`
+	Assignee                 string `json:"assignee" gorm:"type:varchar(255)"`
+	Hash                     string `gorm:"type:varchar(100)"`
+	Tags                     string
+	Type                     string           `gorm:"type:varchar(100)"`
+	Scope                    string           `gorm:"type:varchar(255)"`
 	StartLine                int              `json:"startLine"`
 	EndLine                  int              `json:"endLine"`
 	StartOffset              int              `json:"startOffset"`
diff --git a/backend/core/models/migrationscripts/archived/cq_projects.go b/backend/core/models/migrationscripts/archived/cq_projects.go
index d17763f74..9fc06fcf9 100644
--- a/backend/core/models/migrationscripts/archived/cq_projects.go
+++ b/backend/core/models/migrationscripts/archived/cq_projects.go
@@ -23,11 +23,11 @@ import (
 
 type CqProject struct {
 	DomainEntity
-	Name             string           `json:"name" gorm:"type:varchar(255)"`
-	Qualifier        string           `json:"qualifier" gorm:"type:varchar(255)"`
-	Visibility       string           `json:"visibility" gorm:"type:varchar(64)"`
-	LastAnalysisDate *api.Iso8601Time `json:"lastAnalysisDate"`
-	CommitSha        string           `gorm:"type:varchar(128)"`
+	Name             string `gorm:"type:varchar(255)"`
+	Qualifier        string `gorm:"type:varchar(255)"`
+	Visibility       string `gorm:"type:varchar(64)"`
+	LastAnalysisDate *api.Iso8601Time
+	CommitSha        string `gorm:"type:varchar(128)"`
 }
 
 func (CqProject) TableName() string {
diff --git a/backend/plugins/gitlab/models/project.go b/backend/plugins/gitlab/models/project.go
index 24db88ce2..340dcd65e 100644
--- a/backend/plugins/gitlab/models/project.go
+++ b/backend/plugins/gitlab/models/project.go
@@ -38,7 +38,7 @@ type GitlabProject struct {
 	StarCount               int    `json:"starCount" mapstructure:"StarCount"`
 	ForkedFromProjectId     int    `json:"forkedFromProjectId" mapstructure:"forkedFromProjectId"`
 	ForkedFromProjectWebUrl string `json:"forkedFromProjectWebUrl" mapstructure:"forkedFromProjectWebUrl" gorm:"type:varchar(255)"`
-	HttpUrlToRepo           string `json:"httpUrlToRepo" gorm:"varchar(255)"`
+	HttpUrlToRepo           string `json:"httpUrlToRepo" gorm:"type:varchar(255)"`
 
 	CreatedDate      time.Time  `json:"createdDate" mapstructure:"-"`
 	UpdatedDate      *time.Time `json:"updatedDate" mapstructure:"-"`
diff --git a/backend/plugins/sonarqube/e2e/account_test.go b/backend/plugins/sonarqube/e2e/account_test.go
index b53b042b1..0a98d9f99 100644
--- a/backend/plugins/sonarqube/e2e/account_test.go
+++ b/backend/plugins/sonarqube/e2e/account_test.go
@@ -40,8 +40,8 @@ func TestSonarqubeAccountDataFlow(t *testing.T) {
 	}
 
 	// import raw data table
-	dataflowTester.ImportCsvIntoRawTable("./raw_tables/_raw_sonarqube_accounts.csv",
-		"_raw_sonarqube_accounts")
+	dataflowTester.ImportCsvIntoRawTable("./raw_tables/_raw_sonarqube_api_accounts.csv",
+		"_raw_sonarqube_api_accounts")
 
 	// verify extraction
 	dataflowTester.FlushTabler(&models.SonarqubeAccount{})
diff --git a/backend/plugins/sonarqube/e2e/filemetrics_test.go b/backend/plugins/sonarqube/e2e/filemetrics_test.go
index b59fe15db..9c34ef99a 100644
--- a/backend/plugins/sonarqube/e2e/filemetrics_test.go
+++ b/backend/plugins/sonarqube/e2e/filemetrics_test.go
@@ -32,30 +32,34 @@ func TestSonarqubeFileMetricsDataFlow(t *testing.T) {
 	dataflowTester := e2ehelper.NewDataFlowTester(t, "sonarqube", sonarqube)
 
 	// import raw data table
-	dataflowTester.ImportCsvIntoRawTable("./raw_tables/_raw_sonarqube_filemetrics.csv",
-		"_raw_sonarqube_filemetrics")
+	dataflowTester.ImportCsvIntoRawTable("./raw_tables/_raw_sonarqube_api_filemetrics.csv",
+		"_raw_sonarqube_api_filemetrics")
+	dataflowTester.ImportCsvIntoRawTable("./raw_tables/_raw_sonarqube_api_filemetrics_additional.csv",
+		"_raw_sonarqube_api_filemetrics_additional")
 
 	// Standard data
 	taskData := &tasks.SonarqubeTaskData{
 		Options: &tasks.SonarqubeOptions{
-			ConnectionId: 1,
-			ProjectKey:   "02c1047b-f87c-4c35-a6b5-76c6b607d37f",
+			ConnectionId: 2,
+			ProjectKey:   "testDevLake",
 		},
 	}
 	// Interfered data
 	taskData2 := &tasks.SonarqubeTaskData{
 		Options: &tasks.SonarqubeOptions{
-			ConnectionId: 2,
-			ProjectKey:   "e2c6d5e9-a321-4e8c-b322-03d9599ef962",
+			ConnectionId: 1,
+			ProjectKey:   "testNone",
 		},
 	}
 
 	// verify extraction
-	dataflowTester.FlushTabler(&models.SonarqubeFileMetrics{})
+	dataflowTester.FlushTabler(&models.SonarqubeWholeFileMetrics{})
 	dataflowTester.Subtask(tasks.ExtractFilemetricsMeta, taskData)
+	dataflowTester.Subtask(tasks.ExtractAdditionalFileMetricsMeta, taskData)
 
 	dataflowTester.Subtask(tasks.ExtractFilemetricsMeta, taskData2)
-	dataflowTester.VerifyTableWithOptions(&models.SonarqubeFileMetrics{}, e2ehelper.TableOptions{
+	dataflowTester.Subtask(tasks.ExtractAdditionalFileMetricsMeta, taskData2)
+	dataflowTester.VerifyTableWithOptions(&models.SonarqubeWholeFileMetrics{}, e2ehelper.TableOptions{
 		CSVRelPath:  "./snapshot_tables/_tool_sonarqube_filemetrics.csv",
 		IgnoreTypes: []interface{}{common.NoPKModel{}},
 	})
diff --git a/backend/plugins/sonarqube/e2e/hotspot_test.go b/backend/plugins/sonarqube/e2e/hotspot_test.go
index 32c46838a..8be9024ef 100644
--- a/backend/plugins/sonarqube/e2e/hotspot_test.go
+++ b/backend/plugins/sonarqube/e2e/hotspot_test.go
@@ -32,8 +32,8 @@ func TestSonarqubeHotspotDataFlow(t *testing.T) {
 	dataflowTester := e2ehelper.NewDataFlowTester(t, "sonarqube", sonarqube)
 
 	// import raw data table
-	dataflowTester.ImportCsvIntoRawTable("./raw_tables/_raw_sonarqube_hotspots.csv",
-		"_raw_sonarqube_hotspots")
+	dataflowTester.ImportCsvIntoRawTable("./raw_tables/_raw_sonarqube_api_hotspots.csv",
+		"_raw_sonarqube_api_hotspots")
 
 	// Standard data
 	taskData := &tasks.SonarqubeTaskData{
diff --git a/backend/plugins/sonarqube/e2e/issue_test.go b/backend/plugins/sonarqube/e2e/issue_test.go
index 93788b437..c425224ca 100644
--- a/backend/plugins/sonarqube/e2e/issue_test.go
+++ b/backend/plugins/sonarqube/e2e/issue_test.go
@@ -32,8 +32,8 @@ func TestSonarqubeIssueDataFlow(t *testing.T) {
 	dataflowTester := e2ehelper.NewDataFlowTester(t, "sonarqube", sonarqube)
 
 	// import raw data table
-	dataflowTester.ImportCsvIntoRawTable("./raw_tables/_raw_sonarqube_issues.csv",
-		"_raw_sonarqube_issues")
+	dataflowTester.ImportCsvIntoRawTable("./raw_tables/_raw_sonarqube_api_issues.csv",
+		"_raw_sonarqube_api_issues")
 
 	// Standard data
 	taskData := &tasks.SonarqubeTaskData{
diff --git a/backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_accounts.csv b/backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_api_accounts.csv
similarity index 100%
rename from backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_accounts.csv
rename to backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_api_accounts.csv
diff --git a/backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_api_filemetrics.csv b/backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_api_filemetrics.csv
new file mode 100644
index 000000000..b7619a264
--- /dev/null
+++ b/backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_api_filemetrics.csv
@@ -0,0 +1,7 @@
+"id","params","data","url","input","created_at"
+1,"{""connectionId"":2,""ProjectKey"":""testDevLake""}","{""id"":""AYZ8ryYAvqC8Tm0EnSMh"",""key"":""testDevLake:backend/plugins/tapd/tasks/company_extractor.go"",""name"":""company_extractor.go"",""qualifier"":""FIL"",""path"":""backend/plugins/tapd/tasks/company_extractor.go"",""language"":""go"",""measures"":[{""metric"":""security_review_rating"",""value"":""1.0"",""bestValue"":true},{""metric"":""lines_to_cover"",""value"":""14""},{""metric"":""ncloc"",""value"":""40""},{""metric"":" [...]
+2,"{""connectionId"":2,""ProjectKey"":""testDevLake""}","{""id"":""AYZ8ryYBvqC8Tm0EnSNE"",""key"":""testDevLake:backend/plugins/tapd/e2e/company_test.go"",""name"":""company_test.go"",""qualifier"":""FIL"",""path"":""backend/plugins/tapd/e2e/company_test.go"",""language"":""go"",""measures"":[{""metric"":""security_review_rating"",""value"":""1.0"",""bestValue"":true},{""metric"":""lines_to_cover"",""value"":""7""},{""metric"":""ncloc"",""value"":""41""},{""metric"":""reliability_rating" [...]
+3,"{""connectionId"":2,""ProjectKey"":""testDevLake""}","{""id"":""AYZ8ryYPvqC8Tm0EnSWk"",""key"":""testDevLake:backend/core/models/domainlayer/code/component.go"",""name"":""component.go"",""qualifier"":""FIL"",""path"":""backend/core/models/domainlayer/code/component.go"",""language"":""go"",""measures"":[{""metric"":""security_review_rating"",""value"":""1.0"",""bestValue"":true},{""metric"":""lines_to_cover"",""value"":""1""},{""metric"":""ncloc"",""value"":""9""},{""metric"":""relia [...]
+4,"{""connectionId"":1,""ProjectKey"":""testNone""}","{""id"":""AYZ8ryYRvqC8Tm0EnSbH"",""key"":""testDevLake:backend/mocks/core/plugin/CompositeDataSourcePluginBlueprintV200.go"",""name"":""CompositeDataSourcePluginBlueprintV200.go"",""qualifier"":""FIL"",""path"":""backend/mocks/core/plugin/CompositeDataSourcePluginBlueprintV200.go"",""language"":""go"",""measures"":[{""metric"":""security_review_rating"",""value"":""1.0"",""bestValue"":true},{""metric"":""lines_to_cover"",""value"":""3 [...]
+5,"{""connectionId"":2,""ProjectKey"":""testNone""}","{""id"":""AYZ8ryYRvqC8Tm0EnSba"",""key"":""testDevLake:backend/mocks/core/plugin/CompositeMetricPluginBlueprintV200.go"",""name"":""CompositeMetricPluginBlueprintV200.go"",""qualifier"":""FIL"",""path"":""backend/mocks/core/plugin/CompositeMetricPluginBlueprintV200.go"",""language"":""go"",""measures"":[{""metric"":""security_review_rating"",""value"":""1.0"",""bestValue"":true},{""metric"":""lines_to_cover"",""value"":""28""},{""metr [...]
+6,"{""connectionId"":1,""ProjectKey"":""testDevLake""}","{""id"":""AYZ8ryYRvqC8Tm0EnSbc"",""key"":""testDevLake:backend/mocks/core/plugin/CompositePluginBlueprintV200.go"",""name"":""CompositePluginBlueprintV200.go"",""qualifier"":""FIL"",""path"":""backend/mocks/core/plugin/CompositePluginBlueprintV200.go"",""language"":""go"",""measures"":[{""metric"":""security_review_rating"",""value"":""1.0"",""bestValue"":true},{""metric"":""lines_to_cover"",""value"":""45""},{""metric"":""ncloc"", [...]
diff --git a/backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_api_filemetrics_additional.csv b/backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_api_filemetrics_additional.csv
new file mode 100644
index 000000000..a01b0a5f4
--- /dev/null
+++ b/backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_api_filemetrics_additional.csv
@@ -0,0 +1,7 @@
+"id","params","data","url","input","created_at"
+1,"{""connectionId"":2,""ProjectKey"":""testDevLake""}","{""id"":""AYZ8ryYAvqC8Tm0EnSMh"",""key"":""testDevLake:backend/plugins/tapd/tasks/company_extractor.go"",""name"":""company_extractor.go"",""qualifier"":""FIL"",""path"":""backend/plugins/tapd/tasks/company_extractor.go"",""language"":""go"",""measures"":[{""metric"":""complexity"",""value"":""3""},{""metric"":""cognitive_complexity"",""value"":""3"",""bestValue"":false},{""metric"":""duplicated_lines"",""value"":""10"",""bestValue [...]
+2,"{""connectionId"":2,""ProjectKey"":""testDevLake""}","{""id"":""AYZ8ryYBvqC8Tm0EnSNE"",""key"":""testDevLake:backend/plugins/tapd/e2e/company_test.go"",""name"":""company_test.go"",""qualifier"":""FIL"",""path"":""backend/plugins/tapd/e2e/company_test.go"",""language"":""go"",""measures"":[{""metric"":""complexity"",""value"":""1""},{""metric"":""cognitive_complexity"",""value"":""13"",""bestValue"":true},{""metric"":""duplicated_lines"",""value"":""20"",""bestValue"":true},{""metric" [...]
+3,"{""connectionId"":2,""ProjectKey"":""testDevLake""}","{""id"":""AYZ8ryYPvqC8Tm0EnSWk"",""key"":""testDevLake:backend/core/models/domainlayer/code/component.go"",""name"":""component.go"",""qualifier"":""FIL"",""path"":""backend/core/models/domainlayer/code/component.go"",""language"":""go"",""measures"":[{""metric"":""complexity"",""value"":""1""},{""metric"":""cognitive_complexity"",""value"":""21"",""bestValue"":true},{""metric"":""duplicated_lines"",""value"":""30"",""bestValue"":t [...]
+4,"{""connectionId"":1,""ProjectKey"":""testNone""}","{""id"":""AYZ8ryYRvqC8Tm0EnSbH"",""key"":""testDevLake:backend/mocks/core/plugin/CompositeDataSourcePluginBlueprintV200.go"",""name"":""CompositeDataSourcePluginBlueprintV200.go"",""qualifier"":""FIL"",""path"":""backend/mocks/core/plugin/CompositeDataSourcePluginBlueprintV200.go"",""language"":""go"",""measures"":[{""metric"":""duplicated_lines"",""value"":""31"",""bestValue"":false},{""metric"":""complexity"",""value"":""12""},{""me [...]
+5,"{""connectionId"":2,""ProjectKey"":""testNone""}","{""id"":""AYZ8ryYRvqC8Tm0EnSba"",""key"":""testDevLake:backend/mocks/core/plugin/CompositeMetricPluginBlueprintV200.go"",""name"":""CompositeMetricPluginBlueprintV200.go"",""qualifier"":""FIL"",""path"":""backend/mocks/core/plugin/CompositeMetricPluginBlueprintV200.go"",""language"":""go"",""measures"":[{""metric"":""duplicated_lines"",""value"":""22"",""bestValue"":false},{""metric"":""complexity"",""value"":""10""},{""metric"":""cog [...]
+6,"{""connectionId"":1,""ProjectKey"":""testDevLake""}","{""id"":""AYZ8ryYRvqC8Tm0EnSbc"",""key"":""testDevLake:backend/mocks/core/plugin/CompositePluginBlueprintV200.go"",""name"":""CompositePluginBlueprintV200.go"",""qualifier"":""FIL"",""path"":""backend/mocks/core/plugin/CompositePluginBlueprintV200.go"",""language"":""go"",""measures"":[{""metric"":""duplicated_lines"",""value"":""53"",""bestValue"":false},{""metric"":""complexity"",""value"":""17""},{""metric"":""cognitive_complexi [...]
diff --git a/backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_hotspots.csv b/backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_api_hotspots.csv
similarity index 100%
rename from backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_hotspots.csv
rename to backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_api_hotspots.csv
diff --git a/backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_issues.csv b/backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_api_issues.csv
similarity index 100%
rename from backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_issues.csv
rename to backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_api_issues.csv
diff --git a/backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_filemetrics.csv b/backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_filemetrics.csv
deleted file mode 100644
index b2940880e..000000000
--- a/backend/plugins/sonarqube/e2e/raw_tables/_raw_sonarqube_filemetrics.csv
+++ /dev/null
@@ -1,11 +0,0 @@
-"id","params","data","url","input","created_at"
-1,"{""connectionId"":1,""ProjectKey"":""02c1047b-f87c-4c35-a6b5-76c6b607d37f""}","{""key"":""02c1047b-f87c-4c35-a6b5-76c6b607d37f:db/migrate/20170206220334_add_start_date_to_sponsors_and_paid.rb"",""name"":""20170206220334_add_start_date_to_sponsors_and_paid.rb"",""qualifier"":""FIL"",""path"":""db/migrate/20170206220334_add_start_date_to_sponsors_and_paid.rb"",""language"":""ruby"",""measures"":[{""metric"":""sqale_rating"",""value"":""1.0"",""bestValue"":true},{""metric"":""reliability [...]
-2,"{""connectionId"":1,""ProjectKey"":""02c1047b-f87c-4c35-a6b5-76c6b607d37f""}","{""key"":""02c1047b-f87c-4c35-a6b5-76c6b607d37f:db/migrate/20170208152018_acts_as_follower_migration.rb"",""name"":""20170208152018_acts_as_follower_migration.rb"",""qualifier"":""FIL"",""path"":""db/migrate/20170208152018_acts_as_follower_migration.rb"",""language"":""ruby"",""measures"":[{""metric"":""sqale_rating"",""value"":""1.0"",""bestValue"":true},{""metric"":""reliability_rating"",""value"":""1.0"" [...]
-3,"{""connectionId"":1,""ProjectKey"":""02c1047b-f87c-4c35-a6b5-76c6b607d37f""}","{""key"":""02c1047b-f87c-4c35-a6b5-76c6b607d37f:db/migrate/20170209164016_add_email_follower_notifications.rb"",""name"":""20170209164016_add_email_follower_notifications.rb"",""qualifier"":""FIL"",""path"":""db/migrate/20170209164016_add_email_follower_notifications.rb"",""language"":""ruby"",""measures"":[{""metric"":""sqale_rating"",""value"":""1.0"",""bestValue"":true},{""metric"":""reliability_rating"" [...]
-4,"{""connectionId"":1,""ProjectKey"":""02c1047b-f87c-4c35-a6b5-76c6b607d37f""}","{""key"":""02c1047b-f87c-4c35-a6b5-76c6b607d37f:db/migrate/20170213183337_add_sign_up_information_to_users.rb"",""name"":""20170213183337_add_sign_up_information_to_users.rb"",""qualifier"":""FIL"",""path"":""db/migrate/20170213183337_add_sign_up_information_to_users.rb"",""language"":""ruby"",""measures"":[{""metric"":""sqale_rating"",""value"":""1.0"",""bestValue"":true},{""metric"":""reliability_rating"" [...]
-5,"{""connectionId"":1,""ProjectKey"":""02c1047b-f87c-4c35-a6b5-76c6b607d37f""}","{""key"":""02c1047b-f87c-4c35-a6b5-76c6b607d37f:db/migrate/20170216145500_add_location_fields_etc_to_users.rb"",""name"":""20170216145500_add_location_fields_etc_to_users.rb"",""qualifier"":""FIL"",""path"":""db/migrate/20170216145500_add_location_fields_etc_to_users.rb"",""language"":""ruby"",""measures"":[{""metric"":""sqale_rating"",""value"":""1.0"",""bestValue"":true},{""metric"":""reliability_rating"" [...]
-6,"{""connectionId"":1,""ProjectKey"":""02c1047b-f87c-4c35-a6b5-76c6b607d37f""}","{""key"":""02c1047b-f87c-4c35-a6b5-76c6b607d37f:db/migrate/20170228174838_add_identity_data_to_users.rb"",""name"":""20170228174838_add_identity_data_to_users.rb"",""qualifier"":""FIL"",""path"":""db/migrate/20170228174838_add_identity_data_to_users.rb"",""language"":""ruby"",""measures"":[{""metric"":""sqale_rating"",""value"":""1.0"",""bestValue"":true},{""metric"":""reliability_rating"",""value"":""1.0"" [...]
-7,"{""connectionId"":2,""ProjectKey"":""e2c6d5e9-a321-4e8c-b322-03d9599ef962""}","{""key"":""e2c6d5e9-a321-4e8c-b322-03d9599ef962:db/migrate/20170302152930_add_attributes_to_tags.rb"",""name"":""20170302152930_add_attributes_to_tags.rb"",""qualifier"":""FIL"",""path"":""db/migrate/20170302152930_add_attributes_to_tags.rb"",""language"":""ruby"",""measures"":[{""metric"":""sqale_rating"",""value"":""1.0"",""bestValue"":true},{""metric"":""reliability_rating"",""value"":""1.0"",""bestValue [...]
-8,"{""connectionId"":2,""ProjectKey"":""e2c6d5e9-a321-4e8c-b322-03d9599ef962""}","{""key"":""e2c6d5e9-a321-4e8c-b322-03d9599ef962:db/migrate/20170303171502_add_social_image_to_tags.rb"",""name"":""20170303171502_add_social_image_to_tags.rb"",""qualifier"":""FIL"",""path"":""db/migrate/20170303171502_add_social_image_to_tags.rb"",""language"":""ruby"",""measures"":[{""metric"":""sqale_rating"",""value"":""1.0"",""bestValue"":true},{""metric"":""reliability_rating"",""value"":""1.0"",""bes [...]
-9,"{""connectionId"":1,""ProjectKey"":""xxxx""}","{""key"":""xxxx:db/migrate/20170303180353_create_features.rb"",""name"":""20170303180353_create_features.rb"",""qualifier"":""FIL"",""path"":""db/migrate/20170303180353_create_features.rb"",""language"":""ruby"",""measures"":[{""metric"":""sqale_rating"",""value"":""1.0"",""bestValue"":true},{""metric"":""reliability_rating"",""value"":""1.0"",""bestValue"":true},{""metric"":""security_rating"",""value"":""1.0"",""bestValue"":true},{""met [...]
-10,"{""connectionId"":1,""ProjectKey"":""e2c6d5e9-a321-4e8c-b322-03d9599ef962""}","{""key"":""e2c6d5e9-a321-4e8c-b322-03d9599ef962:db/migrate/20170309162937_add_markdown_character_count_to_comments.rb"",""name"":""20170309162937_add_markdown_character_count_to_comments.rb"",""qualifier"":""FIL"",""path"":""db/migrate/20170309162937_add_markdown_character_count_to_comments.rb"",""language"":""ruby"",""measures"":[{""metric"":""sqale_rating"",""value"":""1.0"",""bestValue"":true},{""metric [...]
diff --git a/backend/plugins/sonarqube/e2e/snapshot_tables/_tool_sonarqube_filemetrics.csv b/backend/plugins/sonarqube/e2e/snapshot_tables/_tool_sonarqube_filemetrics.csv
index 9faef947d..0fbfcfdb1 100644
--- a/backend/plugins/sonarqube/e2e/snapshot_tables/_tool_sonarqube_filemetrics.csv
+++ b/backend/plugins/sonarqube/e2e/snapshot_tables/_tool_sonarqube_filemetrics.csv
@@ -1,9 +1,5 @@
-connection_id,file_metrics_key,project_key,file_name,file_path,file_language,code_smells,sqale_index,sqale_rating,bugs,reliability_rating,vulnerabilities,security_rating,security_hotspots,security_hotspots_reviewed,security_review_rating,ncloc,coverage,lines_to_cover,duplicated_lines_density,duplicated_blocks
-1,02c1047b-f87c-4c35-a6b5-76c6b607d37f:db/migrate/20170206220334_add_start_date_to_sponsors_and_paid.rb,02c1047b-f87c-4c35-a6b5-76c6b607d37f,20170206220334_add_start_date_to_sponsors_and_paid.rb,db/migrate/20170206220334_add_start_date_to_sponsors_and_paid.rb,ruby,0,0,1,0,A,0,A,0,0,A,6,0,2,0,0
-1,02c1047b-f87c-4c35-a6b5-76c6b607d37f:db/migrate/20170208152018_acts_as_follower_migration.rb,02c1047b-f87c-4c35-a6b5-76c6b607d37f,20170208152018_acts_as_follower_migration.rb,db/migrate/20170208152018_acts_as_follower_migration.rb,ruby,0,0,1,0,A,0,A,0,0,A,15,0,8,0,0
-1,02c1047b-f87c-4c35-a6b5-76c6b607d37f:db/migrate/20170209164016_add_email_follower_notifications.rb,02c1047b-f87c-4c35-a6b5-76c6b607d37f,20170209164016_add_email_follower_notifications.rb,db/migrate/20170209164016_add_email_follower_notifications.rb,ruby,0,0,1,0,A,0,A,0,0,A,5,0,1,0,0
-1,02c1047b-f87c-4c35-a6b5-76c6b607d37f:db/migrate/20170213183337_add_sign_up_information_to_users.rb,02c1047b-f87c-4c35-a6b5-76c6b607d37f,20170213183337_add_sign_up_information_to_users.rb,db/migrate/20170213183337_add_sign_up_information_to_users.rb,ruby,0,0,1,0,A,0,A,0,0,A,7,0,3,0,0
-1,02c1047b-f87c-4c35-a6b5-76c6b607d37f:db/migrate/20170216145500_add_location_fields_etc_to_users.rb,02c1047b-f87c-4c35-a6b5-76c6b607d37f,20170216145500_add_location_fields_etc_to_users.rb,db/migrate/20170216145500_add_location_fields_etc_to_users.rb,ruby,0,0,1,0,A,0,A,0,0,A,20,0,16,0,0
-1,02c1047b-f87c-4c35-a6b5-76c6b607d37f:db/migrate/20170228174838_add_identity_data_to_users.rb,02c1047b-f87c-4c35-a6b5-76c6b607d37f,20170228174838_add_identity_data_to_users.rb,db/migrate/20170228174838_add_identity_data_to_users.rb,ruby,0,0,1,0,A,0,A,0,0,A,9,0,5,0,0
-2,e2c6d5e9-a321-4e8c-b322-03d9599ef962:db/migrate/20170302152930_add_attributes_to_tags.rb,e2c6d5e9-a321-4e8c-b322-03d9599ef962,20170302152930_add_attributes_to_tags.rb,db/migrate/20170302152930_add_attributes_to_tags.rb,ruby,0,0,1,0,A,0,A,0,0,A,16,0,12,0,0
-2,e2c6d5e9-a321-4e8c-b322-03d9599ef962:db/migrate/20170303171502_add_social_image_to_tags.rb,e2c6d5e9-a321-4e8c-b322-03d9599ef962,20170303171502_add_social_image_to_tags.rb,db/migrate/20170303171502_add_social_image_to_tags.rb,ruby,0,0,1,0,A,0,A,0,0,A,5,0,1,0,0
+connection_id,file_metrics_key,project_key,file_name,file_path,file_language,code_smells,sqale_index,sqale_rating,bugs,reliability_rating,vulnerabilities,security_rating,security_hotspots,security_hotspots_reviewed,security_review_rating,ncloc,unovered_lines,lines_to_cover,duplicated_lines_density,duplicated_blocks,duplicated_files,duplicated_lines,effort_to_reach_maintainability_rating_a,complexity,cognitive_complexity
+1,testDevLake:backend/mocks/core/plugin/CompositeDataSourcePluginBlueprintV200.go,testNone,CompositeDataSourcePluginBlueprintV200.go,backend/mocks/core/plugin/CompositeDataSourcePluginBlueprintV200.go,go,1,15,1,0,A,0,A,0,0,A,67,0,33,33.29999923706055,1,1,31,0,12,16
+2,testDevLake:backend/core/models/domainlayer/code/component.go,testDevLake,component.go,backend/core/models/domainlayer/code/component.go,go,0,0,1,0,A,0,A,0,0,A,9,0,1,0,0,10,30,4,1,21
+2,testDevLake:backend/plugins/tapd/e2e/company_test.go,testDevLake,company_test.go,backend/plugins/tapd/e2e/company_test.go,go,0,0,1,0,A,0,A,0,0,A,41,0,7,0,0,20,20,2,1,13
+2,testDevLake:backend/plugins/tapd/tasks/company_extractor.go,testDevLake,company_extractor.go,backend/plugins/tapd/tasks/company_extractor.go,go,0,0,1,0,A,0,A,0,0,A,40,0,14,0,0,14,10,3,3,3
diff --git a/backend/plugins/sonarqube/e2e/snapshot_tables/filemetrics.csv b/backend/plugins/sonarqube/e2e/snapshot_tables/filemetrics.csv
index 82470bddb..6464fb2cd 100644
--- a/backend/plugins/sonarqube/e2e/snapshot_tables/filemetrics.csv
+++ b/backend/plugins/sonarqube/e2e/snapshot_tables/filemetrics.csv
@@ -1,7 +1,4 @@
-id,project_key,file_name,file_path,file_language,code_smells,sqale_index,sqale_rating,bugs,reliability_rating,vulnerabilities,security_rating,security_hotspots,security_hotspots_reviewed,security_review_rating,ncloc,coverage,lines_to_cover,duplicated_lines_density,duplicated_blocks
-sonarqube:SonarqubeFileMetrics:1:02c1047b-f87c-4c35-a6b5-76c6b607d37f:db/migrate/20170206220334_add_start_date_to_sponsors_and_paid.rb,sonarqube:SonarqubeProject:1:02c1047b-f87c-4c35-a6b5-76c6b607d37f,20170206220334_add_start_date_to_sponsors_and_paid.rb,db/migrate/20170206220334_add_start_date_to_sponsors_and_paid.rb,ruby,0,0,1,0,A,0,A,0,0,A,6,0,2,0,0
-sonarqube:SonarqubeFileMetrics:1:02c1047b-f87c-4c35-a6b5-76c6b607d37f:db/migrate/20170208152018_acts_as_follower_migration.rb,sonarqube:SonarqubeProject:1:02c1047b-f87c-4c35-a6b5-76c6b607d37f,20170208152018_acts_as_follower_migration.rb,db/migrate/20170208152018_acts_as_follower_migration.rb,ruby,0,0,1,0,A,0,A,0,0,A,15,0,8,0,0
-sonarqube:SonarqubeFileMetrics:1:02c1047b-f87c-4c35-a6b5-76c6b607d37f:db/migrate/20170209164016_add_email_follower_notifications.rb,sonarqube:SonarqubeProject:1:02c1047b-f87c-4c35-a6b5-76c6b607d37f,20170209164016_add_email_follower_notifications.rb,db/migrate/20170209164016_add_email_follower_notifications.rb,ruby,0,0,1,0,A,0,A,0,0,A,5,0,1,0,0
-sonarqube:SonarqubeFileMetrics:1:02c1047b-f87c-4c35-a6b5-76c6b607d37f:db/migrate/20170213183337_add_sign_up_information_to_users.rb,sonarqube:SonarqubeProject:1:02c1047b-f87c-4c35-a6b5-76c6b607d37f,20170213183337_add_sign_up_information_to_users.rb,db/migrate/20170213183337_add_sign_up_information_to_users.rb,ruby,0,0,1,0,A,0,A,0,0,A,7,0,3,0,0
-sonarqube:SonarqubeFileMetrics:1:02c1047b-f87c-4c35-a6b5-76c6b607d37f:db/migrate/20170216145500_add_location_fields_etc_to_users.rb,sonarqube:SonarqubeProject:1:02c1047b-f87c-4c35-a6b5-76c6b607d37f,20170216145500_add_location_fields_etc_to_users.rb,db/migrate/20170216145500_add_location_fields_etc_to_users.rb,ruby,0,0,1,0,A,0,A,0,0,A,20,0,16,0,0
-sonarqube:SonarqubeFileMetrics:1:02c1047b-f87c-4c35-a6b5-76c6b607d37f:db/migrate/20170228174838_add_identity_data_to_users.rb,sonarqube:SonarqubeProject:1:02c1047b-f87c-4c35-a6b5-76c6b607d37f,20170228174838_add_identity_data_to_users.rb,db/migrate/20170228174838_add_identity_data_to_users.rb,ruby,0,0,1,0,A,0,A,0,0,A,9,0,5,0,0
+id,project_key,file_name,file_path,file_language,code_smells,sqale_index,sqale_rating,bugs,reliability_rating,vulnerabilities,security_rating,security_hotspots,security_hotspots_reviewed,security_review_rating,ncloc,unovered_lines,lines_to_cover,duplicated_lines_density,duplicated_blocks,duplicated_files,duplicated_lines,effort_to_reach_maintainability_rating_a,complexity,cognitive_complexity
+sonarqube:SonarqubeFileMetrics:2:testDevLake:backend/core/models/domainlayer/code/component.go,sonarqube:SonarqubeProject:2:testDevLake,component.go,backend/core/models/domainlayer/code/component.go,go,0,0,1,0,A,0,A,0,0,A,9,0,1,0,0,10,30,4,1,21
+sonarqube:SonarqubeFileMetrics:2:testDevLake:backend/plugins/tapd/e2e/company_test.go,sonarqube:SonarqubeProject:2:testDevLake,company_test.go,backend/plugins/tapd/e2e/company_test.go,go,0,0,1,0,A,0,A,0,0,A,41,0,7,0,0,20,20,2,1,13
+sonarqube:SonarqubeFileMetrics:2:testDevLake:backend/plugins/tapd/tasks/company_extractor.go,sonarqube:SonarqubeProject:2:testDevLake,company_extractor.go,backend/plugins/tapd/tasks/company_extractor.go,go,0,0,1,0,A,0,A,0,0,A,40,0,14,0,0,14,10,3,3,3
diff --git a/backend/plugins/sonarqube/impl/impl.go b/backend/plugins/sonarqube/impl/impl.go
index a92953bdf..74e4cf8a6 100644
--- a/backend/plugins/sonarqube/impl/impl.go
+++ b/backend/plugins/sonarqube/impl/impl.go
@@ -85,6 +85,8 @@ func (p Sonarqube) SubTaskMetas() []plugin.SubTaskMeta {
 		tasks.ExtractIssuesMeta,
 		tasks.CollectHotspotsMeta,
 		tasks.ExtractHotspotsMeta,
+		tasks.CollectAdditionalFilemetricsMeta,
+		tasks.ExtractAdditionalFileMetricsMeta,
 		tasks.CollectFilemetricsMeta,
 		tasks.ExtractFilemetricsMeta,
 		tasks.CollectAccountsMeta,
diff --git a/backend/plugins/sonarqube/models/migrationscripts/20230111_add_init_tables.go b/backend/plugins/sonarqube/models/migrationscripts/20230111_add_init_tables.go
index e448a5a6e..a1552fe73 100644
--- a/backend/plugins/sonarqube/models/migrationscripts/20230111_add_init_tables.go
+++ b/backend/plugins/sonarqube/models/migrationscripts/20230111_add_init_tables.go
@@ -51,7 +51,7 @@ func (*addInitTables) Up(basicRes context.BasicRes) errors.Error {
 }
 
 func (*addInitTables) Version() uint64 {
-	return 20230221220030
+	return 20230221220042
 }
 
 func (*addInitTables) Name() string {
diff --git a/backend/plugins/sonarqube/models/migrationscripts/archived/connection.go b/backend/plugins/sonarqube/models/migrationscripts/archived/connection.go
index a7509964f..1337f0667 100644
--- a/backend/plugins/sonarqube/models/migrationscripts/archived/connection.go
+++ b/backend/plugins/sonarqube/models/migrationscripts/archived/connection.go
@@ -38,30 +38,11 @@ type SonarqubeConnection struct {
 	AccessToken    `mapstructure:",squash"`
 }
 
-type TestConnectionRequest struct {
-	Endpoint    string `json:"endpoint"`
-	Proxy       string `json:"proxy"`
-	AccessToken `mapstructure:",squash"`
-}
-
 // For sonarqube, we can `use user_token:`
 type AccessToken struct {
 	Token string `mapstructure:"token" validate:"required" json:"token" encrypt:"yes"`
 }
 
-// This object conforms to what the frontend currently expects.
-type SonarqubeResponse struct {
-	Name string `json:"name"`
-	ID   int    `json:"id"`
-	SonarqubeConnection
-}
-
-// Using User because it requires authentication.
-type ApiUserResponse struct {
-	Id   int
-	Name string `json:"name"`
-}
-
 func (SonarqubeConnection) TableName() string {
 	return "_tool_sonarqube_connections"
 }
diff --git a/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_account.go b/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_account.go
index 418c0a64c..bba3490da 100644
--- a/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_account.go
+++ b/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_account.go
@@ -24,11 +24,11 @@ import (
 type SonarqubeAccount struct {
 	archived.NoPKModel
 	ConnectionId uint64 `gorm:"primaryKey"`
-	Login        string `json:"login" gorm:"primaryKey"`
-	Name         string `json:"name"`
-	Email        string `json:"email"`
-	Active       bool   `json:"active"`
-	Local        bool   `json:"local"`
+	Login        string `gorm:"primaryKey"`
+	Name         string `gorm:"type:varchar(100)"`
+	Email        string `gorm:"type:varchar(100)"`
+	Active       bool
+	Local        bool
 }
 
 func (SonarqubeAccount) TableName() string {
diff --git a/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_file_metrics.go b/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_file_metrics.go
index a288de898..52df2e059 100644
--- a/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_file_metrics.go
+++ b/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_file_metrics.go
@@ -25,24 +25,30 @@ type SonarqubeFileMetrics struct {
 	ConnectionId             uint64 `gorm:"primaryKey"`
 	FileMetricsKey           string `gorm:"primaryKey"`
 	ProjectKey               string `gorm:"index"`
-	FileName                 string
+	FileName                 string `gorm:"type:varchar(255)"`
 	FilePath                 string
-	FileLanguage             string
+	FileLanguage             string `gorm:"type:varchar(20)"`
 	CodeSmells               int
 	SqaleIndex               int
 	SqaleRating              float64
 	Bugs                     int
-	ReliabilityRating        string
+	ReliabilityRating        string `gorm:"type:varchar(20)"`
 	Vulnerabilities          int
-	SecurityRating           string
+	SecurityRating           string `gorm:"type:varchar(20)"`
 	SecurityHotspots         int
 	SecurityHotspotsReviewed float64
-	SecurityReviewRating     string
+	SecurityReviewRating     string `gorm:"type:varchar(20)"`
 	Ncloc                    int
-	Coverage                 float64
+	UnoveredLines            int
 	LinesToCover             int
 	DuplicatedLinesDensity   float64
 	DuplicatedBlocks         int
+	DuplicatedFiles          int
+	DuplicatedLines          int
+
+	EffortToReachMaintainabilityRatingA int
+	Complexity                          int
+	CognitiveComplexity                 int
 	archived.NoPKModel
 }
 
diff --git a/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_hotspot.go b/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_hotspot.go
index ab107f8f6..cc78ca47f 100644
--- a/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_hotspot.go
+++ b/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_hotspot.go
@@ -25,16 +25,16 @@ import (
 type SonarqubeHotspot struct {
 	ConnectionId             uint64 `gorm:"primaryKey"`
 	HotspotKey               string `gorm:"primaryKey"`
-	RuleKey                  string
+	RuleKey                  string `gorm:"type:varchar(255)"`
 	Component                string `gorm:"index"`
 	ProjectKey               string `gorm:"index"`
 	Line                     int
-	Status                   string
+	Status                   string `gorm:"type:varchar(100)"`
 	Message                  string
-	Author                   string
-	Assignee                 string
-	SecurityCategory         string
-	VulnerabilityProbability string
+	Author                   string `gorm:"type:varchar(100)"`
+	Assignee                 string `gorm:"type:varchar(100)"`
+	SecurityCategory         string `gorm:"type:varchar(100)"`
+	VulnerabilityProbability string `gorm:"type:varchar(100)"`
 	CreationDate             *api.Iso8601Time
 	UpdateDate               *api.Iso8601Time
 	archived.NoPKModel
diff --git a/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_issue.go b/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_issue.go
index f457d5570..080f361eb 100644
--- a/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_issue.go
+++ b/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_issue.go
@@ -26,19 +26,19 @@ type SonarqubeIssue struct {
 	ConnectionId uint64 `gorm:"primaryKey"`
 	IssueKey     string `gorm:"primaryKey"`
 	Rule         string `gorm:"type:varchar(255)"`
-	Severity     string `gorm:"type:varchar(255)"`
+	Severity     string `gorm:"type:varchar(100)"`
 	Component    string `gorm:"type:varchar(255)"`
-	ProjectKey   string `gorm:"index;type:varchar(255)"` //domain project key
+	ProjectKey   string `gorm:"index;type:varchar(100)"` //domain project key
 	Line         int
-	Status       string
+	Status       string `gorm:"type:varchar(20)"`
 	Message      string
 	Debt         int
 	Effort       int
-	Author       string
-	Hash         string
+	Author       string `gorm:"type:varchar(100)"`
+	Hash         string `gorm:"type:varchar(100)"`
 	Tags         string
-	Type         string
-	Scope        string
+	Type         string `gorm:"type:varchar(100)"`
+	Scope        string `gorm:"type:varchar(255)"`
 	StartLine    int
 	EndLine      int
 	StartOffset  int
diff --git a/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_issue_code_block.go b/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_issue_code_block.go
index 0cbedc14b..5942806e9 100644
--- a/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_issue_code_block.go
+++ b/backend/plugins/sonarqube/models/migrationscripts/archived/sonarqube_issue_code_block.go
@@ -21,7 +21,7 @@ import "github.com/apache/incubator-devlake/core/models/migrationscripts/archive
 
 type SonarqubeIssueCodeBlock struct {
 	ConnectionId uint64 `gorm:"primaryKey"`
-	Id           string `gorm:"primaryKey"`
+	Id           string `gorm:"primaryKey;type:varchar(100)"`
 	IssueKey     string `gorm:"index"`
 	Component    string `gorm:"index"`
 	StartLine    int
diff --git a/backend/plugins/sonarqube/models/sonarqube_account.go b/backend/plugins/sonarqube/models/sonarqube_account.go
index 233ffb538..c0d3850b1 100644
--- a/backend/plugins/sonarqube/models/sonarqube_account.go
+++ b/backend/plugins/sonarqube/models/sonarqube_account.go
@@ -25,8 +25,8 @@ type SonarqubeAccount struct {
 	common.NoPKModel
 	ConnectionId uint64 `gorm:"primaryKey"`
 	Login        string `json:"login" gorm:"primaryKey"`
-	Name         string `json:"name"`
-	Email        string `json:"email"`
+	Name         string `gorm:"type:varchar(100)"`
+	Email        string `gorm:"type:varchar(100)"`
 	Active       bool   `json:"active"`
 	Local        bool   `json:"local"`
 }
diff --git a/backend/plugins/sonarqube/models/sonarqube_file_metrics.go b/backend/plugins/sonarqube/models/sonarqube_file_metrics.go
index c895e44f2..0bf43c867 100644
--- a/backend/plugins/sonarqube/models/sonarqube_file_metrics.go
+++ b/backend/plugins/sonarqube/models/sonarqube_file_metrics.go
@@ -39,7 +39,7 @@ type SonarqubeFileMetrics struct {
 	SecurityHotspotsReviewed float64
 	SecurityReviewRating     string
 	Ncloc                    int
-	Coverage                 float64
+	UnoveredLines            int
 	LinesToCover             int
 	DuplicatedLinesDensity   float64
 	DuplicatedBlocks         int
@@ -49,3 +49,52 @@ type SonarqubeFileMetrics struct {
 func (SonarqubeFileMetrics) TableName() string {
 	return "_tool_sonarqube_file_metrics"
 }
+
+type SonarqubeAdditionalFileMetrics struct {
+	ConnectionId                        uint64 `gorm:"primaryKey"`
+	FileMetricsKey                      string `gorm:"primaryKey"`
+	DuplicatedFiles                     int
+	DuplicatedLines                     int
+	EffortToReachMaintainabilityRatingA int
+	Complexity                          int
+	CognitiveComplexity                 int
+	common.NoPKModel
+}
+
+func (SonarqubeAdditionalFileMetrics) TableName() string {
+	return "_tool_sonarqube_file_metrics"
+}
+
+type SonarqubeWholeFileMetrics struct {
+	ConnectionId                        uint64 `gorm:"primaryKey"`
+	FileMetricsKey                      string `gorm:"primaryKey"`
+	ProjectKey                          string `gorm:"index"`
+	FileName                            string `gorm:"type:varchar(255)"`
+	FilePath                            string
+	FileLanguage                        string `gorm:"type:varchar(20)"`
+	CodeSmells                          int
+	SqaleIndex                          int
+	SqaleRating                         float64
+	Bugs                                int
+	ReliabilityRating                   string `gorm:"type:varchar(20)"`
+	Vulnerabilities                     int
+	SecurityRating                      string `gorm:"type:varchar(20)"`
+	SecurityHotspots                    int
+	SecurityHotspotsReviewed            float64
+	SecurityReviewRating                string `gorm:"type:varchar(20)"`
+	Ncloc                               int
+	UnoveredLines                       int
+	LinesToCover                        int
+	DuplicatedLinesDensity              float64
+	DuplicatedBlocks                    int
+	DuplicatedFiles                     int
+	DuplicatedLines                     int
+	EffortToReachMaintainabilityRatingA int
+	Complexity                          int
+	CognitiveComplexity                 int
+	common.NoPKModel
+}
+
+func (SonarqubeWholeFileMetrics) TableName() string {
+	return "_tool_sonarqube_file_metrics"
+}
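
All three structs above return the same table name, so they behave as partial views of one physical table: the base extractor creates the rows, the additional extractor fills in the extra columns, and the converter reads the merged result through SonarqubeWholeFileMetrics. A minimal standard-library sketch of the shared-TableName pattern (illustrative only, not part of this commit; the struct names below are made up):

package main

import "fmt"

// Illustrative only: two partial models that map to the same table name,
// mirroring how SonarqubeFileMetrics and SonarqubeAdditionalFileMetrics both
// point at _tool_sonarqube_file_metrics.
type tableNamer interface{ TableName() string }

type baseFileMetrics struct{ Ncloc int }
type additionalFileMetrics struct{ Complexity int }

func (baseFileMetrics) TableName() string       { return "_tool_sonarqube_file_metrics" }
func (additionalFileMetrics) TableName() string { return "_tool_sonarqube_file_metrics" }

func main() {
	for _, m := range []tableNamer{baseFileMetrics{}, additionalFileMetrics{}} {
		fmt.Println(m.TableName()) // writes from both models land in one table
	}
}
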
diff --git a/backend/plugins/sonarqube/models/sonarqube_hotspot.go b/backend/plugins/sonarqube/models/sonarqube_hotspot.go
index 7bb669374..63dc682b9 100644
--- a/backend/plugins/sonarqube/models/sonarqube_hotspot.go
+++ b/backend/plugins/sonarqube/models/sonarqube_hotspot.go
@@ -25,16 +25,16 @@ import (
 type SonarqubeHotspot struct {
 	ConnectionId             uint64 `gorm:"primaryKey"`
 	HotspotKey               string `gorm:"primaryKey"`
-	RuleKey                  string
+	RuleKey                  string `gorm:"type:varchar(255)"`
 	Component                string `gorm:"index"`
 	ProjectKey               string `gorm:"index"`
 	Line                     int
-	Status                   string
+	Status                   string `gorm:"type:varchar(100)"`
 	Message                  string
-	Author                   string
-	Assignee                 string
-	SecurityCategory         string
-	VulnerabilityProbability string
+	Author                   string `gorm:"type:varchar(100)"`
+	Assignee                 string `gorm:"type:varchar(100)"`
+	SecurityCategory         string `gorm:"type:varchar(100)"`
+	VulnerabilityProbability string `gorm:"type:varchar(100)"`
 	CreationDate             *api.Iso8601Time
 	UpdateDate               *api.Iso8601Time
 	common.NoPKModel
diff --git a/backend/plugins/sonarqube/models/sonarqube_issue.go b/backend/plugins/sonarqube/models/sonarqube_issue.go
index 343a7c639..d822a256c 100644
--- a/backend/plugins/sonarqube/models/sonarqube_issue.go
+++ b/backend/plugins/sonarqube/models/sonarqube_issue.go
@@ -24,21 +24,21 @@ import (
 
 type SonarqubeIssue struct {
 	ConnectionId uint64 `gorm:"primaryKey"`
-	IssueKey     string `gorm:"primaryKey"`
+	IssueKey     string `gorm:"primaryKey;type:varchar(100)"`
 	Rule         string `gorm:"type:varchar(255)"`
-	Severity     string `gorm:"type:varchar(255)"`
+	Severity     string `gorm:"type:varchar(100)"`
 	Component    string `gorm:"type:varchar(255)"`
-	ProjectKey   string `gorm:"index;type:varchar(255)"` //domain project key
+	ProjectKey   string `gorm:"index;type:varchar(100)"` //domain project key
 	Line         int
-	Status       string
+	Status       string `gorm:"type:varchar(20)"`
 	Message      string
 	Debt         int
 	Effort       int
-	Author       string
-	Hash         string
+	Author       string `gorm:"type:varchar(100)"`
+	Hash         string `gorm:"type:varchar(100)"`
 	Tags         string
-	Type         string
-	Scope        string
+	Type         string `gorm:"type:varchar(100)"`
+	Scope        string `gorm:"type:varchar(255)"`
 	StartLine    int
 	EndLine      int
 	StartOffset  int
diff --git a/backend/plugins/sonarqube/tasks/accounts_collector.go b/backend/plugins/sonarqube/tasks/accounts_collector.go
index d762abe3d..6da12b493 100644
--- a/backend/plugins/sonarqube/tasks/accounts_collector.go
+++ b/backend/plugins/sonarqube/tasks/accounts_collector.go
@@ -28,7 +28,7 @@ import (
 	helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api"
 )
 
-const RAW_ACCOUNTS_TABLE = "sonarqube_accounts"
+const RAW_ACCOUNTS_TABLE = "sonarqube_api_accounts"
 
 var _ plugin.SubTaskEntryPoint = CollectAccounts
 
diff --git a/backend/plugins/sonarqube/tasks/filemetrics_collector.go b/backend/plugins/sonarqube/tasks/filemetrics_additional_collector.go
similarity index 75%
copy from backend/plugins/sonarqube/tasks/filemetrics_collector.go
copy to backend/plugins/sonarqube/tasks/filemetrics_additional_collector.go
index 5d3aacd1a..6fe73b88e 100644
--- a/backend/plugins/sonarqube/tasks/filemetrics_collector.go
+++ b/backend/plugins/sonarqube/tasks/filemetrics_additional_collector.go
@@ -20,34 +20,30 @@ package tasks
 import (
 	"encoding/json"
 	"fmt"
-	"net/http"
-	"net/url"
-
 	"github.com/apache/incubator-devlake/core/errors"
 	"github.com/apache/incubator-devlake/core/plugin"
 	helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api"
+	"net/http"
+	"net/url"
 )
 
-const RAW_FILEMETRICS_TABLE = "sonarqube_filemetrics"
+var _ plugin.SubTaskEntryPoint = CollectAdditionalFilemetrics
 
-var _ plugin.SubTaskEntryPoint = CollectFilemetrics
+const RAW_FILEMETRICS_ADDITIONAL_TABLE = "sonarqube_api_filemetrics_additional"
 
-func CollectFilemetrics(taskCtx plugin.SubTaskContext) errors.Error {
-	logger := taskCtx.GetLogger()
-	logger.Info("collect filemetrics")
+func CollectAdditionalFilemetrics(taskCtx plugin.SubTaskContext) errors.Error {
+	rawDataSubTaskArgs, data := CreateRawDataSubTaskArgs(taskCtx, RAW_FILEMETRICS_ADDITIONAL_TABLE)
 
-	rawDataSubTaskArgs, data := CreateRawDataSubTaskArgs(taskCtx, RAW_FILEMETRICS_TABLE)
 	collector, err := helper.NewApiCollector(helper.ApiCollectorArgs{
 		RawDataSubTaskArgs: *rawDataSubTaskArgs,
 		ApiClient:          data.ApiClient,
 		PageSize:           100,
-		Incremental:        false,
 		UrlTemplate:        "measures/component_tree",
 		Query: func(reqData *helper.RequestData) (url.Values, errors.Error) {
 			query := url.Values{}
 			query.Set("component", data.Options.ProjectKey)
 			query.Set("qualifiers", "FIL")
-			query.Set("metricKeys", "code_smells,sqale_index,sqale_rating,bugs,reliability_rating,vulnerabilities,security_rating,security_hotspots,security_hotspots_reviewed,security_review_rating,ncloc,coverage,lines_to_cover,duplicated_lines_density,duplicated_blocks")
+			query.Set("metricKeys", "duplicated_lines, duplicated_files, complexity, cognitive_complexity, effort_to_reach_maintainability_rating_a")
 			query.Set("p", fmt.Sprintf("%v", reqData.Pager.Page))
 			query.Set("ps", fmt.Sprintf("%v", reqData.Pager.Size))
 			return query, nil
@@ -66,9 +62,9 @@ func CollectFilemetrics(taskCtx plugin.SubTaskContext) errors.Error {
 	return collector.Execute()
 }
 
-var CollectFilemetricsMeta = plugin.SubTaskMeta{
-	Name:             "CollectFilemetrics",
-	EntryPoint:       CollectFilemetrics,
+var CollectAdditionalFilemetricsMeta = plugin.SubTaskMeta{
+	Name:             "CollectAdditionalFilemetrics",
+	EntryPoint:       CollectAdditionalFilemetrics,
 	EnabledByDefault: true,
 	Description:      "Collect Filemetrics data from Sonarqube api",
 	DomainTypes:      []string{plugin.DOMAIN_TYPE_CODE_QUALITY},
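
For reference, the collector above issues paginated calls to measures/component_tree with the extra metric keys. A rough, self-contained sketch of the query it builds, using made-up values in place of data.Options.ProjectKey and the pager (illustrative only):

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Hypothetical values standing in for data.Options.ProjectKey and reqData.Pager.
	query := url.Values{}
	query.Set("component", "my_project_key")
	query.Set("qualifiers", "FIL")
	query.Set("metricKeys", "duplicated_lines, duplicated_files, complexity, cognitive_complexity, effort_to_reach_maintainability_rating_a")
	query.Set("p", fmt.Sprintf("%v", 1))
	query.Set("ps", fmt.Sprintf("%v", 100))
	fmt.Println("measures/component_tree?" + query.Encode())
}
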
diff --git a/backend/plugins/sonarqube/tasks/hotspots_collector.go b/backend/plugins/sonarqube/tasks/filemetrics_additional_extractor.go
similarity index 50%
copy from backend/plugins/sonarqube/tasks/hotspots_collector.go
copy to backend/plugins/sonarqube/tasks/filemetrics_additional_extractor.go
index b31e17c7d..cb3036c66 100644
--- a/backend/plugins/sonarqube/tasks/hotspots_collector.go
+++ b/backend/plugins/sonarqube/tasks/filemetrics_additional_extractor.go
@@ -19,57 +19,45 @@ package tasks
 
 import (
 	"encoding/json"
-	"fmt"
-	"net/http"
-	"net/url"
-
 	"github.com/apache/incubator-devlake/core/errors"
 	"github.com/apache/incubator-devlake/core/plugin"
 	helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api"
+	"github.com/apache/incubator-devlake/plugins/sonarqube/models"
 )
 
-const RAW_HOTSPOTS_TABLE = "sonarqube_hotspots"
-
-var _ plugin.SubTaskEntryPoint = CollectHotspots
+var _ plugin.SubTaskEntryPoint = ExtractAdditionalFileMetrics
 
-func CollectHotspots(taskCtx plugin.SubTaskContext) errors.Error {
-	logger := taskCtx.GetLogger()
-	logger.Info("collect hotspots")
+func ExtractAdditionalFileMetrics(taskCtx plugin.SubTaskContext) errors.Error {
+	rawDataSubTaskArgs, data := CreateRawDataSubTaskArgs(taskCtx, RAW_FILEMETRICS_ADDITIONAL_TABLE)
 
-	rawDataSubTaskArgs, data := CreateRawDataSubTaskArgs(taskCtx, RAW_HOTSPOTS_TABLE)
-
-	collector, err := helper.NewApiCollector(helper.ApiCollectorArgs{
+	extractor, err := helper.NewApiExtractor(helper.ApiExtractorArgs{
 		RawDataSubTaskArgs: *rawDataSubTaskArgs,
-		ApiClient:          data.ApiClient,
-		PageSize:           100,
-		Incremental:        false,
-		UrlTemplate:        "hotspots/search",
-		Query: func(reqData *helper.RequestData) (url.Values, errors.Error) {
-			query := url.Values{}
-			// no time range
-			query.Set("projectKey", data.Options.ProjectKey)
-			query.Set("p", fmt.Sprintf("%v", reqData.Pager.Page))
-			query.Set("ps", fmt.Sprintf("%v", reqData.Pager.Size))
-			return query, nil
-		},
-		ResponseParser: func(res *http.Response) ([]json.RawMessage, errors.Error) {
-			var resData struct {
-				Data []json.RawMessage `json:"hotspots"`
+
+		Extract: func(resData *helper.RawData) ([]interface{}, errors.Error) {
+			body := &fileMetricsResponse{}
+			err := errors.Convert(json.Unmarshal(resData.Data, body))
+			if err != nil {
+				return nil, err
+			}
+			fileMetrics := &models.SonarqubeAdditionalFileMetrics{
+				ConnectionId:   data.Options.ConnectionId,
+				FileMetricsKey: body.Key,
 			}
-			err := helper.UnmarshalResponse(res, &resData)
-			return resData.Data, err
+			err = setAdditionalMetrics(fileMetrics, body.Measures)
+			return []interface{}{fileMetrics}, err
 		},
 	})
 	if err != nil {
 		return err
 	}
-	return collector.Execute()
+
+	return extractor.Execute()
 }
 
-var CollectHotspotsMeta = plugin.SubTaskMeta{
-	Name:             "CollectHotspots",
-	EntryPoint:       CollectHotspots,
+var ExtractAdditionalFileMetricsMeta = plugin.SubTaskMeta{
+	Name:             "ExtractAdditionalFileMetrics",
+	EntryPoint:       ExtractAdditionalFileMetrics,
 	EnabledByDefault: true,
-	Description:      "Collect Hotspots data from Sonarqube api",
+	Description:      "Extract raw data into tool layer table _tool_sonarqube_file_metrics",
 	DomainTypes:      []string{plugin.DOMAIN_TYPE_CODE_QUALITY},
 }
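
The extractor unmarshals each raw record into fileMetricsResponse and walks its Measures slice. A self-contained sketch of that step, with local mirror types and an invented payload standing in for a real measures/component_tree component (illustrative only):

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// Local mirrors of the plugin's fileMetricsResponse / Measure types, for illustration.
type measure struct {
	Metric string `json:"metric"`
	Value  string `json:"value"`
}
type component struct {
	Key      string    `json:"key"`
	Measures []measure `json:"measures"`
}

func main() {
	// Invented payload; real data comes from the _raw_sonarqube_api_filemetrics_additional records.
	raw := []byte(`{"key":"proj:src/main.go","measures":[
		{"metric":"complexity","value":"12"},
		{"metric":"cognitive_complexity","value":"7"}]}`)

	var c component
	if err := json.Unmarshal(raw, &c); err != nil {
		panic(err)
	}
	for _, m := range c.Measures {
		n, _ := strconv.Atoi(m.Value) // setAdditionalMetrics does this per metric key
		fmt.Printf("%s %s=%d\n", c.Key, m.Metric, n)
	}
}
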
diff --git a/backend/plugins/sonarqube/tasks/filemetrics_collector.go b/backend/plugins/sonarqube/tasks/filemetrics_collector.go
index 5d3aacd1a..ddee2ad03 100644
--- a/backend/plugins/sonarqube/tasks/filemetrics_collector.go
+++ b/backend/plugins/sonarqube/tasks/filemetrics_collector.go
@@ -28,7 +28,7 @@ import (
 	helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api"
 )
 
-const RAW_FILEMETRICS_TABLE = "sonarqube_filemetrics"
+const RAW_FILEMETRICS_TABLE = "sonarqube_api_filemetrics"
 
 var _ plugin.SubTaskEntryPoint = CollectFilemetrics
 
diff --git a/backend/plugins/sonarqube/tasks/filemetrics_convertor.go b/backend/plugins/sonarqube/tasks/filemetrics_convertor.go
index 2cc7d911d..23d567070 100644
--- a/backend/plugins/sonarqube/tasks/filemetrics_convertor.go
+++ b/backend/plugins/sonarqube/tasks/filemetrics_convertor.go
@@ -29,8 +29,6 @@ import (
 	"reflect"
 )
 
-const RAW_PROJECTS_TABLE = "sonarqube_projects"
-
 var ConvertFileMetricsMeta = plugin.SubTaskMeta{
 	Name:             "convertFileMetrics",
 	EntryPoint:       ConvertFileMetrics,
@@ -41,8 +39,8 @@ var ConvertFileMetricsMeta = plugin.SubTaskMeta{
 
 func ConvertFileMetrics(taskCtx plugin.SubTaskContext) errors.Error {
 	db := taskCtx.GetDal()
-	rawDataSubTaskArgs, data := CreateRawDataSubTaskArgs(taskCtx, RAW_PROJECTS_TABLE)
-	cursor, err := db.Cursor(dal.From(sonarqubeModels.SonarqubeFileMetrics{}),
+	rawDataSubTaskArgs, data := CreateRawDataSubTaskArgs(taskCtx, RAW_FILEMETRICS_TABLE)
+	cursor, err := db.Cursor(dal.From(sonarqubeModels.SonarqubeWholeFileMetrics{}),
 		dal.Where("connection_id = ? and project_key = ?", data.Options.ConnectionId, data.Options.ProjectKey))
 	if err != nil {
 		return err
@@ -52,32 +50,37 @@ func ConvertFileMetrics(taskCtx plugin.SubTaskContext) errors.Error {
 	issueIdGen := didgen.NewDomainIdGenerator(&sonarqubeModels.SonarqubeFileMetrics{})
 	projectIdGen := didgen.NewDomainIdGenerator(&sonarqubeModels.SonarqubeProject{})
 	converter, err := api.NewDataConverter(api.DataConverterArgs{
-		InputRowType:       reflect.TypeOf(sonarqubeModels.SonarqubeFileMetrics{}),
+		InputRowType:       reflect.TypeOf(sonarqubeModels.SonarqubeWholeFileMetrics{}),
 		Input:              cursor,
 		RawDataSubTaskArgs: *rawDataSubTaskArgs,
 		Convert: func(inputRow interface{}) ([]interface{}, errors.Error) {
-			sonarqubeFileMetric := inputRow.(*sonarqubeModels.SonarqubeFileMetrics)
+			sonarqubeFileMetric := inputRow.(*sonarqubeModels.SonarqubeWholeFileMetrics)
 			domainFileMetric := &codequality.CqFileMetrics{
-				DomainEntity:             domainlayer.DomainEntity{Id: issueIdGen.Generate(data.Options.ConnectionId, sonarqubeFileMetric.FileMetricsKey)},
-				FileName:                 sonarqubeFileMetric.FileName,
-				FilePath:                 sonarqubeFileMetric.FilePath,
-				FileLanguage:             sonarqubeFileMetric.FileLanguage,
-				ProjectKey:               projectIdGen.Generate(data.Options.ConnectionId, sonarqubeFileMetric.ProjectKey),
-				CodeSmells:               sonarqubeFileMetric.CodeSmells,
-				SqaleIndex:               sonarqubeFileMetric.SqaleIndex,
-				SqaleRating:              sonarqubeFileMetric.SqaleRating,
-				Bugs:                     sonarqubeFileMetric.Bugs,
-				ReliabilityRating:        sonarqubeFileMetric.ReliabilityRating,
-				Vulnerabilities:          sonarqubeFileMetric.Vulnerabilities,
-				SecurityRating:           sonarqubeFileMetric.SecurityRating,
-				SecurityHotspots:         sonarqubeFileMetric.SecurityHotspots,
-				SecurityHotspotsReviewed: sonarqubeFileMetric.SecurityHotspotsReviewed,
-				SecurityReviewRating:     sonarqubeFileMetric.SecurityReviewRating,
-				Ncloc:                    sonarqubeFileMetric.Ncloc,
-				Coverage:                 sonarqubeFileMetric.Coverage,
-				LinesToCover:             sonarqubeFileMetric.LinesToCover,
-				DuplicatedLinesDensity:   sonarqubeFileMetric.DuplicatedLinesDensity,
-				DuplicatedBlocks:         sonarqubeFileMetric.DuplicatedBlocks,
+				DomainEntity:                        domainlayer.DomainEntity{Id: issueIdGen.Generate(data.Options.ConnectionId, sonarqubeFileMetric.FileMetricsKey)},
+				ProjectKey:                          projectIdGen.Generate(data.Options.ConnectionId, sonarqubeFileMetric.ProjectKey),
+				FileName:                            sonarqubeFileMetric.FileName,
+				FilePath:                            sonarqubeFileMetric.FilePath,
+				FileLanguage:                        sonarqubeFileMetric.FileLanguage,
+				CodeSmells:                          sonarqubeFileMetric.CodeSmells,
+				SqaleIndex:                          sonarqubeFileMetric.SqaleIndex,
+				SqaleRating:                         sonarqubeFileMetric.SqaleRating,
+				Bugs:                                sonarqubeFileMetric.Bugs,
+				ReliabilityRating:                   sonarqubeFileMetric.ReliabilityRating,
+				Vulnerabilities:                     sonarqubeFileMetric.Vulnerabilities,
+				SecurityRating:                      sonarqubeFileMetric.SecurityRating,
+				SecurityHotspots:                    sonarqubeFileMetric.SecurityHotspots,
+				SecurityHotspotsReviewed:            sonarqubeFileMetric.SecurityHotspotsReviewed,
+				SecurityReviewRating:                sonarqubeFileMetric.SecurityReviewRating,
+				Ncloc:                               sonarqubeFileMetric.Ncloc,
+				UnoveredLines:                       sonarqubeFileMetric.UnoveredLines,
+				LinesToCover:                        sonarqubeFileMetric.LinesToCover,
+				DuplicatedLinesDensity:              sonarqubeFileMetric.DuplicatedLinesDensity,
+				DuplicatedBlocks:                    sonarqubeFileMetric.DuplicatedBlocks,
+				DuplicatedFiles:                     sonarqubeFileMetric.DuplicatedFiles,
+				DuplicatedLines:                     sonarqubeFileMetric.DuplicatedLines,
+				EffortToReachMaintainabilityRatingA: sonarqubeFileMetric.EffortToReachMaintainabilityRatingA,
+				Complexity:                          sonarqubeFileMetric.Complexity,
+				CognitiveComplexity:                 sonarqubeFileMetric.CognitiveComplexity,
 			}
 			return []interface{}{
 				domainFileMetric,
diff --git a/backend/plugins/sonarqube/tasks/filemetrics_extractor.go b/backend/plugins/sonarqube/tasks/filemetrics_extractor.go
index d3392513e..a107d2283 100644
--- a/backend/plugins/sonarqube/tasks/filemetrics_extractor.go
+++ b/backend/plugins/sonarqube/tasks/filemetrics_extractor.go
@@ -23,7 +23,6 @@ import (
 	"github.com/apache/incubator-devlake/core/plugin"
 	helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api"
 	"github.com/apache/incubator-devlake/plugins/sonarqube/models"
-	"strconv"
 )
 
 var _ plugin.SubTaskEntryPoint = ExtractFilemetrics
@@ -48,85 +47,8 @@ func ExtractFilemetrics(taskCtx plugin.SubTaskContext) errors.Error {
 				FileLanguage:   body.Language,
 				ProjectKey:     data.Options.ProjectKey,
 			}
-			alphabetMap := map[string]string{
-				"1.0": "A",
-				"2.0": "B",
-				"3.0": "C",
-				"4.0": "D",
-				"5.0": "E",
-			}
-			for _, v := range body.Measures {
-				switch v.Metric {
-				case "sqale_index":
-					fileMetrics.SqaleIndex, err = errors.Convert01(strconv.Atoi(v.Value))
-					if err != nil {
-						return nil, err
-					}
-				case "sqale_rating":
-					fileMetrics.SqaleRating, err = errors.Convert01(strconv.ParseFloat(v.Value, 32))
-					if err != nil {
-						return nil, err
-					}
-				case "reliability_rating":
-					fileMetrics.ReliabilityRating = alphabetMap[v.Value]
-				case "security_rating":
-					fileMetrics.SecurityRating = alphabetMap[v.Value]
-				case "security_review_rating":
-					fileMetrics.SecurityReviewRating = alphabetMap[v.Value]
-				case "ncloc":
-					fileMetrics.Ncloc, err = errors.Convert01(strconv.Atoi(v.Value))
-					if err != nil {
-						return nil, err
-					}
-				case "duplicated_blocks":
-					fileMetrics.DuplicatedBlocks, err = errors.Convert01(strconv.Atoi(v.Value))
-					if err != nil {
-						return nil, err
-					}
-
-				case "duplicated_lines_density":
-					fileMetrics.DuplicatedLinesDensity, err = errors.Convert01(strconv.ParseFloat(v.Value, 32))
-					if err != nil {
-						return nil, err
-					}
-				case "code_smells":
-					fileMetrics.CodeSmells, err = errors.Convert01(strconv.Atoi(v.Value))
-					if err != nil {
-						return nil, err
-					}
-				case "bugs":
-					fileMetrics.Bugs, err = errors.Convert01(strconv.Atoi(v.Value))
-					if err != nil {
-						return nil, err
-					}
-				case "vulnerabilities":
-					fileMetrics.Vulnerabilities, err = errors.Convert01(strconv.Atoi(v.Value))
-					if err != nil {
-						return nil, err
-					}
-				case "security_hotspots":
-					fileMetrics.SecurityHotspots, err = errors.Convert01(strconv.Atoi(v.Value))
-					if err != nil {
-						return nil, err
-					}
-				case "security_hotspots_reviewed":
-					fileMetrics.SecurityHotspotsReviewed, err = errors.Convert01(strconv.ParseFloat(v.Value, 32))
-					if err != nil {
-						return nil, err
-					}
-				case "coverage":
-					fileMetrics.Coverage, err = errors.Convert01(strconv.ParseFloat(v.Value, 32))
-					if err != nil {
-						return nil, err
-					}
-				case "lines_to_cover":
-					fileMetrics.LinesToCover, err = errors.Convert01(strconv.Atoi(v.Value))
-					if err != nil {
-						return nil, err
-					}
-				}
-			}
-			return []interface{}{fileMetrics}, nil
+			err = setMetrics(fileMetrics, body.Measures)
+			return []interface{}{fileMetrics}, err
 		},
 	})
 	if err != nil {
@@ -143,17 +65,3 @@ var ExtractFilemetricsMeta = plugin.SubTaskMeta{
 	Description:      "Extract raw data into tool layer table sonarqube_filemetrics",
 	DomainTypes:      []string{plugin.DOMAIN_TYPE_CODE_QUALITY},
 }
-
-type fileMetricsResponse struct {
-	Key       string    `json:"key"`
-	Name      string    `json:"name"`
-	Qualifier string    `json:"qualifier"`
-	Path      string    `json:"path"`
-	Language  string    `json:"language"`
-	Measures  []Measure `json:"measures"`
-}
-type Measure struct {
-	Metric    string `json:"metric"`
-	Value     string `json:"value"`
-	BestValue bool   `json:"bestValue,omitempty"`
-}
diff --git a/backend/plugins/sonarqube/tasks/hotspots_collector.go b/backend/plugins/sonarqube/tasks/hotspots_collector.go
index b31e17c7d..e6ccf547c 100644
--- a/backend/plugins/sonarqube/tasks/hotspots_collector.go
+++ b/backend/plugins/sonarqube/tasks/hotspots_collector.go
@@ -28,7 +28,7 @@ import (
 	helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api"
 )
 
-const RAW_HOTSPOTS_TABLE = "sonarqube_hotspots"
+const RAW_HOTSPOTS_TABLE = "sonarqube_api_hotspots"
 
 var _ plugin.SubTaskEntryPoint = CollectHotspots
 
diff --git a/backend/plugins/sonarqube/tasks/hotspots_convertor.go b/backend/plugins/sonarqube/tasks/hotspots_convertor.go
index 8c198e013..687fe2de3 100644
--- a/backend/plugins/sonarqube/tasks/hotspots_convertor.go
+++ b/backend/plugins/sonarqube/tasks/hotspots_convertor.go
@@ -40,7 +40,7 @@ var ConvertHotspotsMeta = plugin.SubTaskMeta{
 
 func ConvertHotspots(taskCtx plugin.SubTaskContext) errors.Error {
 	db := taskCtx.GetDal()
-	rawDataSubTaskArgs, data := CreateRawDataSubTaskArgs(taskCtx, RAW_PROJECTS_TABLE)
+	rawDataSubTaskArgs, data := CreateRawDataSubTaskArgs(taskCtx, RAW_HOTSPOTS_TABLE)
 	cursor, err := db.Cursor(dal.From(sonarqubeModels.SonarqubeHotspot{}),
 		dal.Where("connection_id = ? and project_key = ?", data.Options.ConnectionId, data.Options.ProjectKey))
 	if err != nil {
diff --git a/backend/plugins/sonarqube/tasks/issues_collector.go b/backend/plugins/sonarqube/tasks/issues_collector.go
index 860dc0506..7e3292a3a 100644
--- a/backend/plugins/sonarqube/tasks/issues_collector.go
+++ b/backend/plugins/sonarqube/tasks/issues_collector.go
@@ -28,7 +28,7 @@ import (
 	helper "github.com/apache/incubator-devlake/helpers/pluginhelper/api"
 )
 
-const RAW_ISSUES_TABLE = "sonarqube_issues"
+const RAW_ISSUES_TABLE = "sonarqube_api_issues"
 
 var _ plugin.SubTaskEntryPoint = CollectIssues
 
diff --git a/backend/plugins/sonarqube/tasks/issues_convertor.go b/backend/plugins/sonarqube/tasks/issues_convertor.go
index 137e82173..503b935bd 100644
--- a/backend/plugins/sonarqube/tasks/issues_convertor.go
+++ b/backend/plugins/sonarqube/tasks/issues_convertor.go
@@ -40,7 +40,7 @@ var ConvertIssuesMeta = plugin.SubTaskMeta{
 
 func ConvertIssues(taskCtx plugin.SubTaskContext) errors.Error {
 	db := taskCtx.GetDal()
-	rawDataSubTaskArgs, data := CreateRawDataSubTaskArgs(taskCtx, RAW_PROJECTS_TABLE)
+	rawDataSubTaskArgs, data := CreateRawDataSubTaskArgs(taskCtx, RAW_ISSUES_TABLE)
 	cursor, err := db.Cursor(dal.From(sonarqubeModels.SonarqubeIssue{}),
 		dal.Where("connection_id = ? and project_key = ?", data.Options.ConnectionId, data.Options.ProjectKey))
 	if err != nil {
diff --git a/backend/plugins/sonarqube/tasks/projects_convertor.go b/backend/plugins/sonarqube/tasks/projects_convertor.go
index 245afab9f..afa6a694d 100644
--- a/backend/plugins/sonarqube/tasks/projects_convertor.go
+++ b/backend/plugins/sonarqube/tasks/projects_convertor.go
@@ -30,6 +30,8 @@ import (
 	sonarqubeModels "github.com/apache/incubator-devlake/plugins/sonarqube/models"
 )
 
+const RAW_PROJECTS_TABLE = "sonarqube_api_projects"
+
 var ConvertProjectsMeta = plugin.SubTaskMeta{
 	Name:             "convertProjects",
 	EntryPoint:       ConvertProjects,
diff --git a/backend/plugins/sonarqube/tasks/shared.go b/backend/plugins/sonarqube/tasks/shared.go
index 43f7d3cdf..8666ff418 100644
--- a/backend/plugins/sonarqube/tasks/shared.go
+++ b/backend/plugins/sonarqube/tasks/shared.go
@@ -26,6 +26,7 @@ import (
 	"github.com/apache/incubator-devlake/plugins/sonarqube/models"
 	"hash"
 	"net/http"
+	"strconv"
 	"unicode"
 )
 
@@ -124,6 +125,139 @@ func convertTimeToMinutes(timeStr string) int {
 		}
 	}
 
-	totalMinutes := days*24*60 + hours*60 + minutes
+	totalMinutes := days*8*60 + hours*60 + minutes
 	return totalMinutes
 }
+
+var alphabetMap = map[string]string{
+	"1.0": "A",
+	"2.0": "B",
+	"3.0": "C",
+	"4.0": "D",
+	"5.0": "E",
+}
+
+type fileMetricsResponse struct {
+	Key       string    `json:"key"`
+	Name      string    `json:"name"`
+	Qualifier string    `json:"qualifier"`
+	Path      string    `json:"path"`
+	Language  string    `json:"language"`
+	Measures  []Measure `json:"measures"`
+}
+type Measure struct {
+	Metric    string `json:"metric"`
+	Value     string `json:"value"`
+	BestValue bool   `json:"bestValue,omitempty"`
+}
+
+// As we have many metrics, we cannot request them all in a single measures/component_tree call, so the base set is parsed here and the remainder in setAdditionalMetrics.
+func setMetrics(fileMetrics *models.SonarqubeFileMetrics, metricsList []Measure) errors.Error {
+	var err errors.Error
+	for _, v := range metricsList {
+		switch v.Metric {
+		case "sqale_index":
+			fileMetrics.SqaleIndex, err = errors.Convert01(strconv.Atoi(v.Value))
+			if err != nil {
+				return err
+			}
+		case "sqale_rating":
+			fileMetrics.SqaleRating, err = errors.Convert01(strconv.ParseFloat(v.Value, 32))
+			if err != nil {
+				return err
+			}
+		case "reliability_rating":
+			fileMetrics.ReliabilityRating = alphabetMap[v.Value]
+		case "security_rating":
+			fileMetrics.SecurityRating = alphabetMap[v.Value]
+		case "security_review_rating":
+			fileMetrics.SecurityReviewRating = alphabetMap[v.Value]
+		case "ncloc":
+			fileMetrics.Ncloc, err = errors.Convert01(strconv.Atoi(v.Value))
+			if err != nil {
+				return err
+			}
+		case "code_smells":
+			fileMetrics.CodeSmells, err = errors.Convert01(strconv.Atoi(v.Value))
+			if err != nil {
+				return err
+			}
+		case "bugs":
+			fileMetrics.Bugs, err = errors.Convert01(strconv.Atoi(v.Value))
+			if err != nil {
+				return err
+			}
+		case "vulnerabilities":
+			fileMetrics.Vulnerabilities, err = errors.Convert01(strconv.Atoi(v.Value))
+			if err != nil {
+				return err
+			}
+		case "security_hotspots":
+			fileMetrics.SecurityHotspots, err = errors.Convert01(strconv.Atoi(v.Value))
+			if err != nil {
+				return err
+			}
+		case "security_hotspots_reviewed":
+			fileMetrics.SecurityHotspotsReviewed, err = errors.Convert01(strconv.ParseFloat(v.Value, 32))
+			if err != nil {
+				return err
+			}
+		case "uncovered_lines":
+			fileMetrics.UnoveredLines, err = errors.Convert01(strconv.Atoi(v.Value))
+			if err != nil {
+				return err
+			}
+		case "lines_to_cover":
+			fileMetrics.LinesToCover, err = errors.Convert01(strconv.Atoi(v.Value))
+			if err != nil {
+				return err
+			}
+		case "duplicated_blocks":
+			fileMetrics.DuplicatedBlocks, err = errors.Convert01(strconv.Atoi(v.Value))
+			if err != nil {
+				return err
+			}
+		case "duplicated_lines_density":
+			fileMetrics.DuplicatedLinesDensity, err = errors.Convert01(strconv.ParseFloat(v.Value, 32))
+			if err != nil {
+				return err
+			}
+		}
+	}
+	return nil
+}
+
+// As we have many metrics, we cannot request them all in a single measures/component_tree call; this helper parses the ones fetched by the additional filemetrics collector.
+func setAdditionalMetrics(fileMetrics *models.SonarqubeAdditionalFileMetrics, metricsList []Measure) errors.Error {
+	var err errors.Error
+	for _, v := range metricsList {
+		switch v.Metric {
+		case "duplicated_lines":
+			fileMetrics.DuplicatedLines, err = errors.Convert01(strconv.Atoi(v.Value))
+			if err != nil {
+				return err
+			}
+		case "duplicated_files":
+			fileMetrics.DuplicatedFiles, err = errors.Convert01(strconv.Atoi(v.Value))
+			if err != nil {
+				return err
+			}
+		case "complexity":
+			fileMetrics.Complexity, err = errors.Convert01(strconv.Atoi(v.Value))
+			if err != nil {
+				return err
+			}
+		case "cognitive_complexity":
+			fileMetrics.CognitiveComplexity, err = errors.Convert01(strconv.Atoi(v.Value))
+			if err != nil {
+				return err
+			}
+		case "effort_to_reach_maintainability_rating_a":
+			fileMetrics.EffortToReachMaintainabilityRatingA, err = errors.Convert01(strconv.Atoi(v.Value))
+			if err != nil {
+				return err
+			}
+		}
+	}
+	return nil
+}
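
For context on setMetrics: SonarQube reports rating metrics as the strings "1.0" through "5.0", which alphabetMap turns into letter grades, while the numeric metrics are parsed with strconv. A tiny standalone sketch of both conversions, with invented values (illustrative only):

package main

import (
	"fmt"
	"strconv"
)

func main() {
	// Ratings arrive as "1.0".."5.0" and are mapped to letters, as in alphabetMap.
	alphabet := map[string]string{"1.0": "A", "2.0": "B", "3.0": "C", "4.0": "D", "5.0": "E"}
	fmt.Println(alphabet["2.0"]) // e.g. security_rating -> "B"

	// Numeric metrics arrive as strings and are parsed per metric key.
	n, _ := strconv.Atoi("42")            // e.g. code_smells
	f, _ := strconv.ParseFloat("3.5", 32) // e.g. duplicated_lines_density
	fmt.Println(n, f)
}
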
diff --git a/backend/plugins/sonarqube/tasks/shared_test.go b/backend/plugins/sonarqube/tasks/shared_test.go
index 0cd211a9e..84f30dd04 100644
--- a/backend/plugins/sonarqube/tasks/shared_test.go
+++ b/backend/plugins/sonarqube/tasks/shared_test.go
@@ -35,8 +35,8 @@ func TestConvertTimeToMinutes(t *testing.T) {
 		//{"1min", 1},
 		//{"30min", 30},
 		//{"1h30min", 90},
-		{"1d1h30min", 1530},
-		{"3d5h10min", 4630},
+		{"1d1h30min", 570},
+		{"3d5h10min", 1750},
 	}
 
 	for _, tc := range testCases {
@@ -103,3 +103,49 @@ func TestGetTotalPagesFromResponse(t *testing.T) {
 		t.Fatalf("Expected %v pages, but got %v", expectedPages, totalPages)
 	}
 }
+
+func TestSetMetrics(t *testing.T) {
+	fileMetrics := &models.SonarqubeFileMetrics{}
+	metricsList := []Measure{
+		{Metric: "sqale_index", Value: "100"},
+		{Metric: "sqale_rating", Value: "1.0"},
+		{Metric: "reliability_rating", Value: "1.0"},
+		{Metric: "security_rating", Value: "2.0"},
+		{Metric: "security_review_rating", Value: "3.0"},
+		{Metric: "ncloc", Value: "500"},
+		{Metric: "code_smells", Value: "10"},
+	}
+
+	err := setMetrics(fileMetrics, metricsList)
+	if err != nil {
+		t.Errorf("setMetrics returned an error: %s", err)
+	}
+
+	if fileMetrics.SqaleIndex != 100 {
+		t.Errorf("SqaleIndex was not set properly")
+	}
+
+	if fileMetrics.SqaleRating != 1.0 {
+		t.Errorf("SqaleRating was not set properly")
+	}
+
+	if fileMetrics.ReliabilityRating != "A" {
+		t.Errorf("ReliabilityRating was not set properly")
+	}
+
+	if fileMetrics.SecurityRating != "B" {
+		t.Errorf("SecurityRating was not set properly")
+	}
+
+	if fileMetrics.SecurityReviewRating != "C" {
+		t.Errorf("SecurityReviewRating was not set properly")
+	}
+
+	if fileMetrics.Ncloc != 500 {
+		t.Errorf("Ncloc was not set properly")
+	}
+
+	if fileMetrics.CodeSmells != 10 {
+		t.Errorf("CodeSmells was not set properly")
+	}
+}
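
The updated expectations reflect that SonarQube counts a day of effort as an 8-hour working day by default, so "3d5h10min" is 3*8*60 + 5*60 + 10 = 1750 minutes and "1d1h30min" is 480 + 60 + 30 = 570. A simplified sketch of that arithmetic (not the plugin's convertTimeToMinutes, which also parses the effort string):

package main

import "fmt"

// Simplified sketch of the working-day conversion the updated test expects:
// one "d" of effort is treated as 8 hours.
func toMinutes(days, hours, minutes int) int {
	return days*8*60 + hours*60 + minutes
}

func main() {
	fmt.Println(toMinutes(1, 1, 30)) // 570
	fmt.Println(toMinutes(3, 5, 10)) // 1750
}
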