You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@carbondata.apache.org by "Venkata Ramana G (JIRA)" <ji...@apache.org> on 2017/12/21 17:58:00 UTC

[jira] [Resolved] (CARBONDATA-1914) Dictionary Cache Access Count Maintenance

     [ https://issues.apache.org/jira/browse/CARBONDATA-1914?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Venkata Ramana G resolved CARBONDATA-1914.
------------------------------------------
       Resolution: Fixed
         Assignee: sounak chakraborty
    Fix Version/s: 1.3.0

> Dictionary Cache Access Count Maintenance
> -----------------------------------------
>
>                 Key: CARBONDATA-1914
>                 URL: https://issues.apache.org/jira/browse/CARBONDATA-1914
>             Project: CarbonData
>          Issue Type: Bug
>            Reporter: sounak chakraborty
>            Assignee: sounak chakraborty
>             Fix For: 1.3.0
>
>          Time Spent: 3h 20m
>  Remaining Estimate: 0h
>
> When the dictionary cache is accessed, the access count is atomically incremented, and it must be decremented again once the access completes. This access count prevents the entry from being evicted from the cache when a cache-overflow scenario occurs.
> There are several places in the code where the access count is incremented but never decremented after usage completes. This is a resource leak.
> Test Case
> =======
> Precondition 
> ----------------
> (CarbonCommonConstants.CARBON_MAX_DRIVER_LRU_CACHE_SIZE, "1")
>  (CarbonCommonConstants.CARBON_MAX_EXECUTOR_LRU_CACHE_SIZE, "1")
> spark.sql("drop table if exists carbon_new6").show(200,false)
> spark.sql("drop table if exists carbon_new7").show(200,false)
> spark.sql("drop table if exists carbon_new8").show(200,false)
> //    spark.sql("CREATE TABLE carbon_new6 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES ('dictionary_include'='CUST_NAME')").show(200,false)
> //    spark.sql("LOAD DATA INPATH '/home/root1/data_2000.csv' INTO TABLE carbon_new6 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, INTEGER_COLUMN1')").show(200,false)
> spark.sql("CREATE TABLE carbon_new7 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES ('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, Double_COLUMN2')").show(200,false)
> spark.sql("LOAD DATA INPATH '/home/root1/data_2000.csv' INTO TABLE carbon_new7 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, INTEGER_COLUMN1')").show(200,false)
> spark.sql("LOAD DATA INPATH '/home/root1/data_2000.csv' INTO TABLE carbon_new7 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, INTEGER_COLUMN1')").show(200,false)
> spark.sql("CREATE TABLE carbon_new8 (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES ('dictionary_include'='CUST_NAME, ACTIVE_EMUI_VERSION,BIGINT_COLUMN1,Double_COLUMN1, Double_COLUMN2')").show(200,false)
> spark.sql("LOAD DATA INPATH '/home/root1/data_2000.csv' INTO TABLE carbon_new8 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, INTEGER_COLUMN1')").show(200,false)
> spark.sql("LOAD DATA INPATH '/home/root1/data_2000.csv' INTO TABLE carbon_new8 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2, INTEGER_COLUMN1')").show(200,false)



--
This message was sent by Atlassian JIRA
(v6.4.14#64029)