You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@madlib.apache.org by "Frank McQuillan (JIRA)" <ji...@apache.org> on 2019/04/24 21:14:00 UTC

[jira] [Commented] (MADLIB-1329) MLP warm start not working

    [ https://issues.apache.org/jira/browse/MADLIB-1329?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16825511#comment-16825511 ] 

Frank McQuillan commented on MADLIB-1329:
-----------------------------------------

Testing https://github.com/apache/madlib/pull/376

Seems to work now.

{code}
DROP TABLE IF EXISTS iris_data;
CREATE TABLE iris_data(
    id serial,
    attributes numeric[],
    class_text varchar,
    class integer,
    state varchar
) DISTRIBUTED RANDOMLY;
INSERT INTO iris_data(id, attributes, class_text, class, state) VALUES
(1,ARRAY[5.0,3.2,1.2,0.2],'Iris_setosa',1,'Alaska'),
(2,ARRAY[5.5,3.5,1.3,0.2],'Iris_setosa',1,'Alaska'),
(3,ARRAY[4.9,3.1,1.5,0.1],'Iris_setosa',1,'Alaska'),
(4,ARRAY[4.4,3.0,1.3,0.2],'Iris_setosa',1,'Alaska'),
(5,ARRAY[5.1,3.4,1.5,0.2],'Iris_setosa',1,'Alaska'),
(6,ARRAY[5.0,3.5,1.3,0.3],'Iris_setosa',1,'Alaska'),
(7,ARRAY[4.5,2.3,1.3,0.3],'Iris_setosa',1,'Alaska'),
(8,ARRAY[4.4,3.2,1.3,0.2],'Iris_setosa',1,'Alaska'),
(9,ARRAY[5.0,3.5,1.6,0.6],'Iris_setosa',1,'Alaska'),
(10,ARRAY[5.1,3.8,1.9,0.4],'Iris_setosa',1,'Alaska'),
(11,ARRAY[4.8,3.0,1.4,0.3],'Iris_setosa',1,'Alaska'),
(12,ARRAY[5.1,3.8,1.6,0.2],'Iris_setosa',1,'Alaska'),
(13,ARRAY[5.7,2.8,4.5,1.3],'Iris_versicolor',2,'Alaska'),
(14,ARRAY[6.3,3.3,4.7,1.6],'Iris_versicolor',2,'Alaska'),
(15,ARRAY[4.9,2.4,3.3,1.0],'Iris_versicolor',2,'Alaska'),
(16,ARRAY[6.6,2.9,4.6,1.3],'Iris_versicolor',2,'Alaska'),
(17,ARRAY[5.2,2.7,3.9,1.4],'Iris_versicolor',2,'Alaska'),
(18,ARRAY[5.0,2.0,3.5,1.0],'Iris_versicolor',2,'Alaska'),
(19,ARRAY[5.9,3.0,4.2,1.5],'Iris_versicolor',2,'Alaska'),
(20,ARRAY[6.0,2.2,4.0,1.0],'Iris_versicolor',2,'Alaska'),
(21,ARRAY[6.1,2.9,4.7,1.4],'Iris_versicolor',2,'Alaska'),
(22,ARRAY[5.6,2.9,3.6,1.3],'Iris_versicolor',2,'Alaska'),
(23,ARRAY[6.7,3.1,4.4,1.4],'Iris_versicolor',2,'Alaska'),
(24,ARRAY[5.6,3.0,4.5,1.5],'Iris_versicolor',2,'Alaska'),
(25,ARRAY[5.8,2.7,4.1,1.0],'Iris_versicolor',2,'Alaska'),
(26,ARRAY[6.2,2.2,4.5,1.5],'Iris_versicolor',2,'Alaska'),
(27,ARRAY[5.6,2.5,3.9,1.1],'Iris_versicolor',2,'Alaska'),
(28,ARRAY[5.0,3.4,1.5,0.2],'Iris_setosa',1,'Tennessee'),
(29,ARRAY[4.4,2.9,1.4,0.2],'Iris_setosa',1,'Tennessee'),
(30,ARRAY[4.9,3.1,1.5,0.1],'Iris_setosa',1,'Tennessee'),
(31,ARRAY[5.4,3.7,1.5,0.2],'Iris_setosa',1,'Tennessee'),
(32,ARRAY[4.8,3.4,1.6,0.2],'Iris_setosa',1,'Tennessee'),
(33,ARRAY[4.8,3.0,1.4,0.1],'Iris_setosa',1,'Tennessee'),
(34,ARRAY[4.3,3.0,1.1,0.1],'Iris_setosa',1,'Tennessee'),
(35,ARRAY[5.8,4.0,1.2,0.2],'Iris_setosa',1,'Tennessee'),
(36,ARRAY[5.7,4.4,1.5,0.4],'Iris_setosa',1,'Tennessee'),
(37,ARRAY[5.4,3.9,1.3,0.4],'Iris_setosa',1,'Tennessee'),
(38,ARRAY[6.0,2.9,4.5,1.5],'Iris_versicolor',2,'Tennessee'),
(39,ARRAY[5.7,2.6,3.5,1.0],'Iris_versicolor',2,'Tennessee'),
(40,ARRAY[5.5,2.4,3.8,1.1],'Iris_versicolor',2,'Tennessee'),
(41,ARRAY[5.5,2.4,3.7,1.0],'Iris_versicolor',2,'Tennessee'),
(42,ARRAY[5.8,2.7,3.9,1.2],'Iris_versicolor',2,'Tennessee'),
(43,ARRAY[6.0,2.7,5.1,1.6],'Iris_versicolor',2,'Tennessee'),
(44,ARRAY[5.4,3.0,4.5,1.5],'Iris_versicolor',2,'Tennessee'),
(45,ARRAY[6.0,3.4,4.5,1.6],'Iris_versicolor',2,'Tennessee'),
(46,ARRAY[6.7,3.1,4.7,1.5],'Iris_versicolor',2,'Tennessee'),
(47,ARRAY[6.3,2.3,4.4,1.3],'Iris_versicolor',2,'Tennessee'),
(48,ARRAY[5.6,3.0,4.1,1.3],'Iris_versicolor',2,'Tennessee'),
(49,ARRAY[5.5,2.5,4.0,1.3],'Iris_versicolor',2,'Tennessee'),
(50,ARRAY[5.5,2.6,4.4,1.2],'Iris_versicolor',2,'Tennessee'),
(51,ARRAY[6.1,3.0,4.6,1.4],'Iris_versicolor',2,'Tennessee'),
(52,ARRAY[5.8,2.6,4.0,1.2],'Iris_versicolor',2,'Tennessee');

DROP TABLE IF EXISTS mlp_model, mlp_model_summary, mlp_model_standardization;
SELECT madlib.mlp_classification(
    'iris_data',      -- Source table
    'mlp_model',      -- Destination table
    'attributes',     -- Input features
    'class_text',     -- Label
    ARRAY[5,5,5,5],         -- Number of units per layer
    'learning_rate_init=0.003,
    n_iterations=10
    ,
    tolerance=0',     -- Optimizer params
    'tanh',           -- Activation function
    NULL,             -- Default weight (1)
    FALSE,            -- Warm start
    TRUE             -- Verbose
);

INFO:  Iteration: 1, Loss: <1.35625945699>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 2, Loss: <1.36651421952>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 3, Loss: <1.36375152132>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 4, Loss: <1.36135678198>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 5, Loss: <1.36003050752>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 6, Loss: <1.35904748724>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 7, Loss: <1.35807967399>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 8, Loss: <1.35697355564>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 9, Loss: <1.35561839982>
CONTEXT:  PL/Python function "mlp_classification"
 mlp_classification 
--------------------
 
(1 row)


SELECT madlib.mlp_classification(
    'iris_data',      -- Source table
    'mlp_model',      -- Destination table
    'attributes',     -- Input features
    'class_text',     -- Label
    ARRAY[5,5,5,5],         -- Number of units per layer
    'learning_rate_init=0.003,
    n_iterations=10
    ,
    tolerance=0',     -- Optimizer params
    'tanh',           -- Activation function
    NULL,             -- Default weight (1)
    TRUE,             -- Warm start
    TRUE             -- Verbose
);

INFO:  Iteration: 1, Loss: <1.35166783476>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 2, Loss: <1.34988054997>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 3, Loss: <1.34594490655>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 4, Loss: <1.3402598473>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 5, Loss: <1.33178575282>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 6, Loss: <1.31837076108>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 7, Loss: <1.29562336958>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 8, Loss: <1.25447623278>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 9, Loss: <1.17417748438>
CONTEXT:  PL/Python function "mlp_classification"
 mlp_classification 
--------------------
 
(1 row)


SELECT madlib.mlp_classification(
    'iris_data',      -- Source table
    'mlp_model',      -- Destination table
    'attributes',     -- Input features
    'class_text',     -- Label
    ARRAY[5,5,5,5],         -- Number of units per layer
    'learning_rate_init=0.003,
    n_iterations=10
    ,
    tolerance=0',     -- Optimizer params
    'tanh',           -- Activation function
    NULL,             -- Default weight (1)
    TRUE,             -- Warm start
    TRUE             -- Verbose
);

INFO:  Iteration: 1, Loss: <0.738854126564>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 2, Loss: <0.526882490966>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 3, Loss: <0.319354760958>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 4, Loss: <0.205886915309>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 5, Loss: <0.14547762843>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 6, Loss: <0.110265713284>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 7, Loss: <0.0879825099627>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 8, Loss: <0.0725562017344>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 9, Loss: <0.0614147085913>
CONTEXT:  PL/Python function "mlp_classification"
 mlp_classification 
--------------------
 
(1 row)


SELECT madlib.mlp_classification(
    'iris_data',      -- Source table
    'mlp_model',      -- Destination table
    'attributes',     -- Input features
    'class_text',     -- Label
    ARRAY[5,5,5,5],         -- Number of units per layer
    'learning_rate_init=0.003,
    n_iterations=10
    ,
    tolerance=0',     -- Optimizer params
    'tanh',           -- Activation function
    NULL,             -- Default weight (1)
    TRUE,             -- Warm start
    TRUE             -- Verbose
);

INFO:  Iteration: 1, Loss: <0.0466524478431>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 2, Loss: <0.0428847110436>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 3, Loss: <0.0385294181024>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 4, Loss: <0.0348556514382>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 5, Loss: <0.0317769181477>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 6, Loss: <0.0291668423206>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 7, Loss: <0.0269186308805>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 8, Loss: <0.0249811721867>
CONTEXT:  PL/Python function "mlp_classification"
INFO:  Iteration: 9, Loss: <0.0232811210341>
CONTEXT:  PL/Python function "mlp_classification"
 mlp_classification 
--------------------

(1 row)
{code}




> MLP warm start not working
> --------------------------
>
>                 Key: MADLIB-1329
>                 URL: https://issues.apache.org/jira/browse/MADLIB-1329
>             Project: Apache MADlib
>          Issue Type: Bug
>          Components: Module: Neural Networks
>            Reporter: Frank McQuillan
>            Priority: Major
>             Fix For: v1.16
>
>
> MLP warm start not working when have >1 hidden layers:
> {code}
> madlib=# DROP TABLE IF EXISTS mlp_model, mlp_model_summary, mlp_model_standardization;
> DROP TABLE
> madlib=# -- Set seed so results are reproducible
> madlib=# SELECT setseed(0);
> -[ RECORD 1 ]
> setseed | 
> madlib=# SELECT madlib.mlp_classification(
> madlib(#     'iris_data',      -- Source table
> madlib(#     'mlp_model',      -- Destination table
> madlib(#     'attributes',     -- Input features
> madlib(#     'class_text',     -- Label
> madlib(#     ARRAY[5,5,5,5],         -- Number of units per layer
> madlib(#     'learning_rate_init=0.003,
> madlib'#     n_iterations=25,
> madlib'#     tolerance=0',     -- Optimizer params
> madlib(#     'tanh',           -- Activation function
> madlib(#     NULL,             -- Default weight (1)
> madlib(#     FALSE,            -- No warm start
> madlib(#     TRUE             -- Verbose
> madlib(# );
> INFO:  Iteration: 1, Loss: <1.36043166072>
> INFO:  Iteration: 2, Loss: <1.3499381049>
> INFO:  Iteration: 3, Loss: <1.33443838909>
> INFO:  Iteration: 4, Loss: <1.29191921391>
> INFO:  Iteration: 5, Loss: <1.13514652999>
> INFO:  Iteration: 6, Loss: <0.605807639127>
> INFO:  Iteration: 7, Loss: <0.222521966636>
> INFO:  Iteration: 8, Loss: <0.119710009513>
> INFO:  Iteration: 9, Loss: <0.0798186326376>
> INFO:  Iteration: 10, Loss: <0.059110614479>
> INFO:  Iteration: 11, Loss: <0.0465561473767>
> INFO:  Iteration: 12, Loss: <0.0381870587426>
> INFO:  Iteration: 13, Loss: <0.0322380000338>
> INFO:  Iteration: 14, Loss: <0.0278080860799>
> INFO:  Iteration: 15, Loss: <0.0243910955926>
> INFO:  Iteration: 16, Loss: <0.0216814708136>
> INFO:  Iteration: 17, Loss: <0.019484269578>
> INFO:  Iteration: 18, Loss: <0.0176694972659>
> INFO:  Iteration: 19, Loss: <0.0161472761464>
> INFO:  Iteration: 20, Loss: <0.0148535627994>
> INFO:  Iteration: 21, Loss: <0.0137415416838>
> INFO:  Iteration: 22, Loss: <0.0127762321545>
> INFO:  Iteration: 23, Loss: <0.0119309943525>
> INFO:  Iteration: 24, Loss: <0.0111851991017>
> -[ RECORD 1 ]------+-
> mlp_classification | 
> madlib=# SELECT madlib.mlp_classification(
> madlib(#     'iris_data',      -- Source table
> madlib(#     'mlp_model',      -- Destination table
> madlib(#     'attributes',     -- Input features
> madlib(#     'class_text',     -- Label
> madlib(#     ARRAY[5,5,5,5],         -- Number of units per layer
> madlib(#     'learning_rate_init=0.003,
> madlib'#     n_iterations=25,
> madlib'#     tolerance=0',     -- Optimizer params
> madlib(#     'tanh',           -- Activation function
> madlib(#     NULL,             -- Default weight (1)
> madlib(#     TRUE,             -- Warm start
> madlib(#     TRUE             -- Verbose
> madlib(# );
> INFO:  Iteration: 1, Loss: <0.526775235884>
> INFO:  Iteration: 2, Loss: <0.117145401113>
> INFO:  Iteration: 3, Loss: <0.0602106163466>
> INFO:  Iteration: 4, Loss: <0.04108334178>
> INFO:  Iteration: 5, Loss: <0.0312666961015>
> INFO:  Iteration: 6, Loss: <0.0252605498585>
> INFO:  Iteration: 7, Loss: <0.0211986830952>
> INFO:  Iteration: 8, Loss: <0.01826564738>
> INFO:  Iteration: 9, Loss: <0.0160470744822>
> INFO:  Iteration: 10, Loss: <0.0143096846719>
> INFO:  Iteration: 11, Loss: <0.0129119612879>
> INFO:  Iteration: 12, Loss: <0.011763011155>
> INFO:  Iteration: 13, Loss: <0.0108017593072>
> INFO:  Iteration: 14, Loss: <0.00998563483374>
> INFO:  Iteration: 15, Loss: <0.0092840495142>
> INFO:  Iteration: 16, Loss: <0.00867445508889>
> INFO:  Iteration: 17, Loss: <0.00813986234697>
> INFO:  Iteration: 18, Loss: <0.00766722611383>
> INFO:  Iteration: 19, Loss: <0.00724636272143>
> INFO:  Iteration: 20, Loss: <0.00686920569199>
> INFO:  Iteration: 21, Loss: <0.00652928239318>
> INFO:  Iteration: 22, Loss: <0.00622133870019>
> INFO:  Iteration: 23, Loss: <0.00594106500617>
> INFO:  Iteration: 24, Loss: <0.00568489301496>
> -[ RECORD 1 ]------+-
> mlp_classification | 
> madlib=# SELECT madlib.mlp_classification(
> madlib(#     'iris_data',      -- Source table
> madlib(#     'mlp_model',      -- Destination table
> madlib(#     'attributes',     -- Input features
> madlib(#     'class_text',     -- Label
> madlib(#     ARRAY[5,5,5,5],         -- Number of units per layer
> madlib(#     'learning_rate_init=0.003,
> madlib'#     n_iterations=25,
> madlib'#     tolerance=0',     -- Optimizer params
> madlib(#     'tanh',           -- Activation function
> madlib(#     NULL,             -- Default weight (1)
> madlib(#     TRUE,             -- Warm start
> madlib(#     TRUE             -- Verbose
> madlib(# );
> INFO:  Iteration: 1, Loss: <0.310183199428>
> INFO:  Iteration: 2, Loss: <0.088393889654>
> INFO:  Iteration: 3, Loss: <0.0516018746862>
> INFO:  Iteration: 4, Loss: <0.0371242489164>
> INFO:  Iteration: 5, Loss: <0.0291275739389>
> INFO:  Iteration: 6, Loss: <0.0240085087393>
> INFO:  Iteration: 7, Loss: <0.0204364115942>
> INFO:  Iteration: 8, Loss: <0.0177966114175>
> INFO:  Iteration: 9, Loss: <0.0157638172685>
> INFO:  Iteration: 10, Loss: <0.0141490621788>
> INFO:  Iteration: 11, Loss: <0.0128347944511>
> INFO:  Iteration: 12, Loss: <0.0117439367706>
> INFO:  Iteration: 13, Loss: <0.0108237918575>
> INFO:  Iteration: 14, Loss: <0.0100370813095>
> INFO:  Iteration: 15, Loss: <0.00935667554548>
> INFO:  Iteration: 16, Loss: <0.00876235168295>
> INFO:  Iteration: 17, Loss: <0.00823872217701>
> INFO:  Iteration: 18, Loss: <0.00777386815345>
> INFO:  Iteration: 19, Loss: <0.00735841229808>
> INFO:  Iteration: 20, Loss: <0.0069848745105>
> INFO:  Iteration: 21, Loss: <0.00664721443072>
> INFO:  Iteration: 22, Loss: <0.00634050043973>
> INFO:  Iteration: 23, Loss: <0.00606066608629>
> INFO:  Iteration: 24, Loss: <0.00580432810321>
> -[ RECORD 1 ]------+-
> mlp_classification | 
> madlib=# SELECT madlib.mlp_classification(
> madlib(#     'iris_data',      -- Source table
> madlib(#     'mlp_model',      -- Destination table
> madlib(#     'attributes',     -- Input features
> madlib(#     'class_text',     -- Label
> madlib(#     ARRAY[5,5,5,5],         -- Number of units per layer
> madlib(#     'learning_rate_init=0.003,
> madlib'#     n_iterations=25,
> madlib'#     tolerance=0',     -- Optimizer params
> madlib(#     'tanh',           -- Activation function
> madlib(#     NULL,             -- Default weight (1)
> madlib(#     TRUE,             -- Warm start
> madlib(#     TRUE             -- Verbose
> madlib(# );
> INFO:  Iteration: 1, Loss: <0.241686849256>
> INFO:  Iteration: 2, Loss: <0.0789037812418>
> INFO:  Iteration: 3, Loss: <0.0477383278583>
> INFO:  Iteration: 4, Loss: <0.0349153179967>
> INFO:  Iteration: 5, Loss: <0.0276779503206>
> INFO:  Iteration: 6, Loss: <0.0229787339952>
> INFO:  Iteration: 7, Loss: <0.0196655648511>
> INFO:  Iteration: 8, Loss: <0.0171975669069>
> INFO:  Iteration: 9, Loss: <0.0152849520197>
> INFO:  Iteration: 10, Loss: <0.0137577086149>
> INFO:  Iteration: 11, Loss: <0.0125092120633>
> INFO:  Iteration: 12, Loss: <0.0114690677404>
> INFO:  Iteration: 13, Loss: <0.0105888625494>
> INFO:  Iteration: 14, Loss: <0.00983417252295>
> INFO:  Iteration: 15, Loss: <0.00917983203434>
> INFO:  Iteration: 16, Loss: <0.00860700679725>
> INFO:  Iteration: 17, Loss: <0.0081013143176>
> INFO:  Iteration: 18, Loss: <0.0076515781172>
> INFO:  Iteration: 19, Loss: <0.00724897914204>
> INFO:  Iteration: 20, Loss: <0.00688646377197>
> INFO:  Iteration: 21, Loss: <0.00655832207597>
> INFO:  Iteration: 22, Loss: <0.00625988170135>
> INFO:  Iteration: 23, Loss: <0.00598728196025>
> INFO:  Iteration: 24, Loss: <0.0057373045858>
> -[ RECORD 1 ]------+-
> mlp_classification | 
> {code}
> MLP warm start does seem to work OK for 1 hidden layer:
> {code}
> madlib=# DROP TABLE IF EXISTS mlp_model, mlp_model_summary, mlp_model_standardization;
> DROP TABLE
> madlib=# -- Set seed so results are reproducible
> madlib=# SELECT setseed(0);
> -[ RECORD 1 ]
> setseed | 
> madlib=# SELECT madlib.mlp_classification(
> madlib(#     'iris_data',      -- Source table
> madlib(#     'mlp_model',      -- Destination table
> madlib(#     'attributes',     -- Input features
> madlib(#     'class_text',     -- Label
> madlib(#     ARRAY[5],         -- Number of units per layer
> madlib(#     'learning_rate_init=0.003,
> madlib'#     n_iterations=25,
> madlib'#     tolerance=0',     -- Optimizer params
> madlib(#     'tanh',           -- Activation function
> madlib(#     NULL,             -- Default weight (1)
> madlib(#     FALSE,            -- No warm start
> madlib(#     TRUE             -- Verbose
> madlib(# );
> INFO:  Iteration: 1, Loss: <1.43845150739>
> INFO:  Iteration: 2, Loss: <0.645143112079>
> INFO:  Iteration: 3, Loss: <0.198510249554>
> INFO:  Iteration: 4, Loss: <0.10299445227>
> INFO:  Iteration: 5, Loss: <0.0683773141272>
> INFO:  Iteration: 6, Loss: <0.0508462725678>
> INFO:  Iteration: 7, Loss: <0.0403303687053>
> INFO:  Iteration: 8, Loss: <0.0333482996416>
> INFO:  Iteration: 9, Loss: <0.0283871624887>
> INFO:  Iteration: 10, Loss: <0.02468658589>
> INFO:  Iteration: 11, Loss: <0.0218236746963>
> INFO:  Iteration: 12, Loss: <0.0195449339706>
> INFO:  Iteration: 13, Loss: <0.0176893779887>
> INFO:  Iteration: 14, Loss: <0.0161499639458>
> INFO:  Iteration: 15, Loss: <0.0148528011871>
> INFO:  Iteration: 16, Loss: <0.0137452714235>
> INFO:  Iteration: 17, Loss: <0.0127889068292>
> INFO:  Iteration: 18, Loss: <0.0119549448347>
> INFO:  Iteration: 19, Loss: <0.011221456629>
> INFO:  Iteration: 20, Loss: <0.0105714364345>
> INFO:  Iteration: 21, Loss: <0.00999149680482>
> INFO:  Iteration: 22, Loss: <0.00947095724551>
> INFO:  Iteration: 23, Loss: <0.00900119461623>
> INFO:  Iteration: 24, Loss: <0.00857517170977>
> -[ RECORD 1 ]------+-
> mlp_classification | 
> madlib=# SELECT madlib.mlp_classification(
> madlib(#     'iris_data',      -- Source table
> madlib(#     'mlp_model',      -- Destination table
> madlib(#     'attributes',     -- Input features
> madlib(#     'class_text',     -- Label
> madlib(#     ARRAY[5],         -- Number of units per layer
> madlib(#     'learning_rate_init=0.003,
> madlib'#     n_iterations=25,
> madlib'#     tolerance=0',     -- Optimizer params
> madlib(#     'tanh',           -- Activation function
> madlib(#     NULL,             -- Default weight (1)
> madlib(#     TRUE,             -- Warm start
> madlib(#     TRUE             -- Verbose
> madlib(# );
> INFO:  Iteration: 1, Loss: <0.00783212719879>
> INFO:  Iteration: 2, Loss: <0.00754868929356>
> INFO:  Iteration: 3, Loss: <0.00724537071819>
> INFO:  Iteration: 4, Loss: <0.00696499503481>
> INFO:  Iteration: 5, Loss: <0.0067052209318>
> INFO:  Iteration: 6, Loss: <0.00646387227329>
> INFO:  Iteration: 7, Loss: <0.00623906734339>
> INFO:  Iteration: 8, Loss: <0.00602917134597>
> INFO:  Iteration: 9, Loss: <0.00583275728324>
> INFO:  Iteration: 10, Loss: <0.00564857402972>
> INFO:  Iteration: 11, Loss: <0.00547552010753>
> INFO:  Iteration: 12, Loss: <0.0053126220109>
> INFO:  Iteration: 13, Loss: <0.00515901618608>
> INFO:  Iteration: 14, Loss: <0.00501393396817>
> INFO:  Iteration: 15, Loss: <0.00487668892432>
> INFO:  Iteration: 16, Loss: <0.00474666616715>
> INFO:  Iteration: 17, Loss: <0.00462331328983>
> INFO:  Iteration: 18, Loss: <0.00450613264339>
> INFO:  Iteration: 19, Loss: <0.00439467473002>
> INFO:  Iteration: 20, Loss: <0.00428853252899>
> INFO:  Iteration: 21, Loss: <0.00418733660521>
> INFO:  Iteration: 22, Loss: <0.00409075087755>
> INFO:  Iteration: 23, Loss: <0.00399846894544>
> INFO:  Iteration: 24, Loss: <0.00391021088973>
> -[ RECORD 1 ]------+-
> mlp_classification | 
> madlib=# SELECT madlib.mlp_classification(
>     'iris_data',      -- Source table
>     'mlp_model',      -- Destination table
>     'attributes',     -- Input features
>     'class_text',     -- Label
>     ARRAY[5],         -- Number of units per layer
>     'learning_rate_init=0.003,
>     n_iterations=25,
>     tolerance=0',     -- Optimizer params
>     'tanh',           -- Activation function
>     NULL,             -- Default weight (1)
>     TRUE,             -- Warm start
>     TRUE             -- Verbose
> );
> INFO:  Iteration: 1, Loss: <0.00374476271638>
> INFO:  Iteration: 2, Loss: <0.00367708783353>
> INFO:  Iteration: 3, Loss: <0.00360220992802>
> INFO:  Iteration: 4, Loss: <0.00353024687379>
> INFO:  Iteration: 5, Loss: <0.00346107072758>
> INFO:  Iteration: 6, Loss: <0.00339452362577>
> INFO:  Iteration: 7, Loss: <0.00333045921955>
> INFO:  Iteration: 8, Loss: <0.00326874179276>
> INFO:  Iteration: 9, Loss: <0.00320924531599>
> INFO:  Iteration: 10, Loss: <0.0031518525991>
> INFO:  Iteration: 11, Loss: <0.00309645453113>
> INFO:  Iteration: 12, Loss: <0.00304294939712>
> INFO:  Iteration: 13, Loss: <0.00299124226312>
> INFO:  Iteration: 14, Loss: <0.00294124442135>
> INFO:  Iteration: 15, Loss: <0.00289287288902>
> INFO:  Iteration: 16, Loss: <0.00284604995461>
> INFO:  Iteration: 17, Loss: <0.00280070276667>
> INFO:  Iteration: 18, Loss: <0.0027567629604>
> INFO:  Iteration: 19, Loss: <0.0027141663181>
> INFO:  Iteration: 20, Loss: <0.00267285246002>
> INFO:  Iteration: 21, Loss: <0.00263276456234>
> INFO:  Iteration: 22, Loss: <0.00259384909969>
> INFO:  Iteration: 23, Loss: <0.00255605560962>
> INFO:  Iteration: 24, Loss: <0.00251933647706>
> -[ RECORD 1 ]------+-
> mlp_classification | 
> madlib=# SELECT madlib.mlp_classification(
>     'iris_data',      -- Source table
>     'mlp_model',      -- Destination table
>     'attributes',     -- Input features
>     'class_text',     -- Label
>     ARRAY[5],         -- Number of units per layer
>     'learning_rate_init=0.003,
>     n_iterations=25,
>     tolerance=0',     -- Optimizer params
>     'tanh',           -- Activation function
>     NULL,             -- Default weight (1)
>     TRUE,             -- Warm start
>     TRUE             -- Verbose
> );
> INFO:  Iteration: 1, Loss: <0.00244894389095>
> INFO:  Iteration: 2, Loss: <0.00241947565705>
> INFO:  Iteration: 3, Loss: <0.00238653134239>
> INFO:  Iteration: 4, Loss: <0.00235444586402>
> INFO:  Iteration: 5, Loss: <0.0023232030616>
> INFO:  Iteration: 6, Loss: <0.00229277038229>
> INFO:  Iteration: 7, Loss: <0.00226311685617>
> INFO:  Iteration: 8, Loss: <0.00223421306776>
> INFO:  Iteration: 9, Loss: <0.00220603106015>
> INFO:  Iteration: 10, Loss: <0.00217854424586>
> INFO:  Iteration: 11, Loss: <0.0021517273241>
> INFO:  Iteration: 12, Loss: <0.00212555620409>
> INFO:  Iteration: 13, Loss: <0.00210000793359>
> INFO:  Iteration: 14, Loss: <0.00207506063261>
> INFO:  Iteration: 15, Loss: <0.00205069343161>
> INFO:  Iteration: 16, Loss: <0.0020268864139>
> INFO:  Iteration: 17, Loss: <0.00200362056202>
> INFO:  Iteration: 18, Loss: <0.00198087770756>
> INFO:  Iteration: 19, Loss: <0.00195864048445>
> INFO:  Iteration: 20, Loss: <0.0019368922852>
> INFO:  Iteration: 21, Loss: <0.00191561721998>
> INFO:  Iteration: 22, Loss: <0.00189480007833>
> INFO:  Iteration: 23, Loss: <0.00187442629334>
> INFO:  Iteration: 24, Loss: <0.00185448190796>
> -[ RECORD 1 ]------+-
> mlp_classification | 
> {code}



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)