This repository was archived by the owner on Aug 15, 2019. It is now read-only.
3 files changed (+3, -8 lines).

File 1 of 3: ModelBuilder

@@ -369,7 +369,6 @@ export class ModelBuilder extends ModelBuilderPolymer {
         break;
       }
       case "adagrad": {
-        this.needMomentum = true;
         break;
       }
       default: {
@@ -390,7 +389,7 @@ export class ModelBuilder extends ModelBuilderPolymer {
         return new RMSPropOptimizer(+this.learningRate, +this.gamma);
       }
       case 'adagrad': {
-        return new AdagradOptimizer(+this.learningRate, +this.momentum);
+        return new AdagradOptimizer(+this.learningRate);
       }
       default: {
         throw new Error(`Unknown optimizer "${this.selectedOptimizerName}"`);
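For context on why the momentum field disappears here: Adagrad scales each parameter's step by the accumulated sum of its squared gradients, so its update rule has no momentum or velocity term at all. A minimal sketch of the per-parameter rule, assuming plain numbers rather than the library's Scalar/NDArray types (the eps value mirrors the 1e-6 used in the optimizer below):

// Illustrative Adagrad step for a single parameter; not the library's API.
// The squared-gradient cache is the only state carried between steps, and
// the learning rate is the only hyperparameter; there is no momentum term.
function adagradStep(
    w: number, grad: number, cache: number, learningRate: number,
    eps = 1e-6): {w: number, cache: number} {
  const newCache = cache + grad * grad;
  const newW = w - (learningRate * grad) / (Math.sqrt(newCache) + eps);
  return {w: newW, cache: newCache};
}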
File 2 of 3: AdagradOptimizer

@@ -24,10 +24,8 @@ import {Optimizer} from './optimizer';
 
 export class AdagradOptimizer extends Optimizer {
   constructor(
-      protected learningRate: number, protected momentum: number,
-      specifiedVariableList?: Node[]) {
+      protected learningRate: number, specifiedVariableList?: Node[]) {
     super(learningRate, specifiedVariableList);
-    this.m = Scalar.new(momentum);
     this.eps = Scalar.new(1e-6);
   }
 
@@ -74,12 +72,10 @@ export class AdagradOptimizer extends Optimizer {
 
   dispose() {
     super.dispose();
-    this.m.dispose();
     this.eps.dispose();
     this.accumulatedSquaredGradients.dispose();
   }
 
   private accumulatedSquaredGradients = new TensorArrayMap();
-  private m: Scalar;
   private eps: Scalar;
 }
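As a quick, hedged sanity check of the simplified interface (plain TypeScript, not the library's Graph/Session API), the following standalone loop minimizes f(w) = (w - 3)^2 with the same update, configured by nothing but a learning rate, mirroring the one-argument constructor above:

// Standalone demo: Adagrad on f(w) = (w - 3)^2, with df/dw = 2 * (w - 3).
let w = 0;
let cache = 0;            // accumulated squared gradients (internal state)
const learningRate = 0.1;
const eps = 1e-6;
for (let step = 0; step < 1000; step++) {
  const grad = 2 * (w - 3);
  cache += grad * grad;
  w -= (learningRate * grad) / (Math.sqrt(cache) + eps);
}
console.log(w);  // approaches 3; the effective step decays as the cache grows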
File 3 of 3: Session tests

@@ -354,7 +354,7 @@ describe('Session', () => {
     const y = g.reduceSum(g.add(g.matmul(w, x), b));
 
     const safeMode = true;
-    const optimizer = new AdagradOptimizer(0.1, 0.5);
+    const optimizer = new AdagradOptimizer(0.1);
     const math = new NDArrayMathCPU(safeMode);
     const session = new Session(g, math);
     const inputProvider: InputProvider = {