Commit

apache#13441 [Clojure] Enhance Spec Validations and fix neural style example
hellonico committed Dec 5, 2018
1 parent 2d96340 commit 99a0be8
Showing 2 changed files with 14 additions and 13 deletions.
@@ -193,7 +193,7 @@
     ;;;train

     ;;initialize with random noise
-    img (ndarray/- (random/uniform 0 255 content-np-shape dev) 128)
+    img (ndarray/- (random/uniform 0 255 content-np-shape {:ctx dev}) 128)
     ;;; img (random/uniform -0.1 0.1 content-np-shape dev)
     ;; img content-np
     lr-sched (lr-scheduler/factor-scheduler 10 0.9)
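The fix passes the device to `random/uniform` through its option map (`{:ctx dev}`); before, `dev` sat in the position where the option map belongs, so the context did not take effect as intended. A minimal sketch of the corrected call, assuming a CPU context; the `neural-style.sketch` namespace and the shape are illustrative, not taken from the diff:

(ns neural-style.sketch
  (:require [org.apache.clojure-mxnet.context :as context]
            [org.apache.clojure-mxnet.ndarray :as ndarray]
            [org.apache.clojure-mxnet.random :as random]))

(let [dev (context/cpu)
      content-np-shape [1 3 224 224] ;; illustrative shape
      ;; uniform noise in [0, 255) drawn on the chosen device, shifted down by 128
      img (ndarray/- (random/uniform 0 255 content-np-shape {:ctx dev}) 128)]
  img)
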
25 changes: 13 additions & 12 deletions contrib/clojure-package/src/org/apache/clojure_mxnet/optimizer.clj
@@ -24,10 +24,11 @@
    (org.apache.mxnet.optimizer SGD DCASGD NAG AdaDelta RMSProp AdaGrad Adam SGLD)
    (org.apache.mxnet FactorScheduler)))

-(s/def ::learning-rate float?)
-(s/def ::momentum float?)
-(s/def ::wd float?)
-(s/def ::clip-gradient float?)
+(s/def ::int-or-float (s/or :f float? :i int?))
+(s/def ::learning-rate ::int-or-float)
+(s/def ::momentum ::int-or-float)
+(s/def ::wd ::int-or-float)
+(s/def ::clip-gradient ::int-or-float)
 (s/def ::lr-scheduler #(instance? FactorScheduler %))
 (s/def ::sgd-opts (s/keys :opt-un [::learning-rate ::momentum ::wd ::clip-gradient ::lr-scheduler]))

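The new `::int-or-float` spec relaxes these option specs so hyperparameters validate whether they are given as floats or as plain integers. A quick sketch of the effect on `::sgd-opts`, assuming this namespace loads and is aliased as `optimizer`:

(require '[clojure.spec.alpha :as s]
         '[org.apache.clojure-mxnet.optimizer :as optimizer])

;; an integer :learning-rate now conforms; under the old float?-only
;; specs this map failed validation
(s/valid? :org.apache.clojure-mxnet.optimizer/sgd-opts
          {:learning-rate 1 :momentum 0.9}) ;=> true

;; the constructor takes the same option map
(optimizer/sgd {:learning-rate 1 :momentum 0.9})
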
@@ -43,7 +44,7 @@
   ([]
    (sgd {})))

-(s/def ::lambda float?)
+(s/def ::lambda ::int-or-float)
 (s/def ::dcasgd-opts (s/keys :opt-un [::learning-rate ::momentum ::lambda ::wd ::clip-gradient ::lr-scheduler]))

 (defn dcasgd
@@ -77,9 +78,9 @@
   ([]
    (nag {})))

-(s/def ::rho float?)
-(s/def ::rescale-gradient float?)
-(s/def ::epsilon float?)
+(s/def ::rho ::int-or-float)
+(s/def ::rescale-gradient ::int-or-float)
+(s/def ::epsilon ::int-or-float)
 (s/def ::ada-delta-opts (s/keys :opt-un [::rho ::rescale-gradient ::epsilon ::wd ::clip-gradient]))

 (defn ada-delta
@@ -96,8 +97,8 @@
   ([]
    (ada-delta {})))

-(s/def ::gamma1 float?)
-(s/def ::gamma2 float?)
+(s/def ::gamma1 ::int-or-float)
+(s/def ::gamma2 ::int-or-float)
 (s/def ::rms-prop-opts (s/keys :opt-un [::learning-rate ::rescale-gradient ::gamma1 ::gamma2 ::wd ::clip-gradient]))

 (defn rms-prop
@@ -144,8 +145,8 @@
   ([]
    (ada-grad {})))

-(s/def ::beta1 float?)
-(s/def ::beta2 float?)
+(s/def ::beta1 ::int-or-float)
+(s/def ::beta2 ::int-or-float)
 (s/def ::adam-opts (s/keys :opt-un [::learning-rate ::beta1 ::beta2 ::epsilon ::decay-factor ::wd ::clip-gradient ::lr-scheduler]))

 (defn adam
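The same relaxation applies to the other constructors touched in this file; for example, a sketch assuming the `optimizer` alias from above:

;; integer and float hyperparameters now mix freely under ::int-or-float
(optimizer/rms-prop {:learning-rate 0.001 :gamma1 0.9 :gamma2 0.9})
(optimizer/adam {:learning-rate 1 :beta1 0.9 :beta2 0.999})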
