Fix scaladoc build errors (#13189)
* Fix scaladoc errors from missing classpath

Remove duplicate scalastyle plugin

* Fix scaladoc warnings

Also enable and fix all feature and deprecation warnings
zachgk authored and nswamy committed Nov 14, 2018
1 parent 2eb76b5 commit 8cb73ef
Showing 23 changed files with 44 additions and 38 deletions.
9 changes: 7 additions & 2 deletions docs/mxdoc.py
@@ -110,8 +110,13 @@ def build_scala(app):
 def build_scala_docs(app):
     """build scala doc and then move the outdir"""
     scala_path = app.builder.srcdir + '/../scala-package'
-    # scaldoc fails on some apis, so exit 0 to pass the check
-    _run_cmd('cd ' + scala_path + '; scaladoc `find . -type f -name "*.scala" | egrep \"\/core|\/infer\" | egrep -v \"Suite|javaapi\"`; exit 0')
+    scala_doc_sources = 'find . -type f -name "*.scala" | egrep \"\.\/core|\.\/infer\" | egrep -v \"Suite\"'
+    scala_doc_classpath = ':'.join([
+        '`find native -name "*.jar" | grep "target/lib/" | tr "\\n" ":" `',
+        '`find macros -name "*-SNAPSHOT.jar" | tr "\\n" ":" `'
+    ])
+    _run_cmd('cd {}; scaladoc `{}` -classpath {} -feature -deprecation'
+             .format(scala_path, scala_doc_sources, scala_doc_classpath))
     dest_path = app.builder.outdir + '/api/scala/docs'
     _run_cmd('rm -rf ' + dest_path)
     _run_cmd('mkdir -p ' + dest_path)
4 changes: 0 additions & 4 deletions scala-package/core/pom.xml
@@ -93,10 +93,6 @@
         <groupId>org.scalastyle</groupId>
         <artifactId>scalastyle-maven-plugin</artifactId>
       </plugin>
-      <plugin>
-        <groupId>org.scalastyle</groupId>
-        <artifactId>scalastyle-maven-plugin</artifactId>
-      </plugin>
     </plugins>
   </build>
   <dependencies>
scala-package/core/src/main/scala/org/apache/mxnet/Context.scala
@@ -17,6 +17,8 @@
 
 package org.apache.mxnet
 
+import scala.language.implicitConversions
+
 object Context {
   val devtype2str = Map(1 -> "cpu", 2 -> "gpu", 3 -> "cpu_pinned")
   val devstr2type = Map("cpu" -> 1, "gpu" -> 2, "cpu_pinned" -> 3)
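
The import added above is tied to the newly passed `-feature` flag: Scala emits a feature warning for any `implicit def` conversion defined without `scala.language.implicitConversions` in scope. A minimal sketch of the rule, with hypothetical names not taken from this commit:

    import scala.language.implicitConversions

    object DeviceConversions {
      // Without the language import above, compiling with -feature warns that
      // implicit conversions should be enabled by making
      // scala.language.implicitConversions visible.
      implicit def devTypeToName(devType: Int): String =
        Map(1 -> "cpu", 2 -> "gpu", 3 -> "cpu_pinned")(devType)
    }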
scala-package/core/src/main/scala/org/apache/mxnet/Executor.scala
@@ -224,7 +224,6 @@ class Executor private[mxnet](private[mxnet] val handle: ExecutorHandle,
   /**
    * Get dictionary representation of argument arrrays.
    * @return The dictionary that maps name of arguments to NDArrays.
-   * @throws IllegalArgumentException if there are duplicated names in the arguments.
    */
   def argDict: Map[String, NDArray] = {
     if (_argDict == null) {
@@ -236,7 +235,6 @@ class Executor private[mxnet](private[mxnet] val handle: ExecutorHandle,
   /**
    * Get dictionary representation of gradient arrays.
    * @return The dictionary that maps name of arguments to gradient arrays.
-   * @throws IllegalArgumentException if there are duplicated names in the grads.
    */
   def gradDict: Map[String, NDArray] = {
     if (_gradDict == null) {
@@ -248,7 +246,6 @@ class Executor private[mxnet](private[mxnet] val handle: ExecutorHandle,
   /**
    * Get dictionary representation of auxiliary states arrays.
    * @return The dictionary that maps name of auxiliary states to NDArrays.
-   * @throws IllegalArgumentException if there are duplicated names in the auxiliary states.
    */
   def auxDict: Map[String, NDArray] = {
     if (_auxDict == null) {
@@ -265,8 +262,6 @@ class Executor private[mxnet](private[mxnet] val handle: ExecutorHandle,
    * Whether allow extra parameters that are not needed by symbol
    * If this is True, no error will be thrown when arg_params or aux_params
    * contain extra parameters that is not needed by the executor.
-   * @throws IllegalArgumentException
-   *           If there is additional parameters in the dict but allow_extra_params=False
    */
   def copyParamsFrom(argParams: Map[String, NDArray],
                      auxParams: Map[String, NDArray],
7 changes: 4 additions & 3 deletions scala-package/core/src/main/scala/org/apache/mxnet/IO.scala
@@ -25,6 +25,7 @@ import org.slf4j.LoggerFactory
 import scala.annotation.varargs
 import scala.collection.immutable.ListMap
 import scala.collection.mutable.ListBuffer
+import scala.language.implicitConversions
 /**
  * IO iterators for loading training & validation data
  */
@@ -340,11 +341,11 @@ abstract class DataIter extends Iterator[DataBatch] {
   def getIndex(): IndexedSeq[Long]
 
   // The name and shape of data provided by this iterator
-  @deprecated
+  @deprecated("Use provideDataDesc instead", "1.3.0")
   def provideData: ListMap[String, Shape]
 
   // The name and shape of label provided by this iterator
-  @deprecated
+  @deprecated("Use provideLabelDesc instead", "1.3.0")
   def provideLabel: ListMap[String, Shape]
 
   // Provide type:DataDesc of the data
@@ -404,7 +405,7 @@ object DataDesc {
     }
   }
 
-  @deprecated
+  @deprecated("Please use DataDesc methods instead", "1.3.0")
   implicit def ListMap2Descs(shapes: ListMap[String, Shape]): IndexedSeq[DataDesc] = {
     if (shapes != null) {
       shapes.map { case (k, s) => new DataDesc(k, s) }.toIndexedSeq
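
The annotation changes above move from the bare form to the two-argument form: with `-deprecation` enabled, a bare `@deprecated` produces its own warning, since the annotation is expected to carry a message and the version it was deprecated in. A minimal sketch of the pattern (hypothetical trait and members):

    trait ExampleIter {
      // First argument: message shown to callers; second: release that deprecated it.
      @deprecated("Use provideDataDesc instead", "1.3.0")
      def provideData: Map[String, Int]

      def provideDataDesc: Map[String, Int]
    }

Call sites of `provideData` now get a warning that names both the replacement and the version.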
scala-package/core/src/main/scala/org/apache/mxnet/KVStore.scala
@@ -286,7 +286,7 @@ class KVStore(private[mxnet] val handle: KVStoreHandle) extends NativeResource {
       case cachedStates: MXKVStoreCachedStates =>
         val bis = new BufferedInputStream (new FileInputStream (fname) )
         try {
-          val bArray = Stream.continually (bis.read).takeWhile (- 1 !=).map (_.toByte).toArray
+          val bArray = Stream.continually (bis.read).takeWhile (_ != -1).map (_.toByte).toArray
          cachedStates.deserializeState(bArray)
         } finally {
           bis.close ()
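
The `takeWhile` rewrite above is behavior-preserving: both predicates stop once `read` returns -1 at end of stream, but `(_ != -1)` is an ordinary placeholder function, whereas the old `(- 1 !=)` operator section is easy to misread as a negation. A standalone sketch of the same read-to-byte-array idiom (hypothetical object and file name):

    import java.io.{BufferedInputStream, FileInputStream}

    object ReadAllBytes {
      def readAllBytes(fname: String): Array[Byte] = {
        val bis = new BufferedInputStream(new FileInputStream(fname))
        try {
          // read() returns -1 at end of stream; stop there, then narrow to bytes
          Stream.continually(bis.read()).takeWhile(_ != -1).map(_.toByte).toArray
        } finally {
          bis.close()
        }
      }
    }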
scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala
@@ -25,6 +25,7 @@ import org.slf4j.LoggerFactory
 
 import scala.collection.mutable
 import scala.collection.mutable.{ArrayBuffer, ListBuffer}
+import scala.language.implicitConversions
 import scala.ref.WeakReference
 
 /**
scala-package/core/src/main/scala/org/apache/mxnet/Optimizer.scala
@@ -144,7 +144,7 @@ abstract class Optimizer extends Serializable {
   def deserializeState(bytes: Array[Byte]): AnyRef
 
   // Set individual learning rate scale for parameters
-  @deprecated("Use setLrMult instead.")
+  @deprecated("Use setLrMult instead.", "0.10.0")
   def setLrScale(lrScale: Map[Int, Float]): Unit = {
     val argsLrScale: Map[Either[Int, String], Float] = lrScale.map { case (k, v) => Left(k) -> v }
     setLrMult(argsLrScale)
scala-package/core/src/main/scala/org/apache/mxnet/ResourceScope.scala
@@ -27,7 +27,7 @@ import scala.util.Try
 import scala.util.control.{ControlThrowable, NonFatal}
 
 /**
-  * This class manages automatically releasing of [[NativeResource]]s
+  * This class manages automatically releasing of `org.apache.mxnet.NativeResource`s
   */
 class ResourceScope extends AutoCloseable {
 
@@ -43,8 +43,8 @@ class ResourceScope extends AutoCloseable {
   ResourceScope.addToThreadLocal(this)
 
   /**
-    * Releases all the [[NativeResource]] by calling
-    * the associated [[NativeResource.close()]] method
+    * Releases all the `org.apache.mxnet.NativeResource` by calling
+    * the associated `org.apache.mxnet.NativeResource.close()` method
     */
   override def close(): Unit = {
     ResourceScope.removeFromThreadLocal(this)
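
The doc-comment edits above replace `[[...]]` wiki links with plain monospace markup: scaladoc resolves a `[[Target]]` link against the documented sources and classpath and warns when it cannot find the target, which is one source of the warnings this commit clears. A sketch of the two styles (hypothetical class):

    /** Resolved as a hyperlink only when the target is documented here: [[ResourceScope]]
      *
      *  Rendered as plain code, no link resolution attempted:
      *  `org.apache.mxnet.NativeResource`
      */
    class LinkStyles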
scala-package/core/src/main/scala/org/apache/mxnet/Symbol.scala
@@ -22,6 +22,7 @@ import org.apache.mxnet.DType.DType
 import org.slf4j.{Logger, LoggerFactory}
 
 import scala.collection.mutable.{ArrayBuffer, ListBuffer}
+import scala.language.implicitConversions
 
 /**
  * Symbolic configuration API of mxnet. <br />
scala-package/core/src/main/scala/org/apache/mxnet/Visualization.scala
@@ -21,6 +21,7 @@ import scala.util.parsing.json._
 import java.io.File
 import java.io.PrintWriter
 import scala.collection.mutable.ArrayBuffer
+import scala.language.postfixOps
 
 object Visualization {
 
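
`scala.language.postfixOps` is the analogous language import for postfix operator notation (calling a no-argument method with no dot), which `-feature` also flags. A minimal sketch, unrelated to this file's contents:

    import scala.language.postfixOps

    object PostfixExample {
      // `toList` is used postfix here: no dot, no parentheses. Without the
      // import above, -feature reports a postfix-operator warning.
      val layers = "conv relu pool" split " " toList
    }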
scala-package/core/src/main/scala/org/apache/mxnet/io/MXDataIter.scala
@@ -158,11 +158,11 @@ private[mxnet] class MXDataIter(private[mxnet] val handle: DataIterHandle,
   }
 
   // The name and shape of data provided by this iterator
-  @deprecated
+  @deprecated("Please use provideDataDesc instead", "1.3.0")
   override def provideData: ListMap[String, Shape] = _provideData
 
   // The name and shape of label provided by this iterator
-  @deprecated
+  @deprecated("Please use provideLabelDesc instead", "1.3.0")
   override def provideLabel: ListMap[String, Shape] = _provideLabel
 
   // Provide type:DataDesc of the data
scala-package/core/src/main/scala/org/apache/mxnet/io/NDArrayIter.scala
@@ -237,11 +237,11 @@ class NDArrayIter(data: IndexedSeq[(DataDesc, NDArray)],
 
 
   // The name and shape of data provided by this iterator
-  @deprecated
+  @deprecated("Please use provideDataDesc instead", "1.3.0")
   override def provideData: ListMap[String, Shape] = _provideData
 
   // The name and shape of label provided by this iterator
-  @deprecated
+  @deprecated("Please use provideLabelDesc instead", "1.3.0")
   override def provideLabel: ListMap[String, Shape] = _provideLabel
 
   // Provide type:DataDesc of the data
scala-package/core/src/main/scala/org/apache/mxnet/io/PrefetchingIter.scala
@@ -178,11 +178,11 @@ class PrefetchingIter(
   override def getPad(): Int = this.currentBatch.pad
 
   // The name and shape of label provided by this iterator
-  @deprecated
+  @deprecated("Please use provideLabelDesc instead", "1.3.0")
   override def provideLabel: ListMap[String, Shape] = this._provideLabel
 
   // The name and shape of data provided by this iterator
-  @deprecated
+  @deprecated("Please use provideDataDesc instead", "1.3.0")
   override def provideData: ListMap[String, Shape] = this._provideData
 
   // Provide type:DataDesc of the data
scala-package/core/src/main/scala/org/apache/mxnet/io/ResizeIter.scala
@@ -134,13 +134,13 @@ class ResizeIter(
   }
 
   // The name and shape of data provided by this iterator
-  @deprecated
+  @deprecated("Please use provideDataDesc instead", "1.3.0")
   override def provideData: ListMap[String, Shape] = {
     dataIter.provideData
   }
 
   // The name and shape of label provided by this iterator
-  @deprecated
+  @deprecated("Please use provideLabelDesc instead", "1.3.0")
   override def provideLabel: ListMap[String, Shape] = {
     dataIter.provideLabel
   }
scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala
@@ -17,6 +17,7 @@
 package org.apache.mxnet.javaapi
 
 import collection.JavaConverters._
+import scala.language.implicitConversions
 
 class Context(val context: org.apache.mxnet.Context) {
 
scala-package/core/src/main/scala/org/apache/mxnet/javaapi/DataDesc.scala
@@ -17,6 +17,8 @@
 
 package org.apache.mxnet.javaapi
 
+import scala.language.implicitConversions
+
 class DataDesc(val dataDesc: org.apache.mxnet.DataDesc) {
 
   def this(name: String, shape: Shape, dType: DType.DType, layout: String) =
scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Shape.scala
@@ -18,6 +18,7 @@
 package org.apache.mxnet.javaapi
 
 import collection.JavaConverters._
+import scala.language.implicitConversions
 
 /**
  * Shape of [[NDArray]] or other data
scala-package/core/src/main/scala/org/apache/mxnet/module/BaseModule.scala
@@ -210,7 +210,7 @@ abstract class BaseModule {
   * @param reset Default is `True`, indicating whether we should reset the data iter before start
   *              doing prediction.
   * @return The return value will be a nested list like
-  *         `[[out1_batch1, out2_batch1, ...], [out1_batch2, out2_batch2, ...]]`
+  *         `[ [out1_batch1, out2_batch1, ...], [out1_batch2, out2_batch2, ...] ]`
   *         This mode is useful because in some cases (e.g. bucketing),
   *         the module does not necessarily produce the same number of outputs.
   */
@@ -501,7 +501,7 @@ abstract class BaseModule {
   * Get outputs of the previous forward computation.
   * @return In the case when data-parallelism is used,
   *         the outputs will be collected from multiple devices.
-  *         The results will look like `[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]`,
+  *         The results will look like `[ [out1_dev1, out1_dev2], [out2_dev1, out2_dev2] ]`,
   *         those `NDArray` might live on different devices.
   */
  def getOutputs(): IndexedSeq[IndexedSeq[NDArray]]
@@ -519,7 +519,7 @@ abstract class BaseModule {
   * Get the gradients to the inputs, computed in the previous backward computation.
   * @return In the case when data-parallelism is used,
   *         the grads will be collected from multiple devices.
-  *         The results will look like `[[grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2]]`,
+  *         The results will look like `[ [grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2] ]`,
   *         those `NDArray` might live on different devices.
   */
  def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]]
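
The bracket spacing in these doc comments is deliberate: scaladoc treats `[[` inside a comment as the start of an entity link, so a nested-list example like `[[out1_dev1, ...]]` parses as an unresolvable link and produces a warning. Spacing the brackets, `[ [ ... ] ]`, keeps the example as literal text. A sketch of the fixed style (hypothetical object and method):

    object OutputsDoc {
      /** @return per-device outputs, e.g. `[ [out1_dev1, out1_dev2], [out2_dev1, out2_dev2] ]`
        *         (brackets spaced so scaladoc does not parse `[[` as a link)
        */
      def getOutputsExample(): IndexedSeq[IndexedSeq[Int]] = IndexedSeq(IndexedSeq(1, 2))
    }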
scala-package/core/src/main/scala/org/apache/mxnet/module/BucketingModule.scala
@@ -339,7 +339,7 @@ class BucketingModule(symGen: AnyRef => (Symbol, IndexedSeq[String], IndexedSeq[
   * Get outputs of the previous forward computation.
   * @return In the case when data-parallelism is used,
   *         the outputs will be collected from multiple devices.
-  *         The results will look like `[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]`,
+  *         The results will look like `[ [out1_dev1, out1_dev2], [out2_dev1, out2_dev2] ]`,
   *         those `NDArray` might live on different devices.
   */
  override def getOutputs(): IndexedSeq[IndexedSeq[NDArray]] = {
@@ -363,7 +363,7 @@
   * Get the gradients to the inputs, computed in the previous backward computation.
   * @return In the case when data-parallelism is used,
   *         the grads will be collected from multiple devices.
-  *         The results will look like `[[grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2]]`,
+  *         The results will look like `[ [grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2] ]`,
   *         those `NDArray` might live on different devices.
   */
  override def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]] = {
scala-package/core/src/main/scala/org/apache/mxnet/module/DataParallelExecutorGroup.scala
@@ -517,7 +517,7 @@ class DataParallelExecutorGroup private[module](
   * Get outputs of the previous forward computation.
   * @return In the case when data-parallelism is used,
   *         the outputs will be collected from multiple devices.
-  *         The results will look like `[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]`,
+  *         The results will look like `[ [out1_dev1, out1_dev2], [out2_dev1, out2_dev2] ]`,
   *         those `NDArray` might live on different devices.
   */
  def getOutputs(): IndexedSeq[IndexedSeq[NDArray]] = {
@@ -539,7 +539,7 @@
   * Get the gradients to the inputs, computed in the previous backward computation.
   * @return In the case when data-parallelism is used,
   *         the grads will be collected from multiple devices.
-  *         The results will look like `[[grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2]]`,
+  *         The results will look like `[ [grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2] ]`,
   *         those `NDArray` might live on different devices.
   */
  def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]] = {
scala-package/core/src/main/scala/org/apache/mxnet/module/Module.scala
@@ -486,7 +486,7 @@ class Module(symbolVar: Symbol,
   * Get outputs of the previous forward computation.
   * @return In the case when data-parallelism is used,
   *         the outputs will be collected from multiple devices.
-  *         The results will look like `[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]`,
+  *         The results will look like `[ [out1_dev1, out1_dev2], [out2_dev1, out2_dev2] ]`,
   *         those `NDArray` might live on different devices.
   */
  def getOutputs(): IndexedSeq[IndexedSeq[NDArray]] = {
@@ -510,7 +510,7 @@
   * Get the gradients to the inputs, computed in the previous backward computation.
   * @return In the case when data-parallelism is used,
   *         the grads will be collected from multiple devices.
-  *         The results will look like `[[grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2]]`,
+  *         The results will look like `[ [grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2] ]`,
   *         those `NDArray` might live on different devices.
   */
  def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]] = {
scala-package/core/src/main/scala/org/apache/mxnet/module/SequentialModule.scala
@@ -346,7 +346,7 @@ class SequentialModule extends BaseModule {
   * Get outputs of the previous forward computation.
   * @return In the case when data-parallelism is used,
   *         the outputs will be collected from multiple devices.
-  *         The results will look like `[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]`,
+  *         The results will look like `[ [out1_dev1, out1_dev2], [out2_dev1, out2_dev2] ]`,
   *         those `NDArray` might live on different devices.
   */
  def getOutputs(): IndexedSeq[IndexedSeq[NDArray]] = {
@@ -370,7 +370,7 @@
   * Get the gradients to the inputs, computed in the previous backward computation.
   * @return In the case when data-parallelism is used,
   *         the grads will be collected from multiple devices.
-  *         The results will look like `[[grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2]]`,
+  *         The results will look like `[ [grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2] ]`,
   *         those `NDArray` might live on different devices.
   */
  def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]] = {
