Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/master' into es/lpt/lpt_to_ngraph_fixes2_with_master
Browse files Browse the repository at this point in the history
  • Loading branch information
eshoguli committed Oct 20, 2020
2 parents 2a3316e + a2e4946 commit e1a5cee
Show file tree
Hide file tree
Showing 480 changed files with 9,027 additions and 7,436 deletions.
4 changes: 3 additions & 1 deletion .ci/openvino-onnx/Jenkinsfile
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,9 @@ def buildDockerImage() {
def runTests() {
sh """
docker run --name ${DOCKER_CONTAINER_NAME} \
--volume ${HOME}/ONNX_CI/onnx-models/.onnx:/root/.onnx ${DOCKER_IMAGE_TAG}
--volume ${HOME}/ONNX_CI/onnx-models-15-Oct/.onnx/model_zoo:/root/.onnx/model_zoo \
--volume ${HOME}/ONNX_CI/onnx-models/.onnx/model_zoo/MSFT:/root/.onnx/model_zoo/MSFT \
${DOCKER_IMAGE_TAG}
"""
}

Expand Down
6 changes: 3 additions & 3 deletions build-instruction.md
Original file line number Diff line number Diff line change
Expand Up @@ -69,13 +69,13 @@ The software was validated on:
cd openvino
git submodule update --init --recursive
```
2. Install build dependencies using the `install_dependencies.sh` script in the
2. Install build dependencies using the `install_build_dependencies.sh` script in the
project root folder.
```sh
chmod +x install_dependencies.sh
chmod +x install_build_dependencies.sh
```
```sh
./install_dependencies.sh
./install_build_dependencies.sh
```
3. By default, the build enables the Inference Engine GPU plugin to infer models
on your Intel® Processor Graphics. This requires you to
Expand Down
117 changes: 117 additions & 0 deletions cmake/api_validator.cmake
Original file line number Diff line number Diff line change
@@ -0,0 +1,117 @@
# Copyright (C) 2020 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#

if(WIN32)
    # "ProgramFiles(x86)" cannot be written directly inside $ENV{} because of the
    # parentheses in the name, so it is routed through a helper variable.
    set(PROGRAMFILES_ENV "ProgramFiles(X86)")
    # Quote the expansion: the value contains spaces, and on hosts where the
    # environment variable is undefined an unquoted empty expansion would drop
    # the argument entirely and make file(TO_CMAKE_PATH) fail with a
    # wrong-argument-count error. CMake documentation recommends always quoting
    # the path argument of TO_CMAKE_PATH.
    file(TO_CMAKE_PATH "$ENV{${PROGRAMFILES_ENV}}" PROGRAMFILES)

    # apivalidator ships with the Windows 10 SDK matching the VS target platform.
    set(UWP_SDK_PATH "${PROGRAMFILES}/Windows Kits/10/bin/${CMAKE_VS_WINDOWS_TARGET_PLATFORM_VERSION}/x64")

    message(STATUS "Trying to find apivalidator in: ${UWP_SDK_PATH}")
    # NOTE: find_host_program is provided elsewhere in the project / toolchain,
    # not by stock CMake — TODO confirm it is always defined before this include.
    find_host_program(UWP_API_VALIDATOR
                      NAMES apivalidator
                      PATHS "${UWP_SDK_PATH}"
                      DOC "ApiValidator for UWP compliance")

    if(UWP_API_VALIDATOR)
        message(STATUS "Found apivalidator: ${UWP_API_VALIDATOR}")
    endif()
endif()

# Depth-first collection of a target plus its transitive shared-library
# dependencies into the list variable API_VALIDATOR_TARGETS, which is
# propagated to the caller via PARENT_SCOPE at every exit point.
function(_ie_add_api_validator_post_build_step_recursive)
    cmake_parse_arguments(API_VALIDATOR "" "TARGET" "" ${ARGN})

    # Record the current target and publish the accumulated list immediately,
    # so every early return below still hands the result back to the caller.
    list(APPEND API_VALIDATOR_TARGETS ${API_VALIDATOR_TARGET})
    set(API_VALIDATOR_TARGETS ${API_VALIDATOR_TARGETS} PARENT_SCOPE)

    # Imported (prebuilt) targets are recorded but never traversed further.
    get_target_property(target_is_imported ${API_VALIDATOR_TARGET} IMPORTED)
    if(target_is_imported)
        return()
    endif()

    get_target_property(target_type ${API_VALIDATOR_TARGET} TYPE)
    if(target_type STREQUAL "EXECUTABLE" OR target_type STREQUAL "SHARED_LIBRARY")
        get_target_property(linked_libs ${API_VALIDATOR_TARGET} LINK_LIBRARIES)
        if(linked_libs)
            foreach(linked_lib IN LISTS linked_libs)
                # Skip plain library names / generator expressions — only real
                # CMake targets can be inspected and recursed into.
                if(TARGET ${linked_lib})
                    get_target_property(dep_type ${linked_lib} TYPE)
                    if(dep_type STREQUAL "SHARED_LIBRARY")
                        # The recursive call merges its findings into this
                        # scope's API_VALIDATOR_TARGETS via PARENT_SCOPE.
                        _ie_add_api_validator_post_build_step_recursive(TARGET ${linked_lib})
                    endif()
                endif()
            endforeach()
        endif()
    endif()

    set(API_VALIDATOR_TARGETS ${API_VALIDATOR_TARGETS} PARENT_SCOPE)
endfunction()

set(VALIDATED_LIBRARIES "" CACHE INTERNAL "")

# Attaches a POST_BUILD step to API_VALIDATOR_TARGET that runs apivalidator on
# the target and every transitive shared-library dependency not validated yet.
# No-ops when apivalidator or the SDK data files are unavailable, or when
# building for UWP/Windows Phone directly.
function(_ie_add_api_validator_post_build_step)
    # Data files shipped with the Windows 10 SDK; PROGRAMFILES and UWP_SDK_PATH
    # are set by the WIN32 discovery code at the top of this file.
    set(UWP_API_VALIDATOR_APIS "${PROGRAMFILES}/Windows Kits/10/build/universalDDIs/x64/UniversalDDIs.xml")
    set(UWP_API_VALIDATOR_EXCLUSION "${UWP_SDK_PATH}/BinaryExclusionlist.xml")

    # BUGFIX: if(EXISTS <name>) does NOT auto-dereference a variable name — the
    # original code checked for files literally named "UWP_API_VALIDATOR_APIS" /
    # "UWP_API_VALIDATOR_EXCLUSION", which never exist, so the function always
    # returned here and validation was silently skipped. The variables must be
    # expanded (and quoted, since the paths contain spaces).
    if(NOT UWP_API_VALIDATOR OR (WINDOWS_STORE OR WINDOWS_PHONE) OR
       NOT EXISTS "${UWP_API_VALIDATOR_APIS}" OR NOT EXISTS "${UWP_API_VALIDATOR_EXCLUSION}")
        return()
    endif()

    cmake_parse_arguments(API_VALIDATOR "" "TARGET" "" ${ARGN})

    if(NOT API_VALIDATOR_TARGET)
        message(FATAL_ERROR "RunApiValidator requires TARGET to validate!")
    endif()

    if(NOT TARGET ${API_VALIDATOR_TARGET})
        message(FATAL_ERROR "${API_VALIDATOR_TARGET} is not a TARGET in the project tree.")
    endif()

    # collect the target itself plus its transitive shared-library dependencies
    # into API_VALIDATOR_TARGETS (filled via PARENT_SCOPE by the helper)

    _ie_add_api_validator_post_build_step_recursive(TARGET ${API_VALIDATOR_TARGET})

    # drop targets already validated by earlier invocations

    foreach(item IN LISTS VALIDATED_LIBRARIES)
        list(REMOVE_ITEM API_VALIDATOR_TARGETS ${item})
    endforeach()

    list(REMOVE_DUPLICATES API_VALIDATOR_TARGETS)

    if(NOT API_VALIDATOR_TARGETS)
        return()
    endif()

    # generate one apivalidator invocation per remaining target

    foreach(target IN LISTS API_VALIDATOR_TARGETS)
        list(APPEND commands
            COMMAND "${UWP_API_VALIDATOR}"
                    -SupportedApiXmlFiles:${UWP_API_VALIDATOR_APIS}
                    -BinaryExclusionListXmlFile:${UWP_API_VALIDATOR_EXCLUSION}
                    -StrictCompliance:TRUE
                    # $<TARGET_FILE:...> resolves to the built binary at
                    # generate time, per build configuration
                    -DriverPackagePath:$<TARGET_FILE:${target}>)
    endforeach()

    # run all checks right after API_VALIDATOR_TARGET is linked

    add_custom_command(TARGET ${API_VALIDATOR_TARGET} POST_BUILD
                       ${commands}
                       COMMENT "[apiValidator] Check ${API_VALIDATOR_TARGET} and its dependencies for WCOS compatibility"
                       VERBATIM)

    # remember what has been validated so later calls in this configure skip it

    list(APPEND VALIDATED_LIBRARIES ${API_VALIDATOR_TARGETS})
    set(VALIDATED_LIBRARIES "${VALIDATED_LIBRARIES}" CACHE INTERNAL "" FORCE)
endfunction()

#
# ie_add_api_validator_post_build_step(TARGET <name>)
#
# Public entry point; forwards all arguments to the implementation helper.
# NOTE(review): declared as a macro, presumably so it can be called uniformly
# from other macros without adding a function scope — confirm before changing.
macro(ie_add_api_validator_post_build_step)
_ie_add_api_validator_post_build_step(${ARGV})
endmacro()
1 change: 1 addition & 0 deletions cmake/developer_package.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -237,6 +237,7 @@ include(os_flags)
include(sanitizer)
include(cross_compiled_func)
include(faster_build)
include(api_validator)

function(set_ci_build_number)
set(OpenVINO_MAIN_SOURCE_DIR "${CMAKE_SOURCE_DIR}")
Expand Down
4 changes: 2 additions & 2 deletions cmake/uwp.toolchain.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,11 @@

set(CMAKE_SYSTEM_NAME WindowsStore)

if (NOT DEFINED CMAKE_SYSTEM_VERSION)
if(NOT DEFINED CMAKE_SYSTEM_VERSION)
set(CMAKE_SYSTEM_VERSION 10.0)
endif()

if (NOT DEFINED CMAKE_SYSTEM_PROCESSOR)
if(NOT DEFINED CMAKE_SYSTEM_PROCESSOR)
set(CMAKE_SYSTEM_PROCESSOR ${CMAKE_HOST_SYSTEM_PROCESSOR})
endif()

Expand Down
6 changes: 3 additions & 3 deletions docs/ops/detection/ReorgYolo_1.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@

**Inputs**:

* **1**: 4D input tensor of any type and shape `[N, C, H, W]`. `H` and `W` should be divisible by `stride`. Required.
* **1**: 4D input tensor of any type and shape `[N, C, H, W]`. `H` and `W` should be divisible by `stride` and `C >= (stride*stride)`. **Required.**

**Outputs**:

Expand All @@ -31,7 +31,7 @@
**Example**

```xml
<layer id="89" name="ExtractImagePatches" type="ReorgYolo">
<layer id="89" name="reorg" type="ReorgYolo">
<data stride="2"/>
<input>
<port id="0">
Expand All @@ -50,4 +50,4 @@
</port>
</output>
</layer>
```
```
31 changes: 22 additions & 9 deletions docs/ops/normalization/LRN_1.md
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@

* *bias*

* **Description**: *beta* represents the offset. Usually positive number to avoid dividing by zero.
* **Description**: *bias* represents the offset. Usually positive number to avoid dividing by zero.
* **Range of values**: no restrictions
* **Type**: float
* **Default value**: None
Expand All @@ -50,13 +50,26 @@

* **1**: Output tensor of the same shape and type as the `data` input tensor.

**Detailed description**: [Reference](http://yeephycho.github.io/2016/08/03/Normalizations-in-neural-networks/#Local-Response-Normalization-LRN)

Here is an example for 4D `data` input tensor and `axes` = `[1]`:

sqr_sum[a, b, c, d] =
sum(input[a, b - local_size : b + local_size + 1, c, d] ** 2)
output = input / (bias + alpha * sqr_sum) ** beta
**Detailed description**:
Local Response Normalization performs a normalization over local input regions.
Each input value is divided by
\f[ (bias + \frac{alpha}{{size}^{len(axes)}} \cdot \sum_{i} data_{i})^{beta} \f]
The sum is taken over a region of a side length `size` and number of dimensions equal to number of axes.
The region is centered at the input value that's being normalized (with zero padding added if needed).

Here is an example for 4D `data` input tensor and `axes = [1]`:
```
sqr_sum[a, b, c, d] =
sum(data[a, max(0, b - size / 2) : min(data.shape[1], b + size / 2 + 1), c, d] ** 2)
output = data / (bias + (alpha / size ** len(axes)) * sqr_sum) ** beta
```

Example for 4D `data` input tensor and `axes = [2, 3]`:
```
sqr_sum[a, b, c, d] =
sum(data[a, b, max(0, c - size / 2) : min(data.shape[2], c + size / 2 + 1), max(0, d - size / 2) : min(data.shape[3], d + size / 2 + 1)] ** 2)
output = data / (bias + (alpha / size ** len(axes)) * sqr_sum) ** beta
```

**Example**

Expand All @@ -83,4 +96,4 @@ Here is an example for 4D `data` input tensor and `axes` = `[1]`:
</port>
</output>
</layer>
```
```
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,20 @@ const std::vector<std::map<std::string, std::string>> configs = {
{}
};

INSTANTIATE_TEST_CASE_P(PreprocessingPrecisionConvertTests, PreprocessingPrecisionConvertTest,
INSTANTIATE_TEST_CASE_P(PreprocessingPrecisionConvertTestsViaSetInput, PreprocessingPrecisionConvertTest,
::testing::Combine(
::testing::ValuesIn(inputPrecisions),
::testing::Values(1, 2, 3, 4, 5), // Number of input tensor channels
::testing::Values(true), // Use SetInput
::testing::Values("TEMPLATE"),
::testing::ValuesIn(configs)),
PreprocessingPrecisionConvertTest::getTestCaseName);

INSTANTIATE_TEST_CASE_P(PreprocessingPrecisionConvertTestsViaGetBlob, PreprocessingPrecisionConvertTest,
::testing::Combine(
::testing::ValuesIn(inputPrecisions),
::testing::Values(4, 5), // Number of input tensor channels (blob_copy only supports 4d and 5d tensors)
::testing::Values(false), // use GetBlob
::testing::Values("TEMPLATE"),
::testing::ValuesIn(configs)),
PreprocessingPrecisionConvertTest::getTestCaseName);
Expand Down
8 changes: 4 additions & 4 deletions get-started-linux.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ the OpenVINO™ Toolkit on Linux\*. With this guide, you will learn how to:
## Prerequisites
1. This guide assumes that you have already cloned the `openvino` repo and
successfully built the Inference Engine and Samples using the
[build instructions](inference-engine/README.md).
[build instructions](build-instruction.md).
2. The original structure of the repository directories remains unchanged.

> **NOTE**: Below, the directory to which the `openvino` repository is cloned is
Expand Down Expand Up @@ -188,7 +188,7 @@ Now you are ready to run the Image Classification Sample Application.
## Run the Image Classification Sample Application

The Inference Engine sample applications are automatically compiled when you
built the Inference Engine using the [build instructions](inference-engine/README.md).
built the Inference Engine using the [build instructions](build-instruction.md).
The binary files are located in the `<OPENVINO_DIR>/inference-engine/bin/intel64/Release`
directory.

Expand Down Expand Up @@ -253,7 +253,7 @@ Throughput: 375.3339402 FPS
## Additional Resources

* [OpenVINO™ Release Notes](https://software.intel.com/en-us/articles/OpenVINO-RelNotes)
* [Inference Engine build instructions](inference-engine/README.md)
* [Inference Engine build instructions](build-instruction.md)
* [Introduction to Intel® Deep Learning Deployment Toolkit](https://docs.openvinotoolkit.org/latest/_docs_IE_DG_Introduction.html)
* [Inference Engine Developer Guide](https://docs.openvinotoolkit.org/latest/_docs_IE_DG_Deep_Learning_Inference_Engine_DevGuide.html)
* [Model Optimizer Developer Guide]
Expand All @@ -264,4 +264,4 @@ Throughput: 375.3339402 FPS
[OpenVINO™ pre-trained models]:https://github.com/opencv/open_model_zoo/tree/master/models/intel
[prerequisites]:https://github.com/opencv/open_model_zoo/tree/master/tools/downloader#prerequisites
[list of supported devices]:https://docs.openvinotoolkit.org/latest/_docs_IE_DG_supported_plugins_Supported_Devices.html
[different precision formats]:https://docs.openvinotoolkit.org/latest/_docs_IE_DG_supported_plugins_Supported_Devices.html#supported_model_formats
[different precision formats]:https://docs.openvinotoolkit.org/latest/_docs_IE_DG_supported_plugins_Supported_Devices.html#supported_model_formats
2 changes: 1 addition & 1 deletion inference-engine/ie_bridges/java/cpp/ie_core.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_GetCore(JNIEnv *env, jobj
return 0;
}

JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_GetCore_1(JNIEnv *env, jobject obj, jstring xmlConfigFile)
JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_GetCore1(JNIEnv *env, jobject obj, jstring xmlConfigFile)
{
static const char method_name[] = "GetCore_1";
try
Expand Down
2 changes: 1 addition & 1 deletion inference-engine/ie_bridges/java/cpp/openvino_java.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_SetConfig(JNIEnv *, jobjec
JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_SetConfig1(JNIEnv *, jobject, jlong, jobject);
JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_GetConfig(JNIEnv *, jobject, jlong, jstring, jstring);
JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_GetCore(JNIEnv *, jobject);
JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_GetCore_1(JNIEnv *, jobject, jstring);
JNIEXPORT jlong JNICALL Java_org_intel_openvino_IECore_GetCore1(JNIEnv *, jobject, jstring);
JNIEXPORT void JNICALL Java_org_intel_openvino_IECore_delete(JNIEnv *, jobject, jlong);

//
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ public IECore() {
}

public IECore(String xmlConfigFile) {
super(GetCore_1(xmlConfigFile));
super(GetCore1(xmlConfigFile));
}

public CNNNetwork ReadNetwork(final String modelPath, final String weightPath) {
Expand Down Expand Up @@ -92,7 +92,7 @@ private static native long LoadNetwork1(

private static native long GetCore();

private static native long GetCore_1(String xmlConfigFile);
private static native long GetCore1(String xmlConfigFile);

@Override
protected native void delete(long nativeObj);
Expand Down
1 change: 1 addition & 0 deletions inference-engine/ie_bridges/java/samples/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ Upon start-up, the application reads command-line parameters and loads a network

## Build
Create an environment variable with Inference Engine installation path:
```bash
export IE_PATH=/path/to/openvino/bin/intel64/Release/lib
```

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ endforeach()

function(python_disable_deprecated_warnings)
disable_deprecated_warnings()
set(pyx_file "${CMAKE_CURRENT_BINARY_DIR}/ie_api.cxx")
set(pyx_file "${CMAKE_CURRENT_BINARY_DIR}/ie_api.cxx" "${CMAKE_CURRENT_BINARY_DIR}/constants.cxx")
set_source_files_properties(${pyx_file} PROPERTIES COMPILE_FLAGS ${ie_c_cxx_deprecated})
endfunction()

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -244,7 +244,7 @@ cdef class IECore:
versions[device].major = ver.apiVersion.major
return versions

## Reads a network from the Intermediate Representation (IR) and creates an `IENetwork`.
## Reads a network from Intermediate Representation (IR) or ONNX formats and creates an `IENetwork`.
# @param model: A `.xml`, `.onnx`or `.prototxt` model file or string with IR.
# @param weights: A `.bin` file of the IR. Depending on `init_from_buffer` value, can be a string path or
# bytes with file content.
Expand Down
Loading

0 comments on commit e1a5cee

Please sign in to comment.