[release-0.4] WIP: backports for 0.4.4 #15385

Merged Mar 15, 2016 (58 commits)

Commits
397a781  Fix rendering bug of Markdown Code. (hayd, Jan 11, 2016)
d8e2537  Document how to make a kwarg's type explicit. (hayd, Jan 11, 2016)
132cb20  Root the function object in jlcall (yuyichao, Dec 7, 2015)
fa6802d  Add test for missing linfo root for jlcall function object (yuyichao, Dec 7, 2015)
235fde2  fix incremental deserializer on some cases of external singleton values (JeffBezanson, Jan 2, 2016)
a437ee9  Fix spelling errors (petercolberg, Jan 14, 2016)
9e625f6  formatting fixup on test/repl.jl, best viewed with ?w=1 (tkelman, Mar 7, 2016)
884d2d0  return false from hascolor if success(`tput setaf 0`) throws a julia … (tkelman, Jan 14, 2016)
221e865  Use naïve algorithm for generic A[tc]_mul_B! (simonster, Jan 17, 2016)
2ff37c9  fix a typo (Jan 16, 2016)
131a2df  add fast-path optimizations for reading LibuvStreams (vtjnash, Jan 13, 2016)
148dd77  Remove readbytes! method introduced in #14667 that broke MbedTLS.jl (tkelman, Mar 7, 2016)
d776ca8  Implement AArch64 ABI (yuyichao, Dec 30, 2015)
3602b5b  Updated out-of-date comment in AArch64 ABI file. (yuyichao, Jan 13, 2016)
6632662  Reorganize abi_aarch64.cpp and fix HVA and HFA support (yuyichao, Jan 14, 2016)
f5a38dc  Document requirement of __precompile__ (yuyichao, Jan 23, 2016)
d21e9b0  Undo a doc update that is not accurate for release-0.4 (tkelman, Mar 7, 2016)
1a706c4  Fix promotion of element types when converting a Vector or Matrix (andreasnoack, Jan 27, 2016)
d214e1c  fix #14825 (JeffBezanson, Jan 29, 2016)
fd33166  Add extern declaration of jl_symbol_name which is not in julia.h on r… (tkelman, Mar 7, 2016)
119c9bf  fix eof definitions and add tests (vtjnash, Jan 14, 2016)
9f70f69  Use -fdefault-integer-8 when building lapack from source against ILP6… (tkelman, Feb 19, 2016)
d7dd764  intel fortran compiler uses a different flag for integer size (tkelman, Feb 19, 2016)
910baf8  Add ScalarEvolutionExpander patch. (blakejohnson, Jan 28, 2016)
64da48a  Big overhaul of "Scope" manual section (mauro3, Jul 14, 2015)
a06f8a5  Some updates after a mailing-list conversation (mauro3, Jul 16, 2015)
83e6732  Fixed whitespace and two typos (mauro3, Feb 1, 2016)
b5cd49c  fix typo in _generic_matmatmul!() (alyst, Feb 2, 2016)
1a020b0  ccalltest: fix format string for Julia integers. (maleadt, Dec 5, 2015)
2940a3d  Try to fix win32 ABI (yuyichao, Feb 2, 2016)
129d646  Extend and clean the ccall test suite. (maleadt, Dec 1, 2015)
4cd482f  document addprocs keyword arg topology (amitmurthy, Jan 30, 2016)
8f58a98  Make show for Cholesky write to the right IO (simonster, Feb 6, 2016)
9942234  Use Any with ccall instead of pointer_from_objref (yuyichao, Feb 10, 2016)
1727c0f  Updated gcc command line and execution instruction (AndyGreenwell, Feb 12, 2016)
62b7e07  fix #15077, bug in `rehash!` leading to un-findable key (JeffBezanson, Feb 16, 2016)
3770c61  patches #13682 to fix memory mapping of large files on Windows (Cody-G, Feb 17, 2016)
722463b  Remove no longer accurate Windows warning in build_sysimg.jl (tkelman, Feb 18, 2016)
a86f33c  Fix unix version of default_sysimg_path (tkelman, Feb 18, 2016)
675305c  Complex symv and syr are part of lapack, not blas (tkelman, Feb 20, 2016)
4e6cf74  Make sure ptrarray is valid after grow_beg (yuyichao, Feb 24, 2016)
c747915  Check stride on preallocated output for matmul (fixes #15286) (timholy, Feb 29, 2016)
c53b24f  Fix check of inner dimension in matmul for 2x2 and 3x3 outer dimension (andreasnoack, Mar 1, 2016)
1d27400  Fix missing GC root in table.c (yuyichao, Mar 2, 2016)
ca4f7f3  BoundsError does not take a string (tkelman, Jan 28, 2016)
bc5ea27  Add a test that should have been part of #5492 (tkelman, Feb 26, 2016)
05b1aed  Move discussion about abstract types in fields from FAQ to performances (Aug 2, 2015)
93e38d8  Fix various sphinx warnings about misformatted rst (tkelman, Jan 21, 2016)
e4bd799  Do not copy gcc dlls from dist-extras in windows binary-dist (tkelman, Feb 29, 2016)
36089d7  Temporary ugly hack to download old versions of winrpm gcc dll's (tkelman, Mar 13, 2016)
84c1be4  fix #922 (wrong line numbers in error expressions) and remove old hac… (vtjnash, Oct 7, 2015)
e5701a5  generalize backtrace test to succeed without inlining (vtjnash, Oct 20, 2015)
a5ba47a  Fix axpy! bug and add tests (Mar 8, 2016)
647cb78  Confirm overwrite with 7z (tkelman, Mar 13, 2016)
7d85e10  Revert "Add test for missing linfo root for jlcall function object" (tkelman, Mar 13, 2016)
6fffa59  Revert "Root the function object in jlcall" (tkelman, Mar 13, 2016)
d4d0156  Reinstate printing of :line Exprs (toivoh, Mar 1, 2016)
fe33ecc  Fix line numbers in doctests (tkelman, Mar 15, 2016)
18 changes: 16 additions & 2 deletions Makefile
@@ -562,7 +562,14 @@ ifneq (,$(filter $(ARCH), i386 i486 i586 i686))
$(JLDOWNLOAD) http://downloads.sourceforge.net/sevenzip/7z920.exe && \
7z x -y 7z920.exe 7z.exe 7z.dll && \
../contrib/windows/winrpm.sh http://download.opensuse.org/repositories/windows:/mingw:/win32/openSUSE_13.2 \
"mingw32-libgfortran3 mingw32-libquadmath0 mingw32-libstdc++6 mingw32-libgcc_s_sjlj1 mingw32-libssp0 mingw32-libexpat1 mingw32-zlib1" && \
"mingw32-libexpat1 mingw32-zlib1" && \
$(JLDOWNLOAD) https://juliacache.s3.amazonaws.com/mingw32-libgfortran3-5.3.0-1.1.noarch.rpm && \
$(JLDOWNLOAD) https://juliacache.s3.amazonaws.com/mingw32-libquadmath0-5.3.0-1.1.noarch.rpm && \
$(JLDOWNLOAD) https://juliacache.s3.amazonaws.com/mingw32-libstdc++6-5.3.0-1.1.noarch.rpm && \
$(JLDOWNLOAD) https://juliacache.s3.amazonaws.com/mingw32-libgcc_s_sjlj1-5.3.0-1.1.noarch.rpm && \
$(JLDOWNLOAD) https://juliacache.s3.amazonaws.com/mingw32-libssp0-5.3.0-1.1.noarch.rpm && \
for i in *.rpm; do 7z x -y $$i; done && \
for i in *.cpio; do 7z x -y $$i; done && \
cp usr/i686-w64-mingw32/sys-root/mingw/bin/*.dll . && \
$(JLDOWNLOAD) PortableGit.7z https://github.com/git-for-windows/git/releases/download/v2.6.1.windows.1/PortableGit-2.6.1-32-bit.7z.exe
else ifeq ($(ARCH),x86_64)
@@ -572,7 +579,14 @@ else ifeq ($(ARCH),x86_64)
mv _7z.dll 7z.dll && \
mv _7z.exe 7z.exe && \
../contrib/windows/winrpm.sh http://download.opensuse.org/repositories/windows:/mingw:/win64/openSUSE_13.2 \
"mingw64-libgfortran3 mingw64-libquadmath0 mingw64-libstdc++6 mingw64-libgcc_s_seh1 mingw64-libssp0 mingw64-libexpat1 mingw64-zlib1" && \
"mingw64-libexpat1 mingw64-zlib1" && \
$(JLDOWNLOAD) https://juliacache.s3.amazonaws.com/mingw64-libgfortran3-5.3.0-1.1.noarch.rpm && \
$(JLDOWNLOAD) https://juliacache.s3.amazonaws.com/mingw64-libquadmath0-5.3.0-1.1.noarch.rpm && \
$(JLDOWNLOAD) https://juliacache.s3.amazonaws.com/mingw64-libstdc++6-5.3.0-1.1.noarch.rpm && \
$(JLDOWNLOAD) https://juliacache.s3.amazonaws.com/mingw64-libgcc_s_seh1-5.3.0-1.1.noarch.rpm && \
$(JLDOWNLOAD) https://juliacache.s3.amazonaws.com/mingw64-libssp0-5.3.0-1.1.noarch.rpm && \
for i in *.rpm; do 7z x -y $$i; done && \
for i in *.cpio; do 7z x -y $$i; done && \
cp usr/x86_64-w64-mingw32/sys-root/mingw/bin/*.dll . && \
$(JLDOWNLOAD) PortableGit.7z https://github.com/git-for-windows/git/releases/download/v2.6.1.windows.1/PortableGit-2.6.1-64-bit.7z.exe
else
9 changes: 8 additions & 1 deletion base/Terminals.jl
@@ -201,7 +201,14 @@ start_reading(t::UnixTerminal) = start_reading(t.in_stream)
stop_reading(t::UnixTerminal) = stop_reading(t.in_stream)
eof(t::UnixTerminal) = eof(t.in_stream)

@unix_only hascolor(t::TTYTerminal) = (startswith(t.term_type, "xterm") || success(`tput setaf 0`))
@unix_only function hascolor(t::TTYTerminal)
startswith(t.term_type, "xterm") && return true
try
return success(`tput setaf 0`)
catch
return false
end
end
@windows_only hascolor(t::TTYTerminal) = true

end # module
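For illustration (not part of the diff): a standalone sketch of the fallback this hunk adds, assuming `success` throws when the `tput` binary cannot be spawned at all.

```julia
# Any error while spawning `tput` (for example, tput missing from PATH)
# is treated as "no color support" instead of propagating.
function has_tput_color()
    try
        return success(`tput setaf 0`)   # true when tput exits with status 0
    catch
        return false
    end
end
```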
8 changes: 7 additions & 1 deletion base/dict.jl
@@ -495,15 +495,21 @@ function rehash!{K,V}(h::Dict{K,V}, newsz = length(h.keys))
vals = Array(V, newsz)
count0 = h.count
count = 0
maxprobe = max(16, newsz>>6)

for i = 1:sz
if olds[i] == 0x1
k = oldk[i]
v = oldv[i]
index = hashindex(k, newsz)
index0 = index = hashindex(k, newsz)
while slots[index] != 0
index = (index & (newsz-1)) + 1
end
if index - index0 > maxprobe
# rare condition: new table size causes more grouping of keys than before
# see issue #15077
return rehash!(h, newsz*2)
end
slots[index] = 0x1
keys[index] = k
vals[index] = v
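For illustration (not part of the diff): a rough sketch of the probing scheme the new check reasons about. The `hashindex` definition below is an assumption; the wraparound step and the `maxprobe` formula mirror the hunk above.

```julia
# Slot for a key in a table whose size is a power of two (assumed definition).
hashindex(key, sz) = Int(hash(key) & UInt(sz - 1)) + 1

# Linear probing with wraparound, as in rehash! above.
next_slot(index, newsz) = (index & (newsz - 1)) + 1

# If the walk from the starting slot exceeds maxprobe = max(16, newsz >> 6),
# the freshly sized table is already too clustered and rehash! restarts at 2*newsz.
```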
70 changes: 41 additions & 29 deletions base/docs/helpdb.jl
@@ -4011,46 +4011,67 @@ Note that workers do not run a ``.juliarc.jl`` startup script, nor do they synch
"""
addprocs()

doc"""
```rst
.. addprocs(machines; tunnel=false, sshflags=``, max_parallel=10, exeflags=``) -> List of process identifiers
"""
```
addprocs(machines; keyword_args...) -> List of process identifiers
```

Add processes on remote machines via SSH.
Requires julia to be installed in the same location on each node, or to be available via a shared file system.
Add processes on remote machines via SSH. Requires julia to be installed in the same
location on each node, or to be available via a shared file system.

``machines`` is a vector of machine specifications. Worker are started for each specification.
`machines` is a vector of machine specifications. Worker are started for each specification.

A machine specification is either a string ``machine_spec`` or a tuple - ``(machine_spec, count)``
A machine specification is either a string `machine_spec` or a tuple - `(machine_spec, count)`.

``machine_spec`` is a string of the form ``[user@]host[:port] [bind_addr[:port]]``. ``user`` defaults
to current user, ``port`` to the standard ssh port. If ``[bind_addr[:port]]`` is specified, other
workers will connect to this worker at the specified ``bind_addr`` and ``port``.
`machine_spec` is a string of the form `[user@]host[:port] [bind_addr[:port]]`. `user` defaults
to current user, `port` to the standard ssh port. If `[bind_addr[:port]]` is specified, other
workers will connect to this worker at the specified `bind_addr` and `port`.

``count`` is the number of workers to be launched on the specified host. If specified as ``:auto``
`count` is the number of workers to be launched on the specified host. If specified as `:auto`
it will launch as many workers as the number of cores on the specific host.


Keyword arguments:

``tunnel`` : if ``true`` then SSH tunneling will be used to connect to the worker from the master process.
* `tunnel`: if `true` then SSH tunneling will be used to connect to the worker from the
master process. Default is `false`.

* `sshflags`: specifies additional ssh options, e.g.

sshflags=`-i /home/foo/bar.pem`

* `max_parallel`: specifies the maximum number of workers connected to in parallel at a host.
Defaults to 10.

* `dir`: specifies the working directory on the workers. Defaults to the host's current
directory (as found by `pwd()`)

``sshflags`` : specifies additional ssh options, e.g. :literal:`sshflags=\`-i /home/foo/bar.pem\`` .
* `exename`: name of the julia executable. Defaults to `"\$JULIA_HOME/julia"` or
`"\$JULIA_HOME/julia-debug"` as the case may be.

``max_parallel`` : specifies the maximum number of workers connected to in parallel at a host. Defaults to 10.
* `exeflags`: additional flags passed to the worker processes.

``dir`` : specifies the working directory on the workers. Defaults to the host's current directory (as found by ``pwd()``)
* `topology`: Specifies how the workers connect to each other. Sending a message
between unconnected workers results in an error.

``exename`` : name of the julia executable. Defaults to "$JULIA_HOME/julia" or "$JULIA_HOME/julia-debug" as the case may be.
+ `topology=:all_to_all` : All processes are connected to each other.
This is the default.

+ `topology=:master_slave` : Only the driver process, i.e. pid 1 connects to the
workers. The workers do not connect to each other.

+ `topology=:custom` : The `launch` method of the cluster manager specifes the
connection topology via fields `ident` and `connect_idents` in
`WorkerConfig`. A worker with a cluster manager identity `ident`
will connect to all workers specified in `connect_idents`.

``exeflags`` : additional flags passed to the worker processes.

Environment variables :

If the master process fails to establish a connection with a newly launched worker within 60.0 seconds,
the worker treats it a fatal situation and terminates. This timeout can be controlled via environment
variable ``JULIA_WORKER_TIMEOUT``. The value of ``JULIA_WORKER_TIMEOUT`` on the master process, specifies
variable `JULIA_WORKER_TIMEOUT`. The value of `JULIA_WORKER_TIMEOUT` on the master process, specifies
the number of seconds a newly launched worker waits for connection establishment.
```
"""
addprocs(machines)
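For illustration (not part of the diff): a usage sketch of the documented keywords. The host names are made up; the key file is the one from the `sshflags` example above.

```julia
# Four workers on node1, one per core on node2, connected over SSH tunnels,
# with only the master process (pid 1) talking to the workers.
addprocs([("user@node1", 4), ("user@node2", :auto)];
         tunnel=true,
         sshflags=`-i /home/foo/bar.pem`,
         max_parallel=10,
         topology=:master_slave)
```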

@@ -10606,15 +10627,6 @@ This is only needed if your module depends on a file that is not used via `inclu
"""
include_dependency

doc"""
__precompile__(isprecompilable::Bool=true)

Specify whether the file calling this function is precompilable. If `isprecompilable` is `true`, then `__precompile__` throws an exception when the file is loaded by `using`/`import`/`require` *unless* the file is being precompiled, and in a module file it causes the module to be automatically precompiled when it is imported. Typically, `__precompile__()` should occur before the `module` declaration in the file, or better yet `VERSION >= v"0.4" && __precompile__()` in order to be backward-compatible with Julia 0.3.

If a module or file is *not* safely precompilable, it should call `__precompile__(false)` in order to throw an error if Julia attempts to precompile it.
"""
__precompile__

doc"""
randn!([rng], A::Array{Float64,N})

@@ -11082,7 +11094,7 @@ DivideError
doc"""
AssertionError([msg])

The asserted condition did not evalutate to `true`.
The asserted condition did not evaluate to `true`.
Optional argument `msg` is a descriptive error string.
"""
AssertionError
29 changes: 15 additions & 14 deletions base/linalg/blas.jl
@@ -57,6 +57,7 @@ export


const libblas = Base.libblas_name
const liblapack = Base.liblapack_name

import ..LinAlg: BlasReal, BlasComplex, BlasFloat, BlasInt, DimensionMismatch, chksquare, axpy!

@@ -239,10 +240,10 @@ function axpy!{T<:BlasFloat,Ta<:Number,Ti<:Integer}(alpha::Ta, x::Array{T}, rx::
throw(DimensionMismatch("ranges of differing lengths"))
end
if minimum(rx) < 1 || maximum(rx) > length(x)
throw(BoundsError("range out of bounds for x, of length $(length(x))"))
throw(ArgumentError("range out of bounds for x, of length $(length(x))"))
end
if minimum(ry) < 1 || maximum(ry) > length(y)
throw(BoundsError("range out of bounds for y, of length $(length(y))"))
throw(ArgumentError("range out of bounds for y, of length $(length(y))"))
end
axpy!(length(rx), convert(T, alpha), pointer(x)+(first(rx)-1)*sizeof(T), step(rx), pointer(y)+(first(ry)-1)*sizeof(T), step(ry))
y
@@ -342,10 +343,10 @@ for (fname, elty) in ((:dgbmv_,:Float64),
end

### symv
for (fname, elty) in ((:dsymv_,:Float64),
(:ssymv_,:Float32),
(:zsymv_,:Complex128),
(:csymv_,:Complex64))
for (fname, elty, lib) in ((:dsymv_,:Float64,libblas),
(:ssymv_,:Float32,libblas),
(:zsymv_,:Complex128,liblapack),
(:csymv_,:Complex64,liblapack))
# Note that the complex symv are not BLAS but auiliary functions in LAPACK
@eval begin
# SUBROUTINE DSYMV(UPLO,N,ALPHA,A,LDA,X,INCX,BETA,Y,INCY)
@@ -366,7 +367,7 @@ for (fname, elty) in ((:dsymv_,:Float64),
if m != length(y)
throw(DimensionMismatch("A has size $(size(A)), and y has length $(length(y))"))
end
ccall(($(blasfunc(fname)), libblas), Void,
ccall(($(blasfunc(fname)), $lib), Void,
(Ptr{UInt8}, Ptr{BlasInt}, Ptr{$elty}, Ptr{$elty},
Ptr{BlasInt}, Ptr{$elty}, Ptr{BlasInt}, Ptr{$elty},
Ptr{$elty}, Ptr{BlasInt}),
@@ -565,17 +566,17 @@ for (fname, elty) in ((:dger_,:Float64),
end

### syr
for (fname, elty) in ((:dsyr_,:Float64),
(:ssyr_,:Float32),
(:zsyr_,:Complex128),
(:csyr_,:Complex64))
for (fname, elty, lib) in ((:dsyr_,:Float64,libblas),
(:ssyr_,:Float32,libblas),
(:zsyr_,:Complex128,liblapack),
(:csyr_,:Complex64,liblapack))
@eval begin
function syr!(uplo::Char, α::$elty, x::StridedVector{$elty}, A::StridedMatrix{$elty})
n = chksquare(A)
if length(x) != n
throw(DimensionMismatch("A has size ($n,$n), x has length $(length(x))"))
end
ccall(($(blasfunc(fname)), libblas), Void,
ccall(($(blasfunc(fname)), $lib), Void,
(Ptr{UInt8}, Ptr{BlasInt}, Ptr{$elty}, Ptr{$elty},
Ptr{BlasInt}, Ptr{$elty}, Ptr{BlasInt}),
&uplo, &n, &α, x,
@@ -943,10 +944,10 @@ end # module
function copy!{T<:BlasFloat,Ti<:Integer}(dest::Array{T}, rdest::Union{UnitRange{Ti},Range{Ti}},
src::Array{T}, rsrc::Union{UnitRange{Ti},Range{Ti}})
if minimum(rdest) < 1 || maximum(rdest) > length(dest)
throw(BoundsError("range out of bounds for dest, of length $(length(dest))"))
throw(ArgumentError("range out of bounds for dest, of length $(length(dest))"))
end
if minimum(rsrc) < 1 || maximum(rsrc) > length(src)
throw(BoundsError("range out of bounds for src, of length $(length(src))"))
throw(ArgumentError("range out of bounds for src, of length $(length(src))"))
end
if length(rdest) != length(rsrc)
throw(DimensionMismatch("ranges must be of the same length"))
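For illustration (not part of the diff): the user-facing call is unchanged by the `libblas`/`liblapack` split above; complex symmetric `symv!` now resolves `zsymv_`/`csymv_` in LAPACK under the hood. A hedged sketch:

```julia
# y = alpha*A*x + beta*y for a complex *symmetric* (not Hermitian) matrix.
A = complex(rand(3, 3), rand(3, 3)); A = A + A.'   # symmetric complex matrix
x = complex(rand(3), rand(3))
y = zeros(Complex128, 3)
Base.LinAlg.BLAS.symv!('U', one(Complex128), A, x, zero(Complex128), y)
```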
2 changes: 1 addition & 1 deletion base/linalg/cholesky.jl
@@ -185,7 +185,7 @@ function getindex{T<:BlasFloat}(C::CholeskyPivoted{T}, d::Symbol)
end

show{T,S<:AbstractMatrix}(io::IO, C::Cholesky{T,S}) =
(println("$(typeof(C)) with factor:");show(io,C[:UL]))
(println(io, "$(typeof(C)) with factor:");show(io,C[:UL]))

A_ldiv_B!{T<:BlasFloat,S<:AbstractMatrix}(C::Cholesky{T,S}, B::StridedVecOrMat{T}) =
LAPACK.potrs!(C.uplo, C.factors, B)
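For illustration (not part of the diff): a quick way to see the fix; `cholfact(eye(2))` is just a convenient positive-definite example.

```julia
# The "... with factor:" header must go to the stream passed to show,
# not unconditionally to STDOUT.
C = cholfact(eye(2))
buf = IOBuffer()
show(buf, C)
print(takebuf_string(buf))   # the whole printed form, header included
```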
7 changes: 3 additions & 4 deletions base/linalg/generic.jl
@@ -447,21 +447,20 @@ function axpy!(α, x::AbstractArray, y::AbstractArray)
end

function axpy!{Ti<:Integer,Tj<:Integer}(α, x::AbstractArray, rx::AbstractArray{Ti}, y::AbstractArray, ry::AbstractArray{Tj})
if length(x) != length(y)
throw(DimensionMismatch("x has length $(length(x)), but y has length $(length(y))"))
if length(rx) != length(ry)
throw(DimensionMismatch("rx has length $(length(rx)), but ry has length $(length(ry))"))
elseif minimum(rx) < 1 || maximum(rx) > length(x)
throw(BoundsError(x, rx))
elseif minimum(ry) < 1 || maximum(ry) > length(y)
throw(BoundsError(y, ry))
elseif length(rx) != length(ry)
throw(ArgumentError("rx has length $(length(rx)), but ry has length $(length(ry))"))
end
for i = 1:length(rx)
@inbounds y[ry[i]] += x[rx[i]]*α
end
y
end


# Elementary reflection similar to LAPACK. The reflector is not Hermitian but ensures that tridiagonalization of Hermitian matrices become real. See lawn72
@inline function reflector!(x::AbstractVector)
n = length(x)
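For illustration (not part of the diff): a usage sketch of the corrected check; only the index ranges must agree in length, so `x` and `y` may differ in overall length.

```julia
# y[ry] += alpha * x[rx]; rx and ry must match in length and stay in bounds.
x = collect(1:4)             # length 4
y = zeros(Int, 10)           # length 10, fine after the fix
Base.LinAlg.axpy!(2, x, 1:4, y, 3:6)
# y[3:6] now equals 2 .* x; every other entry is still 0
```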
17 changes: 9 additions & 8 deletions base/linalg/matmul.jl
@@ -256,7 +256,7 @@ function syrk_wrapper!{T<:BlasFloat}(C::StridedMatrix{T}, tA::Char, A::StridedVe
return matmul3x3!(C,tA,tAt,A,A)
end

if stride(A, 1) == 1 && stride(A, 2) >= size(A, 1)
if stride(A, 1) == stride(C, 1) == 1 && stride(A, 2) >= size(A, 1) && stride(C, 2) >= size(C, 1)
return copytri!(BLAS.syrk!('U', tA, one(T), A, zero(T), C), 'U')
end
return generic_matmatmul!(C, tA, tAt, A, A)
@@ -287,7 +287,7 @@ function herk_wrapper!{T<:BlasReal}(C::Union{StridedMatrix{T}, StridedMatrix{Com
# Result array does not need to be initialized as long as beta==0
# C = Array(T, mA, mA)

if stride(A, 1) == 1 && stride(A, 2) >= size(A, 1)
if stride(A, 1) == stride(C, 1) == 1 && stride(A, 2) >= size(A, 1) && stride(C, 2) >= size(C, 1)
return copytri!(BLAS.herk!('U', tA, one(T), A, zero(T), C), 'U', true)
end
return generic_matmatmul!(C,tA, tAt, A, A)
@@ -325,7 +325,7 @@ function gemm_wrapper!{T<:BlasFloat}(C::StridedVecOrMat{T}, tA::Char, tB::Char,
return matmul3x3!(C,tA,tB,A,B)
end

if stride(A, 1) == stride(B, 1) == 1 && stride(A, 2) >= size(A, 1) && stride(B, 2) >= size(B, 1)
if stride(A, 1) == stride(B, 1) == stride(C, 1) == 1 && stride(A, 2) >= size(A, 1) && stride(B, 2) >= size(B, 1) && stride(C, 2) >= size(C, 1)
return BLAS.gemm!(tA, tB, one(T), A, B, zero(T), C)
end
generic_matmatmul!(C, tA, tB, A, B)
@@ -434,11 +434,12 @@ const Cbuf = Array(UInt8, tilebufsize)
function generic_matmatmul!{T,S,R}(C::AbstractMatrix{R}, tA, tB, A::AbstractMatrix{T}, B::AbstractMatrix{S})
mA, nA = lapack_size(tA, A)
mB, nB = lapack_size(tB, B)
mC, nC = size(C)

if mA == nA == nB == 2
if mA == nA == mB == nB == mC == nC == 2
return matmul2x2!(C, tA, tB, A, B)
end
if mA == nA == nB == 3
if mA == nA == mB == nB == mC == nC == 3
return matmul3x3!(C, tA, tB, A, B)
end
_generic_matmatmul!(C, tA, tB, A, B)
@@ -450,14 +451,14 @@ function _generic_matmatmul!{T,S,R}(C::AbstractVecOrMat{R}, tA, tB, A::AbstractV
mA, nA = lapack_size(tA, A)
mB, nB = lapack_size(tB, B)
if mB != nA
throw(DimensionMismatch("matrix A has dimensions ($mA, $nB), matrix B has dimensions ($mB, $nB)"))
throw(DimensionMismatch("matrix A has dimensions ($mA,$nA), matrix B has dimensions ($mB,$nB)"))
end
if size(C,1) != mA || size(C,2) != nB
throw(DimensionMismatch("result C has dimensions $(size(C)), needs ($mA, $nB)"))
throw(DimensionMismatch("result C has dimensions $(size(C)), needs ($mA,$nB)"))
end

tile_size = 0
if isbits(R) && isbits(T) && isbits(S)
if isbits(R) && isbits(T) && isbits(S) && (tA == 'N' || tB != 'N')
tile_size = floor(Int,sqrt(tilebufsize/max(sizeof(R),sizeof(S),sizeof(T))))
end
@inbounds begin
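For illustration (not part of the diff): the added guard written out on its own. `blas_compatible` is a made-up name; the condition mirrors the ones inserted into `gemm_wrapper!`, `syrk_wrapper!` and `herk_wrapper!` above.

```julia
# A, B and C must all be unit-stride in the first dimension and
# column-contiguous enough before BLAS gemm!/syrk!/herk! may be used.
blas_compatible(A, B, C) =
    stride(A, 1) == stride(B, 1) == stride(C, 1) == 1 &&
    stride(A, 2) >= size(A, 1) &&
    stride(B, 2) >= size(B, 1) &&
    stride(C, 2) >= size(C, 1)

# A preallocated output that fails the check: a strided SubArray view.
C = sub(zeros(8, 8), 1:2:8, 1:2:8)            # stride(C, 1) == 2
blas_compatible(eye(4), eye(4), C)            # false -> generic fallback
```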
8 changes: 4 additions & 4 deletions base/linalg/tridiag.jl
@@ -77,7 +77,7 @@ function diag{T}(M::SymTridiagonal{T}, n::Integer=0)
elseif absn<size(M,1)
return zeros(T,size(M,1)-absn)
else
throw(BoundsError("$n-th diagonal of a $(size(M)) matrix doesn't exist!"))
throw(ArgumentError("$n-th diagonal of a $(size(M)) matrix doesn't exist!"))
end
end

@@ -249,7 +249,7 @@ det(A::SymTridiagonal) = det_usmani(A.ev, A.dv, A.ev)

function getindex{T}(A::SymTridiagonal{T}, i::Integer, j::Integer)
if !(1 <= i <= size(A,2) && 1 <= j <= size(A,2))
throw(BoundsError("(i,j) = ($i,$j) not within matrix of size $(size(A))"))
throw(BoundsError(A, (i,j)))
end
if i == j
return A.dv[i]
@@ -339,13 +339,13 @@ function diag{T}(M::Tridiagonal{T}, n::Integer=0)
elseif abs(n) < size(M,1)
return zeros(T,size(M,1)-abs(n))
else
throw(BoundsError("$n-th diagonal of a $(size(M)) matrix doesn't exist!"))
throw(ArgumentError("$n-th diagonal of a $(size(M)) matrix doesn't exist!"))
end
end

function getindex{T}(A::Tridiagonal{T}, i::Integer, j::Integer)
if !(1 <= i <= size(A,2) && 1 <= j <= size(A,2))
throw(BoundsError("(i,j) = ($i,$j) not within matrix of size $(size(A))"))
throw(BoundsError(A, (i,j)))
end
if i == j
return A.d[i]
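For illustration (not part of the diff): the new error behaviour, with arbitrary values; each indexing line below throws on its own.

```julia
T = SymTridiagonal([1.0, 2.0, 3.0], [0.5, 0.5])   # a 3x3 matrix
T[4, 4]      # BoundsError(T, (4, 4)): structured, no string payload
diag(T, 5)   # ArgumentError: the 5-th diagonal of a (3,3) matrix doesn't exist
```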