# $Id: base.site,v 1.72 2001/06/21 18:03:31 curfman Exp $ 

#  This file contains site-specific information.  The definitions below
#  should be changed to match the locations of libraries at your site.
#  The following naming convention is used:
#     XXX_LIB - location of library XXX
#     XXX_INCLUDE - directory for include files needed for library XXX
#
# Location of BLAS and LAPACK. See ${PETSC_DIR}/docs/installation.html for
# information on retrieving them.
#
# BLAS usually comes with SGI. Do NOT use the parallel (library names with 
# mp in them) version of the SGI BLAS.
#
BLASLAPACK_LIB =  -lcomplib.sgimath -lblas
#
# Location of MPI (Message Passing Interface) software  
#
# We recommend using SGI's MPI implementation over MPICH on the Origin and 
# Powerchallenge.
#
MPI_LIB        = -lmpi -lmpi++
MPI_INCLUDE    = 
MPIRUN         = /usr/sbin/npri -w /bin/mpirun
#
# The following lines can be used with MPICH
#
#MPI_LIB        = -L/home/petsc/mpich/lib/IRIX64/ch_p4 -lmpi
#MPI_INCLUDE    = -I/home/petsc/mpich/include
#MPIRUN         =  /home/petsc/mpich/lib/IRIX64/ch_p4/mpirun
#
# The following lines can be used with MPIUNI
#
#MPI_LIB         = ${INSTALL_LIB_DIR}/libmpiuni.a
#MPI_INCLUDE     = -I${PETSC_DIR}/src/sys/src/mpiuni
#MPIRUN          = ${PETSC_DIR}/src/sys/src/mpiuni/mpirun
#
# ----------------------------------------------------------------------------------------  
#  Locations of OPTIONAL packages. Comment out those you do not have.
# ----------------------------------------------------------------------------------------  
#
# Location of X-windows software
#
X11_INCLUDE    = 
X11_LIB        = -lX11
PETSC_HAVE_X11 = -DPETSC_HAVE_X11
#
# Location of MPE
# If using MPICH version 1.1.2 or higher use the flag -DPETSC_HAVE_MPE_INITIALIZED_LOGGING
#
#MPE_INCLUDE   = -I/usr/local/mpi/include
#MPE_LIB       = -L/usr/local/mpi/lib/IRIX64/ch_shmem -lmpe -lpmpi
#PETSC_HAVE_MPE = -DPETSC_HAVE_MPE
#
# Location of BlockSolve (MPI version)
#
#BLOCKSOLVE_INCLUDE = -I/home/petsc/software/BlockSolve95/include
#BLOCKSOLVE_LIB     = -L/home/petsc/software/BlockSolve95/lib/libO/${PETSC_ARCH} -lBS95
#PETSC_HAVE_BLOCKSOLVE = -DPETSC_HAVE_BLOCKSOLVE
#
# Matlab location
#
#  The CFLAGS and LDFLAGS settings below are needed because Matlab is
#  mis-installed on our machine; you may not need any of them.
#MCC            = cc
#CMEX           = /home/bsmith/bin/IRIX64/mex CFLAGS='-32' LDFLAGS="-32 -mips4 -shared -U -Bsymbolic -exported_symbol mexFunction -exported_symbol mexVersion"
#CMEX           = mex CFLAGS='-32' LDFLAGS="-32 -mips4 -shared -U -Bsymbolic -exported_symbol mexFunction -exported_symbol mexVersion"
#MATLABCOMMAND  = matlab -sgi
#PETSC_HAVE_MATLAB =  -DPETSC_HAVE_MATLAB
#
#
#MATLAB_ENGINE_INCLUDE    = -I/home/petsc/software/matlab/include
#MATLAB_ENGINE_LIB        = -L/software/irix-6/com/packages/matlab-r11.1/extern/lib/sgi -leng -lmx -lmat -lmi -lut
#PETSC_HAVE_MATLAB_ENGINE =  -DPETSC_HAVE_MATLAB_ENGINE
#
#
# Location where adiC is installed
#
#ADIC_DEFINES    = -Dad_GRAD_MAX=36
#ADIC_CC         = adiC -a -d gradient
#PETSC_HAVE_ADIC = -DPETSC_HAVE_ADIC
#
# Location of PVODE; Alan Hindmarsh's parallel ODE solver
# 
#PVODE_INCLUDE = -I/home/petsc/software/MPI_PVODE/include
#PVODE_LIB     = /home/petsc/software/MPI_PVODE/lib/IRIX64/libpvode.a
#PETSC_HAVE_PVODE = -DPETSC_HAVE_PVODE
#
#
# Location of ParMetis
#
#PARMETIS_INCLUDE = -I/home/petsc/software/ParMetis-2.0
#PARMETIS_LIB     = -L/home/petsc/software/ParMetis-2.0/lib/${PETSC_ARCH} -lparmetis -lmetis
#PETSC_HAVE_PARMETIS = -DPETSC_HAVE_PARMETIS
#
#
#  Location for ALICE Memory Snooper
#
#AMS_INCLUDE    = -I/home/alice/ams/include
#AMS_LIB        = -L/home/alice/ams/lib/libg/irix64 -lamspub -lamsutilmt -lamsutil -lpthread
#PETSC_HAVE_AMS = -DPETSC_HAVE_AMS
#
# ---------------------------------------------------------------------------------------
#
# If you are using shared version of any external libraries you must make this
# point to the directories where all your shared libraries are stored.
#
C_DYLIBPATH     =
F_DYLIBPATH     =
