# $Id: base.site,v 1.38 2001/03/27 22:17:30 balay Exp $ 
#
#    This arch is for SGI 32 bit machines running IRIX OS 6.x;
#  If your machine is running an OS 5.x, use the PETSc arch IRIX5 instead.
#  If you are running on a 64 bit machine, an SGI PowerChallenge or Origin,
#  for example, use IRIX64.
#
#  This file contains site-specific information.  The definitions below
#  should be changed to match the locations of libraries at your site.
#  The following naming convention is used:
#     XXX_LIB - location of library XXX
#     XXX_INCLUDE - directory for include files needed for library XXX
#
# Location of BLAS and LAPACK. See ${PETSC_DIR}/docs/installation.html
# for information on retrieving them.
#
# BLAS usually comes with SGI and can be accessed with -lblas listed 
# below for BLASLAPACK_LIB. However, the complex BLAS routine izamax()
# has a bug in it, so if you are using complex numbers we recommend
# using the SGI BLAS with extreme care.
#
# BLAS usually comes with SGI. Do NOT use the parallel (library names with 
# mp in them) version of the SGI BLAS.
#
BLASLAPACK_LIB = -L/home/petsc/software/blaslapack/IRIX -lflapack -lblas

# Location of MPI (Message Passing Interface) software  
MPI_HOME       = /home/petsc/software/mpich-1.2.0/IRIX
MPI_LIB        = -L${MPI_HOME}/lib  -lmpich
MPI_INCLUDE    = -I${MPI_HOME}/include
MPIRUN         =  ${MPI_HOME}/bin/mpirun -machinefile ${PETSC_DIR}/maint/hosts.local

#
# ----------------------------------------------------------------------------------------  
#  Locations of OPTIONAL packages. Comment out those you do not have.
# ----------------------------------------------------------------------------------------  
#
# Location of X-windows software
#
X11_INCLUDE    = 
X11_LIB        = -lX11
PETSC_HAVE_X11 = -DPETSC_HAVE_X11
#
# Location of MPE
# If using MPICH version 1.1.2 or higher use the flag -DPETSC_HAVE_MPE_INITIALIZED_LOGGING
#
#MPE_INCLUDE   = -I/home/petsc/mpich/mpe 
#MPE_LIB       = -L/home/petsc/mpich/lib/IRIX/ch_shmem/ -lmpe -lpmpi
#MPE_INCLUDE    = -I${MPI_HOME}/mpe 
#MPE_LIB        = -L${MPI_HOME}/lib -lmpe
#PETSC_HAVE_MPE = -DPETSC_HAVE_MPE
#
# Location of BlockSolve (MPI version)
#
#BLOCKSOLVE_INCLUDE    = -I/home/petsc/software/BlockSolve95/include
#BLOCKSOLVE_LIB        = -L/home/petsc/software/BlockSolve95/lib/libO/${PETSC_ARCH} -lBS95
#PETSC_HAVE_BLOCKSOLVE = -DPETSC_HAVE_BLOCKSOLVE
#
# Matlab location
#
#CMEX           = /usr/local/bin/cmex 
#MCC            = 
#MATLABCOMMAND  = matlab
#PETSC_HAVE_MATLAB =  -DPETSC_HAVE_MATLAB
#
# Location where adiC is installed
#
#ADIC_DEFINES    = -Dad_GRAD_MAX=36
#ADIC_CC         = adiC -a -d gradient
#PETSC_HAVE_ADIC = -DPETSC_HAVE_ADIC
#
# Location of PVODE; Alan Hindmarsh's parallel ODE solver
# 
#PVODE_INCLUDE = -I/home/petsc/software/MPI_PVODE/include
#PVODE_LIB     = /home/petsc/software/MPI_PVODE/lib/IRIX/libpvode.a
#PETSC_HAVE_PVODE = -DPETSC_HAVE_PVODE
#
# Location of ParMetis
#
#PARMETIS_INCLUDE = -I/home/petsc/software/ParMetis-2.0
#PARMETIS_LIB     = -L/home/petsc/software/ParMetis-2.0/lib/${PETSC_ARCH} -lparmetis -lmetis
#PETSC_HAVE_PARMETIS = -DPETSC_HAVE_PARMETIS
#
#
# Location of the LUSOL sparse LU factorization code (part of MINOS)
# developed by Michael Saunders, saunders@stanford.edu at the
# Systems Optimization Laboratory, Stanford University.
#  http://www.sbsi-sol-optimize.com/
# Uses the two files mi25bfac.f and mi15blas.f (or LUSOL.f LUSOL_BLAS.f
# depending on how they are named)
#
#PETSC_HAVE_LUSOL     = -DPETSC_HAVE_LUSOL
#LUSOL_LIB = 
# ---------------------------------------------------------------------------------------
#
# If you are using shared version of any external libraries you must make this
# point to the directories where all your shared libraries are stored.
#
C_DYLIBPATH     = ${CLINKER_SLFLAG}/home/petsc/software/BlockSolve95/lib/libO/IRIX
F_DYLIBPATH     = ${FLINKER_SLFLAG}/home/petsc/software/BlockSolve95/lib/libO/IRIX
