\documentclass[draft]{article}
\usepackage{makeidx}
%\title{\(\rightarrow\)DRAFT\(\leftarrow\)\\
%Goodness of Fit Techniques}
\title{Goodness of Fit Tests\\
{\large Documentation on {\tt libcdhc.a}}\\
{\large and}\\
{\large A GRASS Tutorial on {\tt s.normal}}}
\author{James Darrell McCauley\thanks{USDA National Needs Fellow,
Department of Agricultural Engineering, Purdue University. Email:
{\tt mccauley@ecn.purdue.edu}}}
\makeindex
\addtolength{\oddsidemargin}{-.55in}
\addtolength{\evensidemargin}{-.55in}
\addtolength{\textwidth}{.1in}
\addtolength{\marginparwidth}{.45in}
\addtolength{\topmargin}{-.25in}
\addtolength{\textheight}{.5in}
\def\libname{{\tt cdhc}}
\def\returns#1{\sffamily\slshape Returns \(\mathsf{#1}\).}
\def\function#1#2{\centerline{%
\protect\index{#1}
\framebox[.9\marginparwidth][l]{\vbox{\noindent\textsf{#2}}}}
\vspace{.5\baselineskip}}
%\def\function#1#2{\marginpar{%
% \protect\index{#1}
% \framebox[.9\marginparwidth][l]{\vbox{\textsf{#2}}}\hfill}}
\newenvironment{example}{%
\vspace{\baselineskip}
\par\noindent\hrulefill\par
\noindent{\em Example:}}{%
\par\noindent\hrulefill\par
\vspace{\baselineskip}}
\begin{document}
\bibliographystyle{plain}
\maketitle
\begin{abstract}
The methods used by the GRASS program {\tt s.normal}
are presented. These are various goodness of fit statistics for testing
the null hypothesis of normality. Additional tests found in
\libname, a C programming library,
are also documented (this document serves two purposes:
a tutorial for the GRASS geographic information system
and documentation for the library).
\end{abstract}
\section{Introduction}
This document is a programmer's
manual for \libname, a C programming library useful
for testing whether a sample is normally, lognormally,
or exponentially distributed.
Prototypes for library functions\footnote{%
Each function in the library returns a pointer to static double.
The \libname\ library was inspired by Johnson's
STATLIB collection of FORTRAN routines for testing
distribution assumptions~\protect\cite{johnson94}.
Some functions in \libname\
are loosely based on Johnson's work (they have been completely
rewritten, reducing memory requirements and number
of computations and fixing a few bugs). Others are based on
algorithms found in \emph{Applied Statistics}, \emph{Technometrics},
and other related journals.}
are given in the margins near
corresponding mathematical explanations. Hence, it is also
a user's guide for programs using \libname.
Readers should be equipped with at least one graduate course
in probability and statistics. Much of the background
and derivation/justification of each test has been
omitted. A good text for more background information
is {\em Goodness-of-Fit Techniques\/} by
D'Agostino and Stephens~\cite{dagostino86b} (see also references in text).
\subsection{Hypothesis Testing}
Before beginning the description of the tests, a few definitions
should be given. The general framework for most tests is that
the {\em null\/} hypothesis \(H_0\) is that a random variable \(x\)
follows a particular distribution \(F\left(x\right)\).
Generally, the {\em alternative\/} hypothesis is that
\(x\) does not follow \(F\left(x\right)\) (with no additional
usable information; the Kotz Separate Families test in \S\ref{sec:kotz}
is one exception).
This may differ from the way that some have learned hypothesis testing
in that some tests are set up to reject the null hypothesis in
favor of the alternative.
A {\em simple\/} hypothesis implies that \(F\left(x\right)\)
is completely specified, e.g., \(x\sim N\left(0,1\right)\).
A {\em composite\/} hypothesis means that
one (or more) of the parameters of \(F\left(x\right)\)
is not completely specified, e.g., \(x\sim N\left(\mu,\sigma\right)\).
That is, the composite hypothesis may be:
\begin{displaymath}
H_0 : F\left(x\right) = F_0\left(x; \theta\right)
\end{displaymath}
where \(\theta=\left[\theta_1, \ldots,\theta_p\right]'\)
is a \(p\) vector of \emph{nuisance} parameters whose values
are unknown and must be estimated from data.
% Less is known
% about the theory of this later
% case, which is the most commonly encountered in practice.
\subsection{Probability Plots}
In addition to these analytical techniques, graphical
methods are valuable supplements. The most important
graphical technique is probability plotting. A \emph{probability plot}
\label{pplot}
is a plot of the cumulative distribution function \(F\left(x\right)\)
on the vertical axis versus \(x\) on the horizontal axis.
The vertical axis is scaled such that, if the data fit
the assumed distribution, the resulting plot will lie on
a straight line. Special plotting paper may be purchased
to do these plots; however, most modern scientific
plotting programs have this capability (e.g., {\tt gnuplot}).
Each test presented below
should be used in conjunction with a probability plot.
\subsection{Shape of Distributions}
Through much of the literature are references to Johnson
curves: \(S_U\) or \(S_B\) (see \S\ref{sec:johnson-su},
page~\pageref{sec:johnson-su}).
These refer to a system of distributions introduced by
Johnson~\cite{johnson49} where a standard normal random
variable \(Z\) is translated to \(\left(Z-\gamma\right)/\delta\)
and transformed using \(T\):
\begin{equation}
Y=T\left(\frac{Z-\gamma}{\delta}\right).
\end{equation}
Three families in Johnson's~\cite{johnson49} system are:
\begin{enumerate}
\item a family of bounded distributions, denoted by \(S_B\), where:
\begin{equation}
Y=T\left( \frac{e^x}{1+e^x} \right);
\end{equation}
\item a family of lognormal distributions where:
\begin{equation}
Y=T\left( e^x \right);
\end{equation}
\item and a family of unbounded distributions, denoted by \(S_U\), where:
\begin{equation}
Y=\sinh\left(x\right) = T\left( \frac{e^x-e^{-x}}{2} \right).
\end{equation}
\end{enumerate}
In the \(S_B\) and \(S_U\) families, \(\gamma\) and \(\delta\)
govern the shape of the distribution. In the lognormal families,
\(\delta\) governs the shape while \(\gamma\) is only a scaling
factor~\cite{hoaglin85c}. Other approaches to exploring the
shape of a distribution include \(g\)- and
\(h\)-distributions~\cite{hoaglin85c} and
Pearson curves (see Bowman~\cite{bowman86}).
\subsection{Miscellaneous}
Many tests are presented here without mention of their relative
merits. Users are advised to consult the cited literature to
determine which test is appropriate for their situation. Sometimes
a certain test will have more \emph{power} than another; that is,
a test may have a better ability to reject a model when
the model is incorrect.
\section{Moments: \(b_2\) and \(\protect\sqrt{b_1}\)}
\function{omnibus\_moments(x,n)}
{double* \\
\hbox{omnibus\_moments(x,n)}\\
double *x;\\
int n;\\
\returns{\left[\sqrt{b_1},b_2\right]'}}%
Let \(x_1, x_2, \ldots, x_n\) be the \(n\)
observations with mean:
\begin{equation}
m_1 = \frac{1}{n}\sum_{j=1}^{n} x_j.
\end{equation}
The central moments are defined as:
\begin{equation}
\label{eqn:moments}
m_i = \frac{1}{n}\sum_{j=1}^{n}\left( x_j - m_1\right)^i,\: i=2,3,4.
\end{equation}
The sample skewness \(\left(\sqrt{b_1}\right)\)
and kurtosis \(\left(b_2\right)\) are defined as:
\begin{equation}
\sqrt{b_1} = m_3/m_2^{3/2} = \sqrt{n}
\left(\sum_{j=1}^n\left(x_j-\bar{x}\right)^3\right)/
\left( \sum_{j=1}^n\left(x_j-\bar{x}\right)^2 \right)^{3/2}
\end{equation}
and
\begin{equation}
\label{eqn:4th-sample-moment}
b_2 = m_4/m_2^2.
\end{equation}
These are invariant under both origin and scale changes~\cite{bowman86}.
When a distribution is specified, these are denoted as
\(\sqrt{\beta_1}\) and \(\beta_2\).
For a standard normal, \(\sqrt{\beta_1}=0\) and \(\beta_2=3\).
To use either or both of these statistics to test for
departure from normality, they are sometimes transformed
to their standardized normal equivalent
deviates, \(X\left(\sqrt{b_1}\right)\) and \(X\left(b_2\right)\).
For \(X\left(\sqrt{b_1}\right)\), D'Agostino and
Pearson~\cite{dagostino73} gave coefficients \(\delta\)
and \(\lambda\) (\(n=8\) to 1000) for:
\begin{equation}
X\left(\sqrt{b_1}\right) = \delta \sinh^{-1}
\left(\sqrt{b_1}/\lambda\right)
\end{equation}
that transforms \(\sqrt{b_1}\) to a standard normal
using a Johnson \(S_U\) approximation (Table~\ref{tbl:johnson}).
\label{sec:johnson-su}
An equivalent approximation~\cite{dagostino86}
that avoids the use of tables is given by:
\begin{enumerate}
\item Compute \(\sqrt{b_1}\) from the sample data.
\item Compute:
\begin{eqnarray}
Y &=& \sqrt{b_1} \left[\frac{\left(n+1\right)\left(n+3\right)}
{6\left(n-2\right)}\right]^{\frac{1}{2}}, \\
\beta_2 &=& \frac{3\left(n^2+27n-70\right)\left(n+1\right)\left(n+3\right)}
{\left(n-2\right)\left(n+5\right)\left(n+7\right)\left(n+9\right)},\\
W^2 &=& \sqrt{2\left(\beta_2-1\right)}-1, \\
\delta &=& 1/\sqrt{\log W}, \\
\mbox{and}\\
\alpha &=& \sqrt{2/\left(W^2-1\right)}.
\end{eqnarray}
\item Compute the standard normal variable:
\begin{equation}
Z = \delta \log\left[Y/\alpha + \sqrt{\left(Y/\alpha\right)^2+1}\,\right].
\end{equation}
\end{enumerate}
This procedure is applicable for \(n\ge8\).
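A minimal C sketch of this procedure is given below (illustrative
only: the function name and interface are not part of \libname, and
the logarithms are taken here as natural logarithms):
\begin{verbatim}
#include <math.h>

/* Normal approximation for the sqrt(b1) test, valid for n >= 8.
 * Returns the standard normal deviate Z for the sample x[0..n-1]. */
double sqrt_b1_z(const double *x, int n)
{
    double mean = 0.0, m2 = 0.0, m3 = 0.0;
    double sb1, Y, beta2, W2, delta, alpha;
    int j;

    for (j = 0; j < n; j++)
        mean += x[j];
    mean /= n;
    for (j = 0; j < n; j++) {
        double d = x[j] - mean;
        m2 += d * d;
        m3 += d * d * d;
    }
    m2 /= n;
    m3 /= n;
    sb1 = m3 / pow(m2, 1.5);              /* sample skewness sqrt(b1) */

    Y = sb1 * sqrt((n + 1.0) * (n + 3.0) / (6.0 * (n - 2.0)));
    beta2 = 3.0 * ((double) n * n + 27.0 * n - 70.0) * (n + 1.0) * (n + 3.0)
          / ((n - 2.0) * (n + 5.0) * (n + 7.0) * (n + 9.0));
    W2 = sqrt(2.0 * (beta2 - 1.0)) - 1.0;
    delta = 1.0 / sqrt(log(sqrt(W2)));    /* log W = (1/2) log W^2 */
    alpha = sqrt(2.0 / (W2 - 1.0));

    return delta * log(Y / alpha + sqrt((Y / alpha) * (Y / alpha) + 1.0));
}
\end{verbatim}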
%D'Agostino~\cite{dagostino86} also notes
%that the normal approximation given by
%\begin{equation}
%\sqrt{\beta_1}\left[\frac{\left(n+1\right)\left(n+3\right)}
%{6\left(n-2\right)}\right]^{\frac{1}{2}}
%\end{equation}
%is valid for \(n\ge150\)~\cite{dagostino86}.
\begin{example}
For the sample data given in Table~\ref{tbl:pine} (\(n=584\)),
\(\sqrt{b_1} = 0.2373\). Suppose that we wish to test the
hypothesis of normality:
\(H_0\): \(\sqrt{\beta_1}=0\) (normality)
\noindent versus the two-sided alternative
\(H_1\): \(\sqrt{\beta_1}\ne0\) (non-normality)
\noindent at a level of significance of 0.05.
Following the procedure given above,
\(Y =2.3454\),
\(\beta_2 = 3.0592\),
\(W^2 = 1.0294\),
\(\delta=12.6132\),
\(\alpha=8.2522\), and
\(Z=1.5367\).
At a 0.05 significance level for a two-sided test, we reject
the null hypothesis of normality if \(\left|Z\right|\ge1.96\). In
this instance, we cannot reject \(H_0\).
\end{example}
The fourth standardized moment \(b_2\) may be used to
test the normality hypothesis by the following
procedure~\cite{anscombe63}:
\begin{enumerate}
\item Compute \(b_2\) from the sample data.
\item Compute the mean and variance of \(b_2\):
\begin{equation}
E\left(b_2\right) = \frac{3\left(n-1\right)}{n+1}
\end{equation}
and
\begin{equation}
Var\left(b_2\right) = \frac{24n\left(n-2\right)\left(n-3\right)}
{\left(n+1\right)^2\left(n+3\right)\left(n+5\right)}.
\end{equation}
\item Compute the standardized value of \(b_2\):
\begin{equation}
y = \frac{b_2-E\left(b_2\right)}{\sqrt{Var\left(b_2\right)}}.
\end{equation}
\item Compute the third standardized moment of \(b_2\):
\begin{equation}
\sqrt{\beta_1\left(b_2\right)} =
\frac{6\left(n^2-5n+2\right)}{\left(n+7\right)\left(n+9\right)}
\sqrt{\frac{6\left(n+3\right)\left(n+5\right)}
{n\left(n-2\right)\left(n-3\right)}}.
\end{equation}
\item Compute:
\begin{equation}
A=6+\frac{8}{\sqrt{\beta_1\left(b_2\right)}}\left[
\frac{2}{\sqrt{\beta_1\left(b_2\right)}} +
\sqrt{1+\frac{4}{\sqrt{\beta_1\left(b_2\right)}}}\,\right].
\end{equation}
\item Compute:
\begin{equation}
\label{eqn:z-b2}
Z = \left(\left(1-\frac{2}{9A}\right)-
\left[\frac{1-2/A}{1+y\sqrt{2/\left(A-4\right)}}\right]^{\frac{1}{3}}\right)/
\sqrt{2/\left(9A\right)}
\end{equation}
where \(Z\) is a standard normal variable with
zero mean and variance of one.
\end{enumerate}
\begin{example}
For the sample data given in Table~\ref{tbl:pine} (\(n=584\)),
\(b_2 =1.9148\). Suppose that we wish to test the
hypothesis of normality:
\(H_0\): \(\beta_2=3\) (normality)
\noindent versus the one-sided alternative
\(H_1\): \(\beta_2>3\) (non-normality)
\noindent at a level of significance of 0.05. We would
reject \(H_0\) if \(Z\) (eqn.~\ref{eqn:z-b2}) is larger
than 1.645 (Table~\ref{tbl:normal}). Following the procedure given above,
\(E\left(b_2\right)=2.9897\),
\(Var\left(b_2\right)=0.0401\),
\(y=-26.8366\),
\(\sqrt{\beta_1\left(b_2\right)}=0.0989\),
\(A=2163\), and
\(Z=-131.7\).
Therefore, we cannot reject \(H_0\).
\end{example}
\subsection{Omnibus Tests for Normality}
\section{Geary's Test of Normality}
\label{sec:geary}
\function{geary\_test(x,n)}
{double*\\
\hbox{geary\_test(x,n)}\\
double *x;\\
int n;\\
\returns{\left[\sqrt{a},y\right]'}}
Let \(x_1, x_2, \ldots, x_n\) be the \(n\)
observations. The ratio of the mean deviation
to the standard deviation is given as:
\begin{equation}\label{eqn:geary}
a = \frac{1}{n\sqrt{m_2}}\sum_{j=1}^n \left|x_j-\bar{x}\right|
\end{equation}
where \(\bar{x}=\frac{1}{n}\sum_{i=1}^n x_i\) and \(m_2\) is defined
by eqn.~\ref{eqn:moments}.
This ratio can be transformed to
a standard normal~\cite{dagostino86} via
\begin{equation}\label{eqn:geary-normal}
y = \frac{\sqrt{n}\left(a-0.7979\right)}{0.2123}.
\end{equation}
This test is valid for \(n\ge41\).
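A corresponding C sketch follows (illustrative only; the \libname\
function {\tt geary\_test} instead returns a pointer to a static
array):
\begin{verbatim}
#include <math.h>

/* Geary's ratio a (mean deviation over standard deviation) and its
 * approximate normal deviate y; the latter is valid for n >= 41. */
void geary(const double *x, int n, double *a, double *y)
{
    double mean = 0.0, m2 = 0.0, sumabs = 0.0;
    int j;

    for (j = 0; j < n; j++)
        mean += x[j];
    mean /= n;
    for (j = 0; j < n; j++) {
        double d = x[j] - mean;
        m2 += d * d;
        sumabs += fabs(d);
    }
    m2 /= n;                              /* second central moment */

    *a = sumabs / (n * sqrt(m2));
    *y = sqrt((double) n) * (*a - 0.7979) / 0.2123;
}
\end{verbatim}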
More generally, Geary~\cite{geary47} considered tests of the
form
\begin{equation}
a\left(c\right) =
\frac{1}{nm_2^{c/2}}
\sum_{j=1}^n \left|x_j-\bar{x} \right|^c \: \mbox{for}\: c\ge1
\end{equation}
where \(a\left(1\right)=a\) of eqn.~\ref{eqn:geary}, and
\(a\left(4\right)=b_2\) of eqn.~\ref{eqn:4th-sample-moment}.
D'Agostino and Rosman~\cite{dagostino74} conclude that
Geary's \(a\) test has good power for symmetric alternatives
and skewed alternatives with \(\beta_2 < 3\) when compared to
other tests, though for symmetric alternatives, \(b_2\)
(eqn.~\ref{eqn:4th-sample-moment}) can sometimes be more powerful and
for skewed alternatives, \(W\) (eqn~\ref{eqn:w-test})
or \(W'\) (eqn~\ref{eqn:w-prime-test})
usually dominate \(a\).
The Geary test (eqns.~\ref{eqn:geary}-\ref{eqn:geary-normal})
is seldom used today---D'Agostino~\cite{dagostino86} includes it
in his summary work because it is of ``historical interest.''
\begin{example}
For the sample data given in Table~\ref{tbl:pine} (\(n=584\)),
\(a = 0.8823\). Suppose that we wish to test the
hypothesis of normality:
\(H_0\): normality
\noindent versus the two-sided alternative
\(H_1\): non-normality
\noindent at a level of significance of 0.05.
From eqn.~\ref{eqn:geary-normal}, \(y=9.9607\).
Since \(\left|y\right|\ge1.96\), we reject \(H_0\).
\end{example}
\section{Extreme Normal Deviates}
\label{sec:extreme}
\function{extremes(x,n)}
{double* \\
\hbox{extremes(x,n)}\\
double *x;\\
int n;\\
\returns{\left[x_n-\bar{x}, x_1-\bar{x}\right]'}}
Let \(x_1 \le x_2 \le \cdots \le x_n\) be the \(n\)
observations. Given a known normal deviation \(\sigma\),
the largest and smallest deviation from a normal population
may be computed:
\begin{equation}
u_n = \frac{x_n-\bar{x}}{\sigma}
\end{equation}
and
\begin{equation}
u_1 = -\frac{x_1-\bar{x}}{\sigma},
\end{equation}
respectively. These statistics are potentially
useful for detecting outliers for populations
with a known \(\sigma\) but an unknown mean.
Table 25 in Pearson and Hartley~\cite{pearson76}
gives percentage points for this statistic.
Pearson and Hartley~\cite{pearson76} also give examples
of the use of extreme deviates when an estimator of
\(\sigma\) (independent of the sample) is
known and when a combined ``internal''
and ``external'' estimate is used.
\section{EDF Statistics for Testing Normality}
[Note: This section follows closely the presentation of
Stephens~\cite{stephens86}.]
Let \(x_1 \le x_2 \le \cdots \le x_n\) be the \(n\)
observations. Suppose that the continuous distribution of \(x\)
is \(F\left(x\right)\). The empirical distribution function (EDF)
is \(F_n\left(x\right)\) defined by:
\begin{equation}
F_n\left(x\right) = \frac{1}{n}\left(\mbox{number of observations}
\le x\right); \: -\infty < x < \infty
\end{equation}
or
\begin{displaymath}
\begin{array}{rclll}
F_n\left(x\right)& = &0, & x<x_1\\
F_n\left(x\right)& = &\frac{i}{n}, & x_i\le x<x_{i+1}, & i=1,\ldots,n-1\\
F_n\left(x\right)& = &1, & x_n\le x.
\end{array}
\end{displaymath}
Thus \(F_n\left(x\right)\) is a step function calculated from
the data. As \(n\rightarrow\infty\),
\(\left|F_n\left(x\right)- F\left(x\right)\right|\)
decreases to zero with probability one~\cite{stephens86}.
EDF statistics that measure the difference between
\(F_n\left(x\right)\) and \(F\left(x\right)\) are divided
into two classes: supremum and quadratic.
On a graph of
\(F_n\left(x\right)\) and \(F\left(x\right)\) versus \(x_i\),
denote the largest vertical distance when
\(F_n\left(x\right)>F\left(x\right)\) as \(D^+\).
Also, let \(D^-\) denote the largest vertical distance when
\(F_n\left(x\right)<F\left(x\right)\). These two
measures are supremum statistics.
Quadratic statistics are given by the Cram\'er--von Mises family
\begin{equation}
\label{eqn:cramer-family}
Q = n\int_{-\infty}^{\infty}
\left(F_n\left(x\right) - F\left(x\right)\right)^2
\psi\left(x\right) d F\left(x\right)
\end{equation}
where \(\psi\left(x\right)\) is a weighting function~\cite{stephens86}.
To compute these statistics, the Probability Integral Transformation
is used: \(z=F\left(x\right)\) where \(F\left(x\right)\) is
the Gaussian distribution. The new variable, \(z\), is uniformly
distributed between 0 and 1. Then \(z\) has distribution
function \(F^*\left(z\right)=z\), \(0\le z\le1\).
A sample \(x_1, x_2, \ldots, x_n\) gives values \(z_i=F\left(x_i\right)\),
\(i=1, \ldots, n\), and \(F^*_n\left(z\right)\) is the EDF of
values \(z_i\). For testing normality,
\begin{equation}
z_{\left(i\right)} = \Phi\left(
\left(x_{\left(i\right)}-\hat{\mu}\right)/\hat{\sigma}
\right)
\end{equation}
where \(\hat{\mu}\) and \(\hat{\sigma}\) are estimated from
the data and \(\Phi\left(\cdot\right)\) denotes the cumulative
probability of a standard normal. For testing if the data
follow an exponential distribution \(\mbox{Exp}\left(\alpha,\beta\right)\),
where \(\alpha\) is known to be zero, \(\hat{\beta}\)
is estimated by \(\bar{x}\) (the sample mean) and
\begin{equation}
z_{\left(i\right)} = 1-\exp\left(-x_{\left(i\right)}/\bar{x}\right).
\end{equation}
Now, EDF statistics can be computed by comparing \(F^*_n\left(z\right)\)
and a uniform distribution for \(z\). These take the same values
as comparisons between \(F_n\left(x\right)\) and \(F\left(x\right)\):
\begin{equation}
F_n\left(x\right) - F\left(x\right) =
F^*_n\left(z\right) - F^*\left(z\right) =
F^*_n\left(z\right) - z.
\end{equation}
After ordering \(z\)-values,
\(z_{\left(1\right)}\le
z_{\left(2\right)} \le \cdots
\le z_{\left(n\right)}\) and computing \(\bar{z}=\sum_{i=1}^n z_i/n\),
the supremum statistics are
\begin{equation}
\label{eqn:dplus}
D^+=\max_{i=1,\ldots,n}\left(i/n-z_{\left(i\right)}\right)
\end{equation}
and
\begin{equation}
\label{eqn:dminus}
D^-=\max_{i=1,\ldots,n}\left(z_{\left(i\right)}-\left(i-1\right)/n\right).
\end{equation}
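These definitions translate directly into code. Below is a minimal C
sketch for the normality case (illustrative, not the \libname\
interface; it assumes \(\hat{\sigma}^2\) uses divisor \(n-1\) and
computes \(\Phi\) with the C99 {\tt erf} function):
\begin{verbatim}
#include <math.h>
#include <stdlib.h>

static int cmp_double(const void *a, const void *b)
{
    double d = *(const double *) a - *(const double *) b;
    return (d > 0) - (d < 0);
}

/* Supremum statistics D+ and D- for a test of normality with mu and
 * sigma estimated from the data (variance divisor n-1 is an
 * assumption; the library may differ). */
void edf_supremum(const double *x, int n, double *dplus, double *dminus)
{
    double *z = malloc(n * sizeof *z);
    double mean = 0.0, var = 0.0;
    int i;

    for (i = 0; i < n; i++)
        mean += x[i];
    mean /= n;
    for (i = 0; i < n; i++)
        var += (x[i] - mean) * (x[i] - mean);
    var /= (n - 1);

    for (i = 0; i < n; i++)          /* probability integral transform */
        z[i] = 0.5 * (1.0 + erf((x[i] - mean) / sqrt(2.0 * var)));
    qsort(z, n, sizeof *z, cmp_double);

    *dplus = *dminus = 0.0;
    for (i = 0; i < n; i++) {
        double up = (i + 1.0) / n - z[i];    /* i/n - z_(i)     */
        double dn = z[i] - (double) i / n;   /* z_(i) - (i-1)/n */
        if (up > *dplus)  *dplus = up;
        if (dn > *dminus) *dminus = dn;
    }
    free(z);
}
\end{verbatim}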
\subsection{Kolmogorov \(D\)}
\function{kolmogorov\_smirnov(x,n)}
{double* \\
\hbox{kolmogorov\_smirnov(x,n)}\\
double *x;\\
int n;\\
\returns{\left[D^n,D\right]'}}
\function{kolmogorov\_smirnov\_exp(x,n)}
{double* \\
\hbox{kolmogorov\_smirnov\_exp(x,n)}\\
double *x;\\
int n;\\
\returns{\left[D^e,D\right]'}}
The most well-known EDF statistic is Kolmogorov's \(D\), computed
from supremum statistics:
\begin{equation}
D = \sup_x\left|F_n\left(x\right) - F\left(x\right)\right| =
\max\left(D^+,D^-\right).
\end{equation}
The modified form for testing a completely specified
distribution is~\cite{stephens86}:
\begin{equation}
D^*=D\left(\sqrt{n}+0.12+0.11/\sqrt{n}\right).
\end{equation}
For testing a normal distribution with \(\mu\) and \(\sigma\)
unknown, the modified equation is~\cite{stephens86}:
\begin{equation}
D^n=D\left(\sqrt{n}-0.01+0.85/\sqrt{n}\right).
\end{equation}
For testing an exponential distribution with \(\alpha\) and \(\beta\)
% origin and scale
unknown, \(D\) does not need to be modified~\cite{stephens86}.
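As a small illustration (again not the library's interface), \(D^+\)
and \(D^-\) from the sketch of the previous section combine into
\(D\) and the modified \(D^n\) as follows:
\begin{verbatim}
#include <math.h>

/* Kolmogorov's D = max(D+, D-) and the modified D^n for the normal
 * case with mu and sigma unknown (Stephens 1986). */
double kolmogorov_dn(double dplus, double dminus, int n)
{
    double d = dplus > dminus ? dplus : dminus;
    double rn = sqrt((double) n);
    return d * (rn - 0.01 + 0.85 / rn);
}
\end{verbatim}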
\begin{example}
For the sample data given in Table~\ref{tbl:pine} (\(n=584\)),
\(D^n = 4.0314\). Suppose that we wish to test the
hypothesis of normality:
\(H_0\): normality
\noindent versus the two-sided alternative
\(H_1\): non-normality
\noindent at a level of significance of 0.05.
Since \(D^n\) exceeds 0.895, the critical value of \(D\)
for \(\alpha=0.05\), we reject \(H_0\).
\end{example}
\subsection{Kuiper's \(V\)}
\label{sec:kuiper}
\function{kuipers\_v(x,n)}
{double* \\
\hbox{kuipers\_v(x,n)}\\
double *x;\\
int n;\\
\returns{\left[V^n,V\right]'}}
\function{kuipers\_v\_exp(x,n)}
{double* \\
\hbox{kuipers\_v\_exp(x,n)}\\
double *x;\\
int n;\\
\returns{\left[V^e,V\right]'}}
Kuiper's~\cite{kuiper60} \(V\) is another statistic computed
from supremum statistics:
\begin{equation}
\label{eqn:kuipers-v}
V = D^+ + D^-.
\end{equation}
The modified form for testing a completely specified
distribution is~\cite{stephens86}:
\begin{equation}
V^*=V\left(\sqrt{n}+0.155 +0.24/\sqrt{n}\right).
\end{equation}
For testing a normal distribution with \(\mu\) and \(\sigma\)
unknown, the modified equation is~\cite{stephens86}:
\begin{equation}
V^n=V\left(\sqrt{n}+0.05+0.82/\sqrt{n}\right).
\end{equation}
For testing an exponential distribution with \(\alpha\) and \(\beta\)
unknown, the modified equation is~\cite{stephens86}:
\begin{equation}
V^e=\left(V-0.2/\sqrt{n}\right)
\left(\sqrt{n}+0.24+0.35/\sqrt{n}\right).
\end{equation}
\subsection{Pyke's Statistics}
\label{sec:pyke}
For some purposes, eqns.~\ref{eqn:dplus} and~\ref{eqn:dminus}
may be modified to~\cite{pyke59}:
\begin{equation}
\label{eqn:cplus}
C^+=\max_{0\le i\le n}\left(\frac{i}{n+1}-z_{\left(i\right)}\right),\:
z_{\left(0\right)}=0,
\end{equation}
and
\begin{equation}
\label{eqn:cminus}
C^-=\max_{0\le i\le n}\left(z_{\left(i\right)}-\frac{i}{n+1}\right)
\end{equation}
(following the modification of notation by Durbin~\cite{durbin73}). Then,
\begin{equation}
C = \max\left(C^+,C^-\right).
\end{equation}
Durbin~\cite{durbin73} notes that these modifications to
eqns.~\ref{eqn:dplus} and~\ref{eqn:dminus} are related to
the fact that \(E\left(z_{\left(i\right)}\right)=i/\left(n+1\right)\).
Percentage points were given by Durbin~\cite{durbin69}.
\subsection{Brunk's \(B\)}
\label{sec:brunk}
As an alternative to Kuiper's \(V\) (eqn.~\ref{eqn:kuipers-v}),
Brunk~\cite{brunk62} suggests:
\begin{equation}
\label{eqn:brunks-b}
B = C^+ + C^-
\end{equation}
where \(C^+\) and \(C^-\) are given by eqns.~\ref{eqn:cplus}
and \ref{eqn:cminus}.
\subsection{Cram\'er--von Mises \(W^2\)}
\label{sec:cramer-von-mises}
\function{cramer\_von\_mises(x,n)}
{double* \\
\hbox{cramer\_von\_mises(x,n)}\\
double *x;\\
int n;\\
\returns{\left[W^{2,n},W^2\right]'}}
\function{cramer\_von\_mises\_exp(x,n)}
{double* \\
\hbox{cramer\_von\_mises\_exp(x,n)}\\
double *x;\\
int n;\\
\returns{\left[W^{2,e},W^2\right]'}}
Quadratic statistics are computed from
the Cram\'er--von Mises family given in eqn~\ref{eqn:cramer-family}.
When \(\psi\left(x\right)=1\) in eqn~\ref{eqn:cramer-family}, the statistic is
the Cram\'er--von Mises statistic \(W^2\):
\begin{equation}
W^2=\sum_{j=1}^n\left(z_{\left(j\right)} - \left(2j-1\right)/\left(2n\right)\right)^2
+\frac{1}{12n}
\end{equation}
(When \(\psi\left(x\right)=
\left(F\left(x\right)\left(1 - F\left(x\right)\right)\right)^{-1}\),
this yields the Anderson--Darling statistic given below
in \S\ref{sec:anderson-darling}~\cite{stephens86}.)
The modified form for testing a completely specified
distribution is~\cite{stephens86}:
\begin{equation}
W^{2,*} = \left(W^2-0.4/n +0.6/n^2\right)/\left(1 + 1/n\right).
\end{equation}
For testing a normal distribution with \(\mu\) and \(\sigma\)
unknown, the modified equation is~\cite{stephens86}:
\begin{equation}
W^{2,n}=W^2\left(1.0 + 0.5/n\right).
\end{equation}
For testing an exponential distribution with \(\alpha\) and \(\beta\)
unknown, the modified equation is~\cite{stephens86}:
\begin{equation}
W^{2,e}=W^2\left(1.0 + 2.8/n -3/n^2\right).
\end{equation}
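A C sketch of \(W^2\) and its normal-case modification, computed
from the ordered \(z\)-values of the previous section (illustrative,
not the \libname\ interface):
\begin{verbatim}
/* Cramer--von Mises W^2 from the ordered z-values z[0] <= ... <=
 * z[n-1], with the modification W^{2,n} for the normal case with mu
 * and sigma unknown (Stephens 1986). */
double cramer_von_mises_w2(const double *z, int n, double *w2n)
{
    double w2 = 1.0 / (12.0 * n);
    int j;

    for (j = 1; j <= n; j++) {
        double d = z[j - 1] - (2.0 * j - 1.0) / (2.0 * n);
        w2 += d * d;
    }
    if (w2n)
        *w2n = w2 * (1.0 + 0.5 / n);
    return w2;
}
\end{verbatim}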
\subsection{Watson \(U^2\)}
\label{sec:watson}
\function{watson\_u2(x,n)}
{double* \\
\hbox{watson\_u2(x,n)}\\
double *x;\\
int n;\\
\returns{\left[U^{2,n}, U^{2}\right]'}}
\function{watson\_u2\_exp(x,n)}
{double* \\
\hbox{watson\_u2\_exp(x,n)}\\
double *x;\\
int n;\\
\returns{\left[U^{2,e}, U^{2}\right]'}}
The Watson statistic is
\begin{equation}
U^2=W^2-n\left(\bar{z}-0.5\right)^2
\end{equation}
where \(W^2\) is the Cram\'er--von Mises statistic
(\S\ref{sec:cramer-von-mises}).
The modified form for testing a completely specified
distribution is~\cite{stephens86}:
\begin{equation}
U^{2,*} = \left(U^2-0.1/n +0.1/n^2\right)/\left(1 + 0.8/n\right).
\end{equation}
For testing a normal distribution with \(\mu\) and \(\sigma\)
unknown, the modified equation is~\cite{stephens86}:
\begin{equation}
U^{2,n}=U^2\left(1.0 + 0.5/n\right).
\end{equation}
For testing an exponential distribution with \(\alpha\) and \(\beta\)
unknown, the modified equation is~\cite{stephens86}:
\begin{equation}
U^{2,e}=U^2\left(1.0 + 2.3/n -3/n^2\right).
\end{equation}
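In the same illustrative style:
\begin{verbatim}
/* Watson's U^2 from W^2 and the mean zbar of the z-values, with the
 * normal-case modification U^{2,n} (Stephens 1986). */
double watson_u2_stat(double w2, double zbar, int n, double *u2n)
{
    double u2 = w2 - n * (zbar - 0.5) * (zbar - 0.5);
    if (u2n)
        *u2n = u2 * (1.0 + 0.5 / n);
    return u2;
}
\end{verbatim}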
\subsection{Anderson--Darling \(A^2\)}
\label{sec:anderson-darling}
\function{anderson\_darling(x,n)}
{double* \\
\hbox{anderson\_darling(x,n)}\\
double *x;\\
int n;\\
\returns{\left[A^{2,n}, A^{2}\right]'}}
\function{anderson\_darling\_exp(x,n)}
{double* \\
\hbox{anderson\_darling\_exp(x,n)}\\
double *x;\\
int n;\\
\returns{\left[A^{2,e}, A^{2}\right]'}}
Anderson and Darling~\cite{anderson54} present
another EDF test statistic which is sensitive at the
tails of the distribution (rather than near
the median).
When \(\psi\left(x\right)=
\left(F\left(x\right)\left(1 - F\left(x\right)\right)\right)^{-1}\)
in eqn.(\ref{eqn:cramer-family}),
this yields the Anderson--Darling statistic~\cite{anderson54,stephens86}:
\begin{equation}
A^2 = -n - \frac{1}{n} \sum_{j=1}^n \left(2j-1\right)
\left[ \ln z_j + \ln\left(1-z_{n-j+1}\right)\right].
\end{equation}
Equivalently~\cite{stephens86},
\begin{equation}
A^2 = -n - \frac{1}{n} \sum_{j=1}^n\left[ \left(2j-1\right)
\ln z_j + \left(2n+1-2j\right) \ln\left(1-z_{j}\right)\right].
\end{equation}
Anderson and Darling~\cite{anderson54} give
the following asymptotic significance values of \(A^2\):
\begin{center}
\begin{tabular}{cc}\hline
Significance & Significance \\
Level & Point \\ \hline \hline
0.10 & 1.933\\
0.05 & 2.492\\
0.01 & 3.857\\ \hline
\end{tabular}
\end{center}
Anderson and Darling~\cite{anderson54} state that
sample size should be at least 40; however, Stephens~\cite{stephens86}
gives the same asymptotic values (for more significance levels)
for a sample size \(\ge5\).
For testing a completely specified distribution, \(A^2\)
is used unmodified.
For testing a normal distribution with \(\mu\) and \(\sigma\)
unknown, the modified equation is~\cite{stephens86}:
\begin{equation}
A^{2,n}=A^2\left(1.0 + 0.75/n+2.25/n^2\right).
\end{equation}
For testing an exponential distribution with \(\alpha\) and \(\beta\)
unknown, the modified equation is~\cite{stephens86}:
\begin{equation}
A^{2,e}=A^2\left(1.0 + 5.4/n -11/n^2\right).
\end{equation}
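A C sketch of the second form of \(A^2\) with the normal-case
modification (illustrative, not the \libname\ interface; the ordered
\(z\)-values must lie strictly between 0 and 1):
\begin{verbatim}
#include <math.h>

/* Anderson--Darling A^2 from the ordered z-values z[0] <= ... <=
 * z[n-1], plus the modification A^{2,n} for the normal case with mu
 * and sigma unknown (Stephens 1986). */
double anderson_darling_a2(const double *z, int n, double *a2n)
{
    double s = 0.0;
    int j;

    for (j = 1; j <= n; j++)
        s += (2.0 * j - 1.0) * log(z[j - 1])
           + (2.0 * n + 1.0 - 2.0 * j) * log(1.0 - z[j - 1]);
    s = -n - s / n;
    if (a2n)
        *a2n = s * (1.0 + 0.75 / n + 2.25 / ((double) n * n));
    return s;
}
\end{verbatim}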
\subsection{Durbin's Exact Test}
\label{sec:durbin}
\function{durbins\_exact(x,n)}
{double* \\
\hbox{durbins\_exact(x,n)}\\
double *x;\\
int n;\\
\returns{\left[K_m,\sqrt{n}K_m\right]'}}
Durbin~\cite{durbin61} presented a modified
Kolmogorov test. The discussion that follows
has been adapted from Durbin's work~\cite{durbin61}.
Let \(x_1, x_2, \ldots, x_n\) be the \(n\)
i.i.d.\ observations and suppose that
it is desired to test the hypothesis that
they come from the continuous distribution \(F\left(x\right)\).
If the null hypothesis is true, then \(u_j=F\left(x_j\right)\)
(\(j=1,\ldots,n)\) are independent \(U\left(0,1\right)\)
variables and are randomly scattered on the (0,1) interval.
Clustering may indicate a departure from the null hypothesis.
Denoting the ordered \(u\)'s by
\(0 \le u_{\left(1\right)} \le \cdots \le u_{\left(n\right)} \le 1\),
let \(c_1=u_{\left(1\right)}\),
\(c_j=u_{\left(j\right)}-u_{\left(j-1\right)}\)
(\(j=2,\ldots,n\)), and \(c_{n+1}=1-u_{\left(n\right)}\).
Since the interest is in relative magnitudes of \(c\)'s, these
are ordered:
\(c_{\left(1\right)} \le c_{\left(2\right)} \le \cdots
\le c_{\left(n\right)}\). Then, the following transformation
is applied:
\begin{equation}
\label{eqn:durbin:g}
g_j=\left(n+2-j\right)\left(c_{\left(j\right)}
-c_{\left(j-1\right)}\right)\:
\left(c_{\left(0\right)}=0;\: j=1,\ldots,n+1\right).
\end{equation}
Durbin~\cite{durbin61} shows that the \(g\)'s, which depend
on the {\em ordered\/} intervals, have the same
distribution as the {\em unordered\/} \(c\)'s.
Letting
\begin{equation}
\label{eqn:durbin:w}
w_r = \sum_{j=1}^r g_j
\end{equation}
it follows that \(w_1, \ldots, w_n\) have the same distribution
as the ordered \(U\left(0,1\right)\) variables
\(u_{\left(1\right)}, \ldots, u_{\left(n\right)}\).
From eqns.~\ref{eqn:durbin:g} and \ref{eqn:durbin:w}, \(w_j\)
can be expressed as:
\begin{equation}
w_j=c_{\left(1\right)} + \cdots
+ c_{\left(j-1\right)} + \left(n+2-j\right)c_{\left(j\right)},\:
\left(j=1,\ldots,n\right),
\end{equation}
where
\(c_{\left(1\right)} \le \cdots \le c_{\left(n\right)}\) is
the ordered set of intervals.
In addition to two other tests, Durbin~\cite{durbin61} introduces
the {\em modified Kolmogorov test}. The test statistic is:
\begin{equation}
K_m = \max_{r=1,\ldots,n}\left(\frac{r}{n}-w_r\right).
\end{equation}
The test procedure is to reject when \(K_m\) is greater
than the value tabulated for a one-sided Kolmogorov test.
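The transformation lends itself to a short implementation. A C sketch
(illustrative; the \libname\ function {\tt durbins\_exact} differs in
interface) computes \(K_m\) from the ordered \(u\)-values:
\begin{verbatim}
#include <stdlib.h>

static int cmp_dbl(const void *a, const void *b)
{
    double d = *(const double *) a - *(const double *) b;
    return (d > 0) - (d < 0);
}

/* Durbin's modified Kolmogorov statistic K_m from the ordered
 * u-values u[0] <= ... <= u[n-1], where u_j = F(x_j) under H_0. */
double durbin_km(const double *u, int n)
{
    double *c = malloc((n + 1) * sizeof *c);
    double w = 0.0, km = 0.0;
    int j;

    c[0] = u[0];                       /* c_1 = u_(1)           */
    for (j = 1; j < n; j++)
        c[j] = u[j] - u[j - 1];        /* c_j = u_(j) - u_(j-1) */
    c[n] = 1.0 - u[n - 1];             /* c_{n+1} = 1 - u_(n)   */
    qsort(c, n + 1, sizeof *c, cmp_dbl);

    /* w_r = sum_{j<=r} g_j with g_j = (n+2-j)(c_(j) - c_(j-1)) */
    for (j = 1; j <= n; j++) {
        double prev = (j == 1) ? 0.0 : c[j - 2];
        w += (n + 2.0 - j) * (c[j - 1] - prev);
        if ((double) j / n - w > km)
            km = (double) j / n - w;
    }
    free(c);
    return km;
}
\end{verbatim}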
\begin{example}
For the sample data given in Table~\ref{tbl:pine} (\(n=584\)),
\(K_m = 0.4127\). To test the
hypothesis of normality:
\(H_0\): normality
\noindent versus the one-sided alternative
\(H_1\): non-normality
\noindent at a level of significance of 0.05, we would
reject \(H_0\) if \(K_m\) is larger
than 0.895 (the critical value of \(D\) for \(\alpha=0.05\)).
Therefore, we cannot reject \(H_0\).
\end{example}
\section{Chi-Square Test}
\function{chi\_square(x,n)}
{double* \\
\hbox{chi\_square(x,n)}\\
double *x;\\
int n;\\
\returns{\left[x^2,k-3\right]'}}
\function{chi\_square\_exp(x,n)}
{double* \\
\hbox{chi\_square\_exp(x,n)}\\
double *x;\\
int n;\\
\returns{\left[x^2,k-2\right]'}}
According to Shapiro~\cite{shapiro90},
the chi-square goodness of fit test is the oldest
procedure for testing distributional assumptions.
It is useful for testing normality and exponentiality
when the number of observations is large (because its power
is poor for small samples when compared to other tests).
It is also useful when data are discrete~\cite{shapiro90}.
The basic idea is to divide the \(n\) data into \(k\) cells
and compare the observed number in each cell with the
expected number in each cell. The resulting statistic
is distributed as a chi-square random variable with
\(k-1-t\) degrees of freedom, where \(t\) is the number
of parameters estimated. The number of cells is taken
as
\begin{equation}
k=\mbox{(int)} 4\left[0.75\left(n-1\right)^2\right]^{1/5}.
\end{equation}
\marginpar{what should the notation be for rounding? For ceil,
we use \(\lceil x\rceil\). For floor, we use \(\lfloor x\rfloor\).}
The ratio \(n/k\) should be at least 5; otherwise another
test should be used~\cite{shapiro90}. In this implementation,
\(k\) is decremented by one until \(n/k\ge5\).
Let \(x_{\left(1\right)},
x_{\left(2\right)},\ldots, x_{\left(k\right)}\)
be the upper boundaries of cells. Choose \(x_{\left(i\right)}\)
so that the probability of being in any cell
is the same:
\begin{equation}
P\left(x\le x_{\left(i\right)}\right) = \frac{i}{k},\:
i=1,2,\ldots,k
\end{equation}
In this implementation, only the case of raw data, as opposed
to pre-tabulated data, is considered (i.e., equal probability cells).
For testing the normality hypothesis,
let \(x_{\left(0\right)}=-\infty\) and
\(x_{\left(k\right)}=\infty\).
The values of \(x_{\left(i\right)}\) are:
\begin{equation}
x_{\left(i\right)} = \bar{x} + s\,Z_{i/k}
\end{equation}
where \(\bar{x}\) and \(s\) are estimated
mean and variance parameters and \(Z_{i/k}\)
are percentiles of the standard normal distribution.
The test statistic is
\begin{equation}
\label{eqn:chi-square}
x^2 = \frac{k}{n}\sum_{i=1}^k f_i^2-n
\end{equation}
where \(f_i\) is the number of observations in cell \(i\).
The hypothesis of normality is rejected at an \(\alpha\)
level if \(x^2\) is greater than \(x^2_{\alpha}\), the upper
\(\alpha\) point of a
\(\chi^2\) random variable with \(k-3\) degrees of freedom.
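A C sketch of the equal-probability-cell statistic for the normality
case follows (illustrative only: the bisection inverse CDF below is a
stand-in for the library's own quantile routine, and \(s\) uses
divisor \(n-1\) as an assumption):
\begin{verbatim}
#include <math.h>
#include <stdlib.h>

static double phi(double t) { return 0.5 * (1.0 + erf(t / sqrt(2.0))); }

static double phi_inv(double p)        /* bisection on the monotone phi */
{
    double lo = -10.0, hi = 10.0;
    while (hi - lo > 1e-10) {
        double mid = 0.5 * (lo + hi);
        if (phi(mid) < p) lo = mid; else hi = mid;
    }
    return 0.5 * (lo + hi);
}

/* Equal-probability-cell chi-square statistic for normality.
 * Returns x^2 and sets *df to k - 3. */
double chi_square_normal(const double *x, int n, int *df)
{
    double mean = 0.0, s = 0.0, x2 = 0.0;
    int i, j, k, *f;

    k = (int) (4.0 * pow(0.75 * (n - 1.0) * (n - 1.0), 0.2));
    while (k > 1 && (double) n / k < 5.0)
        k--;                           /* enforce n/k >= 5 */

    for (i = 0; i < n; i++)
        mean += x[i];
    mean /= n;
    for (i = 0; i < n; i++)
        s += (x[i] - mean) * (x[i] - mean);
    s = sqrt(s / (n - 1));

    f = calloc(k, sizeof *f);
    for (i = 0; i < n; i++) {          /* locate the cell of x[i] */
        for (j = 1; j < k; j++)
            if (x[i] <= mean + s * phi_inv((double) j / k))
                break;
        f[j - 1]++;
    }
    for (j = 0; j < k; j++)
        x2 += (double) f[j] * f[j];
    x2 = x2 * k / n - n;
    free(f);
    *df = k - 3;
    return x2;
}
\end{verbatim}
Note that for \(n=584\) this prescription gives \(k=48\), hence
\(k-3=45\) degrees of freedom, matching the example below.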
\begin{example}
For the sample data given in Table~\ref{tbl:pine} (\(n=584\)),
\(x^2 = 952.7\) with \(\nu=45\) degrees of freedom.
Since \(\chi^2_{45,0.05}\approx30.33\) (Table~\ref{tbl:chisq}),
we reject \(H_0\) at an \(\alpha=0.05\) level.
\end{example}
For testing the exponentiality hypothesis,
let \(x_{\left(0\right)}=0\) and
\(x_{\left(k\right)}=\infty\).
The values of \(x_{\left(i\right)}\) are:
\begin{equation}
x_{\left(i\right)} = -\frac{1}{\lambda}\ln\left(1-\frac{i}{k}\right),
i=1,2,\ldots,k-1.
\end{equation}
The parameter \(\lambda\) is estimated from
\begin{equation}
\hat{\lambda} = n \left(\sum_{i=1}^n x_i\right)^{-1}
\end{equation}
where \(x_i\) is the \(i\)th observation in
the sample. Equation~(\ref{eqn:chi-square})
is the statistic used for testing exponentiality. The hypothesis
of exponentiality is rejected at an \(\alpha\) level if
\(x^2\) is greater than \(x^2_{\alpha}\), the upper \(\alpha\)
point of a \(\chi^2\) random variable with \(k-2\)
degrees of freedom.
\begin{example}
For the sample data given in Table~\ref{tbl:pine} (\(n=584\)),
\(x^2 = 308.11\) with \(\nu=46\) degrees of freedom.
Since \(\chi^2_{46,0.05}\approx31.16\) (Table~\ref{tbl:chisq}),
we reject \(H_0:\) exponentiality, at an \(\alpha=0.05\) level.
\end{example}
\section{Analysis of Variance Tests}
\subsection{Shapiro-Wilk \(W\)}
\label{sec:shapiro-wilk}
\function{shapiro\_wilk(x,n)}
{double* \\
\hbox{shapiro\_wilk(x,n)}\\
double *x;\\
int n;\\
\returns{\left[W,S^2\right]'}}
\function{shapiro\_wilk\_exp(x,n)}
{double* \\
\hbox{shapiro\_wilk\_exp(x,n)}\\
double *x;\\
int n;\\
\returns{\left[W,S^2\right]'}}
%\marginpar{3, 4, 206, 208, 211, 252, 393, 403--406}
Recall the description of a probability plot given on page~\pageref{pplot}.
Ordered observations are plotted against expected values of
order statistics from the distribution being tested. The plot tends
to be linear if the distributional assumption is correct. If
a generalized least squares fit is performed, an \(F\)-type ratio could
be used to test the fit of a linear model. This was the basis of
the test introduced by Shapiro and Wilk~\cite{shapiro65}. Foregoing
many of the details in the derivation, the test procedures
for normality and exponentiality are given
below.
Let \(x_1 \le x_2 \le \cdots \le x_n\) be the \(n\)
ordered observations and let
\begin{equation}
S^2 = \sum_{i=1}^n x_i^2 - \frac{1}{n} \left(\sum_{i=1}^n x_i\right)^2.
\end{equation}
Calculate
\begin{equation}
b = \sum_{i=1}^k a_{n-i+1}\left(x_{n-i+1}- x_i\right)
\end{equation}
where \(k=n/2\) if \(n\) is even,
\(k=\left(n-1\right)/2\) if \(n\) is odd, and
\(a_{n-i+1}\) are found in Table~\ref{tbl:shapiro-wilk-a}.
Then a test of normality
for small samples (\(3\le n\le 50\)) is defined as
\begin{equation}
\label{eqn:w-test}
W = \frac{b^2}{S^2}.
\end{equation}
Small values of \(W\) indicate non-normality (``lower-tail''). Hence
if the computed value of \(W\) is less than the
\(W_{\alpha}\) shown in Table~\ref{tbl:w-test}, the hypothesis
of normality is rejected.
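A C sketch of the computation (illustrative; the coefficients
\(a_{n-i+1}\) must be supplied by the caller from
Table~\ref{tbl:shapiro-wilk-a}, which is not reproduced here, and
\(x\) must be sorted in ascending order):
\begin{verbatim}
/* Shapiro--Wilk W for 3 <= n <= 50.  a[0], a[1], ... hold the k
 * coefficients a_n, a_{n-1}, ...; note that S^2 as defined in the
 * text equals the sum of squared deviations from the mean. */
double shapiro_wilk_w(const double *x, const double *a, int n)
{
    double mean = 0.0, s2 = 0.0, b = 0.0;
    int i, k = n / 2;              /* integer division: (n-1)/2 if odd */

    for (i = 0; i < n; i++)
        mean += x[i];
    mean /= n;
    for (i = 0; i < n; i++)
        s2 += (x[i] - mean) * (x[i] - mean);

    for (i = 1; i <= k; i++)       /* b = sum a_{n-i+1}(x_{n-i+1} - x_i) */
        b += a[i - 1] * (x[n - i] - x[i - 1]);

    return b * b / s2;             /* W = b^2 / S^2 */
}
\end{verbatim}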
\begin{example}
Using the first 40 observations from the sample data given in
Table~\ref{tbl:pine},
\(W=0.0000245\). Using \(\alpha=0.05\) and Table~\ref{tbl:w-test},
\(W_{0.05}=0.940\). Since \(W<W_{0.05}\), we reject \(H_0\).
\end{example}
For testing exponentiality, no tabulated constants are needed
for calculation of \(b\):
\begin{equation}
b = \sqrt{\frac{n}{n-1}}\left(\bar{x}-x_1\right)
\end{equation}
where
\begin{equation}
\bar{x} = \frac{1}{n} \sum_{i=1}^n x_i.
\end{equation}
This assumes that the origin parameter is unknown. It also
differs from the test of normality in that it is a two-tailed
procedure. That is, too small or too large a value of the
test statistic indicates non-exponentiality~\cite{shapiro90}.
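For this case a C sketch is simpler still, since \(b\) needs no
tabulated constants (illustrative; \(x\) must be sorted so that
\(x_1\) is the smallest observation):
\begin{verbatim}
#include <math.h>

/* Shapiro--Wilk test statistic W for exponentiality with unknown
 * origin; the test is two-tailed. */
double shapiro_wilk_exp_w(const double *x, int n)
{
    double mean = 0.0, s2 = 0.0, b;
    int i;

    for (i = 0; i < n; i++)
        mean += x[i];
    mean /= n;
    for (i = 0; i < n; i++)
        s2 += (x[i] - mean) * (x[i] - mean);

    b = sqrt(n / (n - 1.0)) * (mean - x[0]);
    return b * b / s2;
}
\end{verbatim}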
\begin{example}
Using the first 40 observations from the sample data given in
Table~\ref{tbl:pine},
\(W=0.0909\). Using \(\alpha=0.05\) and Table~\ref{tbl:w-test-e},
\(W_{0.025}=0.0148\) and \(W_{0.975}=0.0447\).
Since \(W\) is not contained in the
interval \(\left[W_{0.025},W_{0.975}\right]\),
we reject \(H_0\): exponentiality.
\end{example}
\subsection{Modified Shapiro--Francia \(W'\)}
\label{sec:shapiro-francia}
\function{shapiro\_francia(x,n)}
{double* \\
\hbox{shapiro\_francia(x,n)}\\
double *x;\\
int n;\\
\returns{\left[W',S^2\right]'}}
%\marginpar{213, 223, 399, 403--406}
The \(W\) test of normality in the previous
section is intended for sample sizes of 50 or less.
Shapiro and Francia~\cite{shapiro72b} extended
the \(W\) test for \(n\) up to 99 by replacing
the values \(a_{n-i+1}\) in Table~\ref{tbl:shapiro-wilk-a} with
the values \(b_{n-i+1}\) in Table~\ref{tbl:shapiro-francia-b}.
The test procedure follows.
Let \(x_1 \le x_2 \le \cdots \le x_n\) be the \(n\)
ordered observations. Then a test of normality
for large samples is defined as:
\begin{equation}
\label{eqn:w-prime-test}
W' = \frac{b'^2}{S^2}.
\end{equation}
The quantity \(b'\) is defined as:
\begin{equation}
b' = \sum_{i=1}^k b_{n-i+1} \left(x_{n-i+1} - x_i\right)
\end{equation}
where \(k=n/2\) if \(n\) is even and \(k=\left(n-1\right)/2\)
if \(n\) is odd.
Significant values,
determined empirically by Shapiro and Francia~\cite{shapiro72b},
are given in Table~\ref{tbl:w-prime-test}.
D'Agostino~\cite{dagostino86} notes that the values given
by Shapiro and Francia~\cite{shapiro72b} in the lower
tail were ``higher than what they should be'' since too few
samples were used in determining these significance levels.
\begin{example}
Using the first 99 observations from the sample data given in
Table~\ref{tbl:pine},
\(W'=1.0139\). Using \(\alpha=0.05\) and Table~\ref{tbl:w-prime-test},
\(W'_{0.05}=0.976\). Since \(W'>W'_{0.05}\), we cannot reject \(H_0\).
\end{example}
\subsection{Weisberg-Bingham \(\tilde{W'}\)}
\function{weisberg\_bingham(x,n)}
{double* \\
\hbox{weisberg\_bingham(x,n)}\\
double *x;\\
int n;\\
\returns{\left[\tilde{W'},S^2\right]'}}
An alternative way of computing \(b'\) is to note that
the vector \(\left[b_1, b_2,\ldots,b_n\right]'\)
is equivalent to \(m'/\left(m'm\right)^{1/2}\)
where \( m' = \left(m_1, m_2, \ldots, m_n\right)\) denotes
a vector of expected normal order statistics.
One approximation for normal order statistics
attributed to Blom~\cite{blom58} is:
\begin{equation}
E\left(r,n\right) = -\Phi^{-1}\left(\frac{r-\alpha}{n-2\alpha+1}\right)
\end{equation}
with a recommended ``compromise value \(\alpha=0.375\)~\cite{royston82c}.''
Define this new statistic as \(\tilde{W'}\).
So, instead of hardcoding constants (as done in
\S\ref{sec:shapiro-wilk}-\ref{sec:shapiro-francia}),
this approximation is used. Since \(\tilde{W'}\)
is essentially the same as \(W'\), the table of
critical values for \(W'\) (Table~\ref{tbl:w-prime-test}) may be used.
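A C sketch of Blom's approximation (illustrative; the sign convention
follows the equation as printed above, and {\tt phi\_inv} is the same
bisection stand-in used in the chi-square sketch):
\begin{verbatim}
#include <math.h>

static double phi(double t) { return 0.5 * (1.0 + erf(t / sqrt(2.0))); }

static double phi_inv(double p)
{
    double lo = -10.0, hi = 10.0;
    while (hi - lo > 1e-10) {
        double mid = 0.5 * (lo + hi);
        if (phi(mid) < p) lo = mid; else hi = mid;
    }
    return 0.5 * (lo + hi);
}

/* Blom's approximation to the expected normal order statistic
 * E(r,n), with the recommended alpha = 0.375; the sign is as
 * printed in the text. */
double blom_score(int r, int n)
{
    const double alpha = 0.375;
    return -phi_inv((r - alpha) / (n - 2.0 * alpha + 1.0));
}
\end{verbatim}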
\subsection{D'Agostino's \(D\) Test of Normality}
\label{sec:dagostino-d}
\function{dagostino\_d(x,n)}
{double* \\
\hbox{dagostino\_d(x,n)}\\
double *x;\\
int n;\\
\returns{\left[D,y\right]'}}
D'Agostino~\cite{dagostino86} presents a modified
Shapiro-Wilk \(W\) test that eliminates the need for
a table of weights. The test statistic is given as
\begin{eqnarray}
D &=& T/\left(n^2\sqrt{m_2}\right) \\ \nonumber
&=& T/\left(n^{3/2}\sqrt{\sum_{j=1}^n\left(x_j-\bar{x}\right)^2}\right)
\end{eqnarray}
where
\begin{equation}
T = \sum_{i=1}^n \left(i-\frac{1}{2}\left(n+1\right)\right)x_i.
\end{equation}
An approximate standard variable is
\begin{equation}
\label{eqn:xform-d}
y=\frac{\sqrt{n}\left(D-0.28209479\right)}{0.02998598}.
\end{equation}
Significant values are given in Table~\ref{tbl:d-test}.
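A C sketch of \(D\) and \(y\) (illustrative, not the \libname\
interface; \(x\) must be sorted in ascending order):
\begin{verbatim}
#include <math.h>

/* D'Agostino's D and its approximate standard normal deviate y. */
void dagostino_d_stat(const double *x, int n, double *D, double *y)
{
    double mean = 0.0, ss = 0.0, T = 0.0;
    int i;

    for (i = 0; i < n; i++)
        mean += x[i];
    mean /= n;
    for (i = 0; i < n; i++)
        ss += (x[i] - mean) * (x[i] - mean);

    for (i = 1; i <= n; i++)       /* T = sum (i - (n+1)/2) x_(i) */
        T += (i - 0.5 * (n + 1.0)) * x[i - 1];

    *D = T / (pow((double) n, 1.5) * sqrt(ss));
    *y = sqrt((double) n) * (*D - 0.28209479) / 0.02998598;
}
\end{verbatim}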
\begin{example}
For the sample data given in Table~\ref{tbl:pine} (\(n=584\)),
\(D = 0.2859\) and \(y=3.0667\). Suppose that we wish to test the
hypothesis of normality:
\(H_0\): normality
\noindent versus the two-sided alternative
\(H_1\): non-normality
\noindent at a level of significance of 0.005. From Table~\ref{tbl:d-test}
(linearly interpolating),
we reject \(H_0\) if \(y<-3.006\) or \(y>2.148\). Since
\(y>2.148\), we reject \(H_0\).
\end{example}
  1063. \subsection{Royston's Modification}
  1064. \function{royston(x,n)}
  1065. {double* \\
  1066. \hbox{royston(x,n)}\\
  1067. double *x;\\
  1068. int n;\\
  1069. \returns{\left[W, P\right]'}}
  1070. Royston~\cite{royston82a} also presented a modified \(W\) statistic for
  1071. \(n\) up to 2000 that did not require extensive use of tabulated
  1072. constants.
  1073. If \( m' = \left(m_1, m_2, \ldots, m_n\right)\) denotes
  1074. a vector of expected values of standard normal order
  1075. statistics and \(V=\left(v_{ij}\right)\) denote the corresponding
  1076. \(n\times n\) covariance matrix, then \(W\) may be written as:
  1077. \begin{equation}
  1078. W=\left[\sum_{i=1}^n a_i x_{\left(i\right)}\right]^2/
  1079. \sum_{i=1}^n \left( x_{\left(i\right)} - \bar{x}\right)^2
  1080. \end{equation}
  1081. where
  1082. \begin{equation}
  1083. a'=m'V^{-1}\left[\left(m'V^{-1}\right)\left(V^{-1} m' \right)\right]^{1/2}.
  1084. \end{equation}
  1085. Let \(a^* = m'V^{-1}\); The following
  1086. approximation for \(a^*\) is used:
  1087. \begin{equation}
  1088. \label{eqn:astar}
  1089. \hat{a}^* = \cases{
  1090. 2m_i, & i=2,3,\ldots,n-1\cr\cr
  1091. \left(\frac{\hat{a}_1^2}{1-2\hat{a}_1^2}
  1092. \sum_{i=2}^{n-1} \hat{a}_i^{*2}\right)^{1/2}, & i=1, i=n}
  1093. \end{equation}
  1094. where
  1095. \begin{equation}
  1096. \hat{a}_1^2=\hat{a}_n^2 = \cases{
  1097. g\left(n-1\right), n\le20\cr\cr
  1098. g\left(n\right), n>20}
  1099. \end{equation}
  1100. and
  1101. \begin{equation}
g\left(n\right)=\frac{\Gamma\left(\frac{1}{2}\left[n+1\right]\right)}
{\sqrt{2}\,\Gamma\left(\frac{1}{2}n+1\right)}.
  1104. \end{equation}
  1105. The function \(g\left(n\right)\) is approximated using:
  1106. \begin{equation}
  1107. \label{eqn:stirling}
g\left(n\right)=\left[\frac{6n+7}{6n+13}\right]
\left(\frac{\exp\left(1\right)}{n+2}
\left[\frac{n+1}{n+2}\right]^{n-2}\right)^{1/2}.
Royston~\cite{royston82a} used eqns.~\ref{eqn:astar}--\ref{eqn:stirling}
for the range \(7\le n\le2000\), but used exact values of \(a_i\)
for \(n<7\).
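A direct transcription of eqns.~\ref{eqn:astar}--\ref{eqn:stirling} into C
might look as follows. This is a sketch only, under the assumption that the
expected normal order statistics \(m_1,\ldots,m_n\) have already been
computed (for example with the hypothetical \verb|normal_scores()| fragment
shown earlier); the weights are scaled at the end so that
\(\sum_i \hat{a}_i^2 = 1\):
\begin{verbatim}
#include <math.h>

/* Stirling-based approximation to g(n) */
static double g_approx(double n)
{
    return ((6.0 * n + 7.0) / (6.0 * n + 13.0)) *
           sqrt((exp(1.0) / (n + 2.0)) *
                pow((n + 1.0) / (n + 2.0), n - 2.0));
}

/* Approximate normalized weights a[0..n-1] from the expected
 * normal order statistics m[0..n-1]. */
static void royston_weights(const double *m, int n, double *a)
{
    double phi = (n <= 20) ? g_approx(n - 1.0) : g_approx((double)n);
    double s = 0.0, norm = 0.0;
    int i;

    for (i = 1; i < n - 1; i++) {         /* a*_i = 2 m_i */
        a[i] = 2.0 * m[i];
        s += a[i] * a[i];
    }
    a[n - 1] = sqrt(phi * s / (1.0 - 2.0 * phi));
    a[0] = -a[n - 1];                     /* endpoints */
    for (i = 0; i < n; i++)
        norm += a[i] * a[i];
    norm = sqrt(norm);
    for (i = 0; i < n; i++)               /* scale: a'a = 1 */
        a[i] /= norm;
}
\end{verbatim}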
  1115. Royston~\cite{royston82a} used the following normalizing transformation:
  1116. \begin{equation}
  1117. y=\left(1-W\right)^\lambda
  1118. \end{equation}
  1119. so that
  1120. \begin{equation}
  1121. z=\left[\left(1-W\right)^\lambda-\mu_y\right]/\sigma_y
  1122. \end{equation}
can be compared with the upper tail of a standard normal. The
constants \(\lambda\), \(\mu_y\), and \(\sigma_y\) are functions of
\(n\), for which Royston gives smoothed approximations. Large
values of \(z\) indicate non-normality of the original sample.
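Given those constants, the transformation itself is one line of C. The
fragment below is a sketch in which \verb|lambda|, \verb|mu_y|, and
\verb|sigma_y| are assumed to have been evaluated from Royston's
approximations for the given \(n\):
\begin{verbatim}
#include <math.h>

/* z = [(1 - W)^lambda - mu_y] / sigma_y; the one-sided P value
 * is then P = 1 - Phi(z), Phi the standard normal c.d.f. */
double royston_z(double W, double lambda, double mu_y, double sigma_y)
{
    return (pow(1.0 - W, lambda) - mu_y) / sigma_y;
}
\end{verbatim}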
  1125. This implementation in \libname\
  1126. closely follows Royston's published FORTRAN code~\cite{royston82b,royston82c}.
  1127. It returns \(W\) and a corresponding \(P\) value (smallest level
  1128. at which we could have preset \(\alpha\) and still have been able
  1129. to reject \(H_0\)).
  1130. It also utilizes algorithms by Hill~\cite{hill73} and
  1131. Wichura~\cite{wichura88}.
  1132. %\section{Modified Maximum Likelihood Ratio Test}
  1133. %
  1134. %If the third moment is less than zero:
  1135. %\begin{equation}
  1136. %\sum_i\left(x_i-\bar{x}\right) \le 0
  1137. %\end{equation}
  1138. %then the distribution is normal. Otherwise, the test
  1139. %statistic is:
  1140. %\begin{equation}
  1141. %\frac{\sqrt{\frac{1}{n}\sum_i\left(x_i-\sigma/n\right)^2}}
  1142. % {\exp\left(\sigma/n\right) \sqrt{\frac{1}{n}\left(x_i-\bar{x}\right)^2}}
  1143. %\end{equation}
  1144. %
  1145. %\function{mod\_maxlik\_ratio(x,n)}
  1146. % {double* \\
  1147. % \hbox{mod\_maxlik\_ratio(x,n)}\\
  1148. % double *x;\\
  1149. % int n;\\
  1150. % \returns{?}}
  1151. %\section{Coefficient of Variation Test}
  1152. %
  1153. %pages 424, 428, 435, 457
  1154. %
  1155. %\begin{equation}
  1156. %\sqrt{\exp\left(\frac{1}{n-1}\sqrt{\exp\left(\frac{1}{n-1}\sum_i
  1157. %\left(\log x_i - \frac{1}{n}\sum_j x_j\right)^2\right)-1}\right)-1}
  1158. %\end{equation}
  1159. %
  1160. %\function{coeff\_variation(x,n)}
  1161. % {double* \\
  1162. % \hbox{coeff\_variation(x,n)}\\
  1163. % double *x;\\
  1164. % int n;\\
  1165. % \returns{?}}
  1166. %
  1167. \section{Kotz Separate Families \(T'_f\)}
  1168. \label{sec:kotz}
  1169. % move as subsection to EDF Stats?
  1170. \function{kotz\_families(x,n)}
  1171. {double* \\
  1172. \hbox{kotz\_families(x,n)}\\
  1173. double *x;\\
  1174. int n;\\
  1175. \returns{\left[T_f', T_f\right]'}}
  1176. Kotz~\cite{kotz73} developed a test where the null hypothesis
  1177. \(H_0\) is that the sample \(x_1, x_2, \ldots, x_n\) came
  1178. from a lognormal distribution, and the alternate hypothesis
is that the parent population was normal. The test statistic,
given by
\begin{equation}
T'_f = \frac{\log\frac{\hat{\beta}_2}{\beta_{2,\hat{\alpha}}}}
{\frac{2}{\sqrt{n}}\left\{\frac{1}{4}\left(e^{4\hat{\alpha}_2}+
2e^{3\hat{\alpha}_2}+3e^{2\hat{\alpha}_2}-4\right)-\hat{\alpha}_2-
\frac{\hat{\alpha}_2^2\left(2e^{\hat{\alpha}_2}-1\right)^2}
{2\left(e^{\hat{\alpha}_2}-1\right)^2}\right\}^{1/2}}
\end{equation}
is asymptotically standard normal~\cite{cox62}.
  1190. \begin{example}
  1191. For the sample data given in Table~\ref{tbl:pine} (\(n=584\)),
  1192. \(T'_f = -0.6021\). Suppose that we wish to test the hypothesis
  1193. \(H_0:\) lognormal
  1194. \noindent versus
  1195. \(H_1:\) normal
\noindent at a level of significance of 0.05. We would
reject \(H_0\) in favor of normality if \(T'_f\) were smaller
than \(-1.645\). Since \(T'_f=-0.6021>-1.645\), we cannot reject \(H_0\).
  1199. \end{example}
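In outline, \(T'_f\) is computed from the sample moments of \(x\) and of
\(\log x\). The C fragment below is a sketch (not the \libname\ source)
that follows the estimators and the variance expression given in the
remainder of this section:
\begin{verbatim}
#include <math.h>

/* Kotz separate-families statistic T'_f for testing lognormal
 * (H_0) against normal (H_1); all x[i] must be positive. */
double kotz_tf_prime(const double *x, int n)
{
    double a1 = 0.0, a2 = 0.0, b1 = 0.0, b2 = 0.0;
    double b2a, ea, bracket;
    int i;

    for (i = 0; i < n; i++) {
        a1 += log(x[i]);          /* mean of log x */
        b1 += x[i];               /* mean of x     */
    }
    a1 /= n; b1 /= n;
    for (i = 0; i < n; i++) {
        a2 += (log(x[i]) - a1) * (log(x[i]) - a1);
        b2 += (x[i] - b1) * (x[i] - b1);
    }
    a2 /= n; b2 /= n;             /* variances     */
    b2a = exp(2.0 * a1 + a2) * (exp(a2) - 1.0); /* beta_{2,alpha-hat} */
    ea = exp(a2);
    bracket = 0.25 * (ea*ea*ea*ea + 2.0*ea*ea*ea + 3.0*ea*ea - 4.0)
            - a2
            - a2 * a2 * (2.0*ea - 1.0) * (2.0*ea - 1.0)
              / (2.0 * (ea - 1.0) * (ea - 1.0));
    return log(b2 / b2a) / ((2.0 / sqrt((double)n)) * sqrt(bracket));
}
\end{verbatim}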
  1200. The discussion that follows explains in more detail how this
  1201. statistic is calculated and how it was derived. The remainder
  1202. of this section
  1203. was taken directly from the work of Kotz~\cite{kotz73}
(pages 123, 124--126).
  1205. \ldots\ A test for this special situation was considered
  1206. by Roy~\cite{roy50}, where he bases his decision on the
  1207. statistic
  1208. \begin{equation}
  1209. R=\frac{L_l}{L_n}
  1210. \end{equation}
  1211. where \(L_l\) denotes the likelihood of the sample under the
  1212. lognormal hypothesis and \(L_n\) that under the normal
hypothesis. If \(R>1\) one accepts lognormality,
and if \(R<1\) one accepts normality. More recently Cox~\cite{cox61,cox62}
  1215. has elaborated on Roy's heuristic approach, and has derived a general
  1216. class of tests to discriminate between hypotheses that are {\em separate\/}
  1217. (in the sense that an arbitrary simple hypothesis in \(H_0\) cannot
  1218. be obtained as a limit---in the parameter space---of a simple hypothesis
  1219. in \(H_1\). We will now apply Cox's general theory to testing
  1220. lognormality against normality\ldots
  1221. Suppose \(x_1, x_2, \ldots, x_n\) is a random sample from a certain
  1222. population. The null hypothesis, \(H_f\), is that the p.d.f.\ of the
  1223. \(x\)'s is log-normal and the alternate hypothesis, \(H_g\), is
  1224. that the p.d.f.\ is normal, that is, for \(H_f\)
\begin{equation}
f\left(y,\alpha\right) = \frac{1}{y\sqrt{2\pi\alpha_2}}
\exp\left(-\frac{\left(\log y-\alpha_1\right)^2}{2\alpha_2}\right),
\: y>0,
\end{equation}
and for \(H_g\):
\begin{equation}
g\left(y,\beta\right) = \frac{1}{\sqrt{2\pi\beta_2}}
\exp\left(-\frac{\left(y-\beta_1\right)^2}{2\beta_2}\right),
\: -\infty < y< \infty.
\end{equation}
  1236. From the maximum likelihood equations we find that
  1237. \begin{equation}
\hat{\alpha}_1=\frac{1}{n}\sum\log x_i; \:
\hat{\alpha}_2=\frac{1}{n}\sum\left(\log x_i-\hat{\alpha}_1\right)^2,
  1240. \end{equation}
  1241. and analogous equations for \(\hat{\beta}_1\)
  1242. and \(\hat{\beta}_2\).
  1243. Under \(H_f\), the log-normal null hypothesis, as the sample
  1244. size \(n\) increases to infinity,
  1245. \(\hat{\alpha}_1\rightarrow\alpha_1\),
  1246. \(\hat{\alpha}_2\rightarrow\alpha_2\),
\(\hat{\beta}_{1}\rightarrow\beta_{1,\alpha}\),
and
\(\hat{\beta}_{2}\rightarrow\beta_{2,\alpha}\)
  1250. where
  1251. \begin{equation}
\beta_{1,\alpha}=\exp\left(\alpha_1+\frac{\alpha_2}{2}\right)
  1253. \end{equation}
  1254. and
  1255. \begin{equation}
\beta_{2,\alpha}=\exp
  1257. \left(2\alpha_1+\alpha_2\right)
  1258. \left[\exp\left(\alpha_2\right) -1\right].
  1259. \end{equation}
  1260. Cox's test is based on the log likelihood ratio
  1261. \begin{equation}
  1262. L_{fg}=\sum_{i=1}^n\log
  1263. \frac{f\left(x_i,\hat{\alpha}\right)}
  1264. {g\left(x_i,\hat{\beta}\right)}
  1265. \end{equation}
  1266. and his test statistic is given by
  1267. \begin{equation}
  1268. T_f=L_{fg}-E_{\hat{\alpha}}\left(L_{fg}\right)
  1269. \end{equation}
  1270. where \(E_{\hat{\alpha}}\left(L_{fg}\right)\) is the expected
  1271. value under \(H_f\) when \(\alpha\) takes the value
  1272. \(\hat\alpha\). Writing
  1273. \begin{equation}
  1274. F=\log f\left(x,\alpha\right), \:
  1275. F_{\alpha_i} = \frac{\partial\log f\left(x,\alpha\right)}{\partial\alpha_i},\:
  1276. i=1,2
  1277. \end{equation}
  1278. \null\begin{equation}
  1279. F_{\alpha_i\alpha_j} = \frac{\partial^2\log f\left(x,\alpha\right)}
  1280. {\partial\alpha_i\partial\alpha_j}, \:
  1281. G = \log g\left(x,\beta\right)
  1282. \end{equation}
  1283. \null\begin{equation}
  1284. G_{\beta_i}=\frac{\partial\log g\left(x,\beta\right)}{\partial\beta_i}, \:
  1285. \mbox{etc.,}
  1286. \end{equation}
  1287. Cox shows that \(T_f\) is asymptotically normal with zero mean and
  1288. variance
  1289. \begin{equation}
V_\alpha\left(T_f\right)=
n\left[V_\alpha\left(F-G\right) -
\sum_i\frac{C_\alpha^2\left(F-G, F_{\alpha_i}\right)}
{V_\alpha\left(F_{\alpha_i}\right)}\right]
  1295. where \(V_\alpha\left(\cdot\right)\),
  1296. \(C_\alpha\left(\cdot\right)\), denote variance and covariance under \(H_f\).
  1297. In our case it can be shown that
  1298. \begin{equation}
  1299. T_f=\frac{n}{2}\log\frac{\hat{\beta}_2}{\hat{\beta}_{2,\hat{\alpha}}}
  1300. \end{equation}
  1301. Results of the following type are used in the derivation of
  1302. \(V_\alpha\left(T_f\right)\):
  1303. \begin{equation}
  1304. E_\alpha\left[x^2\log x\right] =
  1305. \left(\alpha_1+2\alpha_2\right)\exp\left(2\alpha_1+2\alpha_2\right)
  1306. \end{equation}
  1307. \null\begin{equation}
  1308. E_\alpha\left[x^2\log^2x\right] =
  1309. \left(\alpha_2+\alpha_1^2+4\alpha_1\alpha_2+4\alpha_2^2\right)
  1310. \exp\left(2\alpha_1+2\alpha_2\right)
  1311. \end{equation}
  1312. \null\begin{equation}
E_\alpha\left[\left(\log x\right)\left(\log x-\alpha_1\right)\right] =
  1314. \alpha_2
  1315. \end{equation}
  1316. \null\begin{equation}
E_\alpha\left[\left(\log x\right)\left(x-\beta_1\right)^2\right] =
  1318. \beta_2\left(\alpha_1+2\alpha_2\right)
  1319. \end{equation}
  1320. \null\begin{equation}
  1321. E_\alpha\left[\left(\log x -\alpha_1\right)
\left(x-\beta_1\right)^2\right] =
  1323. 2\alpha_2\beta_2.
  1324. \end{equation}
  1325. Using these results, after a considerable amount of simplification,
  1326. we get
\begin{equation}
V_\alpha\left(T_f\right)=n\left[
\frac{1}{4}\left(e^{4\alpha_2}+
2e^{3\alpha_2}+
3e^{2\alpha_2}-4\right)
-\alpha_2-
\frac{\alpha_2^2\left(2e^{\alpha_2}-1\right)^2}
{2\left(e^{\alpha_2}-1\right)^2}\right].
\end{equation}
  1336. Cox~\cite{cox62} has shown that
  1337. \begin{equation}
  1338. T'_f=\frac{T_f}{\sqrt{V_\alpha\left(T_f\right)}}
  1339. \end{equation}
  1340. is asymptotically standardized normal. In our case we get,
  1341. after substituting the estimators for the parameters,
\begin{equation}
T'_f = \frac{\log\frac{\hat{\beta}_2}{\beta_{2,\hat{\alpha}}}}
{\frac{2}{\sqrt{n}}\left\{\frac{1}{4}\left(e^{4\hat{\alpha}_2}+
2e^{3\hat{\alpha}_2}+3e^{2\hat{\alpha}_2}-4\right)-\hat{\alpha}_2-
\frac{\hat{\alpha}_2^2\left(2e^{\hat{\alpha}_2}-1\right)^2}
{2\left(e^{\hat{\alpha}_2}-1\right)^2}\right\}^{1/2}}.
\end{equation}
  1358. \section{Utility Functions}
  1359. This section describes some useful functions included in
  1360. \libname\ but not necessarily described in the previous
  1361. sections, e.g., normal order statistics, normal probabilities,
  1362. inverse normals.
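For example, the cumulative normal probabilities of Table~\ref{tbl:normal}
can be reproduced to table accuracy with the C99 error function (a sketch
only; the table itself was computed with algorithm 5666 of Hart
\emph{et al.}~\cite{hart68}):
\begin{verbatim}
#include <math.h>

/* Cumulative standard normal distribution,
 * Phi(z) = erfc(-z / sqrt(2)) / 2;
 * e.g., Phi(1.96) = 0.97500. */
double Phi(double z)
{
    return 0.5 * erfc(-z / sqrt(2.0));
}
\end{verbatim}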
  1363. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
  1364. \bibliography{goodness}
  1365. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
  1366. \clearpage
  1367. \appendix
  1368. \begin{table}
  1369. \caption{Cumulative Standard Normal Distribution.}
  1370. \label{tbl:normal}
  1371. \centerline{Area Under the Normal Curve from}
  1372. \begin{displaymath}
  1373. -\infty\:\:\mbox{to}\:\:z=\frac{X_i-\mu}{\sigma}.
  1374. \end{displaymath}
  1375. \centerline{Computed by the author using
  1376. algorithm 5666 for the error function, from
  1377. Hart \emph{et al.}~\cite{hart68}.}
  1378. \footnotesize
  1379. \begin{center}
  1380. \begin{tabular}{c%
  1381. @{\extracolsep{4pt}}c%
  1382. @{\extracolsep{4pt}}c%
  1383. @{\extracolsep{4pt}}c%
  1384. @{\extracolsep{4pt}}c%
  1385. @{\extracolsep{4pt}}c%
  1386. @{\extracolsep{4pt}}c%
  1387. @{\extracolsep{4pt}}c%
  1388. @{\extracolsep{4pt}}c%
  1389. @{\extracolsep{4pt}}c%
  1390. @{\extracolsep{4pt}}c%
  1391. }\hline
  1392. \(z\)& 0.00& 0.01 & 0.02 & 0.03 & 0.04 & 0.05 & 0.06 & 0.07 & 0.08 & 0.09\\ \hline
  1393. 0.0&0.50000&0.50399&0.50798&0.51197&0.51595&0.51994&0.52392&0.52790&0.53188&0.53586\\
  1394. 0.1&0.53983&0.54380&0.54776&0.55172&0.55567&0.55962&0.56356&0.56749&0.57142&0.57535\\
  1395. 0.2&0.57926&0.58317&0.58706&0.59095&0.59483&0.59871&0.60257&0.60642&0.61026&0.61409\\
  1396. 0.3&0.61791&0.62172&0.62552&0.62930&0.63307&0.63683&0.64058&0.64431&0.64803&0.65173\\
  1397. 0.4&0.65542&0.65910&0.66276&0.66640&0.67003&0.67364&0.67724&0.68082&0.68439&0.68793\\
  1398. 0.5&0.69146&0.69497&0.69847&0.70194&0.70540&0.70884&0.71226&0.71566&0.71904&0.72240\\
  1399. 0.6&0.72575&0.72907&0.73237&0.73565&0.73891&0.74215&0.74537&0.74857&0.75175&0.75490\\
  1400. 0.7&0.75804&0.76115&0.76424&0.76730&0.77035&0.77337&0.77637&0.77935&0.78230&0.78524\\
  1401. 0.8&0.78814&0.79103&0.79389&0.79673&0.79955&0.80234&0.80511&0.80785&0.81057&0.81327\\
  1402. 0.9&0.81594&0.81859&0.82121&0.82381&0.82639&0.82894&0.83147&0.83398&0.83646&0.83891\\
  1403. 1.0&0.84134&0.84375&0.84614&0.84849&0.85083&0.85314&0.85543&0.85769&0.85993&0.86214\\
  1404. 1.1&0.86433&0.86650&0.86864&0.87076&0.87286&0.87493&0.87698&0.87900&0.88100&0.88298\\
  1405. 1.2&0.88493&0.88686&0.88877&0.89065&0.89251&0.89435&0.89617&0.89796&0.89973&0.90147\\
  1406. 1.3&0.90320&0.90490&0.90658&0.90824&0.90988&0.91149&0.91309&0.91466&0.91621&0.91774\\
  1407. 1.4&0.91924&0.92073&0.92220&0.92364&0.92507&0.92647&0.92785&0.92922&0.93056&0.93189\\
  1408. 1.5&0.93319&0.93448&0.93574&0.93699&0.93822&0.93943&0.94062&0.94179&0.94295&0.94408\\
  1409. 1.6&0.94520&0.94630&0.94738&0.94845&0.94950&0.95053&0.95154&0.95254&0.95352&0.95449\\
  1410. 1.7&0.95543&0.95637&0.95728&0.95818&0.95907&0.95994&0.96080&0.96164&0.96246&0.96327\\
  1411. 1.8&0.96407&0.96485&0.96562&0.96638&0.96712&0.96784&0.96856&0.96926&0.96995&0.97062\\
  1412. 1.9&0.97128&0.97193&0.97257&0.97320&0.97381&0.97441&0.97500&0.97558&0.97615&0.97670\\
  1413. 2.0&0.97725&0.97778&0.97831&0.97882&0.97932&0.97982&0.98030&0.98077&0.98124&0.98169\\
  1414. 2.1&0.98214&0.98257&0.98300&0.98341&0.98382&0.98422&0.98461&0.98500&0.98537&0.98574\\
  1415. 2.2&0.98610&0.98645&0.98679&0.98713&0.98745&0.98778&0.98809&0.98840&0.98870&0.98899\\
  1416. 2.3&0.98928&0.98956&0.98983&0.99010&0.99036&0.99061&0.99086&0.99111&0.99134&0.99158\\
  1417. 2.4&0.99180&0.99202&0.99224&0.99245&0.99266&0.99286&0.99305&0.99324&0.99343&0.99361\\
  1418. 2.5&0.99379&0.99396&0.99413&0.99430&0.99446&0.99461&0.99477&0.99492&0.99506&0.99520\\
  1419. 2.6&0.99534&0.99547&0.99560&0.99573&0.99585&0.99598&0.99609&0.99621&0.99632&0.99643\\
  1420. 2.7&0.99653&0.99664&0.99674&0.99683&0.99693&0.99702&0.99711&0.99720&0.99728&0.99736\\
  1421. 2.8&0.99744&0.99752&0.99760&0.99767&0.99774&0.99781&0.99788&0.99795&0.99801&0.99807\\
  1422. 2.9&0.99813&0.99819&0.99825&0.99831&0.99836&0.99841&0.99846&0.99851&0.99856&0.99861\\
  1423. 3.0&0.99865&0.99869&0.99874&0.99878&0.99882&0.99886&0.99889&0.99893&0.99896&0.99900\\
  1424. 3.1&0.99903&0.99906&0.99910&0.99913&0.99916&0.99918&0.99921&0.99924&0.99926&0.99929\\
  1425. 3.2&0.99931&0.99934&0.99936&0.99938&0.99940&0.99942&0.99944&0.99946&0.99948&0.99950\\
  1426. 3.3&0.99952&0.99953&0.99955&0.99957&0.99958&0.99960&0.99961&0.99962&0.99964&0.99965\\
  1427. 3.4&0.99966&0.99968&0.99969&0.99970&0.99971&0.99972&0.99973&0.99974&0.99975&0.99976\\
  1428. 3.5&0.99977&0.99978&0.99978&0.99979&0.99980&0.99981&0.99981&0.99982&0.99983&0.99983\\\hline
  1429. \end{tabular}
  1430. \end{center}
  1431. \normalsize
  1432. \end{table}
  1433. \clearpage
  1434. \begin{table}
  1435. \caption{Cumulative Chi-Square Distribution.}
  1436. \label{tbl:chisq}
  1437. Computed by the author using CDFLIB~\cite{brown93},
  1438. with the exception of items marked with a dagger (\dag), which
  1439. were found in {\em Biometrika Tables for Statisticians} (1966),
  1440. 3rd.~Ed., University College, London, as cited by Shapiro~\cite{shapiro90}.
  1441. \scriptsize
  1442. \begin{center}
  1443. \begin{tabular}{r%
  1444. r@{.}l%
  1445. @{\extracolsep{1.0pt}}r@{.}l%
  1446. @{\extracolsep{1.0pt}}r@{.}l%
  1447. @{\extracolsep{1.0pt}}r@{.}l%
  1448. @{\extracolsep{1.0pt}}r@{.}l%
  1449. @{\extracolsep{1.0pt}}r@{.}l%
  1450. @{\extracolsep{1.0pt}}r@{.}l%
  1451. @{\extracolsep{1.0pt}}r@{.}l%
  1452. @{\extracolsep{1.0pt}}r@{.}l%
  1453. @{\extracolsep{1.0pt}}r@{.}l%
  1454. @{\extracolsep{1.0pt}}
  1455. }
  1456. \hline
  1457. & \multicolumn{20}{c}{\(\alpha\)} \\ \cline{2-21}
  1458. \(\nu\) &
  1459. 0&995 & 0&990 & 0&975 & 0&950 & 0&900 & 0&100 & 0&050 & 0&025 & 0&010 & 0&005\\
  1460. \hline
  1461. 1 & 0&\(0000393^{\dag}\) & 0&\(000157^{\dag}\) & 0&\(000982^{\dag}\)
  1462. & 0&\(0158^{\dag}\) & 0&\(102^{\dag}\) & 2&71 & 3&84 & 5&02 & 6&63 & 7&88 \\
  1463. 2 & 0&0100 & 0&0201& 0&0506& 0&103 & 0&211 &4&61 & 5&99 & 7&38 & 9&21 & 10&6 \\
  1464. 3 & 0&0717 & 0&115 & 0&216 & 0&352 & 0&584 &6&25 & 7&81 & 9&35 &11&3 & 12&8 \\
  1465. 4 & 0&207 & 0&297 & 0&484 & 0&711 & 1&06 &7&78 & 9&49 &11&1 &13&3 & 14&9 \\
  1466. 5 & 0&412 & 0&554 & 0&831 & 1&15 & 1&61 &9&24 &11&1 &12&8 &15&1 & 16&8 \\
  1467. \\
  1468. 6 & 0&676 &0&872 & 1&24 & 1&64 & 2&20 & 10&6 & 12&6 & 14&5& 16&8& 18&5 \\
  1469. 7 & 0&989 & 1&24 & 1&69 & 2&17 & 2&83 & 12&0 & 14&1 & 16&0& 18&5& 20&3 \\
  1470. 8 & 1&34 & 1&65 & 2&18 & 2&73 & 3&49 & 13&4 & 15&5 & 17&5 & 20&1 & 22&0 \\
  1471. 9 & 1&73 & 2&09 & 2&70 & 3&33 & 4&17 & 14&7 & 16&9 & 19&0 & 21&7 & 23&6 \\
  1472. 10 & 2&16 & 2&56 & 3&25 & 3&94 & 4&87 & 16&0 & 18&3 & 20&5 & 23&2 & 25&2 \\
  1473. \\
  1474. 11 & 2&60 & 3&05 & 3&82 & 4&57 & 5&58 & 17&3 & 19&7 & 21&9 & 24&7 & 26&8 \\
  1475. 12 & 3&07 & 3&57 & 4&40 & 5&23 & 6&30 & 18&6 & 21&0 & 23&3 & 26&2 & 28&3 \\
  1476. 13 & 3&57 & 4&11 & 5&01 & 5&89 & 7&04 & 19&8 & 22&4 & 24&7 & 27&7 & 29&8 \\
  1477. 14 & 4&07 & 4&66 & 5&63 & 6&57 & 7&79 & 21&1 & 23&7 & 26&1 & 29&1 & 31&3 \\
  1478. 15 & 4&60 & 5&23 & 6&26 & 7&26 & 8&55 & 22&3 & 25&0 & 27&5 & 30&6 & 32&8 \\
  1479. \\
  1480. 16 & 5&14 & 5&81 & 6&91 & 7&96 & 9&31 & 23&5 & 26&3 & 28&9 & 32&0 & 34&3 \\
  1481. 17 & 5&70 & 6&41 & 7&56 & 8&67 & 10&1 & 24&8 & 27&6 & 30&2 & 33&4 & 35&7 \\
  1482. 18 & 6&26 & 7&01 & 8&23 & 9&39 & 10&9 & 26&0 & 28&9 & 31&5 & 34&8 & 37&2 \\
  1483. 19 & 6&84 & 7&63 & 8&91 & 10&1 & 11&7 & 27&2 & 30&1 & 32&9 & 36&2 & 38&6 \\
  1484. 20 & 7&43 & 8&26 & 9&59 & 10&9 & 12&4 & 28&4 & 31&4 & 34&2 & 37&6 & 40&0 \\
  1485. \\
  1486. 21 & 8&03 & 8&90 & 10&3 & 11&6 & 13&2 & 29&6 & 32&7 & 35&5 & 38&9 & 41&4 \\
  1487. 22 & 8&64 & 9&54 & 11&0 & 12&3 & 14&0 & 30&8 & 33&9 & 36&8 & 40&3 & 42&8 \\
  1488. 23 & 9&26 & 10&2 & 11&7 & 13&1 & 14&9 & 32&0 & 35&1 & 38&0 & 41&6 & 44&2 \\
  1489. 24 & 9&89 & 10&9 & 12&4 & 13&9 & 15&7 & 33&2 & 36&4 & 39&4 & 43&0 & 45&6 \\
  1490. 25 & 10&5 & 11&5 & 13&1 & 14&6 & 16&5 & 34&4 & 37&7 & 40&6 & 44&3 & 46&9 \\
  1491. \\
  1492. 26 & 11&2 & 12&2 & 13&8 & 15&4 & 17&3 & 35&6 & 38&9 & 41&9 & 45&6 & 48&3 \\
  1493. 27 & 11&8 & 12&9 & 14&6 & 16&2 & 18&1 & 36&7 & 40&1 & 43&2 & 47&0 & 49&6 \\
  1494. 28 & 12&5 & 13&6 & 15&3 & 16&9 & 18&9 & 37&9 & 41&3 & 44&5 & 48&3 & 51&0 \\
  1495. 29 & 13&1 & 14&3 & 16&0 & 17&7 & 19&8 & 39&1 & 42&6 & 45&7 & 49&6 & 52&3 \\
  1496. 30 & 13&8 & 15&0 & 16&8 & 18&5 & 20&6 & 40&3 & 43&8 & 47&0 & 50&9 & 53&7 \\
  1497. \hline
  1498. \end{tabular}
  1499. \end{center}
  1500. \normalsize
According to Shapiro~\cite{shapiro90}, for
more than 30 degrees of freedom, \(\chi^2_{\nu,\alpha} \approx
0.5 \left(z_{\alpha}+\sqrt{2\nu-1}\right)^2\), where
  1504. \(z_{\alpha}\) is the 100\(\alpha\)\% point of the standard normal
  1505. distribution, e.g., \(z_{0.05}=-1.645\) from Table~\ref{tbl:normal}.
  1506. \end{table}
  1507. \clearpage
  1508. \begin{table}
\caption{Significant Values of D'Agostino's \(D\) Test (\(y\) statistic
  1510. of eqn.~\protect\ref{eqn:xform-d}).}
  1511. \centerline{Reproduced from D'Agostino~\protect\cite{dagostino86}.}
  1512. \label{tbl:d-test}
  1513. \scriptsize
  1514. \begin{center}
  1515. \begin{tabular}{rllllllllll}\hline
  1516. & \multicolumn{10}{c}{Percentiles} \\ \cline{2-11}
  1517. n & 0.5 & 1.0 & 2.5 & 5 & 10 & 90 & 95 & 97.5 & 99 & 99.5 \\ \hline
  1518. 10&-4.66&-4.06&-3.25&-2.62&-1.99&0.149&0.235&0.299&0.356&0.385\\
  1519. 12&-4.63&-4.02&-3.20&-2.58&-1.94&0.237&0.329&0.381&0.440&0.479\\
  1520. 14&-4.57&-3.97&-3.16&-2.53&-1.90&0.308&0.399&0.460&0.515&0.555\\
  1521. 16&-4.52&-3.92&-3.12&-2.50&-1.87&0.367&0.459&0.526&0.587&0.613\\
  1522. 18&-4.47&-3.87&-3.08&-2.47&-1.85&0.417&0.515&0.574&0.636&0.667\\
  1523. 20&-4.41&-3.83&-3.04&-2.44&-1.82&0.460&0.565&0.628&0.690&0.720\\
  1524. \\
  1525. 22&-4.36&-3.78&-3.01&-2.41&-1.81&0.497&0.609&0.677&0.744&0.775\\
  1526. 24&-4.32&-3.75&-2.98&-2.39&-1.79&0.530&0.648&0.720&0.783&0.822\\
  1527. 26&-4.27&-3.71&-2.96&-2.37&-1.77&0.559&0.682&0.760&0.827&0.867\\
  1528. 28&-4.23&-3.68&-2.93&-2.35&-1.76&0.586&0.714&0.797&0.868&0.910\\
  1529. 30&-4.19&-3.64&-.291&-2.33&-1.75&0.610&0.743&0.830&0.906&0.941\\
  1530. \\
  1531. 32&-4.16&-3.61&-2.88&-2.32&-1.73&0.631&0.770&0.862&0.942&0.983\\
  1532. 34&-4.12&-3.59&-2.86&-2.30&-1.72&0.651&0.794&0.891&0.975&1.02\\
  1533. 36&-4.09&-3.56&-2.85&-2.29&-1.71&0.669&0.816&0.917&1.00&1.05\\
  1534. 38&-4.06&-3.54&-2.83&-2.28&-1.70&0.686&0.837&0.941&1.03&1.08\\
  1535. 40&-4.03&-3.51&-2.81&-2.26&-1.70&0.702&0.857&0.964&1.06&1.11\\
  1536. \\
  1537. 42&-4.00&-3.49&-2.80&-2.25&-1.69&0.716&0.875&0.986&1.09&1.14\\
  1538. 44&-3.98&-3.47&-2.78&-2.24&-1.68&0.730&0.892&1.01&1.11&1.17\\
  1539. 46&-3.95&-3.45&-2.77&-2.23&-1.67&0.742&0.908&1.02&1.13&1.19\\
  1540. 48&-3.93&-3.43&-2.75&-2.22&-1.67&0.754&0.923&1.04&1.15&1.22\\
  1541. 50&-3.91&-3.41&-2.74&-2.21&-1.66&0.765&0.937&1.06&1.18&1.24\\
  1542. \\
  1543. 60&-3.81&-3.34&-2.68&-2.17&-1.64&0.812&0.997&1.13&1.26&1.34\\
  1544. 70&-3.73&-3.27&-2.64&-2.14&-1.61&0.849&1.05&1.19&1.33&1.42\\
  1545. 80&-3.67&-3.22&-2.60&-2.11&-1.59&0.878&1.08&1.24&1.39&1.48\\
  1546. 90&-3.61&-3.17&-2.57&-2.09&-1.58&0.902&1.12&1.28&1.44&1.54\\
  1547. 100&-3.57&-3.14&-2.54&-2.07&-1.57&0.923&1.14&1.31&1.48&1.59\\
  1548. \\
  1549. 150&-3.409&-3.009&-2.452&-2.004&-1.520&0.990&1.233&1.423&1.623&1.746\\
  1550. 200&-3.302&-2.922&-2.391&-1.960&-1.491&1.032&1.290&1.496&1.715&1.853\\
  1551. 250&-3.227&-2.861&-2.348&-1.926&-1.471&1.060&1.328&1.545&1.779&1.927\\
  1552. 300&-3.172&-2.816&-2.316&01.906&-1.456&1.080&1.357&1.528&1.826&1.983\\
  1553. 350&-3.129&-2.781&-2.291&-1.888&-1.444&1.096&1.379&1.610&1.863&2.026\\
  1554. \\
  1555. 400&-3.094&-2.753&-2.270&-1.873&-1.434&1.108&1.396&1.633&1.893&2.061\\
  1556. 450&-3.064&-2.729&-2.253&-1.861&-1.426&1.119&1.411&1.652&1.918&2.090\\
  1557. 500&-3.040&-2.709&-2.239&-1.850&-1.419&1.127&1.423&1.668&1.938&2.114\\
  1558. 550&-3.019&-2.691&-2.226&-1.841&-1.413&1.135&1.434&1.682&1.957&2.136\\
  1559. 600&-3.000&-2.676&-2.215&-1.833&-1.408&1.141&1.443&1.694&1.972&2.154\\
  1560. \\
  1561. 650&-2.984&-2.663&-2.206&-1.826&-1.403&1.147&1.451&1.704&1.986&2.171\\
  1562. 700&-2.969&-2.651&-2.197&-1.820&-1.399&1.152&1.458&1.714&1.999&2.185\\
  1563. 750&-2.956&-2.640&-2.189&-1.814&-1.395&1.157&1.465&1.722&2.010&2.199\\
  1564. 800&-2.944&-2.630&-2.182&-1.809&-1.392&1.161&1.471&1.730&2.020&2.221\\
  1565. 850&-2.933&-2.621&-2.176&-1.804&-1.389&1.165&1.476&1.737&2.029&2.221\\
  1566. \\
  1567. 900&-2.923&-2.613&-2.710&-1.800&-1.386&1.168&1.481&1.743&2.037&2.231\\
  1568. 950&-2.914&-2.605&-2.164&-1.796&-1.383&1.171&1.485&1.749&2.045&2.241\\
  1569. 1000&-2.906&-2.599&-2.159&-1.792&-1.381&1.174&1.489&1.754&2.052&2.249\\
  1570. 1500&-2.845&-2.549&-2.123&-1.765&-1.363&1.194&1.519&1.793&2.103&2.309\\
  1571. 2000&-2.807&-2.515&-2.101&-1.750&-1.353&1.207&1.536&1.815&2.132&2.342\\
  1572. \hline
  1573. \end{tabular}
  1574. \end{center}
  1575. \end{table}
  1576. \clearpage
  1577. \begin{table}
  1578. \caption{Sample Data. Diameters at Breast Height (cm)
  1579. of 584 Longleaf Pine Trees.}
  1580. \label{tbl:pine}
  1581. Locations and Diameters at Breast Height (dbh, in centimeters)
  1582. of all 584 Longleaf Pine Trees in the 4 hectare Study Region.
  1583. The \(x\) coordinates are distances (in meters) from the tree to the
  1584. southern boundary. The \(y\) coordinates are distances (in meters) from
  1585. the tree to the eastern boundary.
  1586. Reproduced from Table~8.1 of Cressie~\protect\cite{cressie91}.
  1587. \scriptsize
  1588. \begin{center}
  1589. \begin{tabular}{rrrrrrrrrrrr}
  1590. \hline
  1591. \(x\)&\(y\)& dbh &\(x\)&\(y\)& dbh &\(x\)&\(y\)& dbh &\(x\)&\(y\)& dbh \\
  1592. \hline
  1593. 200.0& 8.8& 32.9&199.3& 10.0& 53.5&193.6& 22.4& 68.0&167.7& 35.6& 17.7\\
  1594. 183.9& 45.4& 36.9&182.5& 47.2& 51.6&166.1& 48.8& 66.4&160.7& 42.4& 17.7\\
  1595. 162.9& 29.0& 21.9&166.4& 33.6& 25.7&163.0& 35.8& 25.5&156.1& 38.7& 28.3\\
  1596. 157.6& 42.8& 11.2&154.4& 36.2& 33.8&150.8& 45.8& 2.5&144.6& 25.4& 4.2\\
  1597. 142.7& 25.4& 2.5&144.0& 28.3& 31.2&143.5& 36.9& 16.4&123.1& 14.3& 53.2\\
  1598. 113.9& 13.1& 67.3&114.9& 8.1& 37.8&101.4& 9.3& 49.9&105.7& 9.1& 46.3\\
  1599. 106.9& 14.7& 40.5&127.0& 29.7& 57.7&129.8& 45.8& 58.0&136.3& 44.2& 54.9\\
  1600. 106.7& 49.4& 25.3&103.4& 49.6& 18.4& 89.7& 4.9& 72.0& 10.8& 0.0& 31.4\\
  1601. 26.4& 5.4& 55.1& 11.0& 5.5& 36.0& 5.1& 3.9& 28.4& 10.1& 8.5& 24.8\\
  1602. 18.9& 11.3& 44.1& 28.4& 11.0& 50.9& 41.1& 9.2& 47.5& 41.2& 12.6& 58.0\\
  1603. \\
  1604. 33.9& 21.4& 36.9& 40.8& 39.8& 65.6& 49.7& 18.2& 52.9& 6.7& 46.9& 39.5\\
  1605. 11.6& 46.9& 42.7& 17.2& 47.9& 44.4& 19.4& 50.0& 40.3& 26.9& 47.2& 53.5\\
  1606. 39.6& 47.9& 44.2& 38.0& 50.7& 53.8& 19.1& 45.2& 38.0& 32.1& 35.0& 48.3\\
  1607. 28.4& 35.5& 42.9& 3.8& 44.8& 40.6& 8.5& 43.4& 34.5& 11.2& 40.2& 45.7\\
  1608. 22.4& 34.3& 51.8& 23.8& 33.3& 52.0& 24.9& 29.8& 44.5& 9.0& 38.9& 35.6\\
  1609. 10.4& 61.2& 19.2& 30.9& 52.2& 43.5& 48.9& 67.8& 33.7& 49.5& 73.8& 43.3\\
  1610. 46.3& 80.9& 36.6& 44.1& 78.0& 46.3& 48.5& 94.8& 48.3& 45.9& 90.4& 20.4\\
  1611. 44.2& 84.0& 40.5& 37.0& 64.3& 44.0& 36.3& 67.7& 40.9& 36.7& 71.5& 51.0\\
  1612. 35.3& 78.3& 36.5& 33.5& 81.6& 42.1& 29.3& 83.8& 15.6& 22.4& 84.1& 18.5\\
  1613. 17.1& 84.7& 43.0& 27.3& 89.4& 28.9& 27.9& 90.6& 21.3& 48.4& 99.5& 30.9\\
  1614. 43.6& 98.4& 42.7& 39.0& 97.3& 37.6& 14.9& 91.2& 47.1& 6.1& 96.2& 44.6\\
  1615. 10.7& 98.6& 44.3& 22.2&100.0& 26.1\\
  1616. & & & & & & 32.7& 99.1& 25.9& 0.9&100.0& 41.4\\
  1617. 93.5& 96.2& 59.5& 85.1& 90.6& 26.1& 92.8& 61.5& 11.4& 91.3& 69.5& 33.4\\
  1618. 95.9& 59.7& 35.8& 93.4& 71.5& 54.4& 89.6& 86.3& 33.6& 99.5& 78.9& 35.5\\
  1619. 100.6& 53.1& 7.4&103.5& 72.1& 36.6&104.7& 74.0& 19.1&104.0& 67.1& 34.9\\
  1620. 104.2& 64.7& 37.3&105.0& 59.8& 16.3&111.8& 73.2& 39.1&112.4& 69.8& 36.5\\
  1621. 110.0& 65.9& 25.0&120.4& 79.2& 46.8&109.4& 62.5& 18.7&109.7& 62.9& 23.2\\
  1622. 113.3& 60.4& 20.4&118.0& 69.3& 42.3&126.5& 69.2& 38.1&125.1& 68.2& 17.9\\
  1623. 114.2& 54.6& 39.7&110.6& 51.5& 14.5&147.3& 73.8& 33.5&146.7& 73.0& 56.0\\
  1624. 148.1& 86.2& 66.1&138.2& 73.4& 26.3&135.7& 70.7& 44.8&134.9& 72.7& 24.2\\
  1625. 98.0& 27.7& 39.0& 93.5& 28.7& 15.1& 82.3& 16.8& 35.6& 79.2& 25.3& 21.6\\
  1626. 84.2& 29.0& 17.2& 88.8& 35.1& 22.3& 82.5& 36.3& 18.2& 75.6& 28.1& 55.6\\
  1627. 72.9& 36.2& 23.2& 79.1& 43.6& 27.0& 50.0& 48.8& 50.1& 59.9& 34.4& 45.5\\
  1628. 60.5& 13.0& 47.2& 60.2& 11.4& 37.8& 66.5& 15.9& 31.9& 70.4& 6.6& 38.5\\
  1629. 70.7& 2.2& 23.8& 71.7& 1.9& 46.3&179.5& 92.6& 2.8&186.1& 91.0& 3.2\\
  1630. 178.3& 92.4& 5.8&178.6& 91.8& 3.5&186.2& 90.3& 2.3&185.2& 89.9& 3.8\\
  1631. 185.5& 89.8& 3.2&185.8& 89.1& 4.4&186.5& 88.8& 3.9&176.7& 92.3& 7.8\\
  1632. 177.7& 91.5& 4.7&184.0& 89.0& 4.8& 11.0& 34.4& 44.1& 17.5& 21.9& 51.5\\
  1633. 4.3& 31.3& 51.6& 5.9& 8.1& 33.3& 1.9& 68.5& 13.3& 1.8& 71.0& 5.7\\
  1634. 1.1& 82.5& 3.3& 2.4& 95.3& 45.9& 4.6& 94.0& 32.6& 3.1& 79.5& 11.4\\
  1635. 3.9& 72.1& 9.1& 4.1& 70.9& 5.2& 7.9& 68.7& 4.9& 14.8& 81.8& 42.0\\
  1636. 9.4& 67.7& 32.0& 15.9& 78.7& 32.8& 16.6& 78.8& 22.0& 18.2& 80.3& 20.8\\
  1637. 174.1&135.6& 7.3&173.0&127.4& 3.0&174.0&125.7& 2.2&177.3&121.0& 2.2\\
  1638. 177.6&120.3& 2.2&195.7&144.1& 59.4&197.0&142.5& 48.1&178.2&112.6& 51.5\\
  1639. 173.8&112.7& 50.3&172.8&124.4& 2.9&162.7&114.6& 19.1&164.6&120.9& 15.1\\
  1640. 80.4& 90.7& 21.7& 71.0& 88.8& 42.4& 73.0& 85.6& 40.2& 56.7& 95.3& 37.4\\
  1641. 66.5& 86.2& 40.1& 67.0& 84.7& 39.5& 62.9& 87.9& 32.5& 61.8& 89.0& 39.5\\
  1642. 51.9& 94.5& 35.6& 60.9& 71.6& 44.1& 61.0& 69.8& 42.2& 61.7& 66.2& 39.4\\
  1643. 57.3& 68.4& 35.5& 54.2& 76.4& 39.1& 76.1& 52.9& 9.5& 67.2& 57.6& 48.4\\
  1644. 81.9& 58.5& 31.9& 90.1& 59.6& 30.7&135.3&126.6& 15.0&135.0&124.0& 24.5\\
  1645. \hline
  1646. \end{tabular}
  1647. \end{center}
  1648. \end{table}
  1649. \clearpage
  1650. \scriptsize
  1651. \begin{center}
  1652. {\normalsize Table~\thetable (continued).}
  1653. \par\vspace{\baselineskip}\par
  1654. \begin{tabular}{rrrrrrrrrrrr}
  1655. \hline
  1656. \(x\)&\(y\)& dbh &\(x\)&\(y\)& dbh &\(x\)&\(y\)& dbh &\(x\)&\(y\)& dbh \\
  1657. \hline
  1658. 136.2&122.1& 15.0&129.7&127.0& 22.2&134.8&120.2& 27.5&136.9&116.8& 10.8\\
  1659. 137.0&116.0& 26.2&128.9&124.2& 10.2&127.5&125.0& 18.9&127.6&121.7& 44.2\\
  1660. 129.7&119.0& 13.8&126.6&121.1& 16.7&133.4& 77.1& 35.7&129.9& 76.1& 12.1\\
  1661. 126.5& 77.3& 35.4&129.1& 83.1& 32.7&134.4& 87.0& 30.1&130.7& 90.1& 28.4\\
  1662. 130.9& 90.7& 16.5&132.0& 94.5& 12.7&136.8& 96.7& 5.5&137.7& 98.0& 2.5\\
  1663. 157.8& 99.9& 3.0&187.1& 98.1& 3.2&190.6& 92.1& 3.2&185.4& 93.1& 4.0\\
  1664. 186.6& 92.2& 3.6&185.9& 91.7& 3.8&184.3& 92.1& 4.3&188.2& 91.2& 3.3\\
  1665. 104.4&145.1& 6.3&104.9&145.0& 18.4&101.5&148.4& 5.4&102.4&148.7& 5.4\\
  1666. 123.4&128.9& 26.0&123.8&135.1& 22.3&127.0&133.8& 35.2&109.6&145.9& 24.1\\
  1667. 112.4&145.0& 6.9&133.1&144.8& 61.0&139.4&143.1& 20.6&140.4&143.6& 6.5\\
  1668. 184.1& 88.2& 2.8&183.5& 88.5& 4.8&183.0& 88.0& 5.4&176.1& 91.0& 4.3\\
  1669. 175.6& 90.2& 4.0&173.8& 89.9& 3.2&164.9& 93.7& 2.8&163.0& 95.3& 4.9\\
  1670. 163.2& 94.1& 3.5&162.4& 94.5& 2.9&161.5& 94.9& 2.4&162.2& 94.3& 3.3\\
  1671. 161.0& 94.7& 2.1&157.7& 95.7& 2.0&154.9& 96.2& 3.9&154.6& 92.7& 5.0\\
  1672. 152.9& 93.7& 2.3&153.2& 93.2& 2.2&168.2& 73.0& 67.7&151.6& 93.0& 2.9\\
  1673. 151.4& 93.4& 2.4&157.6& 67.2& 56.3&149.4& 63.0& 39.4&149.4& 64.3& 59.5\\
  1674. 167.3& 54.6& 42.4&157.4& 51.5& 63.7&181.5& 66.1& 66.6&196.5& 55.2& 69.3\\
  1675. 189.9& 85.2& 56.9&155.1&149.2& 23.5&154.5&148.4& 9.1&162.9&119.9& 29.9\\
  1676. 158.4&113.4& 14.9&153.9&108.3& 38.7&156.1&116.0& 31.5&156.5&118.9& 27.8\\
  1677. 156.8&122.3& 28.5&159.0&126.1& 21.6&161.0&131.9& 2.0&161.3&132.8& 2.6\\
  1678. 160.6&132.6& 2.3&161.3&134.9& 3.5&159.7&129.8& 3.6&161.7&136.1& 2.6\\
  1679. 161.1&136.4& 2.0&160.1&133.0& 2.0&159.0&133.6& 2.7&160.0&134.8& 2.6\\
  1680. 160.2&135.5& 2.2&159.1&136.5& 2.7&154.7&126.8& 30.1&151.9&127.5& 16.6\\
  1681. 151.3&124.7& 10.4&151.0&127.3& 11.8&150.4&123.0& 32.3&149.6&124.6& 33.5\\
  1682. 146.2&127.1& 30.5&146.1&127.4& 10.5&144.4&131.8& 13.8&143.3&131.5& 22.8\\
  1683. 140.6&137.7& 31.7&143.2&125.4& 10.1&127.1&119.9& 14.5&120.7&115.6& 12.0\\
  1684. 115.3&112.6& 2.2&134.1&105.2& 2.3&134.6&104.1& 3.2&135.6&103.3& 3.0\\
  1685. 128.9&102.6& 50.6&116.3&106.5& 2.6&104.3&104.0& 50.0&111.5&100.0& 52.2\\
  1686. 100.5&149.7& 5.2&100.0&145.5& 5.2&100.8&145.0& 6.7&100.9&143.5& 14.0\\
  1687. 100.3&140.8& 12.7&101.5&120.8& 59.5& 99.3&110.6& 52.0& 99.2&106.0& 45.9\\
  1688. 102.0&137.1& 18.0&105.4&115.7& 43.5&103.6&134.2& 3.3&103.9&139.4& 4.3\\
  1689. 102.6&141.6& 7.4&102.0&143.3& 10.1&102.1&144.4& 23.1&103.5&141.3& 8.1\\
  1690. 102.9&143.8& 5.7&105.7&138.2& 13.3&106.6&135.1& 12.8&108.5&133.2& 11.6\\
  1691. 105.2&142.3& 6.3&139.7&145.8& 20.0&145.5&148.4& 8.9&146.4&148.4& 27.6\\
  1692. 105.8&149.8& 4.5& 96.7&149.1& 9.2& 66.5&150.0& 2.3& 55.7&148.5& 5.0\\
  1693. 54.7&146.8& 4.0& 57.1&144.0& 21.8& 61.7&145.3& 10.9& 60.1&143.7& 14.9\\
  1694. 77.7&144.8& 45.0& 67.2&139.3& 16.4& 80.7&133.2& 43.3& 85.1&133.5& 55.6\\
  1695. 94.7&143.7& 10.6& 81.2&125.0& 45.9& 81.9&123.2& 45.2& 83.8&123.1& 35.5\\
  1696. 84.8&121.4& 43.6& 82.9&119.2& 44.6& 82.1&116.4& 38.8& 84.3&114.8& 34.9\\
  1697. 96.7&142.6& 17.0& 92.0&109.0& 50.4& 96.1&146.6& 2.0& 78.5&102.5& 33.8\\
  1698. 78.7&103.0& 51.1& 59.5&107.4& 21.8& 56.5&105.5& 46.5& 64.3&132.1& 5.6\\
  1699. 152.7&146.7& 19.6&155.8&145.4& 32.3&161.2&138.1& 3.7&161.0&138.1& 2.7\\
  1700. 162.1&136.9& 2.5&166.2&132.0& 2.5&168.7&133.4& 2.4&169.3&133.7& 7.2\\
  1701. 57.9&140.7& 7.0& 57.5&142.3& 11.8& 57.3&141.7& 8.5& 56.0&137.7& 9.5\\
  1702. 53.4&139.3& 7.0& 53.1&136.0& 10.5& 54.0&137.7& 6.6& 54.5&136.7& 6.6\\
  1703. 53.3&137.8& 8.8& 52.1&139.3& 11.6& 48.0&114.4& 48.2& 44.2&129.6& 36.2\\
  1704. 39.4&136.8& 44.9& 42.7&124.0& 43.0& 38.1&134.4& 37.5& 37.1&131.9& 31.5\\
  1705. 37.6&125.4& 39.9& 31.2&127.9& 35.5& 40.1&112.2& 51.7& 29.3&118.6& 36.5\\
  1706. 23.8&114.5& 40.2&141.0&127.8& 7.8&140.1&127.3& 17.0&140.9&121.4& 36.4\\
  1707. 135.0&132.3& 19.6&139.3&122.9& 15.0&142.0&117.2& 28.8&140.4&117.2& 20.1\\
  1708. \hline
  1709. \end{tabular}
  1710. \end{center}
  1711. \clearpage
  1712. \scriptsize
  1713. \begin{center}
  1714. {\normalsize Table~\thetable (continued).}
  1715. \par\vspace{\baselineskip}\par
  1716. \begin{tabular}{rrrrrrrrrrrr}
  1717. \hline
  1718. \(x\)&\(y\)& dbh &\(x\)&\(y\)& dbh &\(x\)&\(y\)& dbh &\(x\)&\(y\)& dbh \\
  1719. \hline
  1720. 138.5&121.5& 39.3& 28.7&158.8& 37.9& 33.7&162.3& 40.6& 23.1&160.8& 33.0\\
  1721. 11.3&158.9& 35.7& 18.2&168.2& 20.6& 21.5&172.3& 22.0& 15.9&168.3& 16.3\\
  1722. 15.4&172.8& 5.6& 14.0&174.2& 7.4& 6.8&179.6& 42.3& 6.0&184.1& 43.8\\
  1723. 1.6&194.9& 53.0& 43.6&197.3& 48.1& 39.4&195.5& 41.9& 37.1&196.1& 48.0\\
  1724. 23.7&193.9& 75.9& 21.5&187.9& 40.4& 27.7&188.7& 40.9& 32.3&178.9& 39.4\\
  1725. 32.6&168.6& 40.9& 37.7&176.9& 17.6&107.5&138.5& 17.8&107.9&139.5& 3.7\\
  1726. 116.5&122.6& 19.0&114.5&127.7& 11.2&115.3&127.4& 27.6&115.3&128.1& 14.5\\
  1727. 119.0&127.4& 34.4&119.4&127.7& 20.0& 94.7&179.8& 2.9& 89.3&185.0& 7.3\\
  1728. 90.8&174.0& 52.7& 95.3&158.4& 8.7& 90.9&162.1& 3.6& 90.2&162.1& 4.6\\
  1729. 90.2&161.7& 11.4& 90.6&160.8& 11.0& 93.0&158.0& 18.7& 78.4&172.4& 5.6\\
  1730. 76.2&171.4& 2.1& 75.8&171.0& 3.3& 75.7&169.7& 11.5& 82.7&163.5& 2.6\\
  1731. 76.7&166.3& 4.4& 74.7&167.1& 18.3&119.4&170.8& 7.5& 74.2&164.3& 17.2\\
  1732. 73.9&162.7& 4.6& 81.7&156.7& 32.0& 79.5&156.3& 56.7& 56.8&116.0& 46.0\\
  1733. 62.2&137.7& 7.8& 58.2&125.1& 54.9& 54.1&115.5& 45.5& 59.5&138.1& 9.2\\
  1734. 58.6&140.3& 13.2& 58.8&141.5& 15.3& 57.9&137.3& 8.5&153.5&159.9& 2.2\\
  1735. 155.9&183.7& 58.8&160.4&176.6& 47.5&171.3&185.1& 52.2&182.8&187.4& 56.3\\
  1736. 182.5&196.0& 39.8&176.3&197.7& 38.1&161.9&199.4& 38.9&199.5&179.4& 9.7\\
  1737. 197.6&176.9& 7.4&196.3&192.4& 22.1&195.7&180.5& 16.9&196.2&177.1& 5.9\\
  1738. 196.3&176.0& 10.5&193.7&185.8& 9.5&191.7&189.2& 45.9&194.5&173.8& 11.4\\
  1739. 192.7&177.3& 7.8&188.9&182.1& 14.4&190.1&174.4& 8.3&186.9&179.4& 30.6\\
  1740. 26.9&111.3& 44.4& 17.9&111.0& 38.7& 34.4&104.2& 41.5& 31.9&103.2& 34.5\\
  1741. 20.6&101.5& 31.8& 14.1&103.1& 39.7& 2.9&122.8& 23.3& 6.4&125.9& 37.7\\
  1742. 2.2&142.2& 43.0& 11.7&116.2& 39.2& 14.2&116.5& 40.4& 15.6&118.1& 36.7\\
  1743. 13.6&127.4& 48.4& 11.1&134.8& 27.9& 7.2&141.7& 46.4& 12.2&140.1& 38.5\\
  1744. 23.0&132.7& 39.4& 30.2&133.9& 50.0& 27.7&136.5& 51.6& 3.4&148.8& 38.7\\
  1745. 15.4&145.6& 39.6& 16.7&146.4& 29.1& 24.3&145.7& 44.0& 0.4&175.2& 50.9\\
  1746. 0.0&177.5& 50.8& 7.9&151.0& 43.0& 33.2&151.2& 44.5& 36.6&150.6& 29.8\\
  1747. 42.2&153.7& 44.3& 24.5&153.4& 51.2& 40.4&179.3& 37.7& 41.0&176.6& 36.8\\
  1748. 43.9&182.2& 33.6& 44.7&184.6& 47.9& 45.6&175.2& 32.0& 47.5&175.9& 40.3\\
  1749. 51.2&177.9& 42.5& 55.0&159.3& 59.7& 58.0&180.3& 44.2& 54.6&188.7& 30.9\\
  1750. 58.9&180.0& 39.5& 63.9&178.6& 48.7& 64.3&178.9& 32.8& 65.6&179.3& 47.2\\
  1751. 61.0&184.9& 42.1& 63.1&183.3& 43.8& 86.1&186.9& 30.5& 65.8&194.9& 28.3\\
  1752. 90.0&195.1& 10.4& 94.3&196.1& 15.0& 91.9&197.1& 7.4& 86.5&197.4& 15.3\\
  1753. 87.5&199.3& 17.5& 93.9&199.2& 5.0& 92.4&199.3& 12.2& 81.8&198.9& 9.0\\
  1754. 99.0&158.1& 2.4& 94.1&187.2& 13.7& 95.4&182.9& 13.1& 97.1&168.4& 12.8\\
  1755. 79.2&155.6& 27.0& 61.6&158.2& 2.6& 70.3&153.1& 4.9& 79.8&151.8& 35.0\\
  1756. 110.1&150.4& 23.7&116.1&156.8& 42.9&114.0&165.1& 14.2&103.2&154.4& 3.3\\
  1757. 112.3&167.0& 28.4&110.4&167.3& 10.0&110.6&166.4& 6.4&107.0&165.0& 22.0\\
  1758. 105.6&160.6& 4.3&104.0&162.4& 10.0&104.0&166.1& 9.2&103.7&167.2& 3.7\\
  1759. 108.6&182.1& 66.7&105.7&182.6& 68.0&102.8&169.7& 23.1&101.5&171.8& 5.7\\
  1760. 100.4&170.5& 11.7&144.1&199.0& 40.4&138.3&197.9& 43.3&142.7&197.2& 60.2\\
  1761. 118.8&188.0& 55.5&142.3&173.3& 54.1&143.8&156.0& 22.3&145.3&155.6& 21.4\\
  1762. 151.2&192.2& 55.7&153.7&176.5& 51.4&186.9&174.7& 23.9&181.2&176.9& 5.2\\
  1763. 181.1&176.1& 7.6&177.2&174.5& 27.8&182.8&162.9& 49.6&180.0&160.2& 51.0\\
  1764. 189.1&156.3& 50.7&196.9&151.4& 43.4&171.4&161.6& 55.6&169.1&160.0& 4.3\\
  1765. 162.5&157.3& 2.5&156.7&155.3& 23.5&154.1&150.8& 8.0& 87.7&200.0& 11.7\\
  1766. \hline
  1767. \end{tabular}
  1768. \end{center}
  1769. \normalsize
  1770. \clearpage
  1771. \begin{table}
  1772. \caption{
  1773. Coefficients for transforming \(\protect\sqrt{b_1}\) to a standard normal
  1774. using a Johnson \(S_U\) approximation.}
  1775. \centerline{Reproduced from Table~4 of D'Agostino and
  1776. Pearson~\protect\cite{dagostino73}.}
  1777. \label{tbl:johnson}
  1778. \scriptsize
  1779. \begin{center}
  1780. \begin{tabular}{rllrllrll}\hline
  1781. \multicolumn{1}{c}{\(n\)} & \multicolumn{1}{c}{\(\delta\)}
  1782. & \multicolumn{1}{c}{\(1/\lambda\)} &
  1783. \multicolumn{1}{c}{\(n\)} & \multicolumn{1}{c}{\(\delta\)}
  1784. & \multicolumn{1}{c}{\(1/\lambda\)} &
  1785. \multicolumn{1}{c}{\(n\)} & \multicolumn{1}{c}{\(\delta\)}
  1786. & \multicolumn{1}{c}{\(1/\lambda\)} \\ \hline
  1787. 8 & 5.563 & 0.3030 & 62 & 3.389 & 1.0400 & 260 & 5.757 & 1.1744 \\
  1788. 9 & 4.260 & 0.4080 & 64 & 3.420 & 1.0449 & 270 & 5.835 & 1.1761 \\
  1789. 10 & 3.734 & 0.4794 & 66 & 3.450 & 1.0495 & 280 & 5.946 & 1.1779 \\
  1790. & & & 68 & 3.480 & 1.0540 & 290 & 6.039 & 1.1793 \\
  1791. 11 & 3.447 & 0.5339 & 70 & 3.510 & 1.0581 & 300 & 6.130 & 1.1808 \\
  1792. 12 & 3.270 & 0.5781 \\
  1793. 13 & 3.151 & 0.6153 & 72 & 3.540 & 1.0621 & 310 & 6.220 & 1.1821 \\
  1794. 14 & 3.069 & 0.6473 & 74 & 3.569 & 1.0659 & 320 & 6.308 & 1.1834 \\
  1795. 15 & 3.010 & 0.6753 & 76 & 3.599 & 1.0695 & 330 & 6.396 & 1.1846 \\
  1796. & & & 78 & 3.628 & 1.0730 & 340 & 6.482 & 1.1858 \\
  1797. 16 & 2.968 & 0.7001 & 80 & 3.657 & 1.0763 & 350 & 6.567 & 1.1868 \\
  1798. 17 & 2.937 & 0.7224 \\
  1799. 18 & 2.915 & 0.7426 & 82 & 3.686 & 1.0795 & 360 & 6.651 & 1.1879 \\
  1800. 19 & 2.900 & 0.7610 & 84 & 3.715 & 1.0825 & 370 & 6.733 & 1.1888 \\
  1801. 20 & 2.890 & 0.7779 & 86 & 3.744 & 1.0854 & 380 & 6.815 & 1.1897 \\
  1802. & & & 88 & 3.772 & 1.0882 & 390 & 6.896 & 1.1906 \\
  1803. 21 & 2.884 & 0.7934 & 90 & 3.801 & 1.0909 & 400 & 6.976 & 1.1914 \\
  1804. 22 & 2.882 & 0.8078 \\
  1805. 23 & 2.882 & 0.8211 & 92 & 3.829 & 1.0934 & 410 & 7.056 & 1.1922 \\
  1806. 24 & 2.884 & 0.8336 & 94 & 3.857 & 1.0959 & 420 & 7.134 & 1.1929 \\
  1807. 25 & 2.889 & 0.8452 & 96 & 3.885 & 1.0983 & 430 & 7.211 & 1.1937 \\
  1808. & & & 98 & 3.913 & 1.1006 & 440 & 7.288 & 1.1943 \\
  1809. 26 & 2.895 & 0.8561 & 100 & 3.940 & 1.1028 & 450 & 7.363 & 1.1950 \\
  1810. 27 & 2.902 & 0.8664 \\
  1811. 28 & 2.910 & 0.8760 & 105 & 4.009 & 1.1080 & 460 & 7.438 & 1.1956 \\
  1812. 29 & 2.920 & 0.8851 & 110 & 4.076 & 1.1128 & 470 & 7.513 & 1.1962 \\
  1813. 30 & 2.930 & 0.8938 & 115 & 4.142 & 1.1172 & 480 & 7.586 & 1.1968 \\
  1814. & & & 120 & 4.207 & 1.1212 & 490 & 7.659 & 1.1974 \\
  1815. 31 & 2.941 & 0.9020 & 125 & 4.272 & 1.1250 & 500 & 7.731 & 1.1959 \\
  1816. 32 & 2.952 & 0.9097 \\
  1817. 33 & 2.964 & 0.9171 & 130 & 4.336 & 1.1285 & 520 & 7.873 & 1.1989 \\
  1818. 34 & 2.977 & 0.9241 & 135 & 4.398 & 1.1318 & 540 & 8.013 & 1.1998 \\
  1819. 35 & 2.990 & 0.9308 & 140 & 4.460 & 1.1348 & 560 & 8.151 & 1.2007 \\
  1820. & & & 145 & 4.521 & 1.1377 & 580 & 8.286 & 1.2015 \\
  1821. 36 & 3.003 & 0.9372 & 150 & 4.582 & 1.1403 & 600 & 8.419 & 1.2023 \\
  1822. 37 & 3.016 & 0.9433 \\
  1823. 38 & 3.030 & 0.9492 & 155 & 4.641 & 1.1428 & 620 & 8.550 & 1.2030 \\
  1824. 39 & 3.044 & 0.9548 & 160 & 4.700 & 1.1452 & 640 & 8.679 & 1.2036 \\
  1825. 40 & 3.058 & 0.9601 & 165 & 4.758 & 1.1474 & 660 & 8.806 & 1.2043 \\
  1826. & & & 170 & 4.816 & 1.1496 & 680 & 8.931 & 1.2049 \\
  1827. 41 & 3.073 & 0.9653 & 175 & 4.873 & 1.1516 & 700 & 9.054 & 1.2054 \\
  1828. 42 & 3.087 & 0.9702 \\
  1829. 43 & 3.102 & 0.9750 & 180 & 4.929 & 1.1535 & 720 & 9.176 & 1.2060 \\
  1830. 44 & 3.117 & 0.9795 & 185 & 4.985 & 1.1553 & 740 & 9.297 & 1.2065 \\
  1831. 45 & 3.131 & 0.9840 & 190 & 5.040 & 1.1570 & 760 & 9.415 & 1.2069 \\
  1832. & & & 195 & 5.094 & 1.1586 & 780 & 9.533 & 1.2073 \\
  1833. 46 & 3.146 & 0.9882 & 200 & 5.148 & 1.1602 & 800 & 9.649 & 1.2078 \\
  1834. 47 & 3.161 & 0.9923 \\
  1835. 48 & 3.176 & 0.9963 & 205 & 5.202 & 1.1616 & 820 & 9.763 & 1.2082 \\
  1836. 49 & 3.192 & 1.0001 & 210 & 5.255 & 1.1631 & 840 & 9.876 & 1.2086 \\
  1837. 50 & 3.207 & 1.0038 & 215 & 5.307 & 1.1644 & 860 & 9.988 & 1.2089 \\
  1838. & & & 220 & 5.359 & 1.1657 & 880 & 10.098 & 1.2093 \\
  1839. 52 & 3.237 & 1.0108 & 225 & 5.410 & 1.1669 & 900 & 10.208 & 1.2096 \\
  1840. 54 & 3.268 & 1.0174 \\
  1841. 56 & 3.298 & 1.0235 & 230 & 5.461 & 1.1681 & 920 & 10.316 & 1.2100 \\
  1842. 58 & 3.329 & 1.0293 & 235 & 5.511 & 1.1693 & 940 & 10.423 & 1.2103 \\
  1843. 60 & 3.359 & 1.0348 & 240 & 5.561 & 1.1704 & 960 & 10.529 & 1.2106 \\
  1844. & & & 245 & 5.611 & 1.1714 & 980 & 10.634 & 1.2109 \\
  1845. & & & 250 & 5.660 & 1.1724 & 1000 & 10.738 & 1.2111 \\
  1846. \hline
  1847. \end{tabular}
  1848. \end{center}
  1849. \normalsize
  1850. \end{table}
  1851. \clearpage
  1852. \begin{table}
  1853. \caption{Coefficients \(\{a_{n-i+1}\}\) for the Shapiro-Wilk
  1854. \(W\) Test for Normality.}
  1855. \centerline{Reproduced from Table~5 of Shapiro and Wilk~\cite{shapiro65}.}
  1856. \label{tbl:shapiro-wilk-a}
  1857. \tiny
  1858. \begin{center}
  1859. \begin{tabular}{rcccccccccc}\hline
  1860. \multicolumn{1}{c}{\(i\)} & \multicolumn{10}{c}{\(n\)} \\ \hline
  1861. & \multicolumn{1}{c}{2}
  1862. & \multicolumn{1}{c}{3}
  1863. & \multicolumn{1}{c}{4}
  1864. & \multicolumn{1}{c}{5}
  1865. & \multicolumn{1}{c}{6}
  1866. & \multicolumn{1}{c}{7}
  1867. & \multicolumn{1}{c}{8}
  1868. & \multicolumn{1}{c}{9}
  1869. & \multicolumn{1}{c}{10} \\ \cline{2-10}
  1870. 1&0.7071&0.7071&0.6872&0.6646&0.6431&0.6233&0.6052&0.5888&0.5739\\
  1871. 2& --&0.0000&0.1677&0.2413&0.2806&0.3031&0.3164&0.3244&0.3291\\
  1872. 3& --& --& -- &0.0000&0.0875&0.1401&0.1743&0.1976&0.2141\\
  1873. 4& --& --& -- & -- & -- &0.0000&0.0561&0.0947&0.1224\\
  1874. 5& --& --& -- & -- & -- & -- & -- &0.0000&0.0399\\
  1875. \\
  1876. & \multicolumn{1}{c}{11}
  1877. & \multicolumn{1}{c}{12}
  1878. & \multicolumn{1}{c}{13}
  1879. & \multicolumn{1}{c}{14}
  1880. & \multicolumn{1}{c}{15}
  1881. & \multicolumn{1}{c}{16}
  1882. & \multicolumn{1}{c}{17}
  1883. & \multicolumn{1}{c}{18}
  1884. & \multicolumn{1}{c}{19}
  1885. & \multicolumn{1}{c}{20} \\ \cline{2-11}
  1886. 1&0.5601&0.5475&0.5359&0.5251&0.5150&0.5056&0.4968&0.4886&0.4808&0.4734\\
  1887. 2&0.3315&0.3325&0.3325&0.3318&0.3306&0.3290&0.3273&0.3253&0.3232&0.3211\\
  1888. 3&0.2260&0.2347&0.2412&0.2460&0.2495&0.2521&0.2540&0.2553&0.2561&0.2565\\
  1889. 4&0.1429&0.1586&0.1707&0.1802&0.1878&0.1939&0.1988&0.2027&0.2059&0.2085\\
  1890. 5&0.0695&0.0922&0.1099&0.1240&0.1353&0.1447&0.1524&0.1587&0.1641&0.1686\\
  1891. 6&0.0000&0.0303&0.0539&0.0727&0.0880&0.1005&0.1109&0.1197&0.1271&0.1334\\
  1892. 7& -- & -- &0.0000&0.0240&0.0433&0.0593&0.0725&0.0837&0.0932&0.1013\\
  1893. 8& -- & -- & -- & -- &0.0000&0.0196&0.0359&0.0496&0.0612&0.0711\\
  1894. 9& -- & -- & -- & -- & -- & -- &0.0000&0.0163&0.0303&0.0422\\
  1895. 10& -- & -- & -- & -- & -- & -- & -- & -- &0.0000&0.0140\\
  1896. \\
  1897. & \multicolumn{1}{c}{21}
  1898. & \multicolumn{1}{c}{22}
  1899. & \multicolumn{1}{c}{23}
  1900. & \multicolumn{1}{c}{24}
  1901. & \multicolumn{1}{c}{25}
  1902. & \multicolumn{1}{c}{26}
  1903. & \multicolumn{1}{c}{27}
  1904. & \multicolumn{1}{c}{28}
  1905. & \multicolumn{1}{c}{29}
  1906. & \multicolumn{1}{c}{30} \\ \cline{2-11}
  1907. 1&0.4643&0.4590&0.4542&0.4493&0.4450&0.4407&0.4366&0.4328&0.4291&0.4254\\
  1908. 2&0.3185&0.3156&0.3126&0.3098&0.3069&0.3043&0.3018&0.2992&0.2968&0.2944\\
  1909. 3&0.2578&0.2571&0.2563&0.2554&0.2543&0.2533&0.2522&0.2510&0.2499&0.2487\\
  1910. 4&0.2119&0.2131&0.2139&0.2145&0.2148&0.2151&0.2152&0.2151&0.2150&0.2148\\
  1911. 5&0.1736&0.1764&0.1787&0.1807&0.1822&0.1836&0.1848&0.1857&0.1864&0.1870\\
  1912. 6&0.1399&0.1443&0.1480&0.1512&0.1539&0.1563&0.1584&0.1601&0.1616&0.1630\\
  1913. 7&0.1092&0.1150&0.1201&0.1245&0.1283&0.1316&0.1346&0.1372&0.1395&0.1415\\
  1914. 8&0.0804&0.0878&0.0941&0.0997&0.1046&0.1089&0.1128&0.1162&0.1192&0.1219\\
  1915. 9&0.0530&0.0618&0.0696&0.0764&0.0823&0.0876&0.0923&0.0965&0.1002&0.1036\\
  1916. 10&0.0263&0.0368&0.0459&0.0539&0.0610&0.0672&0.0728&0.0778&0.0822&0.0862\\
  1917. 11&0.0000&0.0122&0.0228&0.0321&0.0403&0.0476&0.0540&0.0598&0.0650&0.0697\\
  1918. 12& -- & -- &0.0000&0.0107&0.0200&0.0284&0.0358&0.0424&0.0483&0.0537\\
  1919. 13& -- & -- & -- & -- &0.0000&0.0094&0.0178&0.0253&0.0320&0.0381\\
  1920. 14& -- & -- & -- & -- & -- & -- &0.0000&0.0084&0.0159&0.0227\\
  1921. 15& -- & -- & -- & -- & -- & -- & -- & -- &0.0000&0.0076\\
  1922. \\
  1923. & \multicolumn{1}{c}{31}
  1924. & \multicolumn{1}{c}{32}
  1925. & \multicolumn{1}{c}{33}
  1926. & \multicolumn{1}{c}{34}
  1927. & \multicolumn{1}{c}{35}
  1928. & \multicolumn{1}{c}{36}
  1929. & \multicolumn{1}{c}{37}
  1930. & \multicolumn{1}{c}{38}
  1931. & \multicolumn{1}{c}{39}
  1932. & \multicolumn{1}{c}{40} \\ \cline{2-11}
  1933. 1&0.4220&0.4188&0.4156&0.4127&0.4096&0.4068&0.4040&0.4015&0.3989&0.3964\\
  1934. 2&0.2921&0.2898&0.2876&0.2854&0.2834&0.2813&0.2794&0.2774&0.2755&0.2737\\
  1935. 3&0.2475&0.2463&0.2451&0.2439&0.2427&0.2415&0.2403&0.2391&0.2380&0.2368\\
  1936. 4&0.2145&0.2141&0.2137&0.2132&0.2127&0.2121&0.2116&0.2110&0.2104&0.2098\\
  1937. 5&0.1874&0.1878&0.1880&0.1882&0.1883&0.1883&0.1883&0.1881&0.1880&0.1878\\
  1938. 6&0.1641&0.1651&0.1660&0.1667&0.1673&0.1678&0.1683&0.1686&0.1689&0.1691\\
  1939. 7&0.1433&0.1449&0.1463&0.1475&0.1487&0.1496&0.1505&0.1513&0.1520&0.1526\\
  1940. 8&0.1243&0.1265&0.1284&0.1301&0.1317&0.1331&0.1344&0.1356&0.1366&0.1376\\
  1941. 9&0.1066&0.1093&0.1118&0.1140&0.1160&0.1179&0.1196&0.1211&0.1225&0.1237\\
  1942. 10&0.0899&0.0931&0.0961&0.0988&0.1013&0.1036&0.1056&0.1075&0.1092&0.1108\\
  1943. 11&0.0739&0.0777&0.0812&0.0844&0.0873&0.0900&0.0924&0.0947&0.0967&0.0986\\
  1944. 12&0.0585&0.0629&0.0669&0.0706&0.0739&0.0770&0.0798&0.0824&0.0848&0.0870\\
  1945. 13&0.0435&0.0485&0.0530&0.0572&0.0610&0.0645&0.0677&0.0706&0.0733&0.0759\\
  1946. 14&0.0289&0.0344&0.0395&0.0441&0.0484&0.0523&0.0559&0.0592&0.0622&0.0651\\
  1947. 15&0.0144&0.0206&0.0262&0.0314&0.0361&0.0404&0.0444&0.0481&0.0515&0.0546\\
  1948. 16&0.0000&0.0068&0.0131&0.0187&0.0239&0.0287&0.0331&0.0372&0.0409&0.0444\\
  1949. 17& -- & -- &0.0000&0.0062&0.0119&0.0172&0.0220&0.0264&0.0305&0.0343\\
  1950. 18& -- & -- & -- & -- &0.0000&0.0057&0.0110&0.0158&0.0203&0.0244\\
  1951. 19& -- & -- & -- & -- & -- & -- &0.0000&0.0053&0.0101&0.0146\\
  1952. 20& -- & -- & -- & -- & -- & -- & -- & -- &0.0000&0.0049\\
  1953. \\
  1954. & \multicolumn{1}{c}{41}
  1955. & \multicolumn{1}{c}{42}
  1956. & \multicolumn{1}{c}{43}
  1957. & \multicolumn{1}{c}{44}
  1958. & \multicolumn{1}{c}{45}
  1959. & \multicolumn{1}{c}{46}
  1960. & \multicolumn{1}{c}{47}
  1961. & \multicolumn{1}{c}{48}
  1962. & \multicolumn{1}{c}{49}
  1963. & \multicolumn{1}{c}{50} \\ \cline{2-11}
  1964. 1&0.3940&0.3917&0.3894&0.3872&0.3850&0.3830&0.3808&0.3789&0.3770&0.3964\\
  1965. 2&0.2719&0.2701&0.2684&0.2667&0.2651&0.2635&0.2620&0.2604&0.2589&0.2737\\
  1966. 3&0.2357&0.2345&0.2334&0.2323&0.2313&0.2302&0.2291&0.2281&0.2271&0.2368\\
  1967. 4&0.2091&0.2085&0.2078&0.2072&0.2065&0.2058&0.2052&0.2045&0.2038&0.2098\\
  1968. 5&0.1876&0.1874&0.1871&0.1868&0.1865&0.1862&0.1859&0.1855&0.1851&0.1878\\
  1969. 6&0.1693&0.1694&0.1695&0.1695&0.1695&0.1695&0.1695&0.1693&0.1692&0.1691\\
  1970. 7&0.1531&0.1535&0.1539&0.1542&0.1545&0.1548&0.1550&0.1551&0.1553&0.1554\\%1526\\
  1971. 8&0.1384&0.1392&0.1398&0.1405&0.1410&0.1415&0.1420&0.1423&0.1427&0.1430\\%1376\\
  1972. 9&0.1249&0.1259&0.1269&0.1278&0.1286&0.1293&0.1300&0.1306&0.1312&0.1317\\%1237\\
  1973. 10&0.1123&0.1136&0.1149&0.1160&0.1170&0.1180&0.1189&0.1197&0.1205&0.1212\\%1108\\
  1974. 11&0.1004&0.1020&0.1035&0.1049&0.1062&0.1073&0.1085&0.1095&0.1105&0.1113\\
  1975. 12&0.0891&0.0909&0.0927&0.0943&0.0959&0.0972&0.0986&0.0998&0.1010&0.1020\\
  1976. 13&0.0782&0.0804&0.0824&0.0842&0.0860&0.0876&0.0892&0.0906&0.0919&0.0932\\
  1977. 14&0.0677&0.0701&0.0724&0.0745&0.0765&0.0783&0.0801&0.0817&0.0832&0.0846\\
  1978. 15&0.0575&0.0602&0.0628&0.0651&0.0673&0.0694&0.0713&0.0731&0.0748&0.0764\\
  1979. 16&0.0476&0.0506&0.0534&0.0560&0.0584&0.0607&0.0628&0.0648&0.0667&0.0685\\
  1980. 17&0.0379&0.0411&0.0442&0.0471&0.0497&0.0522&0.0546&0.0568&0.0588&0.0608\\
  1981. 18&0.0283&0.0318&0.0352&0.0383&0.0412&0.0439&0.0465&0.0489&0.0511&0.0532\\
  1982. 19&0.0188&0.0227&0.0263&0.0296&0.0328&0.0357&0.0385&0.0411&0.0436&0.0459\\
  1983. 20&0.0094&0.0136&0.0175&0.0211&0.0245&0.0277&0.0307&0.0335&0.0361&0.0386\\
  1984. 21& -- &0.0045&0.0087&0.0126&0.0163&0.0197&0.0229&0.0259&0.0288&0.0314\\
  1985. 22& -- & -- &0.0000&0.0042&0.0081&0.0118&0.0153&0.0185&0.0215&0.0244\\
  1986. 23& -- & -- & -- & -- &0.0000&0.0039&0.0076&0.0111&0.0143&0.0174\\
  1987. 24& -- & -- & -- & -- & -- & -- &0.0000&0.0037&0.0071&0.0104\\
  1988. 25& -- & -- & -- & -- & -- & -- & -- & -- &0.0000&0.0035\\
  1989. \hline
  1990. \end{tabular}
  1991. \end{center}
  1992. \normalsize
  1993. \end{table}
  1994. \clearpage
  1995. \begin{table}
  1996. \caption{Critical Values of the Shapiro-Wilk \(W\) for Testing Normality.}
  1997. \centerline{Reproduced from Table~6 of Shapiro and Wilk~\cite{shapiro65}.}
  1998. \label{tbl:w-test}
  1999. \begin{center}
  2000. \begin{tabular}{rlllll}\hline
  2001. \(n\) & \multicolumn{5}{c}{\(\alpha\)} \\ \cline{2-6}
  2002. & 0.01 & 0.02 & 0.05 & 0.10 & 0.50 \\ \hline
  2003. 3 & 0.753 & 0.756 & 0.767 & 0.789 & 0.959 \\ \hline
  2004. \end{tabular}
  2005. \end{center}
  2006. \normalsize
  2007. \end{table}
  2008. \clearpage
  2009. \begin{table}
\caption{Critical Values of the Shapiro-Wilk \(W\) for Testing Exponentiality.}
  2011. \centerline{Reproduced from Table~1 of Shapiro and Wilk~\cite{shapiro72}.}
  2012. \label{tbl:w-test-e}
  2013. \scriptsize
  2014. \begin{center}
  2015. \begin{tabular}{r%
  2016. @{\extracolsep{3pt}}l%
  2017. @{\extracolsep{2pt}}l%
  2018. @{\extracolsep{2pt}}l%
  2019. @{\extracolsep{2pt}}l%
  2020. @{\extracolsep{2pt}}l%
  2021. @{\extracolsep{2pt}}l%
  2022. @{\extracolsep{2pt}}l%
  2023. @{\extracolsep{2pt}}l%
  2024. @{\extracolsep{2pt}}l%
  2025. @{\extracolsep{2pt}}l%
  2026. @{\extracolsep{2pt}}l%
  2027. }\hline
  2028. \(n\) & \multicolumn{11}{c}{\(\alpha\)} \\ \cline{2-12}
  2029. &\multicolumn{1}{c}{0.005}
  2030. &\multicolumn{1}{c}{0.01}
  2031. &\multicolumn{1}{c}{0.025}
  2032. &\multicolumn{1}{c}{0.05 }
  2033. &\multicolumn{1}{c}{0.10 }
  2034. &\multicolumn{1}{c}{0.50 }
  2035. &\multicolumn{1}{c}{0.90 }
  2036. &\multicolumn{1}{c}{0.95 }
  2037. &\multicolumn{1}{c}{0.975}
  2038. &\multicolumn{1}{c}{0.99 }
  2039. &\multicolumn{1}{c}{0.995}\\ \hline
  2040. 3&.2519&.2538&.2596&.2697&.2915&.5714&.9709&.9926&.9981&.9997&.99993\\
  2041. 4&.1241&.1302&.1434&.1604&.1891&.3768&.7514&.8581&.9236&.9680&.9837\\
  2042. \hline
  2043. \end{tabular}
  2044. \end{center}
  2045. \normalsize
  2046. \end{table}
  2047. \clearpage
  2048. \begin{table}
  2049. \caption{Coefficients \(\{b_{n-i+1}\}\) for the Shapiro-Francia
  2050. \(W'\) Test for Normality.}
\centerline{Reproduced from Table~1 of Shapiro and Francia~\cite{shapiro72b}.}
  2052. \label{tbl:shapiro-francia-b}
  2053. %\begin{center}
  2054. %\begin{tabular}
  2055. %\hline
  2056. %\end{tabular}
  2057. %\end{center}
  2058. \normalsize
  2059. \end{table}
  2060. \clearpage
  2061. \begin{table}
\caption{Percentage Points for the \(W'\) Test Statistic.}
\centerline{Reproduced from Table~1 of Shapiro and Francia~\cite{shapiro72b}.}
  2064. \label{tbl:w-prime-test}
  2065. \scriptsize
  2066. \begin{center}
  2067. \begin{tabular}{l@{\extracolsep{1pt}}r%
  2068. @{\extracolsep{1pt}}r%
  2069. @{\extracolsep{1pt}}r%
  2070. @{\extracolsep{1pt}}rrrrrrrr}\hline
  2071. \(n\) & \multicolumn{11}{c}{\(P\)} \\ \cline{2-12}
  2072. &
  2073. \multicolumn{1}{l}{0.01} &
  2074. \multicolumn{1}{l}{0.05} &
  2075. \multicolumn{1}{l}{0.10} &
  2076. \multicolumn{1}{l}{0.15} &
  2077. \multicolumn{1}{l}{0.20} &
  2078. \multicolumn{1}{l}{0.50} &
  2079. \multicolumn{1}{l}{0.80} &
  2080. \multicolumn{1}{l}{0.85} &
  2081. \multicolumn{1}{l}{0.90} &
  2082. \multicolumn{1}{l}{0.95} &
  2083. \multicolumn{1}{l}{0.99}\\
  2084. \hline
  2085. 35&0.919&0.943&0.952&0.956&0.964&0.976&0.982&0.985&0.987&0.989&0.992\\
  2086. 50& .935& .953& .963& .968& .971& .981& .987& .988& .990& .991& .994\\
  2087. \\
  2088. 51&0.935&0.954&0.964&0.968&0.971&0.981&0.988&0.989&0.990&0.992&0.994\\
  2089. 53& .938& .957& .964& .969& .972& .982& .988& .989& .990& .992& .994\\
  2090. 55& .940& .958& .965& .971& .973& .983& .988& .990& .991& .992& .994\\
  2091. 57& .944& .961& .966& .971& .974& .983& .989& .990& .991& .992& .994\\
  2092. 59& .945& .962& .967& .972& .975& .983& .989& .990& .991& .992& .994\\
  2093. \\
  2094. 61&0.947&0.963&0.968&0.973&0.975&0.984&0.990&0.990&0.991&0.992&0.994\\
  2095. 63& .947& .964& .970& .973& .976& .984& .990& .991& .992& .993& .994\\
  2096. 65& .948& .965& .971& .974& .976& .985& .990& .991& .992& .993& .995\\
  2097. 67& .950& .966& .971& .974& .977& .985& .990& .991& .992& .993& .995\\
  2098. 69& .951& .966& .972& .976& .978& .986& .990& .991& .992& .993& .995\\
  2099. \\
  2100. 71&0.953&0.967&0.972&0.976&0.978&0.986&0.990&0.991&0.992&0.994&0.995\\
  2101. 73& .956& .968& .973& .976& .979& .986& .991& .992& .993& .994& .995\\
  2102. 75& .956& .969& .973& .976& .979& .986& .991& .992& .993& .994& .995\\
  2103. 77& .957& .969& .974& .977& .980& .987& .991& .992& .993& .994& .996\\
  2104. 79& .957& .970& .975& .978& .980& .987& .991& .992& .993& .994& .996\\
  2105. \\
  2106. 81&0.958&0.970&0.975&0.979&0.981&0.987&0.992&0.992&0.993&0.994&0.996\\
  2107. 83& .960& .971& .976& .979& .981& .988& .992& .992& .993& .994& .996\\
  2108. 85& .961& .972& .977& .980& .981& .988& .992& .992& .993& .994& .996\\
  2109. 87& .961& .972& .977& .980& .982& .988& .992& .993& .994& .994& .996\\
  2110. 89& .961& .972& .977& .981& .982& .988& .992& .993& .994& .995& .996\\
  2111. \\
  2112. 91&0.962&0.973&0.978&0.981&0.983&0.989&0.992&0.993&0.994&0.995&0.996\\
  2113. 93& .963& .973& .979& .981& .983& .989& .992& .993& .994& .995& .996\\
  2114. 95& .965& .974& .979& .981& .983& .989& .993& .993& .994& .995& .996\\
  2115. 97& .965& .975& .979& .982& .984& .989& .993& .993& .994& .995& .996\\
  2116. 99& .967& .976& .980& .982& .984& .989& .993& .994& .994& .995& .996\\
  2117. \hline
  2118. \end{tabular}
  2119. \end{center}
  2120. \normalsize
  2121. \end{table}
  2122. \clearpage
  2123. \end{document}