{"id":519,"date":"2020-09-29T15:12:47","date_gmt":"2020-09-29T09:42:47","guid":{"rendered":"http:\/\/maths.jfn.ac.lk\/?page_id=519"},"modified":"2021-08-03T10:13:00","modified_gmt":"2021-08-03T04:43:00","slug":"special-degree-statistics","status":"publish","type":"page","link":"https:\/\/maths.jfn.ac.lk\/index.php\/special-degree-statistics\/","title":{"rendered":"Special Degree &#8211; Statistics"},"content":{"rendered":"\t\t<div data-elementor-type=\"wp-page\" data-elementor-id=\"519\" class=\"elementor elementor-519\">\n\t\t\t\t\t\t<section class=\"elementor-section elementor-top-section elementor-element elementor-element-439442aa elementor-section-boxed elementor-section-height-default elementor-section-height-default\" data-id=\"439442aa\" data-element_type=\"section\">\n\t\t\t\t\t\t<div class=\"elementor-container elementor-column-gap-default\">\n\t\t\t\t\t<div class=\"elementor-column elementor-col-100 elementor-top-column elementor-element elementor-element-124acff9\" data-id=\"124acff9\" data-element_type=\"column\">\n\t\t\t<div class=\"elementor-widget-wrap elementor-element-populated\">\n\t\t\t\t\t\t<div class=\"elementor-element elementor-element-6db986df elementor-widget elementor-widget-text-editor\" data-id=\"6db986df\" data-element_type=\"widget\" data-widget_type=\"text-editor.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t\t\t<h3>Level \u2013 3M<\/h3><h4>Course units effective from academic year 2016\/2017 to date<\/h4><p><div class=\"su-accordion su-u-trim\"><br \/><div class=\"su-spoiler su-spoiler-style-simple su-spoiler-icon-plus su-spoiler-closed\" data-scroll-offset=\"0\" data-anchor-in-url=\"no\"><div class=\"su-spoiler-title\" tabindex=\"0\" role=\"button\"><span class=\"su-spoiler-icon\"><\/span>STA301M3: Advanced Design of Experiments <\/div><div class=\"su-spoiler-content su-u-clearfix su-u-trim\"><br \/><div class=\"su-table su-table-alternate\"><\/p><table style=\"height: 1472px\" 
width=\"870\"><tbody><tr><td><strong>Course Code<\/strong><\/td><td colspan=\"3\" width=\"458\"><strong>STA301M3<\/strong><\/td><\/tr><tr><td><strong>Course Title<\/strong><\/td><td colspan=\"3\" width=\"458\"><strong>Advanced Design of Experiments<\/strong><\/td><\/tr><tr><td><strong>Credit Value<\/strong><\/td><td colspan=\"3\" width=\"458\"><strong>03<\/strong><\/td><\/tr><tr><td width=\"143\"><strong>Prerequisite\u00a0\u00a0\u00a0 <\/strong><strong>Prerequisite<\/strong><\/td><td colspan=\"3\" width=\"458\"><strong>STA203G3<\/strong><\/td><\/tr><tr><td rowspan=\"2\" width=\"143\"><strong>Hourly Breakdown<\/strong><\/td><td width=\"146\"><strong>Theory<\/strong><\/td><td width=\"146\"><strong>Practical<\/strong><\/td><td width=\"166\"><strong>IndependentLearning<\/strong><\/td><\/tr><tr><td width=\"146\"><strong>45<\/strong><\/td><td width=\"146\"><strong>&#8211;<\/strong><\/td><td width=\"166\"><strong>105<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><strong>Objective:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>Provide an introduction to the\u00a0 block\u00a0 and factorial\u00a0 experimental designs<\/li><li>Introduce and explain the design aspects of the experiments<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><strong>Intended Learning Outcomes:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>Explain the mathematical models and issues such as interaction, confounding etc.<\/li><li>Construct \u00a0factorial experiments confounded with blocks<\/li><li>Design fractional factorial experiments<\/li><li>Analyze experimental data of \u00a0incomplete block designs<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><strong>Course Contents:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li><strong>Factorial Designs: <\/strong>2<sup style=\"font-family: inherit\">k<\/sup><span style=\"font-family: inherit;font-size: inherit\"> Factorial Designs, 3<\/span><sup style=\"font-family: 
inherit\">k<\/sup><span style=\"font-family: inherit;font-size: inherit\">Factorial Designs, Yates\u2019 Algorithm, Blocking, Confounding, Partial Confounding, Fractional Factorial Designs, Design Resolution, Blocking Fractional Factorials, Alias Structure.<\/span><\/li><\/ul><ul><li><strong>Block Designs: <\/strong>Randomized Complete Block Design, Latin Square Design, Balanced Incomplete Block Design, Graeco- Latin Square Design, Partially Balanced Incomplete Block Design, Youden Square Design.<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><strong>Teaching Methods:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>Lectures and \u00a0Tutorial discussions<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><strong>Assessment\/ Evaluation Details:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>In-course Assessments\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 30%<\/li><li>End-of-course Examination\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 70%<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><strong>Recommended Readings:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>Douglas C. Montgomery., Design and Analysis of Experiments, Wiley Series, 2013.<\/li><li>H.R. Lindman., Analysis of Variance in Experimental Design, Springer Series, 1992.<\/li><li>R. A. Fisher., The Design of Experiments, Oliver and Boyd, 1960.<\/li><li>G. W. 
Cobb., Design and Analysis of Experiments, Springer Series, 1998.<\/li><\/ul><\/td><\/tr><\/tbody><\/table><p><\/div><\/div><\/div><div class=\"su-spoiler su-spoiler-style-simple su-spoiler-icon-plus su-spoiler-closed\" data-scroll-offset=\"0\" data-anchor-in-url=\"no\"><div class=\"su-spoiler-title\" tabindex=\"0\" role=\"button\"><span class=\"su-spoiler-icon\"><\/span>STA302M3: Medical Statistics<\/div><div class=\"su-spoiler-content su-u-clearfix su-u-trim\"><div class=\"su-table su-table-alternate\"><\/p><table><tbody><tr><td><strong>Course Code<\/strong><\/td><td colspan=\"3\" width=\"458\"><strong>STA302M3<\/strong><\/td><\/tr><tr><td><strong>Course Title<\/strong><\/td><td colspan=\"3\" width=\"458\"><strong>Medical Statistics<\/strong><\/td><\/tr><tr><td><strong>Academic Credits<\/strong><\/td><td colspan=\"3\" width=\"458\"><strong>03<\/strong><\/td><\/tr><tr><td rowspan=\"2\"><strong>Hourly Breakdown<\/strong><\/td><td width=\"126\"><strong>Theory<\/strong><\/td><td width=\"139\"><strong>Practical<\/strong><\/td><td width=\"193\"><strong>IndependentLearning<\/strong><\/td><\/tr><tr><td width=\"126\"><strong>45 <\/strong><\/td><td width=\"139\"><strong>&#8211;<\/strong><\/td><td width=\"193\"><strong>\u00a0\u00a0\u00a0\u00a0 105 <\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><strong>Objective:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"603\">Introduce the statistical methods used in medical science<\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><strong>Intended Learning Outcomes:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><ul><li>Discuss \u00a0common terms used in epidemiology<\/li><li>Discuss direct and indirect methods of adjustment of overall rates<\/li><li>Compare the disease occurrence between two groups<\/li><li>Evaluate the common odds ratio with confidence interval<\/li><li>Discuss observational and experimental studies in medical field<\/li><li>Compare the analytical studies in medical field<\/li><li>Test the 
possible effects in crossover trial<\/li><li>Express the relationship among the survival function, distribution function, hazard function and cumulative hazard function<\/li><li>Estimate survival function using parametric and non-parametric methods<\/li><li>\u00a0Illustrate two-sample comparison for survival data using common statistical procedures<\/li><li>Apply Cox proportional hazard model to real life data<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><strong>Course Contents:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><ul><li><strong>Epidemiology: <\/strong>definition of\u00a0 epidemiology<strong>, <\/strong>measuring disease frequency: population at risk, incidence, prevalence, case fatality, birth rate, death rate, life expectancy, direct and indirect standardized rate, comparing disease occurrence: absolute and relative comparison, common odds ratio: Cochran Mantel Haenszel and logit method, confidence interval for common odds ratio, Cochran Mantel Haenszel test<\/li><li><strong>Types of studies: <\/strong>observational studies<strong>: <\/strong>descriptive and\u00a0 analytical studies, ecological, cross-sectional, case-control and cohort studies, experimental studies: clinical trial, parallel group design, in-series design and crossover design<\/li><li><strong>Survival Analysis<\/strong>: censoring, survival function, hazard function, cumulative hazard function, mean and median survival time, mean residual life time, estimation of survival function: parametric and non-parametric method: Kaplan-Meier estimator, life table, cumulative hazard estimator, two sample comparison: log-rank test, maximum likelihood test and likelihood ratio test, Cox proportional hazard model<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><strong>Teaching Methods:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"603\">Lectures and Tutorial discussions<\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><strong>Assessment\/ Evaluation 
Details:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><ul><li>In-course assessments\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 \u00a0\u00a0\u00a0 30%<\/li><li>\u00a0End of course Examination\u00a0\u00a0\u00a0\u00a0 70%<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><strong>Recommended Readings:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><ul><li>David W. H., Stanley, L. and Susanne M., Applied Survival Analysis: Regression Modeling of Time to Event Data, John Wiley and Sons, New Jersey, 2008.<\/li><li>Bonita, R.,Beaglehole, R. and Kjellstrom, T., Basic Epidemiology, 2<sup>nd<\/sup> edition, World Health Organization, 2006.<\/li><li><strong>Kleinbaum<\/strong><strong>,<\/strong> D.G., Survival Analysis: A Self-Learning Text, Springer, New York, 1996.<\/li><\/ul><\/td><\/tr><\/tbody><\/table><p><\/div><\/div><\/div><div class=\"su-spoiler su-spoiler-style-simple su-spoiler-icon-plus su-spoiler-closed\" data-scroll-offset=\"0\" data-anchor-in-url=\"no\"><div class=\"su-spoiler-title\" tabindex=\"0\" role=\"button\"><span class=\"su-spoiler-icon\"><\/span>STA303M3: Categorical Data Analysis <\/div><div class=\"su-spoiler-content su-u-clearfix su-u-trim\"><div class=\"su-table su-table-alternate\"><\/p><table style=\"height: 1631px\" width=\"877\"><tbody><tr><td width=\"138\"><strong>Course Code<\/strong><\/td><td colspan=\"3\" width=\"477\"><strong>STA303M3<\/strong><\/td><\/tr><tr><td width=\"138\"><strong>Course Title<\/strong><\/td><td colspan=\"3\" width=\"477\"><strong>Categorical Data Analysis <\/strong><\/td><\/tr><tr><td width=\"138\"><strong>Academic Credits<\/strong><\/td><td colspan=\"3\" width=\"477\"><strong>03 <\/strong><\/td><\/tr><tr><td rowspan=\"2\" width=\"138\"><strong>Hourly Breakdown <\/strong><\/td><td width=\"144\"><strong>Theory<\/strong><\/td><td width=\"157\"><strong>Practical<\/strong><\/td><td width=\"175\"><strong>IndependentLearning<\/strong><\/td><\/tr><tr><td width=\"144\"><strong>\u00a045 
<\/strong><\/td><td width=\"157\"><strong>&#8211;<\/strong><\/td><td width=\"175\"><strong>\u00a0\u00a0\u00a0 105 <\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><p><strong>Objective:<\/strong><\/p><p><em>Provide knowledge <\/em>for analyzing categorical data.<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><strong>Intended Learning Outcomes:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><ul><li>Discuss major types of categorical data and their probability distributions<\/li><li>Apply appropriate descriptive and inferential statistical methods for contingency tables<\/li><li>Build appropriate statistical models for different types of categorical response data<\/li><li>Analyze repeated\/longitudinal categorical response data<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><strong>Course Contents:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><ul><li><strong>Introduction:<\/strong>Categorical response data, Probability distributions for categorical data: Bernoulli, binomial, multinomial, and Poisson,Likelihood function and Maximum likelihood estimate, Likelihood\u2010based inference methods: Wald test, score test.<\/li><li><strong>Contingency tables:<\/strong>Two-way contingency tables; Table structure, comparing proportions, relative risk, odds ratio, Pearson\u2019s Chi-square test, Likelihood ratio test, testing independence for ordinal data, Fisher\u2019s exact test for small samples.<\/li><\/ul><p style=\"padding-left: 40px\">Three-way Contingency tables; Conditional versus marginal tables, Simpson\u2019s paradox, Conditional versus marginal odds ratios, Conditional versus marginal independence, Cochran-Mantel-Haenszel (CMH) test Homogeneous association for \u00a0 tables.<\/p><ul><li><strong>Generalized Linear Model (GLM):<\/strong>Components of generalized linear models, GLMs for binary and count data; Logistic regression and Log-linear model, Statistical inference for GLM, Comparing models, Model selection, Model 
diagnostics, Logit models, Probit Models, Analysis of repeated responses.<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><strong>Teaching Methods: <\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"615\">Lectures\u00a0 and tutorial discussions<\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><strong>Assessment\/ Evaluation Details:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><ul><li>In-course assessments\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 \u00a0\u00a0\u00a0\u00a0 30%<\/li><li>End of course Examination\u00a0\u00a0\u00a0\u00a0 70%<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><strong>Recommended Readings:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><ul><li>Agresti. A, Categorical Data Analysis, 3<sup>rd<\/sup>\u00a0 Edition. New York: Wiley, 2012.<\/li><li>McCullagh. P, and Nelder. J. A, Generalized Linear Models, 2<sup>nd<\/sup> Edition, London: Chapman and Hall, 1989.<\/li><li>Powers. A. D, and Y. Xie., Statistical Methods for Categorical Data Analysis, San Diego, CA: Academic Press, 2000.<\/li><\/ul><\/td><\/tr><\/tbody><\/table><p><\/div><\/div><\/div><div class=\"su-spoiler su-spoiler-style-simple su-spoiler-icon-plus su-spoiler-closed\" data-scroll-offset=\"0\" data-anchor-in-url=\"no\"><div class=\"su-spoiler-title\" tabindex=\"0\" role=\"button\"><span class=\"su-spoiler-icon\"><\/span>STA304M3: Computational Statistics<\/div><div class=\"su-spoiler-content su-u-clearfix su-u-trim\"><br \/><div class=\"su-table su-table-alternate\"><\/p><table style=\"height: 1629px\" width=\"930\"><tbody><tr><td><strong>Course Code<\/strong><\/td><td colspan=\"3\" width=\"458\"><strong>STA304M3<\/strong><\/td><\/tr><tr><td><strong>CourseTitle<\/strong><\/td><td colspan=\"3\" width=\"458\"><strong>Computational Statistics<\/strong><\/td><\/tr><tr><td><strong>Credit Value<\/strong><\/td><td colspan=\"3\" width=\"458\"><strong>03<\/strong><\/td><\/tr><tr><td rowspan=\"2\" width=\"143\"><strong>Hourly 
Breakdown<\/strong><\/td><td width=\"146\"><strong>Theory<\/strong><\/td><td width=\"146\"><strong>Practical<\/strong><\/td><td width=\"166\"><strong>Independent Learning<\/strong><\/td><\/tr><tr><td width=\"146\"><strong>15<\/strong><\/td><td width=\"146\"><strong>60 <\/strong><\/td><td width=\"166\"><strong>75<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><strong>Objectives:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>Provide an introduction to the\u00a0 computational statistics<\/li><li>Introduce some software for the statistical computing<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><strong>Intended Learning Outcomes:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>Formulate simple functions for data management<\/li><li>Develop\u00a0 algorithms for simulation of random numbers<\/li><li>Apply Monte- Carlo simulation techniques to \u00a0real world problems<\/li><li>Apply Bootstrap methods to real world problems<\/li><li>Develop the ability to use some statistical software in a real world situation.<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><strong>Course Contents:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li><strong>Introduction: <\/strong>Make use of a statistical software to write simple functions for data management and analysis<\/li><li><strong>Simulation of random numbers: <\/strong>Box-Muller Algorithm, Inverse Transformation Method, Acceptance-Rejection Method, Polar Algorithm, Composition<\/li><\/ul><ul><li><strong>Monte- Carlo methods: <\/strong>Monte-Carlo integration, Markov chain Monte Carlo methods,\u00a0 Metropolis-Hastings algorithms,\u00a0 the Gibbs sampler<\/li><\/ul><ul><li><strong>Bootstrap methods: <\/strong>Bootstrap re-sampling techniques,\u00a0 Bootstrap confidence intervals, Bootstrap estimate of bias<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><strong>Teaching Methods:<\/strong><\/td><\/tr><tr><td colspan=\"4\" 
width=\"600\"><ul><li>Lectures,\u00a0 Laboratory practical, group assignments and e-resources<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><strong>Assessment\/ Evaluation Details:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>In-course Assessments\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 30%<\/li><li>End-of-course Practical Examination\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 70%<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><strong>Recommended Readings:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>Wendy L. Martinez, Angel R. Martinez, Computational Statistics handbook with MATLAB, Chapman and Hall\/CRC, 2015.<\/li><li>Venables, W.N., Ripley, B.D., Modern Applied Statistics with S, Springer Series, 1999.<\/li><li>MoonJung Cho, Wendy L. Martinez., Statistics in MATLAB: A Primer, Chapman and Hall\/CRC, 2014.<\/li><\/ul><\/td><\/tr><\/tbody><\/table><p><\/div><\/div><\/div><div class=\"su-spoiler su-spoiler-style-simple su-spoiler-icon-plus su-spoiler-closed\" data-scroll-offset=\"0\" data-anchor-in-url=\"no\"><div class=\"su-spoiler-title\" tabindex=\"0\" role=\"button\"><span class=\"su-spoiler-icon\"><\/span>STA305M3: Time Series Analysis <\/div><div class=\"su-spoiler-content su-u-clearfix su-u-trim\"><br \/><div class=\"su-table su-table-alternate\"><\/div><br \/><\/div><\/div><div class=\"su-spoiler su-spoiler-style-simple su-spoiler-icon-plus su-spoiler-closed\" data-scroll-offset=\"0\" data-anchor-in-url=\"no\"><div class=\"su-spoiler-title\" tabindex=\"0\" role=\"button\"><span class=\"su-spoiler-icon\"><\/span>STA306M3: Multivariate Analysis I <\/div><div class=\"su-spoiler-content su-u-clearfix su-u-trim\"><br \/><div class=\"su-table su-table-alternate\"><\/p><table style=\"height: 2105px\" width=\"904\"><tbody><tr><td><strong>Course Code<\/strong><\/td><td colspan=\"3\" width=\"458\"><strong>STA306M3<\/strong><\/td><\/tr><tr><td><strong>Course 
Title<\/strong><\/td><td colspan=\"3\" width=\"458\"><strong>Multivariate Analysis I<\/strong><\/td><\/tr><tr><td><strong>Academic Credits<\/strong><\/td><td colspan=\"3\" width=\"458\"><strong>03<\/strong><\/td><\/tr><tr><td rowspan=\"2\"><strong>Hourly Breakdown<\/strong><\/td><td width=\"126\"><strong>Theory<\/strong><\/td><td width=\"113\"><strong>Practical<\/strong><\/td><td width=\"218\"><strong>IndepInde\u00a0 Independent Learning<\/strong><\/td><\/tr><tr><td width=\"126\"><strong>45<\/strong><\/td><td width=\"113\"><strong>&#8211;<\/strong><\/td><td width=\"218\"><strong>105 <\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><strong>Objective:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"603\">Introduce multivariate techniques and their applications to real world problems<\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><strong>Intended Learning Outcomes:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><ul><li>Distinguish univariate and multivariate data<\/li><li>\u00a0Find mean vector, covariance matrix and correlation matrix for a multivariate data<\/li><li>Determine the distribution of linear combination of random variables<\/li><li>Discuss the use of Wishart distribution in multivariate data<\/li><li>Discuss the properties of multivariate normal and Wishart distributions<\/li><li>Apply Hotelling T<sup>2<\/sup> statistics for testing the plausible value for mean vector<\/li><li>Compare several covariance matrices<\/li><li>Apply statistical tests to multivariate normal distributions<\/li><li>Construct confidence intervals for mean vector and treatment effects<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><strong>Syllabus Outline<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><strong>Course Contents:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><ul><li><strong>Introduction :<\/strong>Multivariate Data, Multivariate marginal and conditional distributions, mean vector, variance-covariance and correlation 
matrices, properties of covariance and correlation matrices, linear combination of random variables<\/li><li><strong>Multivariate distribution : <\/strong>Multivariate Normal distribution; probability density of multivariate Normal distribution and its properties, transforming multivariate observations, multivariate likelihood estimation of mean vector and covariance matrix, Wishart distribution; Probability density of Wishart distribution and its properties, Sampling distribution of sample mean vector and sample covariance matrix<\/li><li><strong>Inference about mean vector :<\/strong>Hotelling T<sup>2<\/sup> distribution, Hotelling T<sup>2<\/sup> test for plausible value for mean vector, confidence region, Comparisons of component means: Simultaneous and Bonferroniconfidence intervals, large sample inference about population mean vector, profile analysis<\/li><li><strong>Comparison of several multivariate means : <\/strong>Comparing mean vectors from two population, simultaneous and Bonferroni confidence intervals, Large sample inference for comparing mean vector, profile analysis, Box-M test for comparing several covariance matrices, Paired comparisons, One way MANOVA, Two way MANOVA, Simultaneous and Bonferroni confidence intervals for treatment effects<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><strong>Teaching Methods:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"603\">Lectures, \u00a0demonstration and Tutorial discussions<\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><strong>Assessment\/ Evaluation Details:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><ul><li>In-course assessments\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 \u00a0\u00a0\u00a0 30%<\/li><li>End of course Examination\u00a0\u00a0\u00a0\u00a0 70%<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><strong>Recommended Readings:<\/strong><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><ul><li>Chatfield, C., and Collins, A. 
J., Introduction to multivariate analysis, New York: Chapman and Hall, 1980.<\/li><li>Johnson, R. A., and Wichern, D. W., Applied multivariate statistical analysis, Englewood Cliffs, 6<sup>th<\/sup> Edition, N.J: Prentice Hall, 2006.<\/li><li>Everitt, B. S. and Hothorn T., An Introduction to Applied Multivariate Analysis with R, Springer, (2011).<\/li><\/ul><\/td><\/tr><\/tbody><\/table><p><\/div><br \/><\/div><\/div><br \/><\/div><\/p>\t\t\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t<\/section>\n\t\t\t\t<section class=\"elementor-section elementor-top-section elementor-element elementor-element-20def68 elementor-section-boxed elementor-section-height-default elementor-section-height-default\" data-id=\"20def68\" data-element_type=\"section\">\n\t\t\t\t\t\t<div class=\"elementor-container elementor-column-gap-default\">\n\t\t\t\t\t<div class=\"elementor-column elementor-col-100 elementor-top-column elementor-element elementor-element-a7cd5c6\" data-id=\"a7cd5c6\" data-element_type=\"column\">\n\t\t\t<div class=\"elementor-widget-wrap elementor-element-populated\">\n\t\t\t\t\t\t<div class=\"elementor-element elementor-element-7fd4d9f elementor-widget elementor-widget-text-editor\" data-id=\"7fd4d9f\" data-element_type=\"widget\" data-widget_type=\"text-editor.default\">\n\t\t\t\t<div class=\"elementor-widget-container\">\n\t\t\t\t\t\t\t\t\t<h3>Level \u2013 4M<\/h3><h4>Course units effective from academic year 2016\/2017 to date<\/h4><p><div class=\"su-accordion su-u-trim\"><br \/><div class=\"su-spoiler su-spoiler-style-simple su-spoiler-icon-plus su-spoiler-closed\" data-scroll-offset=\"0\" data-anchor-in-url=\"no\"><div class=\"su-spoiler-title\" tabindex=\"0\" role=\"button\"><span class=\"su-spoiler-icon\"><\/span>STA401M4: Measure Theory <\/div><div class=\"su-spoiler-content su-u-clearfix su-u-trim\"><br \/><div class=\"su-table su-table-alternate\"><\/p><table><tbody><tr><td width=\"163\"><p><strong>Course 
Code<\/strong><\/p><\/td><td colspan=\"3\" width=\"452\"><p><strong>STA401 M4<\/strong><\/p><\/td><\/tr><tr><td width=\"163\"><p><strong>Course Title<\/strong><\/p><\/td><td colspan=\"3\" width=\"452\"><p><strong>Measure Theory<\/strong><\/p><\/td><\/tr><tr><td width=\"163\"><p><strong>Credit Value<\/strong><\/p><\/td><td colspan=\"3\" width=\"452\"><p><strong>04 <\/strong><\/p><\/td><\/tr><tr><td width=\"163\"><p><strong>Prerequisites<\/strong><\/p><\/td><td colspan=\"3\" width=\"452\"><p><strong>PMM202G2 and PMM203G3<\/strong><\/p><\/td><\/tr><tr><td rowspan=\"2\" width=\"163\"><p><strong>\u00a0<\/strong><\/p><p><strong>Hourly Breakdown<\/strong><\/p><\/td><td width=\"120\"><p><strong>Theory<\/strong><\/p><\/td><td width=\"120\"><p><strong>Practical<\/strong><\/p><\/td><td width=\"212\"><p><strong>Independent Learning<\/strong><\/p><\/td><\/tr><tr><td width=\"120\"><p><strong>60<\/strong><\/p><\/td><td width=\"120\"><p><strong>&#8212;<\/strong><\/p><\/td><td width=\"212\"><p><strong>140<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><p><strong>Objectives:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><ul><li>\u00a0Introduce the fundamental concepts of Lebesgue measure spaces and abstract measure spaces<\/li><li>\u00a0Develop clear ideas on the concept of Lebesgue measurable functions, integrals, and their convergence properties<\/li><li>Discussthe fundamental connection between differentiation, and integration<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><p><strong>Intended Learning Outcomes:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><ul><li>Construct Lebesgue measures on the real line<\/li><li>Define abstract measure space<\/li><li>Illustrate the properties of abstract measure space<\/li><li>Discuss the properties of measurable functions and the convergence of sequence of measurable functions<\/li><li>Explain the simple function approximation of measurable functions<\/li><li>Formulate integrals in 
a measure space<\/li><li>Discuss the convergence of integrals<\/li><li>Extend the measures from algebras\/semialgebra to \u03c3-algebras<\/li><li>Formulateproduct measures<\/li><li>Prove Fubini\u2019s theorem, and Tonelli\u2019s theorem<\/li><li>Discuss the fundamental connection between differentiation, and integration<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><p><strong>Course Contents:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><p><strong>Measure Spaces:<\/strong>Preliminaries:Algebra and \u03c3-algebras of sets,Borel sets; Lebesgue measure: Outer measure, Measurable sets, and Lebesgue measure,Properties,Example of a non-measurable set, Borel measures; General measure: Definition of measure, Measure space, Complete measure space, Examples, Properties.<\/p><p><strong>Measurable Functions:<\/strong>\u00a0 Basic properties of measurable functions, Examples, Borel measurable functions, Approximation Theorem; Littlewoods\u2019s three principles: Egoroff\u2019s theorem.<\/p><p><strong>Integration:<\/strong>Integral of nonnegative functions, Integrability of a nonnegative function, Fatou\u2019s Lemma, Monotone convergence theorem, Lebesgue Convergence Theorem, Generalized Convergence Theorem.<\/p><p><strong>Extension of Measure:<\/strong>Measure on an algebra, Extension of measures from algebras to \u03c3-algebras, Carath\u00e9odory\u2019s theorem, and Lebesgue-Stieltjes integral.<\/p><p><strong>Product Measure:<\/strong>Measurable rectangle, Semialgebra, Construction of product measures, Fubini\u2019s theorem, and Tonelli\u2019s theorem<\/p><p><strong>Differentiation and Integration:<\/strong>Differentiation of monotone functions: Vitali\u2019s lemma, Functions of bounded variations; Differentiation of an integral: Indefinite integral, and Absolutely continuous functions.<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><p><strong>Teaching Methods:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" 
width=\"615\"><ul><li>\u00a0Lectures, Tutorials, Handouts, Problem solving, Use of e-resources<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><p><strong>Assessment\/ Evaluation Details:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><ul><li>\u00a0In-course Assessments\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 30%<\/li><li>\u00a0End-of-course Examination 70%<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><p><strong>Recommended Readings:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><ul><li>Halsey Royden, Patrick Fitzpatrick, Real Analysis,4<sup>th<\/sup> Edition, 2010.<\/li><li>Walter Rudin, Real and Complex Analysis, 3<sup>rd<\/sup> Edition, 1986.<\/li><li>\u00a0G De Barra, Measure theory and Integration, 2<sup>nd<\/sup> Edition, 2003.<\/li><li>Gerald, B. Folland, Real Analysis: Modern Techniques and Their Applications, 2nd Edition, 2007.<\/li><\/ul><\/td><\/tr><\/tbody><\/table><p><\/div><\/div><\/div><div class=\"su-spoiler su-spoiler-style-simple su-spoiler-icon-plus su-spoiler-closed\" data-scroll-offset=\"0\" data-anchor-in-url=\"no\"><div class=\"su-spoiler-title\" tabindex=\"0\" role=\"button\"><span class=\"su-spoiler-icon\"><\/span>STA402M2: Advanced Statistical Computing<\/div><div class=\"su-spoiler-content su-u-clearfix su-u-trim\"><div class=\"su-table su-table-alternate\"><\/p><table width=\"625\"><tbody><tr><td><p><strong>Course Code<\/strong><\/p><\/td><td colspan=\"3\" width=\"490\"><p><strong>STA402M2<\/strong><\/p><\/td><\/tr><tr><td><p><strong>Course Title<\/strong><\/p><\/td><td colspan=\"3\" width=\"490\"><p><strong>Advanced Statistical Computing<\/strong><\/p><\/td><\/tr><tr><td><p><strong>Academic Credits<\/strong><\/p><\/td><td colspan=\"3\" width=\"490\"><p><strong>02<\/strong><\/p><\/td><\/tr><tr><td rowspan=\"2\"><p><strong>Hourly Breakdown<\/strong><\/p><\/td><td width=\"153\"><p><strong>Theory<\/strong><\/p><\/td><td width=\"153\"><p><strong>Practical<\/strong><\/p><\/td><td 
width=\"185\"><p><strong>Independent Learning<\/strong><\/p><\/td><\/tr><tr><td width=\"153\"><p>&#8211;<\/p><\/td><td width=\"153\"><p>60 Hours<\/p><\/td><td width=\"185\"><p>40 Hours<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"625\"><p><strong>Objective:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"625\"><p>Introduce the Statistical concepts and principles to perform numerical computation using statistical software<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"625\"><p><strong>Intended Learning Outcomes:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"625\"><ul><li>Utilize build-in functions to analyze categorical data sets<\/li><li>Analyse the survival data by applying build-in functions<\/li><li>Develop time series models using statistical software<\/li><li>List summary statistics for given multivariate data sets<\/li><li>Examine the effects of factors by applying build- in functions<\/li><li>Explore standard statistical methods using statistical software<\/li><li>Write computer programms to accomplish a task<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"625\"><p><strong>Syllabus Outline<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"625\"><p><strong>Contents:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"625\"><ul><li><strong>Analysis of large data sets<\/strong>: Build-in functions for categorical data, survival data, time series data and multivariate data.<\/li><li><strong>Analysis of experimental data sets<\/strong>: Analysis of variance (ANOVA), multivariate hypothesis tests, Multivariate Analysis of variance (MANOVA)<\/li><li><strong>Simple function: <\/strong>writing simple functions to perform specific tasks.<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"625\"><p><strong>Teaching Methods:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"625\"><p>Laboratory practical<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"625\"><p><strong>Assessment\/ Evaluation Details:<\/strong><\/p><\/td><\/tr><tr><td 
colspan=\"4\" width=\"625\"><ul><li>In-course assessments (practical)\u00a0\u00a0\u00a0\u00a0\u00a0 \u00a0\u00a0\u00a0 30%<\/li><li>End of course Examination (practical)\u00a0 70%<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"625\"><p><strong>Recommended Readings:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"625\"><ul><li>Lafaye de Micheaux, Pierre and Drouilhet, R\u00e9my and Liquet, Benoit, The R software: Fundamentals of programming and statistical analysis, Springer, 2013.<\/li><li>\u00a0Michael, J. Crawley ,The R Book, Second Edition, John Wiley and Sons, Ltd, 2013.<\/li><li>Dirk, F. Moore, Applied Survival Analysis Using R, Springer, 2016.<\/li><li>Daniel Zelterman, Applied Multivariate Statistics with R, Springer, 2015.<\/li><li>Robert H. Shumway and David S. Stoffer, Time Series Analysis and Its Application With R Examples, Springer, 2011.<\/li><\/ul><\/td><\/tr><\/tbody><\/table><p><\/div><\/div><\/div><div class=\"su-spoiler su-spoiler-style-simple su-spoiler-icon-plus su-spoiler-closed\" data-scroll-offset=\"0\" data-anchor-in-url=\"no\"><div class=\"su-spoiler-title\" tabindex=\"0\" role=\"button\"><span class=\"su-spoiler-icon\"><\/span>STA403M3: Markov Processes for Stochastic Modelling <\/div><div class=\"su-spoiler-content su-u-clearfix su-u-trim\"><div class=\"su-table su-table-alternate\"><\/p><table><tbody><tr><td width=\"151\"><p><strong>Course Code<\/strong><\/p><\/td><td colspan=\"3\" width=\"449\"><p><strong>STA403M3<\/strong><\/p><\/td><\/tr><tr><td width=\"151\"><p><strong>Course Title<\/strong><\/p><\/td><td colspan=\"3\" width=\"449\"><p><strong>Markov Processes for Stochastic Modelling<\/strong><\/p><\/td><\/tr><tr><td width=\"151\"><p><strong>Academic Credits<\/strong><\/p><\/td><td colspan=\"3\" width=\"449\"><p><strong>03 <\/strong><\/p><\/td><\/tr><tr><td width=\"151\"><p><strong>Prerequisite\u00a0\u00a0\u00a0 <\/strong><\/p><\/td><td colspan=\"3\" width=\"449\"><p><strong>STA302G3<\/strong><\/p><\/td><\/tr><tr><td 
rowspan=\"2\" width=\"151\"><p><strong>Hourly Breakdown<\/strong><\/p><\/td><td width=\"137\"><p><strong>Theory<\/strong><\/p><\/td><td width=\"146\"><p><strong>Practical<\/strong><\/p><\/td><td width=\"166\"><p><strong>Independent Learning<\/strong><\/p><\/td><\/tr><tr><td width=\"137\"><p>\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 45 Hours<\/p><\/td><td width=\"146\"><p>_<\/p><\/td><td width=\"166\"><p>105 Hours<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Objectives:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>\u00a0Impart sound understanding on the Markov processes and their properties<\/li><li>\u00a0Introduce the basic concept and modelling methods on birth and death processes<\/li><li>Provide rigorous knowledge in queueing theory and applications<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Intended Learning Outcomes:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>\u00a0Recall basic characteristics of Markov processes<\/li><li>Discuss important properties of Markov chain<\/li><li>Evaluate the first passage and absorption probabilities<\/li><li>Find stationary distribution of a Markov chain<\/li><li>Illustrate the canonical form of a Markov chain<\/li><li>Model the relevant birth and death processes for randomly varying dynamic systems<\/li><li>Apply Chapman-Kolmogorov equation to formulate the forward differential equations<\/li><li>Construct probability distribution of random processes<\/li><li>\u00a0Explain the probability generating function for stochastic models<\/li><li>\u00a0Find average waiting time and queue length of the systems<\/li><li>Determine steady state distribution of a queueing system<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Syllabus Outline<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Contents:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" 
width=\"600\"><ul><li><strong>Markov processes in discrete parameter space: <\/strong><\/li><\/ul><p>Basic properties of Markov chain,Transition probability matrix, Classification of states (recurrent and transient classes), Periodicity of a class, Irreducible Markov chains, Ergodic Markov chains,First passage and recurrent times, Probabilities of absorption of transient states in one of the recurrent classes, Expected value and standard deviation of the number of transitions till absorption, Stationary distributions, Canonical form, The fundamental matrix. Random walk with absorbing and reflecting barriers.<\/p><ul><li><strong>Markov processes in continuous parameter space:<\/strong><\/li><\/ul><p>Markov pure jump process, Chapman-Kolmogorov equation, Birth and death process, pure birth process, pure death process, Forward and backward Kolmogorov differential equations, transition rate matrix, Analysis of random process using probability generating function, expected value and variance, probability extinction.<\/p><ul><li><strong>Queueing processes:<\/strong><\/li><\/ul><p>Arrival and service processes, single and multiple server queueing systems, Steady state distribution, Traffic intensity, mean of waiting time, Network of queues, Martingale, Stochastic differential equations.<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Teaching Methods:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>Lectures, Tutorials, Handouts, Problem solving, Use e-resources<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Assessment\/ Evaluation Details:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>In-course Assessments:\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 30% \u00a0<\/li><li>End-of-course Examination:\u00a0\u00a0\u00a0\u00a0 70%\u00a0<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Recommended Readings:<\/strong><\/p><\/td><\/tr><tr><td 
colspan=\"4\" width=\"600\"><ul><li>Parzen, E.,StochasticProcesses. SIAM Edition; Society for Industrial and Applied Mathematics Philadelphia, 1999.<\/li><li>\u00a0Sheldon M. Ross,Introduction to Probability Models, 10th ed. Academic Press Elsevier, 2013.\u00a0<\/li><li>Jones, P.W, and Smith, P., Stochastic Processes An Introduction, 1<sup>st<\/sup> Edition, ARNOLD A member of the Hodder Headline Group London, Co-published in the USA Oxford University Press Inc, New York, 2001.<\/li><\/ul><\/td><\/tr><\/tbody><\/table><p><\/div><\/div><\/div><div class=\"su-spoiler su-spoiler-style-simple su-spoiler-icon-plus su-spoiler-closed\" data-scroll-offset=\"0\" data-anchor-in-url=\"no\"><div class=\"su-spoiler-title\" tabindex=\"0\" role=\"button\"><span class=\"su-spoiler-icon\"><\/span>STA404M3: Generalized Linear Models for Familial Longitudinal Data<\/div><div class=\"su-spoiler-content su-u-clearfix su-u-trim\"><br \/><div class=\"su-table su-table-alternate\"><\/p><table><tbody><tr><td width=\"139\"><p><strong>Course Code<\/strong><\/p><\/td><td colspan=\"3\" width=\"479\"><p><strong>STA404M3<\/strong><\/p><\/td><\/tr><tr><td width=\"139\"><p><strong>Course Title<\/strong><\/p><\/td><td colspan=\"3\" width=\"479\"><p><strong>Generalized Linear Models for Familial Longitudinal Data<\/strong><\/p><\/td><\/tr><tr><td width=\"139\"><p><strong>Academic Credits<\/strong><\/p><\/td><td colspan=\"3\" width=\"479\"><p><strong>03 <\/strong><\/p><\/td><\/tr><tr><td width=\"139\"><p><strong>Prerequisite<\/strong><\/p><\/td><td colspan=\"3\" width=\"479\">\u00a0<\/td><\/tr><tr><td rowspan=\"2\" width=\"139\"><p><strong>Hourly Breakdown <\/strong><\/p><\/td><td width=\"145\"><p><strong>Theory<\/strong><\/p><\/td><td width=\"158\"><p><strong>Practical<\/strong><\/p><\/td><td width=\"176\"><p><strong>Independent Learning<\/strong><\/p><\/td><\/tr><tr><td width=\"145\"><p>45 Hours<\/p><\/td><td width=\"158\">\u00a0<\/td><td width=\"176\"><p>105 Hours<\/p><\/td><\/tr><tr><td 
colspan=\"4\" width=\"618\"><p><strong>Objective:<\/strong><\/p><p>Provide knowledge in fitting models to familial longitudinal data and apply these models to real life problems.<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>Intended Learning Outcomes:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><ul><li>Distinguish familial and longitudinal models<\/li><li>Formulate marginal and conditional models for the analysis of familial longitudinal data<\/li><li>Compare different types of parameter estimation techniques<\/li><li>Apply standard correlation structures for familial longitudinal data<\/li><li>Build appropriate statistical models for count\/binary familial longitudinal data<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>Syllabus Outline<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>Contents:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><ul><li><strong>Overview of Linear fixed models<\/strong><\/li><\/ul><p>Estimation of parameters: Method of moments, Ordinary Least Squares method (OLS),\u00a0 Generalized Least square method (GLS), OLS Vs GLS estimation performance; Estimation under stationary general autocorrelation structure: A class of autocorrelations<\/p><ul><li><strong>Familial models for count data<\/strong><\/li><\/ul><p>Poisson mixed models and basic properties; Estimation for single random effect based parametric mixed models: Exact likelihood estimation Method of moments, Generalized Estimating Equation (GEE) approach , Generalized Quasi-likelihood (GQL) Approach<\/p><ul><li><strong>Familial models for binary data<\/strong><\/li><\/ul><p>Binary mixed models and basic properties: Computational formulas for binary moments; Estimation for single random effect based parametric mixed models: Method of moments,\u00a0 Generalized Quasi-likelihood approach, Maximum likelihood estimation (MLE)<\/p><ul><li><strong>Longitudinal models for count 
data<\/strong><\/li><\/ul><p>Marginal model; Marginal model based estimation of regression effects; Correlation models for stationary count data: Poisson AR(1) model,\u00a0 Poisson MA(1) model, Poisson Equicorrelation (EQC) model; Inferences for stationary correlation models; Nonstaionary correlation models<\/p><ul><li><strong>Longitudinal models for binary data<\/strong><\/li><\/ul><p>Marginal model; Marginal model based estimation of regression effects; Some selected correlation models for longitudinal binary data; Low-order autocorrelation models for stationary binary data: Binary AR(1) model,\u00a0 Binary MA(1) model, Binary EQC\u00a0 model; Inferences in Non-stationary correlation models for repeated binary data<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>Teaching Methods: <\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p>Lecture demonstration, and tutorial discussions<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>Assessment\/ Evaluation Details:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><ul><li>In-course assessments\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 \u00a0\u00a0\u00a0\u00a0 30%<\/li><li>End of course Examination\u00a0\u00a0\u00a0\u00a0 70%<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>Recommended Readings:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><ul><li>Diggle, P.,Heagerty, K.Y.Liang, K.Y. and Zeger, S. L, Analysis of Longitudinal Data, 2<sup>nd\u00a0 <\/sup>Edition, Oxford University Press, Oxford, 2002.<\/li><li>Brajendra, C. Sutradhar, Dynamic Mixed Models for Familial Longitudinal Data, Springer, 2011.\u00a0<\/li><li>McCullagh, P and Nelder, A. 
J, Generalized Linear Models, Chapman and Hall, 1989.\u00a0\u00a0<\/li><\/ul><\/td><\/tr><\/tbody><\/table><p><\/div><\/div><\/div><div class=\"su-spoiler su-spoiler-style-simple su-spoiler-icon-plus su-spoiler-closed\" data-scroll-offset=\"0\" data-anchor-in-url=\"no\"><div class=\"su-spoiler-title\" tabindex=\"0\" role=\"button\"><span class=\"su-spoiler-icon\"><\/span>STA405M3: Advanced Statistical Theory <\/div><div class=\"su-spoiler-content su-u-clearfix su-u-trim\"><br \/><div class=\"su-table su-table-alternate\"><\/div><\/p><table><tbody><tr><td width=\"151\"><p><strong>Course Code<\/strong><\/p><\/td><td colspan=\"3\" width=\"464\"><p><strong>STA405M3<\/strong><\/p><\/td><\/tr><tr><td width=\"151\"><p><strong>Course Title<\/strong><\/p><\/td><td colspan=\"3\" width=\"464\"><p><strong>Advanced Statistical\u00a0 Theory<\/strong><\/p><\/td><\/tr><tr><td width=\"151\"><p><strong>Academic Credits<\/strong><\/p><\/td><td colspan=\"3\" width=\"464\"><p><strong>03 <\/strong><\/p><\/td><\/tr><tr><td width=\"151\"><p><strong>Prerequisite<\/strong><\/p><\/td><td colspan=\"3\" width=\"464\">\u00a0<\/td><\/tr><tr><td rowspan=\"2\" width=\"151\"><p><strong>Hourly Breakdown <\/strong><\/p><\/td><td width=\"132\"><p><strong>Theory<\/strong><\/p><\/td><td width=\"157\"><p><strong>Practical<\/strong><\/p><\/td><td width=\"175\"><p><strong>Independent\u00a0 Learning<\/strong><\/p><\/td><\/tr><tr><td width=\"132\"><p>45 Hours<\/p><\/td><td width=\"157\"><p>&#8211;<\/p><\/td><td width=\"175\"><p>105 Hours<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><p><strong>Objective:<\/strong><\/p><p><em>Introduce concept of advanced statistical theory<\/em><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><p><strong>Intended Learning Outcomes:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><ul><li>Recall sufficient statistics, minimal sufficient statistics, complete sufficient statistics<\/li><li>Identify the exponential families of distribution<\/li><li>Prove 
Basu\u2019s theorem and use it for showing independence of statistics<\/li><li>Obtain point estimator using various estimation techniques<\/li><li>Prove Cramer-Rao \u00a0inequality, Rao-Blackwell, Lehmann-Scheff\u00e9 theorems<\/li><li>Find minimum variance of an unbiased estimator using Cramer-Rao \u00a0inequality<\/li><li>Obtain the minimum variance unbiased estimator for various probability distributions<\/li><li>Analyze point estimators in terms of consistency, asymptotic normality and efficiency properties<\/li><li>Determine interval estimators<\/li><li>Evaluate the efficiency of the interval estimators<\/li><li>Apply statistical methods for hypothesis testing<\/li><li>Prove Neyman-Pearson lemma<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><p><strong>Course Contents:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><ul><li><strong>Data Reduction<\/strong>: Scale and location families, sufficiency, factorization theorem, minimal sufficiency, ancillary statistics, complete statistics, Basu\u2019s theorem, exponential families; one parameter case, multi-parameter case<\/li><li><strong>Point Estimation<\/strong>: Method of moments, maximum likelihood, properties of maximum likelihood estimator, Bayesian point estimation; prior and posterior distributions, bias, variance, mean square error, minimum variance unbiased estimator, Fisher information, Cramer-Rao lower bound, the Rao-Blackwell theorem, the Lehmann-Scheff\u00e9 theorem, large sample theory; consistency, asymptotic normality and related properties, asymptotic efficiency and optimality<\/li><li><strong>Interval Estimation<\/strong>: Methods of finding interval estimators; Inverting to a test statistic, pivotal quantities, pivoting the cumulative distribution function, Bayesian interval. Method of evaluating interval estimators;\u00a0 size and coverage probability, test related to optimality, loss function optimality<\/li><li><strong>Hypothesis test<\/strong>: Simple hypothesis, composite 
hypothesis, Neyman-Pearson lemma, uniformly most powerful test, likelihood ratio test, the sequential probability test, Bayesian testing procedures<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><p><strong>Teaching Methods: <\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><p>Lecture demonstration and tutorial discussions<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><p><strong>Assessment\/ Evaluation Details:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><ul><li>In-course assessments\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 \u00a0\u00a0\u00a0\u00a0 30%<\/li><li>End of course Examination\u00a0\u00a0\u00a0\u00a0\u00a0 70%<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><p><strong>Recommended Readings:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"615\"><ul><li>Casella, G., and Berger. R., Statistical Inference, 2<sup>nd<\/sup> Edition, Pacific Grove, CA: Wadsworth, 2001.<\/li><li>Knight, K., Mathematical Statistics, 1<sup>st<\/sup>Edition, Chapman and Hall\/CRC, 1999.<\/li><li>Hogg, V., McKean, W., and Craig, T., Introduction to Mathematical Statistics, 7<sup>th<\/sup> Edition, Pearson, 2012.<\/li><li>Bickel, P. J., and Doksum, K. 
A., Mathematical Statistics: Basic Ideas and Selected Topics, 6<sup>th<\/sup> Edition, San Francisco: Holden-Day, 1977.<\/li><\/ul><\/td><\/tr><\/tbody><\/table><p><\/div><\/div><div class=\"su-spoiler su-spoiler-style-simple su-spoiler-icon-plus su-spoiler-closed\" data-scroll-offset=\"0\" data-anchor-in-url=\"no\"><div class=\"su-spoiler-title\" tabindex=\"0\" role=\"button\"><span class=\"su-spoiler-icon\"><\/span>STA406M3: Multivariate Analysis II <\/div><div class=\"su-spoiler-content su-u-clearfix su-u-trim\"><br \/><div class=\"su-table su-table-alternate\"><\/div><\/p><table><tbody><tr><td><p><strong>Course Code<\/strong><\/p><\/td><td colspan=\"3\" width=\"458\"><p><strong>STA406M3<\/strong><\/p><\/td><\/tr><tr><td><p><strong>Course Title<\/strong><\/p><\/td><td colspan=\"3\" width=\"458\"><p><strong>Multivariate Analysis II<\/strong><\/p><\/td><\/tr><tr><td><p><strong>Academic Credits<\/strong><\/p><\/td><td colspan=\"3\" width=\"458\"><p><strong>03<\/strong><\/p><\/td><\/tr><tr><td><p><strong>Prerequisite<\/strong><\/p><\/td><td colspan=\"3\" width=\"458\">\u00a0<\/td><\/tr><tr><td rowspan=\"2\"><p><strong>Hourly Breakdown<\/strong><\/p><\/td><td width=\"126\"><p><strong>Theory<\/strong><\/p><\/td><td width=\"139\"><p><strong>Practical<\/strong><\/p><\/td><td width=\"193\"><p><strong>Independent Learning<\/strong><\/p><\/td><\/tr><tr><td width=\"126\"><p>45 Hours<\/p><\/td><td width=\"139\"><p>&#8211;<\/p><\/td><td width=\"193\"><p>105 Hours<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"592\"><p><strong>Objective:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"592\"><p>Introduce further multivariate techniques and their application to real world problems<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"592\"><p><strong>Intended Learning Outcomes:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"592\"><ul><li>Use principal component analysis effectively for data exploration and dimension reduction<\/li><li>Apply factor analysis effectively 
for exploratory and confirmatory data analysis<\/li><li>Apply multivariate regression to real world data<\/li><li>\u00a0Classify the groups using discriminant function<\/li><li>Apply discriminant function among groups<\/li><li>Find groupings and associations using cluster analysis<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"592\"><p><strong>Syllabus Outline<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"592\"><p><strong>Course Contents:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"592\"><ul><li><strong>Principal Component Analysis:<\/strong> Derivation of principal components: Covariance matrix and Correlation matrix, loading matrix, Scree plot, principal component scores.<\/li><li><strong>Factor Analysis:<\/strong> Orthogonal factor model, Methods of Estimation: Principal Component Method and Maximum Likelihood Method, Factor Rotation: Graphical method, Varimax and Oblique rotation, Factor Scores.<\/li><li><strong>Multivariate regression:<\/strong> Multivariate linear regression model, Assumptions of multivariate linear regression, Least squares method of parameter estimation, Statistical Inference on regression coefficients.<\/li><li><strong>Canonical Correlation Analysis: <\/strong>Canonical variates and canonical correlation, test for significant canonical correlation<\/li><li><strong>Discrimination and Classification<\/strong>: Separation and Classification for two populations, Fisher\u2019s Discriminant function, Classification with several populations.<\/li><li><strong>Cluster analysis: <\/strong>Similarity measures: Pairs of items and Pairs of variables, Clustering methods: Single Linkage, Complete Linkage, Average Linkage and K-means method<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"592\"><p><strong>Teaching Methods:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"592\"><p>Lecture demonstration and Tutorial discussions<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"592\"><p><strong>Assessment\/ Evaluation 
Details:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"592\"><ul><li>In-course assessments\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 \u00a0\u00a0 30%<\/li><li>End of course Examination\u00a0\u00a0\u00a0\u00a0 70%<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"592\"><p><strong>Recommended Readings:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"592\"><ul><li>Chatfield, C.and Collins, A. J., Introduction to multivariate analysis, New York: Chapman and Hall, 1980.<\/li><li>Johnson, R. A.and Wichern, D. W., Applied multivariate statistical analysis, Englewood Cliffs, N.J: Prentice Hall, 1992.<\/li><li>Everitt, B.S. andHothorn, T., An Introduction to Applied Multivariate Analysis with R, Springer, 2011.<\/li><\/ul><\/td><\/tr><\/tbody><\/table><p><\/div><\/div><div class=\"su-spoiler su-spoiler-style-simple su-spoiler-icon-plus su-spoiler-closed\" data-scroll-offset=\"0\" data-anchor-in-url=\"no\"><div class=\"su-spoiler-title\" tabindex=\"0\" role=\"button\"><span class=\"su-spoiler-icon\"><\/span>STA407M4: Advanced Probability Theory <\/div><div class=\"su-spoiler-content su-u-clearfix su-u-trim\"><br \/><div class=\"su-table su-table-alternate\"><\/div><\/p><p>\u00a0<\/p><table><tbody><tr><td width=\"151\"><p><strong>Course Code<\/strong><\/p><\/td><td colspan=\"3\" width=\"450\"><p><strong>STA407<\/strong><strong>M4<\/strong><\/p><\/td><\/tr><tr><td width=\"151\"><p><strong>Course Title<\/strong><\/p><\/td><td colspan=\"3\" width=\"450\"><p><strong>Advanced Probability Theory<\/strong><\/p><\/td><\/tr><tr><td width=\"151\"><p><strong>Academic Credits<\/strong><\/p><\/td><td colspan=\"3\" width=\"450\"><p><strong>04<\/strong><\/p><\/td><\/tr><tr><td width=\"151\"><p><strong>Prerequisite\u00a0\u00a0\u00a0 <\/strong><\/p><\/td><td colspan=\"3\" width=\"450\"><p><strong>PMM202G2 and PMM203G3<\/strong><\/p><\/td><\/tr><tr><td rowspan=\"2\" width=\"151\"><p><strong>Hourly Breakdown<\/strong><\/p><\/td><td 
width=\"138\"><p><strong>Theory<\/strong><\/p><\/td><td width=\"146\"><p><strong>Practical<\/strong><\/p><\/td><td width=\"166\"><p><strong>Independent Learning<\/strong><\/p><\/td><\/tr><tr><td width=\"138\"><p>\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 60Hours<\/p><\/td><td width=\"146\"><p>_<\/p><\/td><td width=\"166\"><p>140 Hours<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Objectives:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>Introduce basic concepts of probability theory in measure theoretic approach<\/li><li>Develop clear ideas on the concept of integration in probability measure space and the expectation of random variables<\/li><li>Impart profound knowledge and application methods on distribution functions, mode convergence and characteristic functions<\/li><li>Provide sound theoretical basis for further studies in mathematical statistics<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Intended Learning Outcomes:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>Recall the concepts of probability theory<\/li><li>Construct probability measures and measurable spaces<\/li><li>Illustrate the properties of random variables and expectation<\/li><li>Formulate integrals with respect to probability measures<\/li><li>Express probability and moment inequalities<\/li><li>Discuss the modes of convergence<\/li><li>Apply Fatou\u2019s lemma, monotone and dominated convergence theorems<\/li><li>Explain Borel-Cantelli lemmas and Kolmogorov zero-one law<\/li><li>Discuss the properties of distribution functions and characteristic functions<\/li><li>Explain weak and complete convergence of sequence of distribution functions<\/li><li>Apply decomposition theorem, Helly-Bray lemma and theorem, uniqueness theorem, inversion theorem, Levy continuity theorem and central limit theorem<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" 
width=\"600\"><p><strong>Syllabus Outline<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Contents:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Mathematical Foundation of Probability Theory:<\/strong><\/p><p>Sets and Operations, Collection of sets, Algebra and Sigma-algebras of sets, limits of sets, monotone sequence of sets, Probability spaces and properties, Construction of a probability measures and continuity theorem, Conditional probability and Independent events, Borel sets.<\/p><p><strong>Random Variables:<\/strong><\/p><p>Basic properties of random variables and vectors, random elements, induced probability measures and spaces, measurability and limits, Functions of random variables, simple random variables, induced sigma-algebras.<\/p><p><strong>Expectation and Convergence:<\/strong><\/p><p>Definitions and Properties of Expectation, Convergence concepts; Uniformly and point-wise, Mode of convergence; almost surely, in probability, in <em>r<\/em>th mean, in distribution. Convergence of function of random variables, Markov and Chebyshov\u2019s inequalities. Moment Inequalities: Holder\u2019s, Minkowski and Jensen\u2019s. Fatou\u2019s Lemma, Monotone and Dominated Convergence Theorems, Product measures. Independence of function of random variables and sigma algebras. 
Borel-Cantelli Lemmas, Kolmogorov zero-one Law, Strong Law of Large Numbers.<\/p><p><strong>Distribution Functions:<\/strong> Properties of distribution functions, Decomposition theorem, weak and complete convergence, Helly-Bray lemma, extended lemma and theorem, Convolution, Conditional Distributions and Expectations.<\/p><p><strong>Characteristic Functions<\/strong>: Definition and Basic Properties, Uniqueness theorem, Inversion theorem, Levy Continuity theorem, Examples of Characteristic functions, Law of Large Numbers, Stirling\u2019s formula, Central Limit Theorem, Martingales.<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Teaching Methods:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>Lectures, Tutorial discussion, Handouts, Use e-resources<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Assessment\/ Evaluation Details:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>In-course Assessments:\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 30%<\/li><li>End-of-course Examination:\u00a0\u00a0\u00a0\u00a0 70%\u00a0<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Recommended Readings:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>Alan F. Karr., Probability, 1<sup>st<\/sup> Edition; Springer-Verlag New York, Inc, 1993.<\/li><li>Ramdas Bhat. B., Modern Probability Theory, An Introductory Text Book, 2<sup>nd<\/sup> Edition, Wiley Eastern Limited, 1985.<\/li><li>Kai Lai Chung, A Course in Probability Theory. 3rd Edition, Elsevier (USA), 2000.<\/li><li>Clarke, L. E., Random Variables, 1<sup>st<\/sup> Edition, Longman Mathematical Texts, USA by Longman Inc., New York, 1975.<\/li><li>Allan Gut., Probability: A Graduate Course. II. 
series, Springer texts in Statistics, Springer-Verlag New York, Inc, 2005.<\/li><\/ul><\/td><\/tr><\/tbody><\/table><p><\/div><\/div><div class=\"su-spoiler su-spoiler-style-simple su-spoiler-icon-plus su-spoiler-closed\" data-scroll-offset=\"0\" data-anchor-in-url=\"no\"><div class=\"su-spoiler-title\" tabindex=\"0\" role=\"button\"><span class=\"su-spoiler-icon\"><\/span>STA408M3: Theory of Linear Models <\/div><div class=\"su-spoiler-content su-u-clearfix su-u-trim\"><br \/><div class=\"su-table su-table-alternate\"><\/div><\/p><table><tbody><tr><td width=\"151\"><p><strong>Course Code<\/strong><\/p><\/td><td colspan=\"3\" width=\"467\"><p><strong>STA408M3<\/strong><\/p><\/td><\/tr><tr><td width=\"151\"><p><strong>Course Title<\/strong><\/p><\/td><td colspan=\"3\" width=\"467\"><p><strong>Theory of Linear Models<\/strong><\/p><\/td><\/tr><tr><td width=\"151\"><p><strong>Academic Credits<\/strong><\/p><\/td><td colspan=\"3\" width=\"467\"><p><strong>03 <\/strong><\/p><\/td><\/tr><tr><td width=\"151\"><p><strong>Prerequisite<\/strong><\/p><\/td><td colspan=\"3\" width=\"467\">\u00a0<\/td><\/tr><tr><td rowspan=\"2\" width=\"151\"><p><strong>Hourly Breakdown <\/strong><\/p><\/td><td width=\"133\"><p><strong>Theory<\/strong><\/p><\/td><td width=\"158\"><p><strong>Practical<\/strong><\/p><\/td><td width=\"176\"><p><strong>Independent Learning<\/strong><\/p><\/td><\/tr><tr><td width=\"133\"><p>45 Hours<\/p><\/td><td width=\"158\"><p>&#8211;<\/p><\/td><td width=\"176\"><p>105 Hours<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>Objective:<\/strong><\/p><p><strong>\u00a0<\/strong><\/p><p>Provide depth knowledge in theory of linear models and its applications<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>Intended Learning Outcomes:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><ul><li>Prove basic results related to the statistical theory of linear models<\/li><li>Discuss different type of parameter estimation in linear 
models<\/li><li>Perform hypothesis testing related to different characteristics of a linear model<\/li><li>Assess the fit of a linear model to data and the validity of its assumptions<\/li><li>Develop theoretical knowledge on the \u00a0concepts behind the robust regression<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>Syllabus Outline<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>Contents:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><ul><li><strong>Introduction<\/strong><\/li><\/ul><p>Multivariate Normal Distribution, Distribution of Quadratic forms,\u00a0 Estimation by Least Squares, Orthonormal Bases, Q-R decompositions, Hat Matrices<\/p><ul><li><strong>Variances and Covariances<\/strong><\/li><\/ul><p>Gauss- Markov Theorem,\u00a0 Estimation of variance, Generalized Least Squares, Collinearity in Least square estimation, Consequences and Identification, Biased Estimation, Ridge Regression, Sensitivity Analysis of Least Squares using Residuals<\/p><ul><li><strong>Statistical Inference for Normal Errors<\/strong><\/li><\/ul><p>Chi-square<em>, t<\/em> and <em>F<\/em> distributions, Distribution theory, Hypothesis testing, Robustness of <em>F<\/em>-tests, Non-central Chi-square and Power of tests, Power and Size of <em>F<\/em>-tests<\/p><ul><li><strong>Non-Full Rank Models<\/strong><\/li><\/ul><p>Analysis of Variance Models, Singular Value Decompositions, Estimable Functions and their properties, Hypotheses testing, Analysis of Variance Models with Covariates<\/p><ul><li><strong>Robust Regression<\/strong><\/li><\/ul><p>Influence Curves, Sensitivity Analysis based on the Influence Curve, M-Estimation, GM- Estimation, Influence curves of estimators (GLS and GM)<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>Teaching Methods: <\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p>Lecture demonstration, and tutorial discussions<\/p><\/td><\/tr><tr><td colspan=\"4\" 
width=\"618\"><p><strong>Assessment\/ Evaluation Details:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><ul><li>In-course assessments\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 \u00a0\u00a0\u00a0\u00a0 30%<\/li><li>End of course Examination\u00a0\u00a0\u00a0\u00a0 70%<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>Recommended Readings:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><ul><li><a href=\"https:\/\/www.google.lk\/search?tbo=p&amp;tbm=bks&amp;q=inauthor:%22James+H.+Stapleton%22\">James H. Stapleton<\/a>,Linear Statistical Models, John Wiley and Sons, 2009.<\/li><li>George, A. F. Seber and Alan, J. Lee, Linear Regression Analysis, secondEdition, John Wiley and Sons, 2011.<\/li><li>Alvin, C. Rencher and Bruce Schaalje,G., Linear Models in Statistics, Second Edition, John Wiley and Sons, 2007.<\/li><li><a href=\"https:\/\/www.google.lk\/search?tbo=p&amp;tbm=bks&amp;q=inauthor:%22R.+D.+Cook%22\">Cook<\/a>, R.D and <a href=\"https:\/\/www.google.lk\/search?tbo=p&amp;tbm=bks&amp;q=inauthor:%22S.+Weisberg%22\">S. 
Weisberg<\/a>, Residuals and Influence in Regression, Taylor and Francis, 1982.<\/li><li>John Fox, Regression Diagnostics, Sage Publications, 1991.<\/li><\/ul><\/td><\/tr><\/tbody><\/table><p><\/div><\/div><div class=\"su-spoiler su-spoiler-style-simple su-spoiler-icon-plus su-spoiler-closed\" data-scroll-offset=\"0\" data-anchor-in-url=\"no\"><div class=\"su-spoiler-title\" tabindex=\"0\" role=\"button\"><span class=\"su-spoiler-icon\"><\/span>STA409M6: Research Project <\/div><div class=\"su-spoiler-content su-u-clearfix su-u-trim\"><br \/><div class=\"su-table su-table-alternate\"><\/div><\/p><table><tbody><tr><td width=\"151\"><p><strong>Course Code<\/strong><\/p><\/td><td colspan=\"3\" width=\"467\"><p><strong>STA409M6<\/strong><\/p><\/td><\/tr><tr><td width=\"151\"><p><strong>Course Title<\/strong><\/p><\/td><td colspan=\"3\" width=\"467\"><p><strong>Research Project<\/strong><\/p><\/td><\/tr><tr><td width=\"151\"><p><strong>Academic Credits<\/strong><\/p><\/td><td colspan=\"3\" width=\"467\"><p><strong>06 <\/strong><\/p><\/td><\/tr><tr><td rowspan=\"2\" width=\"151\"><p><strong>Hourly Breakdown <\/strong><\/p><\/td><td width=\"133\"><p><strong>Theory<\/strong><\/p><\/td><td width=\"158\"><p><strong>Practical<\/strong><\/p><\/td><td width=\"176\"><p><strong>Independent Learning<\/strong><\/p><\/td><\/tr><tr><td width=\"133\"><p>&#8211;<\/p><\/td><td width=\"158\"><p>&#8211;<\/p><\/td><td width=\"176\"><p>300 Hours<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>Objective:<\/strong><\/p><p><strong>\u00a0<\/strong>Provide training in scientific skills of problem analysis, research design, evaluation of empirical evidence and dissemination.<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>Intended Learning Outcomes:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><ul><li>Identify a research problem<\/li><li>List appropriate literature to discuss the research findings<\/li><li>Plan a proper research 
methodology<\/li><li>Formulate a suitable hypothesis for the research problem<\/li><li>Apply suitable statistical techniques to make decisions<\/li><li>Develop skills of scientific writing and presenting results<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>Syllabus Outline<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>\u00a0Course Description:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p>Students are expected to carry out an independent research project in the field of Statistics under the supervision of a senior staff member in the department. Students need to give presentations in the beginning, middle, and the end of their research. At the completion of the research project, students are expected to write a comprehensive report. During the research, students are expected to maintain a research diary.<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>Teaching Methods: <\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p>Guided independent study, Discussion with the supervisor, Use of e-resources<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>Assessment\/ Evaluation Details:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><ul><li>Presentation\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 \u00a0\u00a0\u00a0 \u00a0\u00a030%<\/li><li>Project Report\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 70%<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><p><strong>Recommended Readings:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"618\"><ul><li>Kothari, C. R., Research Methodology: Methods and Techniques, Second Edition, New Age International (P) Limited, Publishers, 2004.<\/li><li>McMillan, K. and Weyers, J., How to Write Dissertations and Project Reports, Prentice Hall, 2011.<\/li><li>Denicolo, P. and Becker, L., Developing a Research Proposal. 
SAGE Publications, 2012.<\/li><\/ul><\/td><\/tr><\/tbody><\/table><p><\/div><\/div><div class=\"su-spoiler su-spoiler-style-simple su-spoiler-icon-plus su-spoiler-closed\" data-scroll-offset=\"0\" data-anchor-in-url=\"no\"><div class=\"su-spoiler-title\" tabindex=\"0\" role=\"button\"><span class=\"su-spoiler-icon\"><\/span>STA410M2: Bayesian Statistics <\/div><div class=\"su-spoiler-content su-u-clearfix su-u-trim\"><br \/><div class=\"su-table su-table-alternate\"><\/div><\/p><table><tbody><tr><td><p><strong>Course Code<\/strong><\/p><\/td><td colspan=\"3\" width=\"458\"><p><strong>STA410M2<\/strong><\/p><\/td><\/tr><tr><td><p><strong>Course Title<\/strong><\/p><\/td><td colspan=\"3\" width=\"458\"><p><strong>Bayesian Statistics<\/strong><\/p><\/td><\/tr><tr><td><p><strong>Academic Credits<\/strong><\/p><\/td><td colspan=\"3\" width=\"458\"><p><strong>02 <\/strong><\/p><\/td><\/tr><tr><td width=\"143\"><p><strong>Prerequisite\u00a0\u00a0\u00a0 <\/strong><\/p><\/td><td colspan=\"3\" width=\"458\"><p><strong>STA201G3 and STA204G2<\/strong><\/p><\/td><\/tr><tr><td rowspan=\"2\" width=\"143\"><p><strong>Hourly Breakdown<\/strong><\/p><\/td><td width=\"146\"><p><strong>Theory<\/strong><\/p><\/td><td width=\"146\"><p><strong>Practical<\/strong><\/p><\/td><td width=\"166\"><p><strong>Independent Learning<\/strong><\/p><\/td><\/tr><tr><td width=\"146\"><p>\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 30 Hours<\/p><\/td><td width=\"146\"><p>&#8211;<\/p><\/td><td width=\"166\"><p>70 Hours<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Objectives:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>Introduce the basic concepts of Bayesian theory.<\/li><li>Apply Bayesian statistics in a real world problem.<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Intended Learning Outcomes:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>Distinguish classical and Bayesian 
approaches<\/li><li>Recall various priors such as conjugate, non informative, Jeffreys\u2019<\/li><li>Determine the posterior and predictive distributions for standard prior distributions<\/li><li>Find mean and variance for the posterior distributions<\/li><li>Evaluate Bayes\u2019 estimate for the parameter of the posterior distribution<\/li><li>Construct the credible interval and highest posterior density interval<\/li><li>Test the simple hypotheses using Bayes\u2019 factor<\/li><li>Formulate the linear hierarchical models<\/li><li>Utilize the Bayes\u2019 risk to select the best decisions<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Syllabus Outline<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Contents:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Fundamentals of Bayesian Analysis:<\/strong><\/p><p>Definitions of classical and Bayesian approaches to inference about parameters. Bayes\u2019 theorem for parametric inference, likelihood functions, exponential families and conjugate priors. Mixtures of conjugate priors, Non informative priors, Jeffreys\u2019 prior. Prior and Posterior analysis of standard distributions; binomial-beta, Poisson-gamma, exponential-gamma, uniform-Pareto, normal(mean)-normal, normal(precision)-gamma, normal(mean and precision)\u2013normal-gamma. Predictive distributions. Exchangeability, Point and interval estimations; maximum a posteriori (MAP) estimators, credible intervals and highest posterior density intervals. Bayes\u2019 factors, Bayesian hypothesis testing. 
Two sample problems.<\/p><p><strong>Bayesian Linear Models:<\/strong><\/p><p>Uniform priors, Normal priors, Hierarchical models; Two and Three stage models.<\/p><p><strong>Statistical Decision Theory:<\/strong><\/p><p>Loss functions, Bayes\u2019 risk, Bayes\u2019 rule, Minimax and Bayes\u2019 procedures.<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Teaching Methods:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p>\u00b7\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 Lectures, Tutorial discussion, Handouts, Use e-resources<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Assessment\/ Evaluation Details:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>In-course Assessments:\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 30% \u00a0\u00a0\u00a0\u00a0<\/li><li>End-of-course Examination:\u00a0\u00a0\u00a0\u00a0 70%\u00a0<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><p><strong>Recommended Readings:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"600\"><ul><li>Peter M. Lee., Bayesian Statistics: An Introduction. 4th edition, John Wiley and Sons Limited. U.K., 2012.<\/li><li>Peter D. Hoff., A First Course in Bayesian Statistical Methods. Springer-Heidelberg London, New York, 2009.<\/li><li>Vladimir P. Savchuk and Chris P. Tsokos., Bayesian Theory and Methods with Applications. Atlantis Press, 8, square des Bouleaux, 75019 Paris, France, 2011.<\/li><li>James O. Berger., Statistical Decision Theory and Bayesian Analysis. 
Second Edition, Springer-Verlag, New York, 1988.<\/li><\/ul><\/td><\/tr><\/tbody><\/table><p><\/div><\/div><div class=\"su-spoiler su-spoiler-style-simple su-spoiler-icon-plus su-spoiler-closed\" data-scroll-offset=\"0\" data-anchor-in-url=\"no\"><div class=\"su-spoiler-title\" tabindex=\"0\" role=\"button\"><span class=\"su-spoiler-icon\"><\/span>STA411M3: Data Mining <\/div><div class=\"su-spoiler-content su-u-clearfix su-u-trim\"><br \/><div class=\"su-table su-table-alternate\"><\/div><\/p><table width=\"659\"><tbody><tr><td width=\"213\"><p><strong>Course Code<\/strong><\/p><\/td><td colspan=\"3\" width=\"446\"><p><strong>STA411M3<\/strong><\/p><\/td><\/tr><tr><td width=\"213\"><p><strong>Course Title<\/strong><\/p><\/td><td colspan=\"3\" width=\"446\"><p><strong>Data Mining<\/strong><\/p><\/td><\/tr><tr><td width=\"213\"><p><strong>Academic Credits<\/strong><\/p><\/td><td colspan=\"3\" width=\"446\"><p><strong>03 <\/strong><\/p><\/td><\/tr><tr><td width=\"213\"><p><strong>Prerequisite<\/strong><\/p><\/td><td colspan=\"3\" width=\"446\"><p><strong>\u00a0<\/strong><\/p><\/td><\/tr><tr><td rowspan=\"2\" width=\"213\"><p><strong>Hourly Breakdown:<\/strong><\/p><\/td><td width=\"149\"><p><strong>Theory<\/strong><\/p><\/td><td width=\"149\"><p><strong>Practical<\/strong><\/p><\/td><td width=\"149\"><p><strong>Independent Learning<\/strong><\/p><\/td><\/tr><tr><td width=\"149\"><p>45<\/p><\/td><td width=\"149\"><p>&#8211;<\/p><\/td><td width=\"149\"><p>105<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"659\"><p><strong>Objectives:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"659\"><p>Provide knowledge on the concepts behind various\u00a0 data mining techniques and techniques for learning from data as well as data analysis and modelling<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"659\"><p><strong>Intended Learning Outcomes:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"659\"><ul><li>Plan pre and post-processing operations for data 
mining<\/li><li>Describe a range of supervised and unsupervised learning algorithms<\/li><li>Use machine learning algorithms on data to identify new patterns or concepts<\/li><li>Evaluate the performance of learning algorithms<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"659\"><p><strong>Course Contents:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"659\"><ul><li>Introduction to data mining: Data mining and its applications; Data handling\u2013instances, attributes and their types<\/li><li>Data mining process: Data preparation\/cleansing, sparse data, missing data, inaccurate values, task identification, use of Weka tool<\/li><li>Supervised learning: Introduction to classification and regression, rule-based learning, decision tree learning, Naive Bayes, k-nearest neighbour, support vector machines, neural networks, linear regressions, introduction to boosting<\/li><li>Unsupervised learning: K-means clustering, Gaussian mixture models (GMMs), Hierarchical clustering, Latent Dirichlet Allocation(LDA)<\/li><li>Dimensionality reduction: Principal Component Analysis, Multidimensional Scaling, Filter methods<\/li><li>Evaluation of learning algorithms: Training and testing, Error rates, over- and under- fitting, Cross-validation, Confusion matrices and ROC graphs<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"659\"><p><strong>Teaching\/Learning Methods:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"659\"><p>Lecture demonstration, and tutorial discussions and laboratory experiments<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"659\"><p><strong>Assessment Strategy:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"659\"><ul><li>In-course Assessments\u00a0 \u00a0 \u00a0 \u00a0 \u00a0 \u00a0 \u00a0 \u00a0 \u00a0 30%<\/li><li>End-of-course Examination\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 70%<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"659\"><p><strong>References:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" 
width=\"659\"><ul><li>Bishop, C. M., Pattern Recognition and Machine Learning, 2007.<\/li><li>Duda, R. O., Hart, P. E. and Stork, D. G., Pattern Classification, 2<sup>nd<\/sup> Edition, Wiley, 2000.<\/li><li>Mitchell, T., Machine Learning, McGraw Hill, 1997.<\/li><li>Witten, I. H., Frank, E. and Hall, M. A., Data Mining: Practical Machine Learning Tools and Techniques, 3<sup>rd<\/sup> Edition, Morgan Kaufmann Series, 2011.<\/li><\/ul><\/td><\/tr><\/tbody><\/table><p><\/div><\/div><div class=\"su-spoiler su-spoiler-style-simple su-spoiler-icon-plus su-spoiler-closed\" data-scroll-offset=\"0\" data-anchor-in-url=\"no\"><div class=\"su-spoiler-title\" tabindex=\"0\" role=\"button\"><span class=\"su-spoiler-icon\"><\/span>STA412M3: Biostatistical Techniques<\/div><div class=\"su-spoiler-content su-u-clearfix su-u-trim\"><br \/><div class=\"su-table su-table-alternate\"><\/p><table><tbody><tr><td><p><strong>Course Code<\/strong><\/p><\/td><td colspan=\"3\" width=\"458\"><p><strong>STA412M3<\/strong><\/p><\/td><\/tr><tr><td><p><strong>Course Title<\/strong><\/p><\/td><td colspan=\"3\" width=\"458\"><p><strong>Biostatistical Techniques <\/strong><\/p><\/td><\/tr><tr><td><p><strong>Academic Credits<\/strong><\/p><\/td><td colspan=\"3\" width=\"458\"><p><strong>03<\/strong><\/p><\/td><\/tr><tr><td rowspan=\"2\"><p><strong>Hourly Breakdown<\/strong><\/p><\/td><td width=\"126\"><p><strong>Theory<\/strong><\/p><\/td><td width=\"139\"><p><strong>Practical<\/strong><\/p><\/td><td width=\"193\"><p><strong>Independent Learning<\/strong><\/p><\/td><\/tr><tr><td width=\"126\"><p><strong>45 Hours<\/strong><\/p><\/td><td width=\"139\"><p><strong>&#8211;<\/strong><\/p><\/td><td width=\"193\"><p><strong>105 Hours<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><p><strong>Objective:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><p>Introduce the applied Biostatistical techniques used in statistical collaboration with various clinical 
trials.<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><p><strong>Intended Learning Outcomes:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><ul><li>Distinguish kinds and source of data<\/li><li>How to apply best tools and approaches for data collection or retrieval<\/li><li>Identify pitfalls and best practices for turning data into analyzable data<\/li><li>Examine the data qualities<\/li><li>Identify the correct use of models under different response domains<\/li><li><span style=\"font-family: inherit;font-size: inherit\">\u00a0<\/span>Use observational data for comparative studies<\/li><li>Discuss different types of follow-up and time to event responses<\/li><li>Apply basic actuarial and parametric approaches for time to event responses<\/li><li>Outline different types of follow-up and longitudinal data<\/li><li>Develop basic modeling approaches for longitudinal response<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><p><strong>Course Contents:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><ul><li><strong>Data<\/strong>: Kinds of raw data: Unstructured, semi-structured; structured; Sources of data: active versus passive data, clinical databases, registries, administrative data; Data Collection tools: spreadsheets, databases, text mining; Analysis dataset: event-based, longitudinal, unique record versus multiple records; Data screening: manual review, descriptive summary, exploratory data analysis; best practices when using tables and figures.<\/li><li><strong>Study Initiation: <\/strong>Introduction to types of studies; power calculation; measures of agreement: kappa statistic, concordance correlation coefficient; diagnostic tests: sensitivity, specificity, positive predictive value, negative predictive value; Simple Statistical Tests: parametric and nonparametric tests: comparison tests, trend tests, tests for correlated responses and assumptions.<\/li><li><strong>Modelling:<\/strong>Understand the basic principles 
of different kinds of statistical models, and their applicability to the analysis of clinical data; Models for continuous Response, Categorical Response, Count data, Zero-inflated data; Methods used in comparative studies<strong>: <\/strong>weighting, stratification, adjustment, matching.<\/li><li><strong>Time related responses: <\/strong>Introduction to Time to Event data: left censored data, competing risk data, repeated events; Introduction to Longitudinal Responses: continuous, binary, ordinal and nominal. Brief introduction to marginal and conditional (mixed-effects) models.<\/li><li><strong>Machine Learning Methods<\/strong>: Use of Bootstrap to estimate standard errors and confidence interval, Introduction to Random Forests: continuous response, categorical response, and time to event response.<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><p><strong>Teaching Methods:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><p>Lecture demonstration, Quizzes and Tutorial discussions<\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><p><strong>Assessment\/ Evaluation Details:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><ul><li>In-course assessments\u00a0\u00a0 \u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0 30%<\/li><li>End of course Examination\u00a0\u00a0\u00a0\u00a0 70%<\/li><\/ul><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><p><strong>Recommended Readings:<\/strong><\/p><\/td><\/tr><tr><td colspan=\"4\" width=\"603\"><ul><li>Forthofer, R. N., Lee, E. S. 
and Hernandez, M., Biostatistics: A Guide to Design, Analysis and Discovery, 2<sup>nd<\/sup> Edition, Elsevier- Academic Press, Boston, 2007.<\/li><li>Gordis, L., Epidemiology, 5<sup>th<\/sup> Edition, Elsevier- Academic Press, Philadelphia, 2014.<\/li><li>Harrell, F.E., Regression Modeling Strategies: With Applications to Linear Models, Logistic Regression, and Survival Analysis, 2<sup>nd<\/sup> Edition, Springer, New York, 2001.<\/li><\/ul><\/td><\/tr><\/tbody><\/table><p><\/div><br \/><\/div><\/div><br \/><\/div><\/p>\t\t\t\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t<\/section>\n\t\t\t\t<\/div>\n\t\t","protected":false},"excerpt":{"rendered":"<p>Level \u2013 3M Course units effective from academic year 2016\/2017 to date Level \u2013 4M Course units effective from academic year 2016\/2017 to date<\/p>\n","protected":false},"author":1,"featured_media":0,"parent":0,"menu_order":0,"comment_status":"closed","ping_status":"closed","template":"","meta":{"_uag_custom_page_level_css":"","ocean_post_layout":"","ocean_both_sidebars_style":"","ocean_both_sidebars_content_width":0,"ocean_both_sidebars_sidebars_width":0,"ocean_sidebar":"0","ocean_second_sidebar":"0","ocean_disable_margins":"enable","ocean_add_body_class":"","ocean_shortcode_before_top_bar":"","ocean_shortcode_after_top_bar":"","ocean_shortcode_before_header":"","ocean_shortcode_after_header":"","ocean_has_shortcode":"","ocean_shortcode_after_title":"","ocean_shortcode_before_footer_widgets":"","ocean_shortcode_after_footer_widgets":"","ocean_shortcode_before_footer_bottom":"","ocean_shortcode_after_footer_bottom":"","ocean_display_top_bar":"default","ocean_display_header":"default","ocean_header_style":"","ocean_center_header_left_menu":"0","ocean_custom_header_template":"0","ocean_custom_logo":0,"ocean_custom_retina_logo":0,"ocean_custom_logo_max_width":0,"ocean_custom_logo_tablet_max_width":0,"ocean_custom_logo_mobile_max_width":0,"ocean_custom_logo_max_height"
:0,"ocean_custom_logo_tablet_max_height":0,"ocean_custom_logo_mobile_max_height":0,"ocean_header_custom_menu":"0","ocean_menu_typo_font_family":"0","ocean_menu_typo_font_subset":"","ocean_menu_typo_font_size":0,"ocean_menu_typo_font_size_tablet":0,"ocean_menu_typo_font_size_mobile":0,"ocean_menu_typo_font_size_unit":"px","ocean_menu_typo_font_weight":"","ocean_menu_typo_font_weight_tablet":"","ocean_menu_typo_font_weight_mobile":"","ocean_menu_typo_transform":"","ocean_menu_typo_transform_tablet":"","ocean_menu_typo_transform_mobile":"","ocean_menu_typo_line_height":0,"ocean_menu_typo_line_height_tablet":0,"ocean_menu_typo_line_height_mobile":0,"ocean_menu_typo_line_height_unit":"","ocean_menu_typo_spacing":0,"ocean_menu_typo_spacing_tablet":0,"ocean_menu_typo_spacing_mobile":0,"ocean_menu_typo_spacing_unit":"","ocean_menu_link_color":"","ocean_menu_link_color_hover":"","ocean_menu_link_color_active":"","ocean_menu_link_background":"","ocean_menu_link_hover_background":"","ocean_menu_link_active_background":"","ocean_menu_social_links_bg":"","ocean_menu_social_hover_links_bg":"","ocean_menu_social_links_color":"","ocean_menu_social_hover_links_color":"","ocean_disable_title":"default","ocean_disable_heading":"default","ocean_post_title":"","ocean_post_subheading":"","ocean_post_title_style":"","ocean_post_title_background_color":"","ocean_post_title_background":0,"ocean_post_title_bg_image_position":"","ocean_post_title_bg_image_attachment":"","ocean_post_title_bg_image_repeat":"","ocean_post_title_bg_image_size":"","ocean_post_title_height":0,"ocean_post_title_bg_overlay":0.5,"ocean_post_title_bg_overlay_color":"","ocean_disable_breadcrumbs":"default","ocean_breadcrumbs_color":"","ocean_breadcrumbs_separator_color":"","ocean_breadcrumbs_links_color":"","ocean_breadcrumbs_links_hover_color":"","ocean_display_footer_widgets":"default","ocean_display_footer_bottom":"default","ocean_custom_footer_template":"0","footnotes":""},"class_list":["post-519","page","type-page"
,"status-publish","hentry","entry"],"uagb_featured_image_src":{"full":false,"thumbnail":false,"medium":false,"medium_large":false,"large":false,"1536x1536":false,"2048x2048":false,"ocean-thumb-m":false,"ocean-thumb-ml":false,"ocean-thumb-l":false},"uagb_author_info":{"display_name":"mathwpadmn","author_link":"https:\/\/maths.jfn.ac.lk\/index.php\/author\/mathwpadmn\/"},"uagb_comment_info":0,"uagb_excerpt":"Level \u2013 3M Course units effective from academic year 2016\/2017 to date Level \u2013 4M Course units effective from academic year 2016\/2017 to date","_links":{"self":[{"href":"https:\/\/maths.jfn.ac.lk\/index.php\/wp-json\/wp\/v2\/pages\/519","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/maths.jfn.ac.lk\/index.php\/wp-json\/wp\/v2\/pages"}],"about":[{"href":"https:\/\/maths.jfn.ac.lk\/index.php\/wp-json\/wp\/v2\/types\/page"}],"author":[{"embeddable":true,"href":"https:\/\/maths.jfn.ac.lk\/index.php\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/maths.jfn.ac.lk\/index.php\/wp-json\/wp\/v2\/comments?post=519"}],"version-history":[{"count":23,"href":"https:\/\/maths.jfn.ac.lk\/index.php\/wp-json\/wp\/v2\/pages\/519\/revisions"}],"predecessor-version":[{"id":1732,"href":"https:\/\/maths.jfn.ac.lk\/index.php\/wp-json\/wp\/v2\/pages\/519\/revisions\/1732"}],"wp:attachment":[{"href":"https:\/\/maths.jfn.ac.lk\/index.php\/wp-json\/wp\/v2\/media?parent=519"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}