%0 Journal Article
%1 UTTL201722
%A Uttl, Bob
%A White, Carmela A.
%A Gonzalez, Daniela Wong
%D 2017
%J Studies in Educational Evaluation
%K Evaluation Meta-analysis Multisection SET Teaching Validity correlations effectiveness evaluation faculty learning of student studies teaching
%P 22-42
%R 10.1016/j.stueduc.2016.08.007
%T Meta-analysis of faculty's teaching effectiveness: Student evaluation of teaching ratings and student learning are not related
%U https://www.sciencedirect.com/science/article/pii/S0191491X16300323
%V 54
%X Student evaluation of teaching (SET) ratings are used to evaluate faculty's teaching effectiveness based on a widespread belief that students learn more from highly rated professors. The key evidence cited in support of this belief are meta-analyses of multisection studies showing small-to-moderate correlations between SET ratings and student achievement (e.g., Cohen, 1980, Cohen, 1981; Feldman, 1989). We re-analyzed previously published meta-analyses of the multisection studies and found that their findings were an artifact of small sample sized studies and publication bias. Whereas the small sample sized studies showed large and moderate correlation, the large sample sized studies showed no or only minimal correlation between SET ratings and learning. Our up-to-date meta-analysis of all multisection studies revealed no significant correlations between the SET ratings and learning. These findings suggest that institutions focused on student learning and career success may want to abandon SET ratings as a measure of faculty's teaching effectiveness.
@article{UTTL201722,
abstract = {Student evaluation of teaching (SET) ratings are used to evaluate faculty's teaching effectiveness based on a widespread belief that students learn more from highly rated professors. The key evidence cited in support of this belief are meta-analyses of multisection studies showing small-to-moderate correlations between SET ratings and student achievement (e.g., Cohen, 1980, Cohen, 1981; Feldman, 1989). We re-analyzed previously published meta-analyses of the multisection studies and found that their findings were an artifact of small sample sized studies and publication bias. Whereas the small sample sized studies showed large and moderate correlation, the large sample sized studies showed no or only minimal correlation between SET ratings and learning. Our up-to-date meta-analysis of all multisection studies revealed no significant correlations between the SET ratings and learning. These findings suggest that institutions focused on student learning and career success may want to abandon SET ratings as a measure of faculty's teaching effectiveness.},
added-at = {2022-11-20T08:23:41.000+0100},
author = {Uttl, Bob and White, Carmela A. and Gonzalez, Daniela Wong},
biburl = {https://www.bibsonomy.org/bibtex/20406103478c880f8fee0062bffa1b38b/yish},
doi = {10.1016/j.stueduc.2016.08.007},
interhash = {e49cb95f2ee9ad62e9f9c6e43d31ce96},
intrahash = {0406103478c880f8fee0062bffa1b38b},
issn = {0191-491X},
journal = {Studies in Educational Evaluation},
keywords = {Evaluation Meta-analysis Multisection SET Teaching Validity correlations effectiveness evaluation faculty learning of student studies teaching},
note = {Evaluation of teaching: Challenges and promises},
pages = {22-42},
timestamp = {2022-11-20T08:23:41.000+0100},
title = {Meta-analysis of faculty's teaching effectiveness: Student evaluation of teaching ratings and student learning are not related},
url = {https://www.sciencedirect.com/science/article/pii/S0191491X16300323},
volume = 54,
year = 2017
}
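
The abstract's central statistical point — that small-sample multisection studies can show sizable SET/learning correlations through sampling error and selective publication alone, while large studies and a size-weighted pooled estimate sit near zero — can be illustrated with a short simulation. The sketch below is not the authors' analysis: it assumes a true correlation of zero, arbitrary study counts and section sizes, and a crude publication filter, and it uses plain n-weighting rather than the Fisher-z, n − 3 weighting a proper fixed-effect meta-analysis would use.

```python
# Illustrative sketch only (not the paper's method): how small-sample studies plus
# publication bias can make a null SET/learning correlation look moderate-to-large.
import numpy as np

rng = np.random.default_rng(0)

def simulate_study(n):
    """One multisection-style study with n sections; true SET/learning correlation = 0."""
    set_ratings = rng.normal(size=n)
    learning = rng.normal(size=n)          # independent of SET ratings by construction
    return np.corrcoef(set_ratings, learning)[0, 1]

# Hypothetical study pools: 500 small studies (~10 sections) and 500 large (~100 sections)
small = np.array([simulate_study(10) for _ in range(500)])
large = np.array([simulate_study(100) for _ in range(500)])

print("SD of r across small studies:", small.std().round(2))   # wide scatter
print("SD of r across large studies:", large.std().round(2))   # narrow scatter

# Crude publication-bias filter: only "impressive" positive correlations get reported
published = small[small > 0.3]
print("naive mean r of published small studies:", published.mean().round(2))

# Sample-size-weighted pooled estimate over *all* simulated studies
ns = np.concatenate([np.full(500, 10), np.full(500, 100)])
rs = np.concatenate([small, large])
print("n-weighted pooled r:", np.average(rs, weights=ns).round(2))  # ~0
```

Run as written, the small studies scatter widely (so some large positive correlations appear by chance even under a true r of 0), the "published" subset averages well above zero, and the size-weighted pooled estimate stays close to zero — the pattern the abstract attributes to small-sample artifacts and publication bias.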