{
"title": "Polynomial Regression Mastery: 100 MCQs",
"description": "A comprehensive set of 100 multiple-choice questions designed to teach and test your understanding of Polynomial Regression, starting from basic Linear Regression concepts to advanced ideas like model evaluation, bias-variance tradeoff, and overfitting.",
"questions": [
{
"id": 1,
"questionText": "What is the main goal of Linear Regression?",
"options": [
"To find clusters in data",
"To compress the data",
"To find a straight-line relationship between variables",
"To predict categories"
],
"correctAnswerIndex": 2,
"explanation": "Linear Regression tries to find the best straight line that shows the relationship between input and output variables."
},
{
"id": 2,
"questionText": "In Linear Regression, what kind of relationship is modeled between X and Y?",
"options": [
"Polynomial",
"Linear",
"Circular",
"Exponential"
],
"correctAnswerIndex": 1,
"explanation": "Linear Regression assumes a straight-line (linear) relationship between the independent and dependent variables."
},
{
"id": 3,
"questionText": "What does the slope in Linear Regression represent?",
"options": [
"The change in Y for a one-unit change in X",
"The error of the model",
"The average of all Y values",
"The value of Y when X is 0"
],
"correctAnswerIndex": 0,
"explanation": "The slope tells us how much Y changes when X increases by 1 unit."
},
{
"id": 4,
"questionText": "What is the intercept in a Linear Regression equation?",
"options": [
"The number of data points",
"The steepness of the line",
"The point where the line crosses the Y-axis",
"The residual value"
],
"correctAnswerIndex": 2,
"explanation": "The intercept is the Y value when X equals 0. It’s where the line meets the Y-axis."
},
{
"id": 5,
"questionText": "What does a residual represent in regression?",
"options": [
"The slope of the line",
"The average of predictions",
"Difference between actual and predicted values",
"The standard deviation"
],
"correctAnswerIndex": 2,
"explanation": "A residual is the difference between the actual value and the predicted value. It shows how far the model’s prediction is from reality."
},
{
"id": 6,
"questionText": "What method is commonly used to fit a Linear Regression line?",
"options": [
"Gradient Ascent",
"Residual Addition",
"Ordinary Least Squares",
"Mean Minimization"
],
"correctAnswerIndex": 2,
"explanation": "Ordinary Least Squares (OLS) minimizes the sum of squared residuals to find the best-fitting line."
},
{
"id": 7,
"questionText": "What happens if residuals are not randomly distributed?",
"options": [
"There may be a pattern not captured by the model",
"It increases accuracy",
"The slope becomes 0",
"The model is perfect"
],
"correctAnswerIndex": 0,
"explanation": "If residuals show a pattern, it means the model missed some relationship in the data."
},
{
"id": 8,
"questionText": "What type of variable does Linear Regression predict?",
"options": [
"Continuous",
"Integer only",
"Categorical",
"Binary"
],
"correctAnswerIndex": 0,
"explanation": "Linear Regression is used for predicting continuous numerical values like height, weight, or prices."
},
{
"id": 9,
"questionText": "Which assumption is true for Linear Regression?",
"options": [
"All features are independent",
"Residuals are normally distributed",
"Data must be categorical",
"Output is binary"
],
"correctAnswerIndex": 1,
"explanation": "One assumption of Linear Regression is that residuals should follow a normal distribution."
},
{
"id": 10,
"questionText": "What problem occurs when data is not linear?",
"options": [
"Lower variance",
"Perfect prediction",
"Poor model fit",
"Balanced output"
],
"correctAnswerIndex": 2,
"explanation": "Linear Regression works best for linear data. If data is curved, it won’t fit well, leading to high error."
},
{
"id": 11,
"questionText": "What is Polynomial Regression used for?",
"options": [
"Modeling curved relationships",
"Modeling straight-line relationships",
"Finding clusters",
"Reducing dimensionality"
],
"correctAnswerIndex": 0,
"explanation": "Polynomial Regression models non-linear or curved relationships between input and output variables."
},
{
"id": 12,
"questionText": "Polynomial Regression is an extension of which model?",
"options": [
"Decision Tree",
"Linear Regression",
"Logistic Regression",
"Naive Bayes"
],
"correctAnswerIndex": 1,
"explanation": "Polynomial Regression is an extension of Linear Regression where input features are raised to powers."
},
{
"id": 13,
"questionText": "In Polynomial Regression, we add what kind of terms to the model?",
"options": [
"Cubic roots only",
"Squared and higher power terms of input",
"Logarithmic terms",
"Exponential terms"
],
"correctAnswerIndex": 1,
"explanation": "Polynomial Regression includes higher power terms like x², x³, etc., to capture curves in the data."
},
{
"id": 14,
"questionText": "What shape can a second-degree Polynomial Regression model represent?",
"options": [
"Circle",
"Parabola",
"Zigzag",
"Straight line"
],
"correctAnswerIndex": 1,
"explanation": "A second-degree polynomial creates a parabola-shaped curve, allowing the model to fit U-shaped data."
},
{
"id": 15,
"questionText": "What is the general form of a Polynomial Regression equation with one variable?",
"options": [
"y = b0 + b1x + b2x² + ... + bkx^k",
"y = mx + b",
"y = b0 + b1x",
"y = bx + c"
],
"correctAnswerIndex": 0,
"explanation": "Polynomial Regression includes terms of increasing power: x, x², x³, etc., up to the desired degree k."
},
{
"id": 16,
"questionText": "What happens when you increase the degree of a polynomial too much?",
"options": [
"The model becomes linear",
"The model may overfit the data",
"The model becomes simpler",
"The error increases on training data"
],
"correctAnswerIndex": 1,
"explanation": "A high-degree polynomial can overfit by fitting noise in the training data rather than the true pattern."
},
{
"id": 17,
"questionText": "Overfitting in Polynomial Regression leads to what?",
"options": [
"Lower variance",
"Simpler equations",
"Better generalization",
"Poor performance on new data"
],
"correctAnswerIndex": 3,
"explanation": "Overfitting means the model performs well on training data but fails to generalize to unseen data."
},
{
"id": 18,
"questionText": "What is underfitting?",
"options": [
"When the model is too simple to capture patterns",
"When training accuracy is 100%",
"When residuals are 0",
"When the model is too complex"
],
"correctAnswerIndex": 0,
"explanation": "Underfitting happens when the model is too simple and cannot capture the underlying structure of the data."
},
{
"id": 19,
"questionText": "Which term describes the trade-off between bias and variance in a polynomial model?",
"options": [
"Regularization",
"Feature Scaling",
"Gradient Descent",
"Bias-Variance Tradeoff"
],
"correctAnswerIndex": 3,
"explanation": "Bias-Variance Tradeoff explains how increasing model complexity reduces bias but increases variance."
},
{
"id": 20,
"questionText": "What is the degree of a polynomial?",
"options": [
"Number of variables",
"Highest power of the input variable",
"Sum of all coefficients",
"Number of residuals"
],
"correctAnswerIndex": 1,
"explanation": "The degree of a polynomial is the highest exponent of the input variable in the equation."
},
{
"id": 21,
"questionText": "Which type of relationship can Polynomial Regression handle that Linear Regression cannot?",
"options": [
"Categorical",
"Binary",
"Constant",
"Non-linear"
],
"correctAnswerIndex": 3,
"explanation": "Polynomial Regression can model curved, non-linear relationships, unlike simple linear regression."
},
{
"id": 22,
"questionText": "What does increasing the polynomial degree do?",
"options": [
"Simplifies computation",
"Decreases coefficients",
"Removes noise",
"Adds more curve flexibility"
],
"correctAnswerIndex": 3,
"explanation": "A higher degree polynomial gives the model more flexibility to follow the data's shape."
},
{
"id": 23,
"questionText": "What kind of curve does a third-degree polynomial create?",
"options": [
"Straight line",
"S-shape",
"U-shape",
"Flat line"
],
"correctAnswerIndex": 1,
"explanation": "A cubic polynomial (degree 3) can create an S-shaped curve that changes direction once."
},
{
"id": 24,
"questionText": "Which library in Python is commonly used to create polynomial features?",
"options": [
"NumPy",
"scikit-learn",
"Pandas",
"Matplotlib"
],
"correctAnswerIndex": 1,
"explanation": "The PolynomialFeatures class from scikit-learn is used to generate higher-degree input features."
},
{
"id": 25,
"questionText": "What function in scikit-learn is used to transform data into polynomial features?",
"options": [
"create_poly_data()",
"PolynomialFeatures()",
"poly_transform()",
"make_polynomial()"
],
"correctAnswerIndex": 1,
"explanation": "The PolynomialFeatures() function expands input features into polynomial combinations."
},
{
"id": 26,
"questionText": "Which of the following problems is Polynomial Regression best suited for?",
"options": [
"Linear relationships only",
"Categorical output prediction",
"Curved relationships between variables",
"Time series forecasting only"
],
"correctAnswerIndex": 2,
"explanation": "Polynomial Regression is best used when data shows a curved or non-linear pattern between input and output."
},
{
"id": 27,
"questionText": "If the degree of the polynomial is 1, what does Polynomial Regression become?",
"options": [
"Logistic Regression",
"Linear Regression",
"Decision Tree",
"Ridge Regression"
],
"correctAnswerIndex": 1,
"explanation": "When the degree is 1, Polynomial Regression is the same as simple Linear Regression."
},
{
"id": 28,
"questionText": "What happens when you use a degree that is too low for Polynomial Regression?",
"options": [
"No bias",
"Underfitting",
"Perfect fit",
"Overfitting"
],
"correctAnswerIndex": 1,
"explanation": "Using a degree that is too low may cause the model to miss patterns, leading to underfitting."
},
{
"id": 29,
"questionText": "What kind of error increases with a high-degree polynomial?",
"options": [
"Noise",
"Correlation",
"Bias",
"Variance"
],
"correctAnswerIndex": 3,
"explanation": "High-degree polynomials often increase variance, meaning the model becomes sensitive to small data changes."
},
{
"id": 30,
"questionText": "What is the main goal when choosing the degree of a polynomial?",
"options": [
"To balance bias and variance",
"To reduce coefficients",
"To fit as many points as possible",
"To maximize error"
],
"correctAnswerIndex": 0,
"explanation": "The degree should be chosen to balance bias (simplicity) and variance (complexity) for good generalization."
},
{
"id": 31,
"questionText": "What technique can help prevent overfitting in Polynomial Regression?",
"options": [
"Adding more features",
"Increasing polynomial degree",
"Removing training data",
"Regularization"
],
"correctAnswerIndex": 3,
"explanation": "Regularization methods like Ridge or Lasso Regression can reduce overfitting by penalizing large coefficients."
},
{
"id": 32,
"questionText": "What is Ridge Regression also known as?",
"options": [
"Variance Reduction",
"L2 Regularization",
"Elastic Net",
"L1 Regularization"
],
"correctAnswerIndex": 1,
"explanation": "Ridge Regression uses L2 Regularization, which penalizes the sum of squared coefficients."
},
{
"id": 33,
"questionText": "What is Lasso Regression also known as?",
"options": [
"L1 Regularization",
"Bias Correction",
"L2 Regularization",
"Polynomial Fitting"
],
"correctAnswerIndex": 0,
"explanation": "Lasso Regression uses L1 Regularization, which penalizes the absolute values of coefficients."
},
{
"id": 34,
"questionText": "What is the main difference between Ridge and Lasso?",
"options": [
"Ridge can remove features, Lasso cannot",
"Ridge uses L1, Lasso uses L2",
"Both remove coefficients equally",
"Lasso can make some coefficients zero, Ridge cannot"
],
"correctAnswerIndex": 3,
"explanation": "Lasso can shrink some coefficients to exactly zero, performing feature selection, while Ridge cannot."
},
{
"id": 35,
"questionText": "What evaluation metric measures how well the model explains the variance of the data?",
"options": [
"Mean Absolute Error",
"Mean Squared Error",
"R-squared",
"Root Mean Square Deviation"
],
"correctAnswerIndex": 2,
"explanation": "R-squared measures the proportion of variance in the target variable explained by the model."
},
{
"id": 36,
"questionText": "What is the range of R-squared values?",
"options": [
"0 to 1",
"1 to infinity",
"0 to 100",
"-1 to 1"
],
"correctAnswerIndex": 0,
"explanation": "R-squared ranges from 0 to 1, where 1 means perfect prediction and 0 means no predictive power."
},
{
"id": 37,
"questionText": "Which error metric squares the difference between actual and predicted values?",
"options": [
"Correlation Coefficient",
"R-squared",
"Mean Absolute Error",
"Mean Squared Error"
],
"correctAnswerIndex": 3,
"explanation": "Mean Squared Error (MSE) calculates the average of squared prediction errors."
},
{
"id": 38,
"questionText": "Why is Root Mean Squared Error (RMSE) preferred over MSE?",
"options": [
"It gives larger values",
"It reduces overfitting",
"It is in the same units as the target variable",
"It lowers variance"
],
"correctAnswerIndex": 2,
"explanation": "RMSE is the square root of MSE, giving error values in the same unit as the dependent variable."
},
{
"id": 39,
"questionText": "What can be a sign of overfitting when comparing training and test errors?",
"options": [
"Training error is low but test error is high",
"Both errors are low",
"Both errors are high",
"Test error is lower than training error"
],
"correctAnswerIndex": 0,
"explanation": "If the training error is much lower than test error, it indicates the model has memorized the training data."
},
{
"id": 40,
"questionText": "Which plot is useful to visualize Polynomial Regression fit?",
"options": [
"Scatter plot with curve",
"Line plot",
"Bar plot",
"Pie chart"
],
"correctAnswerIndex": 0,
"explanation": "Scatter plots with a fitted curve help visualize how well the polynomial model fits the data."
},
{
"id": 41,
"questionText": "How can you check if adding polynomial terms improves your model?",
"options": [
"By visualizing the curve",
"By comparing R-squared values",
"By adding random features",
"By increasing degree blindly"
],
"correctAnswerIndex": 1,
"explanation": "Comparing R-squared and validation errors helps decide if extra polynomial terms improve model accuracy."
},
{
"id": 42,
"questionText": "What is multicollinearity in Polynomial Regression?",
"options": [
"When residuals are independent",
"When regularization is applied",
"When output is non-linear",
"When input features are highly correlated"
],
"correctAnswerIndex": 3,
"explanation": "Polynomial features (x, x², x³, etc.) are often correlated, causing multicollinearity, which affects coefficient stability."
},
{
"id": 43,
"questionText": "Which method can help reduce multicollinearity in polynomial models?",
"options": [
"Adding noise",
"Increasing degree",
"Regularization",
"Ignoring correlations"
],
"correctAnswerIndex": 2,
"explanation": "Regularization (Ridge or Lasso) reduces coefficient sensitivity caused by multicollinearity."
},
{
"id": 44,
"questionText": "What is the purpose of feature scaling in Polynomial Regression?",
"options": [
"To make data categorical",
"To prevent large coefficient values",
"To remove outliers",
"To increase variance"
],
"correctAnswerIndex": 1,
"explanation": "Feature scaling ensures that polynomial features with large values do not dominate during training."
},
{
"id": 45,
"questionText": "Which scaling method is commonly used before Polynomial Regression?",
"options": [
"Min-Max Scaling",
"Text Vectorization",
"One-Hot Encoding",
"Label Encoding"
],
"correctAnswerIndex": 0,
"explanation": "Min-Max Scaling is often used to bring features within a small range, improving numerical stability."
},
{
"id": 46,
"questionText": "What is the main advantage of Polynomial Regression over Linear Regression?",
"options": [
"Faster computation",
"Easier interpretation",
"Ability to fit curved patterns",
"Less data needed"
],
"correctAnswerIndex": 2,
"explanation": "Polynomial Regression can model curved, non-linear data patterns that linear models cannot handle."
},
{
"id": 47,
"questionText": "Which curve fitting problem can Polynomial Regression solve?",
"options": [
"Fitting U-shaped and S-shaped data",
"Fitting straight lines",
"Finding text patterns",
"Classifying images"
],
"correctAnswerIndex": 0,
"explanation": "Polynomial Regression is effective for U-shaped or S-shaped curves that need flexibility in fitting."
},
{
"id": 48,
"questionText": "Which of these statements about high-degree polynomials is true?",
"options": [
"They are simple to interpret",
"They generalize well",
"They may oscillate wildly between points",
"They reduce variance"
],
"correctAnswerIndex": 2,
"explanation": "High-degree polynomials may fluctuate too much between data points, reducing stability."
},
{
"id": 49,
"questionText": "What type of regularization combines L1 and L2?",
"options": [
"Ridge",
"Dropout",
"Elastic Net",
"Lasso"
],
"correctAnswerIndex": 2,
"explanation": "Elastic Net combines both L1 (Lasso) and L2 (Ridge) regularization techniques."
},
{
"id": 50,
"questionText": "What does the alpha parameter control in Ridge and Lasso Regression?",
"options": [
"The learning rate",
"The model degree",
"The regularization strength",
"The intercept"
],
"correctAnswerIndex": 2,
"explanation": "Alpha controls how strongly the model penalizes large coefficient values. Higher alpha means stronger regularization."
},
{
"id": 51,
"questionText": "What happens if the polynomial degree is set too high on a small dataset?",
"options": [
"Perfect fitting always",
"Underfitting",
"Overfitting",
"No change in accuracy"
],
"correctAnswerIndex": 2,
"explanation": "A high-degree polynomial can memorize the training data, leading to overfitting and poor generalization."
},
{
"id": 52,
"questionText": "Which of the following helps reduce overfitting in Polynomial Regression?",
"options": [
"Using regularization",
"Using fewer data points",
"Adding noise to labels",
"Increasing polynomial degree"
],
"correctAnswerIndex": 0,
"explanation": "Regularization penalizes large coefficients, which helps reduce overfitting."
},
{
"id": 53,
"questionText": "What does feature scaling do before applying polynomial features?",
"options": [
"Ensures all features contribute equally",
"Removes outliers",
"Increases model degree",
"Makes coefficients smaller"
],
"correctAnswerIndex": 0,
"explanation": "Feature scaling ensures all input features have similar ranges, preventing domination by one feature."
},
{
"id": 54,
"questionText": "Why is Polynomial Regression still considered a linear model?",
"options": [
"Because it ignores nonlinear patterns",
"Because coefficients are linear in parameters",
"Because data must be linear",
"Because it uses straight lines"
],
"correctAnswerIndex": 1,
"explanation": "Despite nonlinear features, the model remains linear in terms of its coefficients."
},
{
"id": 55,
"questionText": "Which sklearn class is used to generate polynomial features?",
"options": [
"PolynomialFeatures",
"PolyScaler",
"FeatureGenerator",
"PolynomialModel"
],
"correctAnswerIndex": 0,
"explanation": "PolynomialFeatures from sklearn.preprocessing expands input data to include polynomial terms."
},
{
"id": 56,
"questionText": "What is the main disadvantage of using very high-degree polynomials?",
"options": [
"Simpler model",
"Overfitting and numerical instability",
"Lower computation time",
"Underfitting"
],
"correctAnswerIndex": 1,
"explanation": "High-degree polynomials can overfit and suffer from large coefficient swings causing instability."
},
{
"id": 57,
"questionText": "In Polynomial Regression, which term represents the intercept?",
"options": [
"x^n term",
"x^1 term",
"x^0 term",
"x^2 term"
],
"correctAnswerIndex": 2,
"explanation": "The x^0 term represents the constant (intercept) of the polynomial equation."
},
{
"id": 58,
"questionText": "What will happen if we skip PolynomialFeatures but use degree > 1 in LinearRegression?",
"options": [
"It will use polynomial terms automatically",
"The model will fail",
"It will regularize coefficients",
"It will behave like linear regression"
],
"correctAnswerIndex": 3,
"explanation": "LinearRegression does not create polynomial terms automatically. Without PolynomialFeatures, it stays linear."
},
{
"id": 59,
"questionText": "Which cross-validation technique is useful to choose polynomial degree?",
"options": [
"Train-Test Split only",
"Random Sampling",
"Leave-One-Out CV",
"No validation needed"
],
"correctAnswerIndex": 2,
"explanation": "Leave-One-Out Cross Validation works well to find the optimal polynomial degree for small datasets."
},
{
"id": 60,
"questionText": "How does increasing polynomial degree affect bias and variance?",
"options": [
"Increases bias and decreases variance",
"Decreases bias and increases variance",
"Increases both",
"Decreases both"
],
"correctAnswerIndex": 1,
"explanation": "Higher degrees reduce bias (fit training data better) but increase variance (sensitive to noise)."
},
{
"id": 61,
"questionText": "What does the term 'interaction features' mean in Polynomial Regression?",
"options": [
"Features multiplied together",
"Random noise features",
"Unrelated features",
"Features added together"
],
"correctAnswerIndex": 0,
"explanation": "Interaction features are created by multiplying original features, capturing combined effects."
},
{
"id": 62,
"questionText": "What happens to training error as we increase polynomial degree?",
"options": [
"Always increases",
"Usually decreases",
"Becomes random",
"Stays constant"
],
"correctAnswerIndex": 1,
"explanation": "A higher-degree polynomial fits the training data better, reducing training error."
},
{
"id": 63,
"questionText": "Which step comes immediately after generating polynomial features?",
"options": [
"Scaling",
"Model fitting",
"Data shuffling",
"Feature selection"
],
"correctAnswerIndex": 1,
"explanation": "After generating polynomial features, the next step is fitting the regression model."
},
{
"id": 64,
"questionText": "What is a typical symptom of overfitting in Polynomial Regression?",
"options": [
"Identical train and test results",
"Low training accuracy",
"High training accuracy but low test accuracy",
"High test accuracy"
],
"correctAnswerIndex": 2,
"explanation": "Overfitting happens when a model performs very well on training data but poorly on unseen data."
},
{
"id": 65,
"questionText": "How can we make polynomial regression less sensitive to outliers?",
"options": [
"Use regularization",
"Add more noise",
"Ignore scaling",
"Increase degree"
],
"correctAnswerIndex": 0,
"explanation": "Regularization like Ridge or Lasso limits large coefficient values, making the model less sensitive to outliers."
},
{
"id": 66,
"questionText": "Which metric is least suitable for measuring polynomial regression performance?",
"options": [
"R-squared",
"Confusion Matrix",
"Mean Absolute Error",
"Mean Squared Error"
],
"correctAnswerIndex": 1,
"explanation": "Confusion Matrix is used for classification problems, not regression."
},
{
"id": 67,
"questionText": "What is the shape of the curve in quadratic regression?",
"options": [
"Circle",
"Parabola",
"Hyperbola",
"Line"
],
"correctAnswerIndex": 1,
"explanation": "A second-degree polynomial forms a parabola."
},
{
"id": 68,
"questionText": "What does PolynomialFeatures(degree=3) generate for input x?",
"options": [
"x^2 only",
"x^3 only",
"x, x^2, x^3",
"x"
],
"correctAnswerIndex": 2,
"explanation": "It expands the feature set to include x, x^2, and x^3 terms."
},
{
"id": 69,
"questionText": "When should we use Polynomial Regression over Linear Regression?",
"options": [
"When relationship is clearly nonlinear",
"When slope is constant",
"When data has many missing values",
"When data is categorical"
],
"correctAnswerIndex": 0,
"explanation": "Polynomial Regression captures nonlinear relationships between input and output."
},
{
"id": 70,
"questionText": "Why does feature scaling matter more for higher-degree polynomials?",
"options": [
"Because it reduces intercept",
"Because it helps visualization",
"Because polynomial terms grow rapidly",
"Because it ignores bias"
],
"correctAnswerIndex": 2,
"explanation": "High-degree terms like x^5 or x^6 can produce large numeric values; scaling keeps them manageable."
},
{
"id": 71,
"questionText": "What is the main effect of high-degree polynomials on model complexity?",
"options": [
"Increases complexity",
"Keeps complexity same",
"Removes features",
"Reduces complexity"
],
"correctAnswerIndex": 0,
"explanation": "High-degree polynomials add more terms, increasing model complexity and flexibility."
},
{
"id": 72,
"questionText": "Which method helps select the optimal polynomial degree?",
"options": [
"Cross-validation",
"Using only training error",
"Trial and error",
"Random selection"
],
"correctAnswerIndex": 0,
"explanation": "Cross-validation evaluates model performance on unseen data to choose the best polynomial degree."
},
{
"id": 73,
"questionText": "What is bias in the context of polynomial regression?",
"options": [
"Error due to noise",
"Error due to large coefficients",
"Error due to model simplicity",
"Random fluctuation"
],
"correctAnswerIndex": 2,
"explanation": "Bias measures the error caused by approximating a complex relationship with a simple model."
},
{
"id": 74,
"questionText": "What is variance in the context of polynomial regression?",
"options": [
"Error due to bias",
"Error due to sensitivity to training data",
"Error due to model simplicity",
"Error due to missing features"
],
"correctAnswerIndex": 1,
"explanation": "Variance is the error caused when the model changes too much with small changes in the training data."
},
{
"id": 75,
"questionText": "Which combination of bias and variance is ideal?",
"options": [
"High bias, low variance",
"Low bias, high variance",
"High bias, high variance",
"Low bias, low variance"
],
"correctAnswerIndex": 3,
"explanation": "The ideal model has low bias (accurate on training) and low variance (stable on new data)."
},
{
"id": 76,
"questionText": "How can we detect overfitting visually?",
"options": [
"By examining coefficients only",
"By looking at training vs test error",
"By plotting residuals",
"By plotting polynomial degree only"
],
"correctAnswerIndex": 1,
"explanation": "Overfitting is indicated when training error is very low but test error is high."
},
{
"id": 77,
"questionText": "Which method reduces model complexity while keeping fit reasonable?",
"options": [
"Regularization",
"Adding more polynomial terms",
"Ignoring validation data",
"Increasing dataset noise"
],
"correctAnswerIndex": 0,
"explanation": "Regularization penalizes large coefficients, simplifying the model and reducing overfitting."
},
{
"id": 78,
"questionText": "Why is L1 regularization useful in polynomial regression?",
"options": [
"Increases variance",
"Makes polynomial degree higher",
"Removes features automatically",
"Decreases bias only"
],
"correctAnswerIndex": 2,
"explanation": "L1 regularization (Lasso) can shrink some coefficients to zero, effectively selecting important features."
},
{
"id": 79,
"questionText": "Why is L2 regularization useful in polynomial regression?",
"options": [
"Removes features",
"Reduces large coefficient impact",
"Increases polynomial degree",
"Increases training error only"
],
"correctAnswerIndex": 1,
"explanation": "L2 regularization (Ridge) penalizes large coefficients to make the model more stable."
},
{
"id": 80,
"questionText": "Which visualization helps check polynomial fit?",
"options": [
"Histogram",
"Box plot",
"Scatter plot with fitted curve",
"Bar chart"
],
"correctAnswerIndex": 2,
"explanation": "Scatter plots with fitted curves show how well the polynomial captures data patterns."
},
{
"id": 81,
"questionText": "What does R-squared indicate in polynomial regression?",
"options": [
"Mean squared error",
"Training time",
"Number of features",
"Proportion of variance explained"
],
"correctAnswerIndex": 3,
"explanation": "R-squared measures how much of the target variance is captured by the model."
},
{
"id": 82,
"questionText": "Which error metric gives average magnitude of prediction errors?",
"options": [
"Mean Absolute Error",
"Variance",
"R-squared",
"Mean Squared Error"
],
"correctAnswerIndex": 0,
"explanation": "Mean Absolute Error calculates the average absolute difference between predicted and actual values."
},
{
"id": 83,
"questionText": "Which metric penalizes large errors more heavily?",
"options": [
"MSE",
"MAE",
"R-squared",
"Correlation coefficient"
],
"correctAnswerIndex": 0,
"explanation": "MSE squares the errors, giving higher weight to large deviations."
},
{
"id": 84,
"questionText": "Why is cross-validation important in polynomial regression?",
"options": [
"To ignore overfitting",
"To fit data perfectly",
"To evaluate model on unseen data",
"To increase polynomial degree"
],
"correctAnswerIndex": 2,
"explanation": "Cross-validation tests model performance on unseen data, helping select optimal degree and reduce overfitting."
},
{
"id": 85,
"questionText": "Which technique can combine multiple polynomial models for better prediction?",
"options": [
"Single model fitting",
"L1 regularization only",
"Feature scaling",
"Bagging"
],
"correctAnswerIndex": 3,
"explanation": "Bagging combines predictions from multiple models to reduce variance and improve accuracy."
},
{
"id": 86,
"questionText": "Which problem arises if polynomial degree is too low?",
"options": [
"Feature scaling",
"Underfitting",
"Regularization",
"Overfitting"
],
"correctAnswerIndex": 1,
"explanation": "A low-degree polynomial may fail to capture data patterns, causing underfitting."
},
{
"id": 87,
"questionText": "Which method automatically selects important polynomial terms?",
"options": [
"Lasso Regression",
"Ridge Regression",
"Cross-validation only",
"Standard Linear Regression"
],
"correctAnswerIndex": 0,
"explanation": "Lasso regression can shrink some coefficients to zero, selecting the most important features."
},
{
"id": 88,
"questionText": "Which is a symptom of multicollinearity in polynomial regression?",
"options": [
"Low variance",
"High R-squared always",
"Unstable coefficients",
"Zero training error"
],
"correctAnswerIndex": 2,
"explanation": "Polynomial terms are often correlated, making coefficients unstable and sensitive to small data changes."
},
{
"id": 89,
"questionText": "Which of these is an advantage of polynomial regression?",
"options": [
"Fits linear data only",
"Can fit nonlinear patterns",
"Removes outliers automatically",
"Reduces training data needed"
],
"correctAnswerIndex": 1,
"explanation": "Polynomial regression captures nonlinear relationships between variables."
},
{
"id": 90,
"questionText": "Which is a common step before polynomial regression on real data?",
"options": [
"Removing target variable",
"Feature scaling",
"Increasing polynomial degree blindly",
"Random noise addition"
],
"correctAnswerIndex": 1,
"explanation": "Feature scaling ensures all polynomial terms are on a similar scale for stable model training."
},
{
"id": 91,
"questionText": "Which model would you choose for a U-shaped data trend?",
"options": [
"Linear Regression",
"Logistic Regression",
"Quadratic Polynomial Regression",
"Cubic Regression"
],
"correctAnswerIndex": 2,
"explanation": "Quadratic (degree 2) polynomial regression is ideal for U-shaped patterns."
},
{
"id": 92,
"questionText": "Which model would you choose for an S-shaped trend?",
"options": [
"Quadratic Regression",
"Cubic Regression",
"Linear Regression",
"Logistic Regression"
],
"correctAnswerIndex": 1,
"explanation": "Cubic (degree 3) polynomial regression can fit S-shaped trends with one inflection point."
},
{
"id": 93,
"questionText": "Which is an indicator of underfitting in polynomial regression?",
"options": [
"Low bias",
"High variance",
"Low training error and high test error",
"High training error and high test error"
],
"correctAnswerIndex": 3,
"explanation": "Underfitting shows both training and test errors are high due to a too-simple model."
},
{
"id": 94,
"questionText": "What is the effect of regularization on polynomial coefficients?",
"options": [
"Increases bias only",
"Reduces magnitude of coefficients",
"Increases all coefficients",
"Removes training data"
],
"correctAnswerIndex": 1,
"explanation": "Regularization penalizes large coefficients to reduce overfitting."
},
{
"id": 95,
"questionText": "Which method can evaluate polynomial regression stability across datasets?",
"options": [
"Only visualization",
"Train-test split",
"Cross-validation",
"Random coefficient assignment"
],
"correctAnswerIndex": 2,
"explanation": "Cross-validation tests the model on multiple data splits to check stability and generalization."
},
{
"id": 96,
"questionText": "Why should we avoid excessively high-degree polynomials?",
"options": [
"They increase overfitting",
"They always improve R-squared",
"They reduce bias",
"They remove noise automatically"
],
"correctAnswerIndex": 0,
"explanation": "Excessively high-degree polynomials may fit noise rather than the actual pattern, causing overfitting."
},
{
"id": 97,
"questionText": "Which method can simplify a polynomial regression model?",
"options": [
"Ignoring validation",
"Increasing degree",
"Adding noise",
"Regularization"
],
"correctAnswerIndex": 3,
"explanation": "Regularization reduces large coefficients and can simplify the model."
},
{
"id": 98,
"questionText": "Which of the following is true about polynomial regression predictions?",
"options": [
"Always linear",
"Independent of input",
"Always quadratic",
"Can be nonlinear even with linear coefficients"
],
"correctAnswerIndex": 3,
"explanation": "Predictions can be nonlinear because the input features are polynomial terms, even if the model is linear in coefficients."
},
{
"id": 99,
"questionText": "Which is a good strategy for selecting polynomial degree?",
"options": [
"Ignoring training error",
"Always using degree 5",
"Using cross-validation",
"Trial and error without validation"
],
"correctAnswerIndex": 2,
"explanation": "Cross-validation helps find a degree that balances underfitting and overfitting."
},
{
"id": 100,
"questionText": "What is the final goal of polynomial regression?",
"options": [
"To remove features",
"To increase variance",
"To predict continuous values with nonlinear patterns",
"To classify data"
],
"correctAnswerIndex": 2,
"explanation": "Polynomial regression aims to predict continuous outcomes while capturing nonlinear relationships."
}
]
}