
I am receiving the error message "Error: Discrete value supplied to continuous scale". I tried the proposed solutions, but they did not help. In my data, column 1 contains the method names. I want the method names on the y-axis and the month names on the x-axis, and then I will use geom_tile to fill the heatmap with the accuracy scores of all methods.

dput(results)
structure(list(V1 = c("Pers", "58.73", "68.58", "54.25", "47.69", 
"42.98", "40.6", "37.47", "40.81", "51.37", "57.13", "63.08", 
"75.75", "62.49", "54.1", "60.85", "47.78", "46.23", "35.7", 
"39.96", "40.14", "50.89", "56", "62.29", "68.12"), V2 = c("Clear-sky Pers", 
"46.68", "59.05", "37.28", "32.82", "28.89", "29.9", "26.58", 
"22.87", "27.77", "49.75", "52.66", "63.74", "52.41", "42.38", 
"45.54", "32.16", "32.83", "22.41", "31.01", "23.99", "28.45", 
"48.3", "53.44", "57.96"), V3 = c("Bagged MARS", "39.82", "51.28", 
"36.43", "32.51", "25.39", "27.93", "26.35", "23.27", "28.62", 
"26.16", "36.28", "55.49", "45.14", "33.34", "41.7", "31.49", 
"31.63", "21.88", "29.32", "23.47", "29.34", "30.59", "32.03", 
"46.87"), V4 = c("Bagged MARS using gCV Pruning", "40.16", "51.16", 
"36.4", "32.47", "25.45", "27.98", "26.41", "23.27", "28.59", 
"26.33", "36.45", "55.47", "45.46", "33.29", "41.91", "31.5", 
"31.64", "21.92", "29.35", "23.49", "29.32", "30.64", "32.05", 
"46.95"), V5 = c("Bayesian Generalized Linear Model", "38.43", 
"52.1", "36.74", "33.11", "24.98", "28.33", "25.9", "23.33", 
"29.04", "26.58", "35.23", "54.92", "44.84", "33.2", "41.44", 
"32.27", "31.6", "21.96", "28.94", "23.31", "29.32", "30.85", 
"31.39", "45.57"), V6 = c("Bayesian Regularized Neural Networks", 
"36.04", "50.2", "35.43", "32.39", "24.31", "27.84", "24.82", 
"22.52", "26.97", "25.14", "33.29", "53.37", "42.94", "31.03", 
"39.42", "30.7", "30.08", "21.32", "27.81", "22.36", "28.06", 
"29.34", "30.35", "43.84"), V7 = c("Bayesian Ridge Regression", 
"38.62", "51.54", "36.85", "32.74", "25.03", "28.01", "25.82", 
"23.16", "28.72", "26.65", "35.53", "55.13", "44.61", "33.03", 
"41.38", "31.74", "31.64", "21.65", "28.84", "23.28", "29.15", 
"31.05", "31.53", "45.7"), V8 = c("Boosted Generalized Linear Model", 
"43.54", "52.36", "39.77", "34.33", "27.37", "28.73", "26.55", 
"24.38", "30.94", "30.72", "39.88", "57.57", "46.36", "37.41", 
"45.41", "33.53", "32.73", "22.28", "28.77", "25.19", "31.15", 
"33.76", "35.7", "48.53"), V9 = c("Boosted Linear Model", "110.08", 
"78.7", "52.57", "39.61", "35.96", "35.48", "33.23", "33.92", 
"37.37", "51.98", "99.36", "215.31", "117.81", "67.24", "60.16", 
"40.15", "39.39", "36.3", "35.53", "32.66", "38.57", "57.07", 
"93.16", "159.24"), V10 = c("Boosted Smoothing Spline", "43.97", 
"51.77", "37.48", "33.33", "26.44", "28.51", "26.64", "23.95", 
"28.91", "28.04", "40.9", "62.15", "48.88", "36.86", "43.89", 
"32.58", "32.07", "22.97", "28.71", "24.32", "30.14", "32.08", 
"36.55", "52.15"), V11 = c("Conditional Inference Tree1", "39.82", 
"55.73", "38.41", "34.83", "27.2", "31.44", "29.94", "23.55", 
"29.34", "30.24", "39.86", "67.82", "51.5", "36.04", "42.72", 
"32.07", "34.13", "22.43", "30.4", "23.92", "31.59", "35.06", 
"34.57", "50.32"), V12 = c("Cubist", "33.5", "51.07", "33.97", 
"30.65", "23.59", "26.42", "25.03", "21.41", "27.43", "25.5", 
"30.89", "50.48", "40.23", "30.64", "39.35", "29.92", "29.78", 
"21.05", "27.52", "21.69", "27.85", "28.45", "30.92", "43.16"
), V13 = c("Elasticnet", "38.43", "52.09", "36.74", "33.11", 
"24.98", "28.33", "25.9", "23.34", "29.04", "26.59", "35.24", 
"54.92", "44.84", "33.2", "41.45", "32.27", "31.6", "21.96", 
"28.93", "23.31", "29.32", "30.86", "31.4", "45.57"), V14 = c("eXtreme Gradient Boosting1", 
"36.68", "52.85", "36.71", "32.11", "25.66", "28.42", "26.17", 
"21.12", "27.94", "27.52", "33.97", "54.64", "44.5", "34.29", 
"42.02", "32.23", "33.12", "21.84", "28", "22.19", "28.36", "30.83", 
"32.79", "43.7"), V15 = c("eXtreme Gradient Boosting2", "37.68", 
"51.46", "35.73", "30.87", "25.5", "28.02", "25.45", "22.34", 
"28.06", "26.55", "35.49", "57.21", "46.39", "35.52", "41.72", 
"31.16", "31.33", "21.14", "28.25", "22.51", "28.33", "30.09", 
"33.12", "47.95"), V16 = c("Gaussian Process", "38.42", "52.09", 
"36.74", "33.11", "24.97", "28.34", "25.89", "23.34", "29.03", 
"26.59", "35.24", "54.92", "44.86", "33.2", "41.44", "32.27", 
"31.61", "21.96", "28.93", "23.31", "29.32", "30.86", "31.4", 
"45.57"), V17 = c("Gaussian Process with Polynomial Kernel", 
"35.59", "49.85", "35.08", "31.03", "23.82", "27.04", "24.65", 
"22.14", "26.8", "24.69", "32.66", "52.76", "42.84", "31.22", 
"40.27", "29.82", "30.43", "21.23", "28.17", "21.69", "27.15", 
"29.19", "29.37", "43.69"), V18 = c("Gaussian Process with Radial Basis Function Kernel", 
"34.79", "49.54", "34.38", "31.47", "23.93", "26.79", "24.65", 
"21.86", "27.02", "25.13", "32.68", "53.85", "42.52", "31.67", 
"39.99", "30.79", "31.19", "21.94", "27.79", "21.96", "27.25", 
"29.62", "29.63", "43.6"), V19 = c("Generalized Linear Model", 
"38.43", "52.09", "36.74", "33.11", "24.98", "28.33", "25.9", 
"23.34", "29.04", "26.58", "35.23", "54.92", "44.83", "33.2", 
"41.44", "32.27", "31.6", "21.96", "28.94", "23.31", "29.32", 
"30.85", "31.39", "45.57"), V20 = c("Generalized Linear Model with Stepwise Feature Selection", 
"38.5", "51.69", "36.81", "32.97", "25.07", "28.17", "25.91", 
"23.44", "29.08", "26.59", "35.36", "55.09", "44.93", "33.1", 
"41.36", "32.14", "31.68", "21.91", "29", "23.41", "29.34", "30.97", 
"31.56", "45.64"), V21 = c("glmnet", "38.51", "51.71", "36.74", 
"32.94", "24.95", "28.17", "25.82", "23.25", "28.97", "26.62", 
"35.43", "54.99", "44.71", "33.18", "41.42", "31.99", "31.55", 
"21.8", "28.8", "23.31", "29.23", "30.92", "31.53", "45.65"), 
    V22 = c("Independent Component Regression", "64.48", "65.31", 
    "59.31", "47.77", "37.03", "37.93", "35.77", "32.59", "42.25", 
    "46.73", "63.67", "86.5", "68.29", "57.1", "63.21", "50.57", 
    "44.58", "28.25", "37.25", "35.92", "44.67", "51.94", "55.68", 
    "71.15"), V23 = c("k-Nearest Neighbors1", "53.75", "60.79", 
    "43.11", "37.76", "30.56", "31.38", "30.94", "26.4", "34.61", 
    "37.92", "50.99", "72.14", "56.45", "50.02", "49.67", "39.95", 
    "37.59", "24.82", "30.98", "27.38", "33.81", "41.64", "48.35", 
    "58.98"), V24 = c("k-Nearest Neighbors2", "53.66", "59.26", 
    "43.71", "37.13", "30.49", "31.28", "29.87", "25.62", "34.07", 
    "36.48", "50.61", "72.3", "55.43", "49.71", "50.18", "39", 
    "37.1", "24.92", "30.2", "27.55", "32.68", "40.49", "47.34", 
    "56.89"), V25 = c("L2 Regularized Support Vector Machine (dual) with Linear Kernel", 
    "38.87", "52.41", "36.85", "33.06", "25.07", "28.37", "25.66", 
    "23.36", "28.95", "26.93", "36.15", "55.55", "44.98", "33.42", 
    "41.68", "32.25", "31.76", "21.81", "28.85", "23.43", "29.42", 
    "31.17", "31.88", "46.16"), V26 = c("Least Angle Regression1", 
    "38.43", "52.09", "36.74", "33.11", "24.98", "28.33", "25.9", 
    "23.34", "29.04", "26.58", "35.23", "54.92", "44.83", "33.2", 
    "41.44", "32.27", "31.6", "21.96", "28.94", "23.31", "29.32", 
    "30.85", "31.39", "45.57"), V27 = c("Least Angle Regression2", 
    "38.54", "51.66", "36.75", "32.9", "24.97", "28.11", "25.81", 
    "23.24", "28.95", "26.63", "35.45", "55.02", "44.71", "33.17", 
    "41.42", "31.93", "31.55", "21.78", "28.8", "23.33", "29.23", 
    "30.91", "31.53", "45.65"), V28 = c("Linear Regression", 
    "38.43", "52.09", "36.74", "33.11", "24.98", "28.33", "25.9", 
    "23.34", "29.04", "26.58", "35.23", "54.92", "44.83", "33.2", 
    "41.44", "32.27", "31.6", "21.96", "28.94", "23.31", "29.32", 
    "30.85", "31.39", "45.57"), V29 = c("Linear Regression with Backwards Selection", 
    "45.75", "55.54", "41.96", "35.34", "29.44", "31.08", "28.69", 
    "25.84", "33.65", "30.81", "41.37", "59.3", "49.04", "38.46", 
    "47.73", "35.02", "34.48", "23.93", "31.67", "26.38", "33.15", 
    "35.01", "36.44", "52.08"), V30 = c("Linear Regression with Forward Selection", 
    "45.75", "55.54", "41.96", "35.34", "29.44", "31.08", "28.69", 
    "25.84", "33.65", "30.81", "41.37", "59.3", "49.04", "38.46", 
    "47.73", "35.02", "34.48", "23.93", "31.67", "26.38", "33.15", 
    "35.01", "36.44", "52.08"), V31 = c("Linear Regression with Stepwise Selection1", 
    "45.75", "55.54", "41.96", "35.34", "29.44", "31.08", "28.69", 
    "25.84", "33.65", "30.81", "41.37", "59.3", "49.04", "38.46", 
    "47.73", "35.02", "34.48", "23.93", "31.67", "26.38", "33.15", 
    "35.01", "36.44", "52.08"), V32 = c("Linear Regression with Stepwise Selection2", 
    "38.5", "51.69", "36.81", "32.97", "25.07", "28.17", "25.91", 
    "23.44", "29.08", "26.59", "35.36", "55.09", "44.93", "33.1", 
    "41.36", "32.14", "31.68", "21.91", "29", "23.41", "29.34", 
    "30.97", "31.56", "45.64"), V33 = c("Model Averaged Neural Network", 
    "35.6", "50.16", "34.29", "32", "23.78", "26.95", "24.27", 
    "21.88", "26.82", "24.81", "33.78", "55.53", "45.01", "31.83", 
    "40.06", "29.72", "30.83", "21.16", "27.19", "21.51", "27.06", 
    "29.64", "31.56", "46.37"), V34 = c("Monotone Multi-Layer Perceptron Neural Network", 
    "37.92", "52.03", "36.65", "33.14", "25.06", "28.29", "25.78", 
    "23.32", "29.12", "26.52", "34.95", "54.91", "45.1", "33.17", 
    "41.28", "32.07", "31.82", "21.68", "28.64", "23.21", "29.29", 
    "31.01", "31.56", "46.01"), V35 = c("Multi-Layer Perceptron1", 
    "35.31", "50.61", "33.84", "31.61", "23.45", "26.84", "25.01", 
    "21.85", "26.94", "25.11", "31.51", "51.96", "43.54", "31.39", 
    "39.99", "29.85", "30.68", "21.44", "27.65", "22.45", "27.57", 
    "28.98", "30.18", "43.1"), V36 = c("Multi-Layer Perceptron2", 
    "35.12", "50.64", "35.52", "33.06", "24.99", "27.72", "24.66", 
    "22.77", "27.88", "24.9", "32.6", "52.27", "43.67", "31.2", 
    "40.75", "30.76", "31.75", "21.52", "28.14", "22.29", "28.5", 
    "29.54", "29.28", "42.99"), V37 = c("Multi-Layer Perceptron, multiple layers", 
    "34.68", "51.31", "34.64", "32.11", "23.97", "27.32", "25.37", 
    "22.53", "28.16", "25.94", "32.22", "53.61", "42.42", "31.53", 
    "40.65", "31.05", "30.7", "21.49", "27.92", "22.87", "28.25", 
    "29.35", "30.83", "43.27"), V38 = c("Multi-Layer Perceptron, with multiple layers", 
    "35.61", "50.6", "33.6", "31.75", "23.08", "27.61", "25.32", 
    "22.33", "27.77", "25.28", "32.78", "53.73", "43.31", "31.95", 
    "40.73", "30.31", "30.22", "21.36", "27.79", "22.46", "28.16", 
    "29.55", "31.15", "44.56"), V39 = c("Multivariate Adaptive Regression Spline", 
    "41.68", "51.87", "37.06", "33.14", "26.23", "28.72", "26.66", 
    "23.88", "29.33", "27.44", "37.39", "55.9", "46.2", "33.84", 
    "43.5", "32.02", "32.38", "22.36", "29.85", "24.1", "29.72", 
    "31.12", "32.39", "48.54"), V40 = c("Multivariate Adaptive Regression Splines", 
    "41.42", "51.71", "36.81", "33", "26.06", "28.59", "26.65", 
    "24", "29.27", "27.24", "37.11", "55.89", "46.27", "33.95", 
    "43.38", "32.1", "32.32", "22.34", "29.79", "24.11", "29.67", 
    "31.13", "32.69", "47.84"), V41 = c("Negative Binomial Generalized Linear Model", 
    "42.81", "53.78", "37.91", "34.91", "27.94", "31.24", "29.2", 
    "26.38", "29.4", "27.99", "39.89", "61.41", "50.25", "36.51", 
    "41.83", "35.07", "34.05", "28.45", "33.86", "26.33", "30.55", 
    "33.29", "35.24", "52.61"), V42 = c("Neural Network", "36.54", 
    "50.38", "33.78", "31.81", "23.32", "26.9", "24.39", "21.96", 
    "26.82", "25", "34.25", "56.28", "45.03", "32.29", "40.4", 
    "29.68", "30.83", "21.53", "26.78", "21.95", "27.56", "30.16", 
    "31.93", "47.19"), V43 = c("Neural Networks with Feature Extraction", 
    "36.35", "50.06", "33.85", "32.87", "24.87", "27.84", "26.55", 
    "22.78", "27.77", "27.26", "34.9", "56.73", "45.47", "31.16", 
    "39.59", "30.23", "32.12", "21.46", "27.45", "22.29", "28.46", 
    "31.34", "32.86", "46.93"), V44 = c("Non-Convex Penalized Quantile Regression", 
    "37.43", "52.33", "37.1", "33.2", "25.48", "28.74", "26.56", 
    "23.77", "29.58", "26.53", "34.27", "54.56", "44.35", "32.63", 
    "41.49", "32.17", "32.18", "22.36", "29.55", "23.8", "29.82", 
    "30.94", "31.13", "45.12"), V45 = c("Non-Informative Model", 
    "116.8", "89.25", "70.83", "63.06", "57.13", "54.53", "52.35", 
    "55.06", "59.52", "74.86", "114.22", "217.39", "121.69", 
    "81.62", "76.36", "63.22", "60.7", "56.81", "56.55", "54.5", 
    "60.2", "73.41", "106.28", "160.14"), V46 = c("Non-Negative Least Squares", 
    "49.93", "55.84", "43.86", "37.28", "30.72", "30.73", "28.48", 
    "27.17", "34.78", "36.48", "46.07", "62.29", "51.04", "42.91", 
    "50.2", "37.64", "35.1", "24.09", "30.54", "28.43", "34.96", 
    "38.66", "41.77", "53.62"), V47 = c("partDSA", "83.16", "71.02", 
    "46.78", "45.63", "38.18", "38.31", "36.71", "36.6", "39.75", 
    "45.63", "70.28", "109.86", "77.89", "58", "57.51", "46.59", 
    "45.27", "37.45", "37.95", "35.83", "39.26", "48.09", "64.29", 
    "93.3"), V48 = c("Partial Least Squares1", "54.32", "59.04", 
    "44.77", "38.44", "29.87", "31.2", "27.05", "25.42", "32.47", 
    "35.41", "48.48", "68.73", "53.24", "45.09", "52.47", "36.36", 
    "35.39", "23.64", "29.63", "27.14", "33.29", "38.16", "41.31", 
    "57.75"), V49 = c("Penalized Linear Regression", "38.8", 
    "51.5", "36.85", "32.75", "25", "27.99", "25.77", "23.14", 
    "28.86", "26.74", "35.68", "55.19", "44.71", "33.23", "41.43", 
    "31.81", "31.51", "21.63", "28.73", "23.35", "29.15", "31.05", 
    "31.71", "45.76"), V50 = c("Principal Component Analysis", 
    "64.49", "65.31", "59.31", "47.77", "37.03", "37.93", "35.76", 
    "32.59", "42.25", "46.73", "63.67", "86.5", "68.29", "57.11", 
    "63.21", "50.57", "44.58", "28.25", "37.25", "35.92", "44.67", 
    "51.95", "55.68", "71.15"), V51 = c("Projection Pursuit Regression", 
    "34.91", "51.89", "35.77", "32.05", "24.03", "28.01", "25.69", 
    "22.3", "27.71", "25.21", "31.67", "53.37", "43.33", "30.54", 
    "40.49", "30.63", "29.74", "21.57", "28.22", "22.57", "27.85", 
    "29.51", "31.21", "43.45"), V52 = c("Quantile Random Forest", 
    "66.17", "72.68", "55.9", "48.02", "46.77", "46.3", "44.93", 
    "39.39", "45.11", "52.09", "64.06", "87.14", "76.56", "59.63", 
    "62.01", "49.56", "50.61", "42.47", "46.51", "42.12", "46.86", 
    "55.8", "60.88", "81.43"), V53 = c("Quantile Regression Neural Network", 
    "34.69", "52.56", "34.91", "32.67", "24.25", "27.15", "25.01", 
    "21.93", "27.1", "24.8", "31.22", "51.92", "42.94", "30.75", 
    "41.94", "29.29", "31.15", "21.29", "28.22", "22.59", "27.58", 
    "29.26", "29.66", "43.24"), V54 = c("Quantile Regression with LASSO penalty", 
    "37.43", "52.33", "37.1", "33.2", "25.48", "28.74", "26.56", 
    "23.77", "29.58", "26.53", "34.27", "54.56", "44.35", "32.63", 
    "41.49", "32.17", "32.18", "22.36", "29.55", "23.8", "29.82", 
    "30.94", "31.13", "45.12"), V55 = c("Random Forest1", "33.19", 
    "48.87", "34.21", "30.39", "23.81", "26.03", "24.46", "21.28", 
    "26.61", "25.54", "32.59", "52.32", "41.48", "31.5", "40.43", 
    "29.64", "30.39", "21.12", "26.81", "21.33", "27.66", "28.45", 
    "31.33", "41.45"), V56 = c("Random Forest by Randomization", 
    "32.62", "49.17", "34.55", "30.33", "23.75", "26.16", "24.8", 
    "21.11", "26.76", "25.76", "31.75", "51.01", "41.38", "30.85", 
    "39.93", "29.38", "30.04", "21.42", "27.2", "21.66", "27.56", 
    "28.25", "31.14", "41.01"), V57 = c("Relaxed Lasso", "38.35", 
    "53.28", "37.32", "34.07", "25.41", "29.35", "26.63", "24.04", 
    "29.51", "26.65", "34.69", "54.57", "45.78", "33.59", "41.95", 
    "33.59", "32.32", "22.92", "29.76", "23.78", "29.93", "31.12", 
    "30.89", "45.77"), V58 = c("Ridge Regression", "38.43", "52.09", 
    "36.74", "33.11", "24.98", "28.33", "25.9", "23.34", "29.04", 
    "26.59", "35.24", "54.92", "44.84", "33.2", "41.45", "32.27", 
    "31.6", "21.96", "28.93", "23.31", "29.32", "30.86", "31.4", 
    "45.57"), V59 = c("Self-Organizing Maps", "68.34", "67.63", 
    "56.9", "43.81", "35.01", "35.4", "34.82", "28.93", "39.39", 
    "52.58", "60.73", "97.42", "67.67", "59.62", "63.77", "46.68", 
    "42.6", "31.61", "38.82", "30.85", "40.32", "55.88", "54.07", 
    "75.36"), V60 = c("Sparse Partial Least Squares", "38.45", 
    "52.02", "36.76", "33.1", "24.99", "28.33", "25.9", "23.38", 
    "28.95", "26.59", "35.2", "54.93", "44.71", "33.1", "41.45", 
    "32.25", "31.61", "21.92", "28.91", "23.31", "29.27", "30.89", 
    "31.45", "45.54"), V61 = c("Stochastic Gradient Boosting", 
    "37.37", "50.46", "35.78", "31.25", "25.36", "27.88", "25.82", 
    "22.32", "28.03", "26.11", "34.56", "56.13", "44.63", "33.66", 
    "40.97", "31.02", "30.84", "21.53", "28.33", "22.32", "28.67", 
    "30.59", "33.43", "45.86"), V62 = c("Support Vector Machines with Linear Kernel", 
    "37.47", "52.19", "36.93", "33.15", "25.45", "28.63", "26.51", 
    "23.74", "29.36", "26.38", "34.23", "54.64", "44.38", "32.58", 
    "41.41", "32.23", "32.13", "22.21", "29.52", "23.74", "29.73", 
    "30.91", "30.95", "45.17"), V63 = c("Support Vector Machines with Polynomial Kernel", 
    "34.4", "50.5", "35.17", "31.49", "24.11", "27.06", "24.38", 
    "21.88", "26.39", "24.56", "31.42", "51.65", "41.79", "30.53", 
    "40.31", "29.6", "30.46", "20.81", "27.98", "21.54", "27.22", 
    "28.89", "29.2", "42.97"), V64 = c("Support Vector Machines with Radial Basis Function Kernel1", 
    "34.28", "50.18", "35.22", "31.37", "23.92", "27.09", "24.84", 
    "21.87", "26.76", "24.9", "31.87", "52.7", "41.91", "30.93", 
    "40.07", "30.12", "30.63", "20.58", "27.43", "21.89", "27.45", 
    "29.65", "29.46", "42.94"), V65 = c("Support Vector Machines with Radial Basis Function Kernel2", 
    "34.3", "50.18", "35.22", "31.37", "23.91", "27.1", "24.83", 
    "21.88", "26.75", "24.89", "31.9", "52.73", "41.92", "30.92", 
    "40.06", "30.09", "30.62", "20.58", "27.44", "21.88", "27.45", 
    "29.62", "29.45", "42.96"), V66 = c("Support Vector Machines with Radial Basis Function Kernel3", 
    "34.76", "50.36", "35.19", "31.44", "23.82", "26.97", "24.71", 
    "21.93", "26.69", "24.69", "32.06", "52.42", "42.01", "30.47", 
    "39.87", "29.67", "30.45", "20.58", "27.67", "21.6", "27.51", 
    "29.56", "29.34", "42.85"), V67 = c("The Bayesian lasso", 
    "38.4", "51.52", "36.85", "32.88", "25.04", "28.08", "25.93", 
    "23.27", "28.74", "26.61", "35.35", "54.93", "44.68", "33.05", 
    "41.33", "31.78", "31.69", "21.83", "28.88", "23.37", "29.14", 
    "30.97", "31.44", "45.66"), V68 = c("The lasso", "38.48", 
    "51.77", "36.73", "32.98", "24.95", "28.21", "25.85", "23.28", 
    "28.99", "26.6", "35.36", "54.95", "44.74", "33.17", "41.42", 
    "32.04", "31.58", "21.83", "28.83", "23.31", "29.24", "30.89", 
    "31.48", "45.6"), V69 = c("Tree Models from Genetic Algorithms", 
    "44.36", "59.09", "39.65", "37.51", "27.41", "28.46", "28.55", 
    "24.71", "28.34", "30.08", "39.06", "67.37", "51.24", "38.47", 
    "47.35", "32.36", "35.52", "21.08", "31.27", "27.15", "31.94", 
    "35.56", "36.52", "51.04"), V70 = c("Tree-Based Ensembles", 
    "59.16", "56.47", "40.37", "36.2", "29.12", "29.71", "28.92", 
    "25.56", "30.35", "34.5", "52.12", "82.58", "60.79", "46.2", 
    "48.32", "35.73", "35.17", "25.89", "28.99", "25.82", "31.53", 
    "37.76", "49.3", "67.16")), .Names = c("V1", "V2", "V3", 
"V4", "V5", "V6", "V7", "V8", "V9", "V10", "V11", "V12", "V13", 
"V14", "V15", "V16", "V17", "V18", "V19", "V20", "V21", "V22", 
"V23", "V24", "V25", "V26", "V27", "V28", "V29", "V30", "V31", 
"V32", "V33", "V34", "V35", "V36", "V37", "V38", "V39", "V40", 
"V41", "V42", "V43", "V44", "V45", "V46", "V47", "V48", "V49", 
"V50", "V51", "V52", "V53", "V54", "V55", "V56", "V57", "V58", 
"V59", "V60", "V61", "V62", "V63", "V64", "V65", "V66", "V67", 
"V68", "V69", "V70"), row.names = c(NA, -25L), class = "data.frame")
results <- as.data.frame(t(results))
names(results) <- c("Name", paste0("m", 1:24))

library(ggplot2)
library(reshape2)
results.m <- melt(results,id.vars="Name")

p <- ggplot(results.m, aes(variable, Name)) + 
  geom_tile(aes(fill = value),colour = "white") +
  scale_fill_gradient(low = "white", high = "steelblue")
p
It looks like before you transpose, the first row of results contains the column names. This messes up your data by turning everything into factors. If you read results in from a CSV file or similar, add header = TRUE when reading in the data so the column names are not included as data. – Marius
Yes, that is the case; it is working fine now. Thanks for the hint. – Reiso
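
That diagnosis also explains the error message itself: after as.data.frame(t(results)) the month columns are character (or factors in older R versions), and scale_fill_gradient is a continuous scale, so it rejects discrete values. For reference, a minimal sketch of an in-place fix that keeps the transpose approach from the question would be to coerce the month columns to numeric before melting:

# Coerce every column except Name to numeric; as.character() first also
# covers the case where the transpose produced factors.
results[, -1] <- lapply(results[, -1], function(x) as.numeric(as.character(x)))

results.m <- melt(results, id.vars = "Name")
ggplot(results.m, aes(variable, Name)) +
  geom_tile(aes(fill = value), colour = "white") +
  scale_fill_gradient(low = "white", high = "steelblue")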

1 Answer


The answer, following the hint in the comments:

library(data.table)   # fread() comes from data.table

# Read with header = TRUE so the method names become column names
# and the remaining cells stay numeric.
results <- as.data.frame(fread("nRMSE-Monthly-1hAhead.csv", header = TRUE, sep = ","))

# Append a month index (1-24) and name the new column "Month".
results <- cbind(results, seq(1, 24, 1))
names(results)[ncol(results)] <- "Month"

# Melt to long format: one row per (month, method) combination.
results.m <- melt(results, id.vars = "Month")

p <- ggplot(results.m, aes(Month, variable)) + 
  geom_tile(aes(fill = value)) +
  scale_fill_gradient(low = "red", high = "green")
p
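
With header = TRUE the method names are used as column names instead of being read in as a data row, so every cell fread returns stays numeric; after melt the value column is therefore numeric and the continuous fill scale no longer complains. A quick sanity check (not part of the original answer) is str(results.m), which should report value as numeric and variable as a factor of method names.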