Free Statistics

of Irreproducible Research!

Author's title

Author: *The author of this computation has been verified*
R Software Module: rwasp_regression_trees1.wasp
Title produced by software: Recursive Partitioning (Regression Trees)
Date of computation: Fri, 24 Dec 2010 14:54:19 +0000
Cite this page as follows: Statistical Computations at FreeStatistics.org, Office for Research Development and Education, URL https://freestatistics.org/blog/index.php?v=date/2010/Dec/24/t12932023368jthr4h3p27yh32.htm/, Retrieved Tue, 30 Apr 2024 02:34:06 +0000
Statistical Computations at FreeStatistics.org, Office for Research Development and Education, URL https://freestatistics.org/blog/index.php?pk=115039, Retrieved Tue, 30 Apr 2024 02:34:06 +0000
QR Codes:

Original text written by user:
IsPrivate? No (this computation is public)
User-defined keywords:
Estimated Impact: 124
Family? (F = Feedback message, R = changed R code, M = changed R Module, P = changed Parameters, D = changed Data)
-     [Univariate Data Series] [] [2008-12-08 19:22:39] [d2d412c7f4d35ffbf5ee5ee89db327d4]
- RMP   [ARIMA Backward Selection] [] [2010-12-16 20:56:51] [2ae6beac29e6e5c076a37b2886f2a670]
- RMPD      [Recursive Partitioning (Regression Trees)] [Workshop 7 Costs ...] [2010-12-24 14:54:19] [21ba15a181629d0f70ea456ec39a7075] [Current]
Feedback Forum

Post a new message
Dataseries X:
1	162556	1081	213118	6282929
1	29790	309	81767	4324047
1	87550	458	153198	4108272
0	84738	588	-26007	-1212617
1	54660	299	126942	1485329
1	42634	156	157214	1779876
0	40949	481	129352	1367203
1	42312	323	234817	2519076
1	37704	452	60448	912684
1	16275	109	47818	1443586
0	25830	115	245546	1220017
0	12679	110	48020	984885
1	18014	239	-1710	1457425
0	43556	247	32648	-572920
1	24524	497	95350	929144
0	6532	103	151352	1151176
0	7123	109	288170	790090
1	20813	502	114337	774497
1	37597	248	37884	990576
0	17821	373	122844	454195
1	12988	119	82340	876607
1	22330	84	79801	711969
0	13326	102	165548	702380
0	16189	295	116384	264449
0	7146	105	134028	450033
0	15824	64	63838	541063
1	26088	267	74996	588864
0	11326	129	31080	-37216
0	8568	37	32168	783310
0	14416	361	49857	467359
1	3369	28	87161	688779
1	11819	85	106113	608419
1	6620	44	80570	696348
1	4519	49	102129	597793
0	2220	22	301670	821730
0	18562	155	102313	377934
0	10327	91	88577	651939
1	5336	81	112477	697458
1	2365	79	191778	700368
0	4069	145	79804	225986
0	7710	816	128294	348695
0	13718	61	96448	373683
0	4525	226	93811	501709
0	6869	105	117520	413743
0	4628	62	69159	379825
1	3653	24	101792	336260
1	1265	26	210568	636765
1	7489	322	136996	481231
0	4901	84	121920	469107
0	2284	33	76403	211928
1	3160	108	108094	563925
1	4150	150	134759	511939
1	7285	115	188873	521016
1	1134	162	146216	543856
1	4658	158	156608	329304
0	2384	97	61348	423262
0	3748	9	50350	509665
0	5371	66	87720	455881
0	1285	107	99489	367772
1	9327	101	87419	406339
1	5565	47	94355	493408
0	1528	38	60326	232942
1	3122	34	94670	416002
1	7317	84	82425	337430
0	2675	79	59017	361517
0	13253	947	90829	360962
0	880	74	80791	235561
1	2053	53	100423	408247
0	1424	94	131116	450296
1	4036	63	100269	418799
1	3045	58	27330	247405
0	5119	49	39039	378519
0	1431	34	106885	326638
0	554	11	79285	328233
0	1975	35	118881	386225
1	1286	17	77623	283662
0	1012	47	114768	370225
0	810	43	74015	269236
0	1280	117	69465	365732
1	666	171	117869	420383
0	1380	26	60982	345811
1	4608	73	90131	431809
0	876	59	138971	418876
0	814	18	39625	297476
0	514	15	102725	416776
1	5692	72	64239	357257
0	3642	86	90262	458343
0	540	14	103960	388386
0	2099	64	106611	358934
0	567	11	103345	407560
0	2001	52	95551	392558
1	2949	41	82903	373177
0	2253	99	63593	428370
1	6533	75	126910	369419
0	1889	45	37527	358649
1	3055	43	60247	376641
0	272	8	112995	467427
1	1414	198	70184	364885
0	2564	22	130140	436230
1	1383	11	73221	329118
1	1261	33	76114	317365
0	975	23	90534	286849
0	3366	80	108479	376685
0	576	18	113761	407198
0	1306	28	68696	377772
0	746	23	71561	271483
1	3192	60	59831	153661
1	2045	20	97890	513294
0	5477	59	101481	324881
1	1932	36	72954	264512
0	936	30	67939	420968
1	3437	47	48022	129302
0	5131	71	86111	191521
1	2397	14	74020	268673
1	1389	9	57530	353179
0	1503	39	56364	354624
0	402	26	84990	363713
0	2239	21	88590	456657
1	2234	16	77200	211742
0	837	69	61262	338381
0	10579	92	110309	418530
0	875	14	67000	351483
0	1395	103	93099	372928
1	1659	29	107577	485538
1	2647	37	62920	279268
1	3294	23	75832	219060
0	0	0	60720	325560
0	94	7	60793	325314
0	422	28	57935	322046
0	0	0	60720	325560
0	34	8	60630	325599
0	1558	63	55637	377028
0	0	0	60720	325560
0	43	3	60887	323850
0	0	0	60720	325560
0	316	9	60505	331514
0	115	13	60945	325632
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	389	14	58990	322265
0	0	0	60720	325560
0	1002	15	56750	325906
0	36	3	60894	325985
0	460	15	63346	346145
0	309	11	56535	325898
0	0	0	60720	325560
0	9	6	60835	325356
0	0	0	60720	325560
0	14	1	61016	325930
0	520	10	58650	318020
0	1766	73	60438	326389
0	0	0	60720	325560
0	458	11	58625	302925
0	20	3	60938	325540
0	0	0	60720	325560
0	0	0	60720	325560
0	98	2	61490	326736
0	405	7	60845	340580
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	483	27	60830	331828
0	454	51	63261	323299
0	0	0	60720	325560
0	0	0	60720	325560
0	757	19	45689	387722
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	36	4	61564	324598
0	0	0	60720	325560
0	203	9	61938	328726
0	0	0	60720	325560
0	90	8	60951	325043
0	0	0	60720	325560
1	71	1	60745	325806
0	0	0	60720	325560
0	0	0	60720	325560
0	972	34	71642	387732
1	531	10	71641	349729
0	604	38	55792	332202
1	283	10	71873	305442
1	23	5	62555	329537
1	638	14	60370	327055
1	699	16	64873	356245
0	149	5	62041	328451
0	226	5	65745	307062
0	0	0	60720	325560
0	275	4	59500	331345
0	0	0	60720	325560
0	141	6	61630	331824
0	0	0	60720	325560
0	28	2	60890	325685
1	0	0	60720	325560
1	2566	80	113521	404480
1	0	0	60720	325560
1	0	0	60720	325560
1	472	20	80045	318314
1	0	0	60720	325560
1	0	0	60720	325560
1	0	0	60720	325560
1	203	27	50804	311807
1	496	17	87390	337724
1	10	2	61656	326431
1	63	4	65688	327556
1	0	0	60720	325560
1	1136	32	48522	356850
1	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	267	32	57640	322741
0	474	20	61977	310902
0	534	7	62620	324295
0	0	0	60720	325560
0	15	8	60831	326156
0	397	28	60646	326960
0	0	0	60720	325560
0	1061	20	56225	333411
0	288	4	60510	297761
0	0	0	60720	325560
0	3	2	60698	325536
0	0	0	60720	325560
0	20	2	60805	325762
0	278	26	61404	327957
0	0	0	60720	325560
0	0	0	60720	325560
0	192	4	65276	318521
0	0	0	60720	325560
0	317	9	63915	319775
0	0	0	60720	325560
0	0	0	60720	325560
0	368	17	61686	332128
0	0	0	60720	325560
0	2	1	60743	325486
0	0	0	60720	325560
0	53	6	60349	325838
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	94	3	61360	331767
0	0	0	60720	325560
0	24	8	59818	324523
0	2332	4	72680	339995
1	0	0	60720	325560
1	0	0	60720	325560
0	131	11	61808	319582
1	0	0	60720	325560
1	0	0	60720	325560
0	206	9	53110	307245
1	0	0	60720	325560
0	167	2	64245	317967
0	622	73	73007	331488
0	885	85	82732	335452
0	0	0	60720	325560
0	365	8	54820	334184
0	364	35	47705	313213
1	0	0	60720	325560
0	0	0	60720	325560
1	0	0	60720	325560
1	0	0	60720	325560
0	226	12	72835	348678
0	307	15	58856	328727
1	0	0	60720	325560
0	0	0	60720	325560
1	0	0	60720	325560
0	188	11	77655	387978
1	0	0	60720	325560
0	138	6	69817	336704
1	0	0	60720	325560
1	0	0	60720	325560
1	0	0	60720	325560
0	125	12	60798	322076
0	0	0	60720	325560
0	282	30	62452	334272
0	335	33	64175	338197
0	0	0	60720	325560
1	813	82	67440	321024
0	176	28	68136	322145
1	0	0	60720	325560
0	0	0	60720	325560
0	249	72	56726	323351
0	0	0	60720	325560
0	333	13	70811	327748
0	0	0	60720	325560
1	0	0	60720	325560
0	30	4	62045	328157
1	0	0	60720	325560
0	249	62	54323	311594
1	0	0	60720	325560
0	165	24	62841	335962
0	453	21	81125	372426
0	0	0	60720	325560
0	53	14	59506	319844
1	382	21	59365	355822
0	0	0	60720	325560
1	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
1	30	4	60798	324047
0	290	2	58790	311464
0	0	0	60720	325560
1	0	0	60720	325560
0	366	53	61808	353417
0	2	9	60735	325590
1	0	0	60720	325560
1	209	13	64016	328576
0	384	22	54683	326126
1	0	0	60720	325560
1	0	0	60720	325560
0	365	83	87192	369376
1	0	0	60720	325560
1	49	8	64107	332013
0	3	4	60761	325871
0	133	14	65990	342165
0	32	1	59988	324967
0	368	17	61167	314832
0	1	6	60719	325557
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
1	0	0	60720	325560
0	0	0	60720	325560
0	22	2	60722	322649
1	0	0	60720	325560
1	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	96	5	60379	324598
0	1	2	60727	325567
0	0	0	60720	325560
0	81	7	60925	324005
0	0	0	60720	325560
0	26	1	60896	325748
0	125	13	59734	323385
0	304	15	62969	315409
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	119	6	59118	312275
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	312	14	58598	320576
0	60	10	61124	325246
0	587	12	59595	332961
0	135	2	62065	323010
0	0	0	60720	325560
0	0	0	60720	325560
0	514	52	78780	345253
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	1	4	60722	325559
0	0	0	60720	325560
0	0	0	60720	325560
1	58	3	61600	319634
0	180	11	59635	319951
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	448	40	59781	318519
0	227	9	76644	343222
0	174	1	64820	317234
0	0	0	60720	325560
0	0	0	60720	325560
0	121	24	56178	314025
0	607	11	60436	320249
0	0	0	60720	325560
0	0	0	60720	325560
0	0	0	60720	325560
0	530	60	73433	349365
0	571	80	41477	289197
0	0	0	60720	325560
0	78	16	62700	329245
0	2489	40	67804	240869
0	131	6	59661	327182
0	923	8	58620	322876
0	72	3	60398	323117
0	572	16	58580	306351
0	397	10	62710	335137
0	450	8	59325	308271
0	622	7	60950	301731
0	694	8	68060	382409
1	3425	12	83620	279230
0	562	13	58456	298731
0	4917	42	52811	243650
1	1442	118	121173	532682
0	529	9	63870	319771
1	2126	138	21001	171493
0	1061	5	70415	347262
0	776	9	64230	343945
0	611	8	59190	311874
1	1526	25	69351	302211
0	592	7	64270	316708
0	1182	13	70694	333463
0	621	16	68005	344282
0	989	11	58930	319635
0	438	11	58320	301186
0	726	3	69980	300381
0	1303	61	69863	318765
1	6341	24	63255	286146
1	1164	17	57320	306844
1	3310	33	75230	307705
0	1366	7	79420	312448
0	965	3	73490	299715
0	3256	66	35250	373399
1	1135	17	62285	299446
0	1270	26	69206	325586
0	661	3	65920	291221
0	1013	2	69770	261173
0	2844	67	72683	255027
1	11528	70	-14545	-78375
0	6526	26	55830	-58143
0	2264	24	55174	227033
1	4461	94	67038	235098
0	3999	30	51252	21267
0	35624	223	157278	238675
0	9252	48	79510	197687
0	15236	90	77440	418341
0	18073	180	27284	-297706




Summary of computational transaction
Raw Input: view raw input (R code)
Raw Output: view raw output of R engine
Computing time: 8 seconds
R Server: 'Gwilym Jenkins' @ 72.249.127.135
R Framework error message
The field 'Names of X columns' contains a hard return which cannot be interpreted.
Please, resubmit your request without hard returns in the 'Names of X columns'.

\begin{tabular}{lllllllll}
\hline
Summary of computational transaction \tabularnewline
Raw Input & view raw input (R code)  \tabularnewline
Raw Output & view raw output of R engine  \tabularnewline
Computing time & 8 seconds \tabularnewline
R Server & 'Gwilym Jenkins' @ 72.249.127.135 \tabularnewline
R Framework error message & 
The field 'Names of X columns' contains a hard return which cannot be interpreted.
Please, resubmit your request without hard returns in the 'Names of X columns'.
\tabularnewline \hline \end{tabular} %Source: https://freestatistics.org/blog/index.php?pk=115039&T=0

[TABLE]
[ROW][C]Summary of computational transaction[/C][/ROW]
[ROW][C]Raw Input[/C][C]view raw input (R code) [/C][/ROW]
[ROW][C]Raw Output[/C][C]view raw output of R engine [/C][/ROW]
[ROW][C]Computing time[/C][C]8 seconds[/C][/ROW]
[ROW][C]R Server[/C][C]'Gwilym Jenkins' @ 72.249.127.135[/C][/ROW]
[ROW][C]R Framework error message[/C][C]
The field 'Names of X columns' contains a hard return which cannot be interpreted.
Please, resubmit your request without hard returns in the 'Names of X columns'.
[/C][/ROW] [/TABLE] Source: https://freestatistics.org/blog/index.php?pk=115039&T=0

Globally Unique Identifier (entire table): ba.freestatistics.org/blog/index.php?pk=115039&T=0

As an alternative you can also use a QR Code:  

The GUIDs for individual cells are displayed in the table below:

Summary of computational transaction
Raw Input: view raw input (R code)
Raw Output: view raw output of R engine
Computing time: 8 seconds
R Server: 'Gwilym Jenkins' @ 72.249.127.135
R Framework error message
The field 'Names of X columns' contains a hard return which cannot be interpreted.
Please, resubmit your request without hard returns in the 'Names of X columns'.







10-Fold Cross Validation
	Prediction (training)	Prediction (testing)
Actual	C1	C2	CV	C1	C2	CV
C1	1584	365	0.8127	176	35	0.8341
C2	16	1919	0.9917	4	211	0.9814
Overall	-	-	0.9019	-	-	0.9085

\begin{tabular}{lllllllll}
\hline
10-Fold Cross Validation \tabularnewline
 & Prediction (training) & Prediction (testing) \tabularnewline
Actual & C1 & C2 & CV & C1 & C2 & CV \tabularnewline
C1 & 1584 & 365 & 0.8127 & 176 & 35 & 0.8341 \tabularnewline
C2 & 16 & 1919 & 0.9917 & 4 & 211 & 0.9814 \tabularnewline
Overall & - & - & 0.9019 & - & - & 0.9085 \tabularnewline
\hline
\end{tabular}
%Source: https://freestatistics.org/blog/index.php?pk=115039&T=1

[TABLE]
[ROW][C]10-Fold Cross Validation[/C][/ROW]
[ROW][C][/C][C]Prediction (training)[/C][C]Prediction (testing)[/C][/ROW]
[ROW][C]Actual[/C][C]C1[/C][C]C2[/C][C]CV[/C][C]C1[/C][C]C2[/C][C]CV[/C][/ROW]
[ROW][C]C1[/C][C]1584[/C][C]365[/C][C]0.8127[/C][C]176[/C][C]35[/C][C]0.8341[/C][/ROW]
[ROW][C]C2[/C][C]16[/C][C]1919[/C][C]0.9917[/C][C]4[/C][C]211[/C][C]0.9814[/C][/ROW]
[ROW][C]Overall[/C][C]-[/C][C]-[/C][C]0.9019[/C][C]-[/C][C]-[/C][C]0.9085[/C][/ROW]
[/TABLE]
Source: https://freestatistics.org/blog/index.php?pk=115039&T=1

Globally Unique Identifier (entire table): ba.freestatistics.org/blog/index.php?pk=115039&T=1

As an alternative you can also use a QR Code:  

The GUIDs for individual cells are displayed in the table below:

10-Fold Cross Validation
	Prediction (training)	Prediction (testing)
Actual	C1	C2	CV	C1	C2	CV
C1	1584	365	0.8127	176	35	0.8341
C2	16	1919	0.9917	4	211	0.9814
Overall	-	-	0.9019	-	-	0.9085







Confusion Matrix (predicted in columns / actuals in rows)
	C1	C2
C1	175	41
C2	0	215

\begin{tabular}{lllllllll}
\hline
Confusion Matrix (predicted in columns / actuals in rows) \tabularnewline
 & C1 & C2 \tabularnewline
C1 & 175 & 41 \tabularnewline
C2 & 0 & 215 \tabularnewline
\hline
\end{tabular}
%Source: https://freestatistics.org/blog/index.php?pk=115039&T=2

[TABLE]
[ROW][C]Confusion Matrix (predicted in columns / actuals in rows)[/C][/ROW]
[ROW][C][/C][C]C1[/C][C]C2[/C][/ROW]
[ROW][C]C1[/C][C]175[/C][C]41[/C][/ROW]
[ROW][C]C2[/C][C]0[/C][C]215[/C][/ROW]
[/TABLE]
Source: https://freestatistics.org/blog/index.php?pk=115039&T=2

Globally Unique Identifier (entire table): ba.freestatistics.org/blog/index.php?pk=115039&T=2

As an alternative you can also use a QR Code:  

The GUIDs for individual cells are displayed in the table below:

Confusion Matrix (predicted in columns / actuals in rows)
	C1	C2
C1	175	41
C2	0	215



Parameters (Session):
par1 = 2 ; par2 = quantiles ; par3 = 2 ; par4 = yes ;
Parameters (R input):
par1 = 2 ; par2 = quantiles ; par3 = 2 ; par4 = yes ;
R code (references can be found in the software module):
# Recursive partitioning (regression/classification trees) module.
# NOTE(review): y and par1..par4 are injected by the FreeStatistics.org R
# framework before this script runs; they are not defined in this file.
library(party)
library(Hmisc)
# par1 = index of the response column; par3 = number of classes to form.
par1 <- as.numeric(par1)
par3 <- as.numeric(par3)
# The framework supplies the data variables-in-rows in y; transpose to
# the usual observations-in-rows layout.
x <- data.frame(t(y))
# Echoed to the R transcript for diagnostics (no effect on the result).
is.data.frame(x)
# Drop observations with a missing response.
x <- x[!is.na(x[,par1]),]
k <- length(x[1,])   # number of columns (variables)
n <- length(x[,1])   # number of rows (observations)
# Echo the response column name and its values to the transcript.
colnames(x)[par1]
x[,par1]
# K-means discretization branch: cluster the response column into par3
# groups, relabel the clusters C1..Cpar3 in ascending order of cluster
# center, and replace the response with the resulting factor.
# Fixes: seq_len() instead of 1:par3 (safe sequences), paste0() idiom.
if (par2 == 'kmeans') {
cl <- kmeans(x[,par1], par3)
print(cl)
# Pair each cluster center with its original cluster id, then sort by
# center so that C1 is the cluster with the smallest center.
clm <- matrix(cbind(cl$centers,seq_len(par3)),ncol=2)
clm <- clm[sort.list(clm[,1]),]
for (i in seq_len(par3)) {
cl$cluster[cl$cluster==clm[i,2]] <- paste0('C',i)
}
cl$cluster <- as.factor(cl$cluster)
print(cl$cluster)
x[,par1] <- cl$cluster
}
# Quantile discretization branch: cut2() from Hmisc splits the response
# into par3 groups of (approximately) equal size and returns a factor.
if (par2 == 'quantiles') {
x[,par1] <- cut2(x[,par1],g=par3)
}
# Hierarchical (centroid) clustering discretization branch: cluster the
# squared distances of the response, cut the tree into par3 groups, and
# relabel the groups C1..Cpar3 in ascending order of group mean.
if (par2 == 'hclust') {
hc <- hclust(dist(x[,par1])^2, 'cen')
print(hc)
memb <- cutree(hc, k = par3)
# Per-group means via vapply: avoids growing a vector in a loop and
# fixes the original `for (i in 2:par3)` construction, which iterated
# c(2, 1) and produced a wrong-length result when par3 == 1.
dum <- vapply(seq_len(par3), function(i) mean(x[memb==i,par1]), numeric(1))
# Pair each group mean with its group id and sort by mean so that C1 is
# the group with the smallest mean.
hcm <- matrix(cbind(dum,seq_len(par3)),ncol=2)
hcm <- hcm[sort.list(hcm[,1]),]
for (i in seq_len(par3)) {
memb[memb==hcm[i,2]] <- paste0('C',i)
}
memb <- as.factor(memb)
print(memb)
x[,par1] <- memb
}
# Equal-width discretization branch: split the numeric response into
# par3 equally wide intervals labelled C1..Cpar3.
if (par2=='equal') {
bins <- cut(as.numeric(x[,par1]),par3,labels=paste0('C',1:par3))
x[,par1] <- as.factor(bins)
}
# Echo the class distribution and column metadata to the transcript.
table(x[,par1])
colnames(x)
colnames(x)[par1]
x[,par1]
# No discretization requested: fit a conditional-inference regression
# tree on the raw (numeric) response against all other columns.
if (par2 == 'none') {
m <- ctree(as.formula(paste(colnames(x)[par1],' ~ .',sep='')),data = x)
}
# Load the HTML-table helper functions (table.start, table.element, ...).
# NOTE(review): 'createtable' is a server-side workspace file; its
# contents are not visible here — assumed to define the table.* helpers.
load(file='createtable')
if (par2 != 'none') {
# Classification tree on the discretized (factor) response.
m <- ctree(as.formula(paste0('as.factor(',colnames(x)[par1],') ~ .')),data = x)
# Optional 10-fold cross validation summary table (par4 == 'yes').
# Fixes vs original: replace=TRUE instead of T, seq_len() instead of
# 1:par3, paste0() idiom. Output is unchanged.
if (par4=='yes') {
a<-table.start()
a<-table.row.start(a)
a<-table.element(a,'10-Fold Cross Validation',3+2*par3,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'',1,TRUE)
a<-table.element(a,'Prediction (training)',par3+1,TRUE)
a<-table.element(a,'Prediction (testing)',par3+1,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'Actual',1,TRUE)
for (jjj in seq_len(par3)) a<-table.element(a,paste0('C',jjj),1,TRUE)
a<-table.element(a,'CV',1,TRUE)
for (jjj in seq_len(par3)) a<-table.element(a,paste0('C',jjj),1,TRUE)
a<-table.element(a,'CV',1,TRUE)
a<-table.row.end(a)
# Ten resampling rounds: each round fits the tree on a ~90% sample
# (ind==1) and accumulates in-sample and held-out (ind==2) predictions.
for (i in 1:10) {
ind <- sample(2, nrow(x), replace=TRUE, prob=c(0.9,0.1))
m.ct <- ctree(as.formula(paste0('as.factor(',colnames(x)[par1],') ~ .')),data =x[ind==1,])
if (i==1) {
m.ct.i.pred <- predict(m.ct, newdata=x[ind==1,])
m.ct.i.actu <- x[ind==1,par1]
m.ct.x.pred <- predict(m.ct, newdata=x[ind==2,])
m.ct.x.actu <- x[ind==2,par1]
} else {
# NOTE(review): c() on factors yields integer level codes; table()
# below still cross-tabulates consistently because the level order
# is the same in every round — confirm if levels can ever differ.
m.ct.i.pred <- c(m.ct.i.pred,predict(m.ct, newdata=x[ind==1,]))
m.ct.i.actu <- c(m.ct.i.actu,x[ind==1,par1])
m.ct.x.pred <- c(m.ct.x.pred,predict(m.ct, newdata=x[ind==2,]))
m.ct.x.actu <- c(m.ct.x.actu,x[ind==2,par1])
}
}
# Training confusion table, per-class recall, and overall accuracy.
print(m.ct.i.tab <- table(m.ct.i.actu,m.ct.i.pred))
numer <- 0
for (i in seq_len(par3)) {
print(m.ct.i.tab[i,i] / sum(m.ct.i.tab[i,]))
numer <- numer + m.ct.i.tab[i,i]
}
print(m.ct.i.cp <- numer / sum(m.ct.i.tab))
# Testing (held-out) confusion table, per-class recall, and accuracy.
print(m.ct.x.tab <- table(m.ct.x.actu,m.ct.x.pred))
numer <- 0
for (i in seq_len(par3)) {
print(m.ct.x.tab[i,i] / sum(m.ct.x.tab[i,]))
numer <- numer + m.ct.x.tab[i,i]
}
print(m.ct.x.cp <- numer / sum(m.ct.x.tab))
# One table row per class: training counts + CV recall, then testing.
for (i in seq_len(par3)) {
a<-table.row.start(a)
a<-table.element(a,paste0('C',i),1,TRUE)
for (jjj in seq_len(par3)) a<-table.element(a,m.ct.i.tab[i,jjj])
a<-table.element(a,round(m.ct.i.tab[i,i]/sum(m.ct.i.tab[i,]),4))
for (jjj in seq_len(par3)) a<-table.element(a,m.ct.x.tab[i,jjj])
a<-table.element(a,round(m.ct.x.tab[i,i]/sum(m.ct.x.tab[i,]),4))
a<-table.row.end(a)
}
# Final row: overall training and testing accuracy.
a<-table.row.start(a)
a<-table.element(a,'Overall',1,TRUE)
for (jjj in seq_len(par3)) a<-table.element(a,'-')
a<-table.element(a,round(m.ct.i.cp,4))
for (jjj in seq_len(par3)) a<-table.element(a,'-')
a<-table.element(a,round(m.ct.x.cp,4))
a<-table.row.end(a)
a<-table.end(a)
table.save(a,file='mytable3.tab')
}
}
# Print the fitted tree object, then render the tree to test1.png.
m
bitmap(file='test1.png')
plot(m)
dev.off()
# Plot the response grouped by terminal node: where() (party) returns
# the id of the terminal node each observation falls into.
bitmap(file='test1a.png')
plot(x[,par1] ~ as.factor(where(m)),main='Response by Terminal Node',xlab='Terminal Node',ylab='Response')
dev.off()
# Regression case: tabulate actuals, in-sample forecasts, and residuals.
if (par2 == 'none') {
forec <- predict(m)
result <- as.data.frame(cbind(x[,par1],forec,x[,par1]-forec))
colnames(result) <- c('Actuals','Forecasts','Residuals')
print(result)
}
# Classification case: print actual/predicted pairs and build the
# confusion matrix used by the plots and tables below.
# NOTE(review): cbind() on factors prints integer level codes, not labels.
if (par2 != 'none') {
print(cbind(as.factor(x[,par1]),predict(m)))
myt <- table(as.factor(x[,par1]),predict(m))
print(myt)
}
# Diagnostic plots, written to test2.png.
bitmap(file='test2.png')
if(par2=='none') {
# Regression: 2x2 panel of density plots plus actual-vs-predicted.
op <- par(mfrow=c(2,2))
plot(density(result$Actuals),main='Kernel Density Plot of Actuals')
plot(density(result$Residuals),main='Kernel Density Plot of Residuals')
plot(result$Forecasts,result$Actuals,main='Actuals versus Predictions',xlab='Predictions',ylab='Actuals')
plot(density(result$Forecasts),main='Kernel Density Plot of Predictions')
par(op)   # restore previous par() settings
}
if(par2!='none') {
# Classification: mosaic plot of the confusion matrix.
plot(myt,main='Confusion Matrix',xlab='Actual',ylab='Predicted')
}
dev.off()
# Regression case: emit a goodness-of-fit summary and a full listing of
# actuals, forecasts, and residuals as HTML-table fragments.
if (par2 == 'none') {
detcoef <- cor(result$Forecasts,result$Actuals)
# Goodness-of-fit table: correlation, R-squared, RMSE.
a<-table.start()
a<-table.row.start(a)
a<-table.element(a,'Goodness of Fit',2,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'Correlation',1,TRUE)
a<-table.element(a,round(detcoef,4))
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'R-squared',1,TRUE)
a<-table.element(a,round(detcoef*detcoef,4))
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'RMSE',1,TRUE)
a<-table.element(a,round(sqrt(mean((result$Residuals)^2)),4))
a<-table.row.end(a)
a<-table.end(a)
table.save(a,file='mytable1.tab')
# Per-observation table: one row per case with actual/forecast/residual.
a<-table.start()
a<-table.row.start(a)
a<-table.element(a,'Actuals, Predictions, and Residuals',4,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'#',header=TRUE)
a<-table.element(a,'Actuals',header=TRUE)
a<-table.element(a,'Forecasts',header=TRUE)
a<-table.element(a,'Residuals',header=TRUE)
a<-table.row.end(a)
# seq_along() instead of 1:length() so an empty result is a no-op
# rather than an erroneous c(1, 0) iteration.
for (i in seq_along(result$Actuals)) {
a<-table.row.start(a)
a<-table.element(a,i,header=TRUE)
a<-table.element(a,result$Actuals[i])
a<-table.element(a,result$Forecasts[i])
a<-table.element(a,result$Residuals[i])
a<-table.row.end(a)
}
a<-table.end(a)
table.save(a,file='mytable.tab')
}
# Classification case: emit the confusion matrix (predictions in
# columns, actuals in rows) as an HTML-table fragment.
# Fixes: seq_len() instead of 1:par3, paste0() idiom; output unchanged.
if (par2 != 'none') {
a<-table.start()
a<-table.row.start(a)
a<-table.element(a,'Confusion Matrix (predicted in columns / actuals in rows)',par3+1,TRUE)
a<-table.row.end(a)
# Header row: blank corner cell then column labels C1..Cpar3.
a<-table.row.start(a)
a<-table.element(a,'',1,TRUE)
for (i in seq_len(par3)) {
a<-table.element(a,paste0('C',i),1,TRUE)
}
a<-table.row.end(a)
# One row per actual class with the counts from myt.
for (i in seq_len(par3)) {
a<-table.row.start(a)
a<-table.element(a,paste0('C',i),1,TRUE)
for (j in seq_len(par3)) {
a<-table.element(a,myt[i,j])
}
a<-table.row.end(a)
}
a<-table.end(a)
table.save(a,file='mytable2.tab')
}