实验三决策树算法实验实验报告Word文件下载.docx
《实验三决策树算法实验实验报告Word文件下载.docx》由会员分享,可在线阅读,更多相关《实验三决策树算法实验实验报告Word文件下载.docx(11页珍藏版)》请在冰豆网上搜索。
(8)Foreachtest_attribute的已知值v//划分samples;
(9)由节点N分出一个对应test_attribute=v的分支;
(10)令Sv为samples中test_attribute=v的样本集合;
//一个划分块
(11)If Sv为空then
(12)加上一个叶节点,以samples中最普遍的类标记;
(13)Else加入一个由Decision_Tree(Sv,attribute_list-test_attribute)返回节点值。
(2)实验数据预处理
Age:
30岁以下标记为“1”;
30岁以上50岁以下标记为“2”;
50岁以上标记为“3”。
Sex:
FEMALE----“1”;
MALE----“2”
Region:
INNER
CITY----“1”;
TOWN----“2”;
RURAL----“3”;
SUBURBAN----“4”
Income:
5000~2万----“1”;
2万~4万----“2”;
4万以上----“3”
Married
Children
Car
Mortgage
Pep:
以上五个条件,若为“是”标记为“1”,若为“否”标记为“2”。
Age sex region income married children car mortgage pep
121121122
121122221
214121221
211112222
121112222
121121211
212112112
211121121
213122121
212221222
221222211
212211211
221212212
111212221
321211122
111211121
113222121
312212221
323311121
322312112
313311221
321312122
321311111
311312112
313312222
324312211
313322112
(3)Matlab语句:
[TreeRulesMatrix]=DecisionTree(DataSet,AttributName);
六、实验结果:
实验程序:
% NOTE(review): this listing was extracted from a word-processor document and
% is badly line-wrapped -- statements, numeric literals and string literals
% are split across lines (e.g. "ifnargin<" / "1", the error() string, and the
% str2num expression below) and most for/if/end keywords are fused or missing.
% It does NOT run as-is; the comments added here describe apparent intent only.
%
% Entry point: builds an ID3-style decision tree from DataSet and returns the
% tree plus a numeric rule matrix (one row per extracted rule).
function[TreeRulesMatrix]=DecisionTree(DataSet,AttributName)
% Input is the training set as discretized integers, e.g. record 1:
11321;
% Leading columns are condition attributes; the last column is the class label.
ifnargin<
1
error('
请输入数据集'
);
else
% If DataSet is a string it is treated as a file name -- readdata2() is
% presumably defined elsewhere in the project; verify before running.
ifisstr(DataSet)
[DataSetAttributValue]=readdata2(DataSet);
else
AttributValue=[];
end
end
% The stray "2" below is almost certainly residue of "if nargin < 2" from the
% original source -- TODO confirm against a clean copy.
2
AttributName=[];
% Condition attributes are every column except the last (class) column.
Attributs=[1:
size(DataSet,2)-1];
Tree=CreatTree(DataSet,Attributs);
disp([char(13)'
TheDecisionTree:
'
]);
showTree(Tree,0,0,1,AttributValue,AttributName);
% Convert the textual rules returned by getRule into a numeric matrix:
% one row per rule, one column per attribute, last column the class.
Rules=getRule(Tree);
RulesMatrix=zeros(size(Rules,1),size(DataSet,2));
fori=1:
size(Rules,1)
rule=cell2struct(Rules(i,1),{'
str'
});
% Parse the rule string, splitting around what appears to be a 'C' class
% marker; the str2num expression is itself split mid-string by extraction.
rule=str2num([rule.str([1:
(find(rule.str=='
C'
)-1)])rule.str((find(rule.str=='
)+1):
length(rule.str))]);
forj=1:
(length(rule)-1)/2
% Odd positions seem to hold attribute indices, even positions their
% values -- TODO confirm once the clean source is available.
RulesMatrix(i,rule((j-1)*2+1))=rule(j*2);
RulesMatrix(i,size(DataSet,2))=rule(length(rule));
% NOTE(review): extraction-garbled listing -- loop headers and end keywords
% are missing or fused; comments describe apparent intent only.
% Recursive ID3 tree builder.
functionTree=CreatTree(DataSet,Attributs)% decision-tree routine; inputs are:
数据集,属性名列表
%disp(Attributs);
% S is the entropy of the class column (attribut==0 selects class entropy);
% ValRecords holds, per class value, the row indices carrying that value.
[SValRecords]=ComputEntropy(DataSet,0);
if(S==0)% all samples share one class: return a leaf with that class label
fori=1:
length(ValRecords)
if(length(ValRecords(i).matrix)==size(DataSet,1))
break;
Tree.Attribut=i;
Tree.Child=[];
return;
if(length(Attributs)==0)% no condition attributes left: return majority class
mostlabelnum=0;
mostlabel=0;
if(length(ValRecords(i).matrix)>
mostlabelnum)
mostlabelnum=length(ValRecords(i).matrix);
mostlabel=i;
Tree.Attribut=mostlabel;
% Compute information gain S - Sa(i) for every remaining attribute; a
% "for i = 1:length(Attributs)" header has evidently been lost here.
length(Attributs)
[Sa(i)ValRecord]=ComputEntropy(DataSet,i);
Gains(i)=S-Sa(i);
AtrributMatric(i).val=ValRecord;
% Split on the attribute with maximal gain; remove it from the list.
[maxvalmaxindex]=max(Gains);
Tree.Attribut=Attributs(maxindex);
Attributs2=[Attributs(1:
maxindex-1)Attributs(maxindex+1:
length(Attributs))];
% For each value of the chosen attribute, build the child subset DataSet2
% (rows with that value, chosen column removed) and recurse.
length(AtrributMatric(maxindex).val)
DataSet2=[DataSet(AtrributMatric(maxindex).val(j).matrix'
1:
maxindex-1)DataSet(AtrributMatric(maxindex).val(j).matrix'
maxindex+1:
size(DataSet,2))];
if(size(DataSet2,1)==0)
% Empty partition: make the child a leaf carrying the majority class.
mostlabel=i;
Tree.Child(j).root.Attribut=mostlabel;
Tree.Child(j).root.Child=[];
Tree.Child(j).root=CreatTree(DataSet2,Attributs2);
end
% NOTE(review): extraction-garbled listing -- loop headers ("for i = 1:" etc.)
% and end keywords are missing; comments describe apparent intent only.
% attribut==0: entropy of the class column over the whole DataSet.
% attribut>0 : conditional entropy of the class given that attribute column.
% RecordVal(v).matrix collects the row indices holding value v.
function[EntropyRecordVal]=ComputEntropy(DataSet,attribut)% compute information entropy
if(attribut==0)
clnum=0;
size(DataSet,1)
% Grow classnum/RecordVal lazily as new class values appear.
if(DataSet(i,size(DataSet,2))>
clnum)% guard against index out of bounds
classnum(DataSet(i,size(DataSet,2)))=0;
clnum=DataSet(i,size(DataSet,2));
RecordVal(DataSet(i,size(DataSet,2))).matrix=[];
classnum(DataSet(i,size(DataSet,2)))=classnum(DataSet(i,size(DataSet,2)))+1;
RecordVal(DataSet(i,size(DataSet,2))).matrix=[RecordVal(DataSet(i,size(DataSet,2))).matrixi];
% Shannon entropy: -sum(P * log2(P)) over observed class frequencies.
Entropy=0;
length(classnum)
P=classnum(j)/size(DataSet,1);
if(P~=0)
Entropy=Entropy+(-P)*log2(P);
% ---- attribut > 0 branch: tally class counts per attribute value ----
valnum=0;
if(DataSet(i,attribut)>
valnum)% guard against parameter index out of bounds
clnum(DataSet(i,attribut))=0;
valnum=DataSet(i,attribut);
Valueexamnum(DataSet(i,attribut))=0;
RecordVal(DataSet(i,attribut)).matrix=[];
% keep the record indices so the data set can be split by value later
clnum(DataSet(i,attribut)))% guard against index out of bounds
Value(DataSet(i,attribut)).classnum(DataSet(i,size(DataSet,2)))=0;
clnum(DataSet(i,attribut))=DataSet(i,size(DataSet,2));
Value(DataSet(i,attribut)).classnum(DataSet(i,size(DataSet,2)))=Value(DataSet(i,attribut)).classnum(DataSet(i,size(DataSet,2)))+1;
Valueexamnum(DataSet(i,attribut))=Valueexamnum(DataSet(i,attribut))+1;
RecordVal(DataSet(i,attribut)).matrix=[RecordVal(DataSet(i,attribut)).matrixi];
% Weighted sum of per-value entropies: sum_v |Sv|/|S| * H(Sv).
valnum
Entropys=0;
fork=1:
length(Value(j).classnum)
P=Value(j).classnum(k)/Valueexamnum(j);
Entropys=Entropys+(-P)*log2(P);
Entropy=Entropy+(Valueexamnum(j)/size(DataSet,1))*Entropys;
% NOTE(review): extraction-garbled listing -- the disp() format strings are
% split across lines and several if/else/for/end keywords are missing;
% comments describe apparent intent only.
% Pretty-prints the tree: `branch` flags which ancestor levels still have
% pending siblings (draws '|' continuation bars), `value` labels the edge
% taken from the parent, AttributValue/AttributName map codes to text.
functionshowTree(Tree,level,value,branch,AttributValue,AttributName)
% Build the leading indentation / continuation-bar string for this level.
blank=[];
level-1
if(branch(i)==1)
blank=[blank'
|'
];
'
if(level==0)
blank=['
(TheRoot):
% With no value dictionary, print the numeric edge value; otherwise the
% textual value from AttributValue -- TODO confirm, branch is garbled here.
ifisempty(AttributValue)
|_____'
int2str(value)'
______'
value'
if(length(Tree.Child)~=0)% non-leaf node
% Print the split attribute (by index, or by name when available).
ifisempty(AttributName)
disp([blank'
Attribut'
int2str(Tree.Attribut)]);
AttributName{Tree.Attribut}]);
% Recurse into children; the last child gets branch bit 0 so no
% continuation bar is drawn below it.
length(Tree.Child)-1
showTree(Tree.Child(j).root,level+1,j,[branch1],AttributValue,AttributName);
showTree(Tree.Child(length(Tree.Child)).root,level+1,length(Tree.Child),[branch(1:
length(branch)-1)01],AttributValue,AttributName);
showTree(Tree.Child(j).root,level+1,AttributValue{Tree.Attribut}{j},[branch1],AttributValue,AttributName);
showTree(Tree.Child(length(Tree.Child)).root,level+1,AttributValue{Tree.Attribut}{length(Tree.Child)},[branch(1:
leaf'
AttributValue{length(AttributValue)}{Tree.Attribut}]);
% NOTE(review): extraction-garbled listing, and the function appears truncated
% at the end of this excerpt (closing lines are missing); comments describe
% the visible lines only.
% Recursively collects decision rules: for an internal node, prefixes each
% child's rules with this node's attribute index and the branch taken; for a
% leaf, the rule is just the class label.
functionRules=getRule(Tree)
if(length(Tree.Child)~=0)
Rules={};
length(Tree.Child)
content=getRule(Tree.Child(i).root);
%disp(content);
%disp([num2str(Tree.Attribut)'
'
num2str(i)'
size(content,1)
% Prepend "attribute branch" to every rule string coming up from child i.
rule=cell2struct(content(j,1),{'
content(j,1)={[num2str(Tree.Attribut)'
rule.str]};
Rules=[Rules;
content];
else
% Leaf node: emit a single rule holding the class label.
Rules={['
num2str(Tree.Attribut)]};