Documente Academice
Documente Profesionale
Documente Culturale
import java.io.PrintWriter;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
*
* @author user
*/
/**
 * Computes a pairwise feature-correlation ("F-Correlation") score for the
 * selected features, using entropies derived from per-value counts queried
 * from the "report" table, and prints/returns an adjacency matrix of scores.
 *
 * NOTE(review): this listing was recovered from a line-wrapped extraction.
 * Several spans are missing (the lines containing only "//" and the fragment
 * "atrix"), some identifiers are split across lines, and "} else {" appears
 * twice without its matching "if"/loop header. As written this block does not
 * compile; the comments below document only the visible flow.
 */
public class FCorrelation {
/**
 * Driver: loads the selected features, orders them by relevance score, then
 * (inside the elided nested loops) measures pairwise correlation from DB
 * counts. NOTE(review): feature names are concatenated directly into SQL —
 * a SQL injection risk if feature names can ever come from untrusted input.
 */
public static void claFCorrelation() throws Exception {
// Accumulators, presumably for an epsilon/threshold over the pairwise
// scores — their use is not visible in this chunk, TODO confirm.
double eps_sum = 0.0, eps = 0.0;
int eps_count = 0;
HashMap<String, Double> features_rel = new HashMap<String, Double>();
HashMap<String, Double> features_final = new HashMap<String, Double>();
// Relevance score per selected feature, computed elsewhere (Retrieve).
features_rel = (HashMap<String, Double>) Retrieve.getSelectedFeatures();
// Sorted view of the relevance map; sortByValues is not visible here —
// presumably returns a LinkedHashMap ordered by score, TODO confirm.
features_final = (HashMap<String, Double>) sortByValues(features_rel);
Statement st = db.dbconn.connect();
ArrayList<String> features = new ArrayList<String>();
double f_i_entropy, f_j_entropy;
ResultSet count_f_i, count_f_j, count_f_i_j;
// Per-distinct-value row counts for feature i and feature j.
ArrayList<Integer> cfi = new ArrayList<Integer>();
ArrayList<Integer> cfj = new ArrayList<Integer>();
// feature-j value -> count, and feature-j value -> joint counts with i.
HashMap<String, Integer> cfj_hs = new HashMap<String, Integer>();
HashMap<String, ArrayList<Integer>> cfij_hs = new HashMap<String, ArrayL
ist<Integer>>();
int count = 0;
HashMap<String, Double> f_correlation = new HashMap<String, Double>();
// Total row count of "report": denominator for the entropy probabilities.
String count_sql = "SELECT count(*) as count FROM report";
PreparedStatement st1=db.dbconn.getConn().prepareStatement(count_sql);
ResultSet rs_count = st1.executeQuery();
if (rs_count.next()) {
count = Integer.parseInt(rs_count.getString("count"));
}
for (Map.Entry<String, Double> entry : features_final.entrySet()) {
features.add(entry.getKey());// collect the ranked feature names from the relevance map
}
String outputFile = System.getProperty("user.dir")+"/F_Correlation.txt"
;
// NOTE(review): span elided by the extraction — most likely the nested
// i/j loops over `features` building the adjacency matrix ("...atrix").
//
//
atrix
//
// NOTE(review): orphaned else — its matching "if" (probably the i == j
// diagonal case) was lost in the elided span above.
} else {
String s = features.get(i) + ":" + features.get(j);
try {
// Count of rows per distinct value of feature i.
String sql_features_i = "SELECT " + features.get(i) + ", COU
NT(*) as count FROM report GROUP BY " + features.get(i);
PreparedStatement st2=db.dbconn.getConn().prepareStatement(s
ql_features_i);
// count_f_i = st.executeQuery(sql_features_i);
count_f_i = st2.executeQuery();
while (count_f_i.next()) {// collect counts of feature i
cfi.add(count_f_i.getInt("count"));
}
// NOTE(review): cfi/cfj are never cleared in the visible code — on later
// iterations they would still hold earlier features' counts; TODO confirm
// against the elided loop headers.
f_i_entropy = EntropySubEntropy.getEntropy(cfi, count);
// Count of rows per distinct value of feature j.
String sql_features_j = "SELECT " + features.get(j) + ", COU
NT(*) as count FROM report GROUP BY " + features.get(j);
PreparedStatement st3=db.dbconn.getConn().prepareStatement(s
ql_features_j);
//count_f_j = st.executeQuery(sql_features_j);
count_f_j = st3.executeQuery();
while (count_f_j.next()) {// collect counts of feature j, keyed by value
cfj.add(count_f_j.getInt("count"));
cfj_hs.put(count_f_j.getString(features.get(j)), count_f
_j.getInt("count"));
}
f_j_entropy = EntropySubEntropy.getEntropy(cfj, count);
ArrayList<Integer> temp = null;
// Joint counts grouped by (feature i value, feature j value) pairs.
String sql_features_i_j = "SELECT " + features.get(i) + ","
+ features.get(j) + ", COUNT(*) as count FROM report GROUP BY " + features.get(i
) + "," + features.get(j);
PreparedStatement st4=db.dbconn.getConn().prepareStatement(s
ql_features_i_j);
// NOTE(review): bug — PreparedStatement.executeQuery(String) throws
// SQLException at runtime (JDBC spec); this should be st4.executeQuery().
count_f_i_j = st4.executeQuery(sql_features_i_j);
// put feature-j value as key and list of joint counts as value
while (count_f_i_j.next()) {// joint counts between feature i and feature j
String key = count_f_i_j.getString(features.get(j));
int value = count_f_i_j.getInt("count");
if (cfij_hs.containsKey(key)) {
temp = cfij_hs.get(key);
temp.add(value);
cfij_hs.put(key, temp);
} else {
// First occurrence of this feature-j value: start a new list.
temp = new ArrayList<Integer>();
temp.add(value);
cfij_hs.put(key, temp);
}
}
// NOTE(review): truncated mid-expression — shape suggests a symmetric-
// uncertainty style score, 2*(H(i) - H(i|j)) / (H(i)+H(j)); TODO confirm.
double dd = (double) (((2 * (f_i_entropy - EntropySubEntropy.ge
// NOTE(review): span elided; a second, near-duplicate pass follows using
// a `features1` list whose declaration is not visible in this chunk.
//
//
atrix
//
} else {
String s = features1.get(i) + ":" + features1.get(j);
try {
String sql_features_i = "SELECT " + features1.get(i) + ", CO
UNT(*) as count FROM report GROUP BY " + features1.get(i);
PreparedStatement st2=db.dbconn.getConn().prepareStatement(s
ql_features_i);
// count_f_i = st.executeQuery(sql_features_i);
count_f_i = st2.executeQuery();
while (count_f_i.next()) {// collect counts of feature i
cfi.add(count_f_i.getInt("count"));
}
f_i_entropy = EntropySubEntropy.getEntropy(cfi, count);
String sql_features_j = "SELECT " + features1.get(j) + ", CO
UNT(*) as count FROM report GROUP BY " + features1.get(j);
PreparedStatement st3=db.dbconn.getConn().prepareStatement(s
ql_features_j);
//count_f_j = st.executeQuery(sql_features_j);
count_f_j = st3.executeQuery();
while (count_f_j.next()) {// collect counts of feature j, keyed by value
cfj.add(count_f_j.getInt("count"));
cfj_hs.put(count_f_j.getString(features1.get(j)), count_
f_j.getInt("count"));
}
f_j_entropy = EntropySubEntropy.getEntropy(cfj, count);
ArrayList<Integer> temp = null;
// NOTE(review): span elided; the code below prints the adjacency matrix —
// `adj_matrix` and loop variable `k` are declared in the missing part, and
// the enclosing method (which returns adj_matrix) has lost its signature.
//
System.out.println("");
//
for (k = 0; k < features.size(); k++) {
//
// Row label: feature name.
System.out.print(features.get(k) + "\t");
//
for (int j = 0; j < features.size(); j++) {
////
System.out.print(adj_matrix[k][j] + "\t");// print the adjacency matrix on the console
//
}
//
System.out.println("");
//
}
//
return adj_matrix;
}
public
public
public
public
public
public
public
public
public
public
public
public
public
void
void
void
void
void
void
void
void
void
void
void
void
void
flush() {}
close() {}
write(int b) {}
write(byte[] b) {}
write(byte[] buf, int off, int len) {}
print(boolean b) {}
print(char c) {}
print(int i) {}
print(long l) {}
print(float f) {}
print(double d) {}
print(char[] s) {}
print(String s) {}