// Scraped-page artifact (Romanian): "Sunteți pe pagina 1 din 7" = "You are on page 1 of 7".

/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package algo;
import java.io.PrintWriter;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

/**
*
* @author user
*/
public class FCorrelation {
public static void claFCorrelation() throws Exception {
double eps_sum = 0.0, eps = 0.0;
int eps_count = 0;
HashMap<String, Double> features_rel = new HashMap<String, Double>();
HashMap<String, Double> features_final = new HashMap<String, Double>();
features_rel = (HashMap<String, Double>) Retrieve.getSelectedFeatures();
features_final = (HashMap<String, Double>) sortByValues(features_rel);
Statement st = db.dbconn.connect();
ArrayList<String> features = new ArrayList<String>();
double f_i_entropy, f_j_entropy;
ResultSet count_f_i, count_f_j, count_f_i_j;
ArrayList<Integer> cfi = new ArrayList<Integer>();
ArrayList<Integer> cfj = new ArrayList<Integer>();
HashMap<String, Integer> cfj_hs = new HashMap<String, Integer>();
HashMap<String, ArrayList<Integer>> cfij_hs = new HashMap<String, ArrayL
ist<Integer>>();
int count = 0;
HashMap<String, Double> f_correlation = new HashMap<String, Double>();
String count_sql = "SELECT count(*) as count FROM report";
PreparedStatement st1=db.dbconn.getConn().prepareStatement(count_sql);
ResultSet rs_count = st1.executeQuery();
if (rs_count.next()) {
count = Integer.parseInt(rs_count.getString("count"));
}
for (Map.Entry<String, Double> entry : features_final.entrySet()) {
features.add(entry.getKey());//adding relevant features into arrayli
st from hashmap relavant_features
}
String outputFile = System.getProperty("user.dir")+"/F_Correlation.txt"
;

//
//
atrix
//

PrintWriter wr = new PrintWriter(outputFile,"UTF-8");


System.out.println("List size : "+features.size());
for (int i = 0; i < features.size(); i++) {
for (int j = i + 1; j < features.size(); j++) {
eps_count++;
if (i == j) {
adj_matrix[i][j] = 0.0;//setting diagonal element of the m

} else {
String s = features.get(i) + ":" + features.get(j);
try {
String sql_features_i = "SELECT " + features.get(i) + ", COU
NT(*) as count FROM report GROUP BY " + features.get(i);
PreparedStatement st2=db.dbconn.getConn().prepareStatement(s
ql_features_i);
// count_f_i = st.executeQuery(sql_features_i);
count_f_i = st2.executeQuery();
while (count_f_i.next()) {//getting count of fetures i
cfi.add(count_f_i.getInt("count"));
}
f_i_entropy = EntropySubEntropy.getEntropy(cfi, count);
String sql_features_j = "SELECT " + features.get(j) + ", COU
NT(*) as count FROM report GROUP BY " + features.get(j);
PreparedStatement st3=db.dbconn.getConn().prepareStatement(s
ql_features_j);
//count_f_j = st.executeQuery(sql_features_j);
count_f_j = st3.executeQuery();
while (count_f_j.next()) {//getting count of features j
cfj.add(count_f_j.getInt("count"));
cfj_hs.put(count_f_j.getString(features.get(j)), count_f
_j.getInt("count"));
}
f_j_entropy = EntropySubEntropy.getEntropy(cfj, count);
ArrayList<Integer> temp = null;
String sql_features_i_j = "SELECT " + features.get(i) + ","
+ features.get(j) + ", COUNT(*) as count FROM report GROUP BY " + features.get(i
) + "," + features.get(j);
PreparedStatement st4=db.dbconn.getConn().prepareStatement(s
ql_features_i_j);
count_f_i_j = st4.executeQuery(sql_features_i_j);
//put features as key and list of count as values
while (count_f_i_j.next()) {//getting relational count betwe
en feature i and feature j
String key = count_f_i_j.getString(features.get(j));
int value = count_f_i_j.getInt("count");
if (cfij_hs.containsKey(key)) {
temp = cfij_hs.get(key);
temp.add(value);
cfij_hs.put(key, temp);
} else {
temp = new ArrayList<Integer>();
temp.add(value);
cfij_hs.put(key, temp);
}
}
double dd = (double) (((2 * (f_i_entropy - EntropySubEntropy.ge

tSubEntropy_one(cfij_hs, cfj_hs, count))) / (f_i_entropy + f_j_entropy)));


//double dd = (double) Math.sqrt(2 * (1 - (EntropySubEntropy.
getSubEntropy_one(cfij_hs, cfj_hs, count) / (f_i_entropy + f_j_entropy))));
eps_sum += (double) dd;
// System.out.println("DD "+dd);
f_correlation.put(s, dd);
//calculatin f-correlation between feature i and feature j
//
adj_matrix[i][j] = (double) (((2 * (f_i_entropy - Entr
opySubEntropy.getSubEntropy_one(cfij_hs, cfj_hs, count))) / (f_i_entropy + f_j_e
ntropy)));
//
adj_matrix[j][i] = adj_matrix[i][j];
wr.write(features.get(i)+"\t"+features.get(j)+"\t"+dd+"\n");
} catch (Exception e) {
e.printStackTrace();
}
//
}
}
}
wr.close();
System.out.println("Correlation map :"+f_correlation.isEmpty());
for (Map.Entry<String, Double> entry : f_correlation.entrySet()) {
System.out.println("Features:->" + entry.getKey() + " F-Correlation:
->" + entry.getValue());
}
eps = (double) eps_sum / (double) eps_count;
System.out.println("EPS value is->"+eps);
//
System.out.println(" Exit from Loop ");
//
int k = 0;
//
System.out.print("\t");
//
for (int i = 0; i < features.size(); i++) {
//
System.out.print(features.get(i) + "\t");
//
}
//
System.out.println("");
//
for (k = 0; k < features.size(); k++) {
//
System.out.print(features.get(k) + "\t");
//
for (int j = 0; j < features.size(); j++) {
////
System.out.print(adj_matrix[k][j] + "\t");//output t
he adjacency matrix on console
//
}
//
System.out.println("");
//
}
//
return adj_matrix;
}

public static void claFCorrelation1() throws Exception {


double eps_sum = 0.0, eps = 0.0;
int eps_count = 0;
HashMap<String, Double> features_rel1 = new HashMap<String, Double>();
HashMap<String, Double> features_final1 = new HashMap<String, Double>();
features_rel1 = (HashMap<String, Double>) Retrieve1.getSelectedFeatures(
);
features_final1 = (HashMap<String, Double>) sortByValues(features_rel1);
Statement st = db.dbconn.connect();
ArrayList<String> features1 = new ArrayList<String>();
double f_i_entropy, f_j_entropy;

ResultSet count_f_i, count_f_j, count_f_i_j;


ArrayList<Integer> cfi = new ArrayList<Integer>();
ArrayList<Integer> cfj = new ArrayList<Integer>();
HashMap<String, Integer> cfj_hs = new HashMap<String, Integer>();
HashMap<String, ArrayList<Integer>> cfij_hs = new HashMap<String, ArrayL
ist<Integer>>();
int count = 0;
HashMap<String, Double> f_correlation1 = new HashMap<String, Double>();
String count_sql = "SELECT count(*) as count FROM report";
PreparedStatement st1=db.dbconn.getConn().prepareStatement(count_sql);
ResultSet rs_count = st1.executeQuery();
if (rs_count.next()) {
count = Integer.parseInt(rs_count.getString("count"));
}
for (Map.Entry<String, Double> entry : features_final1.entrySet()) {
features1.add(entry.getKey());//adding relevant features into arrayl
ist from hashmap relavant_features
}
// Sout.sout("features--", features);
//
double[][] adj_matrix = new double[features.size()][features.size()];
String outputFile = System.getProperty("user.dir")+"/F_Correlation1.txt
";

//
//
atrix
//

PrintWriter wr = new PrintWriter(outputFile,"UTF-8");


System.out.println("List size : "+features1.size());
for (int i = 0; i < features1.size(); i++) {
for (int j = i + 1; j < features1.size(); j++) {
eps_count++;
if (i == j) {
adj_matrix[i][j] = 0.0;//setting diagonal element of the m

} else {
String s = features1.get(i) + ":" + features1.get(j);
try {
String sql_features_i = "SELECT " + features1.get(i) + ", CO
UNT(*) as count FROM report GROUP BY " + features1.get(i);
PreparedStatement st2=db.dbconn.getConn().prepareStatement(s
ql_features_i);
// count_f_i = st.executeQuery(sql_features_i);
count_f_i = st2.executeQuery();
while (count_f_i.next()) {//getting count of fetures i
cfi.add(count_f_i.getInt("count"));
}
f_i_entropy = EntropySubEntropy.getEntropy(cfi, count);
String sql_features_j = "SELECT " + features1.get(j) + ", CO
UNT(*) as count FROM report GROUP BY " + features1.get(j);
PreparedStatement st3=db.dbconn.getConn().prepareStatement(s
ql_features_j);
//count_f_j = st.executeQuery(sql_features_j);
count_f_j = st3.executeQuery();
while (count_f_j.next()) {//getting count of features j
cfj.add(count_f_j.getInt("count"));
cfj_hs.put(count_f_j.getString(features1.get(j)), count_
f_j.getInt("count"));
}
f_j_entropy = EntropySubEntropy.getEntropy(cfj, count);
ArrayList<Integer> temp = null;

String sql_features_i_j = "SELECT " + features1.get(i) + ","


+ features1.get(j) + ", COUNT(*) as count FROM report GROUP BY " + features1.ge
t(i) + "," + features1.get(j);
PreparedStatement st4=db.dbconn.getConn().prepareStatement(s
ql_features_i_j);
//count_f_i_j = st.executeQuery(sql_features_i_j);
count_f_i_j = st4.executeQuery(sql_features_i_j);
//put features as key and list of count as values
while (count_f_i_j.next()) {//getting relational count betwe
en feature i and feature j
String key = count_f_i_j.getString(features1.get(j));
int value = count_f_i_j.getInt("count");
if (cfij_hs.containsKey(key)) {
temp = cfij_hs.get(key);
temp.add(value);
cfij_hs.put(key, temp);
} else {
temp = new ArrayList<Integer>();
temp.add(value);
cfij_hs.put(key, temp);
}
}
//double dd = (double) (((2 * (f_i_entropy - EntropySubEntropy.g
etSubEntropy_one(cfij_hs, cfj_hs, count))) / (f_i_entropy + f_j_entropy)));
double dd = (double) Math.sqrt(2 * (1 - (EntropySubEntropy.ge
tSubEntropy_one(cfij_hs, cfj_hs, count) / (f_i_entropy + f_j_entropy))));
eps_sum += (double) dd;
// System.out.println("DD "+dd);
f_correlation1.put(s, dd);
//calculatin f-correlation between feature i and feature j
//
adj_matrix[i][j] = (double) (((2 * (f_i_entropy - Entr
opySubEntropy.getSubEntropy_one(cfij_hs, cfj_hs, count))) / (f_i_entropy + f_j_e
ntropy)));
//
adj_matrix[j][i] = adj_matrix[i][j];
wr.write(features1.get(i)+"\t"+features1.get(j)+"\t"+dd+"\n"
);
} catch (Exception e) {
e.printStackTrace();
}
//
}
}
}
wr.close();
//
System.out.println("Correlation map :"+f_correlation.isEmpty());
for (Map.Entry<String, Double> entry : f_correlation1.entrySet()) {
System.out.println("Features:->" + entry.getKey() + " F-Correlation:
->" + entry.getValue());
}
eps = (double) eps_sum / (double) eps_count;
System.out.println("EPS value is->"+eps);
//
System.out.println(" Exit from Loop ");
//
int k = 0;
//
System.out.print("\t");
//
for (int i = 0; i < features.size(); i++) {
//
System.out.print(features.get(i) + "\t");
//
}

//
System.out.println("");
//
for (k = 0; k < features.size(); k++) {
//
System.out.print(features.get(k) + "\t");
//
for (int j = 0; j < features.size(); j++) {
////
System.out.print(adj_matrix[k][j] + "\t");//output t
he adjacency matrix on console
//
}
//
System.out.println("");
//
}
//
return adj_matrix;
}

/**
 * Returns a copy of {@code map} whose iteration order is by value, descending.
 *
 * <p>The original used raw types throughout; this generic version is
 * backward-compatible with the existing raw/cast call sites while letting new
 * callers keep full type information.
 *
 * @param <K> key type
 * @param <V> value type; must be comparable to itself
 * @param map map to sort (not modified)
 * @return a {@link LinkedHashMap} (typed as HashMap, as before) whose
 *         insertion — and therefore iteration — order is descending by value
 */
public static <K, V extends Comparable<? super V>> HashMap<K, V> sortByValues(HashMap<K, V> map) {
    List<Map.Entry<K, V>> entries = new LinkedList<Map.Entry<K, V>>(map.entrySet());
    // Descending order: compare o2 against o1.
    Collections.sort(entries, new Comparator<Map.Entry<K, V>>() {
        @Override
        public int compare(Map.Entry<K, V> o1, Map.Entry<K, V> o2) {
            return o2.getValue().compareTo(o1.getValue());
        }
    });
    // LinkedHashMap preserves the insertion (i.e. sorted) order.
    HashMap<K, V> sorted = new LinkedHashMap<K, V>();
    for (Map.Entry<K, V> entry : entries) {
        sorted.put(entry.getKey(), entry.getValue());
    }
    return sorted;
}
public static void main(String args[])throws Exception
{
System.setOut(new java.io.PrintStream(new java.io.OutputStream() {
@Override public void write(int b) {}
}) {
@Override
@Override
@Override
@Override
@Override
@Override
@Override
@Override
@Override
@Override
@Override
@Override
@Override

public
public
public
public
public
public
public
public
public
public
public
public
public

void
void
void
void
void
void
void
void
void
void
void
void
void

flush() {}
close() {}
write(int b) {}
write(byte[] b) {}
write(byte[] buf, int off, int len) {}
print(boolean b) {}
print(char c) {}
print(int i) {}
print(long l) {}
print(float f) {}
print(double d) {}
print(char[] s) {}
print(String s) {}

@Override public void print(Object obj) {}


@Override public void println() {}
@Override public void println(boolean x) {}
@Override public void println(char x) {}
@Override public void println(int x) {}
@Override public void println(long x) {}
@Override public void println(float x) {}
@Override public void println(double x) {}
@Override public void println(char[] x) {}
@Override public void println(String x) {}
@Override public void println(Object x) {}
@Override public java.io.PrintStream printf(String format, Object... args) {
return this; }
@Override public java.io.PrintStream printf(java.util.Locale l, String forma
t, Object... args) { return this; }
@Override public java.io.PrintStream format(String format, Object... args) {
return this; }
@Override public java.io.PrintStream format(java.util.Locale l, String forma
t, Object... args) { return this; }
@Override public java.io.PrintStream append(CharSequence csq) { return this;
}
@Override public java.io.PrintStream append(CharSequence csq, int start, int
end) { return this; }
@Override public java.io.PrintStream append(char c) { return this; }
});
FCorrelation.claFCorrelation();
}
}

// Scraped-page artifact (Romanian): "S-ar putea să vă placă și" = "You might also like".