Compare revisions

Comparing projects aricca/chessTrack and nicolas.furquez/chessTrack.
Showing with 6624 additions and 0 deletions
package uy.edu.fing.chesstrack.ajedrez;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;
public class Logica {
private static final String TAG = "CHESSTRACK::Logica";
private Mat _tableroInicio;
private Mat _tableroAnterior;
public Logica() {
// Initial position: ranks 0, 1, 6 and 7 are occupied (1); every other square is empty (0).
double[] vectorONES = { 1, 1, 1, 1, 1, 1, 1, 1 };
_tableroInicio = Mat.zeros(new Size(8, 8), CvType.CV_8UC1);
_tableroInicio.put(0, 0, vectorONES);
_tableroInicio.put(1, 0, vectorONES);
_tableroInicio.put(6, 0, vectorONES);
_tableroInicio.put(7, 0, vectorONES);
_tableroAnterior = _tableroInicio;
}
public Mat getTableroInicio() {
return _tableroInicio;
}
public boolean validarNuevoTablero(Mat actual) {
// True (and the new board is stored) only when it differs from the previous one.
Mat result = new Mat(8, 8, CvType.CV_8UC1);
Core.absdiff(actual, _tableroAnterior, result);
boolean cambio = Core.countNonZero(result) != 0;
if (cambio) {
_tableroAnterior = actual;
}
return cambio;
}
public boolean validarTableroArmado(Mat nuevo) {
// True when the detected board matches the initial setup exactly.
Mat result = new Mat(8, 8, CvType.CV_8UC1);
Core.absdiff(nuevo, _tableroInicio, result);
return (Core.countNonZero(result) == 0);
}
}
package uy.edu.fing.chesstrack.communication;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.InetAddress;
import java.net.Socket;
import android.util.Log;
public class Client {
// Singleton TCP client used to stream board states to an external server.
private static final String TAG = "CLIENT";
private static Client _clientInstance;
private Socket serverSocket;
private int _serverPort = 5556;
private String _serverIp = "192.168.1.23";
// private BufferedReader input;
private DataOutputStream output;
protected Client() {
}
public static Client getInstance() {
if (_clientInstance == null) {
_clientInstance = new Client();
}
return _clientInstance;
}
public void EstablishConnection(String serverIp, int serverPort) {
Log.i(TAG, "init client-server communication");
this._serverIp = serverIp;
this._serverPort = serverPort;
try {
Log.i(TAG, "Server on " + this._serverIp + ":" + _serverPort);
InetAddress serverAddr = InetAddress.getByName(this._serverIp);
serverSocket = new Socket(serverAddr, _serverPort);
// get stream to send data
this.output = new DataOutputStream(
this.serverSocket.getOutputStream());
// get stream to receive data
// this.input = new BufferedReader(new
// InputStreamReader(this.serverSocket.getInputStream()));
} catch (IOException e) {
e.printStackTrace();
}
}
public void SendData(String msg) {
try {
Log.i(TAG, "sending=" + msg);
this.output.writeBytes(msg);
} catch (IOException e) {
e.printStackTrace();
}
}
/*
* public String ReceiveData() { try { String read = input.readLine();
* Log.i(TAG, "received="+ read); return read; } catch (IOException e) {
* e.printStackTrace(); return null; } }
*/
public void Stop() {
try {
// input.close();
output.close();
serverSocket.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
package uy.edu.fing.chesstrack.modulomodelador;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import uy.edu.fing.chesstrack.modulovision.Calibracion;
import android.util.Log;
public class Modelador {
private static final String TAG = "CHESSTRACK::Modelador";
private final int CANT_FILAS = 8;
// private final int CANT_SCAQUE = CANT_FILAS * CANT_FILAS;
private final int MIN_CANT_PIX_WHITE = 30;
private Mat tablero;
// squares are counted from top to bottom and from left to right
private final Mat[][] escaques;
// private static Modelador instance;
public Modelador() {
tablero = Mat.zeros(new Size(8, 8), CvType.CV_8UC1);
escaques = new Mat[CANT_FILAS][CANT_FILAS];
}
public void dividirTablero(Mat inputFrame) {
Imgproc.cvtColor(inputFrame, inputFrame, Imgproc.COLOR_RGBA2GRAY, 4);
int largo = inputFrame.rows();
int ancho = inputFrame.cols();
// width and height of one square; the board is split into CANT_FILAS + 2 cells per side
// because an extra square of border is taken (see the worked example after this class)
int largoEscaque = (int) Math.floor(largo / (CANT_FILAS + 2));
int anchoEscaque = (int) Math.floor(ancho / (CANT_FILAS + 2));
Log.i(TAG, "largoEscaque= " + largoEscaque);
Log.i(TAG, "anchoEscaque= " + anchoEscaque);
for (int i = 0; i < CANT_FILAS; i++) {
for (int j = 0; j < CANT_FILAS; j++) {
int rowStart = (int) Math.floor(largoEscaque) + i
* largoEscaque + 4;
int rowEnd = (int) Math.floor(largoEscaque) + i * largoEscaque
+ largoEscaque - 4;
int colStart = (int) Math.floor(anchoEscaque) + j
* anchoEscaque + 4;
int colEnd = (int) Math.floor(anchoEscaque) + j * anchoEscaque
+ anchoEscaque - 4;
escaques[i][j] = inputFrame.submat(colStart, colEnd, rowStart,
rowEnd);
}
}
}
private int getHayFichaEnEscaque(int i, int j) {
// 1 if square (i, j) contains enough edge pixels (Sobel + threshold) to be considered occupied, 0 otherwise
int ret = 0;
Mat mIntermediateMat = new Mat();
Imgproc.Sobel(escaques[i][j], mIntermediateMat, CvType.CV_8U, 1, 1);
Core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 10, 0);
Imgproc.threshold(mIntermediateMat, mIntermediateMat, 70, 255,
Imgproc.THRESH_BINARY);
// int cant_pix = mIntermediateMat.cols() * mIntermediateMat.rows();
int cant_pix_white = Core.countNonZero(mIntermediateMat);
if (cant_pix_white > (MIN_CANT_PIX_WHITE)) {
ret = 1;
}
return ret;
}
public Mat getMatrizFichas(Mat frame) {
dividirTablero(frame);
tablero = Mat.zeros(new Size(8, 8), CvType.CV_8UC1);
for (int i = 0; i < CANT_FILAS; i++) {
for (int j = 0; j < CANT_FILAS; j++) {
tablero.put(i, j, getHayFichaEnEscaque(i, j));
}
}
return tablero.t();
}
public Mat dibujarEscaque() {
Mat tmp = Mat.ones(Calibracion.getInstance().get_sizeFrame(),
CvType.CV_8UC4);
Mat subm; // tmp.submat(new Rect(new Point(0,0)
// ,escaques[i][j].size()));
// escaques[i][j].copyTo(subm);
int pos_x = 0;
int pos_y = 0;
for (int i = 0; i < CANT_FILAS; i++) {
for (int j = 0; j < CANT_FILAS; j++) {
pos_x = (int) (escaques[i][j].size().width * i);
pos_y = (int) (escaques[i][j].size().height * j);
subm = tmp.submat(new Rect(new Point(pos_x, pos_y),
escaques[i][j].size()));
escaques[i][j].copyTo(subm);
Core.putText(tmp,
Integer.toString(i) + "-" + Integer.toString(j),
new Point(pos_x + 1, pos_y + 1),
Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar(255, 0, 0));
}
}
return tmp;
}
public Mat dibujarEscaquesCanny() {
Mat tmp = Mat.zeros(Calibracion.getInstance().get_sizeFrame(),
CvType.CV_8UC4);
Mat subm; // tmp.submat(new Rect(new Point(0,0)
// ,escaques[i][j].size()));
// escaques[i][j].copyTo(subm);
Mat mIntermediateMat = new Mat();
int pos_x = 0;
int pos_y = 0;
for (int i = 0; i < CANT_FILAS; i++) {
for (int j = 0; j < CANT_FILAS; j++) {
pos_x = (int) (escaques[i][j].size().width * i);
pos_y = (int) (escaques[i][j].size().height * j);
subm = tmp.submat(new Rect(new Point(pos_x, pos_y),
escaques[i][j].size()));
Imgproc.Canny(escaques[i][j], mIntermediateMat, 80, 90);
// show the Canny edges in the composite instead of overwriting them with the original square
Imgproc.cvtColor(mIntermediateMat, subm,
Imgproc.COLOR_GRAY2RGBA, 4);
Core.putText(tmp,
Integer.toString(i) + "-" + Integer.toString(j),
new Point(pos_x + 1, pos_y + 1),
Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar(255, 0, 0));
}
}
return tmp;
}
public Mat dibujarEscaquesSobel(int version) {
Mat tmp = Mat.zeros(Calibracion.getRectROI().size(), CvType.CV_8UC4);
Mat subm; // tmp.submat(new Rect(new Point(0,0)
// ,escaques[i][j].size()));
// escaques[i][j].copyTo(subm);
Mat mIntermediateMat = new Mat();
int pos_x = 0;
int pos_y = 0;
Log.i(TAG, "cant_pix_INI");
for (int i = 0; i < CANT_FILAS; i++) {
for (int j = 0; j < CANT_FILAS; j++) {
pos_x = (int) (escaques[i][j].size().width * i);
pos_y = (int) (escaques[i][j].size().height * j);
subm = tmp.submat(new Rect(new Point(pos_x, pos_y),
escaques[i][j].size()));
// Imgproc.cvtColor(escaques[i][j] ,mIntermediateMat,
// Imgproc.COLOR_RGBA2GRAY,4);
Imgproc.Sobel(escaques[i][j], mIntermediateMat, CvType.CV_8U,
1, 1);
Core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 10, 0);
Imgproc.threshold(mIntermediateMat, mIntermediateMat, 70, 255,
Imgproc.THRESH_BINARY);
Imgproc.erode(mIntermediateMat, mIntermediateMat, Imgproc
.getStructuringElement(Imgproc.MORPH_RECT, new Size(1,
1)));
Imgproc.dilate(mIntermediateMat, mIntermediateMat, Imgproc
.getStructuringElement(Imgproc.MORPH_RECT, new Size(1,
1)));
int cant_pix = mIntermediateMat.cols()
* mIntermediateMat.rows();
int cant_pix_white = Core.countNonZero(mIntermediateMat);
Log.i(TAG, "cant_pix=" + cant_pix);
Log.i(TAG, "cant_pix_white=" + cant_pix_white);
switch (version) {
case 1:
if (cant_pix_white > (MIN_CANT_PIX_WHITE)) {
Imgproc.cvtColor(mIntermediateMat, subm,
Imgproc.COLOR_GRAY2RGBA, 4);
} else {
Imgproc.cvtColor(escaques[i][j], subm,
Imgproc.COLOR_GRAY2RGBA, 4);
}
break;
case 2:
if (cant_pix_white > (MIN_CANT_PIX_WHITE)) {
Log.i(TAG, "NUEVA FICHA" + (i + 1) + "-" + (j + 1));
}
Imgproc.cvtColor(mIntermediateMat, subm,
Imgproc.COLOR_GRAY2RGBA, 4);
break;
default:
Imgproc.cvtColor(escaques[i][j], subm,
Imgproc.COLOR_GRAY2RGBA, 4);
break;
}
}
}
Log.i(TAG, "cant_pix_FIN");
return tmp;
}
}
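A worked example of the square geometry assumed by dividirTablero above, referenced from its comment. The 480x480 region size is an assumption for illustration (it matches a board whose inner corners span roughly 288 px, as in the homography code and the MATLAB prototype further down); only the formulas come from the code.

// Illustrative sketch only (not part of the repository).
public class EscaqueGeometrySketch {
    public static void main(String[] args) {
        final int CANT_FILAS = 8;
        int largo = 480, ancho = 480;                // rows and columns of the hypothetical warped region
        int largoEscaque = largo / (CANT_FILAS + 2); // 48 px: 8 playing squares + 2 border squares per side
        int anchoEscaque = ancho / (CANT_FILAS + 2); // 48 px
        // square (i, j) is sampled 4 px inside its nominal bounds; for i = j = 0:
        int rowStart = largoEscaque + 0 * largoEscaque + 4;              // 52
        int rowEnd = largoEscaque + 0 * largoEscaque + largoEscaque - 4; // 92
        System.out.println(largoEscaque + "x" + anchoEscaque + " px per square, rows sampled in [" + rowStart + ", " + rowEnd + ")");
    }
}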
package uy.edu.fing.chesstrack.modulovision;
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Mat;
import uy.edu.fing.chesstrack.modulovision.imgproc.Homografia;
import uy.edu.fing.chesstrack.modulovision.imgproc.ImgProcInterface;
import android.util.Log;
public class Adquisicion {
private static final String TAG = "CHESSTRACK::Adquisicion";
List<ImgProcInterface> _listProc;
public Adquisicion() throws Exception {
super();
Log.i(TAG, "Cargado....");
_listProc = new ArrayList<ImgProcInterface>();
_listProc.add(new Homografia());
//_listProc.add(new EcualizarImagen());
}
/**
* Runs the frame passed as a parameter through every registered image processor, in order.
* @param inputPicture frame to process
* @return the processed frame
* see http://dsp.stackexchange.com/questions/2805/how-does-opencv-find-chessboard-corners
* http://www.cvg.ethz.ch/teaching/2012fall/cvl/Tutorial-OpenCV.pdf
*/
public synchronized Mat processFrame(Mat inputPicture) {
Log.i(TAG, "processFrame inicia");
Mat tempImg = inputPicture;
for (ImgProcInterface proc: _listProc){
tempImg = proc.procesarImagen(tempImg);
}
return tempImg;
}
}
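A minimal usage sketch for the pipeline described in the Javadoc above. The class and method names below are hypothetical; it assumes Calibracion.getInstance().calibrar(...) has already succeeded so the Homografia constructor can find its corners, which is essentially what Manager does further down.

package uy.edu.fing.chesstrack.modulovision;

import org.opencv.core.Mat;

// Illustrative sketch only (not part of the repository).
public class AdquisicionUsageSketch {
    // Returns the perspective-corrected, cropped board region for one camera frame.
    public static Mat rectificar(Mat cameraFrame) throws Exception {
        Adquisicion adq = new Adquisicion();  // registers Homografia, built from the calibration corners
        return adq.processFrame(cameraFrame); // runs every registered ImgProcInterface in order
    }
}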
package uy.edu.fing.chesstrack.modulovision;
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Rect;
import org.opencv.core.Size;
import android.util.Log;
public class Calibracion {
private static final String TAG = "CHESSTRACK::Calibracion";
private Size _sizeFrame = null;
private MatOfPoint2f _vertices = null;
private static Rect _rectROI = null;
private static Calibracion _instance;
protected Calibracion() {
}
public static Calibracion getInstance() {
if (_instance == null) {
_instance = new Calibracion();
}
return _instance;
}
public boolean calibrar(Mat img) {
// Looks for the 7x7 inner-corner chessboard pattern; on success stores the corners and the frame size.
Log.i(TAG, "INI calibrando");
Size patternSize = new Size(7, 7);
MatOfPoint2f corners = new MatOfPoint2f();
boolean ret = Calib3d.findChessboardCorners(img, patternSize, corners);
if (ret) {
Log.i(TAG, "Encontro = " + corners.total());
// Calib3d.drawChessboardCorners(img, patternSize, corners, ret);
// only for debug
_vertices = corners;
_sizeFrame = img.size();
}
Log.i(TAG, "FIN calibrando = " + ret);
return ret;
}
public Size get_sizeFrame() {
return _sizeFrame;
}
public void set_sizeFrame(Size _sizeFrame) {
this._sizeFrame = _sizeFrame;
}
public MatOfPoint2f getVertices() {
return _vertices;
}
public void setVertices(MatOfPoint2f vertices) {
this._vertices = vertices;
}
public static Rect getRectROI() {
return _rectROI;
}
public static void setRectROI(Rect rectROI) {
Calibracion._rectROI = rectROI;
}
}
package uy.edu.fing.chesstrack.modulovision;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.video.BackgroundSubtractorMOG2;
import android.util.Log;
public class DetectorOclusion {
// Detects hand occlusion over the board with MOG2 background subtraction and a small
// state machine: OCLUSION -> TRANSICION -> ENVIAR_TABLERO -> ESTABLE.
private static final String TAG = "CHESSTRACK::BackgroundSupress";
private static final int OCLUSION = 0;
private static final int ESTABLE = 1;
private static final int TRANSICION = 2;
private static final int ENVIAR_TABLERO = 3;
private static final int START = -1;
private final Mat _fgMaskMOG2;
private Mat _resultado;
private BackgroundSubtractorMOG2 _pMOG2;
private final Mat _morphKernel;
private int _estadoANTERIOR;
private int _estadoACTUAL;
public DetectorOclusion() {
super();
Log.i(TAG, "constructor INI");
_morphKernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT,
new Size(3, 3));
_fgMaskMOG2 = new Mat(4, 1, CvType.CV_8UC1);
_pMOG2 = new BackgroundSubtractorMOG2();
_estadoACTUAL = START;
_resultado = new Mat();
Log.i(TAG, "constructor FIN");
}
public Mat get_resultado() {
return _resultado;
}
public void set_resultado(Mat _resultado) {
this._resultado = _resultado;
}
public boolean hayNuevoTableroValido(Mat inputFrame) {
/*
* if (_estadoACTUAL == START){ _pMOG2 = new BackgroundSubtractorMOG2();
* // MOG2 approach }
*/
Log.i(TAG, "Procesar!");
Log.i(TAG, "Region SIZE=" + inputFrame.size());
_pMOG2.apply(inputFrame, _fgMaskMOG2);
Imgproc.erode(_fgMaskMOG2, _fgMaskMOG2, _morphKernel);
Log.i(TAG, "Apply erode");
Imgproc.threshold(_fgMaskMOG2, _fgMaskMOG2, 200, 255,
Imgproc.THRESH_BINARY);
Log.i(TAG, "Apply threshold");
// Imgproc.dilate(_fgMaskMOG2, _fgMaskMOG2, _morphKernel);
// Log.i(TAG, "Apply dilate");
Scalar suma = Core.sumElems(_fgMaskMOG2);
Log.i(TAG, "SUMA = " + suma);
_estadoANTERIOR = _estadoACTUAL;
if (suma.val[0] > 1250000) {
// empirical threshold on the foreground-mask sum: above it a hand is assumed to be over the board
Log.i(TAG, "VEO MANO !!!");
_estadoACTUAL = OCLUSION;
/*
* Core.putText(_fgMaskMOG2, "MANO !!!", new Point(20, 20),
* Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 0));
*/
} else {
if (_estadoANTERIOR == OCLUSION) {
/*
* Core.putText(_fgMaskMOG2, "TRANSI !!!", new Point(20, 20),
* Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 0));
*/
Log.i(TAG, "VEO TRANSICION !!!");
_estadoACTUAL = TRANSICION;
}
if (_estadoANTERIOR == TRANSICION) {
/*
* Core.putText(_fgMaskMOG2, "ESTABLE !!!", new Point(20, 20),
* Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 0));
*/
_estadoACTUAL = ENVIAR_TABLERO;
_pMOG2 = new BackgroundSubtractorMOG2(); // MOG2 approach
Log.i(TAG, "VEO ENVIAR !!!");
}
if (_estadoANTERIOR == ENVIAR_TABLERO) {
_estadoACTUAL = ESTABLE;
Log.i(TAG, "VEO ESTABLE !!!");
}
}
Log.i(TAG, "END");
Imgproc.cvtColor(_fgMaskMOG2, _resultado, Imgproc.COLOR_GRAY2RGBA, 4);
return (_estadoACTUAL == ENVIAR_TABLERO);
}
}
/*
* double maxArea = 0; MatOfPoint largestContour = null;
* Imgproc.findContours(_fgMaskMOG2, contours, new Mat(), Imgproc.RETR_LIST,
* Imgproc.CHAIN_APPROX_TC89_L1); for (MatOfPoint contour : contours) { double
* area = Imgproc.contourArea(contour); if (area > maxArea) { maxArea = area;
* largestContour = contour; } } Log.i(TAG, "ESTABLE !!!" + maxArea); if
* ((largestContour != null) && (maxArea > 10000)){ Log.i(TAG, "MANO !!!" +
* maxArea); Rect boundingRect = Imgproc.boundingRect(largestContour);
* ToneGenerator toneG = new ToneGenerator(AudioManager.STREAM_ALARM, 50);
* toneG.startTone(ToneGenerator.TONE_CDMA_ALERT_CALL_GUARD, 200); // 200 is
* duration in ms }
*/
/*
* Imgproc.findContours(_fgMaskMOG2, contours, new Mat(), Imgproc.RETR_LIST,
* Imgproc.CHAIN_APPROX_NONE);
*
* for(int idx = 0; idx < contours.size(); idx++) { double area =
* Imgproc.contourArea(contours.get(idx)); Log.i(TAG, "CONTOUR = AREA: " +
* area); if ((area > 15000) && (area < 70000)){ Log.i(TAG, "DRAW !!! : " +
* area); Scalar color = new Scalar(255,127,127); Rect r =
* Imgproc.boundingRect(contours.get(idx)); Log.i(TAG,
* "3 at backgroundS "+(_fgMaskMOG2.type() == CvType.CV_8UC1));
* Core.rectangle(_fgMaskMOG2, r.tl(), r.br(), color, 2, 8, 0);
* //Imgproc.drawContours(_fgMaskMOG2, contours, idx, color); } }
*
* contours.clear();
*/
package uy.edu.fing.chesstrack.modulovision;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import uy.edu.fing.chesstrack.ajedrez.Logica;
import uy.edu.fing.chesstrack.communication.Client;
import uy.edu.fing.chesstrack.modulomodelador.Modelador;
import android.media.AudioManager;
import android.media.ToneGenerator;
import android.util.Log;
public class Manager {
// Coordinates calibration, acquisition, modeling, game logic and the network client.
// States: ARMANDO_TABLERO (waiting for the initial setup) and JUGANDO (tracking moves).
private static final String TAG = "CHESSTRACK::MANAGER";
private static Manager _instance;
private static final int ARMANDO_TABLERO = 0;
private static final int JUGANDO = 1;
private static final int OK = 1;
private static final int NOT = 0;
private static final String WELCOME_MSG = ""
+ "--------------------------------------------------------------------------\n"
+ " .:: CHESSTRACK ::. seguimiento de una partida de Ajedrez\n"
+ "\n"
+ " TImag 2014\n"
+ " Nicolas Furquez - Aylen Ricca\n"
+ "--------------------------------------------------------------------------\n";
private static int _estado;
private static int _salida;
private static Adquisicion _adquisicion;
private static Client _client;
private static DetectorOclusion _detectorOclusion;
private static Modelador _modelador;
private static Logica _logica;
private final boolean _debug;
private int _cantJugada;
protected Manager() {
_estado = ARMANDO_TABLERO;
_salida = NOT;
_adquisicion = null;
_client = null;
_debug = false;
_cantJugada = 0;
}
public static Manager getInstance() {
if (_instance == null) {
_instance = new Manager();
}
return _instance;
}
public static int get_estado() {
return _estado;
}
public static void set_estado(int _estado) {
Manager._estado = _estado;
}
public void setConnection(String ip, int port) {
Log.i(TAG, "Setting connection");
_client = Client.getInstance();
_client.EstablishConnection(ip, port);
_client.SendData(WELCOME_MSG);
_salida = OK;
}
public boolean calibrar(Mat frame) {
Log.i(TAG, "Calibrando");
if (Calibracion.getInstance().calibrar(frame)) {
ToneGenerator toneG = new ToneGenerator(AudioManager.STREAM_ALARM,
50);
toneG.startTone(ToneGenerator.TONE_CDMA_ALERT_CALL_GUARD, 200); // ms
if (_salida == OK) {
_client.SendData("FIN calibrar\n . . . armando tablero!\n");
}
try {
_adquisicion = new Adquisicion();
_modelador = new Modelador();
_logica = new Logica();
} catch (Exception e) {
e.printStackTrace();
return false;
}
return true;
}
Log.i(TAG, "No calibrado");
ToneGenerator toneG = new ToneGenerator(AudioManager.STREAM_ALARM, 50);
toneG.startTone(ToneGenerator.TONE_CDMA_ALERT_CALL_GUARD, 700);
return false;
}
public void iniciarJuego() {
_estado = JUGANDO;
// beep
_detectorOclusion = new DetectorOclusion();
ToneGenerator toneG = new ToneGenerator(AudioManager.STREAM_ALARM, 50);
toneG.startTone(ToneGenerator.TONE_CDMA_ALERT_CALL_GUARD, 200); // ms
if (_salida == OK) {
Mat tableroInicio = _logica.getTableroInicio();
_client.SendData("TableroINICIO!\n");
_client.SendData(" ".concat(tableroInicio.dump().concat("\n")));
}
}
public Mat processFrame(Mat inputFrame) {
Log.i(TAG, "Acondicionamiento");
Mat region = _adquisicion.processFrame(inputFrame);
Log.i(TAG, "TYPE=" + (inputFrame.type() == CvType.CV_8UC4));
Log.i(TAG, "CHANNEL=" + (inputFrame.channels() == 4));
switch (_estado) {
case ARMANDO_TABLERO:
Log.i(TAG, "Armando Tablero");
Mat nuevo = _modelador.getMatrizFichas(region);
if (_logica.validarTableroArmado(nuevo)) {
iniciarJuego();
}
break;
case JUGANDO:
Log.i(TAG, "Jugando");
if (_detectorOclusion.hayNuevoTableroValido(region)) {
Log.i(TAG, "Tablero Valido");
// ask the modeler for the board occupancy matrix
Mat aux = _modelador.getMatrizFichas(region);
// Log.i(TAG, "MATRIZ=" + aux.dump());
// region = _modelador.dibujarEscaquesSobel(2);
if (_logica.validarNuevoTablero(aux)) {
if (_salida == OK) {
_client.SendData("Nuevo Tablero Valido = ["
+ String.valueOf(_cantJugada) + "]\n");
_cantJugada++;
_client.SendData(" ".concat(aux.dump().concat("\n")));
}
}
}
break;
}
if (_debug) {
Log.i(TAG, "DEBUG");
Mat tmp = Mat.zeros(inputFrame.size(), CvType.CV_8UC4);
Mat matTMP = tmp.submat(Calibracion.getRectROI());
region.copyTo(matTMP);
return tmp;
}
return inputFrame;
}
public void destroy() {
if (_client != null) {
_client.Stop();
}
}
}
package uy.edu.fing.chesstrack.modulovision.imgproc;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;
import android.util.Log;
public class EcualizarImagen implements ImgProcInterface {
private static final String TAG = "CHESSTRACK::Ecualizar";
@Override
public Mat procesarImagen(Mat inputFrame) {
//List<Mat> channels = new ArrayList<Mat>();
Mat img_hist_equalized = new Mat();
Log.i(TAG, "processing Frame - INI");
Imgproc.cvtColor(inputFrame, img_hist_equalized, Imgproc.COLOR_BGR2GRAY);
Imgproc.equalizeHist(img_hist_equalized, img_hist_equalized);
Imgproc.cvtColor(img_hist_equalized, inputFrame, Imgproc.COLOR_GRAY2BGR);
/*// equalize each channel separately
Core.split(inputFrame, channels); // split the image into channels
//Imgproc.equalizeHist(channels.get(0), channels.get(0));
Imgproc.equalizeHist(channels.get(1), channels.get(1));
//Imgproc.equalizeHist(channels.get(2), channels.get(2));
Core.merge(channels, inputFrame);
*/
Log.i(TAG, "processing Frame - FIN");
return img_hist_equalized;
}
}
package uy.edu.fing.chesstrack.modulovision.imgproc;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import uy.edu.fing.chesstrack.modulovision.Calibracion;
import android.util.Log;
public class Homografia implements ImgProcInterface {
private static final String TAG = "CHESSTRACK::Homografia";
private final Mat _matrizTransformada;
private final Rect _rectROI;
private int _media;
private int _offset;
public Homografia() throws Exception {
super();
MatOfPoint2f vertices = Calibracion.getInstance().getVertices();
if (vertices != null){
Mat src = new Mat(4,1,CvType.CV_32FC2);
Mat dst = new Mat(4,1,CvType.CV_32FC2);
Point p1, p2, p3, p4;
Point[] ret = getBestPoints(vertices, Calibracion.getInstance().get_sizeFrame());
Log.i(TAG, "Best Points= " + ret.toString());
p1 = ret[0];
p2 = ret[1];
p3 = ret[2];
p4 = ret[3];
// compute the length of each edge joining the outermost inner corners
// and take the average of those four distances
double d12 = Math.sqrt(Math.pow((p1.x- p2.x),2) + Math.pow((p1.y- p2.y),2));
double d23 = Math.sqrt(Math.pow((p2.x- p3.x),2) + Math.pow((p2.y- p3.y),2));
double d34 = Math.sqrt(Math.pow((p3.x- p4.x),2) + Math.pow((p3.y- p4.y),2));
double d14 = Math.sqrt(Math.pow((p4.x- p1.x),2) + Math.pow((p4.y- p1.y),2));
_media = (int) Math.floor( Math.round((d12+d23+d34+d14)/4));
// the offset is the size of two cells (to cover the first square and leave a border around the board)
_offset = 2*(_media/6);
Log.i(TAG, "media= " + _media);
Log.i(TAG, "offset= " + _offset);
src.put(0,0, (int)p1.x,(int)p1.y, (int)p2.x,(int)p2.y, (int)p3.x,(int)p3.y, (int)p4.x,(int)p4.y);
dst.put(0,0, _offset,_offset, _offset+_media,_offset, _offset+_media,_offset+_media, _offset,_offset+_media );
_rectROI = new Rect(new Point(0,0), new Point(2*_offset+_media,2*_offset+_media));
Calibracion.getInstance();
Calibracion.setRectROI(_rectROI);
Log.i(TAG, "rectROI size= " + _rectROI.size());
_matrizTransformada = Imgproc.getPerspectiveTransform(src,dst);
} else {
throw new Exception(TAG + ": the board corners have not been computed yet (run calibration first)");
}
}
/**
* Applies the homography and crops the image to the board ROI.
*/
@Override
public Mat procesarImagen(Mat inputFrame) {
Log.i(TAG, "processing Frame - INI");
double x = inputFrame.size().width;
double y = inputFrame.size().height;
if (x < _rectROI.size().width){
x = _rectROI.size().width;
}
if (y < _rectROI.size().height){
y = _rectROI.size().height;
}
Mat aux = new Mat();
// warp with the perspective matrix computed in the constructor
Imgproc.warpPerspective(inputFrame,aux, _matrizTransformada, new Size(x,y));
Mat tmp = Mat.zeros(_rectROI.size(), CvType.CV_8UC4);
(aux.submat(_rectROI)).copyTo(tmp);
Log.i(TAG, "processing Frame - FIN");
return tmp;
}
private Point[] getBestPoints(MatOfPoint2f vertices, Size tamanio){
// For each corner of the frame, pick the closest detected chessboard corner.
double d, best_dist;
Point[] ret = new Point[4];
Point[] vect = new Point[4];
vect[0] = new Point(0,0);
vect[1] = new Point(tamanio.width,0);
vect[2] = new Point(tamanio.width,tamanio.height);
vect[3] = new Point(0,tamanio.height);
for (int i = 0; i < 4; i++){
Point aux = vect[i];
best_dist = Double.MAX_VALUE;
for(Point pt : vertices.toList()){
d = Math.sqrt(Math.pow((pt.x- aux.x),2) + Math.pow((pt.y- aux.y),2));
if (d < best_dist){
best_dist = d;
ret[i] = pt;
}
}
}
return ret;
}
}
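A worked example of the edge-averaging and offset arithmetic from the constructor above. The corner coordinates are made up for illustration; only the formulas come from the code. With outermost inner corners spanning roughly 288 px, one cell is about 48 px, the offset is two cells (96 px) and the ROI ends up roughly 480 px per side, consistent with the 480x480 warp used in the MATLAB prototype further down.

import org.opencv.core.Point;

// Illustrative sketch only (not part of the repository): reproduces the media/offset
// computation of the Homografia constructor with hypothetical corner positions.
public class HomografiaArithmeticSketch {
    public static void main(String[] args) {
        Point p1 = new Point(100, 100), p2 = new Point(388, 102);
        Point p3 = new Point(390, 390), p4 = new Point(98, 388);
        double d12 = Math.hypot(p1.x - p2.x, p1.y - p2.y); // ~288 px
        double d23 = Math.hypot(p2.x - p3.x, p2.y - p3.y); // ~288 px
        double d34 = Math.hypot(p3.x - p4.x, p3.y - p4.y); // ~292 px
        double d14 = Math.hypot(p4.x - p1.x, p4.y - p1.y); // ~288 px
        int media = (int) Math.round((d12 + d23 + d34 + d14) / 4); // ~289 px = six cells
        int offset = 2 * (media / 6);                              // ~96 px  = two cells
        int roiSide = 2 * offset + media;                          // ~481 px square ROI
        System.out.println("media=" + media + " offset=" + offset + " roiSide=" + roiSide);
    }
}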
package uy.edu.fing.chesstrack.modulovision.imgproc;
import org.opencv.core.Mat;
/*
* Image-processing classes must implement this interface; each one receives
* an image that may already have been processed by another class in a previous step.
*/
public interface ImgProcInterface {
Mat procesarImagen(Mat inputFrame);
}
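As the interface comment says, every processing stage implements ImgProcInterface so that Adquisicion can chain them. A minimal sketch of an additional stage (hypothetical class, not in the repository) is shown below; it would be registered with _listProc.add(new SuavizarImagen()) in the Adquisicion constructor.

package uy.edu.fing.chesstrack.modulovision.imgproc;

import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;

// Illustrative sketch only (not part of the repository): smooths the frame
// handed on by the previous stage before passing it along.
public class SuavizarImagen implements ImgProcInterface {
    @Override
    public Mat procesarImagen(Mat inputFrame) {
        Mat salida = new Mat();
        Imgproc.GaussianBlur(inputFrame, salida, new Size(3, 3), 0);
        return salida;
    }
}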
Members:
- Nicolás Furquez
- Aylen Ricca
import java.io.*;
import java.net.*;
class TCPClient {
public static void main(String argv[]) throws Exception
{
String sentence;
String modifiedSentence;
BufferedReader inFromUser =
new BufferedReader(new InputStreamReader(System.in));
Socket clientSocket = new Socket("192.168.1.21", 5555);
DataOutputStream outToServer =
new DataOutputStream(clientSocket.getOutputStream());
BufferedReader inFromServer =
new BufferedReader(new
InputStreamReader(clientSocket.getInputStream()));
sentence = inFromUser.readLine();
outToServer.writeBytes(sentence + '\n');
modifiedSentence = inFromServer.readLine();
System.out.println("FROM SERVER: " + modifiedSentence);
clientSocket.close();
}
}
import java.io.*;
import java.net.*;
class TCPServer {
public static void main(String argv[]) throws Exception
{
String clientSentence;
String capitalizedSentence;
ServerSocket welcomeSocket = new ServerSocket(5556);
Socket connectionSocket = welcomeSocket.accept();
BufferedReader inFromClient =
new BufferedReader(new InputStreamReader(connectionSocket.getInputStream()));
DataOutputStream outToClient =
new DataOutputStream(connectionSocket.getOutputStream());
while(true) {
clientSentence = inFromClient.readLine();
//capitalizedSentence = clientSentence.toUpperCase() + '\n';
//outToClient.writeBytes(capitalizedSentence);
if (clientSentence != null){
System.out.println(clientSentence);
}
}
}
}
clc
close all
% Connect to a camera and grab one frame
camera = cv.VideoCapture(1);
pause(4);
% get frame
frame = camera.read;
%frame = imread('dd.png');
if (size(frame,1) ~= 0)
% save frame
imwrite(frame, 'frame0.png');
% show frame
figure,
imshow(frame);
patternSize = [7,7];
corners = cv.findChessboardCorners(frame, patternSize);
wind = cv.drawChessboardCorners(frame, patternSize, corners);
% show frame
figure,
imshow(wind);
% save frame
imwrite(wind, 'frame0esq.png');
% move the corners into a plain numeric array
sizeV = size(corners,2);
esquinas = zeros(sizeV,2);
for i=1:sizeV
esquinas(i,1) = corners{i}(1);
esquinas(i,2) = corners{i}(2);
end
% show the corners one by one, in detection order
%{
e = round(esquinas);
orden = wind;
for i=1:sizeV
orden(e(i,2)-5:e(i,2)+5,e(i,1)-5:e(i,1)+5,:) = 0;
figure,
imshow(orden);
end
%}
% mark the four outermost inner corners
%{
e = round(esquinas);
externas = wind;
externas(e(1,2)-5:e(1,2)+5,e(1,1)-5:e(1,1)+5,:) = 0;
externas(e(7,2)-5:e(7,2)+5,e(7,1)-5:e(7,1)+5,:) = 0;
externas(e(43,2)-5:e(43,2)+5,e(43,1)-5:e(43,1)+5,:) = 0;
externas(e(49,2)-5:e(49,2)+5,e(49,1)-5:e(49,1)+5,:) = 0;
figure,
imshow(externas);
%}
e = round(esquinas);
% distances between the outer corners and their mean
%{
d17 = sqrt((e(1,1)-e(7,1))^2 + (e(1,2)-e(7,2))^2);
d749 = sqrt((e(7,1)-e(49,1))^2 + (e(7,2)-e(49,2))^2);
d143 = sqrt((e(1,1)-e(43,1))^2 + (e(1,2)-e(43,2))^2);
d4349 = sqrt((e(43,1)-e(49,1))^2 + (e(43,2)-e(49,2))^2);
media = round(mean([d17,d749,d143,d4349]));
offset = 2*(media/7);
%}
offset = 48*2;
media = 48*6;
a = [e(1,:);e(7,:);e(43,:);e(49,:)];
b = [ offset,offset ; offset+media,offset ; offset,offset+media ; offset+media,offset+media ];
matrizTransformada = cv.getPerspectiveTransform(a,b);
transformada = cv.warpPerspective(frame, matrizTransformada,'DSize', [480,480]);
figure,
imshow(transformada);
imwrite(transformada, 'framehomogra.png');
% mark a grid of points over the warped board
%{
e = 20:20:180;
cuadricula = transformada;
tablero = zeros(81,2);
for i=1:9
for j=1:9
cuadricula(e(i):e(i),e(j):e(j),:) = 0;
cuadricula(e(i):e(i),e(j):e(j),3) = 255;
tablero((i-1)*9+j,1) = e(i);
tablero((i-1)*9+j,2) = e(j);
end
end
figure,
imshow(cuadricula);
imwrite(cuadricula, 'cuadricula'+i+'.png');
%}
%{
outputString = sprintf('I am %d years old', n)
t2 = dlmread('tablero2.txt');
dlmwrite('tablero2.txt', transformada);
%}
%{
edges = cv.Canny(transformada, 60);
figure,
imshow(edges);
imwrite(edges, 'edges.png');
%}
end
%%%%%%%%%%%% ----- FIN ----- %%%%%%%%%%%%
cmake_minimum_required(VERSION 2.8)
project( SubsBackground )
find_package( OpenCV REQUIRED )
add_executable( SubsBackground SubsBackground.cpp )
target_link_libraries( SubsBackground ${OpenCV_LIBS} )
project( Corners )
add_executable( Corners Corners.cpp )
target_link_libraries( Corners ${OpenCV_LIBS} )
#include <string>
#include <iostream>
#include <vector>
#include "opencv2/opencv.hpp"
using namespace std;
using namespace cv;
//----------------------------------------------------------
// MAIN
//----------------------------------------------------------
int main(int argc, char* argv[])
{
// src image
Mat src;
// dst image
Mat dst;
// Image loading
namedWindow("result");
namedWindow("src");
src=imread("image.png",0);
if(src.empty())
{
cout << "could not read image.png" << endl;
return -1;
}
cv::cvtColor(src,dst,cv::COLOR_GRAY2BGR);
// detect up to 100 strong corners and draw them on the colour copy
vector<cv::Point2f> corners;
cv::goodFeaturesToTrack(src,corners,100,0.01,20.0);
for(size_t i=0;i<corners.size();++i)
{
circle(dst,corners[i],3,Scalar(255,0,0),-1,CV_AA);
}
imshow("src",src);
imshow("result",dst);
//----------------------------------------------------------
// Wait key press
//----------------------------------------------------------
while (waitKey(0) != 'q');
destroyAllWindows();
return 0;
}
/**
* @file bg_sub.cpp
* @brief Background subtraction tutorial sample code
* @author Domenico D. Bloisi
*/
//opencv
#include <opencv2/core/core.hpp>
#include <opencv2/opencv.hpp>
#include <opencv/cv.h>
#include <opencv2/highgui/highgui.hpp>
//#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/video/background_segm.hpp>
//C
#include <stdio.h>
//C++
#include <iostream>
#include <sstream>
using namespace cv;
using namespace std;
// Global variables
Mat frame; //current frame
Mat fgMaskMOG; //fg mask generated by MOG method
Mat fgMaskMOG2; //fg mask fg mask generated by MOG2 method
Ptr<BackgroundSubtractorMOG> pMOG; //MOG Background subtractor
Ptr<BackgroundSubtractorMOG2> pMOG2; //MOG2 Background subtractor
Mat morphKernel;
int keyboard; //input from keyboard
/** Function Headers */
void help();
void processVideo(char* videoFilename);
void processImages(char* firstFrameFilename);
void help()
{
cout
<< "--------------------------------------------------------------------------" << endl
<< "This program shows how to use background subtraction methods provided by " << endl
<< " OpenCV. You can process both videos (-vid) and images (-img)." << endl
<< endl
<< "Usage:" << endl
<< "./bs {-vid <video filename>|-img <image filename>}" << endl
<< "for example: ./bs -vid video.avi" << endl
<< "or: ./bs -img /data/images/1.png" << endl
<< "--------------------------------------------------------------------------" << endl
<< endl;
}
/**
* @function main
*/
int main(int argc, char* argv[])
{
//print help information
help();
//check for the input parameter correctness
if(argc != 3) {
cerr <<"Incorret input list" << endl;
cerr <<"exiting..." << endl;
return EXIT_FAILURE;
}
//create GUI windows
namedWindow("Frame");
//namedWindow("FG Mask MOG");
namedWindow("FG Mask MOG 2");
//create Background Subtractor objects
// pMOG= new BackgroundSubtractorMOG(200,5,0.7,0); //MOG approach
pMOG= new BackgroundSubtractorMOG(); //MOG approach
pMOG2 = new BackgroundSubtractorMOG2(); //MOG2 approach
// pMOG = createBackgroundSubtractorMOG(); //MOG approach
// pMOG2 = createBackgroundSubtractorMOG2(); //MOG2 approach
morphKernel = getStructuringElement(CV_SHAPE_RECT, Size(3, 3), Point(1, 1));
if(strcmp(argv[1], "-vid") == 0) {
//input data coming from a video
processVideo(argv[2]);
}
else {
//error in reading input parameters
cerr <<"Please, check the input parameters." << endl;
cerr <<"Exiting..." << endl;
return EXIT_FAILURE;
}
//destroy GUI windows
destroyAllWindows();
return EXIT_SUCCESS;
}
/**
* @function processVideo
*/
void processVideo(char* videoFilename) {
int estadoANTERIOR = 0;
int estadoACTUAL = 0;
int c = 0;
char buffer [15];
//create the capture object
VideoCapture capture(videoFilename);
if(!capture.isOpened()){
//error in opening the video input
cerr << "Unable to open video file: " << videoFilename << endl;
exit(EXIT_FAILURE);
}
while( (char)keyboard != 'p' ){
//read the current frame
if(!capture.read(frame)) {
cerr << "Unable to read next frame." << endl;
cerr << "Exiting..." << endl;
exit(EXIT_FAILURE);
}
imshow("Frame", frame);
keyboard = waitKey( 30 );
}
//read input data. ESC or 'q' for quitting
while( (char)keyboard != 'q' && (char)keyboard != 27 ){
if ((char)keyboard == 's'){
pMOG= new BackgroundSubtractorMOG(); //MOG approach
pMOG2 = new BackgroundSubtractorMOG2(); //MOG2 approach
}
//read the current frame
if(!capture.read(frame)) {
cerr << "Unable to read next frame." << endl;
cerr << "Exiting..." << endl;
exit(EXIT_FAILURE);
}
//update the background model
pMOG->operator()(frame, fgMaskMOG,0);
pMOG2->operator()(frame, fgMaskMOG2,0);
erode(fgMaskMOG2, fgMaskMOG2, morphKernel);
cv::threshold(fgMaskMOG2, fgMaskMOG2, 200, 255, THRESH_BINARY);
Scalar suma = cv::sum(fgMaskMOG2);
// cerr << suma.val[0] << endl;
if (suma.val[0] > 1250000){
// estado oclusion
estadoANTERIOR = estadoACTUAL;
estadoACTUAL = 1;
cerr << "MANO !!!\n";
rectangle(frame, cv::Point(10, 2), cv::Point(100,20),cv::Scalar(255,255,255), -1);
putText(frame, "MANO !!!", cv::Point(15, 15),FONT_HERSHEY_SIMPLEX, 0.5 , cv::Scalar(0,0,0));
} else {
estadoANTERIOR = estadoACTUAL;
if (estadoANTERIOR == 1){
// estado transicion
estadoACTUAL = 3;
}
if (estadoANTERIOR == 3){
// estado estable
estadoACTUAL = 2;
imshow("ESTABLE", frame);
int b = sprintf (buffer, "tablero%d.png", c);
imwrite(buffer, frame);
c++;
pMOG= new BackgroundSubtractorMOG(); //MOG approach
pMOG2 = new BackgroundSubtractorMOG2(); //MOG2 approach
}
cerr << "ESTABLE !!!\n";
rectangle(frame, cv::Point(10, 2), cv::Point(100,20),cv::Scalar(255,255,255), -1);
putText(frame, "ESTABLE !!!", cv::Point(15, 15),FONT_HERSHEY_SIMPLEX, 0.5 , cv::Scalar(0,0,0));
}
// dilate(fgMaskMOG, fgMaskMOG, morphKernel, 1);
// pMOG->apply(frame, fgMaskMOG);
// pMOG2->apply(frame, fgMaskMOG2);
//get the frame number and write it on the current frame
/*
stringstream ss;
rectangle(frame, cv::Point(10, 2), cv::Point(100,20),
cv::Scalar(255,255,255), -1);
ss << capture.get(CAP_PROP_POS_FRAMES);
string frameNumberString = ss.str();
putText(frame, frameNumberString.c_str(), cv::Point(15, 15),
FONT_HERSHEY_SIMPLEX, 0.5 , cv::Scalar(0,0,0));
*/
//show the current frame and the fg masks
imshow("Frame", frame);
// imshow("FG Mask MOG", fgMaskMOG);
imshow("FG Mask MOG 2", fgMaskMOG2);
//get the input from the keyboard
keyboard = waitKey( 30 );
}
//delete capture object
capture.release();
}
/**************************
*
* GENERAL
*
**************************/
.camera_wrap img,
.camera_wrap ol, .camera_wrap ul, .camera_wrap li,
.camera_wrap table, .camera_wrap tbody, .camera_wrap tfoot, .camera_wrap thead, .camera_wrap tr, .camera_wrap th, .camera_wrap td,
.camera_thumbs_wrap a, .camera_thumbs_wrap img,
.camera_thumbs_wrap ol, .camera_thumbs_wrap ul, .camera_thumbs_wrap li,
.camera_thumbs_wrap table, .camera_thumbs_wrap tbody, .camera_thumbs_wrap tfoot, .camera_thumbs_wrap thead, .camera_thumbs_wrap tr, .camera_thumbs_wrap th, .camera_thumbs_wrap td {
background: none;
border: 0;
font: inherit;
font-size: 100%;
margin: 0;
padding: 0;
vertical-align: baseline;
list-style: none
}
.camera_wrap {
display: none;
float: none;
position: relative;
z-index: 0;
}
.camera_wrap img {
max-width: none !important;
}
.camera_fakehover {
height: 100%;
min-height: 40px;
position: relative;
width: 100%;
z-index: 1;
}
.camera_wrap {
width: 100%;
height:428px;
}
.camera_src {
display: none;
}
.cameraCont, .cameraContents {
height: 100%;
position: relative;
width: 100%;
z-index: 1;
}
.cameraSlide {
bottom: 0;
left: 0;
position: absolute;
right: 0;
top: 0;
width: 100%;
}
.cameraContent {
bottom: 0;
display: none;
left: 0;
position: absolute;
right: 0;
top: 0;
width: 100%;
}
.camera_target {
bottom: 0;
height: 100%;
left: 0;
overflow: hidden;
position: absolute;
right: 0;
text-align: left;
top: 0;
width: 100%;
z-index: 0;
}
.camera_overlayer {
bottom: 0;
height: 100%;
left: 0;
overflow: hidden;
position: absolute;
right: 0;
top: 0;
width: 100%;
z-index: 0;
}
.camera_target_content {
bottom: 0;
left: 0;
overflow: visible;
position: absolute;
right: 0;
top: 0;
z-index: 2;
}
.camera_target_content .camera_link {
display: block;
height: 100%;
text-decoration: none;
}
.camera_loader {
background: #fff url(../img/camera-loader.gif) no-repeat center;
background: rgba(255, 255, 255, 0.9) url(../img/camera-loader.gif) no-repeat center;
border: 1px solid #ffffff;
-webkit-border-radius: 18px;
-moz-border-radius: 18px;
border-radius: 18px;
height: 36px;
left: 50%;
overflow: hidden;
position: absolute;
margin: -18px 0 0 -18px;
top: 50%;
width: 36px;
z-index: 3;
}
.camera_clear {
display: block;
clear: both;
}
.showIt {
display: none;
}
.camera_clear {
clear: both;
display: block;
height: 1px;
margin: 0px;
position: relative;
}
.camera_caption {
}
.camera_caption > div {
}
/* Add additional stylesheets below
-------------------------------------------------- */
/*
Bootstrap's documentation styles
Special styles for presenting Bootstrap's documentation and examples
*/
/* Tweak navbar brand link to be super sleek
-------------------------------------------------- */
#panel .navbar {
font-size: 13px;
}
/* Change the docs' brand */
#panel .navbar .brand {
padding-right: 0;
padding-left: 0;
margin-left: 20px;
float: right;
font-weight: bold;
color: #000;
text-shadow: 0 1px 0 rgba(255,255,255,.1), 0 0 30px rgba(255,255,255,.125);
-webkit-transition: all .2s linear;
-moz-transition: all .2s linear;
transition: all .2s linear;
}
#panel .navbar .brand:hover {
text-decoration: none;
text-shadow: 0 1px 0 rgba(255,255,255,.1), 0 0 30px rgba(255,255,255,.4);
color: #fff;
}
#advanced span.trigger{display:block;position:absolute; background:url(../img/tm/pro_button.png) 0 0 no-repeat; width:42px; height:42px; right:0; top:41px; cursor:pointer;}
#advanced span.trigger strong{display:block; background: url(../img/tm/plus-minus.png) 0 0 no-repeat; width:11px; height:11px; position:absolute; right:7px; bottom:24px; opacity:0;}
#advanced.closed span.trigger strong{opacity:1; }
#advanced span.trigger em{display:block; background: url(../img/tm/plus-minus.png) -11px 0 no-repeat; width:11px; height:11px; position:absolute; right:7px; bottom:24px;}
#panel li {
line-height: 20px;
}
#panel .dropdown {
position: relative;
}
#panel #advanced {
position: relative;
}
#panel .dropdown-toggle {
*margin-bottom: -3px;
}
#panel .dropdown-toggle:active,
#panel .open .dropdown-toggle {
outline: 0;
}
#panel .caret {
display: inline-block;
width: 0;
height: 0;
vertical-align: top;
border-top: 4px solid #000000;
border-right: 4px solid transparent;
border-left: 4px solid transparent;
content: "";
}
#panel .dropdown .caret {
margin-top: 8px;
margin-left: 2px;
}
#panel .dropdown-menu {
position: absolute;
top: 100%;
left: 0;
z-index: 1000;
display: none;
float: left;
min-width: 160px;
padding: 5px 0;
margin: 2px 0 0;
list-style: none;
background-color: #ffffff;
border: 1px solid #ccc;
border: 1px solid rgba(0, 0, 0, 0.2);
*border-right-width: 2px;
*border-bottom-width: 2px;
-webkit-border-radius: 6px;
-moz-border-radius: 6px;
border-radius: 6px;
-webkit-box-shadow: 0 5px 10px rgba(0, 0, 0, 0.2);
-moz-box-shadow: 0 5px 10px rgba(0, 0, 0, 0.2);
box-shadow: 0 5px 10px rgba(0, 0, 0, 0.2);
-webkit-background-clip: padding-box;
-moz-background-clip: padding;
background-clip: padding-box;
}
#panel .dropdown-menu .divider {
*width: 100%;
height: 1px;
margin: 9px 1px;
*margin: -5px 0 5px;
overflow: hidden;
background-color: #e5e5e5;
border-bottom: 1px solid #ffffff;
}
#panel .dropdown-menu a {
display: block;
padding: 3px 20px;
clear: both;
font-weight: normal;
line-height: 20px;
color: #333333;
white-space: nowrap;
}
#panel .dropdown-menu li a {
padding-left: 15px;
}
#panel .dropdown-menu ul {
margin-left: 40px;
}
#panel .dropdown-menu ul li a {
padding-left: 5px;
}
#advanced .dropdown-menu li li a {
display: inline-block\0/;
}
#panel .dropdown-menu li > a:hover,
#panel .dropdown-menu li > a:focus,
#panel .dropdown-submenu:hover > a {
color: #ffffff;
text-decoration: none;
background-color: #0088cc;
background-color: #0081c2;
background-image: -moz-linear-gradient(top, #0088cc, #0077b3);
background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#0088cc), to(#0077b3));
background-image: -webkit-linear-gradient(top, #0088cc, #0077b3);
background-image: -o-linear-gradient(top, #0088cc, #0077b3);
background-image: linear-gradient(to bottom, #0088cc, #0077b3);
background-repeat: repeat-x;
filter: progid:dximagetransform.microsoft.gradient(startColorstr='#ff0088cc', endColorstr='#ff0077b3', GradientType=0);
}
#panel .dropdown-menu .active > a,
#panel .dropdown-menu .active > a:hover {
color: #ffffff;
text-decoration: none;
background-color: #0088cc;
background-color: #0081c2;
background-image: linear-gradient(to bottom, #0088cc, #0077b3);
background-image: -moz-linear-gradient(top, #0088cc, #0077b3);
background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#0088cc), to(#0077b3));
background-image: -webkit-linear-gradient(top, #0088cc, #0077b3);
background-image: -o-linear-gradient(top, #0088cc, #0077b3);
background-repeat: repeat-x;
outline: 0;
filter: progid:dximagetransform.microsoft.gradient(startColorstr='#ff0088cc', endColorstr='#ff0077b3', GradientType=0);
}
#panel .dropdown-menu .disabled > a,
#panel .dropdown-menu .disabled > a:hover {
color: #999999;
}
#panel .dropdown-menu .disabled > a:hover {
text-decoration: none;
cursor: default;
background-color: transparent;
}
#panel .open {
*z-index: 1000;
}
#panel .open > .dropdown-menu {
display: block;
}
#panel .pull-right > .dropdown-menu {
right: 0;
left: auto;
}
#panel .dropup .caret,
#panel .navbar-fixed-bottom .dropdown .caret {
border-top: 0;
border-bottom: 4px solid #000000;
content: "\2191";
}
#panel .dropup .dropdown-menu,
#panel .navbar-fixed-bottom .dropdown .dropdown-menu {
top: auto;
bottom: 100%;
margin-bottom: 1px;
}
#panel .navbar-inverse .nav li.dropdown > .dropdown-toggle .caret {
border-top-color: #999999;
border-bottom-color: #999999;
}
#panel .navbar-inverse .nav li.dropdown.open > .dropdown-toggle .caret,
#panel .navbar-inverse .nav li.dropdown.active > .dropdown-toggle .caret,
#panel .navbar-inverse .nav li.dropdown.open.active > .dropdown-toggle .caret {
border-top-color: #ffffff;
border-bottom-color: #ffffff;
}
#panel .navbar .nav > li > a {
padding: 10px 15px;
}
@media (min-width: 200px) and (max-width: 979px) {
#panel #advanced {
position: static;
}
#panel #advanced span.trigger {
display: none;
}
#panel .navbar-fixed-top {
margin-bottom: 0;
}
#panel .navbar-inner {
padding: 0 5px !important;
}
#panel .navbar .brand {
float: left;
margin-left: 10px;
}
#panel .navbar .nav > li > a {
padding: 9px 15px;
}
#panel .dropdown-menu {
position: static;
display: block;
border-radius:0px;
background: none;
border:none;
box-shadow: none;
-webkit-box-shadow: none;
-moz-box-shadow: none;
width: 100%;
}
#panel .dropdown-menu li a {
font-weight: bold;
font-size: 13px;
color: #555;
}
#panel .dropdown-menu li {
padding:5px 0 5px 15px;
}
#panel .navbar-fixed-top, .navbar-fixed-bottom {
margin: 0 !important;
}
}