Especificação



Baixar 2,81 Mb.
Página 11/24
Encontro 01.07.2018
Tamanho 2,81 Mb.
1   ...   7   8   9   10   11   12   13   14   ...   24

A2 Classes para leitura do arquivo de vídeo YUV




      1. A2.1 Classe YUVParser



package br.ufsc.inf.guiga.media.parser.video;
import java.io.IOException;
import javax.media.BadHeaderException;

import javax.media.Control;

import javax.media.Demultiplexer;

import javax.media.IncompatibleSourceException;

import javax.media.Time;

import javax.media.Track;

import javax.media.control.FormatControl;

import javax.media.format.YUVFormat;

import javax.media.protocol.ContentDescriptor;

import javax.media.protocol.DataSource;

import javax.media.protocol.Positionable;

import javax.media.protocol.PullSourceStream;
import br.ufsc.inf.guiga.media.Global;
import com.sun.media.parser.BasicPullParser;
/**
 * YCbCr raw video file parser. Extracts the single video track from a raw
 * (headerless) YUV stream.
 *
 * @author Guilherme Ferreira
 */
public class YUVParser extends BasicPullParser implements Positionable {

    private static final String PLUGIN_NAME = "YCbCr Parser";

    /** Index of the only track (video) within {@link #tracks}. */
    protected static final int VIDEO_TRACK = 0;

    /** Content descriptors this parser accepts ("video.yuv"). */
    protected ContentDescriptor[] supportedFormat;
    /** Track array; only position {@link #VIDEO_TRACK} is ever populated. */
    protected Track[] tracks;
    /** Pull stream supplying the raw YUV bytes; set in {@link #setSource}. */
    protected PullSourceStream stream;

    /**
     * Default constructor.
     */
    public YUVParser() {
        super();
        tracks = new Track[1];
        supportedFormat = new ContentDescriptor[] { new ContentDescriptor("video.yuv") };
        stream = null;
        // NOTE(review): the controls field is inherited from BasicPullParser; the
        // two slots appear to be reserved but are never filled here — confirm.
        controls = new Control[2];
    }

    /**
     * Gets the duration of this media stream when played at the default rate.
     * A raw YUV file holds a single video track, so the stream duration is
     * simply that track's duration.
     *
     * @return a Time object that represents the duration, or DURATION_UNKNOWN
     *         if the duration cannot be determined.
     */
    public Time getDuration() {
        return tracks[VIDEO_TRACK].getDuration();
    }

    /**
     * Gets the current media time. This is the stream position that the next
     * readFrame will read.
     *
     * @return the current position in the media stream as a Time object.
     */
    public Time getMediaTime() {
        YUVVideoTrack videoTrack = (YUVVideoTrack) tracks[VIDEO_TRACK];
        return videoTrack.mapFrameToTime(videoTrack.getCurrentFrame());
    }

    /**
     * Sets the stream position (media time) to the specified Time, rounded to
     * a whole frame.
     *
     * @param where    the new stream position, specified as a Time.
     * @param rounding the rounding technique to be used: RoundUp, RoundDown, or
     *                 RoundNearest (currently ignored; positions always round
     *                 to the nearest frame via mapTimeToFrame).
     * @return the stream position that was actually set, as a Time object.
     */
    public Time setPosition(Time where, int rounding) {
        YUVVideoTrack videoTrack = (YUVVideoTrack) tracks[VIDEO_TRACK];
        int frame = videoTrack.mapTimeToFrame(where);
        videoTrack.setCurrentFrame(frame);
        // The Positionable contract (and this method's documentation) requires
        // returning the rounded position actually set — not the old position.
        return videoTrack.mapFrameToTime(frame);
    }

    /**
     * Sets the media source this MediaHandler should use to obtain content and
     * builds the single video track from the source's YUVFormat.
     *
     * @param source the DataSource used by this MediaHandler.
     * @throws java.io.IOException thrown if there is an error using the DataSource.
     * @throws javax.media.IncompatibleSourceException thrown if this MediaHandler
     *         cannot make use of the DataSource (no FormatControl exposing a
     *         YUVFormat).
     */
    public void setSource(DataSource source)
            throws IOException, IncompatibleSourceException
    {
        super.setSource(source);
        stream = (PullSourceStream) streams[0];

        // The frame geometry comes from the source's FormatControl; without it
        // we cannot parse the stream, so reject the source explicitly instead
        // of failing later with a NullPointerException.
        String controlName = FormatControl.class.getName();
        FormatControl control = (FormatControl) source.getControl(controlName);
        if (control == null || !(control.getFormat() instanceof YUVFormat)) {
            throw new IncompatibleSourceException(
                    "DataSource does not expose a FormatControl with a YUVFormat");
        }
        YUVFormat yuvFormat = (YUVFormat) control.getFormat();

        // Use long arithmetic so multi-gigabyte files do not overflow before
        // the division. NOTE(review): getContentLength may return
        // LENGTH_UNKNOWN (-1) for unbounded sources — confirm callers never
        // hand such a source to this parser.
        long totalSize = stream.getContentLength();
        int frameSize = YUVFormatHandler.getFrameSize(yuvFormat);
        int maxFrameNumber = (int) (totalSize / frameSize);
        float fps = yuvFormat.getFrameRate();
        Time duration = new Time(maxFrameNumber / fps);
        Time startTime = new Time(0);
        int numBuffer = 1;
        boolean enabled = true;

        // Publish the frame count application-wide.
        Global.getInstance().setTotalFrameNumber(maxFrameNumber);

        // Create the video track.
        YUVVideoTrack videoTrack = new YUVVideoTrack(this, yuvFormat, enabled, duration,
                startTime, numBuffer, frameSize, stream);
        tracks[VIDEO_TRACK] = videoTrack;
    }

    /**
     * Gets the name of this plug-in as a human-readable string.
     *
     * @return a String that contains the descriptive name of the plug-in.
     */
    public String getName() {
        return PLUGIN_NAME;
    }

    /**
     * @return a list of all of the input content descriptors that this
     *         {@link Demultiplexer} supports.
     */
    public ContentDescriptor[] getSupportedInputContentDescriptors() {
        return supportedFormat;
    }

    /**
     * Retrieves the individual tracks that the media stream contains. A raw
     * YUV stream has no header to parse: the single video track is built
     * eagerly in {@link #setSource}, so this method simply returns it.
     *
     * @return an array holding the single video track.
     * @throws java.io.IOException if there is an error when trying to read
     *         from the DataSource.
     * @throws javax.media.BadHeaderException if the header information is
     *         incomplete or inconsistent (never thrown here — there is no header).
     */
    public Track[] getTracks() throws IOException, BadHeaderException {
        return tracks;
    }
}


      1. A2.2 Classe YUVVideoTrack



package br.ufsc.inf.guiga.media.parser.video;
import javax.media.Buffer;

import javax.media.Time;

import javax.media.Track;

import javax.media.format.VideoFormat;

import javax.media.format.YUVFormat;

import javax.media.protocol.PullSourceStream;
import com.sun.media.parser.BasicTrack;
/**
 * YUV Video Track.
 *
 * Since a YUV raw video is composed only of video, this is the only track
 * available.
 *
 * @author Guilherme Ferreira
 */
public class YUVVideoTrack extends BasicTrack {

    /** Zero-based index of the next frame {@link #readFrame} will deliver. */
    protected int currentFrame;
    /** Total number of frames in the stream, derived from duration and fps. */
    protected int numberOfFrames;
    /** Size of one frame in bytes; one readFrame consumes exactly this much. */
    protected int dataSize;

    /**
     * Create a track to handle YUV video.
     *
     * @param parser     the parser owning this video track.
     * @param format     the YUV video format.
     * @param enabled    whether the track starts enabled.
     * @param duration   total video duration.
     * @param startTime  start time of the track.
     * @param numBuffers number of buffers used by the parser.
     * @param dataSize   frame size in bytes. Each time readFrame is called,
     *                   this amount of bytes is read from the input stream.
     * @param stream     the stream supplying raw frame data.
     */
    public YUVVideoTrack(
            YUVParser parser,
            YUVFormat format,
            boolean enabled,
            Time duration,
            Time startTime,
            int numBuffers,
            int dataSize,
            PullSourceStream stream)
    {
        super(parser, format, enabled, duration, startTime, numBuffers, dataSize, stream);
        float fps = ((VideoFormat) getFormat()).getFrameRate();
        this.numberOfFrames = (int) (fps * duration.getSeconds());
        this.currentFrame = 0;
        this.dataSize = dataSize;
    }

    /**
     * Gets the Time that corresponds to the specified frame number.
     *
     * @param frameNumber zero-based frame index.
     * @return a Time object that corresponds to the specified frame. If the
     *         mapping cannot be established, TIME_UNKNOWN is returned.
     */
    public Time mapFrameToTime(int frameNumber) {
        if ((frameNumber < 0) || (frameNumber >= numberOfFrames)) {
            return Track.TIME_UNKNOWN;
        }
        double time = frameNumber / ((VideoFormat) getFormat()).getFrameRate();
        return new Time(time);
    }

    /**
     * Converts the given media time to the corresponding frame number.
     *
     * The frame returned is the nearest frame that has a media time less than
     * or equal to the given media time; times beyond the last frame clamp to
     * the last frame.
     *
     * @param mediaTime the input media time for the conversion.
     * @return the converted frame number for the given media time. If the
     *         conversion fails (negative time), FRAME_UNKNOWN is returned.
     */
    public int mapTimeToFrame(Time mediaTime) {
        double time = mediaTime.getSeconds();
        if (time < 0.0) {
            // Use the named constant rather than its literal value
            // (Integer.MAX_VALUE) so the intent is explicit.
            return Track.FRAME_UNKNOWN;
        }
        int frameNumber =
                (int) Math.round(time * ((VideoFormat) getFormat()).getFrameRate());
        if ((frameNumber < 0) || (frameNumber >= numberOfFrames)) {
            // Clamp out-of-range requests to the last valid frame.
            return (numberOfFrames - 1);
        }
        return frameNumber;
    }

    /**
     * Reads the next frame for this Track.
     *
     * @param buffer the {@link Buffer} into which the data is to be read. If
     *               readFrame is successful, buffer.getLength returns the
     *               length of the data that was read.
     */
    public void readFrame(Buffer buffer) {
        // Position the stream at the current frame. Widen to long before the
        // multiplication: the int product overflows for streams over 2 GiB.
        setSeekLocation((long) currentFrame * dataSize);
        currentFrame++;
        // Read the frame data into the buffer.
        super.readFrame(buffer);
    }

    /**
     * @return the total number of frames this video contains, calculated from
     *         the total content size divided by the frame size.
     */
    public int getNumberOfFrames() {
        return numberOfFrames;
    }

    /**
     * @return the next frame to be read by this track, in the range 0 to the
     *         total number of frames.
     */
    public int getCurrentFrame() {
        return currentFrame;
    }

    /**
     * Set the next frame to be read by this Track.
     *
     * @param currentFrame the next frame to be read by readFrame.
     */
    public void setCurrentFrame(int currentFrame) {
        this.currentFrame = currentFrame;
    }
}


      1. A2.3 Classe YUVFormatHandler



package br.ufsc.inf.guiga.media.parser.video;
import java.awt.Dimension;
import javax.media.format.YUVFormat;
/**
 * Contains standard YUV frame dimensions and format utilities.
 *
 * @author Guilherme
 */
public class YUVFormatHandler {

    // Standard video frame dimensions. Declared final: they were previously
    // mutable public statics, which any caller could silently corrupt.
    public static final Dimension SQCIF = new Dimension(128, 96);
    public static final Dimension QCIF = new Dimension(176, 144);
    public static final Dimension CIF = new Dimension(352, 288);
    public static final Dimension _4CIF = new Dimension(704, 576);

    /** Utility class; not instantiable. */
    private YUVFormatHandler() {
    }

    /** @return a SQCIF (128x96) YUVFormat with the given type and frame rate. */
    public static YUVFormat getSQCIF(int yuvType, float frameRate) {
        return getSizeableCIF(SQCIF, yuvType, frameRate);
    }

    /** @return a QCIF (176x144) YUVFormat with the given type and frame rate. */
    public static YUVFormat getQCIF(int yuvType, float frameRate) {
        return getSizeableCIF(QCIF, yuvType, frameRate);
    }

    /** @return a CIF (352x288) YUVFormat with the given type and frame rate. */
    public static YUVFormat getCIF(int yuvType, float frameRate) {
        return getSizeableCIF(CIF, yuvType, frameRate);
    }

    /** @return a 4CIF (704x576) YUVFormat with the given type and frame rate. */
    public static YUVFormat get4CIF(int yuvType, float frameRate) {
        return getSizeableCIF(_4CIF, yuvType, frameRate);
    }

    /**
     * Builds a YUVFormat of arbitrary dimensions.
     *
     * @param size      frame dimensions in pixels.
     * @param yuvType   one of the YUVFormat.YUV_* type constants.
     * @param frameRate frames per second.
     * @return a byte-array YUVFormat with unspecified strides and offsets.
     */
    public static YUVFormat getSizeableCIF(Dimension size, int yuvType, float frameRate) {
        return new YUVFormat(size, 0, YUVFormat.byteArray, frameRate, yuvType, 0, 0, 0,
                0, 0);
    }

    /**
     * Compute the amount of bytes required to store a frame in YCbCr with a
     * given format.
     *
     * NOTE(review): the chroma planes are computed at half width and half
     * height regardless of {@code videoFormat.getYuvType()}, i.e. this method
     * assumes 4:2:0 subsampling. For 4:2:2 or 4:1:1 streams the result would
     * be wrong — confirm only 4:2:0 sources reach this code.
     *
     * @param videoFormat the {@link YUVFormat} of this frame.
     * @return the amount of bytes required to store a YUV frame with such format.
     */
    public static int getFrameSize(YUVFormat videoFormat) {
        int symbolSizeInBytes = 1; // 8 bits for each Y, Cb and Cr sample
        int frameWidth = videoFormat.getSize().width;
        int frameHeight = videoFormat.getSize().height;
        int frameChromaWidth = frameWidth / 2;
        int frameChromaHeight = frameHeight / 2;

        int imgSizeY = frameWidth * frameHeight;
        int imgSizeUV = frameChromaWidth * frameChromaHeight;
        int bytesY = imgSizeY * symbolSizeInBytes;
        int bytesUV = imgSizeUV * symbolSizeInBytes;

        // One luma plane plus two (quarter-area) chroma planes.
        return bytesY + 2 * bytesUV;
    }
}


      1. A2.4 Classe YUVFrameBuffer



package br.ufsc.inf.guiga.media.parser.video;
import javax.media.format.YUVFormat;
import br.ufsc.inf.guiga.media.codec.video.h264.vcl.FrameBuffer;
/**
 * Buffer for a YUV color space video frame.
 *
 * This class holds separate arrays for each YCbCr component, allowing access
 * to the individual components of each pixel.
 *
 * @author Guilherme Ferreira
 */
public class YUVFrameBuffer extends FrameBuffer {

    /** Luma plane, indexed [row][column]. */
    private int[][] Y;
    /** Red-difference chroma plane, indexed [row][column]. */
    private int[][] Cr;
    /** Blue-difference chroma plane, indexed [row][column]. */
    private int[][] Cb;

    /** Chroma plane width in samples. */
    protected int uvWidth;
    /** Chroma plane height in samples. */
    protected int uvHeight;

    /**
     * Creates a YUV frame buffer from a {@link YUVFormat}, deriving width and
     * height for the luma and chroma planes.
     *
     * @param format the {@link YUVFormat} of this frame.
     */
    public YUVFrameBuffer(YUVFormat format) {
        super(format);

        int uvWidth = 0;
        int uvHeight = 0;
        switch (format.getYuvType()) {
        case YUVFormat.YUV_420:
            // 4:2:0 — chroma is subsampled by two in both dimensions.
            uvWidth = format.getSize().width / 2;
            uvHeight = format.getSize().height / 2;
            break;
        // NOTE(review): any other yuvType falls through with 0x0 chroma
        // planes — confirm only YUV_420 sources reach this class.
        }
        init(format.getSize().width, format.getSize().height, uvWidth, uvHeight);
    }

    /** Allocates the component planes with the given luma and chroma sizes. */
    protected void init(int width, int height, int uvWidth, int uvHeight) {
        this.uvWidth = uvWidth;
        this.uvHeight = uvHeight;
        Y = new int[height][width];
        Cr = new int[uvHeight][uvWidth];
        Cb = new int[uvHeight][uvWidth];
    }

    /**
     * @return the chroma frame width.
     */
    public int getUVWidth() {
        return uvWidth;
    }

    /**
     * @return the chroma frame height.
     */
    public int getUVHeight() {
        return uvHeight;
    }

    /**
     * Initializes the Y, Cb and Cr arrays, copying the parameter into the
     * internal buffers.
     *
     * @param data the YCbCr frame. The first part of data is the Y plane, the
     *             second part is Cb, and the last is Cr. Each part's size
     *             depends on the YUV format selected. For example, in 4:2:0
     *             format Y has width * height samples, and Cb and Cr have one
     *             fourth of the Y area each.
     */
    public void setData(byte[] data) {
        int indexY, indexUV; // flat indexes into the packed input
        int cbOffset = getWidth() * getHeight();
        int crOffset = cbOffset + (uvWidth * uvHeight);

        // Luma: vector-to-matrix copy.
        for (int y = 0; y < getHeight(); y++) {
            for (int x = 0; x < getWidth(); x++) {
                indexY = x + y * getWidth();
                // Mask to treat the byte as unsigned (0..255).
                Y[y][x] = (data[indexY] & 0xFF);
            }
        }

        // Chroma: vector-to-matrix copy.
        for (int y = 0; y < getUVHeight(); y++) {
            for (int x = 0; x < getUVWidth(); x++) {
                indexUV = x + y * getUVWidth();
                Cb[y][x] = (data[indexUV + cbOffset] & 0xFF);
                Cr[y][x] = (data[indexUV + crOffset] & 0xFF);
            }
        }
    }

    /**
     * Copies the internal Y, Cb and Cr planes into a packed byte array, the
     * exact inverse of {@link #setData(byte[])}.
     *
     * Previously this method was an unimplemented stub returning null despite
     * its documented contract; it now produces the packed frame.
     *
     * @return a new array holding the Y plane, then Cb, then Cr.
     */
    public byte[] getData() {
        int lumaSize = getWidth() * getHeight();
        int chromaSize = uvWidth * uvHeight;
        byte[] data = new byte[lumaSize + 2 * chromaSize];

        int i = 0;
        for (int y = 0; y < getHeight(); y++) {
            for (int x = 0; x < getWidth(); x++) {
                data[i++] = (byte) Y[y][x];
            }
        }
        for (int y = 0; y < uvHeight; y++) {
            for (int x = 0; x < uvWidth; x++) {
                data[i++] = (byte) Cb[y][x];
            }
        }
        for (int y = 0; y < uvHeight; y++) {
            for (int x = 0; x < uvWidth; x++) {
                data[i++] = (byte) Cr[y][x];
            }
        }
        return data;
    }

    /**
     * Copies the parameter's Y, Cb and Cr planes into this object's internal
     * planes, reallocating them if the source is larger.
     *
     * @param other another {@link YUVFrameBuffer}.
     */
    public void copyData(FrameBuffer other) {
        YUVFrameBuffer otherYUV = (YUVFrameBuffer) other;

        // Grow the luma plane if the source is larger.
        if ((other.getWidth() > getWidth()) || (other.getHeight() > getHeight())) {
            Y = new int[other.getHeight()][other.getWidth()];
        }
        // Grow the chroma planes if the source is larger.
        if ((otherYUV.getUVWidth() > getUVWidth())
                || (otherYUV.getUVHeight() > getUVHeight()))
        {
            Cb = new int[otherYUV.getUVHeight()][otherYUV.getUVWidth()];
            Cr = new int[otherYUV.getUVHeight()][otherYUV.getUVWidth()];
        }
        // Deep-copy row contents. The previous implementation applied
        // System.arraycopy to the outer int[][] arrays, which copies row
        // *references* only — both buffers ended up sharing (and mutating)
        // the same pixel rows, and it threw when the source had fewer rows.
        copyRows(otherYUV.Y, Y);
        copyRows(otherYUV.Cb, Cb);
        copyRows(otherYUV.Cr, Cr);
    }

    /** Copies the overlapping region of src into dst, row by row. */
    private static void copyRows(int[][] src, int[][] dst) {
        int rows = Math.min(src.length, dst.length);
        for (int i = 0; i < rows; i++) {
            System.arraycopy(src[i], 0, dst[i], 0, Math.min(src[i].length, dst[i].length));
        }
    }

    /**
     * Gets an 8-bit luma pixel value at a given position.
     *
     * @param x horizontal pixel position.
     * @param y vertical pixel position.
     * @return an eight-bit value of the Y component at position (x,y) in the frame.
     */
    public int getY8bit(int x, int y) {
        return Y[y][x];
    }

    /**
     * Sets an 8-bit luma pixel value at a given position.
     *
     * @param x     horizontal pixel position.
     * @param y     vertical pixel position.
     * @param value an eight-bit value of the Y component at position (x,y) in the frame.
     */
    public void setY8bit(int x, int y, int value) {
        Y[y][x] = value;
    }

    /**
     * Gets an 8-bit chroma pixel value at a given position.
     *
     * @param x horizontal pixel position.
     * @param y vertical pixel position.
     * @return an eight-bit value of the Cb component at position (x,y) in the frame.
     */
    public int getCb8bit(int x, int y) {
        return Cb[y][x];
    }

    /**
     * Sets an 8-bit chroma pixel value at a given position.
     *
     * @param x     horizontal pixel position.
     * @param y     vertical pixel position.
     * @param value an eight-bit value of the Cb component at position (x,y) in the frame.
     */
    public void setCb8bit(int x, int y, int value) {
        Cb[y][x] = value;
    }

    /**
     * Gets an 8-bit chroma pixel value at a given position.
     *
     * @param x horizontal pixel position.
     * @param y vertical pixel position.
     * @return an eight-bit value of the Cr component at position (x,y) in the frame.
     */
    public int getCr8bit(int x, int y) {
        return Cr[y][x];
    }

    /**
     * Sets an 8-bit chroma pixel value at a given position.
     *
     * @param x     horizontal pixel position.
     * @param y     vertical pixel position.
     * @param value an eight-bit value of the Cr component at position (x,y) in the frame.
     */
    public void setCr8bit(int x, int y, int value) {
        Cr[y][x] = value;
    }
}





1   ...   7   8   9   10   11   12   13   14   ...   24


©livred.info 2017
enviar mensagem

    Página principal