sync with bastp git-head

This commit is contained in:
Adrian Ulrich 2016-10-16 20:22:57 +02:00
parent 95cd3b3329
commit ca939650d4
6 changed files with 150 additions and 98 deletions

View File

@ -26,10 +26,10 @@ import java.util.HashMap;
public class Bastp {
public Bastp() {
}
public HashMap getTags(String fname) {
HashMap tags = new HashMap();
try {

View File

@ -24,15 +24,11 @@ import java.util.HashMap;
import java.util.Vector;
public class Common {
private static final long MAX_PKT_SIZE = 524288;
private static final int MAX_COMMENT_SIZE = 512;
/**
 * Aborts the parser by throwing an IOException.
 *
 * @param reason message describing the fatal parse error
 * @throws IOException always, carrying 'reason' as its message
 */
public void xdie(String reason) throws IOException {
throw new IOException(reason);
}
/*
** Returns a 32bit int from given byte offset in LE
*/
/**
* Returns a 32bit int from given byte offset in LE
*/
public int b2le32(byte[] b, int off) {
int r = 0;
for(int i=0; i<4; i++) {
@ -41,6 +37,16 @@ public class Common {
return r;
}
/**
 * Same as b2le32 but reads from a RandomAccessFile instead of a buffer.
 *
 * @param fh  file to read from
 * @param off absolute file offset of the 4 bytes to decode
 * @return the 32bit little-endian value stored at 'off'
 * @throws IOException if seeking fails or fewer than 4 bytes remain
 */
public int raf2le32(RandomAccessFile fh, long off) throws IOException {
    byte[] scratch = new byte[4];
    fh.seek(off);
    // readFully (unlike read) guarantees all 4 bytes were read or
    // throws EOFException instead of silently decoding stale zeros.
    fh.readFully(scratch);
    return b2le32(scratch, 0);
}
/**
 * Returns a 32bit int from the given byte offset in big-endian order.
 */
public int b2be32(byte[] b, int off) {
    // decode as little-endian first, then byte-swap into big-endian
    int le = b2le32(b, off);
    return swap32(le);
}
@ -49,55 +55,89 @@ public class Common {
return((i&0xff)<<24)+((i&0xff00)<<8)+((i&0xff0000)>>8)+((i>>24)&0xff);
}
/*
** Returns a 16bit int from given byte offset in LE
*/
/**
* Returns a 16bit int from given byte offset in LE
*/
/**
 * Returns a 16bit int from the given byte offset in little-endian order.
 */
public int b2le16(byte[] b, int off) {
    int lo = b2u(b[off]);
    int hi = b2u(b[off + 1]);
    return lo | (hi << 8);
}
/*
** convert 'byte' value into unsigned int
*/
/**
* convert 'byte' value into unsigned int
*/
/**
 * Converts a (signed) byte into its unsigned int value (0-255).
 */
public int b2u(byte x) {
    // negative byte values wrap to 128..255
    return (x < 0 ? x + 256 : x);
}
/*
** Printout debug message to STDOUT
*/
/**
* Printout debug message to STDOUT
*/
/**
 * Prints a debug message to STDOUT.
 */
public void debug(String s) {
    StringBuilder line = new StringBuilder("DBUG ");
    line.append(s); // append(null) prints "null", same as '+' concatenation
    System.out.println(line.toString());
}
public HashMap parse_vorbis_comment(RandomAccessFile s, long offset, long payload_len) throws IOException {
/**
 * Throws an exception, killing the parser
 *
 * @param reason message describing the fatal parse error
 * @throws IOException always, carrying 'reason' as its message
 */
public void xdie(String reason) throws IOException {
throw new IOException(reason);
}
public HashMap parse_vorbis_comment(RandomAccessFile fh, PageInfo.PageParser pp, long offset, long payload_len) throws IOException {
HashMap tags = new HashMap();
int comments = 0; // number of found comments
int xoff = 0; // offset within 'scratch'
int can_read = (int)(payload_len > MAX_PKT_SIZE ? MAX_PKT_SIZE : payload_len);
byte[] scratch = new byte[can_read];
long last_byte = offset + payload_len;
// seek to given position and slurp in the payload
s.seek(offset);
s.read(scratch);
// skip vendor string in format: [LEN][VENDOR_STRING]
xoff += 4 + b2le32(scratch, xoff); // 4 = LEN = 32bit int
comments = b2le32(scratch, xoff);
xoff += 4;
// skip vendor string in format: [LEN][VENDOR_STRING] -> 4 = LEN = 32bit int
offset += 4 + raf2le32(fh, offset);
for(int i=0; i<comments; i++) {
int clen = (int)b2le32(scratch, xoff);
xoff += 4+clen;
if(xoff > scratch.length)
xdie("string out of bounds");
String tag_raw = new String(scratch, xoff-clen, clen);
String[] tag_vec = tag_raw.split("=",2);
String tag_key = tag_vec[0].toUpperCase();
// we can now read the number of comments in this file, we will also
// adjust offset to point to the value after this 32bit int
int comments = raf2le32(fh, offset);
offset += 4;
addTagEntry(tags, tag_key, tag_vec[1]);
for ( ; comments > 0; comments--) {
int comment_len = raf2le32(fh, offset);
offset += 4;
long can_read = last_byte - offset; // indicates the last byte of this page
int do_read = (int)(can_read > comment_len ? comment_len : can_read); // how much data is readable in this page
if (do_read >= 3) {
int bsize = (do_read > MAX_COMMENT_SIZE ? MAX_COMMENT_SIZE : do_read);
byte[] data = new byte[bsize];
fh.seek(offset);
fh.read(data);
String tag_raw = new String(data);
String[] tag_vec = tag_raw.split("=", 2);
String tag_key = tag_vec[0].toUpperCase();
addTagEntry(tags, tag_key, tag_vec[1]);
}
// set offset to begin of next tag (OR the end of this page!)
offset += do_read;
// We hit the end of a stream
// this is most likely due to the fact that we cropped do_read to not cross
// the page boundary -> we must now calculate the position of the next tag
if (offset == last_byte) {
int partial_cruft = comment_len - do_read; // how many bytes we did not read
while(partial_cruft > 0) {
PageInfo pi = pp.parse_stream_page(fh, last_byte);
if (pi.header_len <1 || pi.payload_len < 1)
xdie("Data from callback doesnt make much sense");
offset += pi.header_len; // move position behind page header
last_byte = offset + pi.payload_len; // and adjust the last byte to pos + payload_size
if (offset+partial_cruft < last_byte) {
offset += partial_cruft; // partial data ends in this block: just adjust the ofset
break;
} else {
// this page just contains data from the partial tag -> skip to next one
offset = last_byte;
partial_cruft -= pi.payload_len;
}
}
}
}
return tags;
}

View File

@ -23,7 +23,7 @@ import java.util.HashMap;
import java.util.Enumeration;
public class FlacFile extends Common {
public class FlacFile extends Common implements PageInfo.PageParser {
private static final int FLAC_TYPE_STREAMINFO = 0; // Basic info about the stream
private static final int FLAC_TYPE_COMMENT = 4; // ID of 'VorbisComment's
@ -33,28 +33,27 @@ public class FlacFile extends Common {
public HashMap getTags(RandomAccessFile s) throws IOException {
int xoff = 4; // skip file magic
int retry = 64;
int r[];
boolean need_infos = true;
boolean need_tags = true;
HashMap infos = new HashMap();
HashMap tags = new HashMap();
for(; retry > 0; retry--) {
r = parse_metadata_block(s, xoff);
if(r[2] == FLAC_TYPE_STREAMINFO) {
infos = parse_streaminfo_block(s, xoff+r[0], r[1]);
PageInfo pi = parse_stream_page(s, xoff);
if(pi.type == FLAC_TYPE_STREAMINFO) {
infos = parse_streaminfo_block(s, xoff+pi.header_len, pi.payload_len);
need_infos = false;
}
if(r[2] == FLAC_TYPE_COMMENT) {
tags = parse_vorbis_comment(s, xoff+r[0], r[1]);
if(pi.type == FLAC_TYPE_COMMENT) {
tags = parse_vorbis_comment(s, this, xoff+pi.header_len, pi.payload_len);
need_tags = false;
}
if(r[3] != 0 || (need_tags == false && need_infos == false))
if(pi.last_page == true || (need_tags == false && need_infos == false))
break; // eof reached
// else: calculate next offset
xoff += r[0] + r[1];
xoff += pi.header_len + pi.payload_len;
}
// Copy duration to final hashmap if found in infoblock
@ -65,34 +64,31 @@ public class FlacFile extends Common {
return tags;
}
/* Parses the metadata block at 'offset' and returns
** [header_size, payload_size, type, stop_after]
*/
private int[] parse_metadata_block(RandomAccessFile s, long offset) throws IOException {
int[] result = new int[4];
/**
* Parses the metadata block at 'offset'
*/
public PageInfo parse_stream_page(RandomAccessFile s, long offset) throws IOException {
byte[] mb_head = new byte[4];
int stop_after = 0;
int block_type = 0;
int block_size = 0;
s.seek(offset);
if( s.read(mb_head) != 4 )
xdie("failed to read metadata block header");
block_size = b2be32(mb_head,0); // read whole header as 32 big endian
block_type = (block_size >> 24) & 127; // BIT 1-7 are the type
stop_after = (((block_size >> 24) & 128) > 0 ? 1 : 0 ); // BIT 0 indicates the last-block flag
block_size = (block_size & 0x00FFFFFF); // byte 1-7 are the size
// debug("size="+block_size+", type="+block_type+", is_last="+stop_after);
result[0] = 4; // hardcoded - only returned to be consistent with OGG parser
result[1] = block_size;
result[2] = block_type;
result[3] = stop_after;
return result;
PageInfo pi = new PageInfo();
pi.header_len = 4; // fixed size in flac
pi.payload_len = block_size;
pi.type = block_type;
pi.last_page = (stop_after != 0);
return pi;
}
/*

View File

@ -23,7 +23,7 @@ import java.io.RandomAccessFile;
import java.util.HashMap;
public class OggFile extends Common {
public class OggFile extends Common implements PageInfo.PageParser {
private static final int OGG_PAGE_SIZE = 27; // Static size of an OGG Page
private static final int OGG_TYPE_IDENTIFICATION = 1; // Identification header
@ -42,15 +42,15 @@ public class OggFile extends Common {
HashMap identification = new HashMap();
for( ; retry > 0 ; retry-- ) {
long res[] = parse_ogg_page(s, offset);
if(res[2] == OGG_TYPE_IDENTIFICATION) {
identification = parse_ogg_vorbis_identification(s, offset+res[0], res[1]);
PageInfo pi = parse_stream_page(s, offset);
if(pi.type == OGG_TYPE_IDENTIFICATION) {
identification = parse_ogg_vorbis_identification(s, offset+pi.header_len, pi.payload_len);
need_id = false;
} else if(res[2] == OGG_TYPE_COMMENT) {
tags = parse_ogg_vorbis_comment(s, offset+res[0], res[1]);
} else if(pi.type == OGG_TYPE_COMMENT) {
tags = parse_ogg_vorbis_comment(s, offset+pi.header_len, pi.payload_len);
need_tags = false;
}
offset += res[0] + res[1];
offset += pi.header_len + pi.payload_len;
if (need_tags == false && need_id == false) {
break;
}
@ -71,10 +71,10 @@ public class OggFile extends Common {
}
/* Parses the ogg page at offset 'offset' and returns
** [header_size, payload_size, type]
*/
protected long[] parse_ogg_page(RandomAccessFile s, long offset) throws IOException {
/**
* Parses the ogg page at offset 'offset'
*/
public PageInfo parse_stream_page(RandomAccessFile s, long offset) throws IOException {
long[] result = new long[3]; // [header_size, payload_size]
byte[] p_header = new byte[OGG_PAGE_SIZE]; // buffer for the page header
byte[] scratch;
@ -101,18 +101,18 @@ public class OggFile extends Common {
psize += b2u(scratch[i]);
}
}
// populate result array
result[0] = (s.getFilePointer() - offset);
result[1] = psize;
result[2] = -1;
PageInfo pi = new PageInfo();
pi.header_len = (s.getFilePointer() - offset);
pi.payload_len = psize;
pi.type = -1;
/* next byte is most likely the type -> pre-read */
if(psize >= 1 && s.read(p_header, 0, 1) == 1) {
result[2] = b2u(p_header[0]);
pi.type = b2u(p_header[0]);
}
return result;
return pi;
}
/* In 'vorbiscomment' field is prefixed with \3vorbis in OGG files
@ -131,7 +131,7 @@ public class OggFile extends Common {
if( (new String(pfx, 0, pfx_len)).equals("\3vorbis") == false )
xdie("Damaged packet found!");
return parse_vorbis_comment(s, offset+pfx_len, pl_len-pfx_len);
return parse_vorbis_comment(s, this, offset+pfx_len, pl_len-pfx_len);
}
/*

View File

@ -35,18 +35,18 @@ public class OpusFile extends OggFile {
// contain the OpusHeader while the 2nd MUST contain the
// OggHeader payload: https://wiki.xiph.org/OggOpus
long pos = 0;
long offsets[] = parse_ogg_page(s, pos);
PageInfo pi = parse_stream_page(s, pos);
HashMap tags = new HashMap();
HashMap opus_head = parse_opus_head(s, pos+offsets[0], offsets[1]);
pos += offsets[0]+offsets[1];
HashMap opus_head = parse_opus_head(s, pos+pi.header_len, pi.payload_len);
pos += pi.header_len+pi.payload_len;
// Check if we parsed a version number and ensure it doesn't have any
// of the upper 4 bits set (eg: <= 15)
if(opus_head.containsKey("version") && (Integer)opus_head.get("version") <= 0xF) {
// Get next page: The spec requires this to be an OpusTags head
offsets = parse_ogg_page(s, pos);
tags = parse_opus_vorbis_comment(s, pos+offsets[0], offsets[1]);
pi = parse_stream_page(s, pos);
tags = parse_opus_vorbis_comment(s, pos+pi.header_len, pi.payload_len);
// ...and merge replay gain intos into the tags map
calculate_gain(opus_head, tags);
}
@ -121,7 +121,7 @@ public class OpusFile extends OggFile {
if((new String(magic, 0, magic_len)).equals("OpusTags") == false)
xdie("Damaged packet found!");
return parse_vorbis_comment(s, offset+magic_len, pl_len-magic_len);
return parse_vorbis_comment(s, this, offset+magic_len, pl_len-magic_len);
}
}

View File

@ -0,0 +1,16 @@
package ch.blinkenlights.bastp;
import java.io.IOException;
import java.io.RandomAccessFile;
/**
 * Plain data holder describing one parsed stream page (an OGG page or a
 * FLAC metadata block) plus the callback interface used to parse one.
 */
public class PageInfo {
    /** Size in bytes of the page/block header. */
    long header_len;
    /** Size in bytes of the payload following the header. */
    long payload_len;
    /** Type ID of this page/block; -1 if the type could not be read. */
    int type;
    /** True if this was flagged as the last page/block of the stream. */
    boolean last_page;

    /**
     * Callback for parsing a single stream page at a given file offset.
     * (Nested interfaces are implicitly static; the redundant modifier
     * was dropped.)
     */
    public interface PageParser {
        PageInfo parse_stream_page(RandomAccessFile fh, long offset) throws IOException;
    }
}