Browse Source

It works, we get a spectrogram on the iPad :) (still with fixed size and such)

master
Joshua Moerman 10 years ago
parent
commit
3e7d71c6d2
  1. 10
      SpectogramPrototype.xcodeproj/project.pbxproj
  2. 21
      SpectogramPrototype/AudioFile.h
  3. 93
      SpectogramPrototype/AudioFile.m
  4. 11
      SpectogramPrototype/Base.lproj/Main_iPad.storyboard
  5. 23
      SpectogramPrototype/FFTTest.m
  6. 5
      SpectogramPrototype/RuledScrollView.m
  7. 1
      SpectogramPrototype/ViewController.h
  8. 139
      SpectogramPrototype/ViewController.m
  9. BIN
      testMusic.mp3

10
SpectogramPrototype.xcodeproj/project.pbxproj

@ -29,6 +29,8 @@
426665BC1869EF4B005D62AC /* CoreAudio.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 426665BB1869EF4B005D62AC /* CoreAudio.framework */; };
426665C11869F194005D62AC /* AudioToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 426665C01869F194005D62AC /* AudioToolbox.framework */; };
426665C51869F50B005D62AC /* MediaPlayer.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 426665C41869F50B005D62AC /* MediaPlayer.framework */; };
42E12A91186F122400866CB3 /* testMusic.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 42E12A90186F122400866CB3 /* testMusic.mp3 */; };
42E12A94186F1F4C00866CB3 /* AudioFile.m in Sources */ = {isa = PBXBuildFile; fileRef = 42E12A93186F1F4C00866CB3 /* AudioFile.m */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
@ -71,6 +73,9 @@
426665BB1869EF4B005D62AC /* CoreAudio.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreAudio.framework; path = System/Library/Frameworks/CoreAudio.framework; sourceTree = SDKROOT; };
426665C01869F194005D62AC /* AudioToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioToolbox.framework; path = System/Library/Frameworks/AudioToolbox.framework; sourceTree = SDKROOT; };
426665C41869F50B005D62AC /* MediaPlayer.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = MediaPlayer.framework; path = System/Library/Frameworks/MediaPlayer.framework; sourceTree = SDKROOT; };
42E12A90186F122400866CB3 /* testMusic.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; path = testMusic.mp3; sourceTree = "<group>"; };
42E12A92186F1F4C00866CB3 /* AudioFile.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioFile.h; sourceTree = "<group>"; };
42E12A93186F1F4C00866CB3 /* AudioFile.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AudioFile.m; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@ -104,6 +109,7 @@
424F849C18661F8000687D3B = {
isa = PBXGroup;
children = (
42E12A90186F122400866CB3 /* testMusic.mp3 */,
424F84AE18661F8000687D3B /* SpectogramPrototype */,
424F84D018661F8000687D3B /* SpectogramPrototypeTests */,
424F84A718661F8000687D3B /* Frameworks */,
@ -144,6 +150,8 @@
424F84BD18661F8000687D3B /* Main_iPad.storyboard */,
424F84C018661F8000687D3B /* ViewController.h */,
424F84C118661F8000687D3B /* ViewController.m */,
42E12A92186F1F4C00866CB3 /* AudioFile.h */,
42E12A93186F1F4C00866CB3 /* AudioFile.m */,
426665B31869A7CC005D62AC /* RuledScrollView.h */,
426665B41869A7CC005D62AC /* RuledScrollView.m */,
426665B61869CEFE005D62AC /* FFTTest.h */,
@ -263,6 +271,7 @@
424F84BF18661F8000687D3B /* Main_iPad.storyboard in Resources */,
424F84C418661F8000687D3B /* Images.xcassets in Resources */,
424F84BC18661F8000687D3B /* Main_iPhone.storyboard in Resources */,
42E12A91186F122400866CB3 /* testMusic.mp3 in Resources */,
4261F08818671ECD00AA0EF9 /* ContentView.xib in Resources */,
424F84B318661F8000687D3B /* InfoPlist.strings in Resources */,
);
@ -283,6 +292,7 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
42E12A94186F1F4C00866CB3 /* AudioFile.m in Sources */,
426665B81869CEFE005D62AC /* FFTTest.m in Sources */,
424F84C218661F8000687D3B /* ViewController.m in Sources */,
424F84B918661F8000687D3B /* AppDelegate.m in Sources */,

21
SpectogramPrototype/AudioFile.h

@ -0,0 +1,21 @@
//
// AudioFile.h
// SpectogramPrototype
//
// Created by Joshua Moerman on 28/12/13.
// Copyright (c) 2013 Joshua Moerman. All rights reserved.
//
#import <Foundation/Foundation.h>
// Wraps an ExtAudioFileRef from the AudioToolbox framework and handles
// conversion of the file's native format to linear PCM on read.
@interface AudioFile : NSObject
/// Opens the audio file at the given URL and configures the internal
/// PCM client format so subsequent reads return normalized samples.
+ (AudioFile*) audioFileFromURL:(NSURL*) url;
/// Reads up to nSamples frames of the LEFT channel into array as floats;
/// positions past end-of-file are zero-filled.
/// Returns the number of frames actually read from the file.
- (unsigned int) fillArray:(float*)array withNumberOfSamples:(unsigned int)nSamples;
@end

93
SpectogramPrototype/AudioFile.m

@ -0,0 +1,93 @@
//
// AudioFile.m
// SpectogramPrototype
//
// Created by Joshua Moerman on 28/12/13.
// Copyright (c) 2013 Joshua Moerman. All rights reserved.
//
#import "AudioFile.h"
#import <AudioToolbox/ExtendedAudioFile.h>
// Class extension: private state and helpers.
@interface AudioFile (){
// Underlying AudioToolbox file handle; owned by this object and
// disposed in -dealloc.
ExtAudioFileRef audioFile;
// Client (output) data format that ExtAudioFileRead converts into.
AudioStreamBasicDescription format;
}
// Designated initializer; takes ownership of the passed ExtAudioFileRef.
- (id)initWithExtAudioFileRef:(ExtAudioFileRef) audioFile;
// Installs the PCM client data format on the file handle.
- (void)setupFormat;
@end
@implementation AudioFile

#pragma mark
#pragma mark Creation

// Designated initializer. Takes ownership of the ExtAudioFileRef, which
// is disposed in -dealloc.
- (id)initWithExtAudioFileRef:(ExtAudioFileRef)af {
    NSParameterAssert(af != NULL);
    if (self = [super init]) {
        audioFile = af;
        [self setupFormat];
    }
    return self;
}

// Configures the client data format: interleaved signed 16-bit stereo
// PCM at 44.1 kHz. ExtAudioFileRead converts the file's native format
// to this on the fly.
// NOTE(review): the sample rate is hard-coded, so files with a different
// native rate are resampled to 44.1 kHz — confirm this is intended.
- (void)setupFormat {
    format.mSampleRate       = 44100.0f;
    format.mFormatID         = kAudioFormatLinearPCM;
    format.mFormatFlags      = kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsSignedInteger;
    format.mFramesPerPacket  = 1;
    format.mChannelsPerFrame = 2;  // interleaved stereo: L R L R ...
    format.mBitsPerChannel   = 16;
    format.mBytesPerFrame    = 4;  // 2 channels * 2 bytes
    format.mBytesPerPacket   = 4;  // 1 frame per packet
    format.mReserved         = 0;

    OSStatus err = ExtAudioFileSetProperty(audioFile,
                                           kExtAudioFileProperty_ClientDataFormat,
                                           sizeof(format),
                                           &format);
    // A failure here is a programmer error (bad format description).
    assert(err == 0);
    (void)err;  // silence unused-variable warning when NDEBUG strips the assert
}

- (void)dealloc {
    if (audioFile) {
        OSStatus err = ExtAudioFileDispose(audioFile);
        assert(err == 0);
        (void)err;
    }
}

+ (AudioFile *)audioFileFromURL:(NSURL *)url {
    ExtAudioFileRef af = NULL;
    OSStatus err = ExtAudioFileOpenURL((__bridge CFURLRef)url, &af);
    if (err != 0 || af == NULL) {
        // A missing/unreadable file is a recoverable runtime error, not a
        // programmer error: report it and return nil instead of asserting.
        NSLog(@"AudioFile: could not open %@ (err %d)", url, (int)err);
        return nil;
    }
    // [self alloc] (not [AudioFile alloc]) so subclasses get the right class.
    return [[self alloc] initWithExtAudioFileRef:af];
}

#pragma mark
#pragma mark Usage

// Reads up to nSamples frames, keeps the left channel as normalized
// floats in array, zero-fills the remainder, and returns the number of
// frames actually read (0 on allocation or read failure).
- (unsigned int)fillArray:(float *)array withNumberOfSamples:(unsigned int)nSamples {
    // Temporary interleaved 16-bit stereo buffer for the converter output.
    AudioBufferList bufferList;
    bufferList.mNumberBuffers = 1;
    bufferList.mBuffers[0].mNumberChannels = format.mChannelsPerFrame;
    bufferList.mBuffers[0].mDataByteSize = nSamples * format.mBytesPerFrame;
    bufferList.mBuffers[0].mData = calloc(nSamples, format.mBytesPerFrame);
    if (!bufferList.mBuffers[0].mData) {
        // Allocation failed: hand back silence rather than crashing.
        for (unsigned int i = 0; i < nSamples; ++i) array[i] = 0.0f;
        return 0;
    }

    UInt32 nFrames = nSamples;
    OSStatus err = ExtAudioFileRead(audioFile, &nFrames, &bufferList);
    if (err != 0) {
        // Read errors are recoverable; report "no samples" instead of
        // relying on an assert that vanishes in release builds.
        NSLog(@"AudioFile: read failed (err %d)", (int)err);
        free(bufferList.mBuffers[0].mData);
        for (unsigned int i = 0; i < nSamples; ++i) array[i] = 0.0f;
        return 0;
    }

    // De-interleave: even indices are the left channel. Normalize the
    // signed 16-bit samples to [-1, 1).
    const short * src = bufferList.mBuffers[0].mData;
    for (unsigned int i = 0; i < nFrames; ++i) {
        array[i] = src[2 * i] / 32768.0f;
    }
    // Zero-pad when the file ended before nSamples frames.
    for (unsigned int i = nFrames; i < nSamples; ++i) {
        array[i] = 0.0f;
    }

    free(bufferList.mBuffers[0].mData);
    return nFrames;
}

@end

11
SpectogramPrototype/Base.lproj/Main_iPad.storyboard

@ -21,17 +21,6 @@
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
<color key="backgroundColor" red="0.40000000600000002" green="1" blue="0.40000000600000002" alpha="1" colorSpace="calibratedRGB"/>
</scrollView>
<button opaque="NO" contentMode="scaleToFill" fixedFrame="YES" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="Bzu-fO-jsM">
<rect key="frame" x="200" y="200" width="111" height="42"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
<fontDescription key="fontDescription" type="system" pointSize="22"/>
<state key="normal" title="Calculate!">
<color key="titleShadowColor" white="0.5" alpha="1" colorSpace="calibratedWhite"/>
</state>
<connections>
<action selector="calculateButtonPressed:" destination="DUW-zh-mwd" eventType="touchUpInside" id="4Rp-2D-265"/>
</connections>
</button>
<button opaque="NO" contentMode="scaleToFill" fixedFrame="YES" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="kj8-JR-vKG">
<rect key="frame" x="200" y="300" width="111" height="42"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>

23
SpectogramPrototype/FFTTest.m

@ -17,7 +17,7 @@
@end
const int logN = 3;
const int logN = 12;
const int N = 1 << logN;
const float scaling = 1.0 / N;
@ -28,11 +28,15 @@ const float scaling = 1.0 / N;
if(self = [super init]){
acceptedSize = N;
// allocate structs
setup = vDSP_create_fftsetup(logN, kFFTRadix2);
window = calloc(N, sizeof(float));
buffer = calloc(N, sizeof(float));
assert(setup && window && buffer);
// initialize bits
vDSP_blkman_window(window, N, 0);
}
return self;
}
@ -46,15 +50,8 @@ const float scaling = 1.0 / N;
- (void)inPlaceFFT:(float *)data forSize:(unsigned int)n{
assert(n == N);
// Output the source data
for(int i = 0; i < N; ++i){
printf("%f, ", data[i]);
}
printf("\n");
// Window our time - slice
// vDSP_blkman_window(window, N, 0);
// vmul(data, 1, window, 1, data, 1, N);
// Windowing
vDSP_vmul(data, 1, window, 1, data, 1, N);
// Rearrange data
DSPSplitComplex split_buffer = { buffer, buffer + N/2 };
@ -67,11 +64,5 @@ const float scaling = 1.0 / N;
vDSP_zvabs(&split_buffer, 1, data, 1, N/2);
vDSP_vsmul(data, 1, &scaling, data, 1, N/2);
// Output results:
for(int i = 0; i < N/2; ++i){
printf("%f, ", data[i]);
}
printf("\n");
}
@end

5
SpectogramPrototype/RuledScrollView.m

@ -68,14 +68,15 @@
- (void)placeTicks {
[topRuler.subviews makeObjectsPerformSelector:@selector(removeFromSuperview)];
for (CGFloat x = rulerWidth + 50; x < topRuler.frame.size.width; x += 100){
CGFloat spacing = 600.0f;
for (CGFloat x = rulerWidth + 0.5*spacing; x < topRuler.frame.size.width; x += spacing){
UIView * line = [[UIView alloc] initWithFrame:CGRectMake(x, 0, 1, rulerHeight)];
line.backgroundColor = [UIColor blackColor];
[topRuler addSubview:line];
CGFloat width = 100;
UILabel * text = [[UILabel alloc] initWithFrame:CGRectMake(x - width/2, 0, width, rulerHeight)];
text.text = [NSString stringWithFormat:@"%.0f", x];
text.text = [NSString stringWithFormat:@"%.0f", 0.1 * x];
text.textAlignment = NSTextAlignmentCenter;
[topRuler addSubview:text];
}

1
SpectogramPrototype/ViewController.h

@ -15,7 +15,6 @@
@property (nonatomic) IBOutlet RuledScrollView * scrollView;
@property (nonatomic) IBOutlet UIView * contentView;
- (IBAction) calculateButtonPressed:(id)sender;
- (IBAction) mediaButtonPressed:(UIButton*)sender;
@end

139
SpectogramPrototype/ViewController.m

@ -12,10 +12,42 @@
#import "ViewController.h"
#import "RuledScrollView.h"
#import "FFTTest.h"
#import "AudioFile.h"
typedef enum {
iPad,
iPod,
Simulator
} device;
// NOTE: the simulator has no media, so we cant use MPMediaPicker
// and the iPad has to use a popover, while the iPod/iPhone doesn't
// So that's why we check the device
device get_device(){
device device = iPad;
NSRange range = [[[UIDevice currentDevice] model] rangeOfString:@"Simulator"];
if(range.location != NSNotFound) {
device = Simulator;
}
if([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone){
device = iPod;
}
return device;
}
UInt8 clamp_to_uint8(float x){
if(x >= 1.0) return 255;
if(x <= 0.0) return 0;
return 255.0 * x;
}
@interface ViewController () <MPMediaPickerControllerDelegate> {
UIPopoverController * pop;
FFTTest * fft_handler;
float * data;
}
- (void) openAudioFile:(NSURL*)filePath;
@property (nonatomic, strong) UILabel* header;
@ -40,57 +72,90 @@
[super didReceiveMemoryWarning];
}
- (void)calculateButtonPressed:(id)sender{
- (void)openAudioFile:(NSURL *)filePath{
AudioFile * audioFile = [AudioFile audioFileFromURL:filePath];
if(!fft_handler)
fft_handler = [[FFTTest alloc] init];
unsigned int size = fft_handler.acceptedSize;
float * data = calloc(size, sizeof(float));
[fft_handler inPlaceFFT:data forSize:size];
free(data);
}
- (void)openAudioFile:(NSURL *)filePath{
OSStatus err;
unsigned int width = size/4;
unsigned int height = size/8;
ExtAudioFileRef audioFile = NULL;
err = ExtAudioFileOpenURL((__bridge CFURLRef) filePath, &audioFile);
assert(err == 0);
if(!data)
data = calloc(size, sizeof(float));
AudioBufferList bufferList;
bufferList.mNumberBuffers = 1;
bufferList.mBuffers[0].mNumberChannels = 2;
bufferList.mBuffers[0].mDataByteSize = 32768;
bufferList.mBuffers[0].mData = calloc(32768, 1);
char * rgba = calloc(width*height*4, sizeof(char));
while(true){
bufferList.mBuffers[0].mDataByteSize = 32768;
UInt32 nFrames = 32768 / 6;
err = ExtAudioFileRead(audioFile, &nFrames, &bufferList);
assert(err == 0);
if(!nFrames) break;
for(unsigned int x = 0; x < width; ++x){
[audioFile fillArray:data withNumberOfSamples:size];
[fft_handler inPlaceFFT:data forSize:size];
for(unsigned int y = 0; y < height; ++y){
unsigned int yy = height - y - 1;
rgba[4*width*yy + 4*x + 0] = clamp_to_uint8(100.0 * data[y]);
rgba[4*width*yy + 4*x + 1] = clamp_to_uint8(50.0 * data[y]);
rgba[4*width*yy + 4*x + 2] = clamp_to_uint8(10.0 * data[y]);
rgba[4*width*yy + 4*x + 3] = 0;
}
}
NSLog(@"Read some bits");
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
assert(colorSpace);
CGContextRef bitmapContext = CGBitmapContextCreate(rgba, width, height, 8, 4*width, colorSpace, (CGBitmapInfo)kCGImageAlphaNoneSkipLast);
assert(bitmapContext);
CGImageRef cgImage = CGBitmapContextCreateImage(bitmapContext);
assert(cgImage);
UIImage * newUIImage = [UIImage imageWithCGImage:cgImage];
assert(newUIImage);
err = ExtAudioFileDispose(audioFile);
assert(err == 0);
CGColorSpaceRelease(colorSpace);
CGContextRelease(bitmapContext);
CGImageRelease(cgImage);
free(rgba);
UIImageView * view = [[UIImageView alloc] initWithImage:newUIImage];
view.frame = CGRectMake(0, 0, width, height);
[contentView removeFromSuperview];
contentView = view;
[scrollView.content addSubview:contentView];
contentView.autoresizingMask = UIViewAutoresizingNone;
scrollView.contentSize = contentView.bounds.size;
}
- (void)mediaButtonPressed:(UIButton*)sender{
MPMediaPickerController * mppc = [[MPMediaPickerController alloc] initWithMediaTypes:MPMediaTypeAnyAudio];
mppc.delegate = self;
mppc.allowsPickingMultipleItems = NO;
mppc.showsCloudItems = NO;
if(pop){
[pop dismissPopoverAnimated:YES];
pop = nil;
}
MPMediaPickerController * mppc = nil;
device device = get_device();
pop = [[UIPopoverController alloc] initWithContentViewController:mppc];
[pop presentPopoverFromRect:sender.frame inView:self.view permittedArrowDirections:UIPopoverArrowDirectionAny animated:YES];
switch (device) {
case iPad:
mppc = [[MPMediaPickerController alloc] initWithMediaTypes:MPMediaTypeAnyAudio];
mppc.delegate = self;
mppc.allowsPickingMultipleItems = NO;
mppc.showsCloudItems = NO;
if(pop){
[pop dismissPopoverAnimated:YES];
pop = nil;
}
pop = [[UIPopoverController alloc] initWithContentViewController:mppc];
[pop presentPopoverFromRect:sender.frame inView:self.view permittedArrowDirections:UIPopoverArrowDirectionAny animated:YES];
break;
case iPod:
mppc = [[MPMediaPickerController alloc] initWithMediaTypes:MPMediaTypeAnyAudio];
mppc.delegate = self;
mppc.allowsPickingMultipleItems = NO;
mppc.showsCloudItems = NO;
[self presentViewController:mppc animated:YES completion:nil];
break;
case Simulator:
NSLog(@"WARNING: No MPMediaPicker in Simulator, opening some file");
[self openAudioFile:[[NSBundle mainBundle] URLForResource:@"testMusic" withExtension:@".mp3"]];
break;
}
}
- (void)mediaPicker:(MPMediaPickerController *)mediaPicker didPickMediaItems:(MPMediaItemCollection *)mediaItemCollection {

BIN
testMusic.mp3

Binary file not shown.