Retired Document
Important: This sample code may not represent best practices for current development. The project may use deprecated symbols and illustrate technologies and techniques that are no longer recommended.
source/QDesign_decomp.c
/*
**  Apple Macintosh Developer Technical Support
**
**  Routines demonstrating how to play QDesign compressed AIFF files
**  using a combination of QuickTime and the Sound Manager.
**
**  by Mark Cookson, Apple Developer Technical Support
**
**  File:  QDesign_decomp.c
**
**  Copyright ©1998-1999 Apple Computer, Inc.
**  All rights reserved.
**
**  You may incorporate this sample code into your applications without
**  restriction, though the sample code has been provided "AS IS" and the
**  responsibility for its operation is 100% yours.  However, what you are
**  not permitted to do is to redistribute the source as "Apple Sample
**  Code" after having made changes.  If you're going to re-distribute the
**  source, we require that you make it clear in the source that the code
**  was descended from Apple Sample Code, but that you've made changes.
*/
#include <Memory.h>
#include <QuickDraw.h>
#include <Fonts.h>
#include <Windows.h>
#include <Menus.h>
#include <TextEdit.h>
#include <Dialogs.h>
#include <Sound.h>
#include <SoundInput.h>
#include <Files.h>
#include <Navigation.h>
#include <stdio.h>

#include "SoundStruct.h"
#include "AIFF.h"
typedef struct {
    long    atomSize;                   // how big this structure is (big endian)
    long    atomType;                   // atom type
    char    waveData[28];
} AtomQDMCWaveFormatEx;

typedef struct {
    AudioFormatAtom         formatData;
    AtomQDMCWaveFormatEx    endianData;
    AudioTerminatorAtom     terminatorData;
} AudioCompressionAtom, *AudioCompressionAtomPtr, **AudioCompressionAtomHandle;
// Prototypes
OSErr InstallRequiredAppleEvents (void);
pascal OSErr HandleOApp (AppleEvent *theAppleEvent, AppleEvent *reply, long handlerRefcon);
pascal OSErr HandleODoc (AppleEvent *theAppleEvent, AppleEvent *reply, long handlerRefcon);
pascal OSErr HandlePDoc (AppleEvent *theAppleEvent, AppleEvent *reply, long handlerRefcon);
pascal OSErr HandleQuit (AppleEvent *theAppleEvent, AppleEvent *reply, long handlerRefcon);
OSErr GetSoundToPlay (FSSpec *fileToPlay);
OSErr PlaySound (FSSpec *fileToPlay);

// Globals
Boolean gBufferDone = false,    // set by the sound channel callback when a buffer finishes playing
        gDone       = false;    // set when the user quits; ends the main event loop
static pascal void SoundCallBackFcn (SndChannelPtr theChannel, SndCommand *theCmd) {
#pragma unused (theChannel)
#if !GENERATINGCFM
    // Classic 68K runtime: restore the application's A5 world (passed in param2)
    // so the callback can safely touch application globals.
    long oldA5;

    oldA5 = SetA5 (theCmd->param2);
#endif

    gBufferDone = true;

#if !GENERATINGCFM
    oldA5 = SetA5 (oldA5);
#endif
}
static OSErr MenuBarInit (void) {
    Handle      menuBar;
    MenuHandle  menu;
    OSErr       err = noErr;

    menuBar = GetNewMBar (128);
    if (menuBar != nil) {
        SetMenuBar (menuBar);
        menu = GetMenuHandle (128);
        if (menu != nil) {
            AppendResMenu (menu, 'DRVR');
            DrawMenuBar ();
        } else {
            err = memFullErr;
        }
    } else {
        err = memFullErr;
    }

    return err;
}
static OSErr DispatchMenuChoice (long menuChoice) {
    OSErr       err = noErr;
    short       menu;
    short       item;
    MenuHandle  appleMenu;
    Str255      accName;
    short       accNumber;
    FSSpec      fileToPlay;

    if (menuChoice != 0) {
        menu = HiWord (menuChoice);
        item = LoWord (menuChoice);
        switch (menu) {
            case 128:       // Apple Menu
                appleMenu = GetMenuHandle (128);
                GetMenuItemText (appleMenu, item, accName);
                accNumber = OpenDeskAcc (accName);
                break;
            case 129:       // File Menu
                switch (item) {
                    case 1:     // Open
                        err = GetSoundToPlay (&fileToPlay);
                        break;
                    case 3:     // Quit
                        gDone = true;
                        break;
                }
        }
    }

    HiliteMenu (0);

    return err;
}
void main (void) {
    OSErr       err = noErr;
    Boolean     gotEvent;
    EventRecord event;
    WindowPtr   window;
    short       thePart;

    MaxApplZone ();
    InitGraf (&qd.thePort);
    InitFonts ();
    InitWindows ();
    InitMenus ();
    TEInit ();
    InitDialogs ((long)nil);
    InitCursor ();

    err = InstallRequiredAppleEvents ();
    err = MenuBarInit ();

    while (!gDone) {
        gotEvent = WaitNextEvent (everyEvent, &event, 10, nil);
        if (gotEvent) {
            switch (event.what) {
                case kHighLevelEvent:
                    err = AEProcessAppleEvent (&event);
                    break;
                case mouseDown:
                    thePart = FindWindow (event.where, &window);
                    switch (thePart) {
                        case inMenuBar:
                            DispatchMenuChoice (MenuSelect (event.where));
                            break;
                    }
                    break;
                case keyDown:
                    if (event.modifiers & cmdKey) {
                        err = DispatchMenuChoice (MenuKey (event.message & charCodeMask));
                    }
                    break;
            }
        }
    }
}
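/*
** GetSoundToPlay asks the user for a file.  With Navigation Services the
** selection is packaged as an Open Documents ('odoc') Apple event and sent
** back to this application, so files chosen here and files dropped on the
** application in the Finder both arrive through HandleODoc below.  Without
** Navigation Services, StandardGetFile is used and the file is played
** directly.
*/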
OSErr GetSoundToPlay (FSSpec *fileToPlay) {
    OSErr               err = noErr;
    SFTypeList          typeList = {'AIFF', 'AIFC', 0, 0};
    StandardFileReply   sfReply;

    if (NavServicesAvailable () == true) {
        NavReplyRecord      navReply;
        NavDialogOptions    dialogOptions;

        err = NavGetDefaultDialogOptions (&dialogOptions);
        if (err == noErr) {
            dialogOptions.dialogOptionFlags = kNavAllFilesInPopup;
        }
        if (err == noErr) {
            err = NavGetFile (nil, &navReply, &dialogOptions, nil, nil, nil, nil, nil);
        }
        if (navReply.validRecord && err == noErr) {
            ProcessSerialNumber processSN     = {0, kCurrentProcess};
            AEAddressDesc       targetAddress = {typeNull, nil};
            AppleEvent          theODOC       = {typeNull, nil},
                                theReply      = {typeNull, nil};

            // Create an Apple event addressed to ourselves.
            err = AECreateDesc (typeProcessSerialNumber, &processSN, sizeof (ProcessSerialNumber), &targetAddress);
            if (err == noErr) {
                // Create the open document event.
                err = AECreateAppleEvent (kCoreEventClass, kAEOpenDocuments, &targetAddress, kAutoGenerateReturnID, kAnyTransactionID, &theODOC);
                AEDisposeDesc (&targetAddress);
            }
            if (err == noErr) {
                // Put the list of selected files into the open document event.
                err = AEPutParamDesc (&theODOC, keyDirectObject, &(navReply.selection));
            }
            if (err == noErr) {
                // Send the open document event to ourselves.
                err = AESend (&theODOC, &theReply, kAENoReply, kAENormalPriority, kAEDefaultTimeout, nil, nil);
                AEDisposeDesc (&theODOC);
                AEDisposeDesc (&theReply);
            }
        }
        (void)NavDisposeReply (&navReply);
    } else {
        StandardGetFile (nil, 2, typeList, &sfReply);
        if (sfReply.sfGood == true) {
            *fileToPlay = sfReply.sfFile;
            err = PlaySound (fileToPlay);
        } else {
            err = userCanceledErr;
        }
    }

    return err;
}
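/*
** PlaySound opens the AIFF/AIFF-C file, hands the codec's decompression
** parameters to a Sound Converter, and then double-buffers playback:
** two buffers of decompressed audio are queued on a sound channel with
** bufferCmd, and a callBackCmd after each buffer tells us (via
** SoundCallBackFcn setting gBufferDone) that it is safe to refill the
** buffer that just finished while the other one plays.
*/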
OSErr PlaySound (FSSpec *fileToPlay) {
    OSErr                   err = noErr;
    SoundInfo               theSoundInfo;
    SndChannelPtr           soundChan = nil;
    AudioCompressionAtom    decomAtom;
    SoundComponentData      inputFormat,
                            outputFormat;
    SndCallBackUPP          SoundCallBackFcnUPP = nil;
    CmpSoundHeader          AIFFSndHeader1,
                            AIFFSndHeader2;
    CmpSoundHeaderPtr       AIFFSndHeader = nil;
    short                   whichBuffer = 0;
    SndCommand              playCmd1,
                            playCmd2,
                            callCmd;
    SndCommand              *playCmd = nil;
    SoundConverter          sc = nil;
    Ptr                     decomBuf1 = nil,
                            decomBuf2 = nil,
                            AIFFBuffer = nil,
                            compressedBuf = nil,
                            decomBuf = nil;
    long                    length = 0;
    unsigned long           inputFrames = 0,
                            inputBytes = 0,
                            outputFrames = 0,
                            outputBytes = 0,
                            bytesConverted = 0,
                            targetBytes = 32768;
    Boolean                 soundDone = false;
    if (err == noErr) {
        err = FSpOpenDF (fileToPlay, fsRdPerm, &theSoundInfo.refNum);
        if (err == noErr) {
            err = ASoundGetAIFFHeader (&theSoundInfo, &length, &(decomAtom.formatData));
        }
        if (err == noErr) {
            // Describe the compressed sound we will feed the converter.
            inputFormat.flags = 0;
            inputFormat.format = decomAtom.formatData.format;
            inputFormat.numChannels = theSoundInfo.doubleHeader.dbhNumChannels;
            inputFormat.sampleSize = theSoundInfo.doubleHeader.dbhSampleSize;
            inputFormat.sampleRate = theSoundInfo.doubleHeader.dbhSampleRate;
            inputFormat.sampleCount = 0;
            inputFormat.buffer = nil;
            inputFormat.reserved = 0;

            // Describe the uncompressed sound we want back from it.
            outputFormat.flags = 0;
            outputFormat.format = kSoundNotCompressed;
            outputFormat.numChannels = inputFormat.numChannels;
            outputFormat.sampleSize = inputFormat.sampleSize;
            outputFormat.sampleRate = inputFormat.sampleRate;
            outputFormat.sampleCount = 0;
            outputFormat.buffer = nil;
            outputFormat.reserved = 0;

            err = SoundConverterOpen (&inputFormat, &outputFormat, &sc);
        }
        if (err == noErr) {
            // Hand the converter the decompression parameters read from the file.
            err = SoundConverterSetInfo (sc, siDecompressionParams, &decomAtom);
        }
        if (err == noErr) {
            // Ask the converter for workable buffer sizes, doubling the target size
            // until it is satisfied or the target exceeds a quarter of the largest
            // free memory block.
            do {
                targetBytes *= 2;
                err = SoundConverterGetBufferSizes (sc, targetBytes, &inputFrames, &inputBytes, &outputBytes);
            } while (err == notEnoughBufferSpace && targetBytes < (MaxBlock () / 4));
        }
        if (err == noErr) {
            AIFFBuffer = NewPtr (length);
            err = MemError ();
        }
        if (err == noErr) {
            decomBuf1 = NewPtr (outputBytes);
            err = MemError ();
        }
        if (err == noErr) {
            decomBuf2 = NewPtr (outputBytes);
            err = MemError ();
        }
        if (err == noErr) {
            compressedBuf = NewPtr (inputBytes);
            err = MemError ();
        }
        if (err == noErr) {
            err = SetFPos (theSoundInfo.refNum, fsFromStart, theSoundInfo.dataStart);
        }
        if (err == noErr) {
            err = FSRead (theSoundInfo.refNum, &length, AIFFBuffer);
        }
        if (err == noErr) {
            BlockMoveData (AIFFBuffer, compressedBuf, inputBytes);
            err = SoundConverterBeginConversion (sc);
        }
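        // Prime both buffers with decompressed audio before starting playback so a
        // full buffer is always ready while the other is being refilled.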
        if (err == noErr) {
            bytesConverted = 0;
            err = SoundConverterConvertBuffer (sc, compressedBuf, inputFrames, decomBuf1, &outputFrames, &outputBytes);
            bytesConverted += inputBytes;
        }
        if (err == noErr) {
            if (bytesConverted + inputBytes > length) {
                inputBytes = length - bytesConverted;
            }
            BlockMoveData (AIFFBuffer + bytesConverted, compressedBuf, inputBytes);
            err = SoundConverterConvertBuffer (sc, compressedBuf, inputFrames, decomBuf2, &outputFrames, &outputBytes);
            bytesConverted += inputBytes;
        }
        if (err == noErr) {
            SoundCallBackFcnUPP = NewSndCallBackProc (SoundCallBackFcn);
            err = SndNewChannel (&soundChan, sampledSynth, 0, SoundCallBackFcnUPP);
        }
        if (err == noErr) {
            AIFFSndHeader1.samplePtr = decomBuf1;
            AIFFSndHeader1.numChannels = outputFormat.numChannels;
            AIFFSndHeader1.sampleRate = outputFormat.sampleRate;
            AIFFSndHeader1.loopStart = 0;
            AIFFSndHeader1.loopEnd = 0;
            AIFFSndHeader1.encode = cmpSH;
            AIFFSndHeader1.baseFrequency = kMiddleC;
            AIFFSndHeader1.numFrames = outputFrames;
            AIFFSndHeader1.AIFFSampleRate = 0;                  // not used
            AIFFSndHeader1.markerChunk = nil;
            AIFFSndHeader1.format = outputFormat.format;
            AIFFSndHeader1.futureUse2 = 0;
            AIFFSndHeader1.stateVars = nil;
            AIFFSndHeader1.leftOverSamples = nil;
            AIFFSndHeader1.compressionID = fixedCompression;    // even uncompressed sounds use fixedCompression
            AIFFSndHeader1.packetSize = 0;                      // the Sound Manager will figure this out for us
            AIFFSndHeader1.snthID = 0;
            AIFFSndHeader1.sampleSize = outputFormat.sampleSize;
            AIFFSndHeader1.sampleArea[0] = 0;                   // no samples here because we use samplePtr instead

            // The second header is identical except that it points at the second buffer.
            BlockMoveData (&AIFFSndHeader1, &AIFFSndHeader2, sizeof (AIFFSndHeader1));
            AIFFSndHeader2.samplePtr = decomBuf2;

            playCmd1.cmd = bufferCmd;
            playCmd1.param1 = 0;                                // not used, but clear it out anyway just to be safe
            playCmd1.param2 = (long)&AIFFSndHeader1;

            playCmd2.cmd = bufferCmd;
            playCmd2.param1 = 0;                                // not used, but clear it out anyway just to be safe
            playCmd2.param2 = (long)&AIFFSndHeader2;

            whichBuffer = 1;                                    // buffer 1 will be free when the callback runs

            callCmd.cmd = callBackCmd;
            callCmd.param2 = SetCurrentA5 ();

            soundDone = false;
            gBufferDone = false;

            err = SndDoCommand (soundChan, &playCmd1, true);
        }
        if (err == noErr) {
            err = SndDoCommand (soundChan, &callCmd, true);
        }
        if (err == noErr) {
            err = SndDoCommand (soundChan, &playCmd2, true);
        }
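        // Playback loop: buffer 1, a callback, and buffer 2 are already queued on
        // the channel.  Each time the callback sets gBufferDone we refill the buffer
        // that just finished (or flush the converter for the final partial buffer)
        // and queue it again, alternating buffers until the whole file has played.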
        if (err == noErr) {
            while (!soundDone) {
                if (gBufferDone == true) {
                    if (whichBuffer == 1) {
                        playCmd = &playCmd1;
                        decomBuf = decomBuf1;
                        AIFFSndHeader = &AIFFSndHeader1;
                        whichBuffer = 2;
                    } else {
                        playCmd = &playCmd2;
                        decomBuf = decomBuf2;
                        AIFFSndHeader = &AIFFSndHeader2;
                        whichBuffer = 1;
                    }
                    if (bytesConverted < length) {
                        if (bytesConverted + inputBytes > length) {
                            inputBytes = length - bytesConverted;
                        }
                        BlockMoveData (AIFFBuffer + bytesConverted, compressedBuf, inputBytes);
                        (void)SoundConverterConvertBuffer (sc, compressedBuf, inputFrames, decomBuf, &outputFrames, &outputBytes);
                        bytesConverted += inputBytes;
                        AIFFSndHeader->numFrames = outputFrames;
                        gBufferDone = false;
                        (void)SndDoCommand (soundChan, &callCmd, true);     // Reuse callBackCmd.
                        (void)SndDoCommand (soundChan, playCmd, true);      // Play the next buffer.
                    } else {
                        (void)SoundConverterEndConversion (sc, decomBuf, &outputFrames, &outputBytes);
                        AIFFSndHeader->numFrames = outputFrames;
                        (void)SndDoCommand (soundChan, playCmd, true);      // Play the last buffer.
                        soundDone = true;
                    }
                }
            }
        }
        if (sc != nil) {
            err = SoundConverterClose (sc);
        }
        if (err == noErr) {
            err = SndDisposeChannel (soundChan, false);     // wait until the sound stops playing before disposing of the channel
        }
        if (theSoundInfo.refNum)
            FSClose (theSoundInfo.refNum);
        if (SoundCallBackFcnUPP)
            DisposeRoutineDescriptor (SoundCallBackFcnUPP);
        if (decomBuf1)
            DisposePtr (decomBuf1);
        if (decomBuf2)
            DisposePtr (decomBuf2);
        if (AIFFBuffer)
            DisposePtr (AIFFBuffer);
        if (compressedBuf)
            DisposePtr (compressedBuf);
    }

    return err;
}
OSErr InstallRequiredAppleEvents (void) {
    OSErr err;

    err = AEInstallEventHandler (kCoreEventClass, kAEOpenApplication, NewAEEventHandlerProc (HandleOApp), 0, false);
    if (err == noErr)
        err = AEInstallEventHandler (kCoreEventClass, kAEOpenDocuments, NewAEEventHandlerProc (HandleODoc), 0, false);
    if (err == noErr)
        err = AEInstallEventHandler (kCoreEventClass, kAEPrintDocuments, NewAEEventHandlerProc (HandlePDoc), 0, false);
    if (err == noErr)
        err = AEInstallEventHandler (kCoreEventClass, kAEQuitApplication, NewAEEventHandlerProc (HandleQuit), 0, false);

    return err;
}
pascal OSErr HandleOApp (AppleEvent *theAppleEvent, AppleEvent *reply, long handlerRefcon) {
#pragma unused (theAppleEvent, reply, handlerRefcon)
    return noErr;   /* We're up and running */
}
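/*
** HandleODoc receives the Open Documents Apple event, whether it came from
** the Finder (files dropped on the application) or from GetSoundToPlay
** above, and plays each file in the list.  Files of type 'pref' are
** skipped, presumably so the application's own preferences file is never
** handed to PlaySound.
*/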
pascal OSErr HandleODoc (AppleEvent *theAppleEvent, AppleEvent *reply, long handlerRefcon) {
#pragma unused (reply, handlerRefcon)
    AEDescList  docList;
    OSErr       err;
    long        i = 1,
                itemsInList;
    Size        actualSize;
    AEKeyword   keywd;
    DescType    returnedType;

    err = AEGetParamDesc (theAppleEvent, keyDirectObject, typeAEList, &docList);
    if (err == noErr) {
        err = AECountItems (&docList, &itemsInList);
    }
    if (err == noErr) {
        FSSpecPtr fileSpecPtr;

        do {
            fileSpecPtr = (FSSpecPtr)NewPtr (sizeof (FSSpec));
            err = MemError ();
            if (err == noErr) {
                err = AEGetNthPtr (&docList, i, typeFSS, &keywd, &returnedType, fileSpecPtr, sizeof (FSSpec), &actualSize);
            }
            if (err == noErr) {
                HParamBlockRec pb;

                pb.fileParam.ioCompletion = nil;
                pb.fileParam.ioNamePtr = fileSpecPtr->name;
                pb.fileParam.ioVRefNum = fileSpecPtr->vRefNum;
                pb.fileParam.ioDirID = fileSpecPtr->parID;
                pb.fileParam.ioFDirIndex = 0;
                err = PBHGetFInfoSync (&pb);
                if (err == noErr && pb.fileParam.ioFlFndrInfo.fdType != 'pref') {
                    err = PlaySound (fileSpecPtr);
                    DisposePtr ((Ptr)fileSpecPtr);
                }
            }
            i += 1;
        } while (err == noErr);
        // The last time through the loop we allocate a pointer we don't need.
        DisposePtr ((Ptr)fileSpecPtr);
    }
    (void)AEDisposeDesc (&docList);

    return err;
}
pascal OSErr HandlePDoc (AppleEvent *theAppleEvent, AppleEvent *reply, long handlerRefcon) {
#pragma unused (theAppleEvent, reply, handlerRefcon)
    return noErr;
}

pascal OSErr HandleQuit (AppleEvent *theAppleEvent, AppleEvent *reply, long handlerRefcon) {
#pragma unused (theAppleEvent, reply, handlerRefcon)
    gDone = true;
    return noErr;
}
Copyright © 2003 Apple Computer, Inc. All Rights Reserved. Updated: 2003-01-14