#include "widget.h"
#include "ui_widget.h"

#include <QtCore>
#include "my_SDL_sysaudio.h"

extern "C" void SDL_CalculateAudioSpec(SDL_AudioSpec *spec)
{
    // Derive the dependent fields of an audio spec: the silence byte for
    // the sample format and the total buffer size in bytes.
    // Only unsigned 8-bit audio is silent at 0x80; every other format
    // (signed/16-bit) is silent at 0x00.
    spec->silence = (spec->format == AUDIO_U8) ? 0x80 : 0x00;

    // The low byte of an SDL audio format encodes bits per sample.
    // size = bytes-per-sample * channels * samples-per-buffer
    spec->size = (spec->format & 0xFF) / 8;
    spec->size *= spec->channels;
    spec->size *= spec->samples;
}

void Widget::sdl_audio_callback(Uint8 *stream, int len)
{
    // Fill `len` bytes of the device buffer `stream` with decoded audio.
    // Bytes left over from the previous call sit in m_audio_buffer and are
    // consumed first; then packets are decoded from m_audio_packet_queue
    // until enough bytes are gathered. Surplus decoded bytes are stashed
    // back into m_audio_buffer for the next call.
    qDebug() << "[Widget::sdl_audio_callback()]" << len;

    Q_ASSERT(len%2==0); // S16 output: requests must be whole samples

    if(m_audio_packet_queue.size()==0)
    {
        // Out of audio: emit silence and tell the pump thread to stop.
        memset(stream, 0, len);
        ////SDL_PauseAudio(1);
        v_audio_dev->paused = 1;
        v_audio_dev->enabled = 0;
        return;
    }
    T_AVPacket &v_head_packet = m_audio_packet_queue.head();
    m_audio_temp = v_head_packet; // working copy; its data/size advance below

    int left_len = len; // bytes still needed
    int read_len = 0;   // bytes gathered so far

    // Drain previously buffered decoded bytes first.
    QBuffer v_read_buffer(&m_audio_buffer);
    v_read_buffer.open(QIODevice::ReadOnly);
    QByteArray v_result = v_read_buffer.read(left_len);
    v_read_buffer.close();
    m_audio_buffer = m_audio_buffer.mid(v_result.size());
    left_len -= v_result.size();
    read_len += v_result.size();


    // Audio clock: nudge the shared timeline halfway toward this packet's
    // pts (presumably milliseconds for this container -- TODO confirm).
    if(m_time_line.isNull()) m_time_line.start();
    int v_elapsed = m_time_line.elapsed();
    m_audio_elapsed = m_audio_temp.m_av_packet.pts;

    qDebug()
            << "(A)[frame]" << m_audio_temp.m_frame_no
            << "[m_audio_elapsed]" << m_audio_elapsed
            << "[v_elapsed]" << v_elapsed
            << "[diff]" << v_elapsed - m_audio_elapsed
            << "[audioCtx->sample_rate]" << audioCtx->sample_rate
            ;
    int v_diff = v_elapsed - m_audio_elapsed;
    m_time_line = m_time_line.addMSecs(v_diff/2);

    while(left_len > 0)
    {
        int v_dec_len = sizeof(m_audio_decode_array);
        memset(m_audio_decode_array, 0, sizeof(m_audio_decode_array));
        qDebug() << "[m_audio_temp.m_av_packet.size]" << m_audio_temp.m_av_packet.size;
        int v_enc_len = avcodec_decode_audio3(
                audioCtx,
                (int16_t *)m_audio_decode_array,
                &v_dec_len,
                &m_audio_temp.m_av_packet
                );
        if (v_enc_len < 0)
        {
            qDebug() << "(B)2" << v_enc_len << m_audio_packet_queue.size();
            /* if error, we skip the frame */
            m_audio_temp.m_av_packet.size = 0; //frame is finished
        }
        else
        {
            qDebug() << "(B)3:len1=" << v_enc_len << "[data_size]" << v_dec_len;
            if (audioCtx->sample_fmt == SAMPLE_FMT_S16)
            {
                // Already in the output format; append as-is.
                v_result.append((char *)m_audio_decode_array, v_dec_len);
            }
            else
            {
                // Convert the decoder's sample format to interleaved S16.
                AVAudioConvert *convCtx =
                        av_audio_convert_alloc(
                                SAMPLE_FMT_S16,
                                1,
                                audioCtx->sample_fmt,
                                1,
                                NULL,
                                0);
                const void *v_ibuf[6]= {m_audio_decode_array};
                void *v_obuf[6]= {m_audio_convert_array};
                int v_istride[6]= {av_get_bits_per_sample_format(audioCtx->sample_fmt)/8};
                int v_ostride[6]= {av_get_bits_per_sample_format(SAMPLE_FMT_S16)/8};
                int v_len= v_dec_len/v_istride[0];
                if (av_audio_convert(convCtx, v_obuf, v_ostride, v_ibuf, v_istride, v_len)<0)
                {
                    qDebug() << "av_audio_convert() failed";
                    av_audio_convert_free(convCtx); // was leaked on this early-exit path
                    break;
                }
                av_audio_convert_free(convCtx);
                qDebug() << "av_audio_convert() successful";
                v_dec_len = v_len * v_ostride[0];
                v_result.append((char *)m_audio_convert_array, v_dec_len);
            }
            m_audio_temp.m_av_packet.data += v_enc_len;
            m_audio_temp.m_av_packet.size -= v_enc_len;
            left_len -= v_dec_len;
            read_len += v_dec_len;
            if (v_enc_len == 0 && v_dec_len == 0)
            {
                // Decoder consumed nothing and produced nothing; drop the
                // packet instead of spinning on it forever.
                m_audio_temp.m_av_packet.size = 0;
            }
        }
        if(m_audio_temp.m_av_packet.size<=0)
        {
            // Packet fully consumed: free the queued original and move on.
            av_free_packet(&m_audio_packet_queue.head().m_av_packet);
            m_audio_packet_queue.dequeue();
            // pop new one
            if(m_audio_packet_queue.size()==0) break;
            m_audio_temp = m_audio_packet_queue.head();
        }
    }
    Q_ASSERT(read_len==v_result.size());

    qDebug() << "[len]" << len << "[read_len]" << read_len;
    if(read_len <= len)
    {
        memcpy(stream, v_result.constData(), v_result.size());
    }
    else
    {
        // Got more than requested: copy `len` bytes, stash the remainder.
        memcpy(stream, v_result.constData(), len);
        m_audio_buffer.append(v_result.mid(len));
    }
}

// Constructor: opens the hard-coded FLV file, finds and opens the video and
// audio decoders, creates the (DirectSound-backed) audio device, reads ALL
// packets into in-memory queues up front, then starts the render timer and
// the audio pump thread.
Widget::Widget(QWidget *parent) :
    QWidget(parent),
    ui(new Ui::Widget),
    m_audio_elapsed(0),
    m_audio_thread(this)
{
    ui->setupUi(this);

    filename="C:/6tQzHr5mgbc_34.flv";
    //filename="C:/EWeSEyvAJpc_34.flv";
    qDebug() << filename;

    av_register_all();

    if(av_open_input_file(&formatCtx, filename.toLocal8Bit().constData(), NULL, 0, NULL)) {
        qDebug() << "Can't open video";
        return;
    }

    // NOTE(review): failure here is only logged, not fatal -- dump_format
    // and stream iteration below may still work on partial info.
    if(av_find_stream_info(formatCtx)<0) {
        qDebug() << "Can't find stream info";
    }

    dump_format(formatCtx, 0, filename.toLocal8Bit().constData(), false);

    //[Video] locate the first video stream
    videoStream=-1;
    for(unsigned int i=0; i<formatCtx->nb_streams; i++)
        if(formatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO) {
            videoStream=i;
            break;
        }
    if(videoStream<0) {
        qDebug() << "No video stream.";
        return;
    }

    qDebug() << "[videoStream]" << videoStream;

    //[Video] find the decoder for the video stream
    videoCtx=formatCtx->streams[videoStream]->codec;
    videoCodec=avcodec_find_decoder(videoCtx->codec_id);
    if(!videoCodec) {
        qDebug() << "Unsupported video codec.";
        return;
    }

    //[Video] open the decoder
    if(avcodec_open(videoCtx, videoCodec)<0) {
        qDebug() << "avcodec_open (video) failed";
        return;
    }

    m_v_stream = formatCtx->streams[videoStream];

    //[Audio] locate the first audio stream
    audioStream=-1;
    for(unsigned int i=0; i<formatCtx->nb_streams; i++)
        if(formatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO) {
            audioStream=i;
            break;
        }
    if(audioStream<0) {
        qDebug() << "No audio stream.";
        return;
    }

    qDebug() << "[audioStream]" << audioStream;

    //[Audio]
    audioCtx=formatCtx->streams[audioStream]->codec;

    // Ask the decoder to downmix to stereo at most.
    if (audioCtx->channels > 0) {
        audioCtx->request_channels = FFMIN(2, audioCtx->channels);
    } else {
        audioCtx->request_channels = 2;
    }

    audioCodec=avcodec_find_decoder(audioCtx->codec_id);
    if(!audioCodec) {
        qDebug() << "Unsupported audio codec.";
        return;
    }

    //[Audio] open the decoder
    if(avcodec_open(audioCtx, audioCodec)<0) {
        qDebug() << "avcodec_open (audio) failed";
        return;
    }

    qDebug() << "[audioCodec->long_name]" << audioCodec->long_name;
    qDebug() << "[audioCtx->sample_fmt]" << audioCtx->sample_fmt;

    qDebug() << "[audioCtx->sample_rate]" << audioCtx->sample_rate;
    //http://d.hatena.ne.jp/rainbow_beam/20100210/1265809987
    SDL_AudioSpec wanted_spec; //, spec;
    // Zero first: the struct is memcpy'd into the device below, and the
    // fields we don't set (size, callback, userdata, ...) would otherwise
    // be uninitialized stack garbage.
    memset(&wanted_spec, 0, sizeof(wanted_spec));
    //44100 or 22050, 11025
    wanted_spec.freq = audioCtx->sample_rate;
    wanted_spec.format = AUDIO_S16SYS;
    wanted_spec.channels = audioCtx->channels;
    wanted_spec.silence = 0;
    wanted_spec.samples = 1024;
    // Derive silence/size; T_AudioThread::run() reads spec.size, which was
    // never computed before this call was added.
    SDL_CalculateAudioSpec(&wanted_spec);

    memset(&m_audio_temp, 0, sizeof(m_audio_temp));

    extern AudioBootStrap MY_DSOUND_bootstrap;
    v_audio_dev = MY_DSOUND_bootstrap.create(-1);
    qDebug() << "[v_dev]" << v_audio_dev;
    memcpy(&v_audio_dev->spec, &wanted_spec, sizeof(SDL_AudioSpec));
    ////v_audio_dev->convert.needed = 0;
    v_audio_dev->enabled = 1;
    v_audio_dev->paused  = 1;
    // OpenAudio presumably returns 0 on success / -1 on failure, so +1
    // maps that to nonzero/zero for the `opened` flag -- TODO confirm.
    v_audio_dev->opened = v_audio_dev->OpenAudio(v_audio_dev, &v_audio_dev->spec)+1;

    if(!v_audio_dev->opened)
    {
        qDebug() << "(!v_audio_dev->opened)";
        return;
    }

    //double v_time_base = 1.0 * v_stream->time_base.num / v_stream->time_base.den;
    qDebug() << "[formatCtx->duration]" << formatCtx->duration << QString("[%1 sec]").arg(formatCtx->duration / AV_TIME_BASE);
    qDebug()<< "[v_stream->start_time]"
            << m_v_stream->start_time
            ;
    qDebug()<< "[codecCtx->time_base]"
            << videoCtx->time_base.num
            << videoCtx->time_base.den
            << 1.0 * videoCtx->time_base.num / videoCtx->time_base.den
            << 1.0 / (1.0 * videoCtx->time_base.num / videoCtx->time_base.den)
            ;
    qDebug()<< "[v_stream->avg_frame_rate]"
            << m_v_stream->avg_frame_rate.num
            << m_v_stream->avg_frame_rate.den
            << 1.0 * m_v_stream->avg_frame_rate.num / m_v_stream->avg_frame_rate.den
            ;
    qDebug()<< "[v_stream->r_frame_rate]"
            << m_v_stream->r_frame_rate.num
            << m_v_stream->r_frame_rate.den
            << 1.0 * m_v_stream->r_frame_rate.num / m_v_stream->r_frame_rate.den
            ;
    qDebug()<< "[v_stream->time_base]"
            << m_v_stream->time_base.num
            << m_v_stream->time_base.den
            << 1.0 * m_v_stream->time_base.num / m_v_stream->time_base.den
            ;

    frame=avcodec_alloc_frame();
    m_av_picture = new T_AVPicture(PIX_FMT_BGRA, this->width(), this->height());

    frameNo=0;
    m_drop_count = 0;


    this->setWindowTitle(filename);
    this->resize(videoCtx->width, videoCtx->height);

    m_fps1.wake();
    m_fps2.wake();

    // Demux the WHOLE file into memory up front: video and audio packets
    // each go to their own queue, everything else is freed immediately.
    T_AVPacket v_av_packet;
    ////int v_video_frame_count = 0;
    while(av_read_frame(formatCtx, &v_av_packet.m_av_packet)>=0)
    {
        if(v_av_packet.m_av_packet.stream_index==videoStream)
        {
            v_av_packet.m_frame_no = m_video_packet_queue.size();
            m_video_packet_queue.enqueue(v_av_packet);
        }
        else if(v_av_packet.m_av_packet.stream_index==audioStream)
        {
            v_av_packet.m_frame_no = m_audio_packet_queue.size();
            m_audio_packet_queue.enqueue(v_av_packet);
        }
        else
        {
            av_free_packet(&v_av_packet.m_av_packet);
            continue;
        }
    }

    qDebug() << "read end";
    qDebug() << "[m_video_packet_queue.size()]" << m_video_packet_queue.size();
    qDebug() << "[m_audio_packet_queue.size()]" << m_audio_packet_queue.size();

#if 0x0
    connect(&m_timer, SIGNAL(timeout()), SLOT(onTimerTimeout()));
    m_timer.start(10);
    SDL_PauseAudio(0);
#else
    // Start the video render timer and the audio pump thread.
    connect(&m_timer, SIGNAL(timeout()), SLOT(onTimerTimeout()));
    m_timer.start(10);
    v_audio_dev->paused = 0;
    m_audio_thread.start(QThread::HighPriority);
#endif
}

// Destructor. Stops the audio pump thread and joins it BEFORE members are
// destroyed -- previously the thread was only paused (enabled stayed 1) and
// never joined, so T_AudioThread::run() could keep dereferencing this
// widget's members after destruction.
Widget::~Widget()
{
#if 0x0
    SDL_PauseAudio(1);
#endif
    v_audio_dev->paused = 1;
    v_audio_dev->enabled = 0;   // makes T_AudioThread::run() leave its loop
    m_audio_thread.wait();      // join the pump thread before teardown
    qDebug() << "[Widget::~Widget()]";
    delete ui;

#if 0x0
    // NOTE(review): codec/frame/format cleanup is disabled, so these leak
    // on exit -- re-enable once shutdown ordering is verified.
    delete m_av_picture;
    av_free(frame);
    avcodec_close(videoCtx);
    av_close_input_file(formatCtx);
#endif
}

void T_AudioThread::run()
{
    SDL_AudioDevice *v_audio_dev = m_widget->v_audio_dev;

    while(v_audio_dev->enabled)
    {
        qDebug() << QDateTime::currentDateTime();
        //::Sleep(10);
        //sdl_audio_callback();
#if 0x0
        Uint8 *stream = m_widget->DX5_GetAudioBuf();
        memset(stream, 0, m_widget->mixlen);
        m_widget->sdl_audio_callback(stream, m_widget->mixlen);
        m_widget->DX5_PlayAudio();
        m_widget->DX5_WaitAudio_BusyWait();
#else
        Uint8 *stream = v_audio_dev->GetAudioBuf(v_audio_dev);
        memset(stream, v_audio_dev->spec.silence, v_audio_dev->spec.size);
        if ( ! v_audio_dev->paused )
        {
            m_widget->sdl_audio_callback(stream, v_audio_dev->spec.size);
        }
        v_audio_dev->PlayAudio(v_audio_dev);
        v_audio_dev->WaitAudio(v_audio_dev);
#endif
    }
    v_audio_dev->WaitDone(v_audio_dev);
}

// Video timer tick: decode queued video packets, drop frames that are
// already behind the timeline, scale the current frame to the widget size
// and publish it into m_displayBuffer for the paint path.
void Widget::onTimerTimeout()
{
    T_MutexLocker v_locker(&m_frame_mutex);
    qDebug() << "[Widget::onTimerTimeout()]";
    if(m_video_packet_queue.size()==0)
    {
        qDebug() << "Widget::onTimerTimeout(1)";
        m_timer.stop();
        return;
    }
    if(m_time_line.isNull()) m_time_line.start();
    int v_elapsed = m_time_line.elapsed();
    //int v_diff = v_elapsed - m_audio_elapsed;
    //m_time_line.addMSecs(-v_diff);

    // Head packet not due yet (pts presumably in ms -- same clock the
    // audio callback adjusts).
    T_AVPacket &v_head_packet = m_video_packet_queue.head();
    if(v_elapsed < v_head_packet.m_av_packet.pts)
    {
        qDebug() << "Widget::onTimerTimeout(2)" << v_elapsed << v_head_packet.m_av_packet.pts;
        return;
    }
    for(;;)
    {
        T_AVPacket v_curr_packet = m_video_packet_queue.dequeue();
        frameNo = v_curr_packet.m_frame_no;
        avcodec_decode_video(
                videoCtx,
                frame,
                &frameFinished,
                v_curr_packet.m_av_packet.data,
                v_curr_packet.m_av_packet.size
                );
        qDebug() << "[m_video_packet_queue.size()]" << m_video_packet_queue.size() << frameFinished;
        if(frameFinished)
        {
            if(m_video_packet_queue.size()==0)
            {
                //there is no next frame
                qDebug() << "(1)       [frameNo]" << frameNo << "[packet.pts]" << v_curr_packet.m_av_packet.pts;
                m_fps1.add(1);
                m_fps2.add(1);
            }
            else
            {
                T_AVPacket &v_next_packet = m_video_packet_queue.head();
                if(v_elapsed >= v_next_packet.m_av_packet.pts)
                {
                    // The next frame is also due: drop this one to catch up.
                    qDebug() << "=====>DROP[frameNo]" << frameNo << "[packet.pts]" << v_curr_packet.m_av_packet.pts;
                    m_fps1.add(1);
                    m_drop_count++;
                    av_free_packet(&v_curr_packet.m_av_packet);
                    continue;
                }
                // should render current frame
                qDebug() << "(2)       [frameNo]" << frameNo << "[packet.pts]" << v_curr_packet.m_av_packet.pts;
                m_fps1.add(1);
                m_fps2.add(1);
            }
            //if(frameNo > 30) goto skip;
            {
                m_av_picture->resize(this->width(), this->height());
                SwsContext *v_sws_ctx = sws_getContext(
                        videoCtx->width, //int srcW,
                        videoCtx->height, //int srcH,
                        videoCtx->pix_fmt, //enum PixelFormat srcFormat,
                        m_av_picture->m_width, //int dstW,
                        m_av_picture->m_height, //int dstH,
                        m_av_picture->m_pix_fmt, //enum PixelFormat dstFormat,
                        SWS_BICUBIC, //int flags,
                        NULL, //SwsFilter *srcFilter,
                        NULL, //SwsFilter *dstFilter,
                        NULL  //const double *param
                        );
                if (v_sws_ctx) // sws_getContext can fail and return NULL
                {
                    int v_scale_result = sws_scale(
                            v_sws_ctx,
                            frame->data,
                            frame->linesize,
                            0,
                            videoCtx->height,
                            m_av_picture->m_av_picture.data,
                            m_av_picture->m_av_picture.linesize
                            );
                    //qDebug() << "[v_scale_result]" << v_scale_result;
                    sws_freeContext(v_sws_ctx);
                }
                QImage v_img(
                        m_av_picture->m_av_picture.data[0],
                        m_av_picture->m_width,
                        m_av_picture->m_height,
                        QImage::Format_ARGB32
                        );
                // A QImage built on an external buffer does NOT own the
                // data; deep-copy so m_displayBuffer stays valid while the
                // picture buffer is overwritten/resized by later frames.
                m_display_mutex.lock();
                m_displayBuffer = v_img.copy();
                m_display_mutex.unlock();
            }
            skip:
            QWidget::update();
        }
        av_free_packet(&v_curr_packet.m_av_packet);
        break;
    }
}

#if 0x0
// Locks the next DirectSound chunk for writing and returns a pointer to it.
// Returns NULL on failure; on success `locked_buf` holds the locked region,
// which DX5_PlayAudio() later unlocks. (Disabled code path -- #if 0x0.)
Uint8 *Widget::DX5_GetAudioBuf()
{
    DWORD   cursor, junk;
    HRESULT result;
    DWORD   rawlen;

    /* Figure out which blocks to fill next */
    locked_buf = NULL;
    result = IDirectSoundBuffer_GetCurrentPosition(mixbuf, &junk, &cursor);
    if ( result == DSERR_BUFFERLOST ) {
        // Buffer lost (e.g. focus change): restore and retry once.
        IDirectSoundBuffer_Restore(mixbuf);
        result = IDirectSoundBuffer_GetCurrentPosition(mixbuf,
                                &junk, &cursor);
    }
    if ( result != DS_OK ) {
        qDebug() << "IDirectSoundBuffer_GetCurrentPosition() failed" << result;
        return(NULL);
    }
    cursor /= mixlen; // convert byte position to chunk index
#ifdef DEBUG_SOUND
    /* Detect audio dropouts */
    { DWORD spot = cursor;
      if ( spot < lastchunk ) {
        spot += NUM_BUFFERS;
      }
      if ( spot > lastchunk+1 ) {
        fprintf(stderr, "Audio dropout, missed %d fragments\n",
                (spot - (lastchunk+1)));
      }
    }
#endif
    lastchunk = cursor;
    // Write into the chunk AFTER the one currently playing.
    cursor = (cursor+1)%NUM_BUFFERS;
    cursor *= mixlen; // back to a byte offset

    /* Lock the audio buffer */
    result = IDirectSoundBuffer_Lock(mixbuf, cursor, mixlen,
                (LPVOID *)&locked_buf, &rawlen, NULL, &junk, 0);
    if ( result == DSERR_BUFFERLOST ) {
        // Same restore-and-retry dance as above.
        IDirectSoundBuffer_Restore(mixbuf);
        result = IDirectSoundBuffer_Lock(mixbuf, cursor, mixlen,
                (LPVOID *)&locked_buf, &rawlen, NULL, &junk, 0);
    }
    if ( result != DS_OK ) {
        qDebug() << "IDirectSoundBuffer_Lock() failed" << result;
        return(NULL);
    }
    return(locked_buf);
}

// Releases the region locked by DX5_GetAudioBuf() so DirectSound can play
// the freshly mixed chunk. No-op if nothing is currently locked.
void Widget::DX5_PlayAudio()
{
    if ( locked_buf != NULL ) {
        IDirectSoundBuffer_Unlock(mixbuf, locked_buf, mixlen, NULL, 0);
    }
}

// Semi-busy-waits until DirectSound's play cursor has moved past the chunk
// recorded in `lastchunk`, restarting playback or restoring the buffer if
// it was lost along the way. (Disabled code path -- #if 0x0.)
void Widget::DX5_WaitAudio_BusyWait()
{
    qDebug() << "[Widget::DX5_WaitAudio_BusyWait()]";
    DWORD status;
    DWORD cursor, junk;
    HRESULT result;

    /* Semi-busy wait, since we have no way of getting play notification
       on a primary mixing buffer located in hardware (DirectX 5.0)
    */
    qDebug() << "[Widget::DX5_WaitAudio_BusyWait(1)]";
    result = IDirectSoundBuffer_GetCurrentPosition(mixbuf, &junk, &cursor);
    if ( result != DS_OK )
    {
        qDebug() << "[Widget::DX5_WaitAudio_BusyWait(2)]";
        if ( result == DSERR_BUFFERLOST ) {
            qDebug() << "[Widget::DX5_WaitAudio_BusyWait(3)]";
            IDirectSoundBuffer_Restore(mixbuf);
        }
        qDebug() << "IDirectSoundBuffer_GetCurrentPosition() failed" << result;
#ifdef DEBUG_SOUND
        SetDSerror("DirectSound GetCurrentPosition", result);
#endif
        return;
    }

    qDebug() << "[Widget::DX5_WaitAudio_BusyWait(4)]" << cursor << mixlen << lastchunk;
    // Spin until the play cursor leaves the chunk we last filled.
    while ( (cursor/mixlen) == lastchunk ) {
        qDebug() << "[Widget::DX5_WaitAudio_BusyWait(5)]" << cursor << mixlen << lastchunk;
        /* FIXME: find out how much time is left and sleep that long */
        SDL_Delay(1);

        /* Try to restore a lost sound buffer */
        IDirectSoundBuffer_GetStatus(mixbuf, &status);
        if ( (status&DSBSTATUS_BUFFERLOST) ) {
            qDebug() << "[Widget::DX5_WaitAudio_BusyWait(6)]" << cursor << mixlen << lastchunk;
            IDirectSoundBuffer_Restore(mixbuf);
            IDirectSoundBuffer_GetStatus(mixbuf, &status);
            if ( (status&DSBSTATUS_BUFFERLOST) ) {
                // Restore failed twice: give up on this wait.
                qDebug() << "(status&DSBSTATUS_BUFFERLOST)";
                break;
            }
        }
        qDebug() << "[Widget::DX5_WaitAudio_BusyWait(7)]" << (status&DSBSTATUS_PLAYING);
        // If playback stopped (e.g. after a restore), kick it off again.
        if ( ! (status&DSBSTATUS_PLAYING) ) {
            result = IDirectSoundBuffer_Play(mixbuf, 0, 0, DSBPLAY_LOOPING);
            if ( result == DS_OK ) {
                qDebug() << "IDirectSoundBuffer_Play(): OK";
                continue;
            }
            qDebug() << "IDirectSoundBuffer_Play(): NG" << result;
#ifdef DEBUG_SOUND
            SetDSerror("DirectSound Play", result);
#endif
            return;
        }

        /* Find out where we are playing */
        result = IDirectSoundBuffer_GetCurrentPosition(mixbuf,
                                &junk, &cursor);
        if ( result != DS_OK ) {
            // fixed typo: was "feiled"
            qDebug() << "IDirectSoundBuffer_GetCurrentPosition() failed" << result;
            return;
        }
    }
    qDebug() << "[Widget::DX5_WaitAudio_BusyWait()] END";
}


/* This function tries to create a secondary audio buffer, and returns the
   number of audio chunks available in the created buffer.
   Returns -1 on any failure. On success *sndbuf holds the new buffer,
   sized numchunks*chunksize bytes and pre-filled with silence.
   (Disabled code path -- #if 0x0.)
*/
static int CreateSecondary(LPDIRECTSOUND sndObj, HWND focus,
    LPDIRECTSOUNDBUFFER *sndbuf, WAVEFORMATEX *wavefmt, Uint32 chunksize)
{
    const int numchunks = 8;
    HRESULT result;
    DSBUFFERDESC format;
    LPVOID pvAudioPtr1, pvAudioPtr2;
    DWORD  dwAudioBytes1, dwAudioBytes2;

    /* Try to set primary mixing privileges */
    if ( focus ) {
        result = IDirectSound_SetCooperativeLevel(sndObj,
                    focus, DSSCL_PRIORITY);
    } else {
        // No focus window: fall back to the desktop window at normal level.
        result = IDirectSound_SetCooperativeLevel(sndObj,
                    GetDesktopWindow(), DSSCL_NORMAL);
    }
    if ( result != DS_OK ) {
#ifdef DEBUG_SOUND
        SetDSerror("DirectSound SetCooperativeLevel", result);
#endif
        return(-1);
    }

    /* Try to create the secondary buffer */
    SDL_memset(&format, 0, sizeof(format));
    format.dwSize = sizeof(format);
    format.dwFlags = DSBCAPS_GETCURRENTPOSITION2;
#ifdef USE_POSITION_NOTIFY
    format.dwFlags |= DSBCAPS_CTRLPOSITIONNOTIFY;
#endif
    if ( ! focus ) {
        format.dwFlags |= DSBCAPS_GLOBALFOCUS;
    } else {
        format.dwFlags |= DSBCAPS_STICKYFOCUS;
    }
    format.dwBufferBytes = numchunks*chunksize;
    if ( (format.dwBufferBytes < DSBSIZE_MIN) ||
         (format.dwBufferBytes > DSBSIZE_MAX) ) {
        SDL_SetError("Sound buffer size must be between %d and %d",
                DSBSIZE_MIN/numchunks, DSBSIZE_MAX/numchunks);
        return(-1);
    }
    format.dwReserved = 0;
    format.lpwfxFormat = wavefmt;
    result = IDirectSound_CreateSoundBuffer(sndObj, &format, sndbuf, NULL);
    if ( result != DS_OK ) {
        //SetDSerror("DirectSound CreateSoundBuffer", result);
        qDebug() << "IDirectSound_CreateSoundBuffer() failed" << result;
        return(-1);
    }
    IDirectSoundBuffer_SetFormat(*sndbuf, wavefmt);

    /* Silence the initial audio buffer */
    result = IDirectSoundBuffer_Lock(*sndbuf, 0, format.dwBufferBytes,
                                     (LPVOID *)&pvAudioPtr1, &dwAudioBytes1,
                                     (LPVOID *)&pvAudioPtr2, &dwAudioBytes2,
                                     DSBLOCK_ENTIREBUFFER);
    if ( result == DS_OK ) {
        // 8-bit audio is unsigned, so silence is 0x80 rather than 0x00.
        if ( wavefmt->wBitsPerSample == 8 ) {
            SDL_memset(pvAudioPtr1, 0x80, dwAudioBytes1);
        } else {
            SDL_memset(pvAudioPtr1, 0x00, dwAudioBytes1);
        }
        IDirectSoundBuffer_Unlock(*sndbuf,
                                  (LPVOID)pvAudioPtr1, dwAudioBytes1,
                                  (LPVOID)pvAudioPtr2, dwAudioBytes2);
    }

    /* We're ready to go */
    return(numchunks);
}
#endif
