import React, { useEffect, useRef, useState, useCallback } from 'react';
import { decodeBase64, decodeAudioData } from '../utils/audioUtils';

interface PlayerProps {
  base64Audio: string | null;
}

/**
 * Plays a base64-encoded PCM clip (Gemini TTS output, 24 kHz mono) through
 * the Web Audio API and renders a live frequency-bar visualizer to a canvas.
 *
 * Decoding happens once per `base64Audio` value; each press of Play creates
 * a fresh AudioBufferSourceNode, since source nodes are single-use.
 */
const Player: React.FC<PlayerProps> = ({ base64Audio }) => {
  const [isPlaying, setIsPlaying] = useState(false);
  const [error, setError] = useState<string | null>(null);

  const audioContextRef = useRef<AudioContext | null>(null);
  const sourceNodeRef = useRef<AudioBufferSourceNode | null>(null);
  const audioBufferRef = useRef<AudioBuffer | null>(null);

  // Visualizer refs
  const canvasRef = useRef<HTMLCanvasElement>(null);
  // requestAnimationFrame ids are always > 0, so 0 is a safe "no frame" sentinel
  // (and satisfies environments where useRef requires an initial argument).
  const animationFrameRef = useRef<number>(0);
  const analyserRef = useRef<AnalyserNode | null>(null);

  /** Stop playback, clear the playing flag, and halt the animation loop. */
  const stopAudio = useCallback(() => {
    const source = sourceNodeRef.current;
    if (source) {
      // Detach the handler first: a deliberately stopped node still fires
      // onended asynchronously, which could clobber the state of a playback
      // started immediately afterwards.
      source.onended = null;
      try {
        source.stop();
      } catch {
        // stop() throws InvalidStateError if the node never started or has
        // already ended — safe to ignore.
      }
      sourceNodeRef.current = null;
    }
    setIsPlaying(false);
    if (animationFrameRef.current) {
      cancelAnimationFrame(animationFrameRef.current);
      animationFrameRef.current = 0;
    }
  }, []);

  // Decode the incoming clip whenever it changes; stop any in-flight
  // playback of the previous clip on change/unmount.
  useEffect(() => {
    const initAudio = async () => {
      if (!base64Audio) return;

      try {
        setError(null);
        // Lazily create a single shared AudioContext.
        if (!audioContextRef.current) {
          audioContextRef.current = new (window.AudioContext || (window as any).webkitAudioContext)({
            sampleRate: 24000 // Gemini TTS output sample rate
          });
        }

        const ctx = audioContextRef.current;
        const rawBytes = decodeBase64(base64Audio);
        const buffer = await decodeAudioData(rawBytes, ctx, 24000, 1);
        audioBufferRef.current = buffer;
      } catch (err) {
        console.error("Error decoding audio:", err);
        setError("Failed to process audio data.");
      }
    };

    void initAudio();

    return () => {
      stopAudio();
    };
  }, [base64Audio, stopAudio]);

  // Release the AudioContext when the component unmounts. Browsers cap the
  // number of live contexts per page, so leaking one is a real resource bug.
  useEffect(() => {
    return () => {
      const ctx = audioContextRef.current;
      if (ctx && ctx.state !== 'closed') {
        ctx.close().catch(() => { /* already closing — ignore */ });
      }
      audioContextRef.current = null;
    };
  }, []);

  /** Draw one frame of the frequency bars, then reschedule itself. */
  const drawVisualizer = () => {
    if (!analyserRef.current || !canvasRef.current) return;

    const canvas = canvasRef.current;
    const ctx = canvas.getContext('2d');
    if (!ctx) return;

    const bufferLength = analyserRef.current.frequencyBinCount;
    const dataArray = new Uint8Array(bufferLength);
    analyserRef.current.getByteFrequencyData(dataArray);

    ctx.clearRect(0, 0, canvas.width, canvas.height);

    // The gradient is identical for every bar — build it once per frame
    // instead of once per bar inside the loop.
    const gradient = ctx.createLinearGradient(0, canvas.height, 0, 0);
    gradient.addColorStop(0, '#3b82f6'); // blue-500
    gradient.addColorStop(1, '#a855f7'); // purple-500
    ctx.fillStyle = gradient;

    const barWidth = (canvas.width / bufferLength) * 2.5;
    let x = 0;

    for (let i = 0; i < bufferLength; i++) {
      const barHeight = dataArray[i] / 2;
      ctx.fillRect(x, canvas.height - barHeight, barWidth, barHeight);
      x += barWidth + 1;
    }

    animationFrameRef.current = requestAnimationFrame(drawVisualizer);
  };

  /** Start (or restart) playback of the decoded buffer. */
  const playAudio = async () => {
    if (!audioContextRef.current || !audioBufferRef.current) return;

    // Browsers create contexts suspended until a user gesture.
    if (audioContextRef.current.state === 'suspended') {
      await audioContextRef.current.resume();
    }

    // Replace any previous source node (they are single-use).
    const previous = sourceNodeRef.current;
    if (previous) {
      // Prevent the old node's onended from firing after the new node starts
      // and incorrectly flipping isPlaying / cancelling the animation.
      previous.onended = null;
      try {
        previous.stop();
      } catch {
        // already stopped — ignore
      }
    }

    const source = audioContextRef.current.createBufferSource();
    source.buffer = audioBufferRef.current;

    // Route through an analyser so the visualizer can sample frequency data.
    const analyser = audioContextRef.current.createAnalyser();
    analyser.fftSize = 256;
    source.connect(analyser);
    analyser.connect(audioContextRef.current.destination);
    analyserRef.current = analyser;

    source.onended = () => {
      // Only react if this node is still the active one; it may have been
      // replaced by a newer play or cleared by stopAudio.
      if (sourceNodeRef.current === source) {
        sourceNodeRef.current = null;
        setIsPlaying(false);
        if (animationFrameRef.current) cancelAnimationFrame(animationFrameRef.current);
      }
    };

    source.start();
    sourceNodeRef.current = source;
    setIsPlaying(true);
    drawVisualizer();
  };

  const togglePlay = () => {
    if (isPlaying) {
      stopAudio();
    } else {
      // Fire-and-forget: errors surface via the error state / console.
      void playAudio();
    }
  };

  if (!base64Audio) return null;

  return (
    <div className="mt-8 bg-gray-800 rounded-xl p-6 shadow-xl border border-gray-700 w-full max-w-2xl mx-auto animate-fade-in">
       <div className="flex flex-col items-center">
        <h3 className="text-lg font-semibold text-white mb-4">Now Playing</h3>
        
        {/* Visualizer Canvas */}
        <div className="w-full h-32 bg-gray-900 rounded-lg mb-6 overflow-hidden relative border border-gray-700">
             <canvas 
                ref={canvasRef} 
                width={600} 
                height={128} 
                className="w-full h-full"
             />
            {!isPlaying && !error && (
                <div className="absolute inset-0 flex items-center justify-center text-gray-500 text-sm">
                    Press Play
                </div>
            )}
        </div>

        {error ? (
             <p className="text-red-400 mb-4">{error}</p>
        ) : (
            <button
            onClick={togglePlay}
            className={`flex items-center justify-center w-16 h-16 rounded-full transition-all shadow-lg hover:shadow-blue-500/40 focus:outline-none focus:ring-4 focus:ring-blue-500/30 ${
                isPlaying 
                ? 'bg-red-500 hover:bg-red-600 text-white' 
                : 'bg-blue-500 hover:bg-blue-600 text-white'
            }`}
            >
            {isPlaying ? (
                 /* Pause Icon */
                <svg className="w-8 h-8" fill="currentColor" viewBox="0 0 24 24">
                <path d="M6 4h4v16H6V4zm8 0h4v16h-4V4z" />
                </svg>
            ) : (
                 /* Play Icon */
                <svg className="w-8 h-8 ml-1" fill="currentColor" viewBox="0 0 24 24">
                <path d="M8 5v14l11-7z" />
                </svg>
            )}
            </button>
        )}
       </div>
    </div>
  );
};

export default Player;