import { Callout } from "nextra/components";
import VideoEmbed from "../components/video-embed";
import { PackageName, NpmInstall, ImportStatement } from "../components";

# 🚀 Quickstart Guide

Build your first AI-powered React mapping app in under 5 minutes. Create a React application that can detect objects in satellite imagery with just a few lines of code.

## Quick Links

- **Documentation**: [docs.geobase.app/geoai](https://docs.geobase.app/geoai) - Comprehensive documentation, examples, and API reference
- **Live Examples**: [docs.geobase.app/geoai-live](https://docs.geobase.app/geoai-live) - Interactive examples and demos
- **Community**: [GitHub Discussions](https://github.com/decision-labs/geoai.js/discussions) - Ask questions, share ideas, and connect with other developers
- **Code**: [GitHub Repository](https://github.com/decision-labs/geoai.js) - Source code and contributions
- **Issues**: [GitHub Issues](https://github.com/decision-labs/geoai.js/issues) - Report bugs and request features

<Callout type="default" emoji="🚀">
  **NEW: Meta's DINOv3 Now Available!** We've integrated Meta's groundbreaking DINOv3 model for image feature extraction.
  [Try the DINOv3 demo →](/supported-tasks/image-feature-extraction) or [learn more about DINOv3](https://ai.meta.com/dinov3/).
</Callout>

<VideoEmbed
  src="https://geobase-docs.s3.amazonaws.com/geobase-ai-assets/oil-storage-tank-detection.mp4"
  title="Oil Storage Tank Detection Demo"
/>

<Callout type="info" emoji="ℹ️">
  This guide uses the latest version of <PackageName /> for optimal
  performance and compatibility.
</Callout>

## Prerequisites

Before getting started, ensure you have:

- Basic React/TypeScript knowledge
- Node.js 16+ installed
- A map provider API key if you use Geobase or Mapbox (the free public ESRI imagery option below needs no key)

<Callout type="warning" emoji="⚠️">
  Make sure your Node.js version is 16 or higher for compatibility with the
  latest dependencies.
</Callout>

## Development

### Step 1: Create React App

#### Option A: Create a new app manually

Create a new React app and install the required dependencies:

```bash
npx create-react-app my-geoai-app --template typescript
cd my-geoai-app
# The package is published as "geoai" (the component below imports from 'geoai')
npm install maplibre-gl maplibre-gl-draw geoai
```

#### Option B: Clone a Quickstart example
```bash
# Download the repository archive and extract only the quickstart example
curl -L https://github.com/decision-labs/geoai.js/archive/refs/heads/main.zip -o repo.zip
unzip repo.zip "geoai.js-main/examples/01-quickstart/*"
mkdir -p examples
mv geoai.js-main/examples/01-quickstart examples/
# Clean up the archive and the leftover extracted tree
rm -rf geoai.js-main repo.zip

# Install dependencies and start the dev server
cd examples/01-quickstart
pnpm i
# or
# npm install
pnpm start
# or
# npm start
```

### Step 2: Setup Your Map Provider

Choose your preferred map data provider:

#### Option A: ESRI

```javascript
// Public ESRI World Imagery tiles — note this config has no API-key field.
const config = {
  provider: "esri",
  serviceUrl: "https://server.arcgisonline.com/ArcGIS/rest/services",
  serviceName: "World_Imagery",
  tileSize: 256, // tile edge length in pixels
  attribution: "ESRI World Imagery",
};
```

#### Option B: Geobase (Recommended if you have your own imagery)

You can use your own imagery with Geobase. To do this, you need to create a project in Geobase and get the project reference and API key via the Geobase dashboard. You can sign up for an account at 👉 [geobase.app](https://geobase.app/).

```javascript
// Fill these in with the values from your Geobase project dashboard.
const config = {
  provider: "geobase",
  projectRef: "your-project-ref", // project reference from the dashboard
  apikey: "your-api-key", // note: this provider uses lowercase "apikey"
  cogImagery: "your-imagery-url", // presumably a Cloud-Optimized GeoTIFF URL — confirm in Geobase docs
};
```

#### Option C: Mapbox

```javascript
const config = {
  provider: "mapbox",
  apiKey: "your-mapbox-token", // note: camelCase "apiKey" here, unlike the Geobase snippet's "apikey"
  style: "mapbox://styles/mapbox/satellite-v9",
};
```

<Callout type="info" emoji="📝">
  **Getting API Keys:** Visit [Geobase](https://geobase.app) or
  [Mapbox](https://mapbox.com) to get your free API keys.
</Callout>

### Step 3: Create Your AI Map Component

Replace the contents of `src/App.tsx` with this React component:



```typescript
import React, { useEffect, useRef, useState } from 'react';
import maplibregl from 'maplibre-gl';
import 'maplibre-gl/dist/maplibre-gl.css';
import { geoai, ProviderParams } from 'geoai';
import MaplibreDraw from 'maplibre-gl-draw';
import 'maplibre-gl-draw/dist/mapbox-gl-draw.css';

// Map provider configuration — public ESRI World Imagery (no API key needed).
const mapProviderConfig = {
  provider: "esri",
  serviceUrl: "https://server.arcgisonline.com/ArcGIS/rest/services",
  serviceName: "World_Imagery",
  tileSize: 256,
  attribution: "ESRI World Imagery",
};
const inferenceZoomLevel = 15; // The zoom level at which the inference will be run

// Derive the pipeline's type from the library instead of typing state as `any`.
type DetectionPipeline = Awaited<ReturnType<typeof geoai.pipeline>>;

/**
 * Minimal AI-powered map: draw a polygon, run oil-storage-tank detection on
 * the satellite imagery inside it, and render the detections as a fill layer.
 */
function App() {
  const mapContainer = useRef<HTMLDivElement>(null);
  const map = useRef<maplibregl.Map | null>(null);
  const drawRef = useRef<MaplibreDraw | null>(null);
  // Stored for future UI extensions; the draw handler below closes over the
  // freshly created pipeline rather than reading this state value.
  const [pipeline, setPipeline] = useState<DetectionPipeline | null>(null);
  const [status, setStatus] = useState({ color: '#9e9e9e', text: 'Waiting...' });

  useEffect(() => {
    if (!mapContainer.current) return;

    // Initialize map with a raster satellite basemap.
    map.current = new maplibregl.Map({
      container: mapContainer.current,
      style: {
        version: 8,
        sources: {
          satellite: {
            type: "raster",
            tiles: ["https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}"],
            tileSize: 256,
            attribution: "ESRI World Imagery",
          },
        },
        layers: [{ id: "satellite", type: "raster", source: "satellite" }],
      },
      center: [54.690310447932006, 24.75763471820723],
      zoom: 15,
    });

    // Polygon drawing control (polygon + trash buttons only).
    const draw = new MaplibreDraw({ displayControlsDefault: false, controls: { polygon: true, trash: true } });
    // @ts-ignore
    map.current.addControl(draw);
    drawRef.current = draw;

    // Make controls bigger
    const style = document.createElement('style');
    style.textContent = '.maplibregl-ctrl-group button { width: 50px !important; height: 50px !important; font-size: 20px !important; } .maplibregl-ctrl-group { border-radius: 8px !important; }';
    document.head.appendChild(style);

    // Initialize the AI pipeline asynchronously, then wire up draw events.
    (async () => {
      setStatus({ color: '#ffa500', text: 'Initializing AI Model...' });
      try {
        // Initialize pipeline
        const newPipeline = await geoai.pipeline(
          [{ task: "oil-storage-tank-detection" }],
          mapProviderConfig as ProviderParams
        );
        setPipeline(newPipeline);
        setStatus({ color: '#4caf50', text: 'AI Model Ready! Draw a polygon to detect oil storage tanks using the controls on the right.' });

        // Set up draw event listener after pipeline is ready
        map.current?.on('draw.create', async (e) => {
          console.log('Draw event triggered', e.features[0]);
          setStatus({ color: '#2196f3', text: 'Processing detection...' });
          try {
            // Run inference on the imagery inside the drawn polygon.
            const result = await newPipeline.inference({
              inputs: { polygon: e.features[0] },
              mapSourceParams: { zoomLevel: inferenceZoomLevel }
            });

            // Replace any previous detections layer/source before re-adding.
            if (map.current?.getSource('detections')) {
              map.current.removeLayer('detections');
              map.current.removeSource('detections');
            }

            map.current?.addSource("detections", {
              type: "geojson",
              data: result.detections,
            });
            map.current?.addLayer({
              id: 'detections',
              type: 'fill',
              source: 'detections',
              paint: { 'fill-color': '#ff0000', 'fill-opacity': 0.5 }
            });

            setStatus({
              color: '#4caf50',
              text: `Found ${result.detections.features?.length || 0} oil storage tank${(result.detections.features?.length || 0) !== 1 ? 's' : ''}!`,
            });
          } catch (error) {
            console.error('Detection error:', error);
            setStatus({ color: '#f44336', text: 'Error during detection' });
          }
        });
      } catch (error) {
        console.error('Pipeline initialization error:', error);
        setStatus({ color: '#f44336', text: 'Failed to Initialize Model' });
      }
    })();

    // Cleanup: remove the injected <style> element as well as the map, so
    // repeated mounts (e.g. React 18 StrictMode) don't accumulate duplicates.
    return () => {
      style.remove();
      map.current?.remove();
    };
  }, []);

  // Clear drawn polygons and detection results, and reset the status banner.
  const resetMap = () => {
    // Clear drawn features using the draw reference
    drawRef.current?.deleteAll();

    // Clear detections
    if (map.current?.getSource('detections')) {
      map.current.removeLayer('detections');
      map.current.removeSource('detections');
    }

    setStatus({ color: '#4caf50', text: 'AI Model Ready! Draw a polygon to detect oil storage tanks using the controls on the right.' });
  };

  return (
    <div style={{ height: '100vh', display: 'flex', flexDirection: 'column' }}>
      {/* Status banner; the Reset button only appears after a detection run. */}
      <div style={{ padding: '16px', backgroundColor: status.color, color: 'white', fontSize: '20px', textAlign: 'center', fontWeight: 'bold', display: 'flex', justifyContent: 'space-between', alignItems: 'center' }}>
        <div style={{ flex: 1 }}>{status.text}</div>
        {status.text.includes('Found') && (
          <button onClick={resetMap} style={{ padding: '8px 16px', backgroundColor: 'rgba(255,255,255,1)', color: 'black', border: '1px solid white', borderRadius: '4px', cursor: 'pointer', fontSize: '14px', marginLeft: '16px' }}>
            Reset
          </button>
        )}
      </div>
      <div ref={mapContainer} style={{ height: '100%', width: '100%' }} />
    </div>
  );
}

export default App;
```

### Step 4: Run Your Application

Start your React development server:

```bash
npm start
```

Your app will open at `http://localhost:3000`. Draw a polygon on the map and watch the AI detect objects in real-time!

<Callout type="success" emoji="🎉">
  Congratulations! You now have a working AI-powered mapping application. The AI
  will analyze satellite imagery within drawn polygons and detect objects like
  buildings, vehicles, and infrastructure.
</Callout>

---

For more advanced Web Worker patterns and techniques, see the [Web Worker documentation](./workers.mdx).
