diff --git a/docs/pages/supported-tasks/_meta.js b/docs/pages/supported-tasks/_meta.js
index 4b4fb7a9..e1a3c875 100644
--- a/docs/pages/supported-tasks/_meta.js
+++ b/docs/pages/supported-tasks/_meta.js
@@ -4,6 +4,7 @@ export default {
"building-detection": "Building Detection",
"building-footprint-segmentation": "Building Footprint Segmentation",
"car-detection": "Car Detection",
+ "coconut-tree-detection": "Coconut Tree Detection",
"ship-detection": "Ship Detection",
"solar-panel-detection": "Solar Panel Detection",
"oil-storage-tank-detection": "Oil Storage Tank Detection",
diff --git a/docs/pages/supported-tasks/coconut-tree-detection.mdx b/docs/pages/supported-tasks/coconut-tree-detection.mdx
new file mode 100644
index 00000000..17cfeaa1
--- /dev/null
+++ b/docs/pages/supported-tasks/coconut-tree-detection.mdx
@@ -0,0 +1,190 @@
+import { Callout } from "nextra/components";
+
+# Coconut Tree Detection
+
+> **Detect coconut trees in high-resolution aerial imagery using YOLOv11n**
+
+The coconut tree detection model identifies coconut palm trees in aerial and satellite imagery. This model is particularly useful for agricultural monitoring, plantation management, and environmental assessment in tropical regions.
+
+
+<Callout type="info">
+  **Model Architecture**: YOLOv11n (Nano) - optimized for fast inference while maintaining high accuracy for coconut tree detection.
+</Callout>
+
+## Features
+
+- **High Precision**: 88.6% precision with 80.2% recall
+- **Fast Inference**: ~11ms inference time on modern hardware
+- **Small Model Size**: Only ~5.4MB for efficient deployment
+- **Real-time Capable**: Suitable for interactive applications
+
+## Use Cases
+
+### 🌾 Agricultural Monitoring
+- **Plantation Management**: Monitor tree health and density
+- **Crop Inventory**: Automated counting for insurance and assessment
+- **Growth Tracking**: Compare detection results over time
+
+### 🛰️ Remote Sensing
+- **Land Use Analysis**: Classify agricultural vs. natural areas
+- **Environmental Monitoring**: Track deforestation and reforestation
+- **Biodiversity Assessment**: Evaluate forest composition
+
+### 💼 Commercial Applications
+- **Insurance**: Automated crop assessment for agricultural insurance
+- **Investment**: Due diligence for agricultural investments
+- **Supply Chain**: Monitor coconut production capacity
+
+## Example Usage
+
+```javascript
+import { geoai } from "geoai";
+
+// Initialize the pipeline
+const pipeline = await geoai.pipeline(
+  [{ task: "coconut-tree-detection" }],
+  {
+    provider: "geobase",
+    apikey: "your-api-key",
+    cogImagery: "https://your-cog-imagery-url.tif", // URL of your COG imagery source
+    projectRef: "your-project",
+  }
+);
+
+// Define your area of interest
+const plantationArea = {
+ type: "Feature",
+ geometry: {
+ type: "Polygon",
+ coordinates: [[
+ [-80.1234, 25.7617],
+ [-80.1200, 25.7617],
+ [-80.1200, 25.7580],
+ [-80.1234, 25.7580],
+ [-80.1234, 25.7617]
+ ]]
+ }
+};
+
+// Run coconut tree detection
+const result = await pipeline.inference({
+ inputs: { polygon: plantationArea },
+ mapSourceParams: { zoomLevel: 18 }
+});
+
+console.log(`Found ${result.detections.features.length} coconut trees! 🌴`);
+
+// Access individual detections
+result.detections.features.forEach((tree, index) => {
+ console.log(`Tree ${index + 1}: ${tree.properties.confidence.toFixed(3)} confidence`);
+});
+```
+
+## Parameters
+
+### Input Parameters
+
+| Parameter | Type | Required | Description |
+|-----------|------|----------|-------------|
+| `polygon` | GeoJSON Feature | ✅ | Area of interest for coconut tree detection |
+| `confidenceThreshold` | number | ❌ | Minimum confidence threshold (default: 0.5) |
+| `nmsThreshold` | number | ❌ | Non-maximum suppression threshold (default: 0.5) |
+
+### Map Source Parameters
+
+| Parameter | Type | Description |
+|-----------|------|-------------|
+| `zoomLevel` | number | Zoom level for imagery (recommended: 17-19) |
+| `bands` | number[] | Spectral bands to use (default: RGB) |
+
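+The `confidenceThreshold` and `nmsThreshold` values are supplied through `postProcessingParams`, while imagery options go in `mapSourceParams`. A minimal sketch, reusing the `pipeline` and `plantationArea` from the example above:
+
+```javascript
+// Override the defaults listed in the tables above
+const tuned = await pipeline.inference({
+  inputs: { polygon: plantationArea },
+  postProcessingParams: {
+    confidenceThreshold: 0.6, // keep only fairly confident detections
+    nmsThreshold: 0.4, // suppress heavily overlapping boxes more aggressively
+  },
+  mapSourceParams: { zoomLevel: 18 },
+});
+```
+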
+## Output Format
+
+The model returns a GeoJSON FeatureCollection where each feature represents a detected coconut tree:
+
+```json
+{
+ "type": "FeatureCollection",
+ "features": [
+ {
+ "type": "Feature",
+ "geometry": {
+ "type": "Polygon",
+ "coordinates": [[[lng, lat], [lng, lat], ...]]
+ },
+ "properties": {
+ "confidence": 0.89,
+ "class": "coconut_tree"
+ }
+ }
+ ]
+}
+```
+
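+Each detection is a bounding polygon. For counting or plotting markers it is often handier to reduce each box to a single point; a small sketch in plain JavaScript, assuming `result` comes from the inference call above:
+
+```javascript
+// Collapse every detected bounding box to its centre point
+const centroids = result.detections.features.map(tree => {
+  const ring = tree.geometry.coordinates[0];
+  const vertices = ring.slice(0, -1); // the last vertex repeats the first
+  const [lng, lat] = vertices
+    .reduce(([sx, sy], [x, y]) => [sx + x, sy + y], [0, 0])
+    .map(sum => sum / vertices.length);
+  return {
+    type: "Feature",
+    geometry: { type: "Point", coordinates: [lng, lat] },
+    properties: { confidence: tree.properties.confidence },
+  };
+});
+```
+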
+## Performance Metrics
+
+| Metric | Value | Description |
+|--------|-------|-------------|
+| **Precision** | 88.6% | Accuracy of positive detections |
+| **Recall** | 80.2% | Percentage of trees successfully detected |
+| **mAP@0.5** | 85.0% | Mean Average Precision at IoU 0.5 |
+| **mAP@0.5:0.95** | 50.7% | Mean Average Precision across IoU thresholds |
+| **Inference Time** | ~11ms | Average processing time per image |
+| **Model Size** | 5.4MB | Optimized for fast loading |
+
+## Best Practices
+
+### Image Quality
+- **Resolution**: Use high-resolution imagery (≤10 cm/pixel) for best results
+- **Zoom Level**: Recommended zoom levels 17-19 for optimal tree visibility
+- **Weather**: Clear conditions without cloud cover provide best accuracy
+
+### Area Selection
+- **Size**: Process areas in manageable chunks (< 1 km²) for optimal performance (see the tiling sketch after this list)
+- **Overlap**: Use 10-20% overlap between adjacent areas for complete coverage
+- **Terrain**: Model works best on relatively flat terrain typical of plantations
+
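+For larger plantations, a simple tiling loop keeps each request small. The sketch below assumes `@turf/turf` is installed in your project (it is not bundled with geoai) and uses a hypothetical `largeArea` feature; detections that straddle tile edges may be duplicated and should be de-duplicated afterwards:
+
+```javascript
+import * as turf from "@turf/turf";
+
+// Tile the AOI into ~0.5 km grid cells and run detection per cell
+const grid = turf.squareGrid(turf.bbox(largeArea), 0.5, { units: "kilometers" });
+
+const allTrees = [];
+for (const cell of grid.features) {
+  const { detections } = await pipeline.inference({
+    inputs: { polygon: cell },
+    mapSourceParams: { zoomLevel: 18 },
+  });
+  allTrees.push(...detections.features);
+}
+console.log(`${allTrees.length} trees across ${grid.features.length} tiles`);
+```
+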
+### Confidence Tuning
+- **High Precision**: Use confidenceThreshold > 0.7 to minimize false positives
+- **High Recall**: Use confidenceThreshold < 0.3 to catch more trees (may include false positives)
+- **Balanced**: The default of 0.5 provides a good balance of precision and recall (see the comparison below)
+
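+The trade-off is easy to inspect by running the same area twice with different thresholds, mirroring the comparison used in the test suite:
+
+```javascript
+const highPrecision = await pipeline.inference({
+  inputs: { polygon: plantationArea },
+  postProcessingParams: { confidenceThreshold: 0.8 },
+  mapSourceParams: { zoomLevel: 18 },
+});
+const highRecall = await pipeline.inference({
+  inputs: { polygon: plantationArea },
+  postProcessingParams: { confidenceThreshold: 0.3 },
+  mapSourceParams: { zoomLevel: 18 },
+});
+console.log(
+  `High precision: ${highPrecision.detections.features.length}, high recall: ${highRecall.detections.features.length}`
+);
+```
+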
+## Limitations
+
+
+<Callout type="warning">
+  **Current Limitations**:
+  - Trained primarily on aerial imagery from specific geographic regions
+  - Performance may vary with different coconut tree varieties
+  - Dense vegetation may cause occlusion and missed detections
+</Callout>
+
+- **Geographic Bias**: Training data primarily from specific tropical regions
+- **Tree Varieties**: Optimized for common coconut palm varieties
+- **Occlusion**: Dense canopy may hide some trees
+- **Resolution Dependency**: Requires high-resolution imagery for accuracy
+
+## Technical Details
+
+### Model Architecture
+- **Base Model**: YOLOv11n (Ultralytics)
+- **Framework**: ONNX for cross-platform compatibility
+- **Input Size**: 256×256×3 (RGB)
+- **Output**: Bounding boxes with confidence scores
+
+### Training Data
+- **Dataset Size**: 772 high-resolution aerial patches
+- **Spatial Resolution**: ~9cm ground sampling distance
+- **Geographic Coverage**: Multiple tropical regions
+- **Annotation Quality**: Expert-validated coconut tree labels
+
+### Version Information
+- **Current Version**: v1.0 (Baseline)
+- **Model ID**: `geobase/coconut-detection-v1-yolov11n`
+- **Release Date**: August 2024
+- **License**: MIT
+
+## Related Tasks
+
+- [Building Detection](./building-detection) - For identifying structures in agricultural areas
+- [Object Detection](./object-detection) - For general purpose object detection
+- [Land Cover Classification](./land-cover-classification) - For broader landscape analysis
+
+## Support
+
+For questions, issues, or feature requests related to coconut tree detection:
+
+- **Documentation**: [GeoAI.js Docs](https://docs.geobase.app/geoai)
+- **GitHub Issues**: [Report Issues](https://github.com/decision-labs/geoai.js/issues)
+- **Community**: [GitHub Discussions](https://github.com/decision-labs/geoai.js/discussions)
diff --git a/src/core/types.ts b/src/core/types.ts
index 1c3b7bc3..e46ed845 100644
--- a/src/core/types.ts
+++ b/src/core/types.ts
@@ -12,6 +12,7 @@ import {
SolarPanelDetection,
WetLandSegmentation,
} from "@/models/geoai_models";
+import { CoconutTreeDetection } from "@/models/coconut_tree_detection";
import { LandCoverClassification } from "@/models/land_cover_classification";
import { ObjectDetection } from "@/models/object_detection";
import { OilStorageTankDetection } from "@/models/oil_storage_tank_detection";
@@ -104,7 +105,8 @@ export type GeobaseAiModelTask =
| "wetland-segmentation"
| "building-detection"
| "oil-storage-tank-detection"
- | "building-footprint-segmentation";
+ | "building-footprint-segmentation"
+ | "coconut-tree-detection";
export type ModelInstance =
| MaskGeneration
@@ -119,7 +121,8 @@ export type ModelInstance =
| BuildingDetection
| OilStorageTankDetection
| BuildingFootPrintSegmentation
- | ImageFeatureExtraction;
+ | ImageFeatureExtraction
+ | CoconutTreeDetection;
export type ModelConfig = {
task: HuggingFaceModelTask | GeobaseAiModelTask;
diff --git a/src/models/coconut_tree_detection.ts b/src/models/coconut_tree_detection.ts
new file mode 100644
index 00000000..3db929bb
--- /dev/null
+++ b/src/models/coconut_tree_detection.ts
@@ -0,0 +1,251 @@
+import { BaseModel } from "@/models/base_model";
+import {
+ ImageProcessor,
+ PreTrainedModel,
+ PretrainedModelOptions,
+} from "@huggingface/transformers";
+import { parametersChanged } from "@/utils/utils";
+import { ProviderParams } from "@/geoai";
+import { GeoRawImage } from "@/types/images/GeoRawImage";
+import * as ort from "onnxruntime-web";
+import { InferenceParams, ObjectDetectionResults } from "@/core/types";
+
+export class CoconutTreeDetection extends BaseModel {
+ protected static instance: CoconutTreeDetection | null = null;
+ protected model: ort.InferenceSession | undefined;
+ protected processor: ImageProcessor | undefined;
+
+ private constructor(
+ model_id: string,
+ providerParams: ProviderParams,
+ modelParams?: PretrainedModelOptions
+ ) {
+ super(model_id, providerParams, modelParams);
+ }
+
+ static async getInstance(
+ model_id: string,
+ providerParams: ProviderParams,
+ modelParams?: PretrainedModelOptions
+ ): Promise<{ instance: CoconutTreeDetection }> {
+ if (
+ !CoconutTreeDetection.instance ||
+ parametersChanged(
+ CoconutTreeDetection.instance,
+ model_id,
+ providerParams,
+ modelParams
+ )
+ ) {
+ CoconutTreeDetection.instance = new CoconutTreeDetection(
+ model_id,
+ providerParams,
+ modelParams
+ );
+ await CoconutTreeDetection.instance.initialize();
+ }
+ return { instance: CoconutTreeDetection.instance };
+ }
+
+ protected async initializeModel(): Promise<void> {
+ // Only load the model if not already loaded
+ if (this.model) return;
+ this.processor = await ImageProcessor.from_pretrained(this.model_id);
+ const pretrainedModel = await PreTrainedModel.from_pretrained(
+ this.model_id,
+ this.modelParams
+ );
+ this.model = pretrainedModel.sessions.model;
+ }
+
+ async inference(params: InferenceParams): Promise<ObjectDetectionResults> {
+ const {
+ inputs: { polygon },
+ postProcessingParams: {
+ confidenceThreshold = 0.5,
+ nmsThreshold = 0.5,
+ } = {},
+ mapSourceParams,
+ } = params;
+
+ if (!polygon) {
+ throw new Error("Polygon input is required for coconut tree detection");
+ }
+
+ if (!polygon.geometry || polygon.geometry.type !== "Polygon") {
+ throw new Error("Input must be a valid GeoJSON Polygon feature");
+ }
+
+ // Ensure initialization is complete
+ await this.initialize();
+
+ // Double-check data provider after initialization
+ if (!this.dataProvider) {
+ throw new Error("Data provider not initialized");
+ }
+
+ const geoRawImage = await this.polygonToImage(
+ polygon,
+ mapSourceParams?.zoomLevel,
+ mapSourceParams?.bands,
+ mapSourceParams?.expression
+ );
+
+ if (!this.processor) {
+ throw new Error("Processor not initialized");
+ }
+
+ const inputs = await this.processor(geoRawImage);
+ const inferenceStartTime = performance.now();
+ console.log("[coconut-tree-detection] starting inference...");
+
+ let outputs;
+ try {
+ if (!this.model) {
+ throw new Error("Model not initialized");
+ }
+ outputs = await this.model.run({ images: inputs.pixel_values });
+ } catch (error) {
+ console.debug("error", error);
+ throw error;
+ }
+
+ const processedOutputs = await this.postProcessor(
+ outputs.output0,
+ geoRawImage,
+ confidenceThreshold as number,
+ nmsThreshold as number
+ );
+
+ const inferenceEndTime = performance.now();
+ console.log(
+ `[coconut-tree-detection] inference completed. Time taken: ${(inferenceEndTime - inferenceStartTime).toFixed(2)}ms`
+ );
+
+ return {
+ detections: processedOutputs,
+ geoRawImage,
+ };
+ }
+
+ protected async postProcessor(
+ outputs: ort.Tensor,
+ geoRawImage: GeoRawImage,
+ CONFIDENCE_THRESHOLD: number = 0.5,
+ NMS_THRESHOLD: number = 0.5
+ ): Promise<GeoJSON.FeatureCollection> {
+ // Get the output tensor data - YOLOv11 format: [1, 5, 8400]
+ const outputData = outputs.data as Float32Array;
+ const [, , numDetections] = outputs.dims;
+
+ // Reshape the output to [detections, features] format
+ const predictions = [];
+ for (let i = 0; i < numDetections; i++) {
+ const detection = {
+ center_x: outputData[i], // x center
+ center_y: outputData[numDetections + i], // y center
+ width: outputData[2 * numDetections + i], // width
+ height: outputData[3 * numDetections + i], // height
+ confidence: outputData[4 * numDetections + i], // confidence for coconut_tree class
+ };
+ predictions.push(detection);
+ }
+
+ // Filter by confidence threshold
+ const filteredPredictions = predictions.filter(
+ detection => detection.confidence > CONFIDENCE_THRESHOLD
+ );
+
+ // Perform non-maximum suppression
+ const finalPredictions = [];
+ const sorted = filteredPredictions.sort(
+ (a, b) => b.confidence - a.confidence
+ );
+
+ for (const pred of sorted) {
+ let keep = true;
+ for (const final of finalPredictions) {
+ const iou = this.calculateIOU(pred, final);
+ if (iou > NMS_THRESHOLD) {
+ keep = false;
+ break;
+ }
+ }
+ if (keep) {
+ finalPredictions.push(pred);
+ }
+ }
+
+ // Convert predictions to GeoJSON featureCollection
+ const geoFeatures: GeoJSON.Feature[] = finalPredictions.map(detection => {
+ // Convert center coordinates and dimensions to corner coordinates
+ const x1 = detection.center_x - detection.width / 2;
+ const y1 = detection.center_y - detection.height / 2;
+ const x2 = detection.center_x + detection.width / 2;
+ const y2 = detection.center_y + detection.height / 2;
+
+ // Convert normalized coordinates to image coordinates
+ const imageWidth = geoRawImage.width;
+ const imageHeight = geoRawImage.height;
+
+ // Scale coordinates from normalized space to image dimensions
+ const coords = [
+ [x1 * imageWidth, y1 * imageHeight],
+ [x2 * imageWidth, y1 * imageHeight],
+ [x2 * imageWidth, y2 * imageHeight],
+ [x1 * imageWidth, y2 * imageHeight],
+ [x1 * imageWidth, y1 * imageHeight], // Close the polygon
+ ];
+
+ // Convert image coordinates to geo coordinates
+ const geoCoords = coords.map(coord =>
+ geoRawImage.pixelToWorld(coord[0], coord[1])
+ );
+
+ return {
+ type: "Feature",
+ properties: {
+ confidence: detection.confidence,
+ class: "coconut_tree",
+ },
+ geometry: {
+ type: "Polygon",
+ coordinates: [geoCoords],
+ },
+ };
+ });
+
+ return {
+ type: "FeatureCollection",
+ features: geoFeatures,
+ };
+ }
+
+ // Helper function to calculate Intersection over Union
+ private calculateIOU(
+ box1: { center_x: number; center_y: number; width: number; height: number },
+ box2: { center_x: number; center_y: number; width: number; height: number }
+ ): number {
+ const x1 = Math.max(
+ box1.center_x - box1.width / 2,
+ box2.center_x - box2.width / 2
+ );
+ const y1 = Math.max(
+ box1.center_y - box1.height / 2,
+ box2.center_y - box2.height / 2
+ );
+ const x2 = Math.min(
+ box1.center_x + box1.width / 2,
+ box2.center_x + box2.width / 2
+ );
+ const y2 = Math.min(
+ box1.center_y + box1.height / 2,
+ box2.center_y + box2.height / 2
+ );
+
+ const intersection = Math.max(0, x2 - x1) * Math.max(0, y2 - y1);
+ const area1 = box1.width * box1.height;
+ const area2 = box2.width * box2.height;
+ return intersection / (area1 + area2 - intersection);
+ }
+}
diff --git a/src/registry.ts b/src/registry.ts
index 3fb4fa3e..a6ba5ece 100644
--- a/src/registry.ts
+++ b/src/registry.ts
@@ -20,6 +20,7 @@ import {
SolarPanelDetection,
WetLandSegmentation,
} from "./models/geoai_models";
+import { CoconutTreeDetection } from "./models/coconut_tree_detection";
import { OilStorageTankDetection } from "./models/oil_storage_tank_detection";
import { BuildingFootPrintSegmentation } from "./models/building_footprint_segmentation";
import { ImageFeatureExtraction } from "./models/image_feature_extraction";
@@ -327,6 +328,31 @@ export const modelRegistry: ModelConfig[] = [
);
},
},
+ {
+ task: "coconut-tree-detection",
+ library: "geoai",
+ description:
+ "Detects coconut trees in high-resolution aerial imagery. Optimized for agricultural monitoring, plantation management, and environmental assessment. Uses YOLOv11n architecture for fast inference.",
+ examples: [
+ "Find all coconut trees in this plantation area.",
+ "Detect coconut palms in this coastal region.",
+ "Count coconut trees in this agricultural zone.",
+ "Identify coconut trees for inventory purposes.",
+ "Monitor coconut tree density in this farm.",
+ ],
+ ioConfig: {} as baseIOConfig,
+ geobase_ai_pipeline: (
+ params: ProviderParams,
+ modelId: string = "geobase/coconut-detection-v1-yolov11n",
+ modelParams: PretrainedModelOptions = {
+ model_file_name: "model",
+ }
+ ): Promise<{
+ instance: CoconutTreeDetection;
+ }> => {
+ return CoconutTreeDetection.getInstance(modelId, params, modelParams);
+ },
+ },
{
task: "image-feature-extraction",
library: "@huggingface/transformers",
diff --git a/test/coconutTreeDetection.output.test.ts b/test/coconutTreeDetection.output.test.ts
new file mode 100644
index 00000000..bf6451d7
--- /dev/null
+++ b/test/coconutTreeDetection.output.test.ts
@@ -0,0 +1,336 @@
+import { describe, it, expect, beforeAll } from "vitest";
+import { geoai } from "../src/index";
+import { mkdirSync, writeFileSync } from "fs";
+import { join } from "path";
+
+// Test polygon for coconut tree detection (Southeast Asia - tropical region)
+// Using imagery from: https://oin-hotosm-temp.s3.us-east-1.amazonaws.com/65c6eb328931500001717ddc/0/65c6eb328931500001717ddd.tif
+// Expanded area to capture more potential coconut trees
+const testPolygon: GeoJSON.Feature = {
+ type: "Feature",
+ properties: {},
+ geometry: {
+ type: "Polygon",
+ coordinates: [
+ [
+ [96.1053, 4.2168], // Moderately expanded bounds
+ [96.1057, 4.2168],
+ [96.1057, 4.2162],
+ [96.1053, 4.2162],
+ [96.1053, 4.2168],
+ ],
+ ],
+ },
+};
+
+describe("Coconut Tree Detection - Output Results", () => {
+ // Skip tests if no API key is provided
+ const skipTest = !process.env.GEOBASE_API_KEY;
+
+ beforeAll(() => {
+ if (skipTest) {
+ console.log(
+ "⚠️ Skipping Coconut Tree Detection output tests - GEOBASE_API_KEY not provided"
+ );
+ }
+ });
+
+ it.skipIf(skipTest)(
+ "should detect coconut trees and save results as GeoJSON",
+ async () => {
+ const pipeline = await geoai.pipeline(
+ [{ task: "coconut-tree-detection" }],
+ {
+ provider: "geobase",
+ apikey: process.env.GEOBASE_API_KEY!,
+ cogImagery:
+ "https://oin-hotosm-temp.s3.us-east-1.amazonaws.com/65c6eb328931500001717ddc/0/65c6eb328931500001717ddd.tif",
+ projectRef: process.env.GEOBASE_PROJECT_REF!,
+ }
+ );
+
+ console.log("🌴 Running coconut tree detection...");
+
+ const result = await pipeline.inference({
+ inputs: { polygon: testPolygon },
+ mapSourceParams: { zoomLevel: 21 },
+ postProcessingParams: {
+ confidenceThreshold: 0.1, // Much lower threshold to catch more detections
+ nmsThreshold: 0.3, // Lower NMS to allow more overlapping detections
+ },
+ });
+
+ // Create comprehensive output with all information
+ const outputData = {
+ metadata: {
+ timestamp: new Date().toISOString(),
+ model: "geobase/coconut-detection-v1-yolov11n",
+ task: "coconut-tree-detection",
+ testArea: {
+ location: "Southeast Asia (96.10°E, 4.21°N)",
+ imagery: "OpenAerialMap",
+ zoomLevel: 21,
+ confidenceThreshold: 0.1,
+ nmsThreshold: 0.3,
+ },
+ performance: {
+ detectionCount: result.detections.features.length,
+ imageResolution: `${result.geoRawImage.width}x${result.geoRawImage.height}`,
+ imageBounds: {
+ west: result.geoRawImage.bounds.west,
+ south: result.geoRawImage.bounds.south,
+ east: result.geoRawImage.bounds.east,
+ north: result.geoRawImage.bounds.north,
+ },
+ },
+ },
+ testPolygon: testPolygon,
+ detections: result.detections,
+ summary: {
+ totalDetections: result.detections.features.length,
+ averageConfidence:
+ result.detections.features.length > 0
+ ? result.detections.features.reduce(
+ (sum, f) => sum + (f.properties?.confidence || 0),
+ 0
+ ) / result.detections.features.length
+ : 0,
+ confidenceRange:
+ result.detections.features.length > 0
+ ? {
+ min: Math.min(
+ ...result.detections.features.map(
+ f => f.properties?.confidence || 0
+ )
+ ),
+ max: Math.max(
+ ...result.detections.features.map(
+ f => f.properties?.confidence || 0
+ )
+ ),
+ }
+ : null,
+ },
+ };
+
+ // Save to multiple formats for easy viewing
+ const outputDir = join(process.cwd(), "test-outputs");
+
+ // Ensure the output directory exists (recursive mkdir is a no-op if it already does)
+ mkdirSync(outputDir, { recursive: true });
+
+ // 1. Full data with metadata (JSON)
+ const fullOutputPath = join(
+ outputDir,
+ "coconut-detection-full-results.json"
+ );
+ writeFileSync(fullOutputPath, JSON.stringify(outputData, null, 2));
+
+ // 2. Pure GeoJSON for visualization tools
+ const geoJsonPath = join(outputDir, "coconut-detection-results.geojson");
+ writeFileSync(geoJsonPath, JSON.stringify(result.detections, null, 2));
+
+ // 3. Combined GeoJSON with test area and detections
+ const combinedGeoJson = {
+ type: "FeatureCollection",
+ features: [
+ {
+ ...testPolygon,
+ properties: {
+ ...testPolygon.properties,
+ type: "test_area",
+ description: "Test area for coconut tree detection",
+ },
+ },
+ ...result.detections.features.map(feature => ({
+ ...feature,
+ properties: {
+ ...feature.properties,
+ type: "coconut_detection",
+ },
+ })),
+ ],
+ };
+
+ const combinedPath = join(
+ outputDir,
+ "coconut-detection-with-test-area.geojson"
+ );
+ writeFileSync(combinedPath, JSON.stringify(combinedGeoJson, null, 2));
+
+ console.log(`🌴 Detection Results:`);
+ console.log(
+ ` Found: ${result.detections.features.length} coconut trees`
+ );
+ console.log(
+ ` Image: ${result.geoRawImage.width}x${result.geoRawImage.height}px`
+ );
+ console.log(
+ ` Bounds: [${result.geoRawImage.bounds.west.toFixed(6)}, ${result.geoRawImage.bounds.south.toFixed(6)}, ${result.geoRawImage.bounds.east.toFixed(6)}, ${result.geoRawImage.bounds.north.toFixed(6)}]`
+ );
+
+ if (result.detections.features.length > 0) {
+ console.log(
+ ` Confidence range: ${outputData.summary.confidenceRange?.min.toFixed(3)} - ${outputData.summary.confidenceRange?.max.toFixed(3)}`
+ );
+ console.log(
+ ` Average confidence: ${outputData.summary.averageConfidence.toFixed(3)}`
+ );
+
+ result.detections.features.forEach((detection, idx) => {
+ console.log(
+ ` Tree ${idx + 1}: ${detection.properties?.confidence?.toFixed(3)} confidence`
+ );
+ });
+ }
+
+ console.log(`\n📁 Output files saved:`);
+ console.log(` Full results: ${fullOutputPath}`);
+ console.log(` GeoJSON only: ${geoJsonPath}`);
+ console.log(` Combined view: ${combinedPath}`);
+
+ console.log(`\n🗺️ To visualize:`);
+ console.log(` • Upload any .geojson file to: https://geojson.io`);
+ console.log(` • Or use QGIS, ArcGIS, or other GIS tools`);
+ console.log(` • Combined file shows both test area and detections`);
+
+ // Verify the response structure
+ expect(result).toHaveProperty("detections");
+ expect(result).toHaveProperty("geoRawImage");
+ expect(result.detections).toHaveProperty("type", "FeatureCollection");
+ expect(result.detections).toHaveProperty("features");
+ expect(Array.isArray(result.detections.features)).toBe(true);
+ },
+ 120000
+ ); // Extended timeout
+
+ it.skipIf(skipTest)(
+ "should test different confidence thresholds and save comparison",
+ async () => {
+ const pipeline = await geoai.pipeline(
+ [{ task: "coconut-tree-detection" }],
+ {
+ provider: "geobase",
+ apikey: process.env.GEOBASE_API_KEY!,
+ cogImagery:
+ "https://oin-hotosm-temp.s3.us-east-1.amazonaws.com/65c6eb328931500001717ddc/0/65c6eb328931500001717ddd.tif",
+ projectRef: process.env.GEOBASE_PROJECT_REF!,
+ }
+ );
+
+ const thresholds = [0.2, 0.3, 0.5, 0.7, 0.9];
+ const results = [];
+
+ console.log("🧪 Testing different confidence thresholds...");
+
+ for (const threshold of thresholds) {
+ console.log(` Testing threshold: ${threshold}`);
+
+ const result = await pipeline.inference({
+ inputs: { polygon: testPolygon },
+ mapSourceParams: { zoomLevel: 18 },
+ postProcessingParams: {
+ confidenceThreshold: threshold,
+ nmsThreshold: 0.5,
+ },
+ });
+
+ results.push({
+ threshold,
+ detectionCount: result.detections.features.length,
+ detections: result.detections,
+ });
+
+ console.log(
+ ` ✓ Found ${result.detections.features.length} detections`
+ );
+ }
+
+ // Create comparison GeoJSON
+ const comparisonGeoJson = {
+ type: "FeatureCollection",
+ features: [
+ {
+ ...testPolygon,
+ properties: {
+ type: "test_area",
+ description: "Test area for confidence threshold comparison",
+ },
+ },
+ ],
+ };
+
+ // Add detections from each threshold with different styling
+ const colors = ["#ff0000", "#ff8800", "#ffff00", "#88ff00", "#00ff00"];
+
+ results.forEach((result, idx) => {
+ result.detections.features.forEach((feature, detIdx) => {
+ comparisonGeoJson.features.push({
+ ...feature,
+ properties: {
+ ...feature.properties,
+ type: "coconut_detection",
+ confidenceThreshold: result.threshold,
+ color: colors[idx],
+ detectionId: `t${result.threshold}_d${detIdx}`,
+ description: `Detected at threshold ${result.threshold} (confidence: ${feature.properties?.confidence?.toFixed(3)})`,
+ },
+ });
+ });
+ });
+
+ const outputDir = join(process.cwd(), "test-outputs");
+ const comparisonPath = join(
+ outputDir,
+ "coconut-detection-threshold-comparison.geojson"
+ );
+ const summaryPath = join(
+ outputDir,
+ "coconut-detection-threshold-summary.json"
+ );
+
+ writeFileSync(comparisonPath, JSON.stringify(comparisonGeoJson, null, 2));
+ writeFileSync(
+ summaryPath,
+ JSON.stringify(
+ {
+ metadata: {
+ timestamp: new Date().toISOString(),
+ model: "geobase/coconut-detection-v1-yolov11n",
+ test: "confidence_threshold_comparison",
+ },
+ testArea: testPolygon,
+ results: results.map(r => ({
+ threshold: r.threshold,
+ detectionCount: r.detectionCount,
+ detections: r.detections.features.map(f => ({
+ confidence: f.properties?.confidence,
+ coordinates: f.geometry,
+ })),
+ })),
+ },
+ null,
+ 2
+ )
+ );
+
+ console.log(`\n📊 Threshold Comparison Results:`);
+ results.forEach(result => {
+ console.log(
+ ` Threshold ${result.threshold}: ${result.detectionCount} detections`
+ );
+ });
+
+ console.log(`\n📁 Comparison files saved:`);
+ console.log(` Visualization: ${comparisonPath}`);
+ console.log(` Summary data: ${summaryPath}`);
+
+ expect(results.length).toBe(thresholds.length);
+ },
+ 180000
+ ); // Extended timeout for multiple inferences
+});
diff --git a/test/coconutTreeDetection.test.ts b/test/coconutTreeDetection.test.ts
new file mode 100644
index 00000000..25d33b08
--- /dev/null
+++ b/test/coconutTreeDetection.test.ts
@@ -0,0 +1,239 @@
+import { describe, it, expect, beforeAll } from "vitest";
+import { geoai } from "../src/index";
+
+// Test polygon for coconut tree detection (Southeast Asia - tropical region)
+// Using imagery from: https://oin-hotosm-temp.s3.us-east-1.amazonaws.com/65c6eb328931500001717ddc/0/65c6eb328931500001717ddd.tif
+const testPolygon: GeoJSON.Feature = {
+ type: "Feature",
+ properties: {},
+ geometry: {
+ type: "Polygon",
+ coordinates: [
+ [
+ [96.10557612318473, 4.216740108849962],
+ [96.10530371636793, 4.216740108849962],
+ [96.10530371636793, 4.2163301933439215],
+ [96.10557612318473, 4.2163301933439215],
+ [96.10557612318473, 4.216740108849962],
+ ],
+ ],
+ },
+};
+
+describe("Coconut Tree Detection", () => {
+ // Skip tests if no API key is provided
+ const skipTest = !process.env.GEOBASE_API_KEY;
+
+ beforeAll(() => {
+ if (skipTest) {
+ console.log(
+ "⚠️ Skipping Coconut Tree Detection tests - GEOBASE_API_KEY not provided"
+ );
+ }
+ });
+
+ it.skipIf(skipTest)(
+ "should detect coconut trees in aerial imagery",
+ async () => {
+ const pipeline = await geoai.pipeline(
+ [{ task: "coconut-tree-detection" }],
+ {
+ provider: "geobase",
+ apikey: process.env.GEOBASE_API_KEY!,
+ cogImagery:
+ "https://oin-hotosm-temp.s3.us-east-1.amazonaws.com/65c6eb328931500001717ddc/0/65c6eb328931500001717ddd.tif",
+ projectRef: process.env.GEOBASE_PROJECT_REF!,
+ }
+ );
+
+ const result = await pipeline.inference({
+ inputs: { polygon: testPolygon },
+ mapSourceParams: { zoomLevel: 18 },
+ });
+
+ // Verify the response structure
+ expect(result).toHaveProperty("detections");
+ expect(result).toHaveProperty("geoRawImage");
+ expect(result.detections).toHaveProperty("type", "FeatureCollection");
+ expect(result.detections).toHaveProperty("features");
+ expect(Array.isArray(result.detections.features)).toBe(true);
+
+ // Log results for manual verification
+ console.log(`Found ${result.detections.features.length} coconut trees`);
+
+ if (result.detections.features.length > 0) {
+ console.log("Sample detection:", result.detections.features[0]);
+
+ // Verify detection properties
+ const detection = result.detections.features[0];
+ expect(detection).toHaveProperty("type", "Feature");
+ expect(detection).toHaveProperty("geometry");
+ expect(detection).toHaveProperty("properties");
+ expect(detection.properties).toHaveProperty("confidence");
+ expect(detection.properties).toHaveProperty("class", "coconut_tree");
+ expect(detection.geometry).toHaveProperty("type", "Polygon");
+
+ // Verify confidence is within valid range
+ expect(detection.properties.confidence).toBeGreaterThan(0);
+ expect(detection.properties.confidence).toBeLessThanOrEqual(1);
+ }
+ },
+ 60000
+ ); // Extended timeout for model initialization
+
+ it.skipIf(skipTest)(
+ "should respect confidence threshold parameter",
+ async () => {
+ const pipeline = await geoai.pipeline(
+ [{ task: "coconut-tree-detection" }],
+ {
+ provider: "geobase",
+ apikey: process.env.GEOBASE_API_KEY!,
+ cogImagery:
+ "https://oin-hotosm-temp.s3.us-east-1.amazonaws.com/65c6eb328931500001717ddc/0/65c6eb328931500001717ddd.tif",
+ projectRef: process.env.GEOBASE_PROJECT_REF!,
+ }
+ );
+
+ // Test with high confidence threshold
+ const highConfidenceResult = await pipeline.inference({
+ inputs: { polygon: testPolygon },
+ postProcessingParams: { confidenceThreshold: 0.8 },
+ mapSourceParams: { zoomLevel: 18 },
+ });
+
+ // Test with low confidence threshold
+ const lowConfidenceResult = await pipeline.inference({
+ inputs: { polygon: testPolygon },
+ postProcessingParams: { confidenceThreshold: 0.3 },
+ mapSourceParams: { zoomLevel: 18 },
+ });
+
+ // Low confidence should return >= high confidence detections
+ expect(
+ lowConfidenceResult.detections.features.length
+ ).toBeGreaterThanOrEqual(highConfidenceResult.detections.features.length);
+
+ console.log(
+ `High confidence (0.8): ${highConfidenceResult.detections.features.length} trees`
+ );
+ console.log(
+ `Low confidence (0.3): ${lowConfidenceResult.detections.features.length} trees`
+ );
+ },
+ 60000
+ );
+
+ it.skipIf(skipTest)(
+ "should handle different zoom levels",
+ async () => {
+ const pipeline = await geoai.pipeline(
+ [{ task: "coconut-tree-detection" }],
+ {
+ provider: "geobase",
+ apikey: process.env.GEOBASE_API_KEY!,
+ cogImagery:
+ "https://oin-hotosm-temp.s3.us-east-1.amazonaws.com/65c6eb328931500001717ddc/0/65c6eb328931500001717ddd.tif",
+ projectRef: process.env.GEOBASE_PROJECT_REF!,
+ }
+ );
+
+ // Test with different zoom levels
+ const zoomLevels = [17, 18, 19];
+
+ for (const zoomLevel of zoomLevels) {
+ const result = await pipeline.inference({
+ inputs: { polygon: testPolygon },
+ mapSourceParams: { zoomLevel },
+ });
+
+ expect(result).toHaveProperty("detections");
+ expect(result.detections).toHaveProperty("features");
+
+ console.log(
+ `Zoom ${zoomLevel}: ${result.detections.features.length} trees detected`
+ );
+ }
+ },
+ 120000
+ ); // Extended timeout for multiple requests
+
+ it("should validate input parameters", async () => {
+ if (skipTest) return;
+
+ const pipeline = await geoai.pipeline(
+ [{ task: "coconut-tree-detection" }],
+ {
+ provider: "geobase",
+ apikey: process.env.GEOBASE_API_KEY!,
+ cogImagery:
+ "https://oin-hotosm-temp.s3.us-east-1.amazonaws.com/65c6eb328931500001717ddc/0/65c6eb328931500001717ddd.tif",
+ projectRef: process.env.GEOBASE_PROJECT_REF!,
+ }
+ );
+
+ // Should throw error for missing polygon
+ await expect(
+ pipeline.inference({
+ inputs: {} as any,
+ })
+ ).rejects.toThrow("Polygon input is required");
+
+ // Should throw error for invalid geometry type
+ const invalidPolygon = {
+ type: "Feature" as const,
+ geometry: {
+ type: "Point" as const,
+ coordinates: [-80.1234, 25.7617],
+ },
+ properties: {},
+ };
+
+ await expect(
+ pipeline.inference({
+ inputs: { polygon: invalidPolygon as any },
+ })
+ ).rejects.toThrow("Input must be a valid GeoJSON Polygon feature");
+ });
+
+ it("should handle model initialization correctly", async () => {
+ if (skipTest) return;
+
+ // Test that model can be initialized multiple times without error
+ const pipeline1 = await geoai.pipeline(
+ [{ task: "coconut-tree-detection" }],
+ {
+ provider: "geobase",
+ apikey: process.env.GEOBASE_API_KEY!,
+ cogImagery:
+ "https://oin-hotosm-temp.s3.us-east-1.amazonaws.com/65c6eb328931500001717ddc/0/65c6eb328931500001717ddd.tif",
+ projectRef: process.env.GEOBASE_PROJECT_REF!,
+ }
+ );
+
+ const pipeline2 = await geoai.pipeline(
+ [{ task: "coconut-tree-detection" }],
+ {
+ provider: "geobase",
+ apikey: process.env.GEOBASE_API_KEY!,
+ cogImagery:
+ "https://oin-hotosm-temp.s3.us-east-1.amazonaws.com/65c6eb328931500001717ddc/0/65c6eb328931500001717ddd.tif",
+ projectRef: process.env.GEOBASE_PROJECT_REF!,
+ }
+ );
+
+ // Both pipelines should work
+ const result1 = await pipeline1.inference({
+ inputs: { polygon: testPolygon },
+ mapSourceParams: { zoomLevel: 18 },
+ });
+
+ const result2 = await pipeline2.inference({
+ inputs: { polygon: testPolygon },
+ mapSourceParams: { zoomLevel: 18 },
+ });
+
+ expect(result1).toHaveProperty("detections");
+ expect(result2).toHaveProperty("detections");
+ }, 60000);
+});