// ... existing code ...
<template>
  <view class="container">
    <!-- Image preview with a transparent canvas overlay on which the user drags a horizontal baseline -->
    <view class="image-container">
      <image v-if="imageSrc" :src="imageSrc" mode="aspectFit" class="preview-image" />
      <canvas canvas-id="baselineCanvas" class="baseline-canvas" @touchstart="startDrawing" @touchmove="drawing" @touchend="stopDrawing"></canvas>
    </view>
    <!-- Work canvas used by getCanvasBase64 to merge the photo with the drawn baseline before upload.
         NOTE(review): this canvas is visibly rendered on the page; if it should be hidden, position it off-screen — confirm intent. -->
    <canvas canvas-id="mergeCanvas" class="merge-canvas" style="width: 100%; height: 500rpx;"></canvas>
    <view class="action-container">
      <!-- NOTE(review): detectContour / saveResult handlers are not defined in this file's script — confirm they exist elsewhere -->
      <view class="action-button" :class="{ 'disabled': !imageSrc }" @click="detectContour">
        <text class="button-text">检测轮廓</text>
      </view>
      <view class="action-button" :class="{ 'disabled': !imageSrc }" @click="calculateContactAngle">
        <text class="button-text">计算接触角</text>
      </view>
      <view class="action-button" :class="{ 'disabled': !imageSrc }" @click="saveResult">
        <text class="button-text">保存结果</text>
      </view>
    </view>
    <!-- Analysis results returned by the backend: annotated image plus numeric data -->
    <view v-if="resultImageSrc || resultData" class="result-container">
      <image v-if="resultImageSrc" :src="resultImageSrc" mode="aspectFit" class="result-image" />
      <view v-if="resultData" class="result-data">
        <view class="result-item">
          <text class="result-label">接触角数据：</text>
          <text class="result-value">左接触角: {{ resultData.left_angle?.toFixed(1) }}°</text>
          <text class="result-value">右接触角: {{ resultData.right_angle?.toFixed(1) }}°</text>
          <text class="result-value">平均接触角: {{ resultData.average_angle?.toFixed(1) }}°</text>
        </view>
        <view class="result-item">
          <text class="result-label">基线位置：</text>
          <text class="result-value">Y坐标: {{ resultData.baseline_y }}</text>
        </view>
        <view class="result-item">
          <text class="result-label">接触点：</text>
          <view v-for="(point, index) in resultData.contact_points" :key="index" class="contact-point">
            <text class="result-value">点{{index + 1}}: ({{point.x?.toFixed(1)}}, {{point.y?.toFixed(1)}})</text>
          </view>
        </view>
        <view class="result-item">
          <text class="result-label">椭圆参数：</text>
          <text class="result-value">中心点: ({{resultData.ellipse_params?.center_x?.toFixed(1)}}, {{resultData.ellipse_params?.center_y?.toFixed(1)}})</text>
          <text class="result-value">半长轴: {{resultData.ellipse_params?.semi_axis_a?.toFixed(1)}}</text>
          <text class="result-value">半短轴: {{resultData.ellipse_params?.semi_axis_b?.toFixed(1)}}</text>
          <text class="result-value">旋转角度: {{resultData.ellipse_params?.theta_degrees?.toFixed(1)}}°</text>
        </view>
      </view>
    </view>
  </view>
</template>

<script setup>
import { ref, onMounted, nextTick } from 'vue'
import { onLoad } from '@dcloudio/uni-app'

const imageSrc = ref('')          // local path of the image under analysis (set in onLoad)
const resultImageSrc = ref('')    // processed image (data URL) returned by the backend
const baselineImageSrc = ref('')  // temp file path of the merged photo+baseline image
const resultData = ref(null)      // analysisResults payload returned by the backend
const imageBase64 = ref('')       // base64 data URL of the original image
const isDrawing = ref(false)      // true while a baseline drag gesture is in progress
const startY = ref(0) // only the Y coordinate is tracked — the baseline is a horizontal line
const ctx = ref(null)             // canvas context for 'baselineCanvas'

// Initialize the baseline-overlay canvas context and its stroke style.
// Fix: inside <script setup> there is no `this` — the original passed
// `undefined` as the component-instance argument to createCanvasContext.
// Omitting it resolves the canvas against the current page, which is the
// effective behavior anyway, without the misleading dead argument.
const initCanvas = async () => {
  ctx.value = uni.createCanvasContext('baselineCanvas')

  // The stroke colour must fall inside the backend's detection range
  // (HSV H:100-115, S:150-255, V:100-255); #1E90FF (Deep Sky Blue) does.
  ctx.value.setStrokeStyle('#1E90FF')
  ctx.value.setLineWidth(2)
  ctx.value.setLineCap('round')
}


// Touch-start handler: begin a baseline drag and draw the first line.
// Fix: guard against a null rect from boundingClientRect (the sibling
// drawHorizontalLine already guards; this callback dereferenced rect blindly).
const startDrawing = (e) => {
  isDrawing.value = true;
  const touch = e.touches[0];

  // Resolve the canvas position so the touch can be converted to
  // canvas-relative coordinates.
  const query = uni.createSelectorQuery();
  query.select('.baseline-canvas').boundingClientRect((rect) => {
    if (!rect) return; // canvas not found / not rendered yet
    // NOTE(review): assumes touch.y is a viewport coordinate. On mini-program
    // canvas events, touch.x/y may already be canvas-relative, which would make
    // the rect.top subtraction wrong there — confirm per target platform.
    const canvasY = touch.y - rect.top; // convert to canvas-relative Y
    startY.value = canvasY;
    drawHorizontalLine(canvasY);
  }).exec();
};

// Render the baseline: clear the overlay canvas and stroke a single
// horizontal line at the given Y, clamped to the canvas bounds.
const drawHorizontalLine = (y) => {
  if (!ctx.value) return;

  uni.createSelectorQuery()
    .select('.baseline-canvas')
    .boundingClientRect((res) => {
      if (!res) return;

      const canvasWidth = res.width;
      const canvasHeight = res.height;

      // Keep the line inside the canvas.
      y = Math.max(0, Math.min(y, canvasHeight));

      // Wipe the previous line, then stroke the new one edge to edge.
      ctx.value.clearRect(0, 0, canvasWidth, canvasHeight);
      ctx.value.beginPath();
      ctx.value.moveTo(0, y);
      ctx.value.lineTo(canvasWidth, y);
      ctx.value.stroke();
      ctx.value.draw(true);

      console.log('基线绘制完成:', { y, canvasWidth, canvasHeight });
    })
    .exec();
};

// Touch-move handler: while a drag is active, track the new Y and redraw
// the baseline. Fixes: removed the stale commented-out earlier version and
// added the missing null guard on the boundingClientRect result.
const drawing = (e) => {
  if (!isDrawing.value) return;
  const touch = e.touches[0];

  // Resolve the canvas position to convert the touch to canvas-relative coords.
  const query = uni.createSelectorQuery();
  query.select('.baseline-canvas').boundingClientRect((rect) => {
    if (!rect) return; // canvas not found / not rendered yet
    // NOTE(review): assumes touch.y is viewport-relative; on mini-program canvas
    // events touch coordinates may already be canvas-relative — confirm per platform.
    const canvasY = touch.y - rect.top; // convert to canvas-relative Y
    startY.value = canvasY;
    drawHorizontalLine(canvasY);
  }).exec();
};

// Touch-end handler: finish the baseline drag gesture.
const stopDrawing = () => {
  isDrawing.value = false;
};

// Erase any drawn baseline from the overlay canvas. No-op when the
// context or canvas element is unavailable.
const clearBaseline = () => {
  if (!ctx.value) return;

  uni.createSelectorQuery()
    .select('.baseline-canvas')
    .boundingClientRect()
    .exec((res) => {
      const rect = res[0];
      if (!rect) return;
      ctx.value.clearRect(0, 0, rect.width, rect.height);
      ctx.value.draw(true);
    });
};

// Resolve the current baseline data: the user-drawn Y coordinate plus the
// canvas width. Returns null synchronously when no context exists, otherwise
// a Promise resolving to { y, width } or null if the canvas is not found.
const getBaselineData = () => {
  if (!ctx.value) return null;

  return new Promise((resolve) => {
    uni.createSelectorQuery()
      .select('.baseline-canvas')
      .boundingClientRect()
      .exec((res) => {
        const rect = res[0];
        if (!rect) {
          resolve(null);
          return;
        }
        // Only the Y coordinate matters — the baseline is horizontal.
        resolve({
          y: startY.value,
          width: rect.width,
        });
      });
  });
};

// Merge the original photo with the user-drawn baseline on 'mergeCanvas',
// crop the result to the image's rendered (aspectFit) area, and resolve a
// base64 data URL of the merged image — or null on any failure.
// All failures resolve null (never reject); callers must null-check.
const getCanvasBase64 = () => {
  return new Promise((resolve) => {
    const query = uni.createSelectorQuery();
    // Measure image-container: both the photo and the baseline overlay are
    // laid out relative to it.
    query.select('.image-container')
      .boundingClientRect()
      .exec(async (containerRes) => {
        if (!containerRes[0]) {
          console.error('未找到image-container元素');
          resolve(null);
          return;
        }

        const containerWidthPx = containerRes[0].width;
        const containerHeightPx = containerRes[0].height;
        console.log('容器尺寸 (image-container):', { containerWidthPx, containerHeightPx });

        // 1. Get the original image's intrinsic dimensions.
        let imageInfo;
        try {
          imageInfo = await new Promise((res, rej) => {
            uni.getImageInfo({
              src: imageSrc.value,
              success: res,
              fail: rej
            });
          });
        } catch (err) {
          console.error('获取原始图片信息失败:', err);
          resolve(null);
          return;
        }

        const originalWidth = imageInfo.width;
        const originalHeight = imageInfo.height;
        console.log('原始图片尺寸:', { originalWidth, originalHeight });

        // 2. Compute the image's rendered size and offset inside the container
        //    under aspectFit (letterboxed, centered on the shorter axis).
        let renderedImageWidth;
        let renderedImageHeight;
        let offsetX = 0;
        let offsetY = 0;

        const imageAspectRatio = originalWidth / originalHeight;
        const containerAspectRatio = containerWidthPx / containerHeightPx;

        if (imageAspectRatio > containerAspectRatio) {
          // Image is wider: fit to container width, center vertically.
          renderedImageWidth = containerWidthPx;
          renderedImageHeight = containerWidthPx / imageAspectRatio;
          offsetY = (containerHeightPx - renderedImageHeight) / 2;
        } else {
          // Image is taller (or equal ratio): fit to container height, center horizontally.
          renderedImageHeight = containerHeightPx;
          renderedImageWidth = containerHeightPx * imageAspectRatio;
          offsetX = (containerWidthPx - renderedImageWidth) / 2;
        }

        console.log('图片在mergeCanvas上渲染尺寸和偏移:', { renderedImageWidth, renderedImageHeight, offsetX, offsetY });
        console.log('用户绘制基线Y坐标 (startY.value):', startY.value);

        // 3. Create the merge-canvas context.
        //    NOTE(review): `this` is undefined inside <script setup>, so this is
        //    equivalent to omitting the second argument — confirm and clean up.
        const mergeCanvas = uni.createCanvasContext('mergeCanvas', this);
        
        // Clear mergeCanvas so the background is transparent/white (default).
        mergeCanvas.clearRect(0, 0, containerWidthPx, containerHeightPx);

        // 4. Draw the aspectFit-scaled original image onto the merge canvas.
        //    NOTE(review): assumes mergeCanvas's CSS size (100% x 500rpx) matches
        //    image-container's measured px size — confirm they stay in sync.
        mergeCanvas.drawImage(
          imageSrc.value,
          offsetX,
          offsetY,
          renderedImageWidth,
          renderedImageHeight
        );

        // 5. Baseline Y on the merge canvas.
        // startY.value is the user's Y on baselineCanvas (same size as image-container),
        // so it is used directly as the baseline Y on mergeCanvas.
        // The line is drawn from (offsetX, startY.value) to (offsetX + renderedImageWidth, startY.value).
        // The export below crops (offsetX, offsetY)..(offsetX + renderedImageWidth, offsetY + renderedImageHeight),
        // so in the exported image the baseline lands at Y = startY.value - offsetY.

        const finalBaselineYOnMergeCanvas = startY.value; // baseline Y on mergeCanvas
        const expectedBaselineYInCroppedImage = startY.value - offsetY;
        console.log('基线在mergeCanvas上的绘制Y坐标:', finalBaselineYOnMergeCanvas);
        console.log('基线在最终裁剪图片中的预期Y坐标:', expectedBaselineYInCroppedImage);

        // 6. Draw the baseline onto the merge canvas.
        mergeCanvas.setStrokeStyle('#1E90FF'); // blue matching the backend's HSV detection range
        mergeCanvas.setLineWidth(2);
        mergeCanvas.setLineCap('round');
        mergeCanvas.beginPath();
        // The baseline's X range is aligned with the image's rendered area.
        mergeCanvas.moveTo(offsetX, finalBaselineYOnMergeCanvas); // start at the image's left edge
        mergeCanvas.lineTo(offsetX + renderedImageWidth, finalBaselineYOnMergeCanvas); // end at the image's right edge
        mergeCanvas.stroke();
        console.log('在mergeCanvas上绘制基线，Y坐标:', finalBaselineYOnMergeCanvas, '范围:', `${offsetX} - ${offsetX + renderedImageWidth}`);

        // 7. Flush the draw, then export the cropped region as a temp file
        //    and convert it to a base64 data URL.
        mergeCanvas.draw(false, () => {
          uni.canvasToTempFilePath({
            canvasId: 'mergeCanvas',
            x: offsetX, // crop origin X = image's rendered X
            y: offsetY, // crop origin Y = image's rendered Y
            width: renderedImageWidth, // crop width = image's rendered width
            height: renderedImageHeight, // crop height = image's rendered height
            destWidth: renderedImageWidth, // exported image width
            destHeight: renderedImageHeight, // exported image height
            success: (mergeRes) => {
              baselineImageSrc.value = mergeRes.tempFilePath;
              // Read the temp file and convert it to base64.
              uni.getFileSystemManager().readFile({
                filePath: mergeRes.tempFilePath,
                encoding: 'base64',
                success: (readRes) => {
                  if (readRes && readRes.data) {
                    const base64Data = 'data:image/jpeg;base64,' + readRes.data;
                    console.log('合并图片base64转换成功，长度:', base64Data.length);
                    resolve(base64Data);
                  } else {
                    console.error('合并图片数据为空');
                    resolve(null);
                  }
                },
                fail: (err) => {
                  console.error('读取合并文件失败:', err);
                  resolve(null);
                }
              });
            },
            fail: (err) => {
              console.error('获取合并画布内容失败:', err);
              resolve(null);
            }
          });
        });
      });
  });
};

// Merge the photo with the drawn baseline, POST it to the analysis backend,
// and populate resultData / resultImageSrc from the response. All failures
// surface as a toast; nothing is thrown to the caller.
const calculateContactAngle = async () => {
  try {
    // Bail out early if the original image was never loaded/converted.
    if (!imageBase64.value) {
      uni.showToast({
        title: '图片数据无效',
        icon: 'none'
      })
      return
    }

    // Build the merged (photo + baseline) image as a base64 data URL.
    console.log('开始获取基线画布内容...')
    const mergedImageBase64 = await getCanvasBase64()
    if (!mergedImageBase64) {
      uni.showToast({
        title: '获取基线失败',
        icon: 'none'
      })
      return
    }
    console.log('基线画布内容获取成功')

    // Request payload: only the merged image is sent.
    const requestData = {
      imageSrc: mergedImageBase64  // send the merged image directly
    }

    console.log('发送请求数据:', {
      imageSrcLength: requestData.imageSrc.length
    })

    // POST to the backend.  Alternate endpoint: http://s.wangchenyang.info/lunkuo/Droppy/analyzeImage
    // NOTE(review): hardcoded plain-http IP — consider moving to config and using https.
    // NOTE(review): awaiting uni.request assumes the promisified form that resolves
    // to the response object; on some uni-app versions it resolves [err, res] — confirm.
    const response = await uni.request({
      url: 'http://47.97.2.25:5000/Droppy/analyzeImage',
      method: 'POST',
      data: requestData,
      header: {
        'content-type': 'application/json'
      }
    })

    console.log('收到响应:', response)

    if (response.statusCode === 200) {
      const result = response.data
      console.log('处理结果:', result)
      
      if (result.status === 'success') {
        // Success path: store the analysis payload and the annotated image.
        const analysisResults = result.analysisResults
        console.log('分析结果:', analysisResults)
        
        // Keep the complete analysis result for the template to render.
        resultData.value = analysisResults

        // Turn the base64 result image into a displayable data URL.
        if (result.processedImage) {
          // Ensure the data has the proper data-URL prefix.
          if (!result.processedImage.startsWith('data:image')) {
            resultImageSrc.value = 'data:image/jpeg;base64,' + result.processedImage
          } else {
            resultImageSrc.value = result.processedImage
          }
          console.log('设置结果图片')
        }

        // Notify the user of success.
        uni.showToast({
          title: '计算成功',
          icon: 'success',
          duration: 2000
        })
      } else {
        throw new Error(result.analysisResults?.error || '计算失败')
      }
    } else {
      throw new Error(response.data?.error || '计算失败')
    }
  } catch (error) {
    console.error('计算接触角失败:', error)
    uni.showToast({
      title: error.message || '计算失败',
      icon: 'none',
      duration: 2000
    })
  }
}

// Page load: pull the image path handed over via storage by the previous
// page, convert the file to a base64 data URL for upload, then clear the
// storage key so stale paths don't leak into the next visit.
onLoad(async (options) => {
  // Retrieve the image path from storage.
  const analyzedImagePath = uni.getStorageSync('analyzedImagePath')
  if (analyzedImagePath) {
    imageSrc.value = analyzedImagePath
    try {
      // Convert the image file to base64.
      const res = await new Promise((resolve, reject) => {
        uni.getFileSystemManager().readFile({
          filePath: analyzedImagePath,
          encoding: 'base64',
          success: (res) => resolve(res),
          fail: (err) => reject(err)
        })
      })
      
      if (res && res.data) {
        // Store the base64 image data with the data:image/jpeg;base64, prefix.
        // NOTE(review): assumes the file is JPEG — the prefix would be wrong for PNG.
        imageBase64.value = 'data:image/jpeg;base64,' + res.data
      } else {
        throw new Error('图片数据为空')
      }
      
      // Remove the storage key so it doesn't affect the next visit.
      uni.removeStorageSync('analyzedImagePath')
    } catch (error) {
      console.error('转换图片失败:', error)
      uni.showToast({
        title: '图片转换失败: ' + (error.message || '未知错误'),
        icon: 'none',
        duration: 2000
      })
    }
  } else {
    uni.showToast({
      title: '未找到图片',
      icon: 'none',
      duration: 2000
    })
  }
})

// Create the canvas context after the first DOM render has flushed.
onMounted(() => {
  nextTick(initCanvas);
});
</script>

<style>
/* Fix: the .baseline-canvas rule was declared twice (verbatim duplicate at
   the end of the block); the dead duplicate has been removed. */
.container {
  padding: 20rpx;
  display: flex;
  flex-direction: column;
  align-items: center;
  gap: 20rpx;
}

/* Positioning context for the preview image and its canvas overlay. */
.image-container {
  position: relative;
  width: 100%;
  height: 500rpx;
  border: 1px solid #ddd;
  border-radius: 8rpx;
  overflow: hidden;
}

.preview-image {
  width: 100%;
  height: 100%;
}

/* Transparent overlay covering the image, on which the baseline is drawn. */
.baseline-canvas {
  position: absolute;
  top: 0;
  left: 0;
  width: 100%;
  height: 100%;
  z-index: 1;
}

.action-container {
  display: flex;
  flex-direction: column;
  gap: 20rpx;
  width: 100%;
}

.action-button {
  background: linear-gradient(135deg, #4a90e2, #357abd);
  padding: 20rpx 40rpx;
  border-radius: 50rpx;
  display: flex;
  justify-content: center;
  align-items: center;
  transition: all 0.3s ease;
}

.action-button:active {
  transform: scale(0.95);
  opacity: 0.8;
}

.action-button.disabled {
  background: #cccccc;
  cursor: not-allowed;
}

.button-text {
  color: white;
  font-size: 32rpx;
  font-weight: 500;
  white-space: nowrap;
}

.result-container {
  width: 100%;
  margin-top: 20rpx;
}

.result-image {
  width: 100%;
  height: 500rpx;
  border: 1px solid #ddd;
  border-radius: 8rpx;
}

.result-data {
  margin-top: 20rpx;
  padding: 20rpx;
  background-color: #f5f5f5;
  border-radius: 8rpx;
}

.result-item {
  margin-bottom: 20rpx;
  padding: 10rpx;
  background-color: #fff;
  border-radius: 6rpx;
}

.result-label {
  font-size: 28rpx;
  color: #666;
  font-weight: bold;
  margin-bottom: 10rpx;
  display: block;
}

.result-value {
  font-size: 26rpx;
  color: #333;
  margin: 6rpx 0;
  display: block;
}

.contact-point {
  margin-left: 20rpx;
}
</style>