import type { FastifyBaseLogger } from "fastify";
import { createHash } from "node:crypto";
import { basename } from "node:path";
import { writeAuditLog } from "./audit.js";
import { extractDocumentText } from "./document-reviews.js";
import { createId } from "./id.js";
import { consolidateCaseFacts, createDocumentAndExtraction } from "./intelligence.js";
import { prisma } from "./prisma.js";
import { deleteBinaryFile, readBinaryFile, saveBinaryFile } from "./storage.js";

/**
 * Raw snake_case row of `repository_folder_upload_jobs` as returned by
 * `prisma.$queryRaw`. Counters may arrive as `bigint` and timestamps as
 * strings depending on the database driver, so readers normalize them with
 * `toNumber` / `toIsoString` before exposing them.
 */
type RepositoryFolderUploadJobRow = {
  id: string;
  law_firm_id: string;
  client_id: string;
  case_id: string | null;
  created_by_user_id: string | null;
  fallback_document_type_code: string | null;
  status_code: string;
  stage_code: string;
  total_files: number | bigint;
  processed_files: number | bigint;
  succeeded_files: number | bigint;
  failed_files: number | bigint;
  extracted_field_count: number | bigint;
  synced_client_field_count: number | bigint;
  classification_ai_count: number | bigint;
  classification_heuristic_count: number | bigint;
  classification_fallback_count: number | bigint;
  current_file_name: string | null;
  consolidation_status_code: string | null;
  consolidation_ai_run_id: string | null;
  consolidated_fact_count: number | bigint;
  error_message: string | null;
  started_at: Date | string | null;
  completed_at: Date | string | null;
  created_at: Date | string;
  updated_at: Date | string;
};

/**
 * Raw snake_case row of `repository_folder_upload_job_files` (one entry per
 * file in a folder import). Same driver caveats as
 * `RepositoryFolderUploadJobRow`: bigint counters, string-or-Date timestamps,
 * and `classification_confidence` may come back as a numeric string.
 */
type RepositoryFolderUploadJobFileRow = {
  id: string;
  job_id: string;
  sort_order: number | bigint;
  file_name: string;
  relative_path: string | null;
  title: string;
  mime_type: string;
  size_bytes: number | bigint;
  status_code: string;
  document_type_code: string | null;
  classification_source: string | null;
  classification_confidence: number | string | null;
  classification_reason: string | null;
  repository_item_id: string | null;
  file_id: string | null;
  document_record_id: string | null;
  extracted_field_count: number | bigint;
  synced_client_field_count: number | bigint;
  error_message: string | null;
  started_at: Date | string | null;
  completed_at: Date | string | null;
  created_at: Date | string;
  updated_at: Date | string;
};

// Actor context (user + office) for a folder upload job.
// NOTE(review): this alias appears unreferenced anywhere in this module —
// confirm it is not consumed elsewhere before removing it.
type FolderUploadJobActor = {
  userId: string | null;
  officeId: string | null;
};

/**
 * Input for registering one staged file on an upload job.
 *
 * The staging-storage fields are optional because a file may be recorded as
 * immediately `failed` (via `initialStatusCode`/`errorMessage`) without ever
 * having been written to staging storage.
 */
type FolderUploadJobFileStagingInput = {
  jobId: string;
  sortOrder: number;
  fileName: string;
  relativePath: string | null;
  title: string;
  mimeType: string;
  sizeBytes: number;
  stagingStorageProvider?: string | null;
  stagingStorageBucket?: string | null;
  stagingObjectKey?: string | null;
  stagingStorageRegion?: string | null;
  stagingOriginalFileName?: string | null;
  initialStatusCode?: "queued" | "failed";
  errorMessage?: string | null;
};

/**
 * Camel-cased, JSON-safe view of an upload job for API responses.
 * All timestamps are ISO-8601 strings; all counters are plain numbers.
 */
export type RepositoryFolderUploadJobSummary = {
  id: string;
  clientId: string;
  caseId: string | null;
  fallbackDocumentTypeCode: string | null;
  statusCode: string;
  stageCode: string;
  totalFiles: number;
  processedFiles: number;
  succeededFiles: number;
  failedFiles: number;
  // Derived: totalFiles - processedFiles, clamped to >= 0.
  remainingFiles: number;
  extractedFieldCount: number;
  syncedClientFieldCount: number;
  currentFileName: string | null;
  errorMessage: string | null;
  createdAt: string;
  updatedAt: string;
  startedAt: string | null;
  completedAt: string | null;
  // Case-fact consolidation outcome; fields are null until consolidation runs.
  consolidation: {
    statusCode: string | null;
    aiRunId: string | null;
    createdFactCount: number;
  } | null;
  // How many files were classified by each classification strategy.
  classificationCounts: {
    ai: number;
    heuristic: number;
    fallback: number;
  };
};

/**
 * Summary plus the per-file breakdown, ordered by `sortOrder`.
 * Returned by `getRepositoryFolderUploadJobDetail`.
 */
export type RepositoryFolderUploadJobDetail = RepositoryFolderUploadJobSummary & {
  files: Array<{
    id: string;
    sortOrder: number;
    fileName: string;
    relativePath: string | null;
    title: string;
    mimeType: string;
    sizeBytes: number;
    statusCode: string;
    documentTypeCode: string | null;
    classificationSource: string | null;
    classificationConfidence: number | null;
    classificationReason: string | null;
    repositoryItemId: string | null;
    fileId: string | null;
    documentRecordId: string | null;
    extractedFieldCount: number;
    syncedClientFieldCount: number;
    errorMessage: string | null;
    startedAt: string | null;
    completedAt: string | null;
  }>;
};

const activeRepositoryFolderUploadJobs = new Map<string, Promise<void>>();

/**
 * Maps an unknown thrown value to a user-facing message for a failed file.
 *
 * Known low-level failures (the zip-layer error thrown for malformed DOCX
 * archives) are translated into friendlier wording. Anything that is not an
 * `Error` — or an `Error` whose message is empty — falls back to a generic
 * message instead of surfacing an empty string to the user.
 */
function sanitizeRepositoryDocumentProcessingErrorMessage(error: unknown) {
  const message =
    error instanceof Error && error.message
      ? error.message
      : "Unexpected failure while processing the file.";

  // Thrown by the zip layer when a DOCX payload is not a valid zip archive.
  if (message.includes("Can't find end of central directory")) {
    return "This DOCX file is invalid, corrupted, or not a real DOCX archive.";
  }

  return message;
}

/**
 * Coerces a DB-driver value (number, bigint, or numeric string) to a finite
 * `number`. Null/undefined, blank strings, NaN, and infinities all become 0.
 */
function toNumber(value: number | bigint | string | null | undefined) {
  switch (typeof value) {
    case "number":
      return Number.isFinite(value) ? value : 0;
    case "bigint":
      return Number(value);
    case "string": {
      if (!value.trim()) {
        return 0;
      }
      const parsed = Number(value);
      return Number.isFinite(parsed) ? parsed : 0;
    }
    default:
      return 0;
  }
}

/**
 * Normalizes a DB timestamp (Date or string) to an ISO-8601 string.
 * Returns null for null/undefined/empty input. A string that cannot be
 * parsed as a date is returned unchanged rather than dropped.
 */
function toIsoString(value: Date | string | null | undefined) {
  if (!value) {
    return null;
  }

  if (value instanceof Date) {
    return value.toISOString();
  }

  const fromString = new Date(value);
  if (Number.isNaN(fromString.getTime())) {
    return String(value);
  }
  return fromString.toISOString();
}

/**
 * Trims a client-supplied path and converts Windows separators to "/".
 * Returns null when the result is empty.
 */
function normalizeRelativePath(input: string | null | undefined) {
  const cleaned = (input ?? "").trim().replace(/\\/g, "/");
  return cleaned.length > 0 ? cleaned : null;
}

/**
 * Extracts the bare file name from a (possibly Windows-style) relative path,
 * defaulting to "document" when the input normalizes to nothing.
 */
function getFileNameFromRelativePath(input: string | null | undefined) {
  const normalizedPath = normalizeRelativePath(input);
  return basename(normalizedPath ?? "document");
}

/**
 * Maps a raw snake_case job row to the camelCase summary exposed over the
 * API, normalizing bigint counters and Date/string timestamps along the way.
 */
function buildRepositoryFolderUploadJobSummary(
  job: RepositoryFolderUploadJobRow,
): RepositoryFolderUploadJobSummary {
  const nowIso = new Date().toISOString();
  const total = toNumber(job.total_files);
  const processed = toNumber(job.processed_files);

  return {
    id: job.id,
    clientId: job.client_id,
    caseId: job.case_id,
    fallbackDocumentTypeCode: job.fallback_document_type_code,
    statusCode: job.status_code,
    stageCode: job.stage_code,
    totalFiles: total,
    processedFiles: processed,
    succeededFiles: toNumber(job.succeeded_files),
    failedFiles: toNumber(job.failed_files),
    // Clamp so counter drift can never produce a negative remaining count.
    remainingFiles: total > processed ? total - processed : 0,
    extractedFieldCount: toNumber(job.extracted_field_count),
    syncedClientFieldCount: toNumber(job.synced_client_field_count),
    currentFileName: job.current_file_name,
    errorMessage: job.error_message,
    // Fall back to "now" so consumers always receive a valid timestamp.
    createdAt: toIsoString(job.created_at) ?? nowIso,
    updatedAt: toIsoString(job.updated_at) ?? nowIso,
    startedAt: toIsoString(job.started_at),
    completedAt: toIsoString(job.completed_at),
    // Always populated here even though the summary type allows null.
    consolidation: {
      statusCode: job.consolidation_status_code,
      aiRunId: job.consolidation_ai_run_id,
      createdFactCount: toNumber(job.consolidated_fact_count),
    },
    classificationCounts: {
      ai: toNumber(job.classification_ai_count),
      heuristic: toNumber(job.classification_heuristic_count),
      fallback: toNumber(job.classification_fallback_count),
    },
  };
}

/**
 * Fetches a single upload-job row, scoped to the owning law firm.
 *
 * Returns the raw snake_case row, or null when the job does not exist or
 * belongs to another firm (tenant isolation is enforced in the WHERE clause).
 */
async function getRepositoryFolderUploadJobRow(
  lawFirmId: string,
  jobId: string,
) {
  const [job] = await prisma.$queryRaw<Array<RepositoryFolderUploadJobRow>>`
    SELECT
      id,
      law_firm_id,
      client_id,
      case_id,
      created_by_user_id,
      fallback_document_type_code,
      status_code,
      stage_code,
      total_files,
      processed_files,
      succeeded_files,
      failed_files,
      extracted_field_count,
      synced_client_field_count,
      classification_ai_count,
      classification_heuristic_count,
      classification_fallback_count,
      current_file_name,
      consolidation_status_code,
      consolidation_ai_run_id,
      consolidated_fact_count,
      error_message,
      started_at,
      completed_at,
      created_at,
      updated_at
    FROM repository_folder_upload_jobs
    WHERE id = ${jobId}
      AND law_firm_id = ${lawFirmId}
    LIMIT 1
  `;

  return job ?? null;
}

/**
 * Lists all file rows belonging to a job, ordered by upload order.
 * No tenant check here — callers must have already scoped the job id.
 */
async function listRepositoryFolderUploadJobFileRows(jobId: string) {
  return prisma.$queryRaw<Array<RepositoryFolderUploadJobFileRow>>`
    SELECT
      id,
      job_id,
      sort_order,
      file_name,
      relative_path,
      title,
      mime_type,
      size_bytes,
      status_code,
      document_type_code,
      classification_source,
      classification_confidence,
      classification_reason,
      repository_item_id,
      file_id,
      document_record_id,
      extracted_field_count,
      synced_client_field_count,
      error_message,
      started_at,
      completed_at,
      created_at,
      updated_at
    FROM repository_folder_upload_job_files
    WHERE job_id = ${jobId}
    ORDER BY sort_order ASC
  `;
}

/**
 * Recomputes the denormalized counters on the job row from its file rows
 * (totals, processed/succeeded/failed counts, field counts, classification
 * source tallies) and mirrors the first file still marked 'processing' into
 * `current_file_name` for progress display.
 *
 * Not transactional: the aggregate, the current-file lookup, and the UPDATE
 * are three separate statements, so a concurrent writer could make the
 * snapshot momentarily stale — acceptable for progress reporting.
 */
async function recalculateRepositoryFolderUploadJob(jobId: string) {
  const [aggregate] = await prisma.$queryRaw<
    Array<{
      total_files: number | bigint;
      processed_files: number | bigint;
      succeeded_files: number | bigint;
      failed_files: number | bigint;
      extracted_field_count: number | bigint;
      synced_client_field_count: number | bigint;
      classification_ai_count: number | bigint;
      classification_heuristic_count: number | bigint;
      classification_fallback_count: number | bigint;
    }>
  >`
    SELECT
      COUNT(*) AS total_files,
      SUM(CASE WHEN status_code IN ('completed', 'failed') THEN 1 ELSE 0 END) AS processed_files,
      SUM(CASE WHEN status_code = 'completed' THEN 1 ELSE 0 END) AS succeeded_files,
      SUM(CASE WHEN status_code = 'failed' THEN 1 ELSE 0 END) AS failed_files,
      SUM(extracted_field_count) AS extracted_field_count,
      SUM(synced_client_field_count) AS synced_client_field_count,
      SUM(CASE WHEN classification_source = 'ai' THEN 1 ELSE 0 END) AS classification_ai_count,
      SUM(CASE WHEN classification_source = 'heuristic' THEN 1 ELSE 0 END) AS classification_heuristic_count,
      SUM(CASE WHEN classification_source = 'fallback' THEN 1 ELSE 0 END) AS classification_fallback_count
    FROM repository_folder_upload_job_files
    WHERE job_id = ${jobId}
  `;

  // First in-flight file by upload order, shown as "currently processing".
  const [processingFile] = await prisma.$queryRaw<Array<{ file_name: string | null }>>`
    SELECT file_name
    FROM repository_folder_upload_job_files
    WHERE job_id = ${jobId}
      AND status_code = 'processing'
    ORDER BY sort_order ASC
    LIMIT 1
  `;

  // toNumber also guards against SUM() returning NULL on an empty file set.
  await prisma.$executeRaw`
    UPDATE repository_folder_upload_jobs
    SET
      total_files = ${toNumber(aggregate?.total_files)},
      processed_files = ${toNumber(aggregate?.processed_files)},
      succeeded_files = ${toNumber(aggregate?.succeeded_files)},
      failed_files = ${toNumber(aggregate?.failed_files)},
      extracted_field_count = ${toNumber(aggregate?.extracted_field_count)},
      synced_client_field_count = ${toNumber(aggregate?.synced_client_field_count)},
      classification_ai_count = ${toNumber(aggregate?.classification_ai_count)},
      classification_heuristic_count = ${toNumber(aggregate?.classification_heuristic_count)},
      classification_fallback_count = ${toNumber(aggregate?.classification_fallback_count)},
      current_file_name = ${processingFile?.file_name ?? null},
      updated_at = CURRENT_TIMESTAMP
    WHERE id = ${jobId}
  `;
}

/**
 * Partially updates a job's status fields.
 *
 * Semantics per field:
 * - `stageCode`, `statusCode`, `consolidationStatusCode`,
 *   `consolidationAiRunId`, `consolidatedFactCount`: COALESCE-style — an
 *   omitted/null value leaves the stored value untouched (these fields can
 *   therefore never be reset to NULL through this helper).
 * - `errorMessage`: uses an explicit "was the key provided" flag so callers
 *   CAN set it to NULL by passing `errorMessage: null`.
 * - `started_at`: set once, on the first transition into processing.
 * - `completed_at`: cleared when `clearCompletedAt`, stamped for terminal
 *   statuses ('completed' / 'completed_with_errors' / 'failed'), otherwise
 *   left as-is.
 */
async function updateRepositoryFolderUploadJobStatus(input: {
  jobId: string;
  stageCode?: string;
  statusCode?: string;
  errorMessage?: string | null;
  consolidationStatusCode?: string | null;
  consolidationAiRunId?: string | null;
  consolidatedFactCount?: number | null;
  clearCompletedAt?: boolean;
}) {
  // Only fetch started_at; it decides whether this call starts the clock.
  const [job] = await prisma.$queryRaw<Array<{ started_at: Date | string | null }>>`
    SELECT started_at
    FROM repository_folder_upload_jobs
    WHERE id = ${input.jobId}
    LIMIT 1
  `;

  const shouldSetStartedAt = !job?.started_at && (input.statusCode === "processing" || input.stageCode === "processing_files");
  // Distinguishes "caller omitted errorMessage" from "caller passed null".
  const shouldUpdateErrorMessage = input.errorMessage !== undefined;

  // Boolean flags are passed as 1/0 so the CASE expressions are portable.
  await prisma.$executeRaw`
    UPDATE repository_folder_upload_jobs
    SET
      stage_code = COALESCE(${input.stageCode ?? null}, stage_code),
      status_code = COALESCE(${input.statusCode ?? null}, status_code),
      error_message = CASE
        WHEN ${shouldUpdateErrorMessage ? 1 : 0} = 1 THEN ${input.errorMessage ?? null}
        ELSE error_message
      END,
      consolidation_status_code = COALESCE(${input.consolidationStatusCode ?? null}, consolidation_status_code),
      consolidation_ai_run_id = COALESCE(${input.consolidationAiRunId ?? null}, consolidation_ai_run_id),
      consolidated_fact_count = COALESCE(${input.consolidatedFactCount ?? null}, consolidated_fact_count),
      started_at = CASE
        WHEN ${shouldSetStartedAt ? 1 : 0} = 1 THEN CURRENT_TIMESTAMP
        ELSE started_at
      END,
      completed_at = CASE
        WHEN ${input.clearCompletedAt ? 1 : 0} = 1 THEN NULL
        WHEN ${input.statusCode === "completed" || input.statusCode === "completed_with_errors" || input.statusCode === "failed" ? 1 : 0} = 1 THEN CURRENT_TIMESTAMP
        ELSE completed_at
      END,
      updated_at = CURRENT_TIMESTAMP
    WHERE id = ${input.jobId}
  `;
}

/**
 * Recomputes counters, then derives and persists the job's overall status
 * from its per-file outcomes:
 * - all files processed, mixed outcomes  -> 'completed_with_errors'
 * - all files processed, only successes  -> 'completed'
 * - all files processed, only failures   -> 'failed'
 * - files still pending                  -> 'processing'
 *
 * The top-level error message is kept only for a fully-failed job; any other
 * outcome clears it. Returns the refreshed row, or null if the job vanished.
 *
 * NOTE(review): a job with zero files satisfies `processed >= total` with no
 * successes or failures, so none of the terminal branches fire and the prior
 * status is kept — confirm empty folder imports cannot reach this path.
 */
async function finalizeRepositoryFolderUploadJob(jobId: string) {
  await recalculateRepositoryFolderUploadJob(jobId);

  const [job] = await prisma.$queryRaw<Array<RepositoryFolderUploadJobRow>>`
    SELECT
      id,
      law_firm_id,
      client_id,
      case_id,
      created_by_user_id,
      fallback_document_type_code,
      status_code,
      stage_code,
      total_files,
      processed_files,
      succeeded_files,
      failed_files,
      extracted_field_count,
      synced_client_field_count,
      classification_ai_count,
      classification_heuristic_count,
      classification_fallback_count,
      current_file_name,
      consolidation_status_code,
      consolidation_ai_run_id,
      consolidated_fact_count,
      error_message,
      started_at,
      completed_at,
      created_at,
      updated_at
    FROM repository_folder_upload_jobs
    WHERE id = ${jobId}
    LIMIT 1
  `;

  if (!job) {
    return null;
  }

  const succeededFiles = toNumber(job.succeeded_files);
  const failedFiles = toNumber(job.failed_files);
  const totalFiles = toNumber(job.total_files);
  const processedFiles = toNumber(job.processed_files);

  let statusCode = job.status_code;
  if (processedFiles >= totalFiles) {
    if (succeededFiles > 0 && failedFiles > 0) {
      statusCode = "completed_with_errors";
    } else if (succeededFiles > 0) {
      statusCode = "completed";
    } else if (failedFiles > 0) {
      statusCode = "failed";
    }
  } else {
    statusCode = "processing";
  }

  // Stage mirrors status: keep the live stage while processing, otherwise
  // collapse to a terminal 'failed'/'completed' stage.
  const stageCode =
    statusCode === "processing" ? job.stage_code :
    statusCode === "failed" ? "failed" :
    "completed";

  await updateRepositoryFolderUploadJobStatus({
    jobId,
    statusCode,
    stageCode,
    errorMessage:
      statusCode === "failed" ?
        job.error_message || "All files in this folder import failed." :
        null,
  });

  // Re-read through the tenant-scoped accessor to return a fresh row.
  return getRepositoryFolderUploadJobRow(job.law_firm_id, jobId);
}

/**
 * Runs a folder-upload job end to end (called in the background by
 * `startRepositoryFolderUploadJob`):
 *
 * 1. Load the job; no-op if it no longer exists.
 * 2. Mark it 'processing' and refresh its counters.
 * 3. For each file still 'queued'/'processing' (in upload order): read the
 *    staged bytes, extract text, create the document + extraction, record the
 *    classification/extraction results, write a best-effort audit entry, and
 *    delete the staging object. Any per-file error marks just that file
 *    'failed' with a sanitized message — the loop continues.
 * 4. Finalize the job, then (when it has a case and at least one success)
 *    run case-fact consolidation, recording its outcome on the job.
 * 5. Finalize once more so the terminal status/stage reflect consolidation.
 *
 * Files are deliberately processed sequentially so progress counters and the
 * "current file" indicator stay meaningful.
 */
async function processRepositoryFolderUploadJobInternal(input: {
  jobId: string;
  logger?: FastifyBaseLogger | null;
}) {
  const [job] = await prisma.$queryRaw<Array<RepositoryFolderUploadJobRow>>`
    SELECT
      id,
      law_firm_id,
      client_id,
      case_id,
      created_by_user_id,
      fallback_document_type_code,
      status_code,
      stage_code,
      total_files,
      processed_files,
      succeeded_files,
      failed_files,
      extracted_field_count,
      synced_client_field_count,
      classification_ai_count,
      classification_heuristic_count,
      classification_fallback_count,
      current_file_name,
      consolidation_status_code,
      consolidation_ai_run_id,
      consolidated_fact_count,
      error_message,
      started_at,
      completed_at,
      created_at,
      updated_at
    FROM repository_folder_upload_jobs
    WHERE id = ${input.jobId}
    LIMIT 1
  `;

  if (!job) {
    return;
  }

  // Transition to processing; clearCompletedAt supports re-running a job
  // that previously reached a terminal state.
  await updateRepositoryFolderUploadJobStatus({
    jobId: job.id,
    stageCode: "processing_files",
    statusCode: "processing",
    errorMessage: null,
    clearCompletedAt: true,
  });
  await recalculateRepositoryFolderUploadJob(job.id);

  // 'processing' rows are included so files interrupted by a crash/restart
  // are picked up again.
  const pendingFiles = await prisma.$queryRaw<
    Array<
      RepositoryFolderUploadJobFileRow & {
        staging_storage_provider: string | null;
        staging_storage_bucket: string | null;
        staging_object_key: string | null;
        staging_storage_region: string | null;
        staging_original_file_name: string | null;
      }
    >
  >`
    SELECT
      id,
      job_id,
      sort_order,
      file_name,
      relative_path,
      title,
      mime_type,
      size_bytes,
      staging_storage_provider,
      staging_storage_bucket,
      staging_object_key,
      staging_storage_region,
      staging_original_file_name,
      status_code,
      document_type_code,
      classification_source,
      classification_confidence,
      classification_reason,
      repository_item_id,
      file_id,
      document_record_id,
      extracted_field_count,
      synced_client_field_count,
      error_message,
      started_at,
      completed_at,
      created_at,
      updated_at
    FROM repository_folder_upload_job_files
    WHERE job_id = ${job.id}
      AND status_code IN ('queued', 'processing')
    ORDER BY sort_order ASC
  `;

  for (const jobFile of pendingFiles) {
    // Mark the file in-flight; COALESCE keeps the original started_at when
    // the file is being retried after an interruption.
    await prisma.$executeRaw`
      UPDATE repository_folder_upload_job_files
      SET
        status_code = 'processing',
        error_message = NULL,
        started_at = COALESCE(started_at, CURRENT_TIMESTAMP),
        completed_at = NULL,
        updated_at = CURRENT_TIMESTAMP
      WHERE id = ${jobFile.id}
    `;

    // Refresh counters so polling clients see the current file immediately.
    await recalculateRepositoryFolderUploadJob(job.id);

    try {
      if (!jobFile.staging_storage_provider || !jobFile.staging_object_key) {
        throw new Error("The staged upload file is no longer available.");
      }

      const bytes = await readBinaryFile({
        storageProvider: jobFile.staging_storage_provider,
        objectKey: jobFile.staging_object_key,
      });
      const textContent = await extractDocumentText({
        fileName: jobFile.staging_original_file_name || jobFile.file_name,
        mimeType: jobFile.mime_type,
        bytes,
        providedText: null,
      });

      // documentTypeMode 'auto' lets the pipeline classify; the job-level
      // fallback type is used only when classification cannot decide.
      const created = await createDocumentAndExtraction({
        lawFirmId: job.law_firm_id,
        clientId: job.client_id,
        caseId: job.case_id,
        actorUserId: job.created_by_user_id,
        title: jobFile.title,
        documentTypeCode: job.fallback_document_type_code || "other_supporting",
        documentTypeMode: "auto",
        originalFileName: jobFile.staging_original_file_name || jobFile.file_name,
        mimeType: jobFile.mime_type,
        fileBuffer: bytes,
        textContent: textContent || null,
      });

      await prisma.$executeRaw`
        UPDATE repository_folder_upload_job_files
        SET
          status_code = 'completed',
          document_type_code = ${created.documentTypeCode},
          classification_source = ${created.classificationSource},
          classification_confidence = ${created.classificationConfidence ?? null},
          classification_reason = ${created.classificationReason},
          repository_item_id = ${created.repositoryItemId},
          file_id = ${created.fileId},
          document_record_id = ${created.documentRecordId},
          extracted_field_count = ${Object.keys(created.extraction?.extractedData ?? {}).length},
          synced_client_field_count = ${created.syncedClientFieldKeys?.length ?? 0},
          error_message = NULL,
          completed_at = CURRENT_TIMESTAMP,
          updated_at = CURRENT_TIMESTAMP
        WHERE id = ${jobFile.id}
      `;

      // Best-effort audit trail; a logging failure must not fail the file.
      await writeAuditLog({
        lawFirmId: job.law_firm_id,
        officeId: null,
        actorUserId: job.created_by_user_id,
        entityType: "document_record",
        entityId: created.documentRecordId,
        action: "document.folder_upload",
        afterJson: {
          documentTypeCode: created.documentTypeCode,
          classificationSource: created.classificationSource,
          classificationReason: created.classificationReason,
          title: jobFile.title,
          checksum: createHash("sha256").update(bytes).digest("hex"),
          originalFileName: jobFile.staging_original_file_name || jobFile.file_name,
          jobId: job.id,
        },
      }).catch(() => undefined);

      // Best-effort staging cleanup; the document itself is already stored.
      await deleteBinaryFile({
        storageProvider: jobFile.staging_storage_provider,
        objectKey: jobFile.staging_object_key,
      }).catch(() => undefined);
    } catch (error) {
      // Per-file failure: record a sanitized message and keep going.
      const errorMessage = sanitizeRepositoryDocumentProcessingErrorMessage(error);

      await prisma.$executeRaw`
        UPDATE repository_folder_upload_job_files
        SET
          status_code = 'failed',
          error_message = ${errorMessage},
          completed_at = CURRENT_TIMESTAMP,
          updated_at = CURRENT_TIMESTAMP
        WHERE id = ${jobFile.id}
      `;
    }

    await recalculateRepositoryFolderUploadJob(job.id);
  }

  const refreshedJob = await finalizeRepositoryFolderUploadJob(job.id);

  // Consolidation runs only for case-attached jobs with at least one success
  // that have not already consolidated.
  if (
    refreshedJob &&
    refreshedJob.case_id &&
    toNumber(refreshedJob.succeeded_files) > 0 &&
    refreshedJob.consolidation_status_code !== "completed"
  ) {
    if (!refreshedJob.created_by_user_id) {
      await updateRepositoryFolderUploadJobStatus({
        jobId: refreshedJob.id,
        consolidationStatusCode: "failed",
        errorMessage: "The import job has no actor user available for fact consolidation.",
      });
      await finalizeRepositoryFolderUploadJob(job.id);
      return;
    }

    await updateRepositoryFolderUploadJobStatus({
      jobId: refreshedJob.id,
      stageCode: "consolidating_case_facts",
      statusCode: "processing",
      consolidationStatusCode: "processing",
      errorMessage: null,
      clearCompletedAt: true,
    });

    try {
      const consolidated = await consolidateCaseFacts({
        lawFirmId: refreshedJob.law_firm_id,
        caseId: refreshedJob.case_id,
        actorUserId: refreshedJob.created_by_user_id,
      });

      await updateRepositoryFolderUploadJobStatus({
        jobId: refreshedJob.id,
        consolidationStatusCode: "completed",
        consolidationAiRunId: consolidated.aiRunId,
        consolidatedFactCount: consolidated.createdFactCount,
      });
    } catch (error) {
      // Consolidation failure is recorded but does not fail the file import.
      await updateRepositoryFolderUploadJobStatus({
        jobId: refreshedJob.id,
        consolidationStatusCode: "failed",
        errorMessage:
          error instanceof Error ?
            error.message :
            "Case fact consolidation failed after the folder import.",
      });
    }
  } else if (refreshedJob) {
    // No consolidation applicable: mark skipped (or keep an existing status).
    await updateRepositoryFolderUploadJobStatus({
      jobId: refreshedJob.id,
      consolidationStatusCode:
        refreshedJob.case_id && toNumber(refreshedJob.succeeded_files) === 0 ? "skipped" : refreshedJob.consolidation_status_code || "skipped",
    });
  }

  await finalizeRepositoryFolderUploadJob(job.id);
}

/**
 * Creates a new folder-upload job in 'queued' state and returns its id.
 *
 * Consolidation starts as 'pending' only when the job is attached to a case;
 * otherwise it is 'skipped' up front. Files are added separately via
 * `addRepositoryFolderUploadJobFile`, and processing starts via
 * `startRepositoryFolderUploadJob`.
 */
export async function createRepositoryFolderUploadJob(input: {
  lawFirmId: string;
  clientId: string;
  caseId?: string | null;
  createdByUserId?: string | null;
  fallbackDocumentTypeCode?: string | null;
}) {
  const jobId = createId();

  await prisma.$executeRaw`
    INSERT INTO repository_folder_upload_jobs (
      id,
      law_firm_id,
      client_id,
      case_id,
      created_by_user_id,
      fallback_document_type_code,
      status_code,
      stage_code,
      consolidation_status_code,
      created_at,
      updated_at
    ) VALUES (
      ${jobId},
      ${input.lawFirmId},
      ${input.clientId},
      ${input.caseId ?? null},
      ${input.createdByUserId ?? null},
      ${String(input.fallbackDocumentTypeCode ?? "").trim() || null},
      'queued',
      'queued',
      ${input.caseId ? "pending" : "skipped"},
      CURRENT_TIMESTAMP,
      CURRENT_TIMESTAMP
    )
  `;

  return jobId;
}

/**
 * Registers one file on an existing job and refreshes the job counters.
 *
 * A file inserted as 'failed' (e.g. its staging upload failed before the job
 * started) gets its started/completed timestamps stamped immediately so it
 * counts as processed. Returns the new file row id.
 */
export async function addRepositoryFolderUploadJobFile(input: FolderUploadJobFileStagingInput) {
  const fileId = createId();
  const normalizedStatus = input.initialStatusCode ?? "queued";

  await prisma.$executeRaw`
    INSERT INTO repository_folder_upload_job_files (
      id,
      job_id,
      sort_order,
      file_name,
      relative_path,
      title,
      mime_type,
      size_bytes,
      staging_storage_provider,
      staging_storage_bucket,
      staging_object_key,
      staging_storage_region,
      staging_original_file_name,
      status_code,
      error_message,
      started_at,
      completed_at,
      created_at,
      updated_at
    ) VALUES (
      ${fileId},
      ${input.jobId},
      ${input.sortOrder},
      ${input.fileName},
      ${normalizeRelativePath(input.relativePath)},
      ${input.title},
      ${input.mimeType},
      ${input.sizeBytes},
      ${input.stagingStorageProvider ?? null},
      ${input.stagingStorageBucket ?? null},
      ${input.stagingObjectKey ?? null},
      ${input.stagingStorageRegion ?? null},
      ${input.stagingOriginalFileName ?? null},
      ${normalizedStatus},
      ${input.errorMessage ?? null},
      ${normalizedStatus === "failed" ? new Date() : null},
      ${normalizedStatus === "failed" ? new Date() : null},
      CURRENT_TIMESTAMP,
      CURRENT_TIMESTAMP
    )
  `;

  await recalculateRepositoryFolderUploadJob(input.jobId);
  return fileId;
}

/**
 * Writes raw upload bytes to staging storage and returns the metadata needed
 * to register the file on a job (`addRepositoryFolderUploadJobFile`).
 *
 * The storage provider/bucket/region values are hardcoded for the local dev
 * backend used by `saveBinaryFile`.
 *
 * NOTE(review): `relativePath` is client-supplied and flows into the storage
 * file name; `normalizeRelativePath` does not strip ".." segments — confirm
 * `saveBinaryFile` sanitizes its `fileName` against path traversal.
 */
export async function stageRepositoryFolderUploadFile(input: {
  lawFirmId: string;
  caseId?: string | null;
  originalRelativePath: string;
  mimeType: string;
  bytes: Buffer;
}) {
  const relativePath = normalizeRelativePath(input.originalRelativePath);
  const fileName = getFileNameFromRelativePath(relativePath);
  // Prefix with a fresh id so same-named files in one folder cannot collide.
  const staged = await saveBinaryFile({
    lawFirmId: input.lawFirmId,
    caseId: input.caseId ?? null,
    fileName: `${createId()}_${relativePath || fileName}`,
    bytes: input.bytes,
    kind: "uploads",
  });

  return {
    fileName,
    relativePath,
    storageProvider: "local_dev",
    storageBucket: "workspace",
    storageRegion: "local",
    objectKey: staged.relativeObjectKey,
    originalFileName: relativePath || fileName,
    sizeBytes: input.bytes.length,
  };
}

/**
 * Kicks off background processing for a job unless it is already running.
 *
 * The in-flight promise is tracked in `activeRepositoryFolderUploadJobs` so
 * concurrent callers (and the restart-resume loop) never double-process the
 * same job; the entry is removed when the run settles. Failures are logged,
 * recorded on the job row, and the job is finalized.
 */
export async function startRepositoryFolderUploadJob(input: {
  jobId: string;
  logger?: FastifyBaseLogger | null;
}) {
  const { jobId, logger } = input;

  if (activeRepositoryFolderUploadJobs.has(jobId)) {
    return;
  }

  const run = async () => {
    try {
      await processRepositoryFolderUploadJobInternal(input);
    } catch (error) {
      logger?.error({ error, jobId }, "Repository folder upload job failed");
      await updateRepositoryFolderUploadJobStatus({
        jobId,
        stageCode: "failed",
        statusCode: "failed",
        errorMessage:
          error instanceof Error ? error.message : "Unexpected repository folder upload failure.",
      });
      await finalizeRepositoryFolderUploadJob(jobId);
    } finally {
      activeRepositoryFolderUploadJobs.delete(jobId);
    }
  };

  // Registered synchronously so a re-entrant call in the same tick dedupes.
  activeRepositoryFolderUploadJobs.set(jobId, run());
}

/**
 * Returns the tenant-scoped job detail (summary + per-file rows), or null
 * when the job does not exist for this firm.
 *
 * As a side effect, a job found in 'queued'/'processing' with no in-process
 * worker is (re)started — this lazily resumes jobs stalled by a restart when
 * a client polls for progress.
 */
export async function getRepositoryFolderUploadJobDetail(input: {
  lawFirmId: string;
  jobId: string;
  logger?: FastifyBaseLogger | null;
}) {
  const job = await getRepositoryFolderUploadJobRow(input.lawFirmId, input.jobId);

  if (!job) {
    return null;
  }

  if (
    (job.status_code === "queued" || job.status_code === "processing") &&
    !activeRepositoryFolderUploadJobs.has(job.id)
  ) {
    await startRepositoryFolderUploadJob({
      jobId: job.id,
      logger: input.logger,
    });
  }

  const files = await listRepositoryFolderUploadJobFileRows(job.id);
  return {
    ...buildRepositoryFolderUploadJobSummary(job),
    files: files.map((item) => ({
      id: item.id,
      sortOrder: toNumber(item.sort_order),
      fileName: item.file_name,
      relativePath: item.relative_path,
      title: item.title,
      mimeType: item.mime_type,
      sizeBytes: toNumber(item.size_bytes),
      statusCode: item.status_code,
      documentTypeCode: item.document_type_code,
      classificationSource: item.classification_source,
      // The driver may return numeric columns as strings; coerce explicitly.
      classificationConfidence:
        item.classification_confidence === null ? null : Number(item.classification_confidence),
      classificationReason: item.classification_reason,
      repositoryItemId: item.repository_item_id,
      fileId: item.file_id,
      documentRecordId: item.document_record_id,
      extractedFieldCount: toNumber(item.extracted_field_count),
      syncedClientFieldCount: toNumber(item.synced_client_field_count),
      errorMessage: item.error_message,
      startedAt: toIsoString(item.started_at),
      completedAt: toIsoString(item.completed_at),
    })),
  } satisfies RepositoryFolderUploadJobDetail;
}

/**
 * Lists recent upload jobs for a firm, newest first, optionally filtered by
 * client and/or case. `limit` is clamped to 1..10 (default 5).
 *
 * Like `getRepositoryFolderUploadJobDetail`, any listed job still in
 * 'queued'/'processing' without an in-process worker is restarted, resuming
 * work stalled by a server restart.
 */
export async function listRepositoryFolderUploadJobs(input: {
  lawFirmId: string;
  clientId?: string | null;
  caseId?: string | null;
  limit?: number;
  logger?: FastifyBaseLogger | null;
}) {
  const limit = Math.min(10, Math.max(1, input.limit ?? 5));
  const clientId = String(input.clientId ?? "").trim();
  const caseId = String(input.caseId ?? "").trim();

  // The "(param IS NULL OR column = param)" pattern makes each filter
  // optional while keeping the query fully parameterized.
  const jobs = await prisma.$queryRaw<Array<RepositoryFolderUploadJobRow>>`
    SELECT
      id,
      law_firm_id,
      client_id,
      case_id,
      created_by_user_id,
      fallback_document_type_code,
      status_code,
      stage_code,
      total_files,
      processed_files,
      succeeded_files,
      failed_files,
      extracted_field_count,
      synced_client_field_count,
      classification_ai_count,
      classification_heuristic_count,
      classification_fallback_count,
      current_file_name,
      consolidation_status_code,
      consolidation_ai_run_id,
      consolidated_fact_count,
      error_message,
      started_at,
      completed_at,
      created_at,
      updated_at
    FROM repository_folder_upload_jobs
    WHERE law_firm_id = ${input.lawFirmId}
      AND (${clientId || null} IS NULL OR client_id = ${clientId || null})
      AND (${caseId || null} IS NULL OR case_id = ${caseId || null})
    ORDER BY created_at DESC
    LIMIT ${limit}
  `;

  for (const job of jobs) {
    if (
      (job.status_code === "queued" || job.status_code === "processing") &&
      !activeRepositoryFolderUploadJobs.has(job.id)
    ) {
      await startRepositoryFolderUploadJob({
        jobId: job.id,
        logger: input.logger,
      });
    }
  }

  return jobs.map(buildRepositoryFolderUploadJobSummary);
}

/**
 * Restarts every job left in 'queued'/'processing' (e.g. after a server
 * restart), oldest first. Jobs already tracked in-process are skipped.
 */
export async function resumePendingRepositoryFolderUploadJobs(logger?: FastifyBaseLogger | null) {
  const pendingJobs = await prisma.$queryRaw<Array<{ id: string }>>`
    SELECT id
    FROM repository_folder_upload_jobs
    WHERE status_code IN ('queued', 'processing')
    ORDER BY created_at ASC
  `;

  for (const { id } of pendingJobs) {
    if (activeRepositoryFolderUploadJobs.has(id)) {
      continue;
    }
    await startRepositoryFolderUploadJob({ jobId: id, logger });
  }
}
