Merged
CreateChunkModal
@@ -2,7 +2,6 @@

import { useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import {
Button,
Label,
@@ -14,7 +13,7 @@ import {
Textarea,
} from '@/components/emcn'
import type { DocumentData } from '@/lib/knowledge/types'
import { knowledgeKeys } from '@/hooks/queries/knowledge'
import { useCreateChunk } from '@/hooks/queries/knowledge'

const logger = createLogger('CreateChunkModal')

@@ -31,74 +30,53 @@ export function CreateChunkModal({
document,
knowledgeBaseId,
}: CreateChunkModalProps) {
const queryClient = useQueryClient()
const {
mutate: createChunk,
isPending: isCreating,
error: mutationError,
reset: resetMutation,
} = useCreateChunk()
const [content, setContent] = useState('')
const [isCreating, setIsCreating] = useState(false)
const [error, setError] = useState<string | null>(null)
const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
const isProcessingRef = useRef(false)

const error = mutationError?.message ?? null
const hasUnsavedChanges = content.trim().length > 0

const handleCreateChunk = async () => {
const handleCreateChunk = () => {
if (!document || content.trim().length === 0 || isProcessingRef.current) {
if (isProcessingRef.current) {
logger.warn('Chunk creation already in progress, ignoring duplicate request')
}
return
}

try {
isProcessingRef.current = true
setIsCreating(true)
setError(null)

const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${document.id}/chunks`,
{
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
content: content.trim(),
enabled: true,
}),
}
)

if (!response.ok) {
const result = await response.json()
throw new Error(result.error || 'Failed to create chunk')
isProcessingRef.current = true

createChunk(
{
knowledgeBaseId,
documentId: document.id,
content: content.trim(),
enabled: true,
},
{
onSuccess: () => {
isProcessingRef.current = false
onClose()
},
onError: () => {
isProcessingRef.current = false
},
}

const result = await response.json()

if (result.success && result.data) {
logger.info('Chunk created successfully:', result.data.id)

await queryClient.invalidateQueries({
queryKey: knowledgeKeys.detail(knowledgeBaseId),
})

onClose()
} else {
throw new Error(result.error || 'Failed to create chunk')
}
} catch (err) {
logger.error('Error creating chunk:', err)
setError(err instanceof Error ? err.message : 'An error occurred')
} finally {
isProcessingRef.current = false
setIsCreating(false)
}
)
}

const onClose = () => {
onOpenChange(false)
setContent('')
setError(null)
setShowUnsavedChangesAlert(false)
resetMutation()
}

const handleCloseAttempt = () => {
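Note: the useCreateChunk hook imported above lives in '@/hooks/queries/knowledge' and is not part of this diff. A minimal sketch of a mutation hook consistent with this call site might look like the following; the endpoint, request body, and invalidation key are taken from the removed inline code, while the exact module layout (including knowledgeKeys as the query-key factory) is an assumption.

// Hypothetical sketch only — the real useCreateChunk in '@/hooks/queries/knowledge' is not shown in this PR.
import { useMutation, useQueryClient } from '@tanstack/react-query'
import { knowledgeKeys } from '@/hooks/queries/knowledge' // assumed query-key factory; likely defined alongside the hook

interface CreateChunkInput {
  knowledgeBaseId: string
  documentId: string
  content: string
  enabled: boolean
}

export function useCreateChunk() {
  const queryClient = useQueryClient()

  return useMutation({
    // Same POST request the component previously issued inline.
    mutationFn: async ({ knowledgeBaseId, documentId, content, enabled }: CreateChunkInput) => {
      const response = await fetch(
        `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks`,
        {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({ content, enabled }),
        }
      )
      const result = await response.json()
      if (!response.ok || !result.success) {
        throw new Error(result.error || 'Failed to create chunk')
      }
      return result.data
    },
    onSuccess: (_data, variables) => {
      // Mirrors the cache invalidation the component used to perform after a successful create.
      queryClient.invalidateQueries({ queryKey: knowledgeKeys.detail(variables.knowledgeBaseId) })
    },
  })
}

With a hook shaped like this, the component's per-call onSuccess/onError callbacks handle only UI concerns (clearing isProcessingRef and closing the modal), while the request and cache invalidation stay in the hook.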
DeleteChunkModal
@@ -1,13 +1,8 @@
'use client'

import { useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { Button, Modal, ModalBody, ModalContent, ModalFooter, ModalHeader } from '@/components/emcn'
import type { ChunkData } from '@/lib/knowledge/types'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

const logger = createLogger('DeleteChunkModal')
import { useDeleteChunk } from '@/hooks/queries/knowledge'

interface DeleteChunkModalProps {
chunk: ChunkData | null
@@ -24,44 +19,12 @@ export function DeleteChunkModal({
isOpen,
onClose,
}: DeleteChunkModalProps) {
const queryClient = useQueryClient()
const [isDeleting, setIsDeleting] = useState(false)
const { mutate: deleteChunk, isPending: isDeleting } = useDeleteChunk()

const handleDeleteChunk = async () => {
const handleDeleteChunk = () => {
if (!chunk || isDeleting) return

try {
setIsDeleting(true)

const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks/${chunk.id}`,
{
method: 'DELETE',
}
)

if (!response.ok) {
throw new Error('Failed to delete chunk')
}

const result = await response.json()

if (result.success) {
logger.info('Chunk deleted successfully:', chunk.id)

await queryClient.invalidateQueries({
queryKey: knowledgeKeys.detail(knowledgeBaseId),
})

onClose()
} else {
throw new Error(result.error || 'Failed to delete chunk')
}
} catch (err) {
logger.error('Error deleting chunk:', err)
} finally {
setIsDeleting(false)
}
deleteChunk({ knowledgeBaseId, documentId, chunkId: chunk.id }, { onSuccess: onClose })
}

if (!chunk) return null
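Similarly, useDeleteChunk is consumed here but not defined in this diff. A minimal sketch under the same assumptions, with the DELETE endpoint and error handling taken from the removed code and the invalidation key assumed to match the create path, could be:

// Hypothetical sketch only — the real useDeleteChunk in '@/hooks/queries/knowledge' is not shown in this PR.
import { useMutation, useQueryClient } from '@tanstack/react-query'
import { knowledgeKeys } from '@/hooks/queries/knowledge' // assumed query-key factory

interface DeleteChunkInput {
  knowledgeBaseId: string
  documentId: string
  chunkId: string
}

export function useDeleteChunk() {
  const queryClient = useQueryClient()

  return useMutation({
    // Same DELETE request the component previously issued inline.
    mutationFn: async ({ knowledgeBaseId, documentId, chunkId }: DeleteChunkInput) => {
      const response = await fetch(
        `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks/${chunkId}`,
        { method: 'DELETE' }
      )
      if (!response.ok) {
        throw new Error('Failed to delete chunk')
      }
      const result = await response.json()
      if (!result.success) {
        throw new Error(result.error || 'Failed to delete chunk')
      }
      return result
    },
    onSuccess: (_data, { knowledgeBaseId }) => {
      // Mirrors the invalidation the removed inline code performed.
      queryClient.invalidateQueries({ queryKey: knowledgeKeys.detail(knowledgeBaseId) })
    },
  })
}

The per-call { onSuccess: onClose } passed to mutate then closes the modal once the mutation succeeds.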
DocumentTagsModal
@@ -25,6 +25,7 @@ import {
} from '@/hooks/kb/use-knowledge-base-tag-definitions'
import { useNextAvailableSlot } from '@/hooks/kb/use-next-available-slot'
import { type TagDefinitionInput, useTagDefinitions } from '@/hooks/kb/use-tag-definitions'
import { useUpdateDocumentTags } from '@/hooks/queries/knowledge'

const logger = createLogger('DocumentTagsModal')

@@ -58,8 +59,6 @@ function formatValueForDisplay(value: string, fieldType: string): string {
try {
const date = new Date(value)
if (Number.isNaN(date.getTime())) return value
// For UTC dates, display the UTC date to prevent timezone shifts
// e.g., 2002-05-16T00:00:00.000Z should show as "May 16, 2002" not "May 15, 2002"
if (typeof value === 'string' && (value.endsWith('Z') || /[+-]\d{2}:\d{2}$/.test(value))) {
return new Date(
date.getUTCFullYear(),
@@ -96,6 +95,7 @@ export function DocumentTagsModal({
const documentTagHook = useTagDefinitions(knowledgeBaseId, documentId)
const kbTagHook = useKnowledgeBaseTagDefinitions(knowledgeBaseId)
const { getNextAvailableSlot: getServerNextSlot } = useNextAvailableSlot(knowledgeBaseId)
const { mutateAsync: updateDocumentTags } = useUpdateDocumentTags()

const { saveTagDefinitions, tagDefinitions, fetchTagDefinitions } = documentTagHook
const { tagDefinitions: kbTagDefinitions, fetchTagDefinitions: refreshTagDefinitions } = kbTagHook
@@ -118,7 +118,6 @@
const definition = definitions.find((def) => def.tagSlot === slot)

if (rawValue !== null && rawValue !== undefined && definition) {
// Convert value to string for storage
const stringValue = String(rawValue).trim()
if (stringValue) {
tags.push({
@@ -142,41 +141,34 @@
async (tagsToSave: DocumentTag[]) => {
if (!documentData) return

try {
const tagData: Record<string, string> = {}

// Only include tags that have values (omit empty ones)
// Use empty string for slots that should be cleared
ALL_TAG_SLOTS.forEach((slot) => {
const tag = tagsToSave.find((t) => t.slot === slot)
if (tag?.value.trim()) {
tagData[slot] = tag.value.trim()
} else {
// Use empty string to clear a tag (API schema expects string, not null)
tagData[slot] = ''
}
})

const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(tagData),
})

if (!response.ok) {
throw new Error('Failed to update document tags')
const tagData: Record<string, string> = {}

ALL_TAG_SLOTS.forEach((slot) => {
const tag = tagsToSave.find((t) => t.slot === slot)
if (tag?.value.trim()) {
tagData[slot] = tag.value.trim()
} else {
tagData[slot] = ''
}
})

onDocumentUpdate?.(tagData as Record<string, string>)
await fetchTagDefinitions()
} catch (error) {
logger.error('Error updating document tags:', error)
throw error
}
await updateDocumentTags({
knowledgeBaseId,
documentId,
tags: tagData,
})

onDocumentUpdate?.(tagData)
await fetchTagDefinitions()
},
[documentData, knowledgeBaseId, documentId, fetchTagDefinitions, onDocumentUpdate]
[
documentData,
knowledgeBaseId,
documentId,
updateDocumentTags,
fetchTagDefinitions,
onDocumentUpdate,
]
)

const handleRemoveTag = async (index: number) => {
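For completeness, useUpdateDocumentTags is also consumed via mutateAsync without being defined in this diff. A sketch consistent with this call site, with the PUT endpoint and body taken from the removed code and the invalidation step assumed to follow the same pattern as the other knowledge mutations, might be:

// Hypothetical sketch only — the real useUpdateDocumentTags in '@/hooks/queries/knowledge' is not shown in this PR.
import { useMutation, useQueryClient } from '@tanstack/react-query'
import { knowledgeKeys } from '@/hooks/queries/knowledge' // assumed query-key factory

interface UpdateDocumentTagsInput {
  knowledgeBaseId: string
  documentId: string
  tags: Record<string, string> // empty string clears a tag slot, per the API schema noted above
}

export function useUpdateDocumentTags() {
  const queryClient = useQueryClient()

  return useMutation({
    // Same PUT request the removed inline code issued.
    mutationFn: async ({ knowledgeBaseId, documentId, tags }: UpdateDocumentTagsInput) => {
      const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, {
        method: 'PUT',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(tags),
      })
      if (!response.ok) {
        throw new Error('Failed to update document tags')
      }
      return response.json()
    },
    onSuccess: (_data, { knowledgeBaseId }) => {
      // Assumed invalidation, matching the other knowledge mutations.
      queryClient.invalidateQueries({ queryKey: knowledgeKeys.detail(knowledgeBaseId) })
    },
  })
}

Because the component awaits mutateAsync, a failed request still rejects in the calling code, preserving the rethrow behavior of the removed try/catch.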