This will load data from OpenSearch. It does not handle prompts yet.
@@ -9,6 +9,9 @@ from .ui.callbacks.interactions import InteractionCallbacks


def create_app():
    app = dash.Dash(__name__, external_stylesheets=[dbc.themes.BOOTSTRAP])

    # Allow callbacks to components that are dynamically created in tabs
    app.config.suppress_callback_exceptions = True

    layout_manager = AppLayout()
    app.layout = layout_manager.create_layout()
@@ -73,6 +73,12 @@ class AppSettings:
    HOST = os.getenv("EMBEDDINGBUDDY_HOST", "127.0.0.1")
    PORT = int(os.getenv("EMBEDDINGBUDDY_PORT", "8050"))

    # OpenSearch Configuration
    OPENSEARCH_DEFAULT_SIZE = 100
    OPENSEARCH_SAMPLE_SIZE = 5
    OPENSEARCH_CONNECTION_TIMEOUT = 30
    OPENSEARCH_VERIFY_CERTS = True

    # Bootstrap Theme
    EXTERNAL_STYLESHEETS = [
        "https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/css/bootstrap.min.css"
@@ -1,6 +1,7 @@
import numpy as np
from typing import List, Optional, Tuple
from ..models.schemas import Document, ProcessedData
from ..models.field_mapper import FieldMapper
from .parser import NDJSONParser


@@ -26,6 +27,42 @@ class DataProcessor:
        except Exception as e:
            return ProcessedData(documents=[], embeddings=np.array([]), error=str(e))

    def process_opensearch_data(
        self, raw_documents: List[dict], field_mapping
    ) -> ProcessedData:
        """Process raw OpenSearch documents using field mapping."""
        try:
            # Transform documents using field mapping
            transformed_docs = FieldMapper.transform_documents(
                raw_documents, field_mapping
            )

            # Parse transformed documents
            documents = []
            for doc_dict in transformed_docs:
                try:
                    # Ensure required fields are present with defaults if needed
                    if "id" not in doc_dict or not doc_dict["id"]:
                        doc_dict["id"] = f"doc_{len(documents)}"

                    doc = Document(**doc_dict)
                    documents.append(doc)
                except Exception:
                    continue  # Skip invalid documents

            if not documents:
                return ProcessedData(
                    documents=[],
                    embeddings=np.array([]),
                    error="No valid documents after transformation",
                )

            embeddings = self._extract_embeddings(documents)
            return ProcessedData(documents=documents, embeddings=embeddings)

        except Exception as e:
            return ProcessedData(documents=[], embeddings=np.array([]), error=str(e))

    def _extract_embeddings(self, documents: List[Document]) -> np.ndarray:
        if not documents:
            return np.array([])
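For reference, a minimal sketch (not part of the commit) of how process_opensearch_data could be driven outside Dash, assuming the package is importable as embeddingbuddy; the raw documents and field names below are made up:

# Hypothetical standalone use of the new processing path.
from embeddingbuddy.data.processor import DataProcessor
from embeddingbuddy.models.field_mapper import FieldMapper

raw_docs = [{"vector": [0.1, 0.2, 0.3], "body": "hello world", "doc_id": "1"}]  # fake documents
mapping = FieldMapper.create_mapping_from_dict(
    {"embedding": "vector", "text": "body", "id": "doc_id"}
)
result = DataProcessor().process_opensearch_data(raw_docs, mapping)
print(result.error or f"{len(result.documents)} documents ready")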
src/embeddingbuddy/data/sources/__init__.py (new file, 0 lines)

src/embeddingbuddy/data/sources/opensearch.py (new file, 189 lines)
@@ -0,0 +1,189 @@
from typing import Dict, List, Optional, Any, Tuple
import logging
from opensearchpy import OpenSearch
from opensearchpy.exceptions import OpenSearchException


logger = logging.getLogger(__name__)


class OpenSearchClient:
    def __init__(self):
        self.client: Optional[OpenSearch] = None
        self.connection_info: Optional[Dict[str, Any]] = None

    def connect(
        self,
        url: str,
        username: Optional[str] = None,
        password: Optional[str] = None,
        api_key: Optional[str] = None,
        verify_certs: bool = True,
    ) -> Tuple[bool, str]:
        """
        Connect to OpenSearch instance.

        Returns:
            Tuple of (success: bool, message: str)
        """
        try:
            # Parse URL to extract host and port
            if url.startswith("http://") or url.startswith("https://"):
                host = url
            else:
                host = f"https://{url}"

            # Build auth configuration
            auth_config = {}
            if username and password:
                auth_config["http_auth"] = (username, password)
            elif api_key:
                auth_config["api_key"] = api_key

            # Create client
            self.client = OpenSearch([host], verify_certs=verify_certs, **auth_config)

            # Test connection
            info = self.client.info()
            self.connection_info = {
                "url": host,
                "cluster_name": info.get("cluster_name", "Unknown"),
                "version": info.get("version", {}).get("number", "Unknown"),
            }

            return (
                True,
                f"Connected to {info.get('cluster_name', 'OpenSearch cluster')}",
            )

        except OpenSearchException as e:
            logger.error(f"OpenSearch connection error: {e}")
            return False, f"Connection failed: {str(e)}"
        except Exception as e:
            logger.error(f"Unexpected error connecting to OpenSearch: {e}")
            return False, f"Unexpected error: {str(e)}"

    def get_index_mapping(self, index_name: str) -> Tuple[bool, Optional[Dict], str]:
        """
        Get the mapping for a specific index.

        Returns:
            Tuple of (success: bool, mapping: Dict or None, message: str)
        """
        if not self.client:
            return False, None, "Not connected to OpenSearch"

        try:
            mapping = self.client.indices.get_mapping(index=index_name)
            return True, mapping, "Mapping retrieved successfully"
        except OpenSearchException as e:
            logger.error(f"Error getting mapping for index {index_name}: {e}")
            return False, None, f"Failed to get mapping: {str(e)}"

    def analyze_fields(self, index_name: str) -> Tuple[bool, Optional[Dict], str]:
        """
        Analyze index fields to detect potential embedding and text fields.

        Returns:
            Tuple of (success: bool, analysis: Dict or None, message: str)
        """
        success, mapping, message = self.get_index_mapping(index_name)
        if not success:
            return False, None, message

        try:
            # Extract field information from mapping
            index_mapping = mapping[index_name]["mappings"]["properties"]

            analysis = {
                "vector_fields": [],
                "text_fields": [],
                "keyword_fields": [],
                "numeric_fields": [],
                "all_fields": [],
            }

            for field_name, field_info in index_mapping.items():
                field_type = field_info.get("type", "unknown")
                analysis["all_fields"].append(field_name)

                if field_type == "dense_vector":
                    analysis["vector_fields"].append(
                        {
                            "name": field_name,
                            "dimension": field_info.get("dimension", "unknown"),
                        }
                    )
                elif field_type == "text":
                    analysis["text_fields"].append(field_name)
                elif field_type == "keyword":
                    analysis["keyword_fields"].append(field_name)
                elif field_type in ["integer", "long", "float", "double"]:
                    analysis["numeric_fields"].append(field_name)

            return True, analysis, "Field analysis completed"

        except Exception as e:
            logger.error(f"Error analyzing fields: {e}")
            return False, None, f"Field analysis failed: {str(e)}"

    def fetch_sample_data(
        self, index_name: str, size: int = 5
    ) -> Tuple[bool, List[Dict], str]:
        """
        Fetch sample documents from the index.

        Returns:
            Tuple of (success: bool, documents: List[Dict], message: str)
        """
        if not self.client:
            return False, [], "Not connected to OpenSearch"

        try:
            response = self.client.search(
                index=index_name, body={"query": {"match_all": {}}, "size": size}
            )

            documents = [hit["_source"] for hit in response["hits"]["hits"]]
            return True, documents, f"Retrieved {len(documents)} sample documents"

        except OpenSearchException as e:
            logger.error(f"Error fetching sample data: {e}")
            return False, [], f"Failed to fetch sample data: {str(e)}"

    def fetch_data(
        self, index_name: str, size: int = 100
    ) -> Tuple[bool, List[Dict], str]:
        """
        Fetch documents from the index.

        Returns:
            Tuple of (success: bool, documents: List[Dict], message: str)
        """
        if not self.client:
            return False, [], "Not connected to OpenSearch"

        try:
            response = self.client.search(
                index=index_name, body={"query": {"match_all": {}}, "size": size}
            )

            documents = [hit["_source"] for hit in response["hits"]["hits"]]
            total_hits = response["hits"]["total"]["value"]

            message = f"Retrieved {len(documents)} documents from {total_hits} total"
            return True, documents, message

        except OpenSearchException as e:
            logger.error(f"Error fetching data: {e}")
            return False, [], f"Failed to fetch data: {str(e)}"

    def disconnect(self):
        """Disconnect from OpenSearch."""
        if self.client:
            self.client = None
            self.connection_info = None

    def is_connected(self) -> bool:
        """Check if connected to OpenSearch."""
        return self.client is not None
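A minimal usage sketch (not part of the commit) for OpenSearchClient; the URL, credentials, and index name are placeholders, and the import path assumes the package is importable as embeddingbuddy:

# Hypothetical example exercising the client outside the Dash callbacks.
from embeddingbuddy.data.sources.opensearch import OpenSearchClient

client = OpenSearchClient()
ok, msg = client.connect("https://localhost:9200", username="admin", password="admin")
print(msg)
if ok:
    ok, analysis, msg = client.analyze_fields("my-embeddings-index")  # placeholder index
    if ok:
        print(analysis["vector_fields"], analysis["text_fields"])
    ok, docs, msg = client.fetch_data("my-embeddings-index", size=10)
    print(msg)
    client.disconnect()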
src/embeddingbuddy/models/field_mapper.py (new file, 219 lines)
@@ -0,0 +1,219 @@
from dataclasses import dataclass
from typing import Dict, List, Optional, Any
import logging


logger = logging.getLogger(__name__)


@dataclass
class FieldMapping:
    """Configuration for mapping OpenSearch fields to standard format."""

    embedding_field: str
    text_field: str
    id_field: Optional[str] = None
    category_field: Optional[str] = None
    subcategory_field: Optional[str] = None
    tags_field: Optional[str] = None


class FieldMapper:
    """Handles field mapping and data transformation from OpenSearch to standard format."""

    @staticmethod
    def suggest_mappings(field_analysis: Dict) -> Dict[str, List[str]]:
        """
        Suggest field mappings based on field analysis.

        Each dropdown will show ALL available fields, but ordered by relevance
        with the most likely candidates first.

        Args:
            field_analysis: Analysis results from OpenSearchClient.analyze_fields

        Returns:
            Dictionary with suggested fields for each mapping (ordered by relevance)
        """
        all_fields = field_analysis.get("all_fields", [])
        vector_fields = [vf["name"] for vf in field_analysis.get("vector_fields", [])]
        text_fields = field_analysis.get("text_fields", [])
        keyword_fields = field_analysis.get("keyword_fields", [])
        numeric_fields = field_analysis.get("numeric_fields", [])

        # Helper function to create ordered suggestions
        def create_ordered_suggestions(primary_candidates, all_available_fields):
            # Start with primary candidates, then add all other fields
            ordered = []
            # Add primary candidates first
            for field in primary_candidates:
                if field in all_available_fields and field not in ordered:
                    ordered.append(field)
            # Add remaining fields
            for field in all_available_fields:
                if field not in ordered:
                    ordered.append(field)
            return ordered

        suggestions = {}

        # Embedding field suggestions (vector fields first, then all fields)
        embedding_candidates = vector_fields.copy()
        suggestions["embedding"] = create_ordered_suggestions(embedding_candidates, all_fields)

        # Text field suggestions (text fields first, then all fields)
        text_candidates = text_fields.copy()
        suggestions["text"] = create_ordered_suggestions(text_candidates, all_fields)

        # ID field suggestions (ID-like fields first, then all fields)
        id_candidates = [f for f in keyword_fields if any(
            keyword in f.lower() for keyword in ["id", "_id", "doc", "document"]
        )]
        id_candidates.append("_id")  # _id is always available
        suggestions["id"] = create_ordered_suggestions(id_candidates, all_fields)

        # Category field suggestions (category-like fields first, then all fields)
        category_candidates = [f for f in keyword_fields if any(
            keyword in f.lower() for keyword in ["category", "class", "type", "label"]
        )]
        suggestions["category"] = create_ordered_suggestions(category_candidates, all_fields)

        # Subcategory field suggestions (subcategory-like fields first, then all fields)
        subcategory_candidates = [f for f in keyword_fields if any(
            keyword in f.lower() for keyword in ["subcategory", "subclass", "subtype", "subtopic"]
        )]
        suggestions["subcategory"] = create_ordered_suggestions(subcategory_candidates, all_fields)

        # Tags field suggestions (tag-like fields first, then all fields)
        tags_candidates = [f for f in keyword_fields if any(
            keyword in f.lower() for keyword in ["tag", "tags", "keyword", "keywords"]
        )]
        suggestions["tags"] = create_ordered_suggestions(tags_candidates, all_fields)

        return suggestions

    @staticmethod
    def validate_mapping(
        mapping: FieldMapping, available_fields: List[str]
    ) -> List[str]:
        """
        Validate that the field mapping is correct.

        Returns:
            List of validation errors (empty if valid)
        """
        errors = []

        # Required fields validation
        if not mapping.embedding_field:
            errors.append("Embedding field is required")
        elif mapping.embedding_field not in available_fields:
            errors.append(
                f"Embedding field '{mapping.embedding_field}' not found in index"
            )

        if not mapping.text_field:
            errors.append("Text field is required")
        elif mapping.text_field not in available_fields:
            errors.append(f"Text field '{mapping.text_field}' not found in index")

        # Optional fields validation
        optional_fields = {
            "id_field": mapping.id_field,
            "category_field": mapping.category_field,
            "subcategory_field": mapping.subcategory_field,
            "tags_field": mapping.tags_field,
        }

        for field_name, field_value in optional_fields.items():
            if field_value and field_value not in available_fields:
                errors.append(
                    f"Field '{field_value}' for {field_name} not found in index"
                )

        return errors

    @staticmethod
    def transform_documents(
        documents: List[Dict[str, Any]], mapping: FieldMapping
    ) -> List[Dict[str, Any]]:
        """
        Transform OpenSearch documents to standard format using field mapping.

        Args:
            documents: Raw documents from OpenSearch
            mapping: Field mapping configuration

        Returns:
            List of transformed documents in standard format
        """
        transformed = []

        for doc in documents:
            try:
                # Build standard format document
                standard_doc = {}

                # Required fields
                if mapping.embedding_field in doc:
                    standard_doc["embedding"] = doc[mapping.embedding_field]
                else:
                    logger.warning(
                        f"Missing embedding field '{mapping.embedding_field}' in document"
                    )
                    continue

                if mapping.text_field in doc:
                    standard_doc["text"] = str(doc[mapping.text_field])
                else:
                    logger.warning(
                        f"Missing text field '{mapping.text_field}' in document"
                    )
                    continue

                # Optional fields
                if mapping.id_field and mapping.id_field in doc:
                    standard_doc["id"] = str(doc[mapping.id_field])

                if mapping.category_field and mapping.category_field in doc:
                    standard_doc["category"] = str(doc[mapping.category_field])

                if mapping.subcategory_field and mapping.subcategory_field in doc:
                    standard_doc["subcategory"] = str(doc[mapping.subcategory_field])

                if mapping.tags_field and mapping.tags_field in doc:
                    tags = doc[mapping.tags_field]
                    # Handle both string and list tags
                    if isinstance(tags, list):
                        standard_doc["tags"] = [str(tag) for tag in tags]
                    else:
                        standard_doc["tags"] = [str(tags)]

                transformed.append(standard_doc)

            except Exception as e:
                logger.error(f"Error transforming document: {e}")
                continue

        logger.info(f"Transformed {len(transformed)} documents out of {len(documents)}")
        return transformed

    @staticmethod
    def create_mapping_from_dict(mapping_dict: Dict[str, str]) -> FieldMapping:
        """
        Create a FieldMapping from a dictionary.

        Args:
            mapping_dict: Dictionary with field mappings

        Returns:
            FieldMapping instance
        """
        return FieldMapping(
            embedding_field=mapping_dict.get("embedding", ""),
            text_field=mapping_dict.get("text", ""),
            id_field=mapping_dict.get("id") or None,
            category_field=mapping_dict.get("category") or None,
            subcategory_field=mapping_dict.get("subcategory") or None,
            tags_field=mapping_dict.get("tags") or None,
        )
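A quick sketch (not part of the commit) showing how the FieldMapper helpers compose; the field analysis and documents below are invented to mirror the shape produced by OpenSearchClient.analyze_fields:

# Hypothetical end-to-end use: suggest -> create mapping -> validate -> transform.
from embeddingbuddy.models.field_mapper import FieldMapper

analysis = {
    "all_fields": ["embedding_vec", "content", "doc_id", "topic"],
    "vector_fields": [{"name": "embedding_vec", "dimension": 384}],
    "text_fields": ["content"],
    "keyword_fields": ["doc_id", "topic"],
    "numeric_fields": [],
}
suggestions = FieldMapper.suggest_mappings(analysis)
mapping = FieldMapper.create_mapping_from_dict(
    {"embedding": suggestions["embedding"][0], "text": suggestions["text"][0], "id": "doc_id"}
)
errors = FieldMapper.validate_mapping(mapping, analysis["all_fields"])  # [] when consistent
docs = FieldMapper.transform_documents(
    [{"embedding_vec": [0.1, 0.2], "content": "hello", "doc_id": "1"}], mapping
)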
@@ -1,10 +1,14 @@
from dash import callback, Input, Output, State
from dash import callback, Input, Output, State, no_update
from ...data.processor import DataProcessor
from ...data.sources.opensearch import OpenSearchClient
from ...models.field_mapper import FieldMapper
from ...config.settings import AppSettings


class DataProcessingCallbacks:
    def __init__(self):
        self.processor = DataProcessor()
        self.opensearch_client = OpenSearchClient()
        self._register_callbacks()

    def _register_callbacks(self):
@@ -67,6 +71,283 @@ class DataProcessingCallbacks:
                "embeddings": processed_data.embeddings.tolist(),
            }

        # OpenSearch callbacks
        @callback(
            [
                Output("tab-content", "children"),
            ],
            [Input("data-source-tabs", "active_tab")],
            prevent_initial_call=False,
        )
        def render_tab_content(active_tab):
            from ...ui.components.datasource import DataSourceComponent

            datasource = DataSourceComponent()

            if active_tab == "opensearch-tab":
                return [datasource.create_opensearch_tab()]
            else:
                return [datasource.create_file_upload_tab()]

        @callback(
            Output("auth-collapse", "is_open"),
            [Input("auth-toggle", "n_clicks")],
            [State("auth-collapse", "is_open")],
            prevent_initial_call=True,
        )
        def toggle_auth(n_clicks, is_open):
            if n_clicks:
                return not is_open
            return is_open

        @callback(
            Output("auth-toggle", "children"),
            [Input("auth-collapse", "is_open")],
            prevent_initial_call=False,
        )
        def update_auth_button_text(is_open):
            return "Hide Authentication" if is_open else "Show Authentication"

        @callback(
            [
                Output("connection-status", "children"),
                Output("field-mapping-section", "children"),
                Output("field-mapping-section", "style"),
                Output("load-data-section", "style"),
                Output("load-opensearch-data-btn", "disabled"),
                Output("embedding-field-dropdown", "options"),
                Output("text-field-dropdown", "options"),
                Output("id-field-dropdown", "options"),
                Output("category-field-dropdown", "options"),
                Output("subcategory-field-dropdown", "options"),
                Output("tags-field-dropdown", "options"),
            ],
            [Input("test-connection-btn", "n_clicks")],
            [
                State("opensearch-url", "value"),
                State("opensearch-index", "value"),
                State("opensearch-username", "value"),
                State("opensearch-password", "value"),
                State("opensearch-api-key", "value"),
            ],
            prevent_initial_call=True,
        )
        def test_opensearch_connection(
            n_clicks, url, index_name, username, password, api_key
        ):
            if not n_clicks or not url or not index_name:
                return no_update, no_update, no_update, no_update, no_update, no_update, no_update, no_update, no_update, no_update, no_update

            # Test connection
            success, message = self.opensearch_client.connect(
                url=url,
                username=username,
                password=password,
                api_key=api_key,
                verify_certs=AppSettings.OPENSEARCH_VERIFY_CERTS,
            )

            if not success:
                return (
                    self._create_status_alert(f"❌ {message}", "danger"),
                    [],
                    {"display": "none"},
                    {"display": "none"},
                    True,
                    [],  # empty options for hidden dropdowns
                    [],
                    [],
                    [],
                    [],
                    [],
                )

            # Analyze fields
            success, field_analysis, analysis_message = (
                self.opensearch_client.analyze_fields(index_name)
            )

            if not success:
                return (
                    self._create_status_alert(f"❌ {analysis_message}", "danger"),
                    [],
                    {"display": "none"},
                    {"display": "none"},
                    True,
                    [],  # empty options for hidden dropdowns
                    [],
                    [],
                    [],
                    [],
                    [],
                )

            # Generate field suggestions
            field_suggestions = FieldMapper.suggest_mappings(field_analysis)

            from ...ui.components.datasource import DataSourceComponent

            datasource = DataSourceComponent()
            field_mapping_ui = datasource.create_field_mapping_interface(
                field_suggestions
            )

            return (
                self._create_status_alert(f"✅ {message}", "success"),
                field_mapping_ui,
                {"display": "block"},
                {"display": "block"},
                False,
                [{"label": field, "value": field} for field in field_suggestions.get("embedding", [])],
                [{"label": field, "value": field} for field in field_suggestions.get("text", [])],
                [{"label": field, "value": field} for field in field_suggestions.get("id", [])],
                [{"label": field, "value": field} for field in field_suggestions.get("category", [])],
                [{"label": field, "value": field} for field in field_suggestions.get("subcategory", [])],
                [{"label": field, "value": field} for field in field_suggestions.get("tags", [])],
            )

        @callback(
            [
                Output("processed-data", "data", allow_duplicate=True),
                Output("opensearch-success-alert", "children", allow_duplicate=True),
                Output("opensearch-success-alert", "is_open", allow_duplicate=True),
                Output("opensearch-error-alert", "children", allow_duplicate=True),
                Output("opensearch-error-alert", "is_open", allow_duplicate=True),
            ],
            [Input("load-opensearch-data-btn", "n_clicks")],
            [
                State("opensearch-index", "value"),
                State("embedding-field-dropdown", "value"),
                State("text-field-dropdown", "value"),
                State("id-field-dropdown", "value"),
                State("category-field-dropdown", "value"),
                State("subcategory-field-dropdown", "value"),
                State("tags-field-dropdown", "value"),
            ],
            prevent_initial_call=True,
        )
        def load_opensearch_data(
            n_clicks,
            index_name,
            embedding_field,
            text_field,
            id_field,
            category_field,
            subcategory_field,
            tags_field,
        ):
            if not n_clicks or not index_name or not embedding_field or not text_field:
                return no_update, no_update, no_update, no_update, no_update

            try:
                # Create field mapping
                field_mapping = FieldMapper.create_mapping_from_dict(
                    {
                        "embedding": embedding_field,
                        "text": text_field,
                        "id": id_field,
                        "category": category_field,
                        "subcategory": subcategory_field,
                        "tags": tags_field,
                    }
                )

                # Fetch data from OpenSearch
                success, raw_documents, message = self.opensearch_client.fetch_data(
                    index_name, size=AppSettings.OPENSEARCH_DEFAULT_SIZE
                )

                if not success:
                    return (
                        no_update,
                        "",
                        False,
                        f"❌ Failed to fetch data: {message}",
                        True,
                    )

                # Process the data
                processed_data = self.processor.process_opensearch_data(
                    raw_documents, field_mapping
                )

                if processed_data.error:
                    return (
                        {"error": processed_data.error},
                        "",
                        False,
                        f"❌ Data processing error: {processed_data.error}",
                        True,
                    )

                success_message = f"✅ Successfully loaded {len(processed_data.documents)} documents from OpenSearch"

                return (
                    {
                        "documents": [
                            self._document_to_dict(doc)
                            for doc in processed_data.documents
                        ],
                        "embeddings": processed_data.embeddings.tolist(),
                    },
                    success_message,
                    True,
                    "",
                    False,
                )

            except Exception as e:
                return (no_update, "", False, f"❌ Unexpected error: {str(e)}", True)

        # Sync callbacks to update hidden dropdowns from UI dropdowns
        @callback(
            Output("embedding-field-dropdown", "value"),
            Input("embedding-field-dropdown-ui", "value"),
            prevent_initial_call=True,
        )
        def sync_embedding_dropdown(value):
            return value

        @callback(
            Output("text-field-dropdown", "value"),
            Input("text-field-dropdown-ui", "value"),
            prevent_initial_call=True,
        )
        def sync_text_dropdown(value):
            return value

        @callback(
            Output("id-field-dropdown", "value"),
            Input("id-field-dropdown-ui", "value"),
            prevent_initial_call=True,
        )
        def sync_id_dropdown(value):
            return value

        @callback(
            Output("category-field-dropdown", "value"),
            Input("category-field-dropdown-ui", "value"),
            prevent_initial_call=True,
        )
        def sync_category_dropdown(value):
            return value

        @callback(
            Output("subcategory-field-dropdown", "value"),
            Input("subcategory-field-dropdown-ui", "value"),
            prevent_initial_call=True,
        )
        def sync_subcategory_dropdown(value):
            return value

        @callback(
            Output("tags-field-dropdown", "value"),
            Input("tags-field-dropdown-ui", "value"),
            prevent_initial_call=True,
        )
        def sync_tags_dropdown(value):
            return value

    @staticmethod
    def _document_to_dict(doc):
        return {
@@ -118,3 +399,10 @@ class DataProcessingCallbacks:
            f"❌ Error processing file{file_part}: {error}. "
            "Please check that your file is valid NDJSON with required 'text' and 'embedding' fields."
        )

    @staticmethod
    def _create_status_alert(message: str, color: str):
        """Create a status alert component."""
        import dash_bootstrap_components as dbc

        return dbc.Alert(message, color=color, className="mb-2")
src/embeddingbuddy/ui/components/datasource.py (new file, 320 lines)
@@ -0,0 +1,320 @@
from dash import dcc, html
import dash_bootstrap_components as dbc
from .upload import UploadComponent


class DataSourceComponent:
    def __init__(self):
        self.upload_component = UploadComponent()

    def create_tabbed_interface(self):
        """Create tabbed interface for different data sources."""
        return dbc.Card(
            [
                dbc.CardHeader(
                    [
                        dbc.Tabs(
                            [
                                dbc.Tab(label="File Upload", tab_id="file-tab"),
                                dbc.Tab(label="OpenSearch", tab_id="opensearch-tab"),
                            ],
                            id="data-source-tabs",
                            active_tab="file-tab",
                        )
                    ]
                ),
                dbc.CardBody([html.Div(id="tab-content")]),
            ]
        )

    def create_file_upload_tab(self):
        """Create file upload tab content."""
        return html.Div(
            [
                self.upload_component.create_error_alert(),
                self.upload_component.create_data_upload(),
                self.upload_component.create_prompts_upload(),
                self.upload_component.create_reset_button(),
            ]
        )

    def create_opensearch_tab(self):
        """Create OpenSearch tab content."""
        return html.Div(
            [
                # Connection section
                html.H6("Connection", className="mb-2"),
                dbc.Row(
                    [
                        dbc.Col(
                            [
                                dbc.Label("OpenSearch URL:"),
                                dbc.Input(
                                    id="opensearch-url",
                                    type="text",
                                    placeholder="https://opensearch.example.com:9200",
                                    className="mb-2",
                                ),
                            ],
                            width=12,
                        ),
                    ]
                ),
                dbc.Row(
                    [
                        dbc.Col(
                            [
                                dbc.Label("Index Name:"),
                                dbc.Input(
                                    id="opensearch-index",
                                    type="text",
                                    placeholder="my-embeddings-index",
                                    className="mb-2",
                                ),
                            ],
                            width=6,
                        ),
                        dbc.Col(
                            [
                                dbc.Button(
                                    "Test Connection",
                                    id="test-connection-btn",
                                    color="primary",
                                    size="sm",
                                    className="mt-4",
                                ),
                            ],
                            width=6,
                            className="d-flex align-items-end",
                        ),
                    ]
                ),
                # Authentication section (collapsible)
                dbc.Collapse(
                    [
                        html.Hr(),
                        html.H6("Authentication (Optional)", className="mb-2"),
                        dbc.Row(
                            [
                                dbc.Col(
                                    [
                                        dbc.Label("Username:"),
                                        dbc.Input(
                                            id="opensearch-username",
                                            type="text",
                                            className="mb-2",
                                        ),
                                    ],
                                    width=6,
                                ),
                                dbc.Col(
                                    [
                                        dbc.Label("Password:"),
                                        dbc.Input(
                                            id="opensearch-password",
                                            type="password",
                                            className="mb-2",
                                        ),
                                    ],
                                    width=6,
                                ),
                            ]
                        ),
                        dbc.Label("OR"),
                        dbc.Input(
                            id="opensearch-api-key",
                            type="text",
                            placeholder="API Key",
                            className="mb-2",
                        ),
                    ],
                    id="auth-collapse",
                    is_open=False,
                ),
                dbc.Button(
                    "Show Authentication",
                    id="auth-toggle",
                    color="link",
                    size="sm",
                    className="p-0 mb-3",
                ),
                # Connection status
                html.Div(id="connection-status", className="mb-3"),
                # Field mapping section (hidden initially)
                html.Div(id="field-mapping-section", style={"display": "none"}),

                # Hidden dropdowns to prevent callback errors
                html.Div([
                    dcc.Dropdown(id="embedding-field-dropdown", style={"display": "none"}),
                    dcc.Dropdown(id="text-field-dropdown", style={"display": "none"}),
                    dcc.Dropdown(id="id-field-dropdown", style={"display": "none"}),
                    dcc.Dropdown(id="category-field-dropdown", style={"display": "none"}),
                    dcc.Dropdown(id="subcategory-field-dropdown", style={"display": "none"}),
                    dcc.Dropdown(id="tags-field-dropdown", style={"display": "none"}),
                ], style={"display": "none"}),
                # Load data button (hidden initially)
                html.Div(
                    [
                        dbc.Button(
                            "Load Data",
                            id="load-opensearch-data-btn",
                            color="success",
                            className="mb-2",
                            disabled=True,
                        ),
                    ],
                    id="load-data-section",
                    style={"display": "none"},
                ),
                # OpenSearch status/results
                html.Div(id="opensearch-status", className="mb-3"),
            ]
        )

    def create_field_mapping_interface(self, field_suggestions):
        """Create field mapping interface based on detected fields."""
        return html.Div(
            [
                html.Hr(),
                html.H6("Field Mapping", className="mb-2"),
                html.P(
                    "Map your OpenSearch fields to the required format:",
                    className="text-muted small",
                ),
                # Required fields
                dbc.Row(
                    [
                        dbc.Col(
                            [
                                dbc.Label(
                                    "Embedding Field (required):", className="fw-bold"
                                ),
                                dcc.Dropdown(
                                    id="embedding-field-dropdown-ui",
                                    options=[
                                        {"label": field, "value": field}
                                        for field in field_suggestions.get("embedding", [])
                                    ],
                                    value=field_suggestions.get("embedding", [None])[0],  # Default to first suggestion
                                    placeholder="Select embedding field...",
                                    className="mb-2",
                                ),
                            ],
                            width=6,
                        ),
                        dbc.Col(
                            [
                                dbc.Label(
                                    "Text Field (required):", className="fw-bold"
                                ),
                                dcc.Dropdown(
                                    id="text-field-dropdown-ui",
                                    options=[
                                        {"label": field, "value": field}
                                        for field in field_suggestions.get("text", [])
                                    ],
                                    value=field_suggestions.get("text", [None])[0],  # Default to first suggestion
                                    placeholder="Select text field...",
                                    className="mb-2",
                                ),
                            ],
                            width=6,
                        ),
                    ]
                ),
                # Optional fields
                html.H6("Optional Fields", className="mb-2 mt-3"),
                dbc.Row(
                    [
                        dbc.Col(
                            [
                                dbc.Label("ID Field:"),
                                dcc.Dropdown(
                                    id="id-field-dropdown-ui",
                                    options=[
                                        {"label": field, "value": field}
                                        for field in field_suggestions.get("id", [])
                                    ],
                                    value=field_suggestions.get("id", [None])[0],  # Default to first suggestion
                                    placeholder="Select ID field...",
                                    className="mb-2",
                                ),
                            ],
                            width=6,
                        ),
                        dbc.Col(
                            [
                                dbc.Label("Category Field:"),
                                dcc.Dropdown(
                                    id="category-field-dropdown-ui",
                                    options=[
                                        {"label": field, "value": field}
                                        for field in field_suggestions.get("category", [])
                                    ],
                                    value=field_suggestions.get("category", [None])[0],  # Default to first suggestion
                                    placeholder="Select category field...",
                                    className="mb-2",
                                ),
                            ],
                            width=6,
                        ),
                    ]
                ),
                dbc.Row(
                    [
                        dbc.Col(
                            [
                                dbc.Label("Subcategory Field:"),
                                dcc.Dropdown(
                                    id="subcategory-field-dropdown-ui",
                                    options=[
                                        {"label": field, "value": field}
                                        for field in field_suggestions.get("subcategory", [])
                                    ],
                                    value=field_suggestions.get("subcategory", [None])[0],  # Default to first suggestion
                                    placeholder="Select subcategory field...",
                                    className="mb-2",
                                ),
                            ],
                            width=6,
                        ),
                        dbc.Col(
                            [
                                dbc.Label("Tags Field:"),
                                dcc.Dropdown(
                                    id="tags-field-dropdown-ui",
                                    options=[
                                        {"label": field, "value": field}
                                        for field in field_suggestions.get("tags", [])
                                    ],
                                    value=field_suggestions.get("tags", [None])[0],  # Default to first suggestion
                                    placeholder="Select tags field...",
                                    className="mb-2",
                                ),
                            ],
                            width=6,
                        ),
                    ]
                ),
            ]
        )

    def create_error_alert(self):
        """Create error alert component for OpenSearch issues."""
        return dbc.Alert(
            id="opensearch-error-alert",
            dismissable=True,
            is_open=False,
            color="danger",
            className="mb-3",
        )

    def create_success_alert(self):
        """Create success alert component for OpenSearch operations."""
        return dbc.Alert(
            id="opensearch-success-alert",
            dismissable=True,
            is_open=False,
            color="success",
            className="mb-3",
        )
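A note on the design, as far as the diff shows: the invisible embedding/text/id/category/subcategory/tags dropdowns in create_opensearch_tab exist so that the callbacks registered at app start always find their component IDs, while the visible "-ui" dropdowns only appear once create_field_mapping_interface runs; the sync_* callbacks in DataProcessingCallbacks then copy the user's "-ui" selections into the hidden dropdowns that the load-data callback reads.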
@@ -1,21 +1,22 @@
from dash import dcc, html
import dash_bootstrap_components as dbc
from .upload import UploadComponent
from .datasource import DataSourceComponent


class SidebarComponent:
    def __init__(self):
        self.upload_component = UploadComponent()
        self.datasource_component = DataSourceComponent()

    def create_layout(self):
        return dbc.Col(
            [
                html.H5("Upload Data", className="mb-3"),
                self.upload_component.create_error_alert(),
                self.upload_component.create_data_upload(),
                self.upload_component.create_prompts_upload(),
                self.upload_component.create_reset_button(),
                html.H5("Visualization Controls", className="mb-3"),
                html.H5("Data Sources", className="mb-3"),
                self.datasource_component.create_error_alert(),
                self.datasource_component.create_success_alert(),
                self.datasource_component.create_tabbed_interface(),
                html.H5("Visualization Controls", className="mb-3 mt-4"),
            ]
            + self._create_method_dropdown()
            + self._create_color_dropdown()