Typeahead datastore (#321)

* Wiring up the datastore

* Writes into the data store

* Bulk save needs the timestamps

* Prep to do the local query

* Local typeahead working

* Pre-PR cleanup

* ignore migrations dir in pre-commit for ruff w/ burnettk

* Getting ./bin/pyl to pass

---------

Co-authored-by: jasquat <jasquat@users.noreply.github.com>
Author: jbirddog, 2023-06-09 15:28:59 -04:00 (committed by GitHub)
Commit: 0c1e83a892 (parent: 26d7efb598)
8 changed files with 173 additions and 0 deletions


@@ -51,6 +51,7 @@ repos:
        language: system
        types: [python]
        require_serial: true
        exclude: "/migrations/"
      - id: trailing-whitespace
        files: ^spiffworkflow-backend/
        name: Trim Trailing Whitespace


@@ -0,0 +1,44 @@
"""empty message

Revision ID: 377be1608b45
Revises: e4b6bbf83a3e
Create Date: 2023-06-07 12:39:19.989484

"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = '377be1608b45'
down_revision = 'e4b6bbf83a3e'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('typeahead',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('category', sa.String(length=255), nullable=True),
    sa.Column('search_term', sa.String(length=255), nullable=True),
    sa.Column('result', sa.JSON(), nullable=True),
    sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
    sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('typeahead', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_typeahead_category'), ['category'], unique=False)
        batch_op.create_index(batch_op.f('ix_typeahead_search_term'), ['search_term'], unique=False)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('typeahead', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_typeahead_search_term'))
        batch_op.drop_index(batch_op.f('ix_typeahead_category'))

    op.drop_table('typeahead')
    # ### end Alembic commands ###


@@ -0,0 +1,65 @@
from time import time
from typing import Any

from SpiffWorkflow.bpmn.serializer.helpers.spec import BpmnSpecConverter  # type: ignore
from SpiffWorkflow.bpmn.specs.data_spec import BpmnDataStoreSpecification  # type: ignore
from SpiffWorkflow.task import Task as SpiffTask  # type: ignore

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.typeahead import TypeaheadModel


class TypeaheadDataStore(BpmnDataStoreSpecification):  # type: ignore
    """TypeaheadDataStore."""

    def get(self, my_task: SpiffTask) -> None:
        """get."""
        raise Exception("This is a write only data store.")

    def set(self, my_task: SpiffTask) -> None:
        """set."""
        typeahead_data_by_category = my_task.data[self.bpmn_id]
        for category, items in typeahead_data_by_category.items():
            db.session.query(TypeaheadModel).filter_by(category=category).delete()
            objects = [self._make_object(category, item) for item in items]
            db.session.bulk_save_objects(objects)
        db.session.commit()
        del my_task.data[self.bpmn_id]

    def _make_object(self, category: str, item: dict[str, Any]) -> TypeaheadModel:
        now = round(time())
        return TypeaheadModel(
            category=category,
            search_term=item["search_term"],
            result=item["result"],
            created_at_in_seconds=now,
            updated_at_in_seconds=now,
        )

    @staticmethod
    def register_converter(spec_config: dict[str, Any]) -> None:
        spec_config["task_specs"].append(TypeaheadDataStoreConverter)

    @staticmethod
    def register_data_store_class(data_store_classes: dict[str, Any]) -> None:
        data_store_classes["TypeaheadDataStore"] = TypeaheadDataStore


class TypeaheadDataStoreConverter(BpmnSpecConverter):  # type: ignore
    """TypeaheadDataStoreConverter."""

    def __init__(self, registry):  # type: ignore
        """__init__."""
        super().__init__(TypeaheadDataStore, registry)

    def to_dict(self, spec: Any) -> dict[str, Any]:
        """to_dict."""
        return {
            "bpmn_id": spec.bpmn_id,
            "bpmn_name": spec.bpmn_name,
            "capacity": spec.capacity,
            "is_unlimited": spec.is_unlimited,
        }

    def from_dict(self, dct: dict[str, Any]) -> TypeaheadDataStore:
        """from_dict."""
        return TypeaheadDataStore(**dct)
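
For orientation, a minimal sketch (not part of the diff) of the task data shape set() expects: under the data store's bpmn_id key, each category maps to a list of entries carrying a search_term and a result. The "typeahead_data" id, the "cities" category, and the values below are hypothetical.

# Hypothetical payload a process would leave in task data under the data
# store's bpmn_id (assumed here to be "typeahead_data") before set() runs;
# set() replaces any existing rows for each category and then drops the key.
typeahead_data = {
    "cities": [
        {"search_term": "Atlanta", "result": {"name": "Atlanta", "state": "GA"}},
        {"search_term": "Austin", "result": {"name": "Austin", "state": "TX"}},
    ],
}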


@@ -79,5 +79,8 @@ from spiffworkflow_backend.models.active_user import (
from spiffworkflow_backend.models.process_model_cycle import (
    ProcessModelCycleModel,
)  # noqa: F401
from spiffworkflow_backend.models.typeahead import (
    TypeaheadModel,
)  # noqa: F401

add_listeners()


@@ -0,0 +1,16 @@
from dataclasses import dataclass

from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.db import db


@dataclass
class TypeaheadModel(SpiffworkflowBaseDBModel):
    __tablename__ = "typeahead"

    id: int = db.Column(db.Integer, primary_key=True)
    category: str = db.Column(db.String(255), index=True)
    search_term: str = db.Column(db.String(255), index=True)
    result: dict = db.Column(db.JSON)
    updated_at_in_seconds: int = db.Column(db.Integer)
    created_at_in_seconds: int = db.Column(db.Integer)
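
For illustration (values hypothetical, not taken from the diff), one entry of that payload ends up as a row like the one below. The commit bullet "Bulk save needs the timestamps" suggests why _make_object stamps the seconds columns explicitly: bulk_save_objects presumably bypasses whatever normally fills them.

from time import time

from spiffworkflow_backend.models.typeahead import TypeaheadModel

# Hypothetical row mirroring what TypeaheadDataStore._make_object builds for
# a single {"search_term": ..., "result": ...} item in a "cities" category.
now = round(time())
row = TypeaheadModel(
    category="cities",
    search_term="Atlanta",
    result={"name": "Atlanta", "state": "GA"},
    created_at_in_seconds=now,
    updated_at_in_seconds=now,
)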


@@ -1,3 +1,4 @@
import json
from typing import Any

import flask.wrappers
@@ -6,6 +7,8 @@ from flask import current_app
from flask.wrappers import Response

from spiffworkflow_backend.config import HTTP_REQUEST_TIMEOUT_SECONDS
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.typeahead import TypeaheadModel


def connector_proxy_typeahead_url() -> Any:
@@ -14,6 +17,34 @@ def connector_proxy_typeahead_url() -> Any:
def typeahead(category: str, prefix: str, limit: int) -> flask.wrappers.Response:
    if _has_local_data(category):
        return _local_typeahead(category, prefix, limit)

    return _remote_typeahead(category, prefix, limit)


def _local_typeahead(category: str, prefix: str, limit: int) -> flask.wrappers.Response:
    results = (
        db.session.query(TypeaheadModel.result)
        .filter(
            TypeaheadModel.category == category,
            TypeaheadModel.search_term.ilike(f"{prefix}%"),  # type: ignore
        )
        .order_by(TypeaheadModel.search_term)
        .limit(limit)
        .all()
        or []
    )

    # this is a bummer but sqlalchemy returns a tuple of one field for each result
    results = [result[0] for result in results]

    response = json.dumps(results)
    return Response(response, status=200, mimetype="application/json")


def _remote_typeahead(category: str, prefix: str, limit: int) -> flask.wrappers.Response:
    url = f"{connector_proxy_typeahead_url()}/v1/typeahead/{category}?prefix={prefix}&limit={limit}"
    proxy_response = requests.get(url, timeout=HTTP_REQUEST_TIMEOUT_SECONDS)
@@ -21,3 +52,7 @@ def typeahead(category: str, prefix: str, limit: int) -> flask.wrappers.Response
    response = proxy_response.text

    return Response(response, status=status, mimetype="application/json")


def _has_local_data(category: str) -> bool:
    return db.session.query(TypeaheadModel.category).filter_by(category=category).first() is not None
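
A usage sketch (not from the diff): when _has_local_data finds no rows for a category, the request falls through to the connector proxy using the URL shape built in _remote_typeahead. The host, category, and values below are hypothetical.

import requests

# Hypothetical connector proxy base URL; in the backend it comes from
# connector_proxy_typeahead_url(), i.e. application configuration.
url = "http://localhost:7004/v1/typeahead/cities?prefix=atl&limit=10"
response = requests.get(url, timeout=15)
print(response.json())  # e.g. [{"name": "Atlanta", "state": "GA"}, ...]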


@@ -1,5 +1,8 @@
from typing import Any

from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser  # type: ignore
from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser  # type: ignore

from spiffworkflow_backend.data_stores.typeahead import TypeaheadDataStore
from spiffworkflow_backend.specs.start_event import StartEvent
@@ -10,3 +13,7 @@ class MyCustomParser(BpmnDmnParser): # type: ignore
    OVERRIDE_PARSER_CLASSES.update(SpiffBpmnParser.OVERRIDE_PARSER_CLASSES)

    StartEvent.register_parser_class(OVERRIDE_PARSER_CLASSES)


DATA_STORE_CLASSES: dict[str, Any] = {}
TypeaheadDataStore.register_data_store_class(DATA_STORE_CLASSES)


@@ -41,6 +41,7 @@ from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ign
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore
from spiffworkflow_backend.data_stores.typeahead import TypeaheadDataStore
from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel
@@ -84,6 +85,7 @@ from spiffworkflow_backend.specs.start_event import StartEvent
from sqlalchemy import and_

StartEvent.register_converter(SPIFF_SPEC_CONFIG)
TypeaheadDataStore.register_converter(SPIFF_SPEC_CONFIG)

# Sorry about all this crap. I wanted to move this thing to another file, but
# importing a bunch of types causes circular imports.