213 changes: 101 additions & 112 deletions __init__.py
@@ -13,11 +13,11 @@
from bson import json_util

import fiftyone as fo
from fiftyone.core.utils import add_sys_path
import fiftyone.operators as foo
import fiftyone.operators.types as types


from .voxelgpt import ask_voxelgpt_generator
import db
Contributor commented:

Isn't the idea of voxel51/fiftyone#4029 that users should adopt this style?

from voxelgpt.voxelgpt import ask_voxelgpt_generator
from voxelgpt import db

where the name of their source folder is treated as the package name
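
For context, a rough sketch of the plugin layout this import style assumes (the file names mirror this repo, but the layout itself is illustrative):

# voxelgpt/                  <- plugin source folder, doubling as the package name
#     __init__.py            <- this file (the operators)
#     voxelgpt.py            <- defines ask_voxelgpt_generator
#     db.py
#     links/
#         ...
#
# With that layout, the operators would import through the package:
from voxelgpt.voxelgpt import ask_voxelgpt_generator
from voxelgpt import db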

class AskVoxelGPT(foo.Operator):
@property
def config(self):
Expand Down Expand Up @@ -47,47 +47,43 @@ def execute(self, ctx):
inject_voxelgpt_secrets(ctx)

try:
with add_sys_path(os.path.dirname(os.path.abspath(__file__))):
# pylint: disable=no-name-in-module
from voxelgpt import ask_voxelgpt_generator

streaming_message = None

for response in ask_voxelgpt_generator(
query,
sample_collection=sample_collection,
dialect="string",
allow_streaming=True,
):
type = response["type"]
data = response["data"]

if type == "view":
yield self.view(ctx, data["view"])
elif type == "message":
kwargs = {}

if data["overwrite"]:
kwargs["overwrite_last"] = True

yield self.message(
ctx, data["message"], messages, **kwargs
)
elif type == "streaming":
kwargs = {}

if streaming_message is None:
streaming_message = data["content"]
else:
streaming_message += data["content"]
kwargs["overwrite_last"] = True

yield self.message(
ctx, streaming_message, messages, **kwargs
)

if data["last"]:
streaming_message = None
streaming_message = None

for response in ask_voxelgpt_generator(
query,
sample_collection=sample_collection,
dialect="string",
allow_streaming=True,
):
type = response["type"]
data = response["data"]

if type == "view":
yield self.view(ctx, data["view"])
elif type == "message":
kwargs = {}

if data["overwrite"]:
kwargs["overwrite_last"] = True

yield self.message(
ctx, data["message"], messages, **kwargs
)
elif type == "streaming":
kwargs = {}

if streaming_message is None:
streaming_message = data["content"]
else:
streaming_message += data["content"]
kwargs["overwrite_last"] = True

yield self.message(
ctx, streaming_message, messages, **kwargs
)

if data["last"]:
streaming_message = None
except Exception as e:
yield self.error(ctx, e)

@@ -152,65 +148,61 @@ def execute(self, ctx):
inject_voxelgpt_secrets(ctx)

try:
with add_sys_path(os.path.dirname(os.path.abspath(__file__))):
# pylint: disable=import-error,no-name-in-module
import db
from voxelgpt import ask_voxelgpt_generator

# Log user query
table = db.table(db.UserQueryTable)
ctx.params["query_id"] = table.insert_query(query)

streaming_message = None

for response in ask_voxelgpt_generator(
query,
sample_collection=sample_collection,
chat_history=chat_history,
dialect="markdown",
allow_streaming=True,
):
type = response["type"]
data = response["data"]

if type == "view":
if orig_view is not None:
message = (
"I'm remembering your previous view. Any "
"follow-up questions in this session will be "
"posed with respect to it"
)
yield self.message(
ctx, message, orig_view=orig_view
)

yield self.view(ctx, data["view"])
elif type == "message":
kwargs = {}

if data["overwrite"]:
kwargs["overwrite_last"] = True

kwargs["history"] = data["history"]
yield self.message(ctx, data["message"], **kwargs)
elif type == "streaming":
kwargs = {}

if streaming_message is None:
streaming_message = data["content"]
else:
streaming_message += data["content"]
kwargs["overwrite_last"] = True

if data["last"]:
kwargs["history"] = streaming_message

yield self.message(ctx, streaming_message, **kwargs)

if data["last"]:
streaming_message = None
elif type == "warning":
yield self.warning(ctx, data["message"])

# Log user query
table = db.table(db.UserQueryTable)
ctx.params["query_id"] = table.insert_query(query)

streaming_message = None

for response in ask_voxelgpt_generator(
query,
sample_collection=sample_collection,
chat_history=chat_history,
dialect="markdown",
allow_streaming=True,
):
type = response["type"]
data = response["data"]

if type == "view":
if orig_view is not None:
message = (
"I'm remembering your previous view. Any "
"follow-up questions in this session will be "
"posed with respect to it"
)
yield self.message(
ctx, message, orig_view=orig_view
)

yield self.view(ctx, data["view"])
elif type == "message":
kwargs = {}

if data["overwrite"]:
kwargs["overwrite_last"] = True

kwargs["history"] = data["history"]
yield self.message(ctx, data["message"], **kwargs)
elif type == "streaming":
kwargs = {}

if streaming_message is None:
streaming_message = data["content"]
else:
streaming_message += data["content"]
kwargs["overwrite_last"] = True

if data["last"]:
kwargs["history"] = streaming_message

yield self.message(ctx, streaming_message, **kwargs)

if data["last"]:
streaming_message = None
elif type == "warning":
yield self.warning(ctx, data["message"])
except Exception as e:
yield self.error(ctx, e)
finally:
@@ -367,17 +359,14 @@ def execute(self, ctx):
query_id = ctx.params["query_id"]
vote = ctx.params["vote"]

with add_sys_path(os.path.dirname(os.path.abspath(__file__))):
# pylint: disable=import-error,no-name-in-module
import db

table = db.table(db.UserQueryTable)
if vote == "upvote":
table.upvote_query(query_id)
elif vote == "downvote":
table.downvote_query(query_id)
else:
raise ValueError(f"Invalid vote '{vote}'")
table = db.table(db.UserQueryTable)
if vote == "upvote":
table.upvote_query(query_id)
elif vote == "downvote":
table.downvote_query(query_id)
else:
raise ValueError(f"Invalid vote '{vote}'")


def get_plugin_setting(dataset, plugin_name, key, default=None):
26 changes: 13 additions & 13 deletions voxelgpt.py
@@ -11,27 +11,27 @@

import fiftyone as fo

from links.query_moderator import moderate_query
from links.dataset_schema_handler import query_schema
from links.query_intent_classifier import classify_query_intent
from links.docs_query_dispatcher import run_docs_query, stream_docs_query
from links.computer_vision_query_dispatcher import (
from voxelgpt.links.query_moderator import moderate_query
Contributor commented:

Unfortunately we can't use this import style here because the voxelgpt.py module is also intended for direct usage via Python, in which case the code is not running as a package.

That said, the changes in this module aren't actually required, right? Relative imports should be fine.
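
A minimal sketch of the direct-usage concern (the path below is hypothetical): with the plugin folder on sys.path, voxelgpt.py is imported as a top-level module rather than as part of a voxelgpt package, so package-qualified imports inside it have nothing to resolve against:

import sys

sys.path.insert(0, "/path/to/voxelgpt")  # hypothetical location of the plugin checkout

import voxelgpt  # imports voxelgpt.py itself as a plain top-level module

# Inside voxelgpt.py at import time:
#   from links.query_moderator import moderate_query            # works: links/ is also on sys.path
#   from voxelgpt.links.query_moderator import moderate_query   # fails: voxelgpt is a module here, not a package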

from voxelgpt.links.dataset_schema_handler import query_schema
from voxelgpt.links.query_intent_classifier import classify_query_intent
from voxelgpt.links.docs_query_dispatcher import run_docs_query, stream_docs_query
from voxelgpt.links.computer_vision_query_dispatcher import (
run_computer_vision_query,
stream_computer_vision_query,
)
from links.view_stage_example_selector import (
from voxelgpt.links.view_stage_example_selector import (
generate_view_stage_examples_prompt,
)
from links.view_stage_description_selector import (
from voxelgpt.links.view_stage_description_selector import (
generate_view_stage_descriptions_prompt,
get_most_relevant_view_stages,
)
from links.algorithm_selector import select_algorithms
from links.run_selector import select_runs
from links.field_selector import select_fields
from links.label_class_selector import select_label_classes
from links.dataset_view_generator import get_gpt_view_stage_strings
from links.effective_query_generator import generate_effective_query
from voxelgpt.links.algorithm_selector import select_algorithms
from voxelgpt.links.run_selector import select_runs
from voxelgpt.links.field_selector import select_fields
from voxelgpt.links.label_class_selector import select_label_classes
from voxelgpt.links.dataset_view_generator import get_gpt_view_stage_strings
from voxelgpt.links.effective_query_generator import generate_effective_query


_SUPPORTED_DIALECTS = ("string", "markdown", "raw")