Draft
Changes from all commits (29 commits)
5aad042
fix: merge conflict
Prajna1999 Jan 20, 2026
f47c20c
chore: update dependencies
Prajna1999 Jan 20, 2026
1e03961
feat: add google ai provider for Gemini models
Prajna1999 Jan 20, 2026
4ac4de8
feat: working stt with gemini and hotfixing circular import
Prajna1999 Jan 21, 2026
3c0bae7
Merge branch 'main' into feature/unified-api-stt-new
Prajna1999 Jan 21, 2026
7db94f1
Merge branch 'main' into feature/unified-api-stt-new
Prajna1999 Jan 21, 2026
196eb5c
feat: llm_call table, type enforce gAI stt response
Prajna1999 Jan 21, 2026
dca3139
Merge remote-tracking branch 'refs/remotes/origin/feature/unified-api…
Prajna1999 Jan 21, 2026
271d677
feat: discriminated union type enforcing for stt, tts and text comple…
Prajna1999 Jan 22, 2026
5ae59e5
Merge branch 'main' into feature/unified-api-stt-new
Prajna1999 Jan 22, 2026
250ce9f
fix: type annotation
Prajna1999 Jan 22, 2026
1742a8b
chore: fix alembic revision for shure
Prajna1999 Jan 23, 2026
ebb2394
feat: add google stt task to async job
Prajna1999 Jan 23, 2026
0bcb697
feat: yolo commit and linting issues
Prajna1999 Jan 26, 2026
a6850a3
feat: query input takes audio_url and base64 as audio file input
Prajna1999 Jan 26, 2026
f4693f6
chore: test cases for google ai and async job fixes, supress mappers …
Prajna1999 Jan 26, 2026
909e249
fix: test cases for config
Prajna1999 Jan 27, 2026
a7b0062
chore: clean PLAN.md
Prajna1999 Jan 27, 2026
fa25199
chore: extract stt code into its own
Prajna1999 Jan 28, 2026
24007a2
Merge branch 'main' into feature/unified-api-stt-new
Prajna1999 Jan 29, 2026
bbd2c7f
Refactor evaluation endpoint to use stored configuration and remove a…
avirajsingh7 Dec 9, 2025
b907440
fix: default original provider bug
Prajna1999 Jan 30, 2026
9f38f45
fix: coderrabbit comments
Prajna1999 Jan 31, 2026
f6348b5
Merge branch 'main' into feature/unified-api-stt-new
Prajna1999 Jan 31, 2026
b3ea8ec
fix: migration number
Prajna1999 Jan 31, 2026
26e0a6a
chore: formatting issue solved
Prajna1999 Jan 31, 2026
a623efa
fix: eval core crud test cases
Prajna1999 Jan 31, 2026
5c86cf2
fix: test cases for evaluation and test_llm
Prajna1999 Feb 1, 2026
be7f1ad
feat: basic tts implementation with gemini-2.5-pro-preview-tts
Prajna1999 Feb 2, 2026
185 changes: 185 additions & 0 deletions backend/app/alembic/versions/042_add_llm_call_table.py
@@ -0,0 +1,185 @@
"""add_llm_call_table

Revision ID: 042
Revises: 041
Create Date: 2026-01-26 15:20:23.873332

"""
from alembic import op
import sqlalchemy as sa
import sqlmodel.sql.sqltypes
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = "042"
down_revision = "041"
branch_labels = None
depends_on = None


def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"llm_call",
sa.Column(
"id",
sa.Uuid(),
nullable=False,
comment="Unique identifier for the LLM call record",
),
sa.Column(
"job_id",
sa.Uuid(),
nullable=False,
comment="Reference to the parent job (status tracked in job table)",
),
sa.Column(
"project_id",
sa.Integer(),
nullable=False,
comment="Reference to the project this LLM call belongs to",
),
sa.Column(
"organization_id",
sa.Integer(),
nullable=False,
comment="Reference to the organization this LLM call belongs to",
),
sa.Column(
"input",
sqlmodel.sql.sqltypes.AutoString(),
nullable=False,
comment="User input - text string, binary data, or file path for multimodal",
),
sa.Column(
"input_type",
sa.String(),
nullable=False,
comment="Input type: text, audio, image",
),
sa.Column(
"output_type",
sa.String(),
nullable=True,
comment="Expected output type: text, audio, image",
),
sa.Column(
"provider",
sa.String(),
nullable=False,
comment="AI provider: openai, google, anthropic",
),
sa.Column(
"model",
sqlmodel.sql.sqltypes.AutoString(),
nullable=False,
comment="Specific model used e.g. 'gpt-4o', 'gemini-2.5-pro'",
),
sa.Column(
"provider_response_id",
sqlmodel.sql.sqltypes.AutoString(),
nullable=True,
comment="Original response ID from the provider (e.g., OpenAI's response ID)",
),
sa.Column(
"content",
postgresql.JSONB(astext_type=sa.Text()),
nullable=True,
comment="Response content: {text: '...'}, {audio_bytes: '...'}, or {image: '...'}",
),
sa.Column(
"usage",
postgresql.JSONB(astext_type=sa.Text()),
nullable=True,
comment="Token usage: {input_tokens, output_tokens, reasoning_tokens}",
),
sa.Column(
"conversation_id",
sqlmodel.sql.sqltypes.AutoString(),
nullable=True,
comment="Identifier linking this response to its conversation thread",
),
sa.Column(
"auto_create",
sa.Boolean(),
nullable=True,
comment="Whether to auto-create conversation if conversation_id doesn't exist (OpenAI specific)",
),
sa.Column(
"config",
postgresql.JSONB(astext_type=sa.Text()),
nullable=True,
comment="Configuration: {config_id, config_version} for stored config OR {config_blob} for ad-hoc config",
),
sa.Column(
"created_at",
sa.DateTime(),
nullable=False,
comment="Timestamp when the LLM call was created",
),
sa.Column(
"updated_at",
sa.DateTime(),
nullable=False,
comment="Timestamp when the LLM call was last updated",
),
sa.Column(
"deleted_at",
sa.DateTime(),
nullable=True,
comment="Timestamp when the record was soft-deleted",
),
sa.ForeignKeyConstraint(["job_id"], ["job.id"], ondelete="CASCADE"),
sa.ForeignKeyConstraint(
["organization_id"], ["organization.id"], ondelete="CASCADE"
),
sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
"idx_llm_call_conversation_id",
"llm_call",
["conversation_id"],
unique=False,
postgresql_where=sa.text("conversation_id IS NOT NULL AND deleted_at IS NULL"),
)
op.create_index(
"idx_llm_call_job_id",
"llm_call",
["job_id"],
unique=False,
postgresql_where=sa.text("deleted_at IS NULL"),
)
op.alter_column(
"collection",
"llm_service_name",
existing_type=sa.VARCHAR(),
comment="Name of the LLM service",
existing_comment="Name of the LLM service provider",
existing_nullable=False,
)
# ### end Alembic commands ###


def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column(
"collection",
"llm_service_name",
existing_type=sa.VARCHAR(),
comment="Name of the LLM service provider",
existing_comment="Name of the LLM service",
existing_nullable=False,
)
op.drop_index(
"idx_llm_call_job_id",
table_name="llm_call",
postgresql_where=sa.text("deleted_at IS NULL"),
)
op.drop_index(
"idx_llm_call_conversation_id",
table_name="llm_call",
postgresql_where=sa.text("conversation_id IS NOT NULL AND deleted_at IS NULL"),
)
op.drop_table("llm_call")
# ### end Alembic commands ###
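
Review note, not part of the diff: a minimal sketch of what the SQLModel class behind this migration might look like, inferred purely from the columns above. The PR's actual model definition is not shown in this hunk, so the class name, defaults, and cascade handling here are assumptions.

# Hedged sketch only: a SQLModel table mirroring the llm_call columns created above.
# The real model in the PR may differ (ondelete behavior, defaults, comments).
from datetime import datetime
from uuid import UUID, uuid4

from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import JSONB
from sqlmodel import Field, SQLModel


class LLMCall(SQLModel, table=True):  # hypothetical class name
    __tablename__ = "llm_call"

    id: UUID = Field(default_factory=uuid4, primary_key=True)
    job_id: UUID = Field(foreign_key="job.id")
    project_id: int = Field(foreign_key="project.id")
    organization_id: int = Field(foreign_key="organization.id")
    input: str                                    # text, binary data, or file path
    input_type: str                               # "text", "audio", "image"
    output_type: str | None = None                # expected output type
    provider: str                                 # provider as sent by the user
    model: str                                    # e.g. "gpt-4o", "gemini-2.5-pro"
    provider_response_id: str | None = None
    content: dict | None = Field(default=None, sa_column=Column(JSONB))
    usage: dict | None = Field(default=None, sa_column=Column(JSONB))
    conversation_id: str | None = Field(default=None, index=True)
    auto_create: bool | None = None
    config: dict | None = Field(default=None, sa_column=Column(JSONB))
    created_at: datetime = Field(default_factory=datetime.utcnow)
    updated_at: datetime = Field(default_factory=datetime.utcnow)
    deleted_at: datetime | None = None
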
@@ -1,7 +1,7 @@
"""extend collection table for provider agnostic support

Revision ID: 042
Revises: 041
Revision ID: 043
Revises: 042
Create Date: 2026-01-15 16:53:19.495583

"""
@@ -12,8 +12,8 @@


# revision identifiers, used by Alembic.
revision = "042"
down_revision = "041"
revision = "043"
down_revision = "042"
branch_labels = None
depends_on = None

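
Review note, not part of the diff: this hunk renumbers the existing "extend collection table" migration from 042/041 to 043/042 so the new llm_call migration can take 042 without creating a second Alembic head. A quick local sanity check using Alembic's script API (the alembic.ini path is an assumption; adjust to the repo layout):

# Hedged sketch: confirm a single Alembic head after the renumbering.
from alembic.config import Config
from alembic.script import ScriptDirectory

cfg = Config("backend/alembic.ini")          # path assumed, adjust as needed
script = ScriptDirectory.from_config(cfg)
heads = script.get_heads()
assert len(heads) == 1, f"expected one head, found: {heads}"
print("current head revision:", heads[0])
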
@@ -0,0 +1,43 @@
"""remove:enum checks llm_call provider
Revision ID: 044
Revises: 043
Create Date: 2026-01-30 11:22:45.165543
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel.sql.sqltypes


# revision identifiers, used by Alembic.
revision = "044"
down_revision = "043"
branch_labels = None
depends_on = None


def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column(
"llm_call",
"provider",
existing_type=sa.VARCHAR(),
comment="AI provider as sent by user (e.g openai, -native, google)",
existing_comment="AI provider: openai, google, anthropic",
existing_nullable=False,
)
# ### end Alembic commands ###


def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column(
"llm_call",
"provider",
existing_type=sa.VARCHAR(),
comment="AI provider: openai, google, anthropic",
existing_comment="AI provider as sent by user (e.g openai, -native, google)",
existing_nullable=False,
)
# ### end Alembic commands ###
11 changes: 7 additions & 4 deletions backend/app/api/routes/config/version.py
@@ -4,7 +4,7 @@
from app.api.deps import SessionDep, AuthContextDep
from app.crud.config import ConfigCrud, ConfigVersionCrud
from app.models import (
ConfigVersionCreate,
ConfigVersionCreatePartial,
ConfigVersionPublic,
Message,
ConfigVersionItems,
@@ -24,18 +24,21 @@
)
def create_version(
config_id: UUID,
version_create: ConfigVersionCreate,
version_create: ConfigVersionCreatePartial,
current_user: AuthContextDep,
session: SessionDep,
):
"""
Create a new version for an existing configuration.
The version number is automatically incremented.

Only include the fields you want to update in config_blob.
Provider, model, and params can be changed.
Type is inherited from existing config and cannot be changed.
"""
version_crud = ConfigVersionCrud(
session=session, project_id=current_user.project_.id, config_id=config_id
)
version = version_crud.create_or_raise(version_create=version_create)
version = version_crud.create_from_partial_or_raise(version_create=version_create)

return APIResponse.success_response(
data=ConfigVersionPublic(**version.model_dump()),
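
Review note, not part of the diff: per the docstring above, a partial version create should only carry the config_blob fields being changed. A hedged example payload; the field names are assumptions inferred from the docstring, since ConfigVersionCreatePartial's exact schema is not shown in this hunk.

# Hypothetical request body for the create-version endpoint
# (exact route path and schema are not shown in this diff).
version_create = {
    "config_blob": {
        "provider": "google",              # switch provider
        "model": "gemini-2.5-pro",         # and model
        "params": {"temperature": 0.2},    # change only the params you need
        # "type" is omitted: it is inherited from the existing config
    }
}
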
1 change: 1 addition & 0 deletions backend/app/celery/beat.py
@@ -1,6 +1,7 @@
"""
Celery beat scheduler for cron jobs.
"""

import logging
from celery import Celery
from app.celery.celery_app import celery_app
1 change: 1 addition & 0 deletions backend/app/celery/utils.py
@@ -2,6 +2,7 @@
Utility functions for easy Celery integration across the application.
Business logic modules can use these functions without knowing Celery internals.
"""

import logging
from typing import Any, Dict, Optional
from celery.result import AsyncResult
1 change: 1 addition & 0 deletions backend/app/celery/worker.py
@@ -1,6 +1,7 @@
"""
Celery worker management script.
"""

import logging
import multiprocessing
from celery.bin import worker
2 changes: 1 addition & 1 deletion backend/app/cli/bench/commands.py
@@ -210,7 +210,7 @@ def send_benchmark_request(
)
else:
typer.echo(response.text)
typer.echo(f"[{i+1}/{total}] FAILED - Status: {response.status_code}")
typer.echo(f"[{i + 1}/{total}] FAILED - Status: {response.status_code}")
raise Exception(f"Request failed with status code {response.status_code}")

