Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
270 changes: 242 additions & 28 deletions cmd/drift_query.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package cmd

import (
"bytes"
"context"
"encoding/json"
"fmt"
Expand All @@ -10,7 +11,9 @@ import (
"github.com/Use-Tusk/tusk-cli/internal/api"
"github.com/Use-Tusk/tusk-cli/internal/config"
"github.com/Use-Tusk/tusk-cli/internal/driftquery"
queryv1 "github.com/Use-Tusk/tusk-drift-schemas/generated/go/query"
"github.com/spf13/cobra"
"google.golang.org/protobuf/encoding/protojson"
)

var driftQueryCmd = &cobra.Command{
Expand Down Expand Up @@ -58,39 +61,31 @@ func resolveQueryServiceID(flagValue string, cfg *config.Config) (string, error)
// buildWhereFromFlags constructs a SpanWhereClause from convenience flags.
// Returns nil if no flags were set.
func buildWhereFromFlags(name, packageName, traceID, environment string, minDuration int, rootSpansOnly bool) *driftquery.SpanWhereClause {
w := &driftquery.SpanWhereClause{}
empty := true
fields := map[string]*driftquery.FieldPredicate{}

if name != "" {
w.Name = &driftquery.StringFilter{Eq: &name}
empty = false
fields["name"] = &driftquery.FieldPredicate{Eq: driftquery.StringValue(name)}
}
if packageName != "" {
w.PackageName = &driftquery.StringFilter{Eq: &packageName}
empty = false
fields["packageName"] = &driftquery.FieldPredicate{Eq: driftquery.StringValue(packageName)}
}
if traceID != "" {
w.TraceID = &driftquery.StringFilter{Eq: &traceID}
empty = false
fields["traceId"] = &driftquery.FieldPredicate{Eq: driftquery.StringValue(traceID)}
}
if environment != "" {
w.Environment = &driftquery.StringFilter{Eq: &environment}
empty = false
fields["environment"] = &driftquery.FieldPredicate{Eq: driftquery.StringValue(environment)}
}
if minDuration > 0 {
d := float64(minDuration)
w.Duration = &driftquery.NumberFilter{Gte: &d}
empty = false
fields["duration"] = &driftquery.FieldPredicate{Gte: driftquery.NumberValue(float64(minDuration))}
}
if rootSpansOnly {
w.IsRootSpan = &driftquery.BooleanFilter{Eq: true}
empty = false
fields["isRootSpan"] = &driftquery.FieldPredicate{Eq: driftquery.BoolValue(true)}
}

if empty {
if len(fields) == 0 {
return nil
}
return w
return &driftquery.SpanWhereClause{Fields: fields}
}

// parseOrderBy parses "field:direction" into an OrderByField.
Expand All @@ -99,30 +94,179 @@ func parseOrderBy(s string) (*driftquery.OrderByField, error) {
if len(parts) != 2 {
return nil, fmt.Errorf("invalid --order-by format %q, expected field:direction (e.g. timestamp:DESC)", s)
}
field := parts[0]
direction := strings.ToUpper(parts[1])
if direction != "ASC" && direction != "DESC" {
return nil, fmt.Errorf("invalid direction %q, expected ASC or DESC", parts[1])
field, ok := spanSortFieldByName[strings.TrimSpace(parts[0])]
if !ok {
return nil, fmt.Errorf("invalid field %q, expected one of: timestamp, createdAt, updatedAt, duration, name, traceId", parts[0])
}
direction, err := parseSortDirection(parts[1])
if err != nil {
return nil, err
}
return &driftquery.OrderByField{Field: field, Direction: direction}, nil
}

// parseWhereJSON parses a JSON string into a SpanWhereClause.
func parseWhereJSON(s string) (*driftquery.SpanWhereClause, error) {
normalizedJSON, err := normalizeWhereJSONEnums([]byte(s))
if err != nil {
return nil, fmt.Errorf("invalid --where JSON: %w", err)
}

var where driftquery.SpanWhereClause
if err := json.Unmarshal([]byte(s), &where); err != nil {
if err := protojson.Unmarshal(normalizedJSON, &where); err != nil {
return nil, fmt.Errorf("invalid --where JSON: %w", err)
}
return &where, nil
}

// parseJsonbFiltersJSON parses a JSON string into a slice of JsonbFilter.
func parseJsonbFiltersJSON(s string) ([]driftquery.JsonbFilter, error) {
var filters []driftquery.JsonbFilter
if err := json.Unmarshal([]byte(s), &filters); err != nil {
return nil, fmt.Errorf("invalid --jsonb-filters JSON: %w", err)
func normalizeWhereJSONEnums(input []byte) ([]byte, error) {
var whereJSON any
decoder := json.NewDecoder(bytes.NewReader(input))
decoder.UseNumber()
if err := decoder.Decode(&whereJSON); err != nil {
return nil, err
}

normalizeWhereClauseJSON(whereJSON)

normalized, err := json.Marshal(whereJSON)
if err != nil {
return nil, err
}
return normalized, nil
}

// normalizeWhereClauseJSON walks a decoded where-clause object, normalizing
// the enum strings of every field predicate and recursing through the
// and/or/not combinators. Non-object values are ignored.
func normalizeWhereClauseJSON(value any) {
	clause, isObject := value.(map[string]any)
	if !isObject {
		return
	}

	if fieldMap, isMap := clause["fields"].(map[string]any); isMap {
		for _, predicate := range fieldMap {
			normalizeFieldPredicateJSON(predicate)
		}
	}

	// "and" and "or" both hold lists of nested clauses; handle them uniformly.
	for _, combinator := range []string{"and", "or"} {
		if nestedClauses, isList := clause[combinator].([]any); isList {
			for _, nested := range nestedClauses {
				normalizeWhereClauseJSON(nested)
			}
		}
	}

	if notClause, present := clause["not"]; present {
		normalizeWhereClauseJSON(notClause)
	}
}

// normalizeFieldPredicateJSON rewrites the castAs and decode strings of a
// predicate's access descriptor into their canonical enum values. Values
// that are missing, non-string, or unrecognized are left untouched (and will
// be rejected later by protojson if invalid).
func normalizeFieldPredicateJSON(value any) {
	predicate, isObject := value.(map[string]any)
	if !isObject {
		return
	}

	access, hasAccess := predicate["access"].(map[string]any)
	if !hasAccess {
		return
	}

	if rawCast, isString := access["castAs"].(string); isString {
		lookupKey := strings.ToLower(strings.TrimSpace(rawCast))
		if enumValue, known := castTypeByName[lookupKey]; known {
			access["castAs"] = enumValue
		}
	}

	if rawDecode, isString := access["decode"].(string); isString {
		lookupKey := strings.ToLower(strings.TrimSpace(rawDecode))
		if enumValue, known := decodeStrategyByName[lookupKey]; known {
			access["decode"] = enumValue
		}
	}
}

// parseSortDirection parses a case-insensitive "ASC"/"DESC" string into a
// SortDirection enum value. Surrounding whitespace is ignored.
// Returns SORT_DIRECTION_UNSPECIFIED with an error for any other input.
func parseSortDirection(direction string) (driftquery.SortDirection, error) {
	switch strings.ToUpper(strings.TrimSpace(direction)) {
	case "ASC":
		return queryv1.SortDirection_SORT_DIRECTION_ASC, nil
	case "DESC":
		return queryv1.SortDirection_SORT_DIRECTION_DESC, nil
	default:
		return queryv1.SortDirection_SORT_DIRECTION_UNSPECIFIED, fmt.Errorf("invalid direction %q, expected ASC or DESC", direction)
	}
}

// parseAggregateMetric resolves a metric name (e.g. "count", "p95Duration")
// to its AggregateMetric enum value, ignoring surrounding whitespace.
func parseAggregateMetric(name string) (driftquery.AggregateMetric, error) {
	if metric, known := aggregateMetricByName[strings.TrimSpace(name)]; known {
		return metric, nil
	}
	return queryv1.AggregateMetric_AGGREGATE_METRIC_UNSPECIFIED, fmt.Errorf("invalid metric %q", name)
}

// parseAggregateMetrics resolves each metric name in names, returning the
// first resolution error encountered.
func parseAggregateMetrics(names []string) ([]driftquery.AggregateMetric, error) {
	parsed := make([]driftquery.AggregateMetric, 0, len(names))
	for _, metricName := range names {
		metric, err := parseAggregateMetric(metricName)
		if err != nil {
			return nil, err
		}
		parsed = append(parsed, metric)
	}
	return parsed, nil
}

// parseAggregateGroupFields resolves each group-by field name to its
// AggregateGroupField enum value, failing on the first unknown name.
func parseAggregateGroupFields(names []string) ([]driftquery.AggregateGroupField, error) {
	resolved := make([]driftquery.AggregateGroupField, 0, len(names))
	for _, fieldName := range names {
		groupField, known := aggregateGroupFieldByName[strings.TrimSpace(fieldName)]
		if !known {
			return nil, fmt.Errorf("invalid group-by field %q", fieldName)
		}
		resolved = append(resolved, groupField)
	}
	return resolved, nil
}

// parseMetricOrderBy parses "metric:direction" (e.g. "count:DESC") into a
// MetricOrderBy.
func parseMetricOrderBy(s string) (*driftquery.MetricOrderBy, error) {
	metricPart, directionPart, hasSeparator := strings.Cut(s, ":")
	if !hasSeparator {
		return nil, fmt.Errorf("invalid --order-by format %q, expected metric:direction (e.g. count:DESC)", s)
	}
	metric, err := parseAggregateMetric(metricPart)
	if err != nil {
		return nil, err
	}
	direction, err := parseSortDirection(directionPart)
	if err != nil {
		return nil, err
	}
	return &driftquery.MetricOrderBy{Metric: metric, Direction: direction}, nil
}

// parseTimeBucket resolves a bucket name ("hour", "day", or "week") to its
// TimeBucket enum value, ignoring surrounding whitespace.
func parseTimeBucket(s string) (driftquery.TimeBucket, error) {
	if bucket, known := timeBucketByName[strings.TrimSpace(s)]; known {
		return bucket, nil
	}
	return queryv1.TimeBucket_TIME_BUCKET_UNSPECIFIED, fmt.Errorf("invalid time bucket %q, expected hour, day, or week", s)
}

// parseSelectableFields parses a comma-separated list of span field names
// into SelectableSpanField enum values, failing on the first unknown name.
func parseSelectableFields(s string) ([]driftquery.SelectableSpanField, error) {
	fieldNames := splitComma(s)
	selected := make([]driftquery.SelectableSpanField, 0, len(fieldNames))
	for _, fieldName := range fieldNames {
		selectable, known := selectableFieldByName[fieldName]
		if !known {
			return nil, fmt.Errorf("invalid field %q", fieldName)
		}
		selected = append(selected, selectable)
	}
	return selected, nil
}

// splitComma splits a comma-separated string into trimmed non-empty parts.
Expand All @@ -140,3 +284,73 @@ func splitComma(s string) []string {
}
return result
}

// spanSortFieldByName maps CLI sort-field names to SpanSortField enum values.
var spanSortFieldByName = map[string]driftquery.SpanSortField{
"timestamp": queryv1.SpanSortField_SPAN_SORT_FIELD_TIMESTAMP,
"createdAt": queryv1.SpanSortField_SPAN_SORT_FIELD_CREATED_AT,
"updatedAt": queryv1.SpanSortField_SPAN_SORT_FIELD_UPDATED_AT,
"duration": queryv1.SpanSortField_SPAN_SORT_FIELD_DURATION,
"name": queryv1.SpanSortField_SPAN_SORT_FIELD_NAME,
"traceId": queryv1.SpanSortField_SPAN_SORT_FIELD_TRACE_ID,
}

// aggregateMetricByName maps CLI metric names to AggregateMetric enum values.
var aggregateMetricByName = map[string]driftquery.AggregateMetric{
"count": queryv1.AggregateMetric_AGGREGATE_METRIC_COUNT,
"errorCount": queryv1.AggregateMetric_AGGREGATE_METRIC_ERROR_COUNT,
"errorRate": queryv1.AggregateMetric_AGGREGATE_METRIC_ERROR_RATE,
"avgDuration": queryv1.AggregateMetric_AGGREGATE_METRIC_AVG_DURATION,
"minDuration": queryv1.AggregateMetric_AGGREGATE_METRIC_MIN_DURATION,
"maxDuration": queryv1.AggregateMetric_AGGREGATE_METRIC_MAX_DURATION,
"p50Duration": queryv1.AggregateMetric_AGGREGATE_METRIC_P50_DURATION,
"p95Duration": queryv1.AggregateMetric_AGGREGATE_METRIC_P95_DURATION,
"p99Duration": queryv1.AggregateMetric_AGGREGATE_METRIC_P99_DURATION,
}

// aggregateGroupFieldByName maps CLI group-by field names to
// AggregateGroupField enum values.
var aggregateGroupFieldByName = map[string]driftquery.AggregateGroupField{
"name": queryv1.AggregateGroupField_AGGREGATE_GROUP_FIELD_NAME,
"kind": queryv1.AggregateGroupField_AGGREGATE_GROUP_FIELD_KIND,
"packageName": queryv1.AggregateGroupField_AGGREGATE_GROUP_FIELD_PACKAGE_NAME,
"instrumentationName": queryv1.AggregateGroupField_AGGREGATE_GROUP_FIELD_INSTRUMENTATION_NAME,
"environment": queryv1.AggregateGroupField_AGGREGATE_GROUP_FIELD_ENVIRONMENT,
"statusCode": queryv1.AggregateGroupField_AGGREGATE_GROUP_FIELD_STATUS_CODE,
}

// timeBucketByName maps CLI time-bucket names to TimeBucket enum values.
var timeBucketByName = map[string]driftquery.TimeBucket{
"hour": queryv1.TimeBucket_TIME_BUCKET_HOUR,
"day": queryv1.TimeBucket_TIME_BUCKET_DAY,
"week": queryv1.TimeBucket_TIME_BUCKET_WEEK,
}

// selectableFieldByName maps CLI span-field names to SelectableSpanField
// enum values used by --fields selection.
var selectableFieldByName = map[string]driftquery.SelectableSpanField{
"id": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_ID,
"spanId": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_SPAN_ID,
"traceId": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_TRACE_ID,
"parentSpanId": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_PARENT_SPAN_ID,
"name": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_NAME,
"kind": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_KIND,
"status": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_STATUS,
"timestamp": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_TIMESTAMP,
"duration": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_DURATION,
"isRootSpan": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_IS_ROOT_SPAN,
"metadata": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_METADATA,
"packageName": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_PACKAGE_NAME,
"instrumentationName": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_INSTRUMENTATION_NAME,
"inputValue": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_INPUT_VALUE,
"outputValue": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_OUTPUT_VALUE,
"inputSchema": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_INPUT_SCHEMA,
"outputSchema": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_OUTPUT_SCHEMA,
"environment": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_ENVIRONMENT,
"createdAt": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_CREATED_AT,
"updatedAt": queryv1.SelectableSpanField_SELECTABLE_SPAN_FIELD_UPDATED_AT,
}

// castTypeByName maps lowercase castAs strings from --where JSON to
// CastType enum values (keys must be lowercase; lookups are lowercased).
var castTypeByName = map[string]queryv1.CastType{
"text": queryv1.CastType_CAST_TYPE_TEXT,
"int": queryv1.CastType_CAST_TYPE_INT,
"float": queryv1.CastType_CAST_TYPE_FLOAT,
"boolean": queryv1.CastType_CAST_TYPE_BOOLEAN,
}

// decodeStrategyByName maps lowercase decode strings from --where JSON to
// DecodeStrategy enum values (keys must be lowercase; lookups are lowercased).
var decodeStrategyByName = map[string]queryv1.DecodeStrategy{
"base64": queryv1.DecodeStrategy_DECODE_STRATEGY_BASE64,
}
Loading
Loading