diff --git a/arazzo/arazzo.go b/arazzo/arazzo.go
index 4e6d9ec..575e36d 100644
--- a/arazzo/arazzo.go
+++ b/arazzo/arazzo.go
@@ -109,11 +109,11 @@ func (a *Arazzo) Validate(ctx context.Context, opts ...validation.Option) []erro
arazzoVersion, err := version.Parse(a.Arazzo)
if err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("arazzo.version is invalid %s: %s", a.Arazzo, err.Error()), core, core.Arazzo))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("arazzo.version is invalid %s: %w", a.Arazzo, err), core, core.Arazzo))
}
if arazzoVersion != nil {
if arazzoVersion.GreaterThan(*MaximumSupportedVersion) {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("arazzo.version only Arazzo versions between %s and %s are supported", MinimumSupportedVersion, MaximumSupportedVersion), core, core.Arazzo))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationSupportedVersion, fmt.Errorf("arazzo.version only Arazzo versions between %s and %s are supported", MinimumSupportedVersion, MaximumSupportedVersion), core, core.Arazzo))
}
}
@@ -125,7 +125,7 @@ func (a *Arazzo) Validate(ctx context.Context, opts ...validation.Option) []erro
errs = append(errs, sourceDescription.Validate(ctx, opts...)...)
if _, ok := sourceDescriptionNames[sourceDescription.Name]; ok {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("sourceDescription.name %s is not unique", sourceDescription.Name), core, core.SourceDescriptions, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("sourceDescription.name %s is not unique", sourceDescription.Name), core, core.SourceDescriptions, i))
}
sourceDescriptionNames[sourceDescription.Name] = true
@@ -137,7 +137,7 @@ func (a *Arazzo) Validate(ctx context.Context, opts ...validation.Option) []erro
errs = append(errs, workflow.Validate(ctx, opts...)...)
if _, ok := workflowIds[workflow.WorkflowID]; ok {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("workflow.workflowId %s is not unique", workflow.WorkflowID), core, core.Workflows, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("workflow.workflowId %s is not unique", workflow.WorkflowID), core, core.Workflows, i))
}
workflowIds[workflow.WorkflowID] = true
diff --git a/arazzo/arazzo_examples_test.go b/arazzo/arazzo_examples_test.go
index e2f54e3..5d20c0b 100644
--- a/arazzo/arazzo_examples_test.go
+++ b/arazzo/arazzo_examples_test.go
@@ -190,6 +190,6 @@ func Example_validating() {
fmt.Printf("%s\n", err.Error())
}
// Output:
- // [3:3] info.version is missing
- // [13:9] step at least one of operationId, operationPath or workflowId fields must be set
+ // [3:3] error validation-required-field info.version is required
+ // [13:9] error validation-required-field step at least one of operationId, operationPath or workflowId fields must be set
}
diff --git a/arazzo/arazzo_test.go b/arazzo/arazzo_test.go
index 8c567aa..326746d 100644
--- a/arazzo/arazzo_test.go
+++ b/arazzo/arazzo_test.go
@@ -300,11 +300,11 @@ sourceDescriptions:
column int
underlyingError error
}{
- {line: 1, column: 1, underlyingError: validation.NewMissingFieldError("arazzo.workflows is missing")},
- {line: 1, column: 9, underlyingError: validation.NewValueValidationError("arazzo.version only Arazzo versions between 1.0.0 and 1.0.1 are supported")},
- {line: 4, column: 3, underlyingError: validation.NewMissingFieldError("info.version is missing")},
- {line: 6, column: 5, underlyingError: validation.NewMissingFieldError("sourceDescription.url is missing")},
- {line: 7, column: 11, underlyingError: validation.NewValueValidationError("sourceDescription.type must be one of [openapi, arazzo]")},
+ {line: 1, column: 1, underlyingError: errors.New("arazzo.workflows is required")},
+ {line: 1, column: 9, underlyingError: errors.New("arazzo.version only Arazzo versions between 1.0.0 and 1.0.1 are supported")},
+ {line: 4, column: 3, underlyingError: errors.New("info.version is required")},
+ {line: 6, column: 5, underlyingError: errors.New("sourceDescription.url is required")},
+ {line: 7, column: 11, underlyingError: errors.New("sourceDescription.type must be one of [openapi, arazzo]")},
}
require.Len(t, validationErrs, len(expectedErrors), "number of validation errors should match")
@@ -546,8 +546,8 @@ var stressTests = []struct {
args: args{
location: "https://raw.githubusercontent.com/Redocly/museum-openapi-example/2770b2b2e59832d245c7b0eb0badf6568d7efb53/arazzo/museum-api.arazzo.yaml",
validationIgnores: []string{
- "[71:24] invalid jsonpath expression: Error at line 1, column 7: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
- "[107:24] invalid jsonpath expression: Error at line 1, column 7: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
+ "[71:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 7: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
+ "[107:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 7: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
},
},
wantTitle: "Redocly Museum API Test Workflow",
@@ -564,7 +564,7 @@ var stressTests = []struct {
args: args{
location: "https://raw.githubusercontent.com/Redocly/warp-single-sidebar/b78fc09da52d7755e92e1bc8f990edd37421cbde/apis/arazzo.yaml",
validationIgnores: []string{
- "[63:24] invalid jsonpath expression: Error at line 1, column 12: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
+ "[63:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 12: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
},
},
wantTitle: "Warp API",
@@ -605,10 +605,10 @@ var stressTests = []struct {
args: args{
location: "https://raw.githubusercontent.com/OAI/Arazzo-Specification/23852b8b0d13ab1e3288a57a990611ffed45ab5d/examples/1.0.0/oauth.arazzo.yaml",
validationIgnores: []string{
- "[65:24] invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
- "[105:24] invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
- "[155:24] invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
- "[175:24] invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
+ "[65:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
+ "[105:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
+ "[155:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
+ "[175:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
},
},
wantTitle: "Example OAuth service",
@@ -632,7 +632,7 @@ var stressTests = []struct {
args: args{
location: "https://raw.githubusercontent.com/frankkilcommins/simple-spectral-arazzo-GA/4ec8856f1cf21c0f77597c715c150ef3e2772a89/apis/OnlineStore.arazzo.yaml",
validationIgnores: []string{
- "info.title is missing", // legit issue
+ "info.title is required", // legit issue
"operationId must be a valid expression if there are multiple OpenAPI source descriptions", // legit issue
"$responses.body.menuItems[0].subcategories[0].id", // legit issue
},
@@ -645,9 +645,9 @@ var stressTests = []struct {
args: args{
location: "https://raw.githubusercontent.com/leidenheit/itarazzo-library/3b335e1c4293444add52b5f2476420e2d871b1a5/src/test/resources/test.arazzo.yaml",
validationIgnores: []string{
- "expression is not valid, must begin with $: 4711Chocolate", // legit issue
- "[32:24] invalid jsonpath expression: Error at line 1, column 0: unexpected token", // unsupported version: draft-goessner-dispatch-jsonpath-00
- "[36:24] invalid jsonpath expression: Error at line 1, column 5: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00
+ "expression is not valid, must begin with $: 4711Chocolate", // legit issue
+ "[32:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 0: unexpected token", // unsupported version: draft-goessner-dispatch-jsonpath-00
+ "[36:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 5: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00
},
},
wantTitle: "A cookie eating workflow",
@@ -659,9 +659,9 @@ var stressTests = []struct {
validationIgnores: []string{
"jsonpointer must start with /: $.status", // legit issues TODO: improve the error returned as it is wrong
"jsonpointer must start with /: $.id", // legit issues TODO: improve the error returned as it is wrong
- "[81:24] invalid jsonpath expression: Error at line 1, column 7: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00
- "[110:24] invalid jsonpath expression: Error at line 1, column 5: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00
- "[114:24] invalid jsonpath expression: Error at line 1, column 9: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00
+ "[81:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 7: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00
+ "[110:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 5: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00
+ "[114:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 9: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00
},
},
wantTitle: "PetStore - Example of Workflows",
@@ -671,7 +671,7 @@ var stressTests = []struct {
args: args{
location: "https://raw.githubusercontent.com/ritza-co/e2e-testing-arazzo/c0615c3708a1e4c0fcaeb79edae78ddc4eb5ba82/arazzo.yaml",
validationIgnores: []string{
- "[42:24] invalid jsonpath expression: Error at line 1, column 8: unexpected token", // legit invalid RFC 9535 syntax
+ "[42:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 8: unexpected token", // legit invalid RFC 9535 syntax
},
},
wantTitle: "Build-a-Bot Workflow",
diff --git a/arazzo/components.go b/arazzo/components.go
index 515a75e..c952306 100644
--- a/arazzo/components.go
+++ b/arazzo/components.go
@@ -2,6 +2,7 @@ package arazzo
import (
"context"
+ "fmt"
"regexp"
"github.com/speakeasy-api/openapi/arazzo/core"
@@ -44,7 +45,7 @@ func (c *Components) Validate(ctx context.Context, opts ...validation.Option) []
for key, input := range c.Inputs.All() {
if !componentNameRegex.MatchString(key) {
- errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("components.inputs key must be a valid key [%s]: %s", componentNameRegex.String(), key), core, core.Inputs, key))
+ errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("components.inputs key must be a valid key [%s]: %s", componentNameRegex.String(), key), core, core.Inputs, key))
}
errs = append(errs, input.Validate(ctx, opts...)...)
@@ -52,7 +53,7 @@ func (c *Components) Validate(ctx context.Context, opts ...validation.Option) []
for key, parameter := range c.Parameters.All() {
if !componentNameRegex.MatchString(key) {
- errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("components.parameters key must be a valid key [%s]: %s", componentNameRegex.String(), key), core, core.Parameters, key))
+ errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("components.parameters key must be a valid key [%s]: %s", componentNameRegex.String(), key), core, core.Parameters, key))
}
paramOps := opts
@@ -63,7 +64,7 @@ func (c *Components) Validate(ctx context.Context, opts ...validation.Option) []
for key, successAction := range c.SuccessActions.All() {
if !componentNameRegex.MatchString(key) {
- errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("components.successActions key must be a valid key [%s]: %s", componentNameRegex.String(), key), core, core.SuccessActions, key))
+ errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("components.successActions key must be a valid key [%s]: %s", componentNameRegex.String(), key), core, core.SuccessActions, key))
}
successActionOps := opts
@@ -74,7 +75,7 @@ func (c *Components) Validate(ctx context.Context, opts ...validation.Option) []
for key, failureAction := range c.FailureActions.All() {
if !componentNameRegex.MatchString(key) {
- errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("components.failureActions key must be a valid key [%s]: %s", componentNameRegex.String(), key), core, core.FailureActions, key))
+ errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("components.failureActions key must be a valid key [%s]: %s", componentNameRegex.String(), key), core, core.FailureActions, key))
}
failureActionOps := opts
diff --git a/arazzo/core/criterion.go b/arazzo/core/criterion.go
index d5849e0..1e09d61 100644
--- a/arazzo/core/criterion.go
+++ b/arazzo/core/criterion.go
@@ -63,7 +63,7 @@ func (c *CriterionTypeUnion) Unmarshal(ctx context.Context, parentName string, n
c.DetermineValidity(validationErrs)
default:
return []error{
- validation.NewValidationError(validation.NewTypeMismatchError(parentName, "criterionTypeUnion expected string or object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode),
+ validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "criterionTypeUnion expected string or object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode),
}, nil
}
diff --git a/arazzo/core/reusable.go b/arazzo/core/reusable.go
index 5d32faf..63c02c0 100644
--- a/arazzo/core/reusable.go
+++ b/arazzo/core/reusable.go
@@ -34,10 +34,8 @@ func (r *Reusable[T]) Unmarshal(ctx context.Context, parentName string, node *ya
if resolvedNode.Kind != yaml.MappingNode {
r.SetValid(false, false)
- r.SetValid(false, false)
-
return []error{
- validation.NewValidationError(validation.NewTypeMismatchError(parentName, "reusable expected object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode),
+ validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "reusable expected object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode),
}, nil
}
diff --git a/arazzo/criterion/condition.go b/arazzo/criterion/condition.go
index 34e10c1..2b913a9 100644
--- a/arazzo/criterion/condition.go
+++ b/arazzo/criterion/condition.go
@@ -2,6 +2,7 @@ package criterion
import (
"errors"
+ "fmt"
"strings"
"github.com/speakeasy-api/openapi/expression"
@@ -81,21 +82,21 @@ func (s *Condition) Validate(valueNode *yaml.Node, opts ...validation.Option) []
errs := []error{}
if s.Expression == "" {
- errs = append(errs, validation.NewValidationError(validation.NewMissingValueError("expression is required"), valueNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("expression is required"), valueNode))
}
if err := s.Expression.Validate(); err != nil {
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError(err.Error()), valueNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSyntax, err, valueNode))
}
switch s.Operator {
case OperatorLT, OperatorLTE, OperatorGT, OperatorGTE, OperatorEQ, OperatorNE, OperatorNot, OperatorAnd, OperatorOr:
default:
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("operator must be one of [%s]", strings.Join([]string{string(OperatorLT), string(OperatorLTE), string(OperatorGT), string(OperatorGTE), string(OperatorEQ), string(OperatorNE), string(OperatorNot), string(OperatorAnd), string(OperatorOr)}, ", ")), valueNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("operator must be one of [%s]", strings.Join([]string{string(OperatorLT), string(OperatorLTE), string(OperatorGT), string(OperatorGTE), string(OperatorEQ), string(OperatorNE), string(OperatorNot), string(OperatorAnd), string(OperatorOr)}, ", ")), valueNode))
}
if s.Value == "" {
- errs = append(errs, validation.NewValidationError(validation.NewMissingValueError("value is required"), valueNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("value is required"), valueNode))
}
return errs
diff --git a/arazzo/criterion/criterion.go b/arazzo/criterion/criterion.go
index 14bdbff..3684a41 100644
--- a/arazzo/criterion/criterion.go
+++ b/arazzo/criterion/criterion.go
@@ -2,6 +2,7 @@ package criterion
import (
"context"
+ "errors"
"fmt"
"regexp"
"strings"
@@ -59,7 +60,7 @@ func (c *CriterionExpressionType) Validate(opts ...validation.Option) []error {
switch c.Version {
case CriterionTypeVersionDraftGoessnerDispatchJsonPath00:
default:
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("version must be one of [%s]", strings.Join([]string{string(CriterionTypeVersionDraftGoessnerDispatchJsonPath00)}, ", ")), core, core.Version))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("version must be one of [%s]", strings.Join([]string{string(CriterionTypeVersionDraftGoessnerDispatchJsonPath00)}, ", ")), core, core.Version))
}
case CriterionTypeXPath:
switch c.Version {
@@ -67,10 +68,10 @@ func (c *CriterionExpressionType) Validate(opts ...validation.Option) []error {
case CriterionTypeVersionXPath20:
case CriterionTypeVersionXPath10:
default:
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("version must be one of [%s]", strings.Join([]string{string(CriterionTypeVersionXPath30), string(CriterionTypeVersionXPath20), string(CriterionTypeVersionXPath10)}, ", ")), core, core.Version))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("version must be one of [%s]", strings.Join([]string{string(CriterionTypeVersionXPath30), string(CriterionTypeVersionXPath20), string(CriterionTypeVersionXPath10)}, ", ")), core, core.Version))
}
default:
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("type must be one of [%s]", strings.Join([]string{string(CriterionTypeJsonPath), string(CriterionTypeXPath)}, ", ")), core, core.Type))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("type must be one of [%s]", strings.Join([]string{string(CriterionTypeJsonPath), string(CriterionTypeXPath)}, ", ")), core, core.Type))
}
if len(errs) == 0 {
@@ -190,7 +191,7 @@ func (c *Criterion) Validate(opts ...validation.Option) []error {
errs := []error{}
if c.Condition == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("condition is required"), core, core.Condition))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("condition is required"), core, core.Condition))
}
if c.Type.Type != nil {
@@ -200,19 +201,19 @@ func (c *Criterion) Validate(opts ...validation.Option) []error {
case CriterionTypeJsonPath:
case CriterionTypeXPath:
default:
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("type must be one of [%s]", strings.Join([]string{string(CriterionTypeSimple), string(CriterionTypeRegex), string(CriterionTypeJsonPath), string(CriterionTypeXPath)}, ", ")), core, core.Type))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("type must be one of [%s]", strings.Join([]string{string(CriterionTypeSimple), string(CriterionTypeRegex), string(CriterionTypeJsonPath), string(CriterionTypeXPath)}, ", ")), core, core.Type))
}
} else if c.Type.ExpressionType != nil {
errs = append(errs, c.Type.ExpressionType.Validate(opts...)...)
}
if c.Type.IsTypeProvided() && c.Context == nil {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("context is required, if type is set"), core, core.Context))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("context is required, if type is set"), core, core.Context))
}
if c.Context != nil {
if err := c.Context.Validate(); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError(err.Error()), core, core.Context))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, err, core, core.Context))
}
}
@@ -235,18 +236,18 @@ func (c *Criterion) validateCondition(opts ...validation.Option) []error {
case CriterionTypeSimple:
cond, err := newCondition(c.Condition)
if err != nil && c.Context == nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError(err.Error()), core, core.Condition))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, err, core, core.Condition))
} else if cond != nil {
errs = append(errs, cond.Validate(valueNode, opts...)...)
}
case CriterionTypeRegex:
_, err := regexp.Compile(c.Condition)
if err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("invalid regex expression: %s", err.Error()), core, core.Condition))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("invalid regex expression: %w", err), core, core.Condition))
}
case CriterionTypeJsonPath:
if _, err := jsonpath.NewPath(c.Condition); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("invalid jsonpath expression: %s", err), core, core.Condition))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("invalid jsonpath expression: %w", err), core, core.Condition))
}
case CriterionTypeXPath:
// TODO validate xpath
diff --git a/arazzo/failureaction.go b/arazzo/failureaction.go
index fb36fd8..e11b5d3 100644
--- a/arazzo/failureaction.go
+++ b/arazzo/failureaction.go
@@ -3,6 +3,7 @@ package arazzo
import (
"context"
"errors"
+ "fmt"
"strings"
"github.com/speakeasy-api/openapi/arazzo/core"
@@ -69,22 +70,22 @@ func (f *FailureAction) Validate(ctx context.Context, opts ...validation.Option)
errs := []error{}
if core.Name.Present && f.Name == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("failureAction.name is required"), core, core.Name))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("failureAction.name is required"), core, core.Name))
}
switch f.Type {
case FailureActionTypeEnd:
if f.WorkflowID != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.workflowId is not allowed when type: end is specified"), core, core.WorkflowID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("failureAction.workflowId is not allowed when type: end is specified"), core, core.WorkflowID))
}
if f.StepID != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.stepId is not allowed when type: end is specified"), core, core.StepID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("failureAction.stepId is not allowed when type: end is specified"), core, core.StepID))
}
if f.RetryAfter != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.retryAfter is not allowed when type: end is specified"), core, core.RetryAfter))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("failureAction.retryAfter is not allowed when type: end is specified"), core, core.RetryAfter))
}
if f.RetryLimit != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.retryLimit is not allowed when type: end is specified"), core, core.RetryLimit))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("failureAction.retryLimit is not allowed when type: end is specified"), core, core.RetryLimit))
}
case FailureActionTypeGoto:
workflowIDNode := core.WorkflowID.GetKeyNodeOrRoot(core.RootNode)
@@ -100,10 +101,10 @@ func (f *FailureAction) Validate(ctx context.Context, opts ...validation.Option)
required: true,
}, opts...)...)
if f.RetryAfter != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.retryAfter is not allowed when type: goto is specified"), core, core.RetryAfter))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("failureAction.retryAfter is not allowed when type: goto is specified"), core, core.RetryAfter))
}
if f.RetryLimit != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.retryLimit is not allowed when type: goto is specified"), core, core.RetryLimit))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("failureAction.retryLimit is not allowed when type: goto is specified"), core, core.RetryLimit))
}
case FailureActionTypeRetry:
workflowIDNode := core.WorkflowID.GetKeyNodeOrRoot(core.RootNode)
@@ -120,16 +121,16 @@ func (f *FailureAction) Validate(ctx context.Context, opts ...validation.Option)
}, opts...)...)
if f.RetryAfter != nil {
if *f.RetryAfter < 0 {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.retryAfter must be greater than or equal to 0"), core, core.RetryAfter))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, errors.New("failureAction.retryAfter must be greater than or equal to 0"), core, core.RetryAfter))
}
}
if f.RetryLimit != nil {
if *f.RetryLimit < 0 {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.retryLimit must be greater than or equal to 0"), core, core.RetryLimit))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, errors.New("failureAction.retryLimit must be greater than or equal to 0"), core, core.RetryLimit))
}
}
default:
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.type must be one of [%s]", strings.Join([]string{string(FailureActionTypeEnd), string(FailureActionTypeGoto), string(FailureActionTypeRetry)}, ", ")), core, core.Type))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("failureAction.type must be one of [%s]", strings.Join([]string{string(FailureActionTypeEnd), string(FailureActionTypeGoto), string(FailureActionTypeRetry)}, ", ")), core, core.Type))
}
for i := range f.Criteria {
diff --git a/arazzo/info.go b/arazzo/info.go
index 48cb5b4..29000f2 100644
--- a/arazzo/info.go
+++ b/arazzo/info.go
@@ -2,6 +2,7 @@ package arazzo
import (
"context"
+ "errors"
"github.com/speakeasy-api/openapi/arazzo/core"
"github.com/speakeasy-api/openapi/extensions"
@@ -34,11 +35,11 @@ func (i *Info) Validate(ctx context.Context, opts ...validation.Option) []error
errs := []error{}
if core.Title.Present && i.Title == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("info.title is required"), core, core.Title))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("info.title is required"), core, core.Title))
}
if core.Version.Present && i.Version == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("info.version is required"), core, core.Version))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("info.version is required"), core, core.Version))
}
i.Valid = len(errs) == 0 && core.GetValid()
diff --git a/arazzo/parameter.go b/arazzo/parameter.go
index e97e5c9..9b5a446 100644
--- a/arazzo/parameter.go
+++ b/arazzo/parameter.go
@@ -2,6 +2,8 @@ package arazzo
import (
"context"
+ "errors"
+ "fmt"
"strings"
"github.com/speakeasy-api/openapi/arazzo/core"
@@ -55,7 +57,7 @@ func (p *Parameter) Validate(ctx context.Context, opts ...validation.Option) []e
s := validation.GetContextObject[Step](o)
if core.Name.Present && p.Name == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("parameter fieldname is required"), core, core.Name))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("parameter fieldname is required"), core, core.Name))
}
in := In("")
@@ -71,25 +73,25 @@ func (p *Parameter) Validate(ctx context.Context, opts ...validation.Option) []e
default:
if p.In == nil || in == "" {
if w == nil && s != nil && s.WorkflowID == nil {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("parameter.in is required within a step when workflowId is not set"), core, core.In))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("parameter.in is required within a step when workflowId is not set"), core, core.In))
}
}
if in != "" {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter.in must be one of [%s] but was %s", strings.Join([]string{string(InPath), string(InQuery), string(InHeader), string(InCookie)}, ", "), in), core, core.In))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("parameter.in must be one of [%s] but was %s", strings.Join([]string{string(InPath), string(InQuery), string(InHeader), string(InCookie)}, ", "), in), core, core.In))
}
}
if core.Value.Present && p.Value == nil {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("parameter.value is required"), core, core.Value))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("parameter.value is required"), core, core.Value))
} else if p.Value != nil {
_, expression, err := expression.GetValueOrExpressionValue(p.Value)
if err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError(err.Error()), core, core.Value))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, err, core, core.Value))
}
if expression != nil {
if err := expression.Validate(); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError(err.Error()), core, core.Value))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, err, core, core.Value))
}
}
}
diff --git a/arazzo/payloadreplacement.go b/arazzo/payloadreplacement.go
index a879396..ad0e850 100644
--- a/arazzo/payloadreplacement.go
+++ b/arazzo/payloadreplacement.go
@@ -2,6 +2,8 @@ package arazzo
import (
"context"
+ "errors"
+ "fmt"
"github.com/speakeasy-api/openapi/arazzo/core"
"github.com/speakeasy-api/openapi/expression"
@@ -32,23 +34,23 @@ func (p *PayloadReplacement) Validate(ctx context.Context, opts ...validation.Op
errs := []error{}
if core.Target.Present && p.Target == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("payloadReplacement.target is required"), core, core.Target))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("payloadReplacement.target is required"), core, core.Target))
}
if err := p.Target.Validate(); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("payloadReplacement.target is invalid: "+err.Error()), core, core.Target))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("payloadReplacement.target is invalid: %w", err), core, core.Target))
}
if core.Value.Present && p.Value == nil {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("payloadReplacement.value is required"), core, core.Value))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("payloadReplacement.value is required"), core, core.Value))
} else if p.Value != nil {
_, expression, err := expression.GetValueOrExpressionValue(p.Value)
if err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("payloadReplacement.value is invalid: "+err.Error()), core, core.Value))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("payloadReplacement.value is invalid: %w", err), core, core.Value))
}
if expression != nil {
if err := expression.Validate(); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("payloadReplacement.value is invalid: "+err.Error()), core, core.Value))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("payloadReplacement.value is invalid: %w", err), core, core.Value))
}
}
}
diff --git a/arazzo/requestbody.go b/arazzo/requestbody.go
index c7f2ac6..89c1753 100644
--- a/arazzo/requestbody.go
+++ b/arazzo/requestbody.go
@@ -2,6 +2,7 @@ package arazzo
import (
"context"
+ "fmt"
"mime"
"github.com/speakeasy-api/openapi/arazzo/core"
@@ -36,7 +37,7 @@ func (r *RequestBody) Validate(ctx context.Context, opts ...validation.Option) [
if r.ContentType != nil {
_, _, err := mime.ParseMediaType(*r.ContentType)
if err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("requestBody.contentType is not valid: %s", err.Error()), core, core.ContentType))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("requestBody.contentType is not valid: %w", err), core, core.ContentType))
}
}
@@ -47,7 +48,7 @@ func (r *RequestBody) Validate(ctx context.Context, opts ...validation.Option) [
if err == nil && exp != nil {
// Only validate if the entire payload IS an expression (not just contains expressions)
if err := exp.Validate(); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("requestBody.payload expression is not valid: %s", err.Error()), core, core.Payload))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("requestBody.payload expression is not valid: %w", err), core, core.Payload))
}
}
// If exp is nil, the payload is a value (not an expression) - no validation needed
diff --git a/arazzo/reusable.go b/arazzo/reusable.go
index 686f5bd..36ccf64 100644
--- a/arazzo/reusable.go
+++ b/arazzo/reusable.go
@@ -3,6 +3,7 @@ package arazzo
import (
"context"
"errors"
+ "fmt"
"reflect"
"unicode"
"unicode/utf8"
@@ -117,7 +118,7 @@ func (r *Reusable[T, V, C]) Validate(ctx context.Context, opts ...validation.Opt
case "parameters":
default:
if r.Value != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("reusableParameter.value is not allowed when object is not a parameter"), core, core.Value))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("reusableParameter.value is not allowed when object is not a parameter"), core, core.Value))
}
}
@@ -136,7 +137,7 @@ func (r *Reusable[T, V, C]) validateReference(ctx context.Context, a *Arazzo, ob
core := r.GetCore()
if err := r.Reference.Validate(); err != nil {
return []error{
- validation.NewValueError(validation.NewValueValidationError("%s.reference is invalid: %s", componentTypeToReusableType(objComponentType), err.Error()), core, core.Reference),
+ validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("%s.reference is invalid: %w", componentTypeToReusableType(objComponentType), err), core, core.Reference),
}
}
@@ -144,13 +145,13 @@ func (r *Reusable[T, V, C]) validateReference(ctx context.Context, a *Arazzo, ob
if typ != expression.ExpressionTypeComponents {
return []error{
- validation.NewValueError(validation.NewValueValidationError("%s.reference must be a components expression, got %s", componentTypeToReusableType(objComponentType), r.Reference.GetType()), core, core.Reference),
+ validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("%s.reference must be a components expression, got %s", componentTypeToReusableType(objComponentType), r.Reference.GetType()), core, core.Reference),
}
}
if componentType == "" || len(references) != 1 {
return []error{
- validation.NewValueError(validation.NewValueValidationError("%s.reference must be a components expression with 3 parts, got %s", componentTypeToReusableType(objComponentType), *r.Reference), core, core.Reference),
+ validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("%s.reference must be a components expression with 3 parts, got %s", componentTypeToReusableType(objComponentType), *r.Reference), core, core.Reference),
}
}
@@ -186,7 +187,7 @@ func (r *Reusable[T, V, C]) validateReference(ctx context.Context, a *Arazzo, ob
}, opts...)
default:
return []error{
- validation.NewValueError(validation.NewValueValidationError("reference to %s is not valid, valid components are [parameters, successActions, failureActions]", componentType), core, core.Reference),
+ validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("reference to %s is not valid, valid components are [parameters, successActions, failureActions]", componentType), core, core.Reference),
}
}
}
@@ -203,20 +204,20 @@ type validateComponentReferenceArgs[T any] struct {
func validateComponentReference[T any, V interfaces.Validator[T]](ctx context.Context, args validateComponentReferenceArgs[V], opts ...validation.Option) []error {
if args.componentType != args.objComponentType {
return []error{
- validation.NewValidationError(validation.NewValueValidationError("%s.reference expected a %s reference got %s", componentTypeToReusableType(args.objComponentType), args.objComponentType, args.componentType), args.referenceValueNode),
+ validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, fmt.Errorf("%s.reference expected a %s reference got %s", componentTypeToReusableType(args.objComponentType), args.objComponentType, args.componentType), args.referenceValueNode),
}
}
if args.components == nil {
return []error{
- validation.NewValidationError(validation.NewValueValidationError("%s.reference to missing component %s, components.%s not present", componentTypeToReusableType(args.objComponentType), *args.reference, args.componentType), args.referenceValueNode),
+ validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("%s.reference to missing component %s, components.%s not present", componentTypeToReusableType(args.objComponentType), *args.reference, args.componentType), args.referenceValueNode),
}
}
component, ok := args.components.Get(args.componentName)
if !ok {
return []error{
- validation.NewValidationError(validation.NewValueValidationError("%s.reference to missing component %s, components.%s.%s not present", componentTypeToReusableType(args.objComponentType), *args.reference, args.componentType, args.componentName), args.referenceValueNode),
+ validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("%s.reference to missing component %s, components.%s.%s not present", componentTypeToReusableType(args.objComponentType), *args.reference, args.componentType, args.componentName), args.referenceValueNode),
}
}
diff --git a/arazzo/sourcedescription.go b/arazzo/sourcedescription.go
index a9a6b61..8997e53 100644
--- a/arazzo/sourcedescription.go
+++ b/arazzo/sourcedescription.go
@@ -2,6 +2,8 @@ package arazzo
import (
"context"
+ "errors"
+ "fmt"
"net/url"
"strings"
@@ -57,14 +59,14 @@ func (s *SourceDescription) Validate(ctx context.Context, opts ...validation.Opt
errs := []error{}
if core.Name.Present && s.Name == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("sourceDescription.name is required"), core, core.Name))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("sourceDescription.name is required"), core, core.Name))
}
if core.URL.Present && s.URL == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("sourceDescription.url is required"), core, core.URL))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("sourceDescription.url is required"), core, core.URL))
} else if core.URL.Present {
if _, err := url.Parse(s.URL); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("sourceDescription.url is not a valid url/uri according to RFC 3986: %s", err), core, core.URL))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("sourceDescription.url is not a valid url/uri according to RFC 3986: %w", err), core, core.URL))
}
}
@@ -72,7 +74,7 @@ func (s *SourceDescription) Validate(ctx context.Context, opts ...validation.Opt
case SourceDescriptionTypeOpenAPI:
case SourceDescriptionTypeArazzo:
default:
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("sourceDescription.type must be one of [%s]", strings.Join([]string{SourceDescriptionTypeOpenAPI, SourceDescriptionTypeArazzo}, ", ")), core, core.Type))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("sourceDescription.type must be one of [%s]", strings.Join([]string{SourceDescriptionTypeOpenAPI, SourceDescriptionTypeArazzo}, ", ")), core, core.Type))
}
s.Valid = len(errs) == 0 && core.GetValid()
diff --git a/arazzo/step.go b/arazzo/step.go
index 40605f4..620dc8f 100644
--- a/arazzo/step.go
+++ b/arazzo/step.go
@@ -90,10 +90,10 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error
errs := []error{}
if core.StepID.Present && s.StepID == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("step.stepId is required"), core, core.StepID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("step.stepId is required"), core, core.StepID))
} else if s.StepID != "" {
if !stepIDRegex.MatchString(s.StepID) {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.stepId must be a valid name [%s]: %s", stepIDRegex.String(), s.StepID), core, core.StepID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("step.stepId must be a valid name [%s]: %s", stepIDRegex.String(), s.StepID), core, core.StepID))
}
numStepsWithID := 0
@@ -103,7 +103,7 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error
}
}
if numStepsWithID > 1 {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.stepId must be unique within the workflow, found %d steps with the same stepId", numStepsWithID), core, core.StepID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.stepId must be unique within the workflow, found %d steps with the same stepId", numStepsWithID), core, core.StepID))
}
}
@@ -121,10 +121,10 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error
}
switch numSet {
case 0:
- errs = append(errs, validation.NewValidationError(validation.NewMissingValueError("step at least one of operationId, operationPath or workflowId fields must be set"), core.RootNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("step at least one of operationId, operationPath or workflowId fields must be set"), core.RootNode))
case 1:
default:
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("step only one of operationId, operationPath or workflowId.can be set"), core.RootNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("step only one of operationId, operationPath or workflowId can be set"), core.RootNode))
}
if s.OperationID != nil {
@@ -135,65 +135,65 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error
}
}
if numOpenAPISourceDescriptions > 1 && !s.OperationID.IsExpression() {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationId must be a valid expression if there are multiple OpenAPI source descriptions"), core, core.OperationID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, errors.New("step.operationId must be a valid expression if there are multiple OpenAPI source descriptions"), core, core.OperationID))
}
if s.OperationID.IsExpression() {
if err := s.OperationID.Validate(); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationId expression is invalid: %s", err.Error()), core, core.OperationID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.operationId expression is invalid: %w", err), core, core.OperationID))
}
typ, sourceDescriptionName, _, _ := s.OperationID.GetParts()
if typ != expression.ExpressionTypeSourceDescriptions {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationId must be a sourceDescriptions expression, got %s", typ), core, core.OperationID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.operationId must be a sourceDescriptions expression, got %s", typ), core, core.OperationID))
}
if a.SourceDescriptions.Find(sourceDescriptionName) == nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationId referencing sourceDescription %s not found", sourceDescriptionName), core, core.OperationID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("step.operationId referencing sourceDescription %s not found", sourceDescriptionName), core, core.OperationID))
}
}
}
if s.OperationPath != nil {
if err := s.OperationPath.Validate(); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationPath expression is invalid: %s", err.Error()), core, core.OperationPath))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.operationPath expression is invalid: %w", err), core, core.OperationPath))
}
typ, sourceDescriptionName, expressionParts, jp := s.OperationPath.GetParts()
if typ != expression.ExpressionTypeSourceDescriptions {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationPath must be a sourceDescriptions expression, got %s", typ), core, core.OperationPath))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.operationPath must be a sourceDescriptions expression, got %s", typ), core, core.OperationPath))
}
if a.SourceDescriptions.Find(sourceDescriptionName) == nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationPath referencing sourceDescription %s not found", sourceDescriptionName), core, core.OperationPath))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("step.operationPath referencing sourceDescription %s not found", sourceDescriptionName), core, core.OperationPath))
}
if len(expressionParts) != 1 || expressionParts[0] != "url" {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationPath must reference the url of a sourceDescription"), core, core.OperationPath))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, errors.New("step.operationPath must reference the url of a sourceDescription"), core, core.OperationPath))
}
if jp == "" {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationPath must contain a json pointer to the operation path within the sourceDescription"), core, core.OperationPath))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("step.operationPath must contain a json pointer to the operation path within the sourceDescription"), core, core.OperationPath))
}
}
if s.WorkflowID != nil {
if s.WorkflowID.IsExpression() {
if err := s.WorkflowID.Validate(); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.workflowId expression is invalid: %s", err.Error()), core, core.WorkflowID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.workflowId expression is invalid: %w", err), core, core.WorkflowID))
}
typ, sourceDescriptionName, _, _ := s.WorkflowID.GetParts()
if typ != expression.ExpressionTypeSourceDescriptions {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.workflowId must be a sourceDescriptions expression, got %s", typ), core, core.WorkflowID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.workflowId must be a sourceDescriptions expression, got %s", typ), core, core.WorkflowID))
}
if a.SourceDescriptions.Find((sourceDescriptionName)) == nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.workflowId referencing sourceDescription %s not found", sourceDescriptionName), core, core.WorkflowID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("step.workflowId referencing sourceDescription %s not found", sourceDescriptionName), core, core.WorkflowID))
}
} else if a.Workflows.Find(pointer.Value(s.WorkflowID).String()) == nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.workflowId referencing workflow %s not found", *s.WorkflowID), core, core.WorkflowID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("step.workflowId referencing workflow %s not found", *s.WorkflowID), core, core.WorkflowID))
}
}
@@ -206,14 +206,14 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error
if parameter.Reference != nil {
_, ok := parameterRefs[string(*parameter.Reference)]
if ok {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("step.parameters duplicate parameter found with reference %s", *parameter.Reference), core, core.Parameters, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.parameters duplicate parameter found with reference %s", *parameter.Reference), core, core.Parameters, i))
}
parameterRefs[string(*parameter.Reference)] = true
} else if parameter.Object != nil {
id := fmt.Sprintf("%s.%v", parameter.Object.Name, parameter.Object.In)
_, ok := parameters[id]
if ok {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("step.parameters duplicate parameter found with name %s and in %v", parameter.Object.Name, parameter.Object.In), core, core.Parameters, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.parameters duplicate parameter found with name %s and in %v", parameter.Object.Name, parameter.Object.In), core, core.Parameters, i))
}
parameters[id] = true
}
@@ -221,7 +221,7 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error
if s.RequestBody != nil {
if s.WorkflowID != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.requestBody should not be set when workflowId is set"), core, core.RequestBody))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("step.requestBody should not be set when workflowId is set"), core, core.RequestBody))
}
errs = append(errs, s.RequestBody.Validate(ctx, opts...)...)
@@ -240,14 +240,14 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error
if onSuccess.Reference != nil {
_, ok := successActionRefs[string(*onSuccess.Reference)]
if ok {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("step.onSuccess duplicate successAction found with reference %s", *onSuccess.Reference), core, core.OnSuccess, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.onSuccess duplicate successAction found with reference %s", *onSuccess.Reference), core, core.OnSuccess, i))
}
successActionRefs[string(*onSuccess.Reference)] = true
} else if onSuccess.Object != nil {
id := fmt.Sprintf("%s.%v", onSuccess.Object.Name, onSuccess.Object.Type)
_, ok := successActions[id]
if ok {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("step.onSuccess duplicate successAction found with name %s and type %v", onSuccess.Object.Name, onSuccess.Object.Type), core, core.OnSuccess, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.onSuccess duplicate successAction found with name %s and type %v", onSuccess.Object.Name, onSuccess.Object.Type), core, core.OnSuccess, i))
}
successActions[id] = true
}
@@ -262,14 +262,14 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error
if onFailure.Reference != nil {
_, ok := failureActionRefs[string(*onFailure.Reference)]
if ok {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("step.onFailure duplicate failureAction found with reference %s", *onFailure.Reference), core, core.OnFailure, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.onFailure duplicate failureAction found with reference %s", *onFailure.Reference), core, core.OnFailure, i))
}
failureActionRefs[string(*onFailure.Reference)] = true
} else if onFailure.Object != nil {
id := fmt.Sprintf("%s.%v", onFailure.Object.Name, onFailure.Object.Type)
_, ok := failureActions[id]
if ok {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("step.onFailure duplicate failureAction found with name %s and type %v", onFailure.Object.Name, onFailure.Object.Type), core, core.OnFailure, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.onFailure duplicate failureAction found with name %s and type %v", onFailure.Object.Name, onFailure.Object.Type), core, core.OnFailure, i))
}
failureActions[id] = true
}
@@ -277,11 +277,11 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error
for name, output := range s.Outputs.All() {
if !outputNameRegex.MatchString(name) {
- errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("step.outputs name must be a valid name [%s]: %s", outputNameRegex.String(), name), core, core.Outputs, name))
+ errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("step.outputs name must be a valid name [%s]: %s", outputNameRegex.String(), name), core, core.Outputs, name))
}
if err := output.Validate(); err != nil {
- errs = append(errs, validation.NewMapValueError(validation.NewValueValidationError("step.outputs expression is invalid: %s", err.Error()), core, core.Outputs, name))
+ errs = append(errs, validation.NewMapValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.outputs expression is invalid: %w", err), core, core.Outputs, name))
}
}
diff --git a/arazzo/successaction.go b/arazzo/successaction.go
index e73d4fe..80e0c3f 100644
--- a/arazzo/successaction.go
+++ b/arazzo/successaction.go
@@ -3,6 +3,7 @@ package arazzo
import (
"context"
"errors"
+ "fmt"
"strings"
"github.com/speakeasy-api/openapi/arazzo/core"
@@ -64,16 +65,16 @@ func (s *SuccessAction) Validate(ctx context.Context, opts ...validation.Option)
errs := []error{}
if core.Name.Present && s.Name == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("successAction.name is required"), core, core.Name))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("successAction.name is required"), core, core.Name))
}
switch s.Type {
case SuccessActionTypeEnd:
if s.WorkflowID != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("successAction.workflowId is not allowed when type: end is specified"), core, core.WorkflowID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("successAction.workflowId is not allowed when type: end is specified"), core, core.WorkflowID))
}
if s.StepID != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("successAction.stepId is not allowed when type: end is specified"), core, core.StepID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("successAction.stepId is not allowed when type: end is specified"), core, core.StepID))
}
case SuccessActionTypeGoto:
workflowIDNode := core.WorkflowID.GetKeyNodeOrRoot(core.RootNode)
@@ -90,7 +91,7 @@ func (s *SuccessAction) Validate(ctx context.Context, opts ...validation.Option)
required: true,
}, opts...)...)
default:
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("successAction.type must be one of [%s]", strings.Join([]string{string(SuccessActionTypeEnd), string(SuccessActionTypeGoto)}, ", ")), core, core.Type))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("successAction.type must be one of [%s]", strings.Join([]string{string(SuccessActionTypeEnd), string(SuccessActionTypeGoto)}, ", ")), core, core.Type))
}
for i := range s.Criteria {
@@ -120,28 +121,28 @@ func validationActionWorkflowIDAndStepID(ctx context.Context, parentName string,
errs := []error{}
if params.required && params.workflowID == nil && params.stepID == nil {
- errs = append(errs, validation.NewValidationError(validation.NewMissingValueError("%s.workflowId or stepId is required", parentName), params.workflowIDNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, fmt.Errorf("%s.workflowId or stepId is required", parentName), params.workflowIDNode))
}
if params.workflowID != nil && params.stepID != nil {
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.workflowId and stepId are mutually exclusive, only one can be specified", parentName), params.workflowIDNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, fmt.Errorf("%s.workflowId and stepId are mutually exclusive, only one can be specified", parentName), params.workflowIDNode))
}
if params.workflowID != nil {
if params.workflowID.IsExpression() {
if err := params.workflowID.Validate(); err != nil {
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.workflowId expression is invalid: %s", parentName, err.Error()), params.workflowIDNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("%s.workflowId expression is invalid: %w", parentName, err), params.workflowIDNode))
}
typ, sourceDescriptionName, _, _ := params.workflowID.GetParts()
if typ != expression.ExpressionTypeSourceDescriptions {
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.workflowId must be a sourceDescriptions expression, got %s", parentName, typ), params.workflowIDNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("%s.workflowId must be a sourceDescriptions expression, got %s", parentName, typ), params.workflowIDNode))
}
if params.arazzo.SourceDescriptions.Find(sourceDescriptionName) == nil {
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.sourceDescription value %s not found", parentName, sourceDescriptionName), params.workflowIDNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("%s.sourceDescription value %s not found", parentName, sourceDescriptionName), params.workflowIDNode))
}
} else if params.arazzo.Workflows.Find(pointer.Value(params.workflowID).String()) == nil {
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.workflowId value %s does not exist", parentName, *params.workflowID), params.workflowIDNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("%s.workflowId value %s does not exist", parentName, *params.workflowID), params.workflowIDNode))
}
}
if params.stepID != nil {
@@ -206,11 +207,11 @@ func validationActionWorkflowIDAndStepID(ctx context.Context, parentName string,
}
if !foundStepId {
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.stepId value %s does not exist in any parent workflows", parentName, pointer.Value(params.stepID)), params.workflowIDNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("%s.stepId value %s does not exist in any parent workflows", parentName, pointer.Value(params.stepID)), params.workflowIDNode))
}
}
} else if w.Steps.Find(pointer.Value(params.stepID)) == nil {
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.stepId value %s does not exist in workflow %s", parentName, pointer.Value(params.stepID), w.WorkflowID), params.workflowIDNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("%s.stepId value %s does not exist in workflow %s", parentName, pointer.Value(params.stepID), w.WorkflowID), params.workflowIDNode))
}
}
diff --git a/arazzo/workflow.go b/arazzo/workflow.go
index 411685c..08eef2a 100644
--- a/arazzo/workflow.go
+++ b/arazzo/workflow.go
@@ -3,6 +3,7 @@ package arazzo
import (
"context"
"errors"
+ "fmt"
"regexp"
"github.com/speakeasy-api/openapi/arazzo/core"
@@ -78,7 +79,7 @@ func (w *Workflow) Validate(ctx context.Context, opts ...validation.Option) []er
errs := []error{}
if core.WorkflowID.Present && w.WorkflowID == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("workflow.workflowId is required"), core, core.WorkflowID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("workflow.workflowId is required"), core, core.WorkflowID))
}
if w.Inputs != nil {
@@ -88,20 +89,20 @@ func (w *Workflow) Validate(ctx context.Context, opts ...validation.Option) []er
for i, dependsOn := range w.DependsOn {
if dependsOn.IsExpression() {
if err := dependsOn.Validate(); err != nil {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("workflow.dependsOn expression is invalid: %s", err.Error()), core, core.DependsOn, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("workflow.dependsOn expression is invalid: %w", err), core, core.DependsOn, i))
}
typ, sourceDescriptionName, _, _ := dependsOn.GetParts()
if typ != expression.ExpressionTypeSourceDescriptions {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("workflow.dependsOn must be a sourceDescriptions expression if not a workflowId, got %s", typ), core, core.DependsOn, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("workflow.dependsOn must be a sourceDescriptions expression if not a workflowId, got %s", typ), core, core.DependsOn, i))
}
if a.SourceDescriptions.Find(sourceDescriptionName) == nil {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("workflow.dependsOn sourceDescription %s not found", sourceDescriptionName), core, core.DependsOn, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("workflow.dependsOn sourceDescription %s not found", sourceDescriptionName), core, core.DependsOn, i))
}
} else if a.Workflows.Find(string(dependsOn)) == nil {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("workflow.dependsOn workflowId %s not found", dependsOn), core, core.DependsOn, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("workflow.dependsOn workflowId %s not found", dependsOn), core, core.DependsOn, i))
}
}
@@ -119,11 +120,11 @@ func (w *Workflow) Validate(ctx context.Context, opts ...validation.Option) []er
for name, output := range w.Outputs.All() {
if !outputNameRegex.MatchString(name) {
- errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("workflow.outputs name must be a valid name [%s]: %s", outputNameRegex.String(), name), core, core.Outputs, name))
+ errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("workflow.outputs name must be a valid name [%s]: %s", outputNameRegex.String(), name), core, core.Outputs, name))
}
if err := output.Validate(); err != nil {
- errs = append(errs, validation.NewMapValueError(validation.NewValueValidationError("workflow.outputs expression is invalid: %s", err.Error()), core, core.Outputs, name))
+ errs = append(errs, validation.NewMapValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("workflow.outputs expression is invalid: %w", err), core, core.Outputs, name))
}
}
diff --git a/cmd/openapi/commands/openapi/explore.go b/cmd/openapi/commands/openapi/explore.go
index 9694135..152f202 100644
--- a/cmd/openapi/commands/openapi/explore.go
+++ b/cmd/openapi/commands/openapi/explore.go
@@ -2,6 +2,7 @@ package openapi
import (
"context"
+ "errors"
"fmt"
"os"
"path/filepath"
@@ -56,7 +57,7 @@ func runExplore(cmd *cobra.Command, args []string) error {
}
if len(operations) == 0 {
- return fmt.Errorf("no operations found in the OpenAPI document")
+ return errors.New("no operations found in the OpenAPI document")
}
// Get document info for display
@@ -95,7 +96,7 @@ func loadOpenAPIDocument(ctx context.Context, file string) (*openapi.OpenAPI, er
return nil, fmt.Errorf("failed to unmarshal OpenAPI document: %w", err)
}
if doc == nil {
- return nil, fmt.Errorf("failed to parse OpenAPI document: document is nil")
+ return nil, errors.New("failed to parse OpenAPI document: document is nil")
}
// Report validation errors as warnings but continue
diff --git a/cmd/openapi/commands/openapi/lint.go b/cmd/openapi/commands/openapi/lint.go
new file mode 100644
index 0000000..2959b33
--- /dev/null
+++ b/cmd/openapi/commands/openapi/lint.go
@@ -0,0 +1,143 @@
+package openapi
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "path/filepath"
+
+ "github.com/speakeasy-api/openapi/linter"
+ "github.com/speakeasy-api/openapi/openapi"
+ openapiLinter "github.com/speakeasy-api/openapi/openapi/linter"
+ "github.com/spf13/cobra"
+)
+
+var lintCmd = &cobra.Command{
+ Use: "lint <file>",
+ Short: "Lint an OpenAPI specification document",
+ Long: `Lint an OpenAPI specification document for style, consistency, and best practices.
+
+This command runs both spec validation and additional lint rules including:
+- Path parameter validation
+- Operation ID requirements
+- Consistent naming conventions
+- Security best practices
+
+Output can be formatted as text (default) or JSON.`,
+ Args: cobra.ExactArgs(1),
+ Run: runLint,
+}
+
+var (
+ lintOutputFormat string
+ lintRuleset string
+ lintConfigFile string
+ lintDisableRules []string
+)
+
+func init() {
+ lintCmd.Flags().StringVarP(&lintOutputFormat, "format", "f", "text", "Output format (text, json)")
+ lintCmd.Flags().StringVarP(&lintRuleset, "ruleset", "r", "all", "Ruleset to use")
+ lintCmd.Flags().StringVarP(&lintConfigFile, "config", "c", "", "Path to lint configuration file")
+ lintCmd.Flags().StringSliceVarP(&lintDisableRules, "disable", "d", nil, "Rules to disable")
+}
+
+func runLint(cmd *cobra.Command, args []string) {
+ ctx := cmd.Context()
+ file := args[0]
+
+ if err := lintOpenAPI(ctx, file); err != nil {
+ fmt.Fprintf(os.Stderr, "Error: %v\n", err)
+ os.Exit(1)
+ }
+}
+
+func lintOpenAPI(ctx context.Context, file string) error {
+ cleanFile := filepath.Clean(file)
+
+ // Get absolute path for document location
+ absPath, err := filepath.Abs(cleanFile)
+ if err != nil {
+ return fmt.Errorf("failed to get absolute path: %w", err)
+ }
+
+ // Load the OpenAPI document
+ f, err := os.Open(cleanFile)
+ if err != nil {
+ return fmt.Errorf("failed to open file: %w", err)
+ }
+ defer f.Close()
+
+ // Unmarshal with validation to get validation errors
+ doc, validationErrors, err := openapi.Unmarshal(ctx, f)
+ if err != nil {
+ return fmt.Errorf("failed to unmarshal file: %w", err)
+ }
+
+ // Build linter configuration
+ config := buildLintConfig()
+
+ // Create the OpenAPI linter with default rules
+ lint := openapiLinter.NewLinter(config)
+
+ // Create document info with location
+ docInfo := linter.NewDocumentInfo(doc, absPath)
+
+ // Run linting with validation errors passed in
+ output, err := lint.Lint(ctx, docInfo, validationErrors, nil)
+ if err != nil {
+ return fmt.Errorf("linting failed: %w", err)
+ }
+
+ // Format and print output
+ switch lintOutputFormat {
+ case "json":
+ fmt.Println(output.FormatJSON())
+ default:
+ fmt.Printf("%s\n", cleanFile)
+ fmt.Println(output.FormatText())
+ }
+
+ // Exit with error code if there are errors
+ if output.HasErrors() {
+ return fmt.Errorf("linting found %d errors", output.ErrorCount())
+ }
+
+ return nil
+}
+
+func buildLintConfig() *linter.Config {
+ config := linter.NewConfig()
+
+ // Load from config file if specified
+ if lintConfigFile != "" {
+ // TODO: Load config from file
+ }
+
+ // Apply ruleset
+ if lintRuleset != "" {
+ config.Extends = []string{lintRuleset}
+ }
+
+ // Disable specified rules
+ for _, rule := range lintDisableRules {
+ if config.Rules == nil {
+ config.Rules = make(map[string]linter.RuleConfig)
+ }
+ config.Rules[rule] = linter.RuleConfig{Enabled: ptr(false)}
+ }
+
+ // Set output format
+ switch lintOutputFormat {
+ case "json":
+ config.OutputFormat = linter.OutputFormatJSON
+ default:
+ config.OutputFormat = linter.OutputFormatText
+ }
+
+ return config
+}
+
+func ptr[T any](v T) *T {
+ return &v
+}
diff --git a/cmd/openapi/commands/openapi/root.go b/cmd/openapi/commands/openapi/root.go
index e492e60..f261920 100644
--- a/cmd/openapi/commands/openapi/root.go
+++ b/cmd/openapi/commands/openapi/root.go
@@ -5,6 +5,7 @@ import "github.com/spf13/cobra"
// Apply adds OpenAPI commands to the provided root command
func Apply(rootCmd *cobra.Command) {
rootCmd.AddCommand(validateCmd)
+ rootCmd.AddCommand(lintCmd)
rootCmd.AddCommand(upgradeCmd)
rootCmd.AddCommand(inlineCmd)
rootCmd.AddCommand(cleanCmd)
diff --git a/cmd/openapi/commands/openapi/snip.go b/cmd/openapi/commands/openapi/snip.go
index 8ccf1a5..5bc265b 100644
--- a/cmd/openapi/commands/openapi/snip.go
+++ b/cmd/openapi/commands/openapi/snip.go
@@ -2,6 +2,7 @@ package openapi
import (
"context"
+ "errors"
"fmt"
"strings"
@@ -95,7 +96,7 @@ func runSnip(cmd *cobra.Command, args []string) error {
// If -w is specified without any operation selection flags, error
if snipWriteInPlace && !(hasRemoveFlags || hasKeepFlags) {
- return fmt.Errorf("--write flag requires specifying operations via --operationId/--operation or --keepOperationId/--keepOperation")
+ return errors.New("--write flag requires specifying operations via --operationId/--operation or --keepOperationId/--keepOperation")
}
// Interactive mode when no flags provided
@@ -105,7 +106,7 @@ func runSnip(cmd *cobra.Command, args []string) error {
// Disallow mixing keep + remove flags; ambiguous intent
if hasRemoveFlags && hasKeepFlags {
- return fmt.Errorf("cannot combine keep and remove flags; use either --operationId/--operation or --keepOperationId/--keepOperation")
+ return errors.New("cannot combine keep and remove flags; use either --operationId/--operation or --keepOperationId/--keepOperation")
}
// CLI mode
@@ -138,7 +139,7 @@ func runSnipCLI(ctx context.Context, inputFile, outputFile string) error {
}
if len(operationsToRemove) == 0 {
- return fmt.Errorf("no operations specified for removal")
+ return errors.New("no operations specified for removal")
}
// Perform the snip
@@ -175,7 +176,7 @@ func runSnipCLIKeep(ctx context.Context, inputFile, outputFile string) error {
return err
}
if len(keepOps) == 0 {
- return fmt.Errorf("no operations specified to keep")
+ return errors.New("no operations specified to keep")
}
// Collect all operations from the document
@@ -184,7 +185,7 @@ func runSnipCLIKeep(ctx context.Context, inputFile, outputFile string) error {
return fmt.Errorf("failed to collect operations: %w", err)
}
if len(allOps) == 0 {
- return fmt.Errorf("no operations found in the OpenAPI document")
+ return errors.New("no operations found in the OpenAPI document")
}
// Build lookup sets for keep filters
@@ -248,7 +249,7 @@ func runSnipInteractive(ctx context.Context, inputFile, outputFile string) error
}
if len(operations) == 0 {
- return fmt.Errorf("no operations found in the OpenAPI document")
+ return errors.New("no operations found in the OpenAPI document")
}
// Get document info
@@ -296,7 +297,7 @@ func runSnipInteractive(ctx context.Context, inputFile, outputFile string) error
// Get the final model state
tuiModel, ok := finalModel.(tui.Model)
if !ok {
- return fmt.Errorf("unexpected model type")
+ return errors.New("unexpected model type")
}
// Check if user performed an action or just quit
diff --git a/cmd/openapi/commands/overlay/README.md b/cmd/openapi/commands/overlay/README.md
index ded9867..19adc34 100644
--- a/cmd/openapi/commands/overlay/README.md
+++ b/cmd/openapi/commands/overlay/README.md
@@ -11,7 +11,6 @@ OpenAPI Overlays provide a way to modify OpenAPI and Arazzo specifications witho
- [`apply`](#apply)
- [`validate`](#validate)
- [`compare`](#compare)
- - [`upgrade`](#upgrade)
- [What are OpenAPI Overlays?](#what-are-openapi-overlays)
- [Example Overlay](#example-overlay)
- [Common Use Cases](#common-use-cases)
@@ -100,40 +99,6 @@ Features:
- Creates overlay files that can recreate the transformation
- Supports both positional arguments and explicit flags
-### `upgrade`
-
-Upgrade an Overlay document to the latest supported version (1.1.0).
-
-```bash
-# Preview upgrade (output to stdout)
-openapi overlay upgrade my-overlay.yaml
-
-# Upgrade and save to new file
-openapi overlay upgrade my-overlay.yaml upgraded-overlay.yaml
-
-# Upgrade in-place
-openapi overlay upgrade -w my-overlay.yaml
-```
-
-Features:
-
-- Updates the Overlay version field from 1.0.0 to 1.1.0
-- Enables RFC 9535 JSONPath as the default implementation
-- Clears redundant `x-speakeasy-jsonpath: rfc9535` (now default in 1.1.0)
-- All existing actions remain valid and functional
-- Validates overlay before and after upgrade
-
-Version Differences:
-
-| Version | Default JSONPath | Setting |
-| ------- | ---------------- | -------------------------------------------- |
-| 1.0.0 | Legacy yamlpath | `x-speakeasy-jsonpath: rfc9535` for RFC 9535 |
-| 1.1.0+ | RFC 9535 | `x-speakeasy-jsonpath: legacy` for legacy |
-
-Options:
-
-- `-w, --write`: Write result in-place to input file
-
## What are OpenAPI Overlays?
OpenAPI Overlays are documents that describe modifications to be applied to OpenAPI specifications. They allow you to:
diff --git a/cmd/openapi/commands/overlay/root.go b/cmd/openapi/commands/overlay/root.go
index b949db0..1aecb92 100644
--- a/cmd/openapi/commands/overlay/root.go
+++ b/cmd/openapi/commands/overlay/root.go
@@ -5,6 +5,5 @@ import "github.com/spf13/cobra"
func Apply(rootCmd *cobra.Command) {
rootCmd.AddCommand(applyCmd)
rootCmd.AddCommand(compareCmd)
- rootCmd.AddCommand(upgradeCmd)
rootCmd.AddCommand(validateCmd)
}
diff --git a/cmd/openapi/commands/overlay/upgrade.go b/cmd/openapi/commands/overlay/upgrade.go
deleted file mode 100644
index b04f835..0000000
--- a/cmd/openapi/commands/overlay/upgrade.go
+++ /dev/null
@@ -1,122 +0,0 @@
-package overlay
-
-import (
- "fmt"
- "os"
-
- "github.com/speakeasy-api/openapi/overlay"
- "github.com/speakeasy-api/openapi/overlay/loader"
- "github.com/spf13/cobra"
- "gopkg.in/yaml.v3"
-)
-
-var upgradeCmd = &cobra.Command{
- Use: "upgrade [output-file]",
- Short: "Upgrade an Overlay document to the latest supported version (1.1.0)",
- Long: `Upgrade an Overlay specification document to the latest supported version (1.1.0).
-
-The upgrade process includes:
-- Updating the Overlay version field from 1.0.0 to 1.1.0
-- Enabling RFC 9535 JSONPath as the default implementation
-- Clearing redundant x-speakeasy-jsonpath: rfc9535 (now default in 1.1.0)
-- All existing actions remain valid and functional
-- Support for new 1.1.0 features like copy actions and info description
-
-Version Differences:
- 1.0.0: Legacy JSONPath by default, RFC 9535 opt-in with x-speakeasy-jsonpath: rfc9535
- 1.1.0: RFC 9535 JSONPath by default, legacy opt-out with x-speakeasy-jsonpath: legacy
-
-Output options:
- - No output file specified: writes to stdout (pipe-friendly)
- - Output file specified: writes to the specified file
- - --write flag: writes in-place to the input file`,
- Example: ` # Preview upgrade (output to stdout)
- openapi overlay upgrade my-overlay.yaml
-
- # Upgrade and save to new file
- openapi overlay upgrade my-overlay.yaml upgraded-overlay.yaml
-
- # Upgrade in-place
- openapi overlay upgrade -w my-overlay.yaml`,
- Args: cobra.RangeArgs(1, 2),
- Run: runOverlayUpgrade,
-}
-
-var overlayWriteInPlace bool
-
-func init() {
- upgradeCmd.Flags().BoolVarP(&overlayWriteInPlace, "write", "w", false,
- "write result in-place to input file")
-}
-
-func runOverlayUpgrade(cmd *cobra.Command, args []string) {
- ctx := cmd.Context()
- inputFile := args[0]
-
- var outputFile string
- if len(args) > 1 {
- outputFile = args[1]
- }
-
- // Load the overlay
- o, err := loader.LoadOverlay(inputFile)
- if err != nil {
- Dief("Failed to load overlay: %v", err)
- }
-
- // Validate the overlay before upgrade
- if err := o.Validate(); err != nil {
- Dief("Overlay validation failed: %v", err)
- }
-
- originalVersion := o.Version
-
- // Perform the upgrade
- upgraded, err := overlay.Upgrade(ctx, o)
- if err != nil {
- Dief("Failed to upgrade overlay: %v", err)
- }
-
- // Print status
- if !upgraded {
- fmt.Fprintf(os.Stderr, "No upgrade needed - overlay is already at version %s\n", originalVersion)
- } else {
- fmt.Fprintf(os.Stderr, "Successfully upgraded overlay from %s to %s\n", originalVersion, o.Version)
- }
-
- // Validate the upgraded overlay
- if err := o.Validate(); err != nil {
- Dief("Upgraded overlay failed validation: %v", err)
- }
-
- // Serialize output
- output, err := o.ToString()
- if err != nil {
- Dief("Failed to serialize overlay: %v", err)
- }
-
- // Determine output destination
- switch {
- case overlayWriteInPlace:
- if err := os.WriteFile(inputFile, []byte(output), 0644); err != nil {
- Dief("Failed to write to input file: %v", err)
- }
- fmt.Fprintf(os.Stderr, "Wrote upgraded overlay to %s\n", inputFile)
- case outputFile != "":
- if err := os.WriteFile(outputFile, []byte(output), 0644); err != nil {
- Dief("Failed to write to output file: %v", err)
- }
- fmt.Fprintf(os.Stderr, "Wrote upgraded overlay to %s\n", outputFile)
- default:
- // Write to stdout
- var node yaml.Node
- if err := yaml.Unmarshal([]byte(output), &node); err != nil {
- Dief("Failed to parse output: %v", err)
- }
- encoder := yaml.NewEncoder(os.Stdout)
- encoder.SetIndent(2)
- if err := encoder.Encode(&node); err != nil {
- Dief("Failed to write to stdout: %v", err)
- }
- }
-}
diff --git a/cmd/openapi/internal/explore/tui/input.go b/cmd/openapi/internal/explore/tui/input.go
index 990af4d..a8a07a7 100644
--- a/cmd/openapi/internal/explore/tui/input.go
+++ b/cmd/openapi/internal/explore/tui/input.go
@@ -1,6 +1,7 @@
package tui
import (
+ "errors"
"fmt"
"strings"
@@ -111,7 +112,7 @@ func PromptForFilePath(prompt, defaultValue string) (string, error) {
inputModel, ok := finalModel.(InputModel)
if !ok {
- return "", fmt.Errorf("unexpected model type")
+ return "", errors.New("unexpected model type")
}
if inputModel.IsCancelled() {
diff --git a/cmd/update-lint-docs/main.go b/cmd/update-lint-docs/main.go
new file mode 100644
index 0000000..a8ebfa5
--- /dev/null
+++ b/cmd/update-lint-docs/main.go
@@ -0,0 +1,180 @@
+package main
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strings"
+
+ "github.com/speakeasy-api/openapi/linter"
+ "github.com/speakeasy-api/openapi/openapi"
+ openapiLinter "github.com/speakeasy-api/openapi/openapi/linter"
+)
+
+func main() {
+ if err := updateLintDocs(); err != nil {
+ fmt.Fprintf(os.Stderr, "Error: %v\n", err)
+ os.Exit(1)
+ }
+}
+
+func updateLintDocs() error {
+ fmt.Println("🔄 Updating lint rules in README files...")
+
+ if err := updateOpenAPILintDocs(); err != nil {
+ return fmt.Errorf("failed to update OpenAPI lint docs: %w", err)
+ }
+
+ if err := updateRuleLinks(); err != nil {
+ return fmt.Errorf("failed to update rule links: %w", err)
+ }
+
+ fmt.Println("🎉 Lint docs updated successfully!")
+ return nil
+}
+
+func updateOpenAPILintDocs() error {
+ readmeFile := "openapi/linter/README.md"
+
+ // Check if README exists
+ if _, err := os.Stat(readmeFile); os.IsNotExist(err) {
+ fmt.Printf("⚠️ No README file found: %s\n", readmeFile)
+ return nil
+ }
+
+ // Create linter to get the registry
+ config := linter.NewConfig()
+ lint := openapiLinter.NewLinter(config)
+ docGen := linter.NewDocGenerator(lint.Registry())
+
+ // Generate rules table
+ content := generateRulesTable(docGen)
+
+ // Update README file
+ if err := updateReadmeFile(readmeFile, content); err != nil {
+ return fmt.Errorf("failed to update README: %w", err)
+ }
+
+ fmt.Printf("✅ Updated %s\n", readmeFile)
+ return nil
+}
+
+func generateRulesTable(docGen *linter.DocGenerator[*openapi.OpenAPI]) string {
+ docs := docGen.GenerateAllRuleDocs()
+
+ // Sort rules alphabetically by ID
+ sort.Slice(docs, func(i, j int) bool {
+ return docs[i].ID < docs[j].ID
+ })
+
+ var content strings.Builder
+ content.WriteString("| Rule | Severity | Description |\n")
+ content.WriteString("|------|----------|-------------|\n")
+
+ for _, doc := range docs {
+ // Escape pipe characters in description
+ desc := strings.ReplaceAll(doc.Description, "|", "\\|")
+ // Replace newlines with spaces
+ desc = strings.ReplaceAll(desc, "\n", " ")
+ content.WriteString(fmt.Sprintf("| `%s` | %s | %s |\n", doc.ID, doc.DefaultSeverity, desc))
+ }
+
+ return content.String()
+}
+
+func updateReadmeFile(filename, newContent string) error {
+ // Read the current README
+ data, err := os.ReadFile(filename) //nolint:gosec
+ if err != nil {
+ return err
+ }
+
+ content := string(data)
+
+ // Find the start and end markers
+ startMarker := "<!-- START LINT RULES -->"
+ endMarker := "<!-- END LINT RULES -->"
+
+ startIdx := strings.Index(content, startMarker)
+ endIdx := strings.Index(content, endMarker)
+
+ if startIdx == -1 || endIdx == -1 {
+ return fmt.Errorf("could not find lint rules markers in %s", filename)
+ }
+
+ // Replace the content between markers
+ before := content[:startIdx+len(startMarker)]
+ after := content[endIdx:]
+
+ newFileContent := before + "\n\n" + newContent + "\n" + after
+
+ // Write the updated content
+ return os.WriteFile(filename, []byte(newFileContent), 0600)
+}
+
+func updateRuleLinks() error {
+ const baseURL = "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md"
+ rulesDir := "openapi/linter/rules"
+
+ // Get all rule files
+ entries, err := os.ReadDir(rulesDir)
+ if err != nil {
+ return fmt.Errorf("failed to read rules directory: %w", err)
+ }
+
+ // Pattern to match Link() method - captures receiver and return value
+ linkPattern := regexp.MustCompile(`func (\([^)]+\)) Link\(\) string \{\s*return "[^"]*"\s*\}`)
+
+ updatedCount := 0
+ for _, entry := range entries {
+ if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".go") || strings.HasSuffix(entry.Name(), "_test.go") {
+ continue
+ }
+
+ filePath := filepath.Join(rulesDir, entry.Name())
+
+ // Read the file
+ data, err := os.ReadFile(filePath) //nolint:gosec
+ if err != nil {
+ return fmt.Errorf("failed to read %s: %w", filePath, err)
+ }
+
+ content := string(data)
+
+ // Find the rule ID constant
+ ruleIDPattern := regexp.MustCompile(`const (Rule\w+) = "([^"]+)"`)
+ matches := ruleIDPattern.FindStringSubmatch(content)
+ if len(matches) < 3 {
+ continue // Skip if no rule ID found
+ }
+ ruleID := matches[2]
+
+ // Create the new link
+ newLink := fmt.Sprintf("%s#%s", baseURL, ruleID)
+
+ // Replace the Link() method, preserving the receiver
+ newContent := linkPattern.ReplaceAllStringFunc(content, func(match string) string {
+ receiverMatch := regexp.MustCompile(`func (\([^)]+\))`).FindStringSubmatch(match)
+ if len(receiverMatch) > 1 {
+ return fmt.Sprintf(`func %s Link() string {
+ return "%s"
+}`, receiverMatch[1], newLink)
+ }
+ return match
+ })
+
+ // Only write if content changed
+ if newContent != content {
+ if err := os.WriteFile(filePath, []byte(newContent), 0600); err != nil {
+ return fmt.Errorf("failed to write %s: %w", filePath, err)
+ }
+ updatedCount++
+ fmt.Printf("✅ Updated link in %s\n", filePath)
+ }
+ }
+
+ fmt.Printf("✅ Updated links in %d rule files\n", updatedCount)
+ return nil
+}
diff --git a/jsonschema/oas3/core/discriminator_test.go b/jsonschema/oas3/core/discriminator_test.go
new file mode 100644
index 0000000..322032f
--- /dev/null
+++ b/jsonschema/oas3/core/discriminator_test.go
@@ -0,0 +1,325 @@
+package core
+
+import (
+ "testing"
+
+ "github.com/speakeasy-api/openapi/marshaller"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestDiscriminator_Unmarshal_AllFields_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ }{
+ {
+ name: "all fields populated",
+ yaml: `
+propertyName: petType
+mapping:
+ dog: "#/components/schemas/Dog"
+ cat: "#/components/schemas/Cat"
+defaultMapping: "#/components/schemas/Pet"
+x-custom: value
+`,
+ },
+ {
+ name: "only required propertyName field",
+ yaml: `
+propertyName: type
+`,
+ },
+ {
+ name: "propertyName with mapping",
+ yaml: `
+propertyName: objectType
+mapping:
+ typeA: "#/components/schemas/TypeA"
+ typeB: "#/components/schemas/TypeB"
+`,
+ },
+ {
+ name: "propertyName with defaultMapping",
+ yaml: `
+propertyName: kind
+defaultMapping: "#/components/schemas/DefaultType"
+`,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target Discriminator
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ assert.NotNil(t, target, "Discriminator should not be nil")
+ })
+ }
+}
+
+func TestDiscriminator_Unmarshal_PropertyNameField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedPropertyName string
+ }{
+ {
+ name: "simple property name",
+ yaml: `propertyName: type`,
+ expectedPropertyName: "type",
+ },
+ {
+ name: "camelCase property name",
+ yaml: `propertyName: petType`,
+ expectedPropertyName: "petType",
+ },
+ {
+ name: "snake_case property name",
+ yaml: `propertyName: pet_type`,
+ expectedPropertyName: "pet_type",
+ },
+ {
+ name: "kebab-case property name",
+ yaml: `propertyName: pet-type`,
+ expectedPropertyName: "pet-type",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target Discriminator
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ assert.Equal(t, tt.expectedPropertyName, target.PropertyName.Value, "should parse propertyName correctly")
+ })
+ }
+}
+
+func TestDiscriminator_Unmarshal_MappingField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ key string
+ expectedRef string
+ expectedSize int
+ }{
+ {
+ name: "single mapping entry",
+ yaml: `
+propertyName: type
+mapping:
+ dog: "#/components/schemas/Dog"
+`,
+ key: "dog",
+ expectedRef: "#/components/schemas/Dog",
+ expectedSize: 1,
+ },
+ {
+ name: "multiple mapping entries",
+ yaml: `
+propertyName: type
+mapping:
+ dog: "#/components/schemas/Dog"
+ cat: "#/components/schemas/Cat"
+ bird: "#/components/schemas/Bird"
+`,
+ key: "cat",
+ expectedRef: "#/components/schemas/Cat",
+ expectedSize: 3,
+ },
+ {
+ name: "mapping with external refs",
+ yaml: `
+propertyName: type
+mapping:
+ local: "#/components/schemas/Local"
+ external: "https://example.com/schemas/External"
+`,
+ key: "external",
+ expectedRef: "https://example.com/schemas/External",
+ expectedSize: 2,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target Discriminator
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Mapping.Value, "mapping should be set")
+ assert.Equal(t, tt.expectedSize, target.Mapping.Value.Len(), "should have correct number of mappings")
+
+ value, found := target.Mapping.Value.Get(tt.key)
+ require.True(t, found, "should find mapping key")
+ assert.Equal(t, tt.expectedRef, value.Value, "should parse mapping value correctly")
+ })
+ }
+}
+
+func TestDiscriminator_Unmarshal_DefaultMappingField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedDefaultMapping string
+ }{
+ {
+ name: "defaultMapping with component ref",
+ yaml: `
+propertyName: type
+defaultMapping: "#/components/schemas/Default"
+`,
+ expectedDefaultMapping: "#/components/schemas/Default",
+ },
+ {
+ name: "defaultMapping with external ref",
+ yaml: `
+propertyName: type
+defaultMapping: "https://example.com/schemas/Default"
+`,
+ expectedDefaultMapping: "https://example.com/schemas/Default",
+ },
+ {
+ name: "defaultMapping with path ref",
+ yaml: `
+propertyName: type
+defaultMapping: "#/definitions/Default"
+`,
+ expectedDefaultMapping: "#/definitions/Default",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target Discriminator
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.DefaultMapping.Value, "defaultMapping should be set")
+ assert.Equal(t, tt.expectedDefaultMapping, *target.DefaultMapping.Value, "should parse defaultMapping correctly")
+ })
+ }
+}
+
+func TestDiscriminator_Unmarshal_Extensions_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ extensionKey string
+ expectedValue string
+ }{
+ {
+ name: "single extension",
+ yaml: `
+propertyName: type
+x-custom: value
+`,
+ extensionKey: "x-custom",
+ expectedValue: "value",
+ },
+ {
+ name: "multiple extensions",
+ yaml: `
+propertyName: type
+x-first: value1
+x-second: value2
+`,
+ extensionKey: "x-first",
+ expectedValue: "value1",
+ },
+ {
+ name: "extension with all fields",
+ yaml: `
+propertyName: type
+mapping:
+ dog: "#/components/schemas/Dog"
+defaultMapping: "#/components/schemas/Pet"
+x-vendor: custom-value
+`,
+ extensionKey: "x-vendor",
+ expectedValue: "custom-value",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target Discriminator
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Extensions, "extensions should be set")
+
+ ext, found := target.Extensions.Get(tt.extensionKey)
+ require.True(t, found, "should find extension")
+ assert.Equal(t, tt.expectedValue, ext.Value.Value, "should parse extension value correctly")
+ })
+ }
+}
+
+func TestDiscriminator_Unmarshal_MinimalObject_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+ yaml := `propertyName: type`
+
+ var target Discriminator
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ assert.Equal(t, "type", target.PropertyName.Value, "should parse propertyName")
+ assert.Nil(t, target.Mapping.Value, "mapping should be nil")
+ assert.Nil(t, target.DefaultMapping.Value, "defaultMapping should be nil")
+}
+
+func TestDiscriminator_Unmarshal_EmptyMapping_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+ yaml := `
+propertyName: type
+mapping: {}
+`
+
+ var target Discriminator
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ assert.Equal(t, "type", target.PropertyName.Value, "should parse propertyName")
+ require.NotNil(t, target.Mapping.Value, "mapping should not be nil")
+ assert.Equal(t, 0, target.Mapping.Value.Len(), "mapping should be empty")
+}
diff --git a/jsonschema/oas3/core/externaldoc_test.go b/jsonschema/oas3/core/externaldoc_test.go
new file mode 100644
index 0000000..ea4afea
--- /dev/null
+++ b/jsonschema/oas3/core/externaldoc_test.go
@@ -0,0 +1,228 @@
+package core
+
+import (
+ "testing"
+
+ "github.com/speakeasy-api/openapi/marshaller"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestExternalDocumentation_Unmarshal_AllFields_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ }{
+ {
+ name: "all fields populated",
+ yaml: `
+url: https://example.com/docs
+description: Additional documentation
+x-custom: value
+`,
+ },
+ {
+ name: "only required url field",
+ yaml: `
+url: https://example.com
+`,
+ },
+ {
+ name: "url with description",
+ yaml: `
+url: https://api.example.com/reference
+description: API Reference Documentation
+`,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target ExternalDocumentation
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ assert.NotNil(t, target, "ExternalDocumentation should not be nil")
+ })
+ }
+}
+
+func TestExternalDocumentation_Unmarshal_URLField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedURL string
+ }{
+ {
+ name: "https url",
+ yaml: `url: https://example.com/docs`,
+ expectedURL: "https://example.com/docs",
+ },
+ {
+ name: "http url",
+ yaml: `url: http://example.com/docs`,
+ expectedURL: "http://example.com/docs",
+ },
+ {
+ name: "url with path",
+ yaml: `url: https://api.example.com/v1/reference`,
+ expectedURL: "https://api.example.com/v1/reference",
+ },
+ {
+ name: "url with query params",
+ yaml: `url: https://example.com/docs?version=2.0`,
+ expectedURL: "https://example.com/docs?version=2.0",
+ },
+ {
+ name: "url with fragment",
+ yaml: `url: https://example.com/docs#section`,
+ expectedURL: "https://example.com/docs#section",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target ExternalDocumentation
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ assert.Equal(t, tt.expectedURL, target.URL.Value, "should parse url correctly")
+ })
+ }
+}
+
+func TestExternalDocumentation_Unmarshal_DescriptionField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedDescription string
+ }{
+ {
+ name: "simple description",
+ yaml: `
+url: https://example.com
+description: Documentation
+`,
+ expectedDescription: "Documentation",
+ },
+ {
+ name: "multi-word description",
+ yaml: `
+url: https://example.com
+description: Complete API documentation and reference guide
+`,
+ expectedDescription: "Complete API documentation and reference guide",
+ },
+ {
+ name: "description with special chars",
+ yaml: `
+url: https://example.com
+description: "Documentation: API & SDK Guide"
+`,
+ expectedDescription: "Documentation: API & SDK Guide",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target ExternalDocumentation
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Description.Value, "description should be set")
+ assert.Equal(t, tt.expectedDescription, *target.Description.Value, "should parse description correctly")
+ })
+ }
+}
+
+func TestExternalDocumentation_Unmarshal_Extensions_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ extensionKey string
+ expectedValue string
+ }{
+ {
+ name: "single extension",
+ yaml: `
+url: https://example.com
+x-custom: value
+`,
+ extensionKey: "x-custom",
+ expectedValue: "value",
+ },
+ {
+ name: "multiple extensions",
+ yaml: `
+url: https://example.com
+x-first: value1
+x-second: value2
+`,
+ extensionKey: "x-first",
+ expectedValue: "value1",
+ },
+ {
+ name: "extension with url and description",
+ yaml: `
+url: https://example.com/docs
+description: API docs
+x-vendor: custom-value
+`,
+ extensionKey: "x-vendor",
+ expectedValue: "custom-value",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target ExternalDocumentation
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Extensions, "extensions should be set")
+
+ ext, found := target.Extensions.Get(tt.extensionKey)
+ require.True(t, found, "should find extension")
+ assert.Equal(t, tt.expectedValue, ext.Value.Value, "should parse extension value correctly")
+ })
+ }
+}
+
+func TestExternalDocumentation_Unmarshal_MinimalObject_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+ yaml := `url: https://example.com`
+
+ var target ExternalDocumentation
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ assert.Equal(t, "https://example.com", target.URL.Value, "should parse url")
+ assert.Nil(t, target.Description.Value, "description should be nil")
+}
diff --git a/jsonschema/oas3/core/factory_registration.go b/jsonschema/oas3/core/factory_registration.go
index 6f710f6..8b97ffc 100644
--- a/jsonschema/oas3/core/factory_registration.go
+++ b/jsonschema/oas3/core/factory_registration.go
@@ -9,7 +9,6 @@ import (
// init registers all JSON Schema core types with the marshaller factory system
func init() {
// Register all JSON Schema core types
- marshaller.RegisterType(func() *Schema { return &Schema{} })
marshaller.RegisterType(func() *Discriminator { return &Discriminator{} })
marshaller.RegisterType(func() *ExternalDocumentation { return &ExternalDocumentation{} })
marshaller.RegisterType(func() *XML { return &XML{} })
@@ -20,13 +19,8 @@ func init() {
return &core.EitherValue[[]marshaller.Node[string], string]{}
})
- // Register Node-wrapped EitherValue for additionalProperties
- marshaller.RegisterType(func() *marshaller.Node[*core.EitherValue[Schema, bool]] {
- return &marshaller.Node[*core.EitherValue[Schema, bool]]{}
- })
-
- // Register sequencedmap for additionalProperties (used in properties field)
- marshaller.RegisterType(func() *sequencedmap.Map[string, marshaller.Node[*core.EitherValue[Schema, bool]]] {
- return &sequencedmap.Map[string, marshaller.Node[*core.EitherValue[Schema, bool]]]{}
+ // Register sequencedmap for properties and similar fields
+ marshaller.RegisterType(func() *sequencedmap.Map[string, *core.EitherValue[Schema, bool]] {
+ return &sequencedmap.Map[string, *core.EitherValue[Schema, bool]]{}
})
}
diff --git a/jsonschema/oas3/core/jsonschema_test.go b/jsonschema/oas3/core/jsonschema_test.go
index d5a944b..297fe11 100644
--- a/jsonschema/oas3/core/jsonschema_test.go
+++ b/jsonschema/oas3/core/jsonschema_test.go
@@ -70,3 +70,222 @@ minLength: 1
assert.True(t, target.Left.Value.Type.Value.IsRight, "Type should be Right type (string)")
assert.Equal(t, "string", target.Left.Value.Type.Value.Right.Value, "Type should be 'string'")
}
+
+func TestJSONSchema_Unmarshal_TypeArray_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+
+ // YAML with type as array (tests EitherValue[[]marshaller.Node[string], string])
+ testYaml := `
+type: [string, number]
+`
+ var node yaml.Node
+ err := yaml.Unmarshal([]byte(testYaml), &node)
+ require.NoError(t, err)
+
+ var target JSONSchema
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", node.Content[0], &target)
+
+ require.NoError(t, err, "Should not have syntax errors")
+ require.Empty(t, validationErrs, "Should not have validation errors")
+ require.NotNil(t, target, "JSONSchema should not be nil")
+ assert.True(t, target.IsLeft, "JSONSchema should be Left type (Schema)")
+
+ // Verify type array was unmarshaled
+ require.NotNil(t, target.Left.Value.Type.Value, "Type should be set")
+ assert.True(t, target.Left.Value.Type.Value.IsLeft, "Type should be Left type (array)")
+ assert.Len(t, target.Left.Value.Type.Value.Left.Value, 2, "Should have 2 types")
+}
+
+func TestJSONSchema_Unmarshal_PropertiesWithAdditionalProperties_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+
+ // YAML with properties and additionalProperties (tests sequencedmap and nested schemas)
+ testYaml := `
+type: object
+properties:
+ name:
+ type: string
+ age:
+ type: integer
+additionalProperties:
+ type: string
+`
+ var node yaml.Node
+ err := yaml.Unmarshal([]byte(testYaml), &node)
+ require.NoError(t, err)
+
+ var target JSONSchema
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", node.Content[0], &target)
+
+ require.NoError(t, err, "Should not have syntax errors")
+ require.Empty(t, validationErrs, "Should not have validation errors")
+ require.NotNil(t, target, "JSONSchema should not be nil")
+
+ // Verify properties map
+ require.NotNil(t, target.Left.Value.Properties.Value, "Properties should be set")
+ assert.Equal(t, 2, target.Left.Value.Properties.Value.Len(), "Should have 2 properties")
+
+ // Verify additionalProperties schema
+ require.NotNil(t, target.Left.Value.AdditionalProperties.Value, "AdditionalProperties should be set")
+ assert.True(t, target.Left.Value.AdditionalProperties.Value.IsLeft, "AdditionalProperties should be schema")
+}
+
+func TestJSONSchema_Unmarshal_WithDiscriminator_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+
+ // YAML with discriminator (tests Discriminator type registration)
+ testYaml := `
+type: object
+discriminator:
+ propertyName: petType
+ mapping:
+ dog: "#/components/schemas/Dog"
+ cat: "#/components/schemas/Cat"
+`
+ var node yaml.Node
+ err := yaml.Unmarshal([]byte(testYaml), &node)
+ require.NoError(t, err)
+
+ var target JSONSchema
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", node.Content[0], &target)
+
+ require.NoError(t, err, "Should not have syntax errors")
+ require.Empty(t, validationErrs, "Should not have validation errors")
+ require.NotNil(t, target, "JSONSchema should not be nil")
+ assert.True(t, target.IsLeft, "JSONSchema should be Left type (Schema)")
+
+ // Verify discriminator was unmarshaled
+ require.NotNil(t, target.Left.Value.Discriminator.Value, "Discriminator should be set")
+ assert.Equal(t, "petType", target.Left.Value.Discriminator.Value.PropertyName.Value, "Should parse propertyName")
+ require.NotNil(t, target.Left.Value.Discriminator.Value.Mapping.Value, "Mapping should be set")
+ assert.Equal(t, 2, target.Left.Value.Discriminator.Value.Mapping.Value.Len(), "Should have 2 mappings")
+}
+
+func TestJSONSchema_Unmarshal_WithExternalDocs_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+
+ // YAML with externalDocs (tests ExternalDocumentation type registration)
+ testYaml := `
+type: string
+description: A user identifier
+externalDocs:
+ url: https://example.com/docs/user-id
+ description: User ID documentation
+`
+ var node yaml.Node
+ err := yaml.Unmarshal([]byte(testYaml), &node)
+ require.NoError(t, err)
+
+ var target JSONSchema
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", node.Content[0], &target)
+
+ require.NoError(t, err, "Should not have syntax errors")
+ require.Empty(t, validationErrs, "Should not have validation errors")
+ require.NotNil(t, target, "JSONSchema should not be nil")
+ assert.True(t, target.IsLeft, "JSONSchema should be Left type (Schema)")
+
+ // Verify externalDocs was unmarshaled
+ require.NotNil(t, target.Left.Value.ExternalDocs.Value, "ExternalDocs should be set")
+ assert.Equal(t, "https://example.com/docs/user-id", target.Left.Value.ExternalDocs.Value.URL.Value, "Should parse URL")
+ require.NotNil(t, target.Left.Value.ExternalDocs.Value.Description.Value, "Description should be set")
+ assert.Equal(t, "User ID documentation", *target.Left.Value.ExternalDocs.Value.Description.Value, "Should parse description")
+}
+
+func TestJSONSchema_Unmarshal_WithXML_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+
+ // YAML with xml (tests XML type registration)
+ testYaml := `
+type: object
+xml:
+ name: Person
+ namespace: http://example.com/schema
+ prefix: per
+ wrapped: true
+`
+ var node yaml.Node
+ err := yaml.Unmarshal([]byte(testYaml), &node)
+ require.NoError(t, err)
+
+ var target JSONSchema
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", node.Content[0], &target)
+
+ require.NoError(t, err, "Should not have syntax errors")
+ require.Empty(t, validationErrs, "Should not have validation errors")
+ require.NotNil(t, target, "JSONSchema should not be nil")
+ assert.True(t, target.IsLeft, "JSONSchema should be Left type (Schema)")
+
+ // Verify xml was unmarshaled
+ require.NotNil(t, target.Left.Value.XML.Value, "XML should be set")
+ require.NotNil(t, target.Left.Value.XML.Value.Name.Value, "Name should be set")
+ assert.Equal(t, "Person", *target.Left.Value.XML.Value.Name.Value, "Should parse name")
+ require.NotNil(t, target.Left.Value.XML.Value.Namespace.Value, "Namespace should be set")
+ assert.Equal(t, "http://example.com/schema", *target.Left.Value.XML.Value.Namespace.Value, "Should parse namespace")
+ require.NotNil(t, target.Left.Value.XML.Value.Prefix.Value, "Prefix should be set")
+ assert.Equal(t, "per", *target.Left.Value.XML.Value.Prefix.Value, "Should parse prefix")
+ require.NotNil(t, target.Left.Value.XML.Value.Wrapped.Value, "Wrapped should be set")
+ assert.True(t, *target.Left.Value.XML.Value.Wrapped.Value, "Should parse wrapped as true")
+}
+
+func TestJSONSchema_Unmarshal_ComplexSchema_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+
+ // YAML with multiple nested features to test all registrations together
+ testYaml := `
+type: object
+properties:
+ id:
+ type: string
+ xml:
+ attribute: true
+ name:
+ type: string
+discriminator:
+ propertyName: type
+externalDocs:
+ url: https://example.com/docs
+`
+ var node yaml.Node
+ err := yaml.Unmarshal([]byte(testYaml), &node)
+ require.NoError(t, err)
+
+ var target JSONSchema
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", node.Content[0], &target)
+
+ require.NoError(t, err, "Should not have syntax errors")
+ require.Empty(t, validationErrs, "Should not have validation errors")
+ require.NotNil(t, target, "JSONSchema should not be nil")
+ assert.True(t, target.IsLeft, "JSONSchema should be Left type (Schema)")
+
+ // Verify properties
+ require.NotNil(t, target.Left.Value.Properties.Value, "Properties should be set")
+ assert.Equal(t, 2, target.Left.Value.Properties.Value.Len(), "Should have 2 properties")
+
+ // Verify id property has xml
+ idProp, found := target.Left.Value.Properties.Value.Get("id")
+ require.True(t, found, "Should find id property")
+ require.NotNil(t, idProp, "id property should not be nil")
+ require.NotNil(t, idProp.Left.Value.XML.Value, "id should have XML")
+ require.NotNil(t, idProp.Left.Value.XML.Value.Attribute.Value, "XML attribute should be set")
+ assert.True(t, *idProp.Left.Value.XML.Value.Attribute.Value, "XML attribute should be true")
+
+ // Verify discriminator
+ require.NotNil(t, target.Left.Value.Discriminator.Value, "Discriminator should be set")
+ assert.Equal(t, "type", target.Left.Value.Discriminator.Value.PropertyName.Value, "Should parse discriminator propertyName")
+
+ // Verify externalDocs
+ require.NotNil(t, target.Left.Value.ExternalDocs.Value, "ExternalDocs should be set")
+ assert.Equal(t, "https://example.com/docs", target.Left.Value.ExternalDocs.Value.URL.Value, "Should parse externalDocs URL")
+}
diff --git a/jsonschema/oas3/core/xml_test.go b/jsonschema/oas3/core/xml_test.go
new file mode 100644
index 0000000..493a8b8
--- /dev/null
+++ b/jsonschema/oas3/core/xml_test.go
@@ -0,0 +1,333 @@
+package core
+
+import (
+ "testing"
+
+ "github.com/speakeasy-api/openapi/marshaller"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "gopkg.in/yaml.v3"
+)
+
+func parseYAML(t *testing.T, yml string) *yaml.Node {
+ t.Helper()
+ var node yaml.Node
+ err := yaml.Unmarshal([]byte(yml), &node)
+ require.NoError(t, err)
+ return node.Content[0]
+}
+
+func TestXML_Unmarshal_AllFields_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ }{
+ {
+ name: "all fields populated",
+ yaml: `
+name: Person
+namespace: http://example.com/schema/Person
+prefix: per
+attribute: true
+wrapped: false
+x-custom: value
+`,
+ },
+ {
+ name: "only required fields",
+ yaml: `
+name: Item
+`,
+ },
+ {
+ name: "namespace and prefix",
+ yaml: `
+namespace: http://example.com/ns
+prefix: ex
+`,
+ },
+ {
+ name: "boolean flags",
+ yaml: `
+attribute: true
+wrapped: true
+`,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target XML
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ assert.NotNil(t, target, "XML should not be nil")
+ })
+ }
+}
+
+func TestXML_Unmarshal_NameField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedName string
+ }{
+ {
+ name: "simple name",
+ yaml: `name: Person`,
+ expectedName: "Person",
+ },
+ {
+ name: "camelCase name",
+ yaml: `name: personDetails`,
+ expectedName: "personDetails",
+ },
+ {
+ name: "PascalCase name",
+ yaml: `name: PersonDetails`,
+ expectedName: "PersonDetails",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target XML
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Name.Value, "name should be set")
+ assert.Equal(t, tt.expectedName, *target.Name.Value, "should parse name correctly")
+ })
+ }
+}
+
+func TestXML_Unmarshal_NamespaceField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedNamespace string
+ }{
+ {
+ name: "http namespace",
+ yaml: `namespace: http://example.com/schema`,
+ expectedNamespace: "http://example.com/schema",
+ },
+ {
+ name: "https namespace",
+ yaml: `namespace: https://example.com/api/v1`,
+ expectedNamespace: "https://example.com/api/v1",
+ },
+ {
+ name: "urn namespace",
+ yaml: `namespace: urn:example:schema`,
+ expectedNamespace: "urn:example:schema",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target XML
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Namespace.Value, "namespace should be set")
+ assert.Equal(t, tt.expectedNamespace, *target.Namespace.Value, "should parse namespace correctly")
+ })
+ }
+}
+
+func TestXML_Unmarshal_PrefixField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedPrefix string
+ }{
+ {
+ name: "short prefix",
+ yaml: `prefix: ex`,
+ expectedPrefix: "ex",
+ },
+ {
+ name: "longer prefix",
+ yaml: `prefix: example`,
+ expectedPrefix: "example",
+ },
+ {
+ name: "single char prefix",
+ yaml: `prefix: x`,
+ expectedPrefix: "x",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target XML
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Prefix.Value, "prefix should be set")
+ assert.Equal(t, tt.expectedPrefix, *target.Prefix.Value, "should parse prefix correctly")
+ })
+ }
+}
+
+func TestXML_Unmarshal_AttributeField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedAttribute bool
+ }{
+ {
+ name: "attribute true",
+ yaml: `attribute: true`,
+ expectedAttribute: true,
+ },
+ {
+ name: "attribute false",
+ yaml: `attribute: false`,
+ expectedAttribute: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target XML
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Attribute.Value, "attribute should be set")
+ assert.Equal(t, tt.expectedAttribute, *target.Attribute.Value, "should parse attribute correctly")
+ })
+ }
+}
+
+func TestXML_Unmarshal_WrappedField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedWrapped bool
+ }{
+ {
+ name: "wrapped true",
+ yaml: `wrapped: true`,
+ expectedWrapped: true,
+ },
+ {
+ name: "wrapped false",
+ yaml: `wrapped: false`,
+ expectedWrapped: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target XML
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Wrapped.Value, "wrapped should be set")
+ assert.Equal(t, tt.expectedWrapped, *target.Wrapped.Value, "should parse wrapped correctly")
+ })
+ }
+}
+
+func TestXML_Unmarshal_Extensions_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ extensionKey string
+ expectedValue string
+ }{
+ {
+ name: "single extension",
+ yaml: `
+x-custom: value
+`,
+ extensionKey: "x-custom",
+ expectedValue: "value",
+ },
+ {
+ name: "multiple extensions",
+ yaml: `
+x-first: value1
+x-second: value2
+`,
+ extensionKey: "x-first",
+ expectedValue: "value1",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target XML
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Extensions, "extensions should be set")
+
+ ext, found := target.Extensions.Get(tt.extensionKey)
+ require.True(t, found, "should find extension")
+ assert.Equal(t, tt.expectedValue, ext.Value.Value, "should parse extension value correctly")
+ })
+ }
+}
+
+func TestXML_Unmarshal_EmptyObject_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+ yaml := `{}`
+
+ var target XML
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ assert.Nil(t, target.Name.Value, "name should be nil")
+ assert.Nil(t, target.Namespace.Value, "namespace should be nil")
+ assert.Nil(t, target.Prefix.Value, "prefix should be nil")
+ assert.Nil(t, target.Attribute.Value, "attribute should be nil")
+ assert.Nil(t, target.Wrapped.Value, "wrapped should be nil")
+}
diff --git a/jsonschema/oas3/discriminator.go b/jsonschema/oas3/discriminator.go
index f10f00b..23f5af0 100644
--- a/jsonschema/oas3/discriminator.go
+++ b/jsonschema/oas3/discriminator.go
@@ -2,6 +2,7 @@ package oas3
import (
"context"
+ "errors"
"github.com/speakeasy-api/openapi/extensions"
"github.com/speakeasy-api/openapi/internal/interfaces"
@@ -72,17 +73,13 @@ func (d *Discriminator) Validate(ctx context.Context, opts ...validation.Option)
errs := []error{}
// propertyName is REQUIRED in all OpenAPI versions
- if core.PropertyName.Present {
- if core.PropertyName.Value == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("discriminator.propertyName is required"), core, core.PropertyName))
- }
- } else {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("discriminator.propertyName is required"), core, core.PropertyName))
+ if core.PropertyName.Present && d.PropertyName == "" {
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("discriminator.propertyName is required"), core, core.PropertyName))
}
// defaultMapping validation - must not be empty if present
- if core.DefaultMapping.Present && (core.DefaultMapping.Value == nil || *core.DefaultMapping.Value == "") {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("discriminator.defaultMapping cannot be empty"), core, core.DefaultMapping))
+ if core.DefaultMapping.Present && d.GetDefaultMapping() == "" {
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationEmptyValue, errors.New("discriminator.defaultMapping cannot be empty"), core, core.DefaultMapping))
}
d.Valid = len(errs) == 0 && core.GetValid()
diff --git a/jsonschema/oas3/discriminator_validate_test.go b/jsonschema/oas3/discriminator_validate_test.go
index e8dbf2c..c691570 100644
--- a/jsonschema/oas3/discriminator_validate_test.go
+++ b/jsonschema/oas3/discriminator_validate_test.go
@@ -94,8 +94,7 @@ func TestDiscriminator_Validate_Error(t *testing.T) {
dog: "#/components/schemas/Dog"
`,
wantErrs: []string{
- "[1:1] discriminator.propertyName is missing",
- "[1:1] discriminator.propertyName is required",
+ "[1:1] error validation-required-field discriminator.propertyName is required",
},
},
{
@@ -105,7 +104,7 @@ propertyName: ""
mapping:
dog: "#/components/schemas/Dog"
`,
- wantErrs: []string{"[2:15] discriminator.propertyName is required"},
+ wantErrs: []string{"[2:15] error validation-required-field discriminator.propertyName is required"},
},
}
diff --git a/jsonschema/oas3/externaldoc.go b/jsonschema/oas3/externaldoc.go
index 9063d74..2e3f2f7 100644
--- a/jsonschema/oas3/externaldoc.go
+++ b/jsonschema/oas3/externaldoc.go
@@ -2,6 +2,8 @@ package oas3
import (
"context"
+ "errors"
+ "fmt"
"net/url"
"reflect"
@@ -86,10 +88,10 @@ func (e *ExternalDocumentation) Validate(ctx context.Context, opts ...validation
if core.URL.Present {
if core.URL.Value == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("externalDocumentation.url is required"), core, core.URL))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("externalDocumentation.url is required"), core, core.URL))
} else {
if _, err := url.Parse(core.URL.Value); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("externalDocumentation.url is not a valid uri: %s", err), core, core.URL))
+ errs = append(errs, validation.NewValueError(validation.SeverityWarning, validation.RuleValidationInvalidFormat, fmt.Errorf("externalDocumentation.url is not a valid uri: %w", err), core, core.URL))
}
}
}
diff --git a/jsonschema/oas3/externaldoc_validate_test.go b/jsonschema/oas3/externaldoc_validate_test.go
index bdecc95..b725f50 100644
--- a/jsonschema/oas3/externaldoc_validate_test.go
+++ b/jsonschema/oas3/externaldoc_validate_test.go
@@ -86,7 +86,7 @@ func TestExternalDoc_Validate_Error(t *testing.T) {
yml: `
description: Some documentation
`,
- wantErrs: []string{"[2:1] externalDocumentation.url is missing"},
+ wantErrs: []string{"[2:1] error validation-required-field externalDocumentation.url is required"},
},
{
name: "empty URL",
@@ -94,7 +94,7 @@ description: Some documentation
description: Some documentation
url: ""
`,
- wantErrs: []string{"[3:6] externalDocumentation.url is required"},
+ wantErrs: []string{"[3:6] error validation-required-field externalDocumentation.url is required"},
},
{
name: "invalid URL format",
diff --git a/jsonschema/oas3/jsonschema_validate_test.go b/jsonschema/oas3/jsonschema_validate_test.go
index 577ee2a..eba5e47 100644
--- a/jsonschema/oas3/jsonschema_validate_test.go
+++ b/jsonschema/oas3/jsonschema_validate_test.go
@@ -227,7 +227,7 @@ func TestJSONSchema_Validate_Error(t *testing.T) {
name: "schema fails direct validation",
yml: `
"test"`,
- wantErrs: []string{"[2:1] failed to validate either Schema [expected object, got `te...`] or bool [line 2: cannot unmarshal !!str `test` into bool]"},
+ wantErrs: []string{"[2:1] error validation-type-mismatch failed to validate either Schema [expected object, got `te...`] or bool [line 2: cannot unmarshal !!str `test` into bool]"},
},
{
name: "child schema fails validation",
@@ -243,8 +243,8 @@ description:
$ref: "#/components/schemas/stream/properties/profiles/description"
`,
wantErrs: []string{
- "[2:1] schema.description expected string, got object",
- "[10:5] schema.description expected string, got object",
+ "[2:1] error validation-type-mismatch schema.description expected string, got object",
+ "[10:5] error validation-type-mismatch schema.description expected string, got object",
},
},
{
@@ -253,8 +253,8 @@ description:
type: invalid_type
`,
wantErrs: []string{
- "[2:7] schema.type expected array, got string",
- "[2:7] schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'",
+ "[2:7] error validation-invalid-schema schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'",
+ "[2:7] error validation-type-mismatch schema.type expected array, got string",
},
},
}
diff --git a/jsonschema/oas3/resolution.go b/jsonschema/oas3/resolution.go
index 22c615f..0e09d53 100644
--- a/jsonschema/oas3/resolution.go
+++ b/jsonschema/oas3/resolution.go
@@ -64,7 +64,7 @@ func (j *JSONSchema[Referenceable]) GetAbsRef() references.Reference {
if j.referenceResolutionCache == nil {
return ref
}
- return references.Reference(j.referenceResolutionCache.AbsoluteReference + "#" + ref.GetJSONPointer().String())
+ return references.Reference(j.referenceResolutionCache.AbsoluteDocumentPath + "#" + ref.GetJSONPointer().String())
}
// Resolve will fully resolve the reference and return the JSONSchema referenced. This will recursively resolve any intermediate references as well.
@@ -180,7 +180,7 @@ func (s *JSONSchema[Referenceable]) resolve(ctx context.Context, opts references
// The ResolveResult.ResolvedDocument should be used as the new TargetDocument
if s.referenceResolutionCache.ResolvedDocument != nil {
opts.TargetDocument = s.referenceResolutionCache.ResolvedDocument
- opts.TargetLocation = s.referenceResolutionCache.AbsoluteReference
+ opts.TargetLocation = s.referenceResolutionCache.AbsoluteDocumentPath
}
}
@@ -195,7 +195,7 @@ func (s *JSONSchema[Referenceable]) resolve(ctx context.Context, opts references
if result := s.tryResolveViaRegistry(ctx, ref, opts); result != nil {
// Compute absolute reference for circular detection
// Use the result's AbsoluteReference combined with any anchor/fragment
- absRef := result.AbsoluteReference
+ absRef := result.AbsoluteDocumentPath
if anchor := ExtractAnchor(string(ref)); anchor != "" {
absRef = absRef + "#" + anchor
} else if jp := ref.GetJSONPointer(); jp != "" {
@@ -279,7 +279,7 @@ func (s *JSONSchema[Referenceable]) resolve(ctx context.Context, opts references
// Use $id as base URI if present in the resolved schema (JSON Schema spec)
// The $id keyword identifies a schema resource with its canonical URI
// and serves as the base URI for relative references within that schema
- baseURI := result.AbsoluteReference
+ baseURI := result.AbsoluteDocumentPath
if !schema.IsBool() && schema.GetSchema() != nil {
if schemaID := schema.GetSchema().GetID(); schemaID != "" {
baseURI = schemaID
@@ -301,9 +301,13 @@ func (s *JSONSchema[Referenceable]) resolve(ctx context.Context, opts references
localBaseURI = jsID
}
}
+ // Get the ref to build absolute reference with fragment
+ jsRef := js.GetRef()
+ absRef := utils.BuildAbsoluteReference(localBaseURI, string(jsRef.GetJSONPointer()))
js.referenceResolutionCache = &references.ResolveResult[JSONSchemaReferenceable]{
- AbsoluteReference: localBaseURI,
- ResolvedDocument: result.ResolvedDocument,
+ AbsoluteDocumentPath: localBaseURI,
+ AbsoluteReference: references.Reference(absRef),
+ ResolvedDocument: result.ResolvedDocument,
}
}
return nil
diff --git a/jsonschema/oas3/resolution_defs.go b/jsonschema/oas3/resolution_defs.go
index 9d12dd8..97b09da 100644
--- a/jsonschema/oas3/resolution_defs.go
+++ b/jsonschema/oas3/resolution_defs.go
@@ -5,6 +5,7 @@ import (
"fmt"
"strings"
+ "github.com/speakeasy-api/openapi/internal/utils"
"github.com/speakeasy-api/openapi/jsonpointer"
"github.com/speakeasy-api/openapi/references"
"gopkg.in/yaml.v3"
@@ -144,9 +145,11 @@ func (s *JSONSchema[Referenceable]) tryResolveLocalDefs(_ context.Context, ref r
absRef = schemaID
}
+ absRefWithFragment := utils.BuildAbsoluteReference(absRef, string(ref.GetJSONPointer()))
return &references.ResolveResult[JSONSchemaReferenceable]{
- Object: defSchema,
- AbsoluteReference: absRef,
+ Object: defSchema,
+ AbsoluteDocumentPath: absRef,
+ AbsoluteReference: references.Reference(absRefWithFragment),
}
}
diff --git a/jsonschema/oas3/resolution_external.go b/jsonschema/oas3/resolution_external.go
index a186090..a97d189 100644
--- a/jsonschema/oas3/resolution_external.go
+++ b/jsonschema/oas3/resolution_external.go
@@ -5,6 +5,7 @@ import (
"errors"
"fmt"
+ "github.com/speakeasy-api/openapi/internal/utils"
"github.com/speakeasy-api/openapi/jsonpointer"
"github.com/speakeasy-api/openapi/marshaller"
"github.com/speakeasy-api/openapi/references"
@@ -33,7 +34,7 @@ func (s *JSONSchema[Referenceable]) resolveExternalAnchorReference(ctx context.C
// Use $id as base URI if present in the resolved schema (JSON Schema spec)
// The $id keyword identifies a schema resource with its canonical URI
// and serves as the base URI for anchor lookups within that schema
- baseURI := docResult.AbsoluteReference
+ baseURI := docResult.AbsoluteDocumentPath
if !externalDoc.IsBool() && externalDoc.GetSchema() != nil {
if schemaID := externalDoc.GetSchema().GetID(); schemaID != "" {
baseURI = schemaID
@@ -60,8 +61,8 @@ func (s *JSONSchema[Referenceable]) resolveExternalAnchorReference(ctx context.C
// This handles the case where the reference uses the retrieval URL instead of the canonical $id
// Example: fetch https://example.com/a.json, but $id is https://cdn.example.com/canonical.json
// A reference to "https://example.com/a.json#foo" should still resolve
- if resolved == nil && docResult.AbsoluteReference != "" && docResult.AbsoluteReference != baseURI {
- resolved = registry.LookupByAnchor(docResult.AbsoluteReference, anchor)
+ if resolved == nil && docResult.AbsoluteDocumentPath != "" && docResult.AbsoluteDocumentPath != baseURI {
+ resolved = registry.LookupByAnchor(docResult.AbsoluteDocumentPath, anchor)
}
// Fallback: try with empty base URI
@@ -73,10 +74,12 @@ func (s *JSONSchema[Referenceable]) resolveExternalAnchorReference(ctx context.C
return nil, validationErrs, fmt.Errorf("anchor not found in external document: %s#%s", ref.GetURI(), anchor)
}
+ absRef := utils.BuildAbsoluteReference(baseURI, "#"+anchor)
return &references.ResolveResult[JSONSchemaReferenceable]{
- Object: resolved,
- AbsoluteReference: baseURI,
- ResolvedDocument: docResult.ResolvedDocument,
+ Object: resolved,
+ AbsoluteDocumentPath: baseURI,
+ AbsoluteReference: references.Reference(absRef),
+ ResolvedDocument: docResult.ResolvedDocument,
}, validationErrs, nil
}
@@ -105,7 +108,7 @@ func (s *JSONSchema[Referenceable]) resolveExternalRefWithFragment(ctx context.C
// Use $id as base URI if present in the resolved schema (JSON Schema spec)
// The $id keyword identifies a schema resource with its canonical URI
// and serves as the base URI for relative references within that schema
- baseURI := docResult.AbsoluteReference
+ baseURI := docResult.AbsoluteDocumentPath
if !externalDoc.IsBool() && externalDoc.GetSchema() != nil {
if schemaID := externalDoc.GetSchema().GetID(); schemaID != "" {
baseURI = schemaID
@@ -119,9 +122,10 @@ func (s *JSONSchema[Referenceable]) resolveExternalRefWithFragment(ctx context.C
if jp == "" {
// No fragment, return the whole document with canonical base URI
return &references.ResolveResult[JSONSchemaReferenceable]{
- Object: externalDoc,
- AbsoluteReference: baseURI,
- ResolvedDocument: docResult.ResolvedDocument,
+ Object: externalDoc,
+ AbsoluteDocumentPath: baseURI,
+ AbsoluteReference: references.Reference(baseURI),
+ ResolvedDocument: docResult.ResolvedDocument,
}, validationErrs, nil
}
@@ -150,10 +154,12 @@ func (s *JSONSchema[Referenceable]) resolveExternalRefWithFragment(ctx context.C
target.GetSchema().SetEffectiveBaseURI(baseURI)
}
+ absRef := utils.BuildAbsoluteReference(baseURI, string(jp))
return &references.ResolveResult[JSONSchemaReferenceable]{
- Object: target,
- AbsoluteReference: baseURI,
- ResolvedDocument: docResult.ResolvedDocument,
+ Object: target,
+ AbsoluteDocumentPath: baseURI,
+ AbsoluteReference: references.Reference(absRef),
+ ResolvedDocument: docResult.ResolvedDocument,
}, validationErrs, nil
}
diff --git a/jsonschema/oas3/resolution_registry.go b/jsonschema/oas3/resolution_registry.go
index 89b7c14..2bd0e3e 100644
--- a/jsonschema/oas3/resolution_registry.go
+++ b/jsonschema/oas3/resolution_registry.go
@@ -3,6 +3,7 @@ package oas3
import (
"context"
+ "github.com/speakeasy-api/openapi/internal/utils"
"github.com/speakeasy-api/openapi/references"
)
@@ -34,10 +35,12 @@ func (s *JSONSchema[Referenceable]) tryResolveViaRegistry(ctx context.Context, r
}
if resolved := registry.LookupByAnchor(anchorBase, anchor); resolved != nil {
+ absRef := utils.BuildAbsoluteReference(anchorBase, "#"+anchor)
return &references.ResolveResult[JSONSchemaReferenceable]{
- Object: resolved,
- AbsoluteReference: anchorBase,
- ResolvedDocument: opts.TargetDocument,
+ Object: resolved,
+ AbsoluteDocumentPath: anchorBase,
+ AbsoluteReference: references.Reference(absRef),
+ ResolvedDocument: opts.TargetDocument,
}
}
@@ -45,10 +48,12 @@ func (s *JSONSchema[Referenceable]) tryResolveViaRegistry(ctx context.Context, r
// This handles the case where anchors were registered without a document base URI
if ref.GetURI() == "" && anchorBase != "" {
if resolved := registry.LookupByAnchor("", anchor); resolved != nil {
+ absRef := "#" + anchor
return &references.ResolveResult[JSONSchemaReferenceable]{
- Object: resolved,
- AbsoluteReference: "",
- ResolvedDocument: opts.TargetDocument,
+ Object: resolved,
+ AbsoluteDocumentPath: "",
+ AbsoluteReference: references.Reference(absRef),
+ ResolvedDocument: opts.TargetDocument,
}
}
}
@@ -57,10 +62,12 @@ func (s *JSONSchema[Referenceable]) tryResolveViaRegistry(ctx context.Context, r
docBase := registry.GetDocumentBaseURI()
if docBase != "" && docBase != anchorBase {
if resolved := registry.LookupByAnchor(docBase, anchor); resolved != nil {
+ absRef := utils.BuildAbsoluteReference(docBase, "#"+anchor)
return &references.ResolveResult[JSONSchemaReferenceable]{
- Object: resolved,
- AbsoluteReference: docBase,
- ResolvedDocument: opts.TargetDocument,
+ Object: resolved,
+ AbsoluteDocumentPath: docBase,
+ AbsoluteReference: references.Reference(absRef),
+ ResolvedDocument: opts.TargetDocument,
}
}
}
@@ -108,19 +115,22 @@ func (s *JSONSchema[Referenceable]) tryResolveViaRegistry(ctx context.Context, r
// If there's no JSON pointer, return the schema directly
if jp == "" {
return &references.ResolveResult[JSONSchemaReferenceable]{
- Object: resolvedSchema,
- AbsoluteReference: absoluteReference,
- ResolvedDocument: opts.TargetDocument,
+ Object: resolvedSchema,
+ AbsoluteDocumentPath: absoluteReference,
+ AbsoluteReference: references.Reference(absoluteReference),
+ ResolvedDocument: opts.TargetDocument,
}
}
// There's a JSON pointer - navigate within the found schema
target, err := navigateJSONPointer(ctx, resolvedSchema, jp)
if err == nil && target != nil {
+ absRef := utils.BuildAbsoluteReference(absoluteReference, string(jp))
return &references.ResolveResult[JSONSchemaReferenceable]{
- Object: target,
- AbsoluteReference: absoluteReference,
- ResolvedDocument: opts.TargetDocument,
+ Object: target,
+ AbsoluteDocumentPath: absoluteReference,
+ AbsoluteReference: references.Reference(absRef),
+ ResolvedDocument: opts.TargetDocument,
}
}
// If navigation failed, fall through to external resolution
@@ -171,8 +181,8 @@ func (s *JSONSchema[Referenceable]) getEffectiveBaseURI(opts references.ResolveO
}
// Check if we have a cached absolute reference
- if s.referenceResolutionCache != nil && s.referenceResolutionCache.AbsoluteReference != "" {
- return s.referenceResolutionCache.AbsoluteReference
+ if s.referenceResolutionCache != nil && s.referenceResolutionCache.AbsoluteDocumentPath != "" {
+ return s.referenceResolutionCache.AbsoluteDocumentPath
}
// Fall back to target location
diff --git a/jsonschema/oas3/resolution_test.go b/jsonschema/oas3/resolution_test.go
index 2a5ba3e..a42625d 100644
--- a/jsonschema/oas3/resolution_test.go
+++ b/jsonschema/oas3/resolution_test.go
@@ -485,9 +485,9 @@ func TestJSONSchema_Resolve_Caching(t *testing.T) {
// Set up cached resolved schema using the actual cache field
schema.referenceResolutionCache = &references.ResolveResult[JSONSchema[Referenceable]]{
- Object: resolved,
- AbsoluteReference: "testdata/simple_schema.yaml#/components/schemas/User",
- ResolvedDocument: resolved,
+ Object: resolved,
+ AbsoluteDocumentPath: "testdata/simple_schema.yaml#/components/schemas/User",
+ ResolvedDocument: resolved,
}
root, err := LoadTestSchemaFromFile(t.Context(), "testdata/simple_schema.yaml")
@@ -1928,7 +1928,7 @@ func TestGetEffectiveBaseURI_Success(t *testing.T) {
schema := createSchemaWithRef("#foo")
schema.referenceResolutionCache = &references.ResolveResult[JSONSchema[Referenceable]]{
- AbsoluteReference: "https://example.com/cached.json",
+ AbsoluteDocumentPath: "https://example.com/cached.json",
}
opts := ResolveOptions{
diff --git a/jsonschema/oas3/schema.go b/jsonschema/oas3/schema.go
index 88b14bf..495f296 100644
--- a/jsonschema/oas3/schema.go
+++ b/jsonschema/oas3/schema.go
@@ -494,6 +494,68 @@ func (s *Schema) GetFormat() string {
return *s.Format
}
+// IsReferenceOnly returns true if this schema only contains a $ref and no other properties.
+// This is used for the no-ref-siblings linter rule in OAS 3.0.x (in OAS 3.1+, $ref can have siblings).
+func (s *Schema) IsReferenceOnly() bool {
+ if !s.IsReference() {
+ return false
+ }
+
+ // Check all schema fields - if any are set, it's not reference-only
+ return s.Type == nil &&
+ len(s.AllOf) == 0 &&
+ len(s.OneOf) == 0 &&
+ len(s.AnyOf) == 0 &&
+ s.Discriminator == nil &&
+ len(s.Examples) == 0 &&
+ len(s.PrefixItems) == 0 &&
+ s.Contains == nil &&
+ s.MinContains == nil &&
+ s.MaxContains == nil &&
+ s.If == nil &&
+ s.Else == nil &&
+ s.Then == nil &&
+ (s.DependentSchemas == nil || s.DependentSchemas.Len() == 0) &&
+ (s.PatternProperties == nil || s.PatternProperties.Len() == 0) &&
+ s.PropertyNames == nil &&
+ s.UnevaluatedItems == nil &&
+ s.UnevaluatedProperties == nil &&
+ s.Items == nil &&
+ s.Anchor == nil &&
+ s.ID == nil &&
+ s.Not == nil &&
+ (s.Properties == nil || s.Properties.Len() == 0) &&
+ (s.Defs == nil || s.Defs.Len() == 0) &&
+ s.Title == nil &&
+ s.MultipleOf == nil &&
+ s.Maximum == nil &&
+ s.Minimum == nil &&
+ s.MaxLength == nil &&
+ s.MinLength == nil &&
+ s.Pattern == nil &&
+ s.Format == nil &&
+ s.MaxItems == nil &&
+ s.MinItems == nil &&
+ s.UniqueItems == nil &&
+ s.MaxProperties == nil &&
+ s.MinProperties == nil &&
+ len(s.Required) == 0 &&
+ len(s.Enum) == 0 &&
+ s.AdditionalProperties == nil &&
+ s.Description == nil &&
+ s.Default == nil &&
+ s.Const == nil &&
+ s.Nullable == nil &&
+ s.ReadOnly == nil &&
+ s.WriteOnly == nil &&
+ s.ExternalDocs == nil &&
+ s.Example == nil &&
+ s.Deprecated == nil &&
+ s.Schema == nil &&
+ s.XML == nil &&
+ (s.Extensions == nil || s.Extensions.Len() == 0)
+}
+
// GetMaxItems returns the value of the MaxItems field. Returns nil if not set.
func (s *Schema) GetMaxItems() *int64 {
if s == nil {
diff --git a/jsonschema/oas3/schema_exclusive_validation_test.go b/jsonschema/oas3/schema_exclusive_validation_test.go
index b2b16c9..54c3160 100644
--- a/jsonschema/oas3/schema_exclusive_validation_test.go
+++ b/jsonschema/oas3/schema_exclusive_validation_test.go
@@ -227,7 +227,7 @@ exclusiveMinimum: true
exclusiveMaximum: false
`,
openAPIVersion: pointer.From("3.1.0"),
- wantErrs: []string{"[5:19] schema.exclusiveMinimum expected number, got boolean", "[6:19] schema.exclusiveMaximum expected number, got boolean"},
+ wantErrs: []string{"[5:19] error validation-type-mismatch schema.exclusiveMinimum expected number, got boolean", "[6:19] error validation-type-mismatch schema.exclusiveMaximum expected number, got boolean"},
},
{
name: "boolean exclusiveMinimum with 3.1 $schema should fail",
@@ -239,7 +239,7 @@ maximum: 100
exclusiveMinimum: true
exclusiveMaximum: false
`,
- wantErrs: []string{"[6:19] schema.exclusiveMinimum expected number, got boolean", "[7:19] schema.exclusiveMaximum expected number, got boolean"},
+ wantErrs: []string{"[6:19] error validation-type-mismatch schema.exclusiveMinimum expected number, got boolean", "[7:19] error validation-type-mismatch schema.exclusiveMaximum expected number, got boolean"},
},
// Invalid types should always fail
{
@@ -248,7 +248,7 @@ exclusiveMaximum: false
type: number
exclusiveMinimum: "invalid"
`,
- wantErrs: []string{"[2:1] schema.exclusiveMinimum expected number, got string", "[3:19] schema.exclusiveMinimum failed to validate either bool [schema.exclusiveMinimum line 3: cannot unmarshal !!str `invalid` into bool] or float64 [schema.exclusiveMinimum line 3: cannot unmarshal !!str `invalid` into float64]"},
+ wantErrs: []string{"[2:1] error validation-type-mismatch schema.exclusiveMinimum expected number, got string", "[3:19] error validation-type-mismatch schema.exclusiveMinimum failed to validate either bool [schema.exclusiveMinimum line 3: cannot unmarshal !!str `invalid` into bool] or float64 [schema.exclusiveMinimum line 3: cannot unmarshal !!str `invalid` into float64]"},
},
{
name: "invalid string type for exclusiveMaximum",
@@ -256,7 +256,7 @@ exclusiveMinimum: "invalid"
type: number
exclusiveMaximum: "invalid"
`,
- wantErrs: []string{"[2:1] schema.exclusiveMaximum expected number, got string", "[3:19] schema.exclusiveMaximum failed to validate either bool [schema.exclusiveMaximum line 3: cannot unmarshal !!str `invalid` into bool] or float64 [schema.exclusiveMaximum line 3: cannot unmarshal !!str `invalid` into float64]"},
+ wantErrs: []string{"[2:1] error validation-type-mismatch schema.exclusiveMaximum expected number, got string", "[3:19] error validation-type-mismatch schema.exclusiveMaximum failed to validate either bool [schema.exclusiveMaximum line 3: cannot unmarshal !!str `invalid` into bool] or float64 [schema.exclusiveMaximum line 3: cannot unmarshal !!str `invalid` into float64]"},
},
{
name: "invalid array type for exclusiveMinimum",
@@ -264,7 +264,7 @@ exclusiveMaximum: "invalid"
type: number
exclusiveMinimum: [1, 2, 3]
`,
- wantErrs: []string{"[2:1] schema.exclusiveMinimum expected number, got array", "[3:19] schema.exclusiveMinimum failed to validate either bool [schema.exclusiveMinimum expected bool, got sequence] or float64 [schema.exclusiveMinimum expected float64, got sequence]"},
+ wantErrs: []string{"[2:1] error validation-type-mismatch schema.exclusiveMinimum expected number, got array", "[3:19] error validation-type-mismatch schema.exclusiveMinimum failed to validate either bool [schema.exclusiveMinimum expected bool, got sequence] or float64 [schema.exclusiveMinimum expected float64, got sequence]"},
},
// Mixed boolean and numeric should fail with OpenAPI 3.0 (only supports boolean)
{
@@ -276,7 +276,7 @@ exclusiveMinimum: true
exclusiveMaximum: 50.5
`,
openAPIVersion: pointer.From("3.0.3"),
- wantErrs: []string{"[5:19] schema.exclusiveMaximum expected boolean, got number"},
+ wantErrs: []string{"[5:19] error validation-type-mismatch schema.exclusiveMaximum expected boolean, got number"},
},
{
name: "mixed numeric exclusiveMinimum and boolean exclusiveMaximum with OpenAPI 3.0 should fail",
@@ -287,7 +287,7 @@ exclusiveMinimum: 0.5
exclusiveMaximum: true
`,
openAPIVersion: pointer.From("3.0.3"),
- wantErrs: []string{"[4:19] schema.exclusiveMinimum expected boolean, got number"},
+ wantErrs: []string{"[4:19] error validation-type-mismatch schema.exclusiveMinimum expected boolean, got number"},
},
}
diff --git a/jsonschema/oas3/schema_validate_test.go b/jsonschema/oas3/schema_validate_test.go
index 644c6c6..cfb3103 100644
--- a/jsonschema/oas3/schema_validate_test.go
+++ b/jsonschema/oas3/schema_validate_test.go
@@ -379,8 +379,8 @@ externalDocs:
description: More information
`,
wantErrs: []string{
- "[2:1] schema.externalDocs missing property 'url'",
- "[5:3] externalDocumentation.url is missing",
+ "[2:1] error validation-required-field schema.externalDocs missing property 'url'",
+ "[5:3] error validation-required-field externalDocumentation.url is required",
},
},
{
@@ -390,8 +390,8 @@ type: invalid_type
title: Invalid Type
`,
wantErrs: []string{
- "[2:7] schema.type expected array, got string",
- "[2:7] schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'",
+ "[2:7] error validation-invalid-schema schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'",
+ "[2:7] error validation-type-mismatch schema.type expected array, got string",
},
},
{
@@ -400,7 +400,7 @@ title: Invalid Type
type: string
minLength: -1
`,
- wantErrs: []string{"[3:12] schema.minLength minimum: got -1, want 0"},
+ wantErrs: []string{"[3:12] error validation-invalid-schema schema.minLength minimum: got -1, want 0"},
},
{
name: "negative multipleOf",
@@ -408,7 +408,7 @@ minLength: -1
type: number
multipleOf: -1
`,
- wantErrs: []string{"[3:13] schema.multipleOf exclusiveMinimum: got -1, want 0"},
+ wantErrs: []string{"[3:13] error validation-invalid-schema schema.multipleOf exclusiveMinimum: got -1, want 0"},
},
{
name: "zero multipleOf",
@@ -416,7 +416,7 @@ multipleOf: -1
type: number
multipleOf: 0
`,
- wantErrs: []string{"[3:13] schema.multipleOf exclusiveMinimum: got 0, want 0"},
+ wantErrs: []string{"[3:13] error validation-invalid-schema schema.multipleOf exclusiveMinimum: got 0, want 0"},
},
{
name: "invalid additionalProperties type",
@@ -425,9 +425,9 @@ type: object
additionalProperties: "invalid"
`,
wantErrs: []string{
- "[2:1] schema.additionalProperties expected one of [boolean, object], got string",
- "[2:1] schema.additionalProperties expected one of [boolean, object], got string",
- "[3:23] schema.additionalProperties failed to validate either Schema [schema.additionalProperties expected object, got `invalid`] or bool [schema.additionalProperties line 3: cannot unmarshal !!str `invalid` into bool]",
+ "[2:1] error validation-type-mismatch schema.additionalProperties expected one of [boolean, object], got string",
+ "[2:1] error validation-type-mismatch schema.additionalProperties expected one of [boolean, object], got string",
+ "[3:23] error validation-type-mismatch schema.additionalProperties failed to validate either Schema [schema.additionalProperties expected object, got `invalid`] or bool [schema.additionalProperties line 3: cannot unmarshal !!str `invalid` into bool]",
},
},
{
@@ -436,7 +436,7 @@ additionalProperties: "invalid"
type: array
minItems: -1
`,
- wantErrs: []string{"[3:11] schema.minItems minimum: got -1, want 0"},
+ wantErrs: []string{"[3:11] error validation-invalid-schema schema.minItems minimum: got -1, want 0"},
},
{
name: "negative minProperties",
@@ -444,7 +444,7 @@ minItems: -1
type: object
minProperties: -1
`,
- wantErrs: []string{"[3:16] schema.minProperties minimum: got -1, want 0"},
+ wantErrs: []string{"[3:16] error validation-invalid-schema schema.minProperties minimum: got -1, want 0"},
},
{
name: "invalid items type",
@@ -453,9 +453,9 @@ type: array
items: "invalid"
`,
wantErrs: []string{
- "[2:1] schema.items expected one of [boolean, object], got string",
- "[2:1] schema.items expected one of [boolean, object], got string",
- "[3:8] schema.items failed to validate either Schema [schema.items expected object, got `invalid`] or bool [schema.items line 3: cannot unmarshal !!str `invalid` into bool]",
+ "[2:1] error validation-type-mismatch schema.items expected one of [boolean, object], got string",
+ "[2:1] error validation-type-mismatch schema.items expected one of [boolean, object], got string",
+ "[3:8] error validation-type-mismatch schema.items failed to validate either Schema [schema.items expected object, got `invalid`] or bool [schema.items line 3: cannot unmarshal !!str `invalid` into bool]",
},
},
{
@@ -465,8 +465,8 @@ type: object
required: "invalid"
`,
wantErrs: []string{
- "[2:1] schema.required expected array, got string",
- "[3:11] schema.required expected sequence, got `invalid`",
+ "[2:1] error validation-type-mismatch schema.required expected array, got string",
+ "[3:11] error validation-type-mismatch schema.required expected sequence, got `invalid`",
},
},
{
@@ -475,8 +475,8 @@ required: "invalid"
allOf: "invalid"
`,
wantErrs: []string{
- "[2:1] schema.allOf expected array, got string",
- "[2:8] schema.allOf expected sequence, got `invalid`",
+ "[2:1] error validation-type-mismatch schema.allOf expected array, got string",
+ "[2:8] error validation-type-mismatch schema.allOf expected sequence, got `invalid`",
},
},
{
@@ -485,8 +485,8 @@ allOf: "invalid"
anyOf: "invalid"
`,
wantErrs: []string{
- "[2:1] schema.anyOf expected array, got string",
- "[2:8] schema.anyOf expected sequence, got `invalid`",
+ "[2:1] error validation-type-mismatch schema.anyOf expected array, got string",
+ "[2:8] error validation-type-mismatch schema.anyOf expected sequence, got `invalid`",
},
},
{
@@ -495,8 +495,8 @@ anyOf: "invalid"
oneOf: "invalid"
`,
wantErrs: []string{
- "[2:1] schema.oneOf expected array, got string",
- "[2:8] schema.oneOf expected sequence, got `invalid`",
+ "[2:1] error validation-type-mismatch schema.oneOf expected array, got string",
+ "[2:8] error validation-type-mismatch schema.oneOf expected sequence, got `invalid`",
},
},
{
@@ -506,49 +506,49 @@ $schema: "https://spec.openapis.org/oas/3.0/dialect/2024-10-18"
$ref: "#/components/schemas/User"
required: ["name", "email"]
`,
- wantErrs: []string{"[2:1] schema. additional properties '$ref' not allowed"},
+ wantErrs: []string{"[2:1] error validation-invalid-schema schema. additional properties '$ref' not allowed"},
},
{
name: "empty component name in $ref",
yml: `
$ref: "#/components/schemas/"
`,
- wantErrs: []string{"[2:1] invalid reference: component name cannot be empty"},
+ wantErrs: []string{"[2:1] error validation-invalid-reference invalid reference: component name cannot be empty"},
},
{
name: "missing component name in $ref",
yml: `
$ref: "#/components/schemas"
`,
- wantErrs: []string{"[2:1] invalid reference: component name cannot be empty"},
+ wantErrs: []string{"[2:1] error validation-invalid-reference invalid reference: component name cannot be empty"},
},
{
name: "component name with invalid characters in $ref",
yml: `
$ref: "#/components/schemas/User@Schema"
`,
- wantErrs: []string{`[2:1] invalid reference: component name "User@Schema" must match pattern ^[a-zA-Z0-9.\-_]+$`},
+ wantErrs: []string{`[2:1] error validation-invalid-reference invalid reference: component name "User@Schema" must match pattern ^[a-zA-Z0-9.\-_]+$`},
},
{
name: "component name with space in $ref",
yml: `
$ref: "#/components/schemas/User Schema"
`,
- wantErrs: []string{`[2:1] invalid reference: component name "User Schema" must match pattern ^[a-zA-Z0-9.\-_]+$`},
+ wantErrs: []string{`[2:1] error validation-invalid-reference invalid reference: component name "User Schema" must match pattern ^[a-zA-Z0-9.\-_]+$`},
},
{
name: "invalid JSON pointer - missing leading slash in $ref",
yml: `
$ref: "#components/schemas/User"
`,
- wantErrs: []string{"[2:1] invalid reference JSON pointer: validation error -- jsonpointer must start with /: components/schemas/User"},
+ wantErrs: []string{"[2:1] error validation-invalid-reference invalid reference JSON pointer: validation error -- jsonpointer must start with /: components/schemas/User"},
},
{
name: "empty JSON pointer in $ref",
yml: `
$ref: "#"
`,
- wantErrs: []string{"[2:1] invalid reference JSON pointer: empty"},
+ wantErrs: []string{"[2:1] error validation-invalid-reference invalid reference JSON pointer: empty"},
},
}
diff --git a/jsonschema/oas3/tests/testsuite b/jsonschema/oas3/tests/testsuite
index 8b826d6..d69537a 160000
--- a/jsonschema/oas3/tests/testsuite
+++ b/jsonschema/oas3/tests/testsuite
@@ -1 +1 @@
-Subproject commit 8b826d6b27981aa7a21c8eaa44ea0e6d70d6d256
+Subproject commit d69537acea93fd7481ad80ff3015d88ab0b13d5a
diff --git a/jsonschema/oas3/validation.go b/jsonschema/oas3/validation.go
index b27ac26..15acb2e 100644
--- a/jsonschema/oas3/validation.go
+++ b/jsonschema/oas3/validation.go
@@ -85,7 +85,7 @@ func (js *Schema) Validate(ctx context.Context, opts ...validation.Option) []err
// Validate reference string if present
if js.IsReference() {
if err := js.GetRef().Validate(); err != nil {
- errs = append(errs, validation.NewValidationError(err, js.GetCore().Ref.GetKeyNodeOrRoot(js.GetRootNode())))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, err, js.GetCore().Ref.GetKeyNodeOrRoot(js.GetRootNode())))
}
}
@@ -129,14 +129,14 @@ func (js *Schema) Validate(ctx context.Context, opts ...validation.Option) []err
if err := json.YAMLToJSON(core.RootNode, 0, buf); err != nil {
return []error{
- validation.NewValidationError(fmt.Errorf("schema is not valid json: %w", err), core.RootNode),
+ validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("schema is not valid json: %w", err), core.RootNode),
}
}
jsAny, err := jsValidator.UnmarshalJSON(buf)
if err != nil {
return []error{
- validation.NewValidationError(fmt.Errorf("schema is not valid json: %w", err), core.RootNode),
+ validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("schema is not valid json: %w", err), core.RootNode),
}
}
@@ -146,7 +146,7 @@ func (js *Schema) Validate(ctx context.Context, opts ...validation.Option) []err
if errors.As(err, &validationErr) {
errs = append(errs, getRootCauses(validationErr, *core)...)
} else {
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("schema invalid: %s", err.Error()), core.RootNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSchema, fmt.Errorf("schema invalid: %s", err.Error()), core.RootNode))
}
}
@@ -172,7 +172,7 @@ func getRootCauses(err *jsValidator.ValidationError, js core.Schema) []error {
t, err := jsonpointer.GetTarget(js, errJP, jsonpointer.WithStructTags("key"))
if err != nil {
- errs = append(errs, validation.NewValidationError(err, js.GetRootNode()))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidTarget, err, js.GetRootNode()))
continue
}
@@ -206,11 +206,11 @@ func getRootCauses(err *jsValidator.ValidationError, js core.Schema) []error {
msg = fmt.Sprintf("expected %s, got %s", want, t.Got)
- newErr = validation.NewValidationError(validation.NewTypeMismatchError(parentName, msg), valueNode)
+ newErr = validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, msg), valueNode)
case *kind.Required:
- newErr = validation.NewValidationError(validation.NewMissingFieldError("%s %s", parentName, msg), valueNode)
+ newErr = validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, fmt.Errorf("%s %s", parentName, msg), valueNode)
default:
- newErr = validation.NewValidationError(validation.NewValueValidationError("%s %s", parentName, msg), valueNode)
+ newErr = validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSchema, fmt.Errorf("%s %s", parentName, msg), valueNode)
}
if newErr != nil {
errs = append(errs, newErr)
diff --git a/jsonschema/oas3/walk.go b/jsonschema/oas3/walk.go
index f2a9440..34c3f43 100644
--- a/jsonschema/oas3/walk.go
+++ b/jsonschema/oas3/walk.go
@@ -65,7 +65,7 @@ func walkSchema(ctx context.Context, schema *JSONSchema[Referenceable], loc walk
}
if schema.IsSchema() {
- js := schema.Left
+ js := schema.GetSchema()
// Walk through allOf schemas
for i, schema := range js.AllOf {
diff --git a/jsonschema/oas3/xml.go b/jsonschema/oas3/xml.go
index e8c16ac..052dc46 100644
--- a/jsonschema/oas3/xml.go
+++ b/jsonschema/oas3/xml.go
@@ -2,6 +2,7 @@ package oas3
import (
"context"
+ "fmt"
"net/url"
"reflect"
@@ -124,9 +125,9 @@ func (x *XML) Validate(ctx context.Context, opts ...validation.Option) []error {
if x.Namespace != nil {
u, err := url.Parse(*x.Namespace)
if err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("xml.namespace is not a valid uri: %s", err), core, core.Namespace))
+ errs = append(errs, validation.NewValueError(validation.SeverityWarning, validation.RuleValidationInvalidFormat, fmt.Errorf("xml.namespace is not a valid uri: %w", err), core, core.Namespace))
} else if !u.IsAbs() {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("xml.namespace must be an absolute uri: %s", *x.Namespace), core, core.Namespace))
+ errs = append(errs, validation.NewValueError(validation.SeverityWarning, validation.RuleValidationInvalidFormat, fmt.Errorf("xml.namespace must be an absolute uri: %s", *x.Namespace), core, core.Namespace))
}
}
diff --git a/linter/README.md b/linter/README.md
new file mode 100644
index 0000000..da3ad40
--- /dev/null
+++ b/linter/README.md
@@ -0,0 +1,241 @@
+# Linter Engine
+
+This document provides an overview of the linter engine implementation.
+
+## Architecture Overview
+
+The linter engine is a generic, spec-agnostic framework for implementing configurable linting rules across different API specifications (OpenAPI, Arazzo, Swagger).
+
+### Core Components
+
+1. **Generic Linter Engine** ([`linter/`](linter/))
+ - [`Linter[T]`](linter/linter.go) - Main linting engine with configuration support
+ - [`Registry[T]`](linter/registry.go) - Rule registry with category management
+ - [`Rule`](linter/rule.go) - Base rule interface and specialized interfaces
+ - [`RuleConfig`](linter/config.go) - Per-rule configuration with severity overrides
+ - [`DocumentInfo[T]`](linter/document.go) - Document + location for reference resolution
+ - Format types for text and JSON output
+ - Parallel rule execution for improved performance
+
+2. **OpenAPI Linter** ([`openapi/linter/`](openapi/linter/))
+ - OpenAPI-specific linter implementation
+ - Rule registry with built-in rules
+ - Integration with OpenAPI parser and validator
+
+3. **Rules** ([`openapi/linter/rules/`](openapi/linter/rules/))
+ - Individual linting rules (e.g., [`style-path-params`](openapi/linter/rules/path_params.go))
+ - Each rule implements the [`RuleRunner[*openapi.OpenAPI]`](linter/rule.go) interface
+
+4. **CLI Integration** ([`cmd/openapi/commands/openapi.spec/lint.go`](cmd/openapi/commands/openapi.spec/lint.go))
+ - `openapi spec lint` command
+ - Configuration file support (`.lint.yaml`)
+ - Rule documentation generation (`--list-rules`)
+
+## Key Features
+
+### 1. Rule Configuration
+
+Rules can be configured via YAML configuration file:
+
+```yaml
+extends:
+ - all # or specific rulesets like "recommended", "strict"
+
+categories:
+ style:
+ enabled: true
+ severity: warning
+
+rules:
+ style-path-params:
+ enabled: true
+ severity: error
+ options:
+ # Rule-specific options
+```
+
+### 2. Severity Overrides
+
+Rules have default severities that can be overridden:
+- Fatal errors (terminate execution)
+- Error severity (build failures)
+- Warning and hint severities (informational)
+
+### 3. External Reference Resolution
+
+Rules automatically resolve external references (HTTP URLs, file paths):
+
+```yaml
+paths:
+ /users/{userId}:
+ get:
+ parameters:
+ - $ref: "https://example.com/params/user-id.yaml"
+ responses:
+ '200':
+ description: ok
+```
+
+The linter:
+- Uses [`DocumentInfo.Location`](linter/document.go) as the base for resolving relative references
+- Supports custom HTTP clients and virtual filesystems via [`LintOptions.ResolveOptions`](linter/document.go)
+- Reports resolution errors as validation errors with proper severity and location
+
+### 4. Quick Fix Suggestions
+
+Rules can suggest fixes using [`validation.Error`](validation/validation.go) with quick fix support:
+
+```go
+validation.NewValidationErrorWithQuickFix(
+ severity,
+ rule,
+ fmt.Errorf("path parameter {%s} is not defined", param),
+ node,
+ &validation.QuickFix{
+ Description: "Add missing path parameter",
+ Replacement: "...",
+ },
+)
+```
+
+## Implemented Rules
+
+### style-path-params
+
+Ensures path template variables (e.g., `{userId}`) have corresponding parameter definitions with `in='path'`.
+
+**Checks:**
+- All template params must have corresponding parameter definitions
+- All path parameters must be used in the template
+- Works with parameters at PathItem level (inherited) and Operation level (can override)
+- Resolves external references to parameters
+
+**Example:**
+
+```yaml
+# ✅ Valid
+paths:
+ /users/{userId}:
+ get:
+ parameters:
+ - name: userId
+ in: path
+ required: true
+
+# ❌ Invalid - missing parameter definition
+paths:
+ /users/{userId}:
+ get:
+ responses:
+ '200':
+ description: ok
+```
+
+## Usage
+
+### CLI
+
+```bash
+# Lint with default configuration
+openapi spec lint openapi.yaml
+
+# Lint with custom config
+openapi spec lint --config .lint.yaml openapi.yaml
+
+# List all available rules
+openapi spec lint --list-rules
+
+# Output in JSON format
+openapi spec lint --format json openapi.yaml
+```
+
+### Programmatic
+
+```go
+import (
+ "context"
+ "github.com/speakeasy-api/openapi/linter"
+ openapiLinter "github.com/speakeasy-api/openapi/openapi/linter"
+)
+
+// Create linter with configuration
+config := &linter.Config{
+ Extends: []string{"all"},
+}
+lntr := openapiLinter.NewOpenAPILinter(config)
+
+// Lint document
+docInfo := &linter.DocumentInfo[*openapi.OpenAPI]{
+ Document: doc,
+ Location: "/path/to/openapi.yaml",
+}
+output, err := lntr.Lint(ctx, docInfo, nil, nil)
+if err != nil {
+ // Handle error
+}
+
+// Check results
+if output.HasErrors() {
+ fmt.Println(output.FormatText())
+}
+```
+
+## Adding New Rules
+
+To add a new rule:
+
+1. **Create the rule** in [`openapi/linter/rules/`](openapi/linter/rules/)
+
+```go
+type MyRule struct{}
+
+func (r *MyRule) ID() string { return "style-my-rule" }
+func (r *MyRule) Category() string { return "style" }
+func (r *MyRule) Description() string { return "..." }
+func (r *MyRule) Link() string { return "..." }
+func (r *MyRule) DefaultSeverity() validation.Severity {
+ return validation.SeverityWarning
+}
+func (r *MyRule) Versions() []string { return nil }
+
+func (r *MyRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error {
+ doc := docInfo.Document
+ // Implement rule logic
+ // Use openapi.Walk() to traverse the document
+ // Return validation.Error instances for violations
+ return nil
+}
+```
+
+2. **Register the rule** in [`openapi/linter/linter.go`](openapi/linter/linter.go)
+
+```go
+registry.Register(&rules.MyRule{})
+```
+
+3. **Write tests** in [`openapi/linter/rules/my_rule_test.go`](openapi/linter/rules/)
+
+```go
+func TestMyRule_Success(t *testing.T) {
+ t.Parallel()
+ // ... test implementation
+}
+```
+
+## Design Principles
+
+1. **Generic Architecture** - The core linter is spec-agnostic (`Linter[T any]`)
+2. **Type Safety** - Spec-specific rules use typed interfaces (`RuleRunner[*openapi.OpenAPI]`)
+3. **Separation of Concerns** - Core engine, spec linters, and rules are separate packages
+4. **Extensibility** - Easy to add new rules, rulesets, and specs
+5. **Configuration Over Code** - Rule behavior controlled via YAML config
+6. **Reference Resolution** - Automatic external reference resolution with proper error handling
+7. **Testing** - Comprehensive test coverage with parallel execution
+
+## Next Steps
+
+1. Add more OpenAPI rules (e.g., security, best practices, naming conventions)
+2. Create linters for other specs (Arazzo, Swagger 2.0)
+3. Add auto-fix capabilities for rules that support it
+4. Implement rule documentation generation in markdown/HTML formats
+5. Add performance profiles and caching for large documents
diff --git a/linter/config.go b/linter/config.go
new file mode 100644
index 0000000..8484d97
--- /dev/null
+++ b/linter/config.go
@@ -0,0 +1,86 @@
+package linter
+
+import (
+ "github.com/speakeasy-api/openapi/references"
+ "github.com/speakeasy-api/openapi/validation"
+)
+
+// Config represents the linter configuration
+type Config struct {
+ // Extends specifies rulesets to extend (e.g., "recommended", "all")
+ Extends []string `yaml:"extends,omitempty" json:"extends,omitempty"`
+
+ // Rules contains per-rule configuration
+ Rules map[string]RuleConfig `yaml:"rules,omitempty" json:"rules,omitempty"`
+
+ // Categories contains per-category configuration
+ Categories map[string]CategoryConfig `yaml:"categories,omitempty" json:"categories,omitempty"`
+
+ // Ignores contains global ignore patterns
+ Ignores []IgnorePattern `yaml:"ignores,omitempty" json:"ignores,omitempty"`
+
+ // OutputFormat specifies the output format
+ OutputFormat OutputFormat `yaml:"output_format,omitempty" json:"output_format,omitempty"`
+}
+
+// RuleConfig configures a specific rule
+type RuleConfig struct {
+ // Enabled controls whether the rule is active
+ Enabled *bool `yaml:"enabled,omitempty" json:"enabled,omitempty"`
+
+ // Severity overrides the default severity
+ Severity *validation.Severity `yaml:"severity,omitempty" json:"severity,omitempty"`
+
+ // Options contains rule-specific configuration
+ Options map[string]any `yaml:"options,omitempty" json:"options,omitempty"`
+
+ // ResolveOptions contains runtime options for reference resolution (not serialized)
+ // These are set by the linter engine when running rules
+ ResolveOptions *references.ResolveOptions `yaml:"-" json:"-"`
+}
+
+// GetSeverity returns the effective severity, falling back to default if not overridden
+func (c *RuleConfig) GetSeverity(defaultSeverity validation.Severity) validation.Severity {
+ if c != nil && c.Severity != nil {
+ return *c.Severity
+ }
+ return defaultSeverity
+}
+
+// CategoryConfig configures an entire category of rules
+type CategoryConfig struct {
+ // Enabled controls whether all rules in the category are active
+ Enabled *bool `yaml:"enabled,omitempty" json:"enabled,omitempty"`
+
+ // Severity overrides the default severity for all rules in the category
+ Severity *validation.Severity `yaml:"severity,omitempty" json:"severity,omitempty"`
+}
+
+// IgnorePattern specifies a pattern for ignoring results
+type IgnorePattern struct {
+ // Rule is the rule ID to ignore (empty = all rules)
+ Rule string `yaml:"rule,omitempty" json:"rule,omitempty"`
+
+ // Path is a JSON pointer pattern to match
+ Path string `yaml:"path,omitempty" json:"path,omitempty"`
+
+ // Message pattern to match (regex)
+ MessagePattern string `yaml:"message_pattern,omitempty" json:"message_pattern,omitempty"`
+}
+
+type OutputFormat string
+
+const (
+ OutputFormatText OutputFormat = "text"
+ OutputFormatJSON OutputFormat = "json"
+)
+
+// NewConfig creates a new default configuration
+func NewConfig() *Config {
+ return &Config{
+ Extends: []string{"all"},
+ Rules: make(map[string]RuleConfig),
+ Categories: make(map[string]CategoryConfig),
+ OutputFormat: OutputFormatText,
+ }
+}
diff --git a/linter/config_test.go b/linter/config_test.go
new file mode 100644
index 0000000..ec30191
--- /dev/null
+++ b/linter/config_test.go
@@ -0,0 +1,54 @@
+package linter_test
+
+import (
+ "testing"
+
+ "github.com/speakeasy-api/openapi/linter"
+ "github.com/speakeasy-api/openapi/validation"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestRuleConfig_GetSeverity(t *testing.T) {
+ t.Parallel()
+
+ t.Run("returns configured severity when set", func(t *testing.T) {
+ t.Parallel()
+
+ warningSeverity := validation.SeverityWarning
+ config := linter.RuleConfig{
+ Severity: &warningSeverity,
+ }
+
+ assert.Equal(t, validation.SeverityWarning, config.GetSeverity(validation.SeverityError))
+ })
+
+ t.Run("returns default severity when not set", func(t *testing.T) {
+ t.Parallel()
+
+ config := linter.RuleConfig{}
+
+ assert.Equal(t, validation.SeverityError, config.GetSeverity(validation.SeverityError))
+ })
+
+ t.Run("returns configured severity overriding different default", func(t *testing.T) {
+ t.Parallel()
+
+ hintSeverity := validation.SeverityHint
+ config := linter.RuleConfig{
+ Severity: &hintSeverity,
+ }
+
+ assert.Equal(t, validation.SeverityHint, config.GetSeverity(validation.SeverityWarning))
+ })
+}
+
+func TestNewConfig(t *testing.T) {
+ t.Parallel()
+
+ config := linter.NewConfig()
+ assert.NotNil(t, config)
+ assert.Equal(t, linter.OutputFormatText, config.OutputFormat)
+ assert.NotNil(t, config.Rules)
+ assert.NotNil(t, config.Categories)
+ assert.NotNil(t, config.Extends)
+}
diff --git a/linter/doc.go b/linter/doc.go
new file mode 100644
index 0000000..f58e399
--- /dev/null
+++ b/linter/doc.go
@@ -0,0 +1,260 @@
+package linter
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "strings"
+)
+
+// DocGenerator generates documentation from registered rules
+type DocGenerator[T any] struct {
+ registry *Registry[T]
+}
+
+// NewDocGenerator creates a new documentation generator
+func NewDocGenerator[T any](registry *Registry[T]) *DocGenerator[T] {
+ return &DocGenerator[T]{registry: registry}
+}
+
+// RuleDoc represents documentation for a single rule
+type RuleDoc struct {
+ ID string `json:"id" yaml:"id"`
+ Category string `json:"category" yaml:"category"`
+ Description string `json:"description" yaml:"description"`
+ Rationale string `json:"rationale,omitempty" yaml:"rationale,omitempty"`
+ Link string `json:"link,omitempty" yaml:"link,omitempty"`
+ DefaultSeverity string `json:"default_severity" yaml:"default_severity"`
+ Versions []string `json:"versions,omitempty" yaml:"versions,omitempty"`
+ GoodExample string `json:"good_example,omitempty" yaml:"good_example,omitempty"`
+ BadExample string `json:"bad_example,omitempty" yaml:"bad_example,omitempty"`
+ FixAvailable bool `json:"fix_available" yaml:"fix_available"`
+ ConfigSchema map[string]any `json:"config_schema,omitempty" yaml:"config_schema,omitempty"`
+ ConfigDefaults map[string]any `json:"config_defaults,omitempty" yaml:"config_defaults,omitempty"`
+ Rulesets []string `json:"rulesets" yaml:"rulesets"`
+}
+
+// GenerateRuleDoc generates documentation for a single rule
+func (g *DocGenerator[T]) GenerateRuleDoc(rule RuleRunner[T]) *RuleDoc {
+ doc := &RuleDoc{
+ ID: rule.ID(),
+ Category: rule.Category(),
+ Description: rule.Description(),
+ Link: rule.Link(),
+ DefaultSeverity: rule.DefaultSeverity().String(),
+ Versions: rule.Versions(),
+ Rulesets: g.registry.RulesetsContaining(rule.ID()),
+ }
+
+ // Check for optional documentation interface
+ if documented, ok := any(rule).(DocumentedRule); ok {
+ doc.GoodExample = documented.GoodExample()
+ doc.BadExample = documented.BadExample()
+ doc.Rationale = documented.Rationale()
+ doc.FixAvailable = documented.FixAvailable()
+ }
+
+ // Check for configuration interface
+ if configurable, ok := any(rule).(ConfigurableRule); ok {
+ doc.ConfigSchema = configurable.ConfigSchema()
+ doc.ConfigDefaults = configurable.ConfigDefaults()
+ }
+
+ return doc
+}
+
+// GenerateAllRuleDocs generates documentation for all registered rules
+func (g *DocGenerator[T]) GenerateAllRuleDocs() []*RuleDoc {
+ var docs []*RuleDoc
+ for _, rule := range g.registry.AllRules() {
+ docs = append(docs, g.GenerateRuleDoc(rule))
+ }
+ return docs
+}
+
+// GenerateCategoryDocs groups rules by category
+func (g *DocGenerator[T]) GenerateCategoryDocs() map[string][]*RuleDoc {
+ categories := make(map[string][]*RuleDoc)
+ for _, rule := range g.registry.AllRules() {
+ doc := g.GenerateRuleDoc(rule)
+ categories[doc.Category] = append(categories[doc.Category], doc)
+ }
+ return categories
+}
+
+// WriteJSON writes rule documentation as JSON
+func (g *DocGenerator[T]) WriteJSON(w io.Writer) error {
+ docs := g.GenerateAllRuleDocs()
+ enc := json.NewEncoder(w)
+ enc.SetIndent("", " ")
+ return enc.Encode(map[string]any{
+ "rules": docs,
+ "categories": g.registry.AllCategories(),
+ "rulesets": g.registry.AllRulesets(),
+ })
+}
+
+// WriteMarkdown writes rule documentation as Markdown
+func (g *DocGenerator[T]) WriteMarkdown(w io.Writer) error {
+ docs := g.GenerateCategoryDocs()
+
+ if err := writeLine(w, "# Lint Rules Reference"); err != nil {
+ return err
+ }
+ if err := writeEmptyLine(w); err != nil {
+ return err
+ }
+
+ // Table of contents
+ if err := writeLine(w, "## Categories"); err != nil {
+ return err
+ }
+ if err := writeEmptyLine(w); err != nil {
+ return err
+ }
+ for category := range docs {
+ if err := writeF(w, "- [%s](#%s)\n", category, category); err != nil {
+ return err
+ }
+ }
+ if err := writeEmptyLine(w); err != nil {
+ return err
+ }
+
+ // Rules by category
+ for category, rules := range docs {
+ if err := writeF(w, "## %s\n\n", category); err != nil {
+ return err
+ }
+
+ for _, rule := range rules {
+ if err := g.writeRuleMarkdown(w, rule); err != nil {
+ return err
+ }
+ }
+ }
+
+ return nil
+}
+
+func (g *DocGenerator[T]) writeRuleMarkdown(w io.Writer, rule *RuleDoc) error {
+ if err := writeF(w, "### %s\n\n", rule.ID); err != nil {
+ return err
+ }
+ if err := writeF(w, "**Severity:** %s \n", rule.DefaultSeverity); err != nil {
+ return err
+ }
+ if err := writeF(w, "**Category:** %s \n", rule.Category); err != nil {
+ return err
+ }
+
+ if len(rule.Versions) > 0 {
+ if err := writeF(w, "**Applies to:** %s \n", strings.Join(rule.Versions, ", ")); err != nil {
+ return err
+ }
+ }
+
+ if rule.FixAvailable {
+ if err := writeLine(w, "**Auto-fix available:** Yes "); err != nil {
+ return err
+ }
+ }
+ if err := writeEmptyLine(w); err != nil {
+ return err
+ }
+
+ if err := writeF(w, "%s\n\n", rule.Description); err != nil {
+ return err
+ }
+
+ if rule.Rationale != "" {
+ if err := writeF(w, "#### Rationale\n\n%s\n\n", rule.Rationale); err != nil {
+ return err
+ }
+ }
+
+ if rule.BadExample != "" {
+ if err := writeLine(w, "#### ❌ Incorrect"); err != nil {
+ return err
+ }
+ if err := writeLine(w, "```yaml"); err != nil {
+ return err
+ }
+ if err := writeLine(w, rule.BadExample); err != nil {
+ return err
+ }
+ if err := writeLine(w, "```"); err != nil {
+ return err
+ }
+ if err := writeEmptyLine(w); err != nil {
+ return err
+ }
+ }
+
+ if rule.GoodExample != "" {
+ if err := writeLine(w, "#### ✅ Correct"); err != nil {
+ return err
+ }
+ if err := writeLine(w, "```yaml"); err != nil {
+ return err
+ }
+ if err := writeLine(w, rule.GoodExample); err != nil {
+ return err
+ }
+ if err := writeLine(w, "```"); err != nil {
+ return err
+ }
+ if err := writeEmptyLine(w); err != nil {
+ return err
+ }
+ }
+
+ if len(rule.ConfigSchema) > 0 {
+ if err := writeLine(w, "#### Configuration"); err != nil {
+ return err
+ }
+ if err := writeEmptyLine(w); err != nil {
+ return err
+ }
+ if err := writeLine(w, "| Option | Type | Default | Description |"); err != nil {
+ return err
+ }
+ if err := writeLine(w, "|--------|------|---------|-------------|"); err != nil {
+ return err
+ }
+ // Write config options table
+ if err := writeEmptyLine(w); err != nil {
+ return err
+ }
+ }
+
+ if rule.Link != "" {
+ if err := writeF(w, "[Documentation →](%s)\n\n", rule.Link); err != nil {
+ return err
+ }
+ }
+
+ if err := writeLine(w, "---"); err != nil {
+ return err
+ }
+ if err := writeEmptyLine(w); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func writeLine(w io.Writer, text string) error {
+ _, err := fmt.Fprintln(w, text)
+ return err
+}
+
+func writeEmptyLine(w io.Writer) error {
+ _, err := fmt.Fprintln(w)
+ return err
+}
+
+func writeF(w io.Writer, format string, args ...any) error {
+ _, err := fmt.Fprintf(w, format, args...)
+ return err
+}
diff --git a/linter/doc_test.go b/linter/doc_test.go
new file mode 100644
index 0000000..9ea3ed7
--- /dev/null
+++ b/linter/doc_test.go
@@ -0,0 +1,272 @@
+package linter_test
+
+import (
+ "bytes"
+ "encoding/json"
+ "testing"
+
+ "github.com/speakeasy-api/openapi/linter"
+ "github.com/speakeasy-api/openapi/validation"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestDocGenerator_GenerateRuleDoc(t *testing.T) {
+ t.Parallel()
+
+ t.Run("basic rule documentation", func(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{
+ id: "test-rule",
+ category: "style",
+ description: "Test rule description",
+ link: "https://example.com/rules/test-rule",
+ defaultSeverity: validation.SeverityError,
+ versions: []string{"3.1.0", "3.2.0"},
+ })
+
+ generator := linter.NewDocGenerator(registry)
+ rule, _ := registry.GetRule("test-rule")
+ doc := generator.GenerateRuleDoc(rule)
+
+ assert.Equal(t, "test-rule", doc.ID)
+ assert.Equal(t, "style", doc.Category)
+ assert.Equal(t, "Test rule description", doc.Description)
+ assert.Equal(t, "https://example.com/rules/test-rule", doc.Link)
+ assert.Equal(t, "error", doc.DefaultSeverity)
+ assert.Equal(t, []string{"3.1.0", "3.2.0"}, doc.Versions)
+ assert.Contains(t, doc.Rulesets, "all")
+ })
+
+ t.Run("documented rule with examples", func(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&documentedMockRule{
+ mockRule: mockRule{
+ id: "documented-rule",
+ category: "style",
+ description: "Rule with examples",
+ defaultSeverity: validation.SeverityWarning,
+ },
+ goodExample: "good:\n example: value",
+ badExample: "bad:\n example: value",
+ rationale: "This is why the rule exists",
+ fixAvailable: true,
+ })
+
+ generator := linter.NewDocGenerator(registry)
+ rule, _ := registry.GetRule("documented-rule")
+ doc := generator.GenerateRuleDoc(rule)
+
+ assert.Equal(t, "good:\n example: value", doc.GoodExample)
+ assert.Equal(t, "bad:\n example: value", doc.BadExample)
+ assert.Equal(t, "This is why the rule exists", doc.Rationale)
+ assert.True(t, doc.FixAvailable)
+ })
+
+ t.Run("configurable rule with schema", func(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&configurableMockRule{
+ mockRule: mockRule{
+ id: "configurable-rule",
+ category: "style",
+ description: "Configurable rule",
+ defaultSeverity: validation.SeverityError,
+ },
+ configSchema: map[string]any{
+ "maxLength": map[string]any{"type": "integer"},
+ },
+ configDefaults: map[string]any{
+ "maxLength": 100,
+ },
+ })
+
+ generator := linter.NewDocGenerator(registry)
+ rule, _ := registry.GetRule("configurable-rule")
+ doc := generator.GenerateRuleDoc(rule)
+
+ assert.NotNil(t, doc.ConfigSchema)
+ assert.Contains(t, doc.ConfigSchema, "maxLength")
+ assert.NotNil(t, doc.ConfigDefaults)
+ assert.Equal(t, 100, doc.ConfigDefaults["maxLength"])
+ })
+}
+
+func TestDocGenerator_GenerateAllRuleDocs(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError, description: "Rule 1"})
+ registry.Register(&mockRule{id: "rule-2", category: "security", defaultSeverity: validation.SeverityWarning, description: "Rule 2"})
+ registry.Register(&mockRule{id: "rule-3", category: "style", defaultSeverity: validation.SeverityHint, description: "Rule 3"})
+
+ generator := linter.NewDocGenerator(registry)
+ docs := generator.GenerateAllRuleDocs()
+
+ assert.Len(t, docs, 3)
+
+ // Verify all rules are documented
+ ids := make([]string, len(docs))
+ for i, doc := range docs {
+ ids[i] = doc.ID
+ }
+ assert.ElementsMatch(t, []string{"rule-1", "rule-2", "rule-3"}, ids)
+}
+
+func TestDocGenerator_GenerateCategoryDocs(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{id: "style-1", category: "style", defaultSeverity: validation.SeverityError, description: "Style 1"})
+ registry.Register(&mockRule{id: "style-2", category: "style", defaultSeverity: validation.SeverityError, description: "Style 2"})
+ registry.Register(&mockRule{id: "security-1", category: "security", defaultSeverity: validation.SeverityError, description: "Security 1"})
+
+ generator := linter.NewDocGenerator(registry)
+ categoryDocs := generator.GenerateCategoryDocs()
+
+ assert.Len(t, categoryDocs, 2)
+ assert.Len(t, categoryDocs["style"], 2)
+ assert.Len(t, categoryDocs["security"], 1)
+
+ // Verify correct grouping
+ styleIDs := []string{categoryDocs["style"][0].ID, categoryDocs["style"][1].ID}
+ assert.ElementsMatch(t, []string{"style-1", "style-2"}, styleIDs)
+ assert.Equal(t, "security-1", categoryDocs["security"][0].ID)
+}
+
+func TestDocGenerator_WriteJSON(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{
+ id: "test-rule",
+ category: "style",
+ description: "Test description",
+ link: "https://example.com",
+ defaultSeverity: validation.SeverityError,
+ })
+ _ = registry.RegisterRuleset("recommended", []string{"test-rule"})
+
+ generator := linter.NewDocGenerator(registry)
+
+ var buf bytes.Buffer
+ err := generator.WriteJSON(&buf)
+ require.NoError(t, err)
+
+ // Verify valid JSON
+ var result map[string]any
+ err = json.Unmarshal(buf.Bytes(), &result)
+ require.NoError(t, err)
+
+ // Verify structure
+ assert.Contains(t, result, "rules")
+ assert.Contains(t, result, "categories")
+ assert.Contains(t, result, "rulesets")
+
+ // Verify rules array
+ rules, ok := result["rules"].([]any)
+ require.True(t, ok)
+ assert.Len(t, rules, 1)
+
+ // Verify rule details
+ ruleMap, ok := rules[0].(map[string]any)
+ require.True(t, ok)
+ assert.Equal(t, "test-rule", ruleMap["id"])
+ assert.Equal(t, "style", ruleMap["category"])
+}
+
+func TestDocGenerator_WriteMarkdown(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&documentedMockRule{
+ mockRule: mockRule{
+ id: "test-rule",
+ category: "style",
+ description: "Test rule description",
+ link: "https://docs.example.com/rules/test-rule",
+ defaultSeverity: validation.SeverityError,
+ },
+ goodExample: "good:\n value: correct",
+ badExample: "bad:\n value: incorrect",
+ rationale: "This rule ensures consistency",
+ fixAvailable: true,
+ })
+
+ generator := linter.NewDocGenerator(registry)
+
+ var buf bytes.Buffer
+ err := generator.WriteMarkdown(&buf)
+ require.NoError(t, err)
+
+ output := buf.String()
+
+ // Verify markdown structure
+ assert.Contains(t, output, "# Lint Rules Reference")
+ assert.Contains(t, output, "## Categories")
+ assert.Contains(t, output, "## style") // Category header
+ assert.Contains(t, output, "### test-rule") // Rule header
+ assert.Contains(t, output, "**Severity:** error")
+ assert.Contains(t, output, "**Category:** style")
+ assert.Contains(t, output, "Test rule description")
+ assert.Contains(t, output, "#### Rationale")
+ assert.Contains(t, output, "This rule ensures consistency")
+ assert.Contains(t, output, "#### ❌ Incorrect")
+ assert.Contains(t, output, "bad:\n value: incorrect")
+ assert.Contains(t, output, "#### ✅ Correct")
+ assert.Contains(t, output, "good:\n value: correct")
+ assert.Contains(t, output, "**Auto-fix available:** Yes")
+ assert.Contains(t, output, "[Documentation →](https://docs.example.com/rules/test-rule)")
+ assert.Contains(t, output, "---") // Separator
+}
+
+func TestDocGenerator_WriteMarkdown_WithVersions(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{
+ id: "versioned-rule",
+ category: "validation",
+ description: "Version-specific rule",
+ defaultSeverity: validation.SeverityError,
+ versions: []string{"3.1.0", "3.2.0"},
+ })
+
+ generator := linter.NewDocGenerator(registry)
+
+ var buf bytes.Buffer
+ err := generator.WriteMarkdown(&buf)
+ require.NoError(t, err)
+
+ output := buf.String()
+ assert.Contains(t, output, "**Applies to:** 3.1.0, 3.2.0")
+}
+
+// documentedMockRule implements DocumentedRule interface
+type documentedMockRule struct {
+ mockRule
+ goodExample string
+ badExample string
+ rationale string
+ fixAvailable bool
+}
+
+func (r *documentedMockRule) GoodExample() string { return r.goodExample }
+func (r *documentedMockRule) BadExample() string { return r.badExample }
+func (r *documentedMockRule) Rationale() string { return r.rationale }
+func (r *documentedMockRule) FixAvailable() bool { return r.fixAvailable }
+
+// configurableMockRule implements ConfigurableRule interface
+type configurableMockRule struct {
+ mockRule
+ configSchema map[string]any
+ configDefaults map[string]any
+}
+
+func (r *configurableMockRule) ConfigSchema() map[string]any { return r.configSchema }
+func (r *configurableMockRule) ConfigDefaults() map[string]any { return r.configDefaults }
diff --git a/linter/document.go b/linter/document.go
new file mode 100644
index 0000000..b4fe8c8
--- /dev/null
+++ b/linter/document.go
@@ -0,0 +1,48 @@
+package linter
+
+import (
+ "github.com/speakeasy-api/openapi/openapi"
+ "github.com/speakeasy-api/openapi/references"
+)
+
+// DocumentInfo contains a document and its metadata for linting
+type DocumentInfo[T any] struct {
+ // Document is the parsed document to lint
+ Document T
+
+ // Location is the absolute location (URL or file path) of the document
+ // This is used for resolving relative references
+ Location string
+
+ // Index contains an index of various nodes from the provided document
+ Index *openapi.Index
+}
+
+// NewDocumentInfo creates a new DocumentInfo with the given document and location
+func NewDocumentInfo[T any](doc T, location string) *DocumentInfo[T] {
+ return &DocumentInfo[T]{
+ Document: doc,
+ Location: location,
+ }
+}
+
+// NewDocumentInfoWithIndex creates a new DocumentInfo with a pre-computed index
+func NewDocumentInfoWithIndex[T any](doc T, location string, index *openapi.Index) *DocumentInfo[T] {
+ return &DocumentInfo[T]{
+ Document: doc,
+ Location: location,
+ Index: index,
+ }
+}
+
+// LintOptions contains runtime options for linting
+type LintOptions struct {
+ // ResolveOptions contains options for reference resolution
+ // If nil, default options will be used
+ ResolveOptions *references.ResolveOptions
+
+ // VersionFilter is the document version (e.g., "3.0", "3.1")
+ // If set, only rules that apply to this version will be run
+ // Rules with nil/empty Versions() apply to all versions
+ VersionFilter *string
+}
diff --git a/linter/document_test.go b/linter/document_test.go
new file mode 100644
index 0000000..89b8d45
--- /dev/null
+++ b/linter/document_test.go
@@ -0,0 +1,38 @@
+package linter_test
+
+import (
+ "testing"
+
+ "github.com/speakeasy-api/openapi/linter"
+ "github.com/speakeasy-api/openapi/openapi"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestNewDocumentInfo(t *testing.T) {
+ t.Parallel()
+
+ doc := &MockDoc{ID: "test-doc"}
+ location := "/path/to/openapi.yaml"
+
+ docInfo := linter.NewDocumentInfo(doc, location)
+
+ assert.NotNil(t, docInfo)
+ assert.Equal(t, doc, docInfo.Document)
+ assert.Equal(t, location, docInfo.Location)
+ assert.Nil(t, docInfo.Index)
+}
+
+func TestNewDocumentInfoWithIndex(t *testing.T) {
+ t.Parallel()
+
+ doc := &MockDoc{ID: "test-doc"}
+ location := "/path/to/openapi.yaml"
+ index := &openapi.Index{}
+
+ docInfo := linter.NewDocumentInfoWithIndex(doc, location, index)
+
+ assert.NotNil(t, docInfo)
+ assert.Equal(t, doc, docInfo.Document)
+ assert.Equal(t, location, docInfo.Location)
+ assert.Equal(t, index, docInfo.Index)
+}
diff --git a/linter/format/format_test.go b/linter/format/format_test.go
new file mode 100644
index 0000000..766d745
--- /dev/null
+++ b/linter/format/format_test.go
@@ -0,0 +1,140 @@
+package format_test
+
+import (
+ "errors"
+ "strings"
+ "testing"
+
+ "github.com/speakeasy-api/openapi/linter/format"
+ "github.com/speakeasy-api/openapi/validation"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "gopkg.in/yaml.v3"
+)
+
+func TestTextFormatter_Format(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ errors []error
+ contains []string
+ }{
+ {
+ name: "empty errors",
+ errors: []error{},
+ contains: []string{},
+ },
+ {
+ name: "single error",
+ errors: []error{
+ validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("test error message"), nil),
+ },
+ contains: []string{"error", "test-rule", "test error message"},
+ },
+ {
+ name: "multiple errors with different severities",
+ errors: []error{
+ validation.NewValidationError(validation.SeverityError, "error-rule", errors.New("error message"), nil),
+ validation.NewValidationError(validation.SeverityWarning, "warning-rule", errors.New("warning message"), nil),
+ validation.NewValidationError(validation.SeverityHint, "hint-rule", errors.New("hint message"), nil),
+ },
+ contains: []string{
+ "error", "error-rule", "error message",
+ "warning", "warning-rule", "warning message",
+ "hint", "hint-rule", "hint message",
+ },
+ },
+ {
+ name: "error with line number",
+ errors: []error{
+ &validation.Error{
+ UnderlyingError: errors.New("at specific location"),
+ Node: &yaml.Node{Line: 42, Column: 10},
+ Severity: validation.SeverityError,
+ Rule: "location-rule",
+ },
+ },
+ contains: []string{"42", "10", "location-rule"},
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+
+ formatter := format.NewTextFormatter()
+ result, err := formatter.Format(tt.errors)
+ require.NoError(t, err)
+
+ for _, substr := range tt.contains {
+ assert.Contains(t, result, substr, "output should contain %q", substr)
+ }
+ })
+ }
+}
+
+func TestJSONFormatter_Format(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ errors []error
+ contains []string
+ }{
+ {
+ name: "empty errors",
+ errors: []error{},
+ contains: []string{`"results"`, `"summary"`},
+ },
+ {
+ name: "single error",
+ errors: []error{
+ validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("test error message"), nil),
+ },
+ contains: []string{`"error"`, `"test-rule"`, `"test error message"`},
+ },
+ {
+ name: "multiple errors",
+ errors: []error{
+ validation.NewValidationError(validation.SeverityError, "rule-1", errors.New("error 1"), nil),
+ validation.NewValidationError(validation.SeverityWarning, "rule-2", errors.New("error 2"), nil),
+ },
+ contains: []string{
+ `"rule-1"`, `"error 1"`,
+ `"rule-2"`, `"error 2"`,
+ `"warning"`,
+ },
+ },
+ {
+ name: "error with location",
+ errors: []error{
+ &validation.Error{
+ UnderlyingError: errors.New("located error"),
+ Node: &yaml.Node{Line: 15, Column: 25},
+ Severity: validation.SeverityError,
+ Rule: "location-rule",
+ },
+ },
+ contains: []string{`"line": 15`, `"column": 25`, `"location-rule"`},
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+
+ formatter := format.NewJSONFormatter()
+ result, err := formatter.Format(tt.errors)
+ require.NoError(t, err)
+
+ // Verify it's valid JSON by checking structure (it's an object, not an array)
+ assert.True(t, strings.HasPrefix(strings.TrimSpace(result), "{"), "should start with {")
+ assert.True(t, strings.HasSuffix(strings.TrimSpace(result), "}"), "should end with }")
+
+ for _, substr := range tt.contains {
+ assert.Contains(t, result, substr, "JSON should contain %q", substr)
+ }
+ })
+ }
+}
diff --git a/linter/format/formatter.go b/linter/format/formatter.go
new file mode 100644
index 0000000..fac6c55
--- /dev/null
+++ b/linter/format/formatter.go
@@ -0,0 +1,5 @@
+package format
+
+// Formatter renders a set of lint results into a string representation
+// such as plain text or JSON.
+type Formatter interface {
+	// Format renders the given results into a ready-to-print report.
+	// An error is returned only if rendering itself fails.
+	Format(results []error) (string, error)
+}
diff --git a/linter/format/json.go b/linter/format/json.go
new file mode 100644
index 0000000..6a60e19
--- /dev/null
+++ b/linter/format/json.go
@@ -0,0 +1,113 @@
+package format
+
+import (
+ "encoding/json"
+ "errors"
+ "strings"
+
+ "github.com/speakeasy-api/openapi/validation"
+)
+
+// JSONFormatter renders lint results as a pretty-printed JSON document
+// containing a "results" array and a "summary" object.
+type JSONFormatter struct{}
+
+// NewJSONFormatter creates a new JSONFormatter.
+func NewJSONFormatter() *JSONFormatter {
+	return &JSONFormatter{}
+}
+
+type jsonOutput struct {
+ Results []jsonResult `json:"results"`
+ Summary jsonSummary `json:"summary"`
+}
+
+type jsonResult struct {
+ Rule string `json:"rule"`
+ Category string `json:"category"`
+ Severity string `json:"severity"`
+ Message string `json:"message"`
+ Location jsonLocation `json:"location"`
+ Document string `json:"document,omitempty"`
+ Fix *jsonFix `json:"fix,omitempty"`
+}
+
+type jsonLocation struct {
+ Line int `json:"line"`
+ Column int `json:"column"`
+ Pointer string `json:"pointer,omitempty"` // TODO: Add pointer support
+}
+
+type jsonFix struct {
+ Description string `json:"description"`
+}
+
+type jsonSummary struct {
+ Total int `json:"total"`
+ Errors int `json:"errors"`
+ Warnings int `json:"warnings"`
+ Hints int `json:"hints"`
+}
+
+// Format renders results as indented JSON. Each validation.Error is
+// reported with its rule, derived category, severity, message, and source
+// location; any non-validation error is reported under the synthetic
+// "internal" rule and counted as an error. Summary.Total counts every
+// result, while the per-severity counters only cover error/warning/hint.
+func (f *JSONFormatter) Format(results []error) (string, error) {
+	output := jsonOutput{
+		Results: make([]jsonResult, 0, len(results)),
+	}
+
+	for _, err := range results {
+		var vErr *validation.Error
+		if errors.As(err, &vErr) {
+			// Derive the category from the rule ID prefix before the first
+			// "-" (e.g. "style-rule" -> "style"); fall back to "unknown".
+			category := "unknown"
+			if idx := strings.Index(vErr.Rule, "-"); idx > 0 {
+				category = vErr.Rule[:idx]
+			}
+
+			result := jsonResult{
+				Rule:     vErr.Rule,
+				Category: category,
+				Severity: vErr.Severity.String(),
+				Message:  vErr.UnderlyingError.Error(),
+				Location: jsonLocation{
+					Line:   vErr.GetLineNumber(),
+					Column: vErr.GetColumnNumber(),
+				},
+			}
+
+			// Document and Fix are optional; omitted from the JSON when unset.
+			if vErr.DocumentLocation != "" {
+				result.Document = vErr.DocumentLocation
+			}
+
+			if vErr.Fix != nil {
+				result.Fix = &jsonFix{
+					Description: vErr.Fix.FixDescription(),
+				}
+			}
+
+			output.Results = append(output.Results, result)
+
+			// NOTE(review): a severity outside error/warning/hint is counted
+			// in Total but in none of the per-severity buckets — confirm
+			// whether additional severities exist and need counting.
+			switch vErr.Severity {
+			case validation.SeverityError:
+				output.Summary.Errors++
+			case validation.SeverityWarning:
+				output.Summary.Warnings++
+			case validation.SeverityHint:
+				output.Summary.Hints++
+			}
+		} else {
+			// Non-validation error
+			output.Results = append(output.Results, jsonResult{
+				Rule:     "internal",
+				Category: "internal",
+				Severity: "error",
+				Message:  err.Error(),
+			})
+			output.Summary.Errors++
+		}
+	}
+
+	output.Summary.Total = len(results)
+
+	bytes, err := json.MarshalIndent(output, "", "  ")
+	if err != nil {
+		return "", err
+	}
+
+	return string(bytes), nil
+}
diff --git a/linter/format/text.go b/linter/format/text.go
new file mode 100644
index 0000000..cdf30f8
--- /dev/null
+++ b/linter/format/text.go
@@ -0,0 +1,59 @@
+package format
+
+import (
+ "errors"
+ "fmt"
+ "strings"
+
+ "github.com/speakeasy-api/openapi/validation"
+)
+
+// TextFormatter renders lint results as human-readable, tab-separated
+// text lines followed by a problem-count summary.
+type TextFormatter struct{}
+
+// NewTextFormatter creates a new TextFormatter.
+func NewTextFormatter() *TextFormatter {
+	return &TextFormatter{}
+}
+
+// Format renders results as tab-separated lines of the form
+// "line:col<TAB>severity<TAB>rule<TAB>message", appending a summary line
+// when there is at least one result. It always returns a nil error.
+func (f *TextFormatter) Format(results []error) (string, error) {
+	var sb strings.Builder
+
+	errorCount := 0
+	warningCount := 0
+	hintCount := 0
+
+	for _, err := range results {
+		var vErr *validation.Error
+		if errors.As(err, &vErr) {
+			line := vErr.GetLineNumber()
+			col := vErr.GetColumnNumber()
+			severity := vErr.Severity
+			rule := vErr.Rule
+			msg := vErr.UnderlyingError.Error()
+			// Append the originating document location, when known, so
+			// multi-document runs remain attributable.
+			if vErr.DocumentLocation != "" {
+				msg = fmt.Sprintf("%s (document: %s)", msg, vErr.DocumentLocation)
+			}
+
+			sb.WriteString(fmt.Sprintf("%d:%d\t%s\t%s\t%s\n", line, col, severity, rule, msg))
+
+			// NOTE(review): severities outside error/warning/hint are printed
+			// but not counted in the summary — confirm none exist.
+			switch severity {
+			case validation.SeverityError:
+				errorCount++
+			case validation.SeverityWarning:
+				warningCount++
+			case validation.SeverityHint:
+				hintCount++
+			}
+		} else {
+			// Non-validation error
+			sb.WriteString(fmt.Sprintf("-\t-\terror\tinternal\t%s\n", err.Error()))
+			errorCount++
+		}
+	}
+
+	if len(results) > 0 {
+		sb.WriteString("\n")
+		sb.WriteString(fmt.Sprintf("✖ %d problems (%d errors, %d warnings, %d hints)\n", len(results), errorCount, warningCount, hintCount))
+	}
+
+	return sb.String(), nil
+}
diff --git a/linter/linter.go b/linter/linter.go
new file mode 100644
index 0000000..1f82f6e
--- /dev/null
+++ b/linter/linter.go
@@ -0,0 +1,268 @@
+package linter
+
+import (
+ "context"
+ "errors"
+ "sort"
+ "sync"
+
+ "github.com/speakeasy-api/openapi/linter/format"
+ "github.com/speakeasy-api/openapi/validation"
+)
+
+// Linter is the main linting engine
+type Linter[T any] struct {
+ config *Config
+ registry *Registry[T]
+}
+
+// NewLinter creates a new linter with the given configuration
+func NewLinter[T any](config *Config, registry *Registry[T]) *Linter[T] {
+ return &Linter[T]{
+ config: config,
+ registry: registry,
+ }
+}
+
+// Registry returns the rule registry for documentation generation
+func (l *Linter[T]) Registry() *Registry[T] {
+ return l.registry
+}
+
+// Lint runs all configured rules against the document
+func (l *Linter[T]) Lint(ctx context.Context, docInfo *DocumentInfo[T], preExistingErrors []error, opts *LintOptions) (*Output, error) {
+ var allErrs []error
+
+ if len(preExistingErrors) > 0 {
+ allErrs = append(allErrs, preExistingErrors...)
+ }
+
+ // Run lint rules - these also return validation.Error instances
+ lintErrs := l.runRules(ctx, docInfo, opts)
+ allErrs = append(allErrs, lintErrs...)
+
+ // Apply severity overrides from config
+ allErrs = l.applySeverityOverrides(allErrs)
+
+ // Sort errors by location
+ validation.SortValidationErrors(allErrs)
+
+ // Format output
+ return l.formatOutput(allErrs), nil
+}
+
+// runRules executes every enabled rule against the document, in parallel,
+// and collects all reported errors. Rules are filtered by the optional
+// version filter before execution; a rule with nil/empty Versions()
+// applies to all document versions.
+func (l *Linter[T]) runRules(ctx context.Context, docInfo *DocumentInfo[T], opts *LintOptions) []error {
+	// Determine enabled rules
+	enabledRules := l.getEnabledRules()
+
+	// Run rules in parallel for better performance
+	var (
+		mu   sync.Mutex
+		errs []error
+		wg   sync.WaitGroup
+	)
+
+	for _, rule := range enabledRules {
+		ruleConfig := l.getRuleConfig(rule.ID())
+
+		// Skip if disabled (though getEnabledRules should handle this, double check)
+		if ruleConfig.Enabled != nil && !*ruleConfig.Enabled {
+			continue
+		}
+
+		// Filter rules based on version if VersionFilter is set
+		if opts != nil && opts.VersionFilter != nil && *opts.VersionFilter != "" {
+			filter := *opts.VersionFilter
+			ruleVersions := rule.Versions()
+			// If rule specifies versions, check if current version matches
+			if len(ruleVersions) > 0 {
+				versionMatches := false
+				for _, ruleVersion := range ruleVersions {
+					// Accept an exact match, or a filter that extends the rule
+					// version at a component boundary, so "3.1" matches filter
+					// "3.1.0". The explicit '.' boundary check prevents "3.1"
+					// from wrongly matching "3.10.0", which the previous plain
+					// prefix comparison allowed.
+					if ruleVersion == filter ||
+						(len(filter) > len(ruleVersion) &&
+							filter[:len(ruleVersion)] == ruleVersion &&
+							filter[len(ruleVersion)] == '.') {
+						versionMatches = true
+						break
+					}
+				}
+				if !versionMatches {
+					continue // Skip this rule - doesn't apply to this version
+				}
+			}
+			// If rule.Versions() is nil/empty, it applies to all versions
+		}
+
+		// Set resolve options if provided; work on a copy so per-rule
+		// mutation of TargetLocation cannot leak between rules.
+		if opts != nil && opts.ResolveOptions != nil {
+			resolveOpts := *opts.ResolveOptions
+			// Set document location as target location if not already set
+			if resolveOpts.TargetLocation == "" && docInfo.Location != "" {
+				resolveOpts.TargetLocation = docInfo.Location
+			}
+			ruleConfig.ResolveOptions = &resolveOpts
+		}
+
+		// Run rule in parallel; rule and config are passed as arguments so
+		// each goroutine operates on its own copies.
+		wg.Add(1)
+		go func(r RuleRunner[T], cfg RuleConfig) {
+			defer wg.Done()
+
+			ruleErrs := r.Run(ctx, docInfo, &cfg)
+
+			mu.Lock()
+			errs = append(errs, ruleErrs...)
+			mu.Unlock()
+		}(rule, ruleConfig)
+	}
+
+	wg.Wait()
+	return errs
+}
+
+// getEnabledRules resolves the effective set of rules to run, layering
+// configuration in increasing precedence: extended rulesets, then
+// category config, then individual rule config. The result is sorted by
+// rule ID for deterministic execution order.
+func (l *Linter[T]) getEnabledRules() []RuleRunner[T] {
+	// Start with all rules if "all" is extended (default)
+	// Or specific rulesets
+
+	// For now, simple implementation: check config for enabled rules
+	// If config.Extends contains "all", include all rules unless disabled
+
+	// Map to track enabled status: ruleID -> enabled
+	ruleStatus := make(map[string]bool)
+
+	// Apply rulesets
+	for _, ruleset := range l.config.Extends {
+		if ids, ok := l.registry.GetRuleset(ruleset); ok {
+			for _, id := range ids {
+				ruleStatus[id] = true
+			}
+		}
+	}
+
+	// Apply category config
+	// Category config overrides ruleset config but is overridden by individual rule config
+	// NOTE(review): a category with Enabled=true also turns on rules that no
+	// extended ruleset selected — confirm this widening is intended.
+	for _, rule := range l.registry.AllRules() {
+		if catConfig, ok := l.config.Categories[rule.Category()]; ok {
+			if catConfig.Enabled != nil {
+				ruleStatus[rule.ID()] = *catConfig.Enabled
+			}
+		}
+	}
+
+	// Apply rule config
+	for id, ruleConfig := range l.config.Rules {
+		if ruleConfig.Enabled != nil {
+			ruleStatus[id] = *ruleConfig.Enabled
+		}
+	}
+
+	var enabled []RuleRunner[T]
+	for id, enabledFlag := range ruleStatus {
+		if enabledFlag {
+			// Unknown IDs in config (no registered rule) are silently dropped.
+			if rule, ok := l.registry.GetRule(id); ok {
+				enabled = append(enabled, rule)
+			}
+		}
+	}
+
+	// Sort for deterministic order
+	sort.Slice(enabled, func(i, j int) bool {
+		return enabled[i].ID() < enabled[j].ID()
+	})
+
+	return enabled
+}
+
+// getRuleConfig builds the effective configuration for a single rule by
+// layering category config underneath individual rule config (rule config
+// wins). Previously the Enabled flag was never copied into the result,
+// which made the defensive "skip if disabled" double-check in runRules
+// dead code; it is now propagated. This is behavior-preserving because
+// getEnabledRules already filters on the same layered Enabled values.
+func (l *Linter[T]) getRuleConfig(ruleID string) RuleConfig {
+	// Start with default config
+	config := RuleConfig{}
+
+	// Apply category config
+	if rule, ok := l.registry.GetRule(ruleID); ok {
+		if catConfig, ok := l.config.Categories[rule.Category()]; ok {
+			if catConfig.Enabled != nil {
+				config.Enabled = catConfig.Enabled
+			}
+			if catConfig.Severity != nil {
+				config.Severity = catConfig.Severity
+			}
+		}
+	}
+
+	// Apply rule config (overrides category config)
+	if ruleConfig, ok := l.config.Rules[ruleID]; ok {
+		if ruleConfig.Enabled != nil {
+			config.Enabled = ruleConfig.Enabled
+		}
+		if ruleConfig.Severity != nil {
+			config.Severity = ruleConfig.Severity
+		}
+		if ruleConfig.Options != nil {
+			config.Options = ruleConfig.Options
+		}
+	}
+
+	return config
+}
+
+// applySeverityOverrides replaces the severity of each validation.Error
+// with any override from rule/category config. The errors are mutated in
+// place; the slice is returned unchanged in length and order. Non-
+// validation errors are left untouched.
+func (l *Linter[T]) applySeverityOverrides(errs []error) []error {
+	for _, err := range errs {
+		var vErr *validation.Error
+		if errors.As(err, &vErr) {
+			config := l.getRuleConfig(vErr.Rule)
+			if config.Severity != nil {
+				vErr.Severity = *config.Severity
+			}
+		}
+	}
+	return errs
+}
+
+func (l *Linter[T]) formatOutput(errs []error) *Output {
+ return &Output{
+ Results: errs,
+ Format: l.config.OutputFormat,
+ }
+}
+
+// Output represents the result of linting
+type Output struct {
+ Results []error
+ Format OutputFormat
+}
+
+func (o *Output) HasErrors() bool {
+ for _, err := range o.Results {
+ var vErr *validation.Error
+ if errors.As(err, &vErr) {
+ if vErr.Severity == validation.SeverityError {
+ return true
+ }
+ } else {
+ // Non-validation errors are treated as errors
+ return true
+ }
+ }
+ return false
+}
+
+func (o *Output) ErrorCount() int {
+ count := 0
+ for _, err := range o.Results {
+ var vErr *validation.Error
+ if errors.As(err, &vErr) {
+ if vErr.Severity == validation.SeverityError {
+ count++
+ }
+ } else {
+ count++
+ }
+ }
+ return count
+}
+
+// FormatText renders the results using the plain-text formatter.
+// TextFormatter.Format always returns a nil error, so it is safely
+// ignored here.
+func (o *Output) FormatText() string {
+	f := format.NewTextFormatter()
+	s, _ := f.Format(o.Results)
+	return s
+}
+
+// FormatJSON renders the results using the JSON formatter. A marshal
+// failure is swallowed and yields the empty string; callers that need
+// the error should call format.NewJSONFormatter().Format directly.
+func (o *Output) FormatJSON() string {
+	f := format.NewJSONFormatter()
+	s, _ := f.Format(o.Results)
+	return s
+}
diff --git a/linter/linter_test.go b/linter/linter_test.go
new file mode 100644
index 0000000..02d13f5
--- /dev/null
+++ b/linter/linter_test.go
@@ -0,0 +1,585 @@
+package linter_test
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "testing"
+
+ "github.com/speakeasy-api/openapi/linter"
+ "github.com/speakeasy-api/openapi/validation"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+// Mock document type for testing
+type MockDoc struct {
+ ID string
+}
+
+// Mock rule for testing
+type mockRule struct {
+ id string
+ category string
+ description string
+ link string
+ defaultSeverity validation.Severity
+ versions []string
+ runFunc func(ctx context.Context, docInfo *linter.DocumentInfo[*MockDoc], config *linter.RuleConfig) []error
+}
+
+func (r *mockRule) ID() string { return r.id }
+func (r *mockRule) Category() string { return r.category }
+func (r *mockRule) Description() string { return r.description }
+func (r *mockRule) Link() string { return r.link }
+func (r *mockRule) DefaultSeverity() validation.Severity { return r.defaultSeverity }
+func (r *mockRule) Versions() []string { return r.versions }
+
+func (r *mockRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*MockDoc], config *linter.RuleConfig) []error {
+ if r.runFunc != nil {
+ return r.runFunc(ctx, docInfo, config)
+ }
+ return nil
+}
+
+func TestLinter_RuleSelection(t *testing.T) {
+ t.Parallel()
+
+ t.Run("extends all includes all rules", func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{
+ id: "test-rule-1",
+ category: "style",
+ defaultSeverity: validation.SeverityError,
+ runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+ return []error{validation.NewValidationError(validation.SeverityError, "test-rule-1", errors.New("test error"), nil)}
+ },
+ })
+ registry.Register(&mockRule{
+ id: "test-rule-2",
+ category: "security",
+ defaultSeverity: validation.SeverityWarning,
+ runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+ return []error{validation.NewValidationError(validation.SeverityWarning, "test-rule-2", errors.New("test warning"), nil)}
+ },
+ })
+
+ config := &linter.Config{
+ Extends: []string{"all"},
+ }
+
+ lntr := linter.NewLinter(config, registry)
+ docInfo := &linter.DocumentInfo[*MockDoc]{
+ Document: &MockDoc{ID: "test"},
+ }
+
+ output, err := lntr.Lint(ctx, docInfo, nil, nil)
+ require.NoError(t, err)
+
+ // Should have errors from both rules
+ assert.Len(t, output.Results, 2)
+ })
+
+ t.Run("disabled rule not executed", func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{
+ id: "test-rule-1",
+ category: "style",
+ defaultSeverity: validation.SeverityError,
+ runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+ return []error{validation.NewValidationError(validation.SeverityError, "test-rule-1", errors.New("test error"), nil)}
+ },
+ })
+
+ falseVal := false
+ config := &linter.Config{
+ Extends: []string{"all"},
+ Rules: map[string]linter.RuleConfig{
+ "test-rule-1": {
+ Enabled: &falseVal,
+ },
+ },
+ }
+
+ lntr := linter.NewLinter(config, registry)
+ docInfo := &linter.DocumentInfo[*MockDoc]{
+ Document: &MockDoc{ID: "test"},
+ }
+
+ output, err := lntr.Lint(ctx, docInfo, nil, nil)
+ require.NoError(t, err)
+
+ // Should have no errors since rule is disabled
+ assert.Empty(t, output.Results)
+ })
+
+ t.Run("category disabled affects all rules in category", func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{
+ id: "style-rule-1",
+ category: "style",
+ defaultSeverity: validation.SeverityError,
+ runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+ return []error{validation.NewValidationError(validation.SeverityError, "style-rule-1", errors.New("style error 1"), nil)}
+ },
+ })
+ registry.Register(&mockRule{
+ id: "style-rule-2",
+ category: "style",
+ defaultSeverity: validation.SeverityError,
+ runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+ return []error{validation.NewValidationError(validation.SeverityError, "style-rule-2", errors.New("style error 2"), nil)}
+ },
+ })
+ registry.Register(&mockRule{
+ id: "security-rule-1",
+ category: "security",
+ defaultSeverity: validation.SeverityError,
+ runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+ return []error{validation.NewValidationError(validation.SeverityError, "security-rule-1", errors.New("security error"), nil)}
+ },
+ })
+
+ falseVal := false
+ config := &linter.Config{
+ Extends: []string{"all"},
+ Categories: map[string]linter.CategoryConfig{
+ "style": {
+ Enabled: &falseVal,
+ },
+ },
+ }
+
+ lntr := linter.NewLinter(config, registry)
+ docInfo := &linter.DocumentInfo[*MockDoc]{
+ Document: &MockDoc{ID: "test"},
+ }
+
+ output, err := lntr.Lint(ctx, docInfo, nil, nil)
+ require.NoError(t, err)
+
+ // Should only have security error, style rules disabled
+ require.Len(t, output.Results, 1)
+ assert.Contains(t, output.Results[0].Error(), "security-rule-1")
+ })
+}
+
+func TestLinter_SeverityOverrides(t *testing.T) {
+ t.Parallel()
+
+ t.Run("rule severity override", func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{
+ id: "test-rule",
+ category: "style",
+ defaultSeverity: validation.SeverityError,
+ runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+ return []error{validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("test error"), nil)}
+ },
+ })
+
+ warningSeverity := validation.SeverityWarning
+ config := &linter.Config{
+ Extends: []string{"all"},
+ Rules: map[string]linter.RuleConfig{
+ "test-rule": {
+ Severity: &warningSeverity,
+ },
+ },
+ }
+
+ lntr := linter.NewLinter(config, registry)
+ docInfo := &linter.DocumentInfo[*MockDoc]{
+ Document: &MockDoc{ID: "test"},
+ }
+
+ output, err := lntr.Lint(ctx, docInfo, nil, nil)
+ require.NoError(t, err)
+
+ require.Len(t, output.Results, 1)
+ var vErr *validation.Error
+ require.ErrorAs(t, output.Results[0], &vErr)
+ assert.Equal(t, validation.SeverityWarning, vErr.Severity)
+ })
+
+ t.Run("category severity override", func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{
+ id: "style-rule",
+ category: "style",
+ defaultSeverity: validation.SeverityError,
+ runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+ return []error{validation.NewValidationError(validation.SeverityError, "style-rule", errors.New("style error"), nil)}
+ },
+ })
+
+ warningSeverity := validation.SeverityWarning
+ config := &linter.Config{
+ Extends: []string{"all"},
+ Categories: map[string]linter.CategoryConfig{
+ "style": {
+ Severity: &warningSeverity,
+ },
+ },
+ }
+
+ lntr := linter.NewLinter(config, registry)
+ docInfo := &linter.DocumentInfo[*MockDoc]{
+ Document: &MockDoc{ID: "test"},
+ }
+
+ output, err := lntr.Lint(ctx, docInfo, nil, nil)
+ require.NoError(t, err)
+
+ require.Len(t, output.Results, 1)
+ var vErr *validation.Error
+ require.ErrorAs(t, output.Results[0], &vErr)
+ assert.Equal(t, validation.SeverityWarning, vErr.Severity)
+ })
+
+ t.Run("rule severity override takes precedence over category", func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{
+ id: "style-rule",
+ category: "style",
+ defaultSeverity: validation.SeverityError,
+ runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+ return []error{validation.NewValidationError(validation.SeverityError, "style-rule", errors.New("style error"), nil)}
+ },
+ })
+
+ warningSeverity := validation.SeverityWarning
+ hintSeverity := validation.SeverityHint
+ config := &linter.Config{
+ Extends: []string{"all"},
+ Categories: map[string]linter.CategoryConfig{
+ "style": {
+ Severity: &warningSeverity,
+ },
+ },
+ Rules: map[string]linter.RuleConfig{
+ "style-rule": {
+ Severity: &hintSeverity,
+ },
+ },
+ }
+
+ lntr := linter.NewLinter(config, registry)
+ docInfo := &linter.DocumentInfo[*MockDoc]{
+ Document: &MockDoc{ID: "test"},
+ }
+
+ output, err := lntr.Lint(ctx, docInfo, nil, nil)
+ require.NoError(t, err)
+
+ require.Len(t, output.Results, 1)
+ var vErr *validation.Error
+ require.ErrorAs(t, output.Results[0], &vErr)
+ // Rule severity should override category severity
+ assert.Equal(t, validation.SeverityHint, vErr.Severity)
+ })
+}
+
+func TestLinter_PreExistingErrors(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{
+ id: "test-rule",
+ category: "style",
+ defaultSeverity: validation.SeverityError,
+ runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+ return []error{validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("lint error"), nil)}
+ },
+ })
+
+ config := &linter.Config{
+ Extends: []string{"all"},
+ }
+
+ lntr := linter.NewLinter(config, registry)
+ docInfo := &linter.DocumentInfo[*MockDoc]{
+ Document: &MockDoc{ID: "test"},
+ }
+
+ preExistingErrs := []error{
+ validation.NewValidationError(validation.SeverityError, "validation-required", errors.New("validation error"), nil),
+ }
+
+ output, err := lntr.Lint(ctx, docInfo, preExistingErrs, nil)
+ require.NoError(t, err)
+
+ // Should include both pre-existing and lint errors
+ assert.Len(t, output.Results, 2)
+}
+
+func TestLinter_ParallelExecution(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ registry := linter.NewRegistry[*MockDoc]()
+
+ // Create multiple rules that all run
+ for i := 0; i < 10; i++ {
+ ruleID := fmt.Sprintf("test-rule-%d", i)
+ registry.Register(&mockRule{
+ id: ruleID,
+ category: "test",
+ defaultSeverity: validation.SeverityError,
+ runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+ return []error{validation.NewValidationError(validation.SeverityError, ruleID, fmt.Errorf("error from %s", ruleID), nil)}
+ },
+ })
+ }
+
+ config := &linter.Config{
+ Extends: []string{"all"},
+ }
+
+ lntr := linter.NewLinter(config, registry)
+ docInfo := &linter.DocumentInfo[*MockDoc]{
+ Document: &MockDoc{ID: "test"},
+ }
+
+ output, err := lntr.Lint(ctx, docInfo, nil, nil)
+ require.NoError(t, err)
+
+ // Should have errors from all 10 rules
+ assert.Len(t, output.Results, 10)
+
+ // Verify all rules executed
+ foundRules := make(map[string]bool)
+ for _, result := range output.Results {
+ var vErr *validation.Error
+ if errors.As(result, &vErr) {
+ foundRules[vErr.Rule] = true
+ }
+ }
+ assert.Len(t, foundRules, 10, "all rules should have executed")
+}
+
+func TestOutput_HasErrors(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ results []error
+ hasErrors bool
+ }{
+ {
+ name: "no errors",
+ results: []error{},
+ hasErrors: false,
+ },
+ {
+ name: "only warnings",
+ results: []error{
+ validation.NewValidationError(validation.SeverityWarning, "test-rule", errors.New("warning"), nil),
+ },
+ hasErrors: false,
+ },
+ {
+ name: "only hints",
+ results: []error{
+ validation.NewValidationError(validation.SeverityHint, "test-rule", errors.New("hint"), nil),
+ },
+ hasErrors: false,
+ },
+ {
+ name: "has error severity",
+ results: []error{
+ validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("error"), nil),
+ },
+ hasErrors: true,
+ },
+ {
+ name: "mixed severities with error",
+ results: []error{
+ validation.NewValidationError(validation.SeverityWarning, "test-rule", errors.New("warning"), nil),
+ validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("error"), nil),
+ },
+ hasErrors: true,
+ },
+ {
+ name: "non-validation error treated as error",
+ results: []error{
+ errors.New("plain error"),
+ },
+ hasErrors: true,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+
+ output := &linter.Output{
+ Results: tt.results,
+ }
+
+ assert.Equal(t, tt.hasErrors, output.HasErrors())
+ })
+ }
+}
+
+func TestOutput_ErrorCount(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ results []error
+ errorCount int
+ }{
+ {
+ name: "no errors",
+ results: []error{},
+ errorCount: 0,
+ },
+ {
+ name: "only warnings",
+ results: []error{
+ validation.NewValidationError(validation.SeverityWarning, "test-rule", errors.New("warning"), nil),
+ },
+ errorCount: 0,
+ },
+ {
+ name: "one error",
+ results: []error{
+ validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("error"), nil),
+ },
+ errorCount: 1,
+ },
+ {
+ name: "mixed severities",
+ results: []error{
+ validation.NewValidationError(validation.SeverityWarning, "test-rule", errors.New("warning"), nil),
+ validation.NewValidationError(validation.SeverityError, "test-rule-1", errors.New("error 1"), nil),
+ validation.NewValidationError(validation.SeverityHint, "test-rule", errors.New("hint"), nil),
+ validation.NewValidationError(validation.SeverityError, "test-rule-2", errors.New("error 2"), nil),
+ },
+ errorCount: 2,
+ },
+ {
+ name: "non-validation errors counted",
+ results: []error{
+ errors.New("plain error 1"),
+ validation.NewValidationError(validation.SeverityWarning, "test-rule", errors.New("warning"), nil),
+ errors.New("plain error 2"),
+ },
+ errorCount: 2,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+
+ output := &linter.Output{
+ Results: tt.results,
+ }
+
+ assert.Equal(t, tt.errorCount, output.ErrorCount())
+ })
+ }
+}
+
+func TestOutput_Formatting(t *testing.T) {
+ t.Parallel()
+
+ output := &linter.Output{
+ Results: []error{
+ validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("test error"), nil),
+ },
+ Format: linter.OutputFormatText,
+ }
+
+ t.Run("format text non-empty", func(t *testing.T) {
+ t.Parallel()
+ text := output.FormatText()
+ assert.NotEmpty(t, text)
+ assert.Contains(t, text, "test-rule")
+ })
+
+ t.Run("format json non-empty", func(t *testing.T) {
+ t.Parallel()
+ json := output.FormatJSON()
+ assert.NotEmpty(t, json)
+ assert.Contains(t, json, "test-rule")
+ })
+}
+
+func TestLinter_ErrorSorting(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{
+ id: "test-rule",
+ category: "style",
+ defaultSeverity: validation.SeverityError,
+ runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+ // Return errors in unsorted order
+ return []error{
+ validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("error 3"), nil),
+ validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("error 1"), nil),
+ validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("error 2"), nil),
+ }
+ },
+ })
+
+ config := &linter.Config{
+ Extends: []string{"all"},
+ }
+
+ lntr := linter.NewLinter(config, registry)
+ docInfo := &linter.DocumentInfo[*MockDoc]{
+ Document: &MockDoc{ID: "test"},
+ }
+
+ output, err := lntr.Lint(ctx, docInfo, nil, nil)
+ require.NoError(t, err)
+
+ // Errors should be sorted by validation.SortValidationErrors
+ assert.Len(t, output.Results, 3)
+}
+
+func TestLinter_Registry(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{
+ id: "test-rule",
+ category: "style",
+ defaultSeverity: validation.SeverityError,
+ })
+
+ config := &linter.Config{}
+ lntr := linter.NewLinter(config, registry)
+
+ // Should be able to access registry for documentation
+ reg := lntr.Registry()
+ require.NotNil(t, reg)
+
+ rule, exists := reg.GetRule("test-rule")
+ assert.True(t, exists)
+ assert.Equal(t, "test-rule", rule.ID())
+}
diff --git a/linter/registry.go b/linter/registry.go
new file mode 100644
index 0000000..4366e8c
--- /dev/null
+++ b/linter/registry.go
@@ -0,0 +1,125 @@
+package linter
+
+import (
+ "fmt"
+ "sort"
+)
+
+// Registry holds registered rules and named rulesets for documents of type T.
+// It has no internal synchronization (plain maps), so rules and rulesets
+// should be registered during setup, before any concurrent reads.
+type Registry[T any] struct {
+ rules map[string]RuleRunner[T] // rule ID -> rule implementation
+ rulesets map[string][]string // ruleset name -> rule IDs
+}
+
+// NewRegistry creates a new, empty rule registry.
+func NewRegistry[T any]() *Registry[T] {
+ return &Registry[T]{
+ rules: make(map[string]RuleRunner[T]),
+ rulesets: make(map[string][]string),
+ }
+}
+
+// Register registers a rule under its ID. Registering a second rule with the
+// same ID silently replaces the first.
+func (r *Registry[T]) Register(rule RuleRunner[T]) {
+ r.rules[rule.ID()] = rule
+}
+
+// RegisterRuleset registers a named ruleset referencing previously registered
+// rules. It returns an error if the name is already taken or any ID in
+// ruleIDs is not a registered rule. The slice is stored as-is (not copied),
+// so callers should not mutate it afterwards.
+// NOTE(review): the built-in name "all" is not rejected here, yet GetRuleset
+// resolves "all" before consulting this map — a ruleset registered under
+// "all" would be silently shadowed; consider returning an error for it.
+func (r *Registry[T]) RegisterRuleset(name string, ruleIDs []string) error {
+ if _, exists := r.rulesets[name]; exists {
+ return fmt.Errorf("ruleset %q already registered", name)
+ }
+
+ // Validate rule IDs
+ for _, id := range ruleIDs {
+ if _, exists := r.rules[id]; !exists {
+ return fmt.Errorf("rule %q in ruleset %q not found", id, name)
+ }
+ }
+
+ r.rulesets[name] = ruleIDs
+ return nil
+}
+
+// GetRule returns a rule by ID. The boolean reports whether a rule with that
+// ID is registered.
+func (r *Registry[T]) GetRule(id string) (RuleRunner[T], bool) {
+ rule, ok := r.rules[id]
+ return rule, ok
+}
+
+// GetRuleset returns the rule IDs for a ruleset. The name "all" is built in
+// and resolves to every registered rule ID; it takes precedence over any
+// user-registered ruleset of the same name.
+func (r *Registry[T]) GetRuleset(name string) ([]string, bool) {
+ if name == "all" {
+ return r.AllRuleIDs(), true
+ }
+ ids, ok := r.rulesets[name]
+ return ids, ok
+}
+
+// AllRules returns all registered rules sorted by ID, giving a deterministic
+// order despite Go's randomized map iteration.
+func (r *Registry[T]) AllRules() []RuleRunner[T] {
+ rules := make([]RuleRunner[T], 0, len(r.rules))
+ for _, rule := range r.rules {
+ rules = append(rules, rule)
+ }
+ // Sort for deterministic order
+ sort.Slice(rules, func(i, j int) bool {
+ return rules[i].ID() < rules[j].ID()
+ })
+ return rules
+}
+
+// AllRuleIDs returns all registered rule IDs in sorted (ascending) order.
+func (r *Registry[T]) AllRuleIDs() []string {
+ ids := make([]string, 0, len(r.rules))
+ for id := range r.rules {
+ ids = append(ids, id)
+ }
+ sort.Strings(ids)
+ return ids
+}
+
+// AllCategories returns the unique categories of all registered rules,
+// sorted alphabetically.
+func (r *Registry[T]) AllCategories() []string {
+ // Use a set to deduplicate; multiple rules may share a category.
+ categories := make(map[string]bool)
+ for _, rule := range r.rules {
+ categories[rule.Category()] = true
+ }
+
+ cats := make([]string, 0, len(categories))
+ for cat := range categories {
+ cats = append(cats, cat)
+ }
+ sort.Strings(cats)
+ return cats
+}
+
+// AllRulesets returns all registered ruleset names, sorted, always including
+// the built-in "all" ruleset.
+// NOTE(review): a user-registered ruleset literally named "all" would appear
+// twice here, since "all" is appended unconditionally — confirm whether
+// RegisterRuleset should reject that name.
+func (r *Registry[T]) AllRulesets() []string {
+ names := make([]string, 0, len(r.rulesets)+1)
+ names = append(names, "all")
+ for name := range r.rulesets {
+ names = append(names, name)
+ }
+ sort.Strings(names)
+ return names
+}
+
+// RulesetsContaining returns the sorted names of rulesets that contain the
+// given rule ID. The built-in "all" ruleset is always included — even for a
+// rule ID that was never registered, since membership in "all" is implicit.
+func (r *Registry[T]) RulesetsContaining(ruleID string) []string {
+ var sets []string
+
+ // "all" always contains everything
+ sets = append(sets, "all")
+
+ // Linear scan over every ruleset's IDs; registries are expected to be
+ // small, so no index is maintained.
+ for name, ids := range r.rulesets {
+ for _, id := range ids {
+ if id == ruleID {
+ sets = append(sets, name)
+ break
+ }
+ }
+ }
+ sort.Strings(sets)
+ return sets
+}
diff --git a/linter/registry_test.go b/linter/registry_test.go
new file mode 100644
index 0000000..232123b
--- /dev/null
+++ b/linter/registry_test.go
@@ -0,0 +1,127 @@
+package linter_test
+
+import (
+ "testing"
+
+ "github.com/speakeasy-api/openapi/linter"
+ "github.com/speakeasy-api/openapi/validation"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+// TestRegistry_RegisterRuleset covers the three RegisterRuleset outcomes:
+// successful registration, rejection of unknown rule IDs, and rejection of a
+// duplicate ruleset name.
+func TestRegistry_RegisterRuleset(t *testing.T) {
+ t.Parallel()
+
+ t.Run("successfully register ruleset", func(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError})
+ registry.Register(&mockRule{id: "rule-2", category: "style", defaultSeverity: validation.SeverityError})
+
+ err := registry.RegisterRuleset("recommended", []string{"rule-1", "rule-2"})
+ require.NoError(t, err)
+
+ // The registered ruleset round-trips through GetRuleset.
+ ruleIDs, exists := registry.GetRuleset("recommended")
+ assert.True(t, exists)
+ assert.ElementsMatch(t, []string{"rule-1", "rule-2"}, ruleIDs)
+ })
+
+ t.Run("error when rule not found", func(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError})
+
+ // Referencing an unregistered rule ID must fail, naming the offender.
+ err := registry.RegisterRuleset("test", []string{"rule-1", "nonexistent"})
+ require.Error(t, err)
+ assert.Contains(t, err.Error(), "nonexistent")
+ assert.Contains(t, err.Error(), "not found")
+ })
+
+ t.Run("error when ruleset already registered", func(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError})
+
+ err := registry.RegisterRuleset("test", []string{"rule-1"})
+ require.NoError(t, err)
+
+ // Second registration under the same name must be rejected.
+ err = registry.RegisterRuleset("test", []string{"rule-1"})
+ require.Error(t, err)
+ assert.Contains(t, err.Error(), "already registered")
+ })
+}
+
+// TestRegistry_AllCategories verifies that AllCategories deduplicates
+// categories shared by multiple rules and returns them sorted.
+func TestRegistry_AllCategories(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ // Two rules share "style" to exercise deduplication.
+ registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError})
+ registry.Register(&mockRule{id: "rule-2", category: "style", defaultSeverity: validation.SeverityError})
+ registry.Register(&mockRule{id: "rule-3", category: "security", defaultSeverity: validation.SeverityError})
+ registry.Register(&mockRule{id: "rule-4", category: "best-practices", defaultSeverity: validation.SeverityError})
+
+ categories := registry.AllCategories()
+ // Should be sorted (alphabetically) with duplicates collapsed.
+ assert.Equal(t, []string{"best-practices", "security", "style"}, categories)
+}
+
+// TestRegistry_AllRulesets verifies that AllRulesets includes the built-in
+// "all" set alongside user-registered rulesets, sorted alphabetically.
+func TestRegistry_AllRulesets(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError})
+ require.NoError(t, registry.RegisterRuleset("recommended", []string{"rule-1"}))
+ require.NoError(t, registry.RegisterRuleset("strict", []string{"rule-1"}))
+
+ rulesets := registry.AllRulesets()
+ assert.Contains(t, rulesets, "all")
+ assert.Contains(t, rulesets, "recommended")
+ assert.Contains(t, rulesets, "strict")
+ // Should be sorted ("all" precedes the others alphabetically)
+ assert.Equal(t, "all", rulesets[0])
+}
+
+// TestRegistry_RulesetsContaining verifies reverse lookup from a rule ID to
+// the rulesets that include it, including the implicit "all" membership.
+func TestRegistry_RulesetsContaining(t *testing.T) {
+ t.Parallel()
+
+ // Shared fixture: rule-1 is in both rulesets, rule-2 only in "strict".
+ // Subtests only read the registry, so sharing it across t.Parallel() is safe.
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError})
+ registry.Register(&mockRule{id: "rule-2", category: "security", defaultSeverity: validation.SeverityError})
+ require.NoError(t, registry.RegisterRuleset("recommended", []string{"rule-1"}))
+ require.NoError(t, registry.RegisterRuleset("strict", []string{"rule-1", "rule-2"}))
+
+ t.Run("rule in multiple rulesets", func(t *testing.T) {
+ t.Parallel()
+ rulesets := registry.RulesetsContaining("rule-1")
+ assert.Contains(t, rulesets, "all")
+ assert.Contains(t, rulesets, "recommended")
+ assert.Contains(t, rulesets, "strict")
+ })
+
+ t.Run("rule in subset of rulesets", func(t *testing.T) {
+ t.Parallel()
+ rulesets := registry.RulesetsContaining("rule-2")
+ assert.Contains(t, rulesets, "all")
+ assert.Contains(t, rulesets, "strict")
+ assert.NotContains(t, rulesets, "recommended")
+ })
+}
+
+// TestRegistry_GetRuleset_UnknownReturnsFalse verifies the not-found path of
+// GetRuleset: an unregistered name reports ok=false.
+func TestRegistry_GetRuleset_UnknownReturnsFalse(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ _, exists := registry.GetRuleset("nonexistent")
+ assert.False(t, exists)
+}
+
+// TestRegistry_GetRule_UnknownReturnsFalse verifies the not-found path of
+// GetRule: an unregistered rule ID reports ok=false.
+func TestRegistry_GetRule_UnknownReturnsFalse(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ _, exists := registry.GetRule("nonexistent")
+ assert.False(t, exists)
+}
diff --git a/linter/rule.go b/linter/rule.go
new file mode 100644
index 0000000..6f8c8a5
--- /dev/null
+++ b/linter/rule.go
@@ -0,0 +1,67 @@
+package linter
+
+import (
+ "context"
+
+ "github.com/speakeasy-api/openapi/validation"
+)
+
+// Rule represents a single linting rule: its identity, documentation
+// metadata, and default severity. Execution lives on RuleRunner.
+type Rule interface {
+ // ID returns the unique identifier for this rule (e.g., "style-path-params").
+ ID() string
+
+ // Category returns the rule category (e.g., "style", "validation", "security").
+ Category() string
+
+ // Description returns a human-readable description of what the rule checks.
+ Description() string
+
+ // Link returns an optional URL to documentation for this rule.
+ Link() string
+
+ // DefaultSeverity returns the default severity level for this rule.
+ DefaultSeverity() validation.Severity
+
+ // Versions returns the spec versions this rule applies to (nil = all versions).
+ Versions() []string
+}
+
+// RuleRunner is the interface rules must implement to execute their logic.
+// This is separate from Rule to allow different runner types for different
+// specs: T is the document type the rule operates on.
+type RuleRunner[T any] interface {
+ Rule
+
+ // Run executes the rule against the provided document.
+ // DocumentInfo provides both the document and its location for resolving
+ // external references. Returns any issues found as validation errors.
+ Run(ctx context.Context, docInfo *DocumentInfo[T], config *RuleConfig) []error
+}
+
+// DocumentedRule provides extended documentation for a rule. It is an
+// optional capability interface — presumably discovered via type assertion
+// when generating docs (verify against the doc generator).
+type DocumentedRule interface {
+ Rule
+
+ // GoodExample returns YAML showing correct usage.
+ GoodExample() string
+
+ // BadExample returns YAML showing incorrect usage.
+ BadExample() string
+
+ // Rationale explains why this rule exists.
+ Rationale() string
+
+ // FixAvailable returns true if the rule provides auto-fix suggestions.
+ FixAvailable() bool
+}
+
+// ConfigurableRule indicates a rule has configurable options. Like
+// DocumentedRule, it is an optional capability interface on top of Rule.
+type ConfigurableRule interface {
+ Rule
+
+ // ConfigSchema returns JSON Schema for rule-specific options.
+ ConfigSchema() map[string]any
+
+ // ConfigDefaults returns default values for options.
+ ConfigDefaults() map[string]any
+}
diff --git a/marshaller/model.go b/marshaller/model.go
index daee7ce..dfa8d49 100644
--- a/marshaller/model.go
+++ b/marshaller/model.go
@@ -119,37 +119,45 @@ func (m *Model[T]) GetRootNodeColumn() int {
return -1
}
-func (m *Model[T]) GetPropertyLine(prop string) int {
+func (m *Model[T]) GetPropertyNode(prop string) *yaml.Node {
// Use reflection to find the property in the core and then see if it is a marshaller.Node and if it is get the line of the key node if set
if m == nil {
- return -1
+ return nil
}
// Get reflection value of the core
coreValue := reflect.ValueOf(&m.core).Elem()
if !coreValue.IsValid() {
- return -1
+ return nil
}
// Find the field by name
fieldValue := coreValue.FieldByName(prop)
if !fieldValue.IsValid() {
- return -1
+ return nil
}
// Check if the field implements the interface we need to get the key node
// We need to check if it has a GetKeyNode method or if it's a Node type
fieldInterface := fieldValue.Interface()
+ var keyNode *yaml.Node
+
// Try to cast to a Node-like interface that has GetKeyNode method
if nodeWithKeyNode, ok := fieldInterface.(interface{ GetKeyNode() *yaml.Node }); ok {
- keyNode := nodeWithKeyNode.GetKeyNode()
- if keyNode != nil {
- return keyNode.Line
- }
+ keyNode = nodeWithKeyNode.GetKeyNode()
+
}
- return -1
+ return keyNode
+}
+
+// GetPropertyLine returns the line number of the key node for the named core
+// property, or -1 when no node is available (nil model, unknown property, or
+// a property without a key node). It is a thin wrapper over GetPropertyNode.
+func (m *Model[T]) GetPropertyLine(prop string) int {
+ node := m.GetPropertyNode(prop)
+ if node == nil {
+ return -1
+ }
+ return node.Line
+}
// SetCore implements CoreAccessor interface
diff --git a/marshaller/model_test.go b/marshaller/model_test.go
index c96e4de..1e1822c 100644
--- a/marshaller/model_test.go
+++ b/marshaller/model_test.go
@@ -9,6 +9,153 @@ import (
"gopkg.in/yaml.v3"
)
+// TestModel_GetPropertyNode_Success tests the GetPropertyNode method with
+// valid inputs: a property whose marshaller.Node has a key node yields that
+// node (checked via its Line), while a nil KeyNode yields a nil result.
+func TestModel_GetPropertyNode_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ setup func() *marshaller.Model[core.TestPrimitiveModel]
+ prop string
+ expected int // expected key-node line; -1 means a nil node is expected
+ }{
+ {
+ name: "property with key node returns line number",
+ setup: func() *marshaller.Model[core.TestPrimitiveModel] {
+ keyNode := &yaml.Node{Line: 42}
+ coreModel := core.TestPrimitiveModel{
+ StringField: marshaller.Node[string]{
+ KeyNode: keyNode,
+ Key: "stringField",
+ Value: "testValue",
+ Present: true,
+ },
+ }
+ model := &marshaller.Model[core.TestPrimitiveModel]{
+ Valid: true,
+ }
+ model.SetCore(&coreModel)
+ return model
+ },
+ prop: "StringField",
+ expected: 42,
+ },
+ {
+ name: "property with nil key node returns -1",
+ setup: func() *marshaller.Model[core.TestPrimitiveModel] {
+ coreModel := core.TestPrimitiveModel{
+ StringField: marshaller.Node[string]{
+ KeyNode: nil,
+ Key: "stringField",
+ Value: "testValue",
+ Present: true,
+ },
+ }
+ model := &marshaller.Model[core.TestPrimitiveModel]{
+ Valid: true,
+ }
+ model.SetCore(&coreModel)
+ return model
+ },
+ prop: "StringField",
+ expected: -1,
+ },
+ {
+ name: "bool field with key node returns line number",
+ setup: func() *marshaller.Model[core.TestPrimitiveModel] {
+ keyNode := &yaml.Node{Line: 15}
+ coreModel := core.TestPrimitiveModel{
+ BoolField: marshaller.Node[bool]{
+ KeyNode: keyNode,
+ Key: "boolField",
+ Value: true,
+ Present: true,
+ },
+ }
+ model := &marshaller.Model[core.TestPrimitiveModel]{
+ Valid: true,
+ }
+ model.SetCore(&coreModel)
+ return model
+ },
+ prop: "BoolField",
+ expected: 15,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+
+ model := tt.setup()
+ actual := model.GetPropertyNode(tt.prop)
+ // Normalize the returned node to a line number so all cases share
+ // one assertion: nil node maps to the -1 sentinel.
+ line := -1
+ if actual != nil {
+ line = actual.Line
+ }
+ assert.Equal(t, tt.expected, line, "line number should match expected value")
+ })
+ }
+}
+
+// TestModel_GetPropertyNode_Error tests the GetPropertyNode method with
+// error conditions: a nil model receiver, a property name that does not
+// exist on the core struct, and a field that is not a marshaller.Node
+// (no GetKeyNode method). All must return a nil node.
+func TestModel_GetPropertyNode_Error(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ setup func() *marshaller.Model[core.TestPrimitiveModel]
+ prop string
+ expected int // -1 expected for all error cases (nil node)
+ }{
+ {
+ name: "nil model returns -1",
+ setup: func() *marshaller.Model[core.TestPrimitiveModel] {
+ return nil
+ },
+ prop: "StringField",
+ expected: -1,
+ },
+ {
+ name: "non-existent property returns -1",
+ setup: func() *marshaller.Model[core.TestPrimitiveModel] {
+ return &marshaller.Model[core.TestPrimitiveModel]{}
+ },
+ prop: "NonExistentField",
+ expected: -1,
+ },
+ {
+ name: "property that is not a Node returns -1",
+ setup: func() *marshaller.Model[core.TestPrimitiveModel] {
+ coreModel := core.TestPrimitiveModel{
+ CoreModel: marshaller.CoreModel{}, // This field doesn't implement GetKeyNode
+ }
+ model := &marshaller.Model[core.TestPrimitiveModel]{
+ Valid: true,
+ }
+ model.SetCore(&coreModel)
+ return model
+ },
+ prop: "CoreModel",
+ expected: -1,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+
+ model := tt.setup()
+ actual := model.GetPropertyNode(tt.prop)
+ // Every case above expects nil; the else branch is defensive and
+ // reports the actual line if a node is unexpectedly returned.
+ if actual == nil {
+ assert.Equal(t, tt.expected, -1, "should return -1 for error conditions")
+ } else {
+ assert.Equal(t, tt.expected, actual.Line, "line number should match expected value")
+ }
+ })
+ }
+}
+
// TestModel_GetPropertyLine_Success tests the GetPropertyLine method with valid inputs
func TestModel_GetPropertyLine_Success(t *testing.T) {
t.Parallel()
diff --git a/marshaller/sequencedmap.go b/marshaller/sequencedmap.go
index bf55c1a..0bbb583 100644
--- a/marshaller/sequencedmap.go
+++ b/marshaller/sequencedmap.go
@@ -30,7 +30,7 @@ func unmarshalSequencedMap(ctx context.Context, parentName string, node *yaml.No
// Check if the node is actually a mapping node
if resolvedNode.Kind != yaml.MappingNode {
validationErr := validation.NewTypeMismatchError(parentName, "expected mapping node for sequenced map, got %v", resolvedNode.Kind)
- return []error{validation.NewValidationError(validationErr, resolvedNode)}, nil
+ return []error{validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validationErr, resolvedNode)}, nil
}
target.Init()
@@ -57,7 +57,9 @@ func unmarshalSequencedMap(ctx context.Context, parentName string, node *yaml.No
indicesToSkip[existing.lastIndex] = true
// Create validation error for the earlier occurrence
duplicateKeyErrs = append(duplicateKeyErrs, validation.NewValidationError(
- validation.NewValueValidationError("mapping key %q at line %d is a duplicate; previous definition at line %d", key, keyNode.Line, existing.firstLine),
+ validation.SeverityWarning,
+ validation.RuleValidationDuplicateKey,
+ fmt.Errorf("mapping key %q at line %d is a duplicate; previous definition at line %d", key, keyNode.Line, existing.firstLine),
keyNode,
))
// Update to point to current (last) occurrence
diff --git a/marshaller/unmarshaller.go b/marshaller/unmarshaller.go
index c066e88..5c287ea 100644
--- a/marshaller/unmarshaller.go
+++ b/marshaller/unmarshaller.go
@@ -192,7 +192,18 @@ func unmarshal(ctx context.Context, parentName string, node *yaml.Node, out refl
return nil, fmt.Errorf("expected Unmarshallable, got %s at line %d, column %d", out.Type(), resolvedNode.Line, resolvedNode.Column)
}
- return unmarshallable.Unmarshal(ctx, parentName, node)
+ validationErrs, err := unmarshallable.Unmarshal(ctx, parentName, node)
+ if err != nil {
+ return nil, err
+ }
+
+ if implementsInterface(out, coreModelerType) {
+ if coreModeler, ok := out.Interface().(CoreModeler); ok {
+ coreModeler.SetRootNode(node)
+ }
+ }
+
+ return validationErrs, nil
}
if implementsInterface(out, sequencedMapType) {
@@ -307,7 +318,7 @@ func unmarshalModel(ctx context.Context, parentName string, node *yaml.Node, str
if resolvedNode.Kind != yaml.MappingNode {
return []error{
- validation.NewValidationError(validation.NewTypeMismatchError(parentName, "expected object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode),
+ validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "expected object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode),
}, nil
}
@@ -368,7 +379,9 @@ func unmarshalModel(ctx context.Context, parentName string, node *yaml.Node, str
indicesToSkip[info.lastIndex] = true
// Create validation error for the earlier occurrence
duplicateKeyErrs = append(duplicateKeyErrs, validation.NewValidationError(
- validation.NewValueValidationError("mapping key %q at line %d is a duplicate; previous definition at line %d", key, keyNode.Line, info.firstLine),
+ validation.SeverityWarning,
+ validation.RuleValidationDuplicateKey,
+ fmt.Errorf("mapping key %q at line %d is a duplicate; previous definition at line %d", key, keyNode.Line, info.firstLine),
keyNode,
))
// Update to track this as the new last occurrence
@@ -489,7 +502,7 @@ func unmarshalModel(ctx context.Context, parentName string, node *yaml.Node, str
// Check for missing required fields using cached required field info
for tag := range fieldMap.RequiredFields {
if _, ok := foundRequiredFields.Load(tag); !ok {
- validationErrs = append(validationErrs, validation.NewValidationError(validation.NewMissingFieldError("%s.%s is missing", modelTag, tag), resolvedNode))
+ validationErrs = append(validationErrs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, fmt.Errorf("%s.%s is required", modelTag, tag), resolvedNode))
}
}
@@ -532,7 +545,7 @@ func decodeNode(_ context.Context, parentName string, node *yaml.Node, out any)
// Check if this is a type mismatch error
if yamlTypeErr := asTypeMismatchError(err); yamlTypeErr != nil {
// Convert type mismatch to validation error
- validationErr := validation.NewValidationError(validation.NewTypeMismatchError(parentName, strings.Join(yamlTypeErr.Errors, ", ")), resolvedNode)
+ validationErr := validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, strings.Join(yamlTypeErr.Errors, ", ")), resolvedNode)
return []error{validationErr}, nil //nolint:nilerr
}
@@ -678,7 +691,7 @@ func isMapType(out reflect.Value) bool {
// validateNodeKind checks if the node kind matches the expected kind and returns appropriate error
func validateNodeKind(resolvedNode *yaml.Node, expectedKind yaml.Kind, parentName string, reflectType reflect.Type, expectedType string) error {
if resolvedNode == nil {
- return validation.NewValidationError(validation.NewTypeMismatchError(parentName, "expected %s, got nil", yml.NodeKindToString(expectedKind)), nil)
+ return validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "expected %s, got nil", yml.NodeKindToString(expectedKind)), nil)
}
// Check if the node kind matches
@@ -725,11 +738,11 @@ func validateNodeKind(resolvedNode *yaml.Node, expectedKind yaml.Kind, parentNam
actualKindStr = fmt.Sprintf("`%s`", value)
}
- return validation.NewValidationError(validation.NewTypeMismatchError(parentName, "expected %s, got %s", expectedType, actualKindStr), resolvedNode)
+ return validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "expected %s, got %s", expectedType, actualKindStr), resolvedNode)
}
if !tagMatches {
- return validation.NewValidationError(validation.NewTypeMismatchError(parentName, "expected %s, got %s", expectedType, yml.NodeTagToString(resolvedNode.Tag)), resolvedNode)
+ return validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "expected %s, got %s", expectedType, yml.NodeTagToString(resolvedNode.Tag)), resolvedNode)
}
return nil
}
diff --git a/marshaller/unmarshalling_test.go b/marshaller/unmarshalling_test.go
index aa0ab3d..c319f8f 100644
--- a/marshaller/unmarshalling_test.go
+++ b/marshaller/unmarshalling_test.go
@@ -153,10 +153,10 @@ func TestUnmarshal_PrimitiveTypes_Error(t *testing.T) {
stringPtrField: "optional field"
`,
wantErrs: []string{
- "[2:1] testPrimitiveModel.boolField is missing",
- "[2:1] testPrimitiveModel.float64Field is missing",
- "[2:1] testPrimitiveModel.intField is missing",
- "[2:1] testPrimitiveModel.stringField is missing",
+ "[2:1] error validation-required-field testPrimitiveModel.boolField is required",
+ "[2:1] error validation-required-field testPrimitiveModel.float64Field is required",
+ "[2:1] error validation-required-field testPrimitiveModel.intField is required",
+ "[2:1] error validation-required-field testPrimitiveModel.stringField is required",
},
},
{
@@ -167,7 +167,7 @@ boolField: true
intField: 42
float64Field: 3.14
`,
- wantErrs: []string{"[2:14] testPrimitiveModel.stringField expected string, got sequence"},
+ wantErrs: []string{"[2:14] error validation-type-mismatch testPrimitiveModel.stringField expected string, got sequence"},
},
{
name: "type mismatch - bool field gets string",
@@ -177,7 +177,7 @@ boolField: "not a bool"
intField: 42
float64Field: 3.14
`,
- wantErrs: []string{"[3:12] testPrimitiveModel.boolField line 3: cannot unmarshal !!str `not a bool` into bool"},
+ wantErrs: []string{"[3:12] error validation-type-mismatch testPrimitiveModel.boolField line 3: cannot unmarshal !!str `not a bool` into bool"},
},
{
name: "type mismatch - int field gets string",
@@ -187,7 +187,7 @@ boolField: true
intField: "not an int"
float64Field: 3.14
`,
- wantErrs: []string{"[4:11] testPrimitiveModel.intField line 4: cannot unmarshal !!str `not an int` into int"},
+ wantErrs: []string{"[4:11] error validation-type-mismatch testPrimitiveModel.intField line 4: cannot unmarshal !!str `not an int` into int"},
},
{
name: "type mismatch - float field gets string",
@@ -197,7 +197,7 @@ boolField: true
intField: 42
float64Field: "not a float"
`,
- wantErrs: []string{"[5:15] testPrimitiveModel.float64Field line 5: cannot unmarshal !!str `not a f...` into float64"},
+ wantErrs: []string{"[5:15] error validation-type-mismatch testPrimitiveModel.float64Field line 5: cannot unmarshal !!str `not a f...` into float64"},
},
{
name: "multiple validation errors",
@@ -206,10 +206,10 @@ boolField: "not a bool"
intField: "not an int"
`,
wantErrs: []string{
- "[2:1] testPrimitiveModel.float64Field is missing",
- "[2:1] testPrimitiveModel.stringField is missing",
- "[2:12] testPrimitiveModel.boolField line 2: cannot unmarshal !!str `not a bool` into bool",
- "[3:11] testPrimitiveModel.intField line 3: cannot unmarshal !!str `not an int` into int",
+ "[2:1] error validation-required-field testPrimitiveModel.float64Field is required",
+ "[2:1] error validation-required-field testPrimitiveModel.stringField is required",
+ "[2:12] error validation-type-mismatch testPrimitiveModel.boolField line 2: cannot unmarshal !!str `not a bool` into bool",
+ "[3:11] error validation-type-mismatch testPrimitiveModel.intField line 3: cannot unmarshal !!str `not an int` into int",
},
},
}
@@ -358,9 +358,9 @@ nestedModel:
# missing required stringField, boolField, float64Field
`,
wantErrs: []string{
- "[8:3] testPrimitiveModel.stringField is missing",
- "[8:3] testPrimitiveModel.boolField is missing",
- "[8:3] testPrimitiveModel.float64Field is missing",
+ "[8:3] error validation-required-field testPrimitiveModel.stringField is required",
+ "[8:3] error validation-required-field testPrimitiveModel.boolField is required",
+ "[8:3] error validation-required-field testPrimitiveModel.float64Field is required",
},
},
{
@@ -374,7 +374,7 @@ nestedModelValue:
nestedModel:
- "this should be an object"
`,
- wantErrs: []string{"[8:3] testComplexModel.nestedModel expected object, got sequence"},
+ wantErrs: []string{"[8:3] error validation-type-mismatch testComplexModel.nestedModel expected object, got sequence"},
},
{
name: "type mismatch - array field gets object",
@@ -387,7 +387,7 @@ nestedModelValue:
arrayField:
key: "this should be an array"
`,
- wantErrs: []string{"[8:3] testComplexModel.arrayField expected sequence, got object"},
+ wantErrs: []string{"[8:3] error validation-type-mismatch testComplexModel.arrayField expected sequence, got object"},
},
{
name: "deeply nested validation error",
@@ -407,7 +407,7 @@ structArrayField:
float64Field: 4.56
# missing required stringField in second element
`,
- wantErrs: []string{"[12:5] testPrimitiveModel.stringField is missing"},
+ wantErrs: []string{"[12:5] error validation-required-field testPrimitiveModel.stringField is required"},
},
}
@@ -658,7 +658,7 @@ func TestUnmarshal_RequiredPointer_Error(t *testing.T) {
yml: `
optionalPtr: "only optional set"
`,
- wantErrs: []string{"[2:1] testRequiredPointerModel.requiredPtr is missing"},
+ wantErrs: []string{"[2:1] error validation-required-field testRequiredPointerModel.requiredPtr is required"},
},
{
name: "required pointer field with null value should be valid",
@@ -768,12 +768,12 @@ func TestUnmarshal_RequiredNilableTypes_Error(t *testing.T) {
optionalPtr: "only optional set"
`,
wantErrs: []string{
- "[2:1] testRequiredNilableModel.requiredEither is missing",
- "[2:1] testRequiredNilableModel.requiredMap is missing",
- "[2:1] testRequiredNilableModel.requiredPtr is missing",
- "[2:1] testRequiredNilableModel.requiredRawNode is missing",
- "[2:1] testRequiredNilableModel.requiredSlice is missing",
- "[2:1] testRequiredNilableModel.requiredStruct is missing",
+ "[2:1] error validation-required-field testRequiredNilableModel.requiredEither is required",
+ "[2:1] error validation-required-field testRequiredNilableModel.requiredMap is required",
+ "[2:1] error validation-required-field testRequiredNilableModel.requiredPtr is required",
+ "[2:1] error validation-required-field testRequiredNilableModel.requiredRawNode is required",
+ "[2:1] error validation-required-field testRequiredNilableModel.requiredSlice is required",
+ "[2:1] error validation-required-field testRequiredNilableModel.requiredStruct is required",
},
},
{
@@ -784,10 +784,10 @@ requiredSlice: ["item1"]
# missing requiredMap, requiredStruct, requiredEither, requiredRawNode
`,
wantErrs: []string{
- "[2:1] testRequiredNilableModel.requiredEither is missing",
- "[2:1] testRequiredNilableModel.requiredMap is missing",
- "[2:1] testRequiredNilableModel.requiredRawNode is missing",
- "[2:1] testRequiredNilableModel.requiredStruct is missing",
+ "[2:1] error validation-required-field testRequiredNilableModel.requiredEither is required",
+ "[2:1] error validation-required-field testRequiredNilableModel.requiredMap is required",
+ "[2:1] error validation-required-field testRequiredNilableModel.requiredRawNode is required",
+ "[2:1] error validation-required-field testRequiredNilableModel.requiredStruct is required",
},
},
{
@@ -804,10 +804,10 @@ requiredEither: "string value"
requiredRawNode: "raw value"
`,
wantErrs: []string{
- "[8:3] testPrimitiveModel.boolField is missing",
- "[8:3] testPrimitiveModel.float64Field is missing",
- "[8:3] testPrimitiveModel.intField is missing",
- "[8:3] testPrimitiveModel.stringField is missing",
+ "[8:3] error validation-required-field testPrimitiveModel.boolField is required",
+ "[8:3] error validation-required-field testPrimitiveModel.float64Field is required",
+ "[8:3] error validation-required-field testPrimitiveModel.intField is required",
+ "[8:3] error validation-required-field testPrimitiveModel.stringField is required",
},
},
}
diff --git a/mise-tasks/update-lint-docs b/mise-tasks/update-lint-docs
new file mode 100755
index 0000000..d32e476
--- /dev/null
+++ b/mise-tasks/update-lint-docs
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+# -e: abort on any command failure; -u: error on unset variables;
+# -o pipefail: a pipeline fails if any stage fails.
+set -euo pipefail
+
+# Update Lint Docs - Automatically update lint rule documentation in READMEs
+# This script uses a Go program to generate a rules table from registered linter rules
+# and updates the corresponding README.md files between the lint rules tags.
+# NOTE(review): the relative ./cmd path assumes this is run from the repo root
+# (as a mise task) — confirm mise sets the working directory accordingly.
+
+echo "🔄 Updating lint rules in README files..."
+
+# Delegate all real work to the Go tool; set -e aborts here on failure.
+echo "🚀 Running update-lint-docs tool..."
+go run ./cmd/update-lint-docs
+
+echo ""
+echo "📋 Summary:"
+echo " • Updated openapi/linter/README.md with rules from the OpenAPI linter registry"
+echo ""
+echo "💡 Rule documentation is automatically generated from the Rule interface methods."
+echo " To update the docs, modify the rule's Description(), Link(), etc. methods and re-run this task."
diff --git a/openapi/bundle.go b/openapi/bundle.go
index 84be045..abce65d 100644
--- a/openapi/bundle.go
+++ b/openapi/bundle.go
@@ -299,7 +299,7 @@ func bundleSchema(ctx context.Context, schema *oas3.JSONSchema[oas3.Referenceabl
if err := bundleObject(ctx, resolvedRefSchema, namingStrategy, references.ResolveOptions{
RootDocument: opts.RootDocument,
TargetDocument: targetDocInfo.ResolvedDocument,
- TargetLocation: targetDocInfo.AbsoluteReference,
+ TargetLocation: targetDocInfo.AbsoluteDocumentPath,
}, componentStorage); err != nil {
return fmt.Errorf("failed to bundle nested references in %s: %w", ref, err)
}
@@ -702,12 +702,12 @@ func bundleGenericReference[T any, V interfaces.Validator[T], C marshaller.CoreM
if targetDocInfo == nil {
return fmt.Errorf("failed to get resolution info for %s reference %s", componentType, refStr)
}
- componentStorage.componentLocations[componentType+"/"+componentName] = targetDocInfo.AbsoluteReference
+ componentStorage.componentLocations[componentType+"/"+componentName] = targetDocInfo.AbsoluteDocumentPath
if err := bundleObject(ctx, bundledRef, namingStrategy, references.ResolveOptions{
RootDocument: opts.RootDocument,
TargetDocument: targetDocInfo.ResolvedDocument,
- TargetLocation: targetDocInfo.AbsoluteReference,
+ TargetLocation: targetDocInfo.AbsoluteDocumentPath,
}, componentStorage); err != nil {
return fmt.Errorf("failed to bundle nested references in %s: %w", ref.GetReference(), err)
}
@@ -736,7 +736,7 @@ func getFinalAbsoluteRef[T any, V interfaces.Validator[T], C marshaller.CoreMode
nextRefInfo := resInfo.Object.GetReferenceResolutionInfo()
if nextRefInfo != nil {
// Build the absolute reference from the final resolution
- finalRef := nextRefInfo.AbsoluteReference
+ finalRef := nextRefInfo.AbsoluteDocumentPath
if nextRefInfo.Object != nil && nextRefInfo.Object.Reference != nil {
// Add the fragment from the chained reference
fragment := string(nextRefInfo.Object.Reference.GetJSONPointer())
diff --git a/openapi/callbacks.go b/openapi/callbacks.go
index c6b770c..b17bb62 100644
--- a/openapi/callbacks.go
+++ b/openapi/callbacks.go
@@ -2,6 +2,7 @@ package openapi
import (
"context"
+ "fmt"
"github.com/speakeasy-api/openapi/expression"
"github.com/speakeasy-api/openapi/extensions"
@@ -64,7 +65,7 @@ func (c *Callback) Validate(ctx context.Context, opts ...validation.Option) []er
}
}
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("callback expression is invalid: %s", err.Error()), node))
+ errs = append(errs, validation.NewValidationError(validation.SeverityWarning, validation.RuleValidationInvalidFormat, fmt.Errorf("callback expression is invalid: %w", err), node))
}
errs = append(errs, pathItem.Validate(ctx, opts...)...)
diff --git a/openapi/callbacks_validate_test.go b/openapi/callbacks_validate_test.go
index bb5e816..33d09f6 100644
--- a/openapi/callbacks_validate_test.go
+++ b/openapi/callbacks_validate_test.go
@@ -117,7 +117,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, must begin with $: request.body#/webhookUrl"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, must begin with $: request.body#/webhookUrl"},
},
{
name: "invalid_expression_unknown_type",
@@ -129,7 +129,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, must begin with one of [url, method, statusCode, request, response, inputs, outputs, steps, workflows, sourceDescriptions, components]: {$unknown.body#/webhookUrl}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, must begin with one of [url, method, statusCode, request, response, inputs, outputs, steps, workflows, sourceDescriptions, components]: {$unknown.body#/webhookUrl}"},
},
{
name: "invalid_expression_url_with_extra_parts",
@@ -141,7 +141,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, extra characters after $url: {$url.extra}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, extra characters after $url: {$url.extra}"},
},
{
name: "invalid_expression_request_without_reference",
@@ -153,7 +153,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, expected one of [header, query, path, body] after $request: {$request}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, expected one of [header, query, path, body] after $request: {$request}"},
},
{
name: "invalid_expression_request_unknown_reference",
@@ -165,7 +165,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, expected one of [header, query, path, body] after $request: {$request.unknown}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, expected one of [header, query, path, body] after $request: {$request.unknown}"},
},
{
name: "invalid_expression_request_header_missing_token",
@@ -177,7 +177,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, expected token after $request.header: {$request.header}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, expected token after $request.header: {$request.header}"},
},
{
name: "invalid_expression_request_header_invalid_token",
@@ -189,7 +189,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: header reference must be a valid token [^[!#$%&'*+\\-.^_`|~\\dA-Za-z]+$]: {$request.header.some@header}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: header reference must be a valid token [^[!#$%&'*+\\-.^_`|~\\dA-Za-z]+$]: {$request.header.some@header}"},
},
{
name: "invalid_expression_request_query_missing_name",
@@ -201,7 +201,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, expected name after $request.query: {$request.query}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, expected name after $request.query: {$request.query}"},
},
{
name: "invalid_expression_request_path_missing_name",
@@ -213,7 +213,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, expected name after $request.path: {$request.path}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, expected name after $request.path: {$request.path}"},
},
{
name: "invalid_expression_request_body_with_extra_parts",
@@ -225,7 +225,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, only json pointers are allowed after $request.body: {$request.body.extra}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, only json pointers are allowed after $request.body: {$request.body.extra}"},
},
{
name: "invalid_expression_invalid_json_pointer",
@@ -237,7 +237,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: validation error -- jsonpointer must start with /: some/path}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: validation error -- jsonpointer must start with /: some/path}"},
},
{
name: "invalid_nested_pathitem_invalid_server",
@@ -251,7 +251,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[4:7] server.url is missing"},
+ wantErrs: []string{"[4:7] error validation-required-field server.url is required"},
},
}
diff --git a/openapi/components_validate_test.go b/openapi/components_validate_test.go
index 01bffe8..6affb82 100644
--- a/openapi/components_validate_test.go
+++ b/openapi/components_validate_test.go
@@ -255,7 +255,7 @@ securitySchemes:
InvalidScheme:
description: Some scheme
`,
- wantErrs: []string{"[4:5] securityScheme.type is missing"},
+ wantErrs: []string{"[4:5] error validation-required-field securityScheme.type is required"},
},
}
diff --git a/openapi/core/reference.go b/openapi/core/reference.go
index 988e63a..9e1d212 100644
--- a/openapi/core/reference.go
+++ b/openapi/core/reference.go
@@ -34,7 +34,7 @@ func (r *Reference[T]) Unmarshal(ctx context.Context, parentName string, node *y
if resolvedNode.Kind != yaml.MappingNode {
r.SetValid(false, false)
- return []error{validation.NewValidationError(validation.NewTypeMismatchError(parentName, "reference expected object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode)}, nil
+ return []error{validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "reference expected object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode)}, nil
}
if _, _, ok := yml.GetMapElementNodes(ctx, resolvedNode, "$ref"); ok {
diff --git a/openapi/encoding.go b/openapi/encoding.go
index 50b000f..f0a9a65 100644
--- a/openapi/encoding.go
+++ b/openapi/encoding.go
@@ -128,7 +128,7 @@ func (e *Encoding) Validate(ctx context.Context, opts ...validation.Option) []er
for _, mediaType := range mediaTypes {
_, _, err := mime.ParseMediaType(mediaType)
if err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError(fmt.Sprintf("encoding.contentType %s is not a valid media type: %s", mediaType, err)), core, core.ContentType))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("encoding.contentType %s is not a valid media type: %w", mediaType, err), core, core.ContentType))
}
}
}
@@ -140,7 +140,7 @@ func (e *Encoding) Validate(ctx context.Context, opts ...validation.Option) []er
if core.Style.Present {
allowedStyles := []string{string(SerializationStyleForm), string(SerializationStyleSpaceDelimited), string(SerializationStylePipeDelimited), string(SerializationStyleDeepObject)}
if !slices.Contains(allowedStyles, string(*e.Style)) {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError(fmt.Sprintf("encoding.style must be one of [%s]", strings.Join(allowedStyles, ", "))), core, core.Style))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("encoding.style must be one of [%s]", strings.Join(allowedStyles, ", ")), core, core.Style))
}
}
diff --git a/openapi/examples.go b/openapi/examples.go
index f2bc01b..2e8c064 100644
--- a/openapi/examples.go
+++ b/openapi/examples.go
@@ -2,6 +2,7 @@ package openapi
import (
"context"
+ "errors"
"fmt"
"net/url"
@@ -104,27 +105,27 @@ func (e *Example) Validate(ctx context.Context, opts ...validation.Option) []err
// Check mutual exclusivity: value and externalValue
if core.Value.Present && core.ExternalValue.Present {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("example.value and externalValue are mutually exclusive"), core, core.Value))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("example.value and example.externalValue are mutually exclusive"), core, core.Value))
}
// Check mutual exclusivity: dataValue and value
if core.DataValue.Present && core.Value.Present {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("example.dataValue and value are mutually exclusive"), core, core.DataValue))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("example.dataValue and example.value are mutually exclusive"), core, core.DataValue))
}
// Check mutual exclusivity: serializedValue and value
if core.SerializedValue.Present && core.Value.Present {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("example.serializedValue and value are mutually exclusive"), core, core.SerializedValue))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("example.serializedValue and example.value are mutually exclusive"), core, core.SerializedValue))
}
// Check mutual exclusivity: serializedValue and externalValue
if core.SerializedValue.Present && core.ExternalValue.Present {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("example.serializedValue and externalValue are mutually exclusive"), core, core.SerializedValue))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("example.serializedValue and example.externalValue are mutually exclusive"), core, core.SerializedValue))
}
if core.ExternalValue.Present {
if _, err := url.Parse(*e.ExternalValue); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError(fmt.Sprintf("example.externalValue is not a valid uri: %s", err)), core, core.ExternalValue))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("example.externalValue is not a valid uri: %w", err), core, core.ExternalValue))
}
}
diff --git a/openapi/examples_validate_test.go b/openapi/examples_validate_test.go
index dd1ac9a..7a7c670 100644
--- a/openapi/examples_validate_test.go
+++ b/openapi/examples_validate_test.go
@@ -157,14 +157,14 @@ func TestExample_Validate_Error(t *testing.T) {
summary: Example with invalid URL
externalValue: ":invalid"
`,
- wantErrs: []string{"[3:16] example.externalValue is not a valid uri: parse \":invalid\": missing protocol scheme"},
+ wantErrs: []string{"[3:16] error validation-invalid-format example.externalValue is not a valid uri: parse \":invalid\": missing protocol scheme"},
},
{
name: "invalid external value URL with spaces",
yml: `
externalValue: ":invalid url"
`,
- wantErrs: []string{"[2:16] example.externalValue is not a valid uri: parse \":invalid url\": missing protocol scheme"},
+ wantErrs: []string{"[2:16] error validation-invalid-format example.externalValue is not a valid uri: parse \":invalid url\": missing protocol scheme"},
},
{
name: "both value and external value provided",
@@ -173,7 +173,7 @@ summary: Invalid example
value: "test"
externalValue: "https://example.com/test.json"
`,
- wantErrs: []string{"[3:8] example.value and externalValue are mutually exclusive"},
+ wantErrs: []string{"[3:8] error validation-mutually-exclusive-fields example.value and example.externalValue are mutually exclusive"},
},
{
name: "multiple validation errors",
@@ -182,8 +182,8 @@ value: "test"
externalValue: ":invalid"
`,
wantErrs: []string{
- "[2:8] example.value and externalValue are mutually exclusive",
- "[3:16] example.externalValue is not a valid uri: parse \":invalid\": missing protocol scheme",
+ "[2:8] error validation-mutually-exclusive-fields example.value and example.externalValue are mutually exclusive",
+ "[3:16] error validation-invalid-format example.externalValue is not a valid uri: parse \":invalid\": missing protocol scheme",
},
},
{
@@ -194,7 +194,7 @@ dataValue:
id: 123
value: "test"
`,
- wantErrs: []string{"example.dataValue and value are mutually exclusive"},
+ wantErrs: []string{"error validation-mutually-exclusive-fields example.dataValue and example.value are mutually exclusive"},
},
{
name: "serializedValue and value are mutually exclusive",
@@ -203,7 +203,7 @@ summary: Invalid example
serializedValue: "test=123"
value: "test"
`,
- wantErrs: []string{"example.serializedValue and value are mutually exclusive"},
+ wantErrs: []string{"error validation-mutually-exclusive-fields example.serializedValue and example.value are mutually exclusive"},
},
{
name: "serializedValue and externalValue are mutually exclusive",
@@ -212,23 +212,23 @@ summary: Invalid example
serializedValue: "test=123"
externalValue: https://example.com/test.json
`,
- wantErrs: []string{"example.serializedValue and externalValue are mutually exclusive"},
+ wantErrs: []string{"error validation-mutually-exclusive-fields example.serializedValue and example.externalValue are mutually exclusive"},
},
{
name: "multiple mutual exclusivity violations",
yml: `
summary: Invalid example
dataValue:
- id: 123
+ id: 123
value: "test"
serializedValue: "test=123"
externalValue: https://example.com/test.json
`,
wantErrs: []string{
- "example.value and externalValue are mutually exclusive",
- "example.dataValue and value are mutually exclusive",
- "example.serializedValue and value are mutually exclusive",
- "example.serializedValue and externalValue are mutually exclusive",
+ "error validation-mutually-exclusive-fields example.value and example.externalValue are mutually exclusive",
+ "error validation-mutually-exclusive-fields example.dataValue and example.value are mutually exclusive",
+ "error validation-mutually-exclusive-fields example.serializedValue and example.value are mutually exclusive",
+ "error validation-mutually-exclusive-fields example.serializedValue and example.externalValue are mutually exclusive",
},
},
}
diff --git a/openapi/header.go b/openapi/header.go
index 36591af..6e7c1aa 100644
--- a/openapi/header.go
+++ b/openapi/header.go
@@ -2,6 +2,7 @@ package openapi
import (
"context"
+ "fmt"
"slices"
"strings"
@@ -131,7 +132,7 @@ func (h *Header) Validate(ctx context.Context, opts ...validation.Option) []erro
if core.Style.Present {
allowedStyles := []string{string(SerializationStyleSimple)}
if !slices.Contains(allowedStyles, string(*h.Style)) {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("header.style must be one of [%s]", strings.Join(allowedStyles, ", ")), core, core.Style))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("header.style must be one of [%s]", strings.Join(allowedStyles, ", ")), core, core.Style))
}
}
diff --git a/openapi/header_validate_test.go b/openapi/header_validate_test.go
index 4e229c6..056984b 100644
--- a/openapi/header_validate_test.go
+++ b/openapi/header_validate_test.go
@@ -128,8 +128,8 @@ schema:
description: Header with invalid schema
`,
wantErrs: []string{
- "[3:9] schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'",
- "[3:9] schema.type expected array, got string",
+ "[3:9] error validation-invalid-schema schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'",
+ "[3:9] error validation-type-mismatch schema.type expected array, got string",
},
},
}
diff --git a/openapi/index.go b/openapi/index.go
new file mode 100644
index 0000000..9cf6c6a
--- /dev/null
+++ b/openapi/index.go
@@ -0,0 +1,1552 @@
+package openapi
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "strings"
+
+ "github.com/speakeasy-api/openapi/internal/interfaces"
+ "github.com/speakeasy-api/openapi/jsonschema/oas3"
+ "github.com/speakeasy-api/openapi/marshaller"
+ "github.com/speakeasy-api/openapi/pointer"
+ "github.com/speakeasy-api/openapi/references"
+ "github.com/speakeasy-api/openapi/validation"
+ "gopkg.in/yaml.v3"
+)
+
+// CircularClassification represents the classification of a circular reference.
+type CircularClassification int
+
+const (
+ // CircularUnclassified means the circular reference has not been classified yet.
+ CircularUnclassified CircularClassification = iota
+ // CircularValid means the circular reference is valid (has a termination point).
+ CircularValid
+ // CircularInvalid means the circular reference is invalid (no termination point).
+ CircularInvalid
+	// CircularPending means the circular reference is part of a polymorphic schema and needs post-processing.
+ CircularPending
+)
+
+// CircularPathSegment represents a segment of the path through the schema tree.
+// It captures constraint information needed to determine if a circular reference can terminate.
+type CircularPathSegment struct {
+ Field string // e.g., "properties", "items", "allOf", "oneOf", "anyOf", "additionalProperties"
+ PropertyName string // Set if Field == "properties"
+ IsRequired bool // Set if this property is in parent's Required array
+ ArrayMinItems int64 // Parent's MinItems value (0 means empty array terminates)
+ MinProperties int64 // Parent's MinProperties value (0 means empty object terminates)
+ BranchIndex int // Index in oneOf/anyOf/allOf array
+ IsNullable bool // True if this schema allows null (termination point)
+}
+
+// SchemaVisitInfo tracks the visitation state of a schema during indexing.
+type SchemaVisitInfo struct {
+ Location Locations // Location where first seen
+ InCurrentPath bool // True while actively walking this schema's children
+ CircularType CircularClassification // Classification result
+}
+
+// PolymorphicCircularRef tracks a polymorphic schema with recursive branches.
+// Used for post-processing to determine if all branches recurse.
+type PolymorphicCircularRef struct {
+ ParentSchema *oas3.JSONSchemaReferenceable // The parent with oneOf/anyOf/allOf
+ ParentLocation Locations // Location of the parent
+ Field string // "oneOf", "anyOf", or "allOf"
+ BranchResults map[int]CircularClassification // Index -> classification per branch
+ TotalBranches int // Total number of branches
+}
+
+// referenceStackEntry tracks a schema in the active reference resolution chain.
+// Uses JSON pointer strings for identity to handle type differences.
+type referenceStackEntry struct {
+ refTarget string // The $ref target (JSON pointer or URI)
+ location Locations // Where this reference was encountered
+}
+
+type Descriptioner interface {
+ GetDescription() string
+}
+
+type Summarizer interface {
+ GetSummary() string
+}
+
+type DescriptionAndSummary interface {
+ GetDescription() string
+ GetSummary() string
+}
+
+func (i *Index) currentDocumentPath() string {
+ if i == nil {
+ return ""
+ }
+ if len(i.currentDocumentStack) == 0 {
+ return ""
+ }
+ return i.currentDocumentStack[len(i.currentDocumentStack)-1]
+}
+
+// Index represents a pre-computed index of an OpenAPI document.
+// It provides efficient access to document elements without repeated full traversals.
+type Index struct {
+ Doc *OpenAPI
+
+ ExternalDocumentation []*IndexNode[*oas3.ExternalDocumentation] // All external documentation nodes
+
+ Tags []*IndexNode[*Tag] // All tags defined in the document
+
+ Servers []*IndexNode[*Server] // All servers defined in the document
+ ServerVariables []*IndexNode[*ServerVariable] // All server variables from all servers
+
+ BooleanSchemas []*IndexNode[*oas3.JSONSchemaReferenceable] // Boolean schema values (true/false)
+ InlineSchemas []*IndexNode[*oas3.JSONSchemaReferenceable] // Schemas defined inline (properties, items, etc.)
+ ComponentSchemas []*IndexNode[*oas3.JSONSchemaReferenceable] // Schemas in /components/schemas/ of main document
+ ExternalSchemas []*IndexNode[*oas3.JSONSchemaReferenceable] // Top-level schemas in external documents
+ SchemaReferences []*IndexNode[*oas3.JSONSchemaReferenceable] // All $ref pointers
+
+ InlinePathItems []*IndexNode[*ReferencedPathItem] // PathItems defined inline (in paths map)
+ ComponentPathItems []*IndexNode[*ReferencedPathItem] // PathItems in /components/pathItems/
+ ExternalPathItems []*IndexNode[*ReferencedPathItem] // Top-level PathItems in external documents
+ PathItemReferences []*IndexNode[*ReferencedPathItem] // All PathItem $ref pointers
+
+ Operations []*IndexNode[*Operation] // All operations (GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE, etc.)
+
+ InlineParameters []*IndexNode[*ReferencedParameter] // Parameters defined inline in operations/path items
+ ComponentParameters []*IndexNode[*ReferencedParameter] // Parameters in /components/parameters/
+ ParameterReferences []*IndexNode[*ReferencedParameter] // All Parameter $ref pointers
+
+ Responses []*IndexNode[*Responses] // All Responses containers (operation.responses)
+
+ InlineResponses []*IndexNode[*ReferencedResponse] // Responses defined inline in operations
+ ComponentResponses []*IndexNode[*ReferencedResponse] // Responses in /components/responses/
+ ResponseReferences []*IndexNode[*ReferencedResponse] // All Response $ref pointers
+
+ InlineRequestBodies []*IndexNode[*ReferencedRequestBody] // RequestBodies defined inline in operations
+ ComponentRequestBodies []*IndexNode[*ReferencedRequestBody] // RequestBodies in /components/requestBodies/
+ RequestBodyReferences []*IndexNode[*ReferencedRequestBody] // All RequestBody $ref pointers
+
+ InlineHeaders []*IndexNode[*ReferencedHeader] // Headers defined inline
+ ComponentHeaders []*IndexNode[*ReferencedHeader] // Headers in /components/headers/
+ HeaderReferences []*IndexNode[*ReferencedHeader] // All Header $ref pointers
+
+ InlineExamples []*IndexNode[*ReferencedExample] // Examples defined inline
+ ComponentExamples []*IndexNode[*ReferencedExample] // Examples in /components/examples/
+ ExampleReferences []*IndexNode[*ReferencedExample] // All Example $ref pointers
+
+ InlineLinks []*IndexNode[*ReferencedLink] // Links defined inline in responses
+ ComponentLinks []*IndexNode[*ReferencedLink] // Links in /components/links/
+ LinkReferences []*IndexNode[*ReferencedLink] // All Link $ref pointers
+
+ InlineCallbacks []*IndexNode[*ReferencedCallback] // Callbacks defined inline in operations
+ ComponentCallbacks []*IndexNode[*ReferencedCallback] // Callbacks in /components/callbacks/
+ CallbackReferences []*IndexNode[*ReferencedCallback] // All Callback $ref pointers
+
+ ComponentSecuritySchemes []*IndexNode[*ReferencedSecurityScheme] // SecuritySchemes in /components/securitySchemes/
+ SecuritySchemeReferences []*IndexNode[*ReferencedSecurityScheme] // All SecurityScheme $ref pointers
+ SecurityRequirements []*IndexNode[*SecurityRequirement] // All security requirement objects
+
+ Discriminators []*IndexNode[*oas3.Discriminator] // All discriminator objects in schemas
+ XMLs []*IndexNode[*oas3.XML] // All XML metadata in schemas
+ MediaTypes []*IndexNode[*MediaType] // All media types in request/response bodies
+ Encodings []*IndexNode[*Encoding] // All encoding objects in media types
+ OAuthFlows []*IndexNode[*OAuthFlows] // All OAuth flows containers
+ OAuthFlowItems []*IndexNode[*OAuthFlow] // Individual OAuth flow objects (implicit, password, clientCredentials, authorizationCode)
+
+ DescriptionNodes []*IndexNode[Descriptioner] // All nodes that have a Description field
+ SummaryNodes []*IndexNode[Summarizer] // All nodes that have a Summary field
+ DescriptionAndSummaryNodes []*IndexNode[DescriptionAndSummary] // All nodes that have both Description and Summary fields
+
+ validationErrs []error
+ resolutionErrs []error
+ circularErrs []error
+
+ resolveOpts references.ResolveOptions
+
+ // Circular reference tracking (internal)
+ indexedSchemas map[*oas3.JSONSchemaReferenceable]bool // Tracks which schemas have been fully indexed
+ referenceStack []referenceStackEntry // Active reference resolution chain (by ref target)
+ polymorphicRefs []*PolymorphicCircularRef // Pending polymorphic circulars
+ visitedRefs map[string]bool // Tracks visited ref targets to avoid duplicates
+ currentDocumentStack []string // Stack of document paths being walked (for determining external vs main)
+}
+
+// IndexNode wraps a node with its location in the document.
+type IndexNode[T any] struct {
+ Node T
+
+ Location Locations
+}
+
+// BuildIndex creates a new Index by walking the entire OpenAPI document.
+// It resolves references and detects circular reference patterns.
+// Requires resolveOpts to have RootDocument, TargetDocument, and TargetLocation set.
+func BuildIndex(ctx context.Context, doc *OpenAPI, resolveOpts references.ResolveOptions) *Index {
+ if resolveOpts.RootDocument == nil {
+ panic("BuildIndex: resolveOpts.RootDocument is required")
+ }
+ if resolveOpts.TargetDocument == nil {
+ panic("BuildIndex: resolveOpts.TargetDocument is required")
+ }
+ if resolveOpts.TargetLocation == "" {
+ panic("BuildIndex: resolveOpts.TargetLocation is required")
+ }
+
+ idx := &Index{
+ Doc: doc,
+ resolveOpts: resolveOpts,
+ indexedSchemas: make(map[*oas3.JSONSchemaReferenceable]bool),
+ referenceStack: make([]referenceStackEntry, 0),
+ polymorphicRefs: make([]*PolymorphicCircularRef, 0),
+ visitedRefs: make(map[string]bool),
+ currentDocumentStack: []string{resolveOpts.TargetLocation}, // Start with main document
+ }
+
+ // Phase 1: Walk and index everything
+ _ = buildIndex(ctx, idx, doc)
+
+ // Phase 2: Post-process polymorphic circular refs
+ idx.finalizePolymorphicCirculars()
+
+ return idx
+}
+
+// GetAllSchemas returns all schemas in the index (boolean, inline, component, external, and references).
+func (i *Index) GetAllSchemas() []*IndexNode[*oas3.JSONSchemaReferenceable] {
+ if i == nil {
+ return nil
+ }
+
+ allSchemas := make([]*IndexNode[*oas3.JSONSchemaReferenceable], 0, len(i.BooleanSchemas)+
+ len(i.InlineSchemas)+
+ len(i.ComponentSchemas)+
+ len(i.ExternalSchemas)+
+ len(i.SchemaReferences),
+ )
+ allSchemas = append(allSchemas, i.BooleanSchemas...)
+ allSchemas = append(allSchemas, i.InlineSchemas...)
+ allSchemas = append(allSchemas, i.ComponentSchemas...)
+ allSchemas = append(allSchemas, i.ExternalSchemas...)
+ allSchemas = append(allSchemas, i.SchemaReferences...)
+ return allSchemas
+}
+
+// GetAllPathItems returns all path items in the index (inline, component, and references).
+func (i *Index) GetAllPathItems() []*IndexNode[*ReferencedPathItem] {
+ if i == nil {
+ return nil
+ }
+
+ allPathItems := make([]*IndexNode[*ReferencedPathItem], 0, len(i.InlinePathItems)+
+ len(i.ComponentPathItems)+
+ len(i.ExternalPathItems)+
+ len(i.PathItemReferences),
+ )
+ allPathItems = append(allPathItems, i.InlinePathItems...)
+ allPathItems = append(allPathItems, i.ComponentPathItems...)
+ allPathItems = append(allPathItems, i.ExternalPathItems...)
+ allPathItems = append(allPathItems, i.PathItemReferences...)
+ return allPathItems
+}
+
+// GetValidationErrors returns validation errors from resolution operations.
+func (i *Index) GetValidationErrors() []error {
+ if i == nil {
+ return nil
+ }
+ return i.validationErrs
+}
+
+// GetResolutionErrors returns errors from failed reference resolution.
+func (i *Index) GetResolutionErrors() []error {
+ if i == nil {
+ return nil
+ }
+ return i.resolutionErrs
+}
+
+// GetCircularReferenceErrors returns invalid (non-terminating) circular reference errors.
+func (i *Index) GetCircularReferenceErrors() []error {
+ if i == nil {
+ return nil
+ }
+ return i.circularErrs
+}
+
+// GetAllErrors returns all errors collected during indexing.
+func (i *Index) GetAllErrors() []error {
+ if i == nil {
+ return nil
+ }
+ all := make([]error, 0, len(i.validationErrs)+len(i.resolutionErrs)+len(i.circularErrs))
+ all = append(all, i.validationErrs...)
+ all = append(all, i.resolutionErrs...)
+ all = append(all, i.circularErrs...)
+ return all
+}
+
+// HasErrors returns true if any errors were collected during indexing.
+func (i *Index) HasErrors() bool {
+ if i == nil {
+ return false
+ }
+ return len(i.validationErrs) > 0 || len(i.resolutionErrs) > 0 || len(i.circularErrs) > 0
+}
+
// buildIndex walks every node reachable from obj and dispatches each matched
// node type to the corresponding index* handler on index. The walk aborts on
// the first error returned by a handler (currently only schema indexing can
// fail); all other handlers accumulate results on the Index and return nil.
func buildIndex[T any](ctx context.Context, index *Index, obj *T) error {
	for item := range Walk(ctx, obj) {
		if err := item.Match(Matcher{
			ExternalDocs: func(ed *oas3.ExternalDocumentation) error {
				index.indexExternalDocs(ctx, item.Location, ed)
				return nil
			},
			Tag: func(t *Tag) error { index.indexTag(ctx, item.Location, t); return nil },
			Server: func(s *Server) error { index.indexServer(ctx, item.Location, s); return nil },
			ServerVariable: func(sv *ServerVariable) error { index.indexServerVariable(ctx, item.Location, sv); return nil },
			ReferencedPathItem: func(rpi *ReferencedPathItem) error {
				index.indexReferencedPathItem(ctx, item.Location, rpi)
				return nil
			},
			ReferencedParameter: func(rp *ReferencedParameter) error {
				index.indexReferencedParameter(ctx, item.Location, rp)
				return nil
			},
			// Schema indexing may recurse into resolved references and is the
			// only handler that can propagate an error out of the walk.
			Schema: func(j *oas3.JSONSchemaReferenceable) error {
				return index.indexSchema(ctx, item.Location, j)
			},
			Discriminator: func(d *oas3.Discriminator) error {
				index.indexDiscriminator(ctx, item.Location, d)
				return nil
			},
			XML: func(x *oas3.XML) error {
				index.indexXML(ctx, item.Location, x)
				return nil
			},
			MediaType: func(mt *MediaType) error {
				index.indexMediaType(ctx, item.Location, mt)
				return nil
			},
			Encoding: func(enc *Encoding) error {
				index.indexEncoding(ctx, item.Location, enc)
				return nil
			},
			ReferencedHeader: func(rh *ReferencedHeader) error {
				index.indexReferencedHeader(ctx, item.Location, rh)
				return nil
			},
			ReferencedExample: func(re *ReferencedExample) error {
				index.indexReferencedExample(ctx, item.Location, re)
				return nil
			},
			Operation: func(op *Operation) error {
				index.indexOperation(ctx, item.Location, op)
				return nil
			},
			ReferencedRequestBody: func(rb *ReferencedRequestBody) error {
				index.indexReferencedRequestBody(ctx, item.Location, rb)
				return nil
			},
			Responses: func(r *Responses) error {
				index.indexResponses(ctx, item.Location, r)
				return nil
			},
			ReferencedResponse: func(rr *ReferencedResponse) error {
				index.indexReferencedResponse(ctx, item.Location, rr)
				return nil
			},
			ReferencedLink: func(rl *ReferencedLink) error {
				index.indexReferencedLink(ctx, item.Location, rl)
				return nil
			},
			ReferencedCallback: func(rc *ReferencedCallback) error {
				index.indexReferencedCallback(ctx, item.Location, rc)
				return nil
			},
			ReferencedSecurityScheme: func(rss *ReferencedSecurityScheme) error {
				index.indexReferencedSecurityScheme(ctx, item.Location, rss)
				return nil
			},
			Security: func(req *SecurityRequirement) error {
				index.indexSecurityRequirement(ctx, item.Location, req)
				return nil
			},
			OAuthFlows: func(of *OAuthFlows) error {
				index.indexOAuthFlows(ctx, item.Location, of)
				return nil
			},
			OAuthFlow: func(of *OAuthFlow) error {
				index.indexOAuthFlow(ctx, item.Location, of)
				return nil
			},
			// Any runs for every node: capture description/summary metadata
			// from any node implementing the relevant interfaces.
			Any: func(a any) error {
				if d, ok := a.(Descriptioner); ok {
					index.indexDescriptionNode(ctx, item.Location, d)
				}
				if s, ok := a.(Summarizer); ok {
					index.indexSummaryNode(ctx, item.Location, s)
				}
				if ds, ok := a.(DescriptionAndSummary); ok {
					index.indexDescriptionAndSummaryNode(ctx, item.Location, ds)
				}
				return nil
			},
		}); err != nil {
			return err
		}
	}

	return nil
}
+
// indexSchema resolves, validates, and categorizes a single JSON schema node.
// References are recursively walked (with circular-reference detection via the
// reference stack); non-reference schemas are bucketed as boolean, component,
// external, or inline. Resolution and validation problems are accumulated on
// the Index rather than returned; only errors from recursively walking a
// resolved reference propagate back to the caller.
func (i *Index) indexSchema(ctx context.Context, loc Locations, schema *oas3.JSONSchemaReferenceable) error {
	// Resolve if needed (do this first to get the resolved schema for tracking)
	if !schema.IsResolved() {
		vErrs, err := schema.Resolve(ctx, i.resolveOpts)
		if err != nil {
			// Record the resolution failure and stop processing this schema;
			// indexing of the rest of the document continues.
			i.resolutionErrs = append(i.resolutionErrs, validation.NewValidationErrorWithDocumentLocation(
				validation.SeverityError,
				"resolution-json-schema",
				err,
				getSchemaErrorNode(schema),
				i.documentPathForSchema(schema),
			))
			return nil
		}
		i.validationErrs = append(i.validationErrs, i.applyDocumentLocation(vErrs, i.documentPathForSchema(schema))...)
		if resolved := schema.GetResolvedSchema(); resolved != nil && i.Doc != nil {
			opts := i.referenceValidationOptions()
			schemaErrs := resolved.Validate(ctx, opts...)
			i.validationErrs = append(i.validationErrs, i.applyDocumentLocation(schemaErrs, i.documentPathForSchema(schema))...)
		}
	}

	// Index the schema based on its type
	if schema.IsBool() {
		if !i.indexedSchemas[schema] {
			i.BooleanSchemas = append(i.BooleanSchemas, &IndexNode[*oas3.JSONSchemaReferenceable]{
				Node: schema,
				Location: loc,
			})
			i.indexedSchemas[schema] = true
		}
		return nil
	}

	if schema.IsReference() {
		// Add to references list (allow duplicates at different locations)
		i.SchemaReferences = append(i.SchemaReferences, &IndexNode[*oas3.JSONSchemaReferenceable]{
			Node: schema,
			Location: loc,
		})

		// Get the $ref target for tracking
		refTarget := getRefTarget(schema)
		if refTarget == "" {
			return nil // Can't track without a ref target
		}

		// IMPORTANT: Check circular reference BEFORE walking
		// A schema might be visited AND currently in the reference stack (circular case)
		for stackIdx, entry := range i.referenceStack {
			if entry.refTarget == refTarget {
				// CIRCULAR REFERENCE DETECTED - this is the SECOND+ encounter
				// Build path segments from first occurrence to current
				pathSegments := i.buildPathSegmentsFromStack(stackIdx, loc)
				externalDocumentPath := ""
				currentDocPath := i.currentDocumentPath()
				if currentDocPath != i.resolveOpts.TargetLocation {
					externalDocumentPath = currentDocPath
				}
				circularChain := i.buildCircularReferenceChain(stackIdx, refTarget)

				// Classify the circular reference
				classification, polymorphicInfo := i.classifyCircularPath(schema, pathSegments, loc)

				if classification == CircularInvalid {
					err := fmt.Errorf("non-terminating circular reference detected: %s", joinReferenceChainWithArrows(circularChain))
					i.circularErrs = append(i.circularErrs, validation.NewValidationErrorWithDocumentLocation(
						validation.SeverityError,
						"circular-reference-invalid",
						err,
						getSchemaErrorNode(schema),
						externalDocumentPath,
					))
				} else if classification == CircularPending && polymorphicInfo != nil {
					// Defer judgment: recorded branches are re-examined in
					// finalizePolymorphicCirculars once all branches are known.
					i.recordPolymorphicBranch(polymorphicInfo)
				}
				// CircularValid - no action needed

				// Stop processing this branch - don't walk the same schema again
				return nil
			}
		}

		// Get the document path for the resolved schema
		info := schema.GetReferenceResolutionInfo()
		var docPath string
		if info != nil {
			docPath = info.AbsoluteDocumentPath
		}

		// Push ref target onto reference stack
		i.referenceStack = append(i.referenceStack, referenceStackEntry{
			refTarget: refTarget,
			location: copyLocations(loc),
		})

		// Push document path onto document stack BEFORE walking
		// This allows nested resolved documents (including returning to main) to
		// attribute errors to the correct document.
		currentDoc := ""
		if len(i.currentDocumentStack) > 0 {
			currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1]
		}
		if docPath != "" && docPath != currentDoc {
			i.currentDocumentStack = append(i.currentDocumentStack, docPath)
			defer func() {
				// Pop from document stack
				// The len > 1 guard keeps the bottom-most (main document) entry
				// from ever being popped.
				if len(i.currentDocumentStack) > 1 {
					i.currentDocumentStack = i.currentDocumentStack[:len(i.currentDocumentStack)-1]
				}
			}()
		}

		// Get the resolved schema and recursively walk it
		// Walk API doesn't walk resolved references automatically - we must walk them
		resolved := schema.GetResolvedSchema()
		if resolved != nil {
			// Convert Concrete to Referenceable for walking
			refableResolved := oas3.ConcreteToReferenceable(resolved)
			if err := buildIndex(ctx, i, refableResolved); err != nil {
				i.referenceStack = i.referenceStack[:len(i.referenceStack)-1]
				return err
			}
		}

		// Pop from reference stack
		i.referenceStack = i.referenceStack[:len(i.referenceStack)-1]

		return nil
	}

	// Non-reference schema (component, external, or inline)
	// Note: We don't use indexedSchemas check here because schemas can be referenced
	// from multiple paths and should be indexed for each occurrence

	// Check if this is a top-level component in the main document
	if isTopLevelComponent(loc, "schemas") {
		if !i.indexedSchemas[schema] {
			i.ComponentSchemas = append(i.ComponentSchemas, &IndexNode[*oas3.JSONSchemaReferenceable]{
				Node: schema,
				Location: loc,
			})
			i.indexedSchemas[schema] = true
		}
		return nil
	}

	// Check if this is a top-level schema in an external document
	// Important: Only mark as external if it's NOT from the main document
	if isTopLevelExternalSchema(loc) {
		if !i.isFromMainDocument(schema) && !i.indexedSchemas[schema] {
			i.ExternalSchemas = append(i.ExternalSchemas, &IndexNode[*oas3.JSONSchemaReferenceable]{
				Node: schema,
				Location: loc,
			})
			i.indexedSchemas[schema] = true
		}
		return nil
	}

	// Everything else is an inline schema
	// Inline schemas can appear multiple times (e.g., same property type in different schemas)
	// but we only index each unique schema object once
	if !i.indexedSchemas[schema] {
		i.InlineSchemas = append(i.InlineSchemas, &IndexNode[*oas3.JSONSchemaReferenceable]{
			Node: schema,
			Location: loc,
		})
		i.indexedSchemas[schema] = true
	}

	return nil
}
+
+// isTopLevelExternalSchema checks if the location represents a top-level schema
+// in an external document (i.e., at the root of an external document, not under /components/).
+func isTopLevelExternalSchema(loc Locations) bool {
+ // Top-level external schemas appear at location "/" (root of external doc)
+ // They have 0 location contexts (empty Locations slice)
+ if len(loc) == 0 {
+ return true
+ }
+
+ // Single context with no ParentField (or empty ParentField) also indicates root
+ if len(loc) == 1 && loc[0].ParentField == "" {
+ return true
+ }
+
+ return false
+}
+
+// isFromMainDocument checks if we're currently walking the main document
+// by checking the current document stack.
+func (i *Index) isFromMainDocument(_ *oas3.JSONSchemaReferenceable) bool {
+ if len(i.currentDocumentStack) == 0 {
+ return true // Safety fallback - assume main document
+ }
+
+ currentDoc := i.currentDocumentStack[len(i.currentDocumentStack)-1]
+ mainDoc := i.resolveOpts.TargetLocation
+
+ return currentDoc == mainDoc
+}
+
+// buildPathSegmentsFromStack builds path segments from a point in the reference stack to current location.
+func (i *Index) buildPathSegmentsFromStack(startStackIdx int, currentLoc Locations) []CircularPathSegment {
+ // Collect all locations from the stack starting point plus current
+ var segments []CircularPathSegment
+
+ // Add segments from each stack entry after the circular start point
+ for stackIdx := startStackIdx; stackIdx < len(i.referenceStack); stackIdx++ {
+ entry := i.referenceStack[stackIdx]
+ for _, locCtx := range entry.location {
+ segments = append(segments, buildPathSegment(locCtx))
+ }
+ }
+
+ // Add segments from current location
+ for _, locCtx := range currentLoc {
+ segments = append(segments, buildPathSegment(locCtx))
+ }
+
+ return segments
+}
+
+func (i *Index) buildCircularReferenceChain(startStackIdx int, refTarget string) []string {
+ chain := make([]string, 0, len(i.referenceStack)-startStackIdx+1)
+ for stackIdx := startStackIdx; stackIdx < len(i.referenceStack); stackIdx++ {
+ chain = append(chain, i.referenceStack[stackIdx].refTarget)
+ }
+ chain = append(chain, refTarget)
+ return chain
+}
+
+func (i *Index) indexExternalDocs(_ context.Context, loc Locations, ed *oas3.ExternalDocumentation) {
+ i.ExternalDocumentation = append(i.ExternalDocumentation, &IndexNode[*oas3.ExternalDocumentation]{
+ Node: ed,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexTag(_ context.Context, loc Locations, tag *Tag) {
+ i.Tags = append(i.Tags, &IndexNode[*Tag]{
+ Node: tag,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexServer(_ context.Context, loc Locations, server *Server) {
+ i.Servers = append(i.Servers, &IndexNode[*Server]{
+ Node: server,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexServerVariable(_ context.Context, loc Locations, serverVariable *ServerVariable) {
+ i.ServerVariables = append(i.ServerVariables, &IndexNode[*ServerVariable]{
+ Node: serverVariable,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexReferencedPathItem(ctx context.Context, loc Locations, pathItem *ReferencedPathItem) {
+ if pathItem == nil {
+ return
+ }
+
+ if pathItem.IsReference() && !pathItem.IsResolved() {
+ resolveAndValidateReference(i, ctx, pathItem)
+ }
+
+ // Index description and summary if both are present
+ // For PathItems wrapped in References, we need to get the underlying PathItem
+ obj := pathItem.GetObject()
+ if obj != nil {
+ desc := obj.GetDescription()
+ summary := obj.GetSummary()
+
+ if desc != "" {
+ i.indexDescriptionNode(ctx, loc, obj)
+ }
+ if summary != "" {
+ i.indexSummaryNode(ctx, loc, obj)
+ }
+ if desc != "" && summary != "" {
+ i.indexDescriptionAndSummaryNode(ctx, loc, obj)
+ }
+ }
+
+ // Categorize path items similarly to schemas
+ if pathItem.IsReference() {
+ i.PathItemReferences = append(i.PathItemReferences, &IndexNode[*ReferencedPathItem]{
+ Node: pathItem,
+ Location: loc,
+ })
+ return
+ }
+
+ // Check if this is a component path item
+ if isTopLevelComponent(loc, "pathItems") {
+ i.ComponentPathItems = append(i.ComponentPathItems, &IndexNode[*ReferencedPathItem]{
+ Node: pathItem,
+ Location: loc,
+ })
+ return
+ }
+
+ // Check if this is a top-level path item in an external document
+ // External path items appear at location "/" (root of external doc)
+ if isTopLevelExternalSchema(loc) {
+ i.ExternalPathItems = append(i.ExternalPathItems, &IndexNode[*ReferencedPathItem]{
+ Node: pathItem,
+ Location: loc,
+ })
+ return
+ }
+
+ // Everything else is an inline path item
+ i.InlinePathItems = append(i.InlinePathItems, &IndexNode[*ReferencedPathItem]{
+ Node: pathItem,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexOperation(_ context.Context, loc Locations, operation *Operation) {
+ if operation == nil {
+ return
+ }
+ i.Operations = append(i.Operations, &IndexNode[*Operation]{
+ Node: operation,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexReferencedParameter(ctx context.Context, loc Locations, param *ReferencedParameter) {
+ if param == nil {
+ return
+ }
+
+ if param.IsReference() && !param.IsResolved() {
+ resolveAndValidateReference(i, ctx, param)
+ }
+
+ if param.IsReference() {
+ i.ParameterReferences = append(i.ParameterReferences, &IndexNode[*ReferencedParameter]{
+ Node: param,
+ Location: loc,
+ })
+ return
+ }
+
+ if isTopLevelComponent(loc, "parameters") {
+ i.ComponentParameters = append(i.ComponentParameters, &IndexNode[*ReferencedParameter]{
+ Node: param,
+ Location: loc,
+ })
+ return
+ }
+
+ i.InlineParameters = append(i.InlineParameters, &IndexNode[*ReferencedParameter]{
+ Node: param,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexResponses(_ context.Context, loc Locations, responses *Responses) {
+ if responses == nil {
+ return
+ }
+ i.Responses = append(i.Responses, &IndexNode[*Responses]{
+ Node: responses,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexReferencedResponse(ctx context.Context, loc Locations, resp *ReferencedResponse) {
+ if resp == nil {
+ return
+ }
+
+ if resp.IsReference() && !resp.IsResolved() {
+ resolveAndValidateReference(i, ctx, resp)
+ }
+
+ if resp.IsReference() {
+ i.ResponseReferences = append(i.ResponseReferences, &IndexNode[*ReferencedResponse]{
+ Node: resp,
+ Location: loc,
+ })
+ return
+ }
+
+ if isTopLevelComponent(loc, "responses") {
+ i.ComponentResponses = append(i.ComponentResponses, &IndexNode[*ReferencedResponse]{
+ Node: resp,
+ Location: loc,
+ })
+ return
+ }
+
+ i.InlineResponses = append(i.InlineResponses, &IndexNode[*ReferencedResponse]{
+ Node: resp,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexReferencedRequestBody(ctx context.Context, loc Locations, rb *ReferencedRequestBody) {
+ if rb == nil {
+ return
+ }
+
+ if rb.IsReference() && !rb.IsResolved() {
+ resolveAndValidateReference(i, ctx, rb)
+ }
+
+ if rb.IsReference() {
+ i.RequestBodyReferences = append(i.RequestBodyReferences, &IndexNode[*ReferencedRequestBody]{
+ Node: rb,
+ Location: loc,
+ })
+ return
+ }
+
+ if isTopLevelComponent(loc, "requestBodies") {
+ i.ComponentRequestBodies = append(i.ComponentRequestBodies, &IndexNode[*ReferencedRequestBody]{
+ Node: rb,
+ Location: loc,
+ })
+ return
+ }
+
+ i.InlineRequestBodies = append(i.InlineRequestBodies, &IndexNode[*ReferencedRequestBody]{
+ Node: rb,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexReferencedHeader(ctx context.Context, loc Locations, header *ReferencedHeader) {
+ if header == nil {
+ return
+ }
+
+ if header.IsReference() && !header.IsResolved() {
+ resolveAndValidateReference(i, ctx, header)
+ }
+
+ if header.IsReference() {
+ i.HeaderReferences = append(i.HeaderReferences, &IndexNode[*ReferencedHeader]{
+ Node: header,
+ Location: loc,
+ })
+ return
+ }
+
+ if isTopLevelComponent(loc, "headers") {
+ i.ComponentHeaders = append(i.ComponentHeaders, &IndexNode[*ReferencedHeader]{
+ Node: header,
+ Location: loc,
+ })
+ return
+ }
+
+ i.InlineHeaders = append(i.InlineHeaders, &IndexNode[*ReferencedHeader]{
+ Node: header,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexReferencedExample(ctx context.Context, loc Locations, example *ReferencedExample) {
+ if example == nil {
+ return
+ }
+
+ if example.IsReference() && !example.IsResolved() {
+ resolveAndValidateReference(i, ctx, example)
+ }
+
+ if example.IsReference() {
+ i.ExampleReferences = append(i.ExampleReferences, &IndexNode[*ReferencedExample]{
+ Node: example,
+ Location: loc,
+ })
+ return
+ }
+
+ if isTopLevelComponent(loc, "examples") {
+ i.ComponentExamples = append(i.ComponentExamples, &IndexNode[*ReferencedExample]{
+ Node: example,
+ Location: loc,
+ })
+ return
+ }
+
+ i.InlineExamples = append(i.InlineExamples, &IndexNode[*ReferencedExample]{
+ Node: example,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexReferencedLink(ctx context.Context, loc Locations, link *ReferencedLink) {
+ if link == nil {
+ return
+ }
+
+ if link.IsReference() && !link.IsResolved() {
+ resolveAndValidateReference(i, ctx, link)
+ }
+
+ if link.IsReference() {
+ i.LinkReferences = append(i.LinkReferences, &IndexNode[*ReferencedLink]{
+ Node: link,
+ Location: loc,
+ })
+ return
+ }
+
+ if isTopLevelComponent(loc, "links") {
+ i.ComponentLinks = append(i.ComponentLinks, &IndexNode[*ReferencedLink]{
+ Node: link,
+ Location: loc,
+ })
+ return
+ }
+
+ i.InlineLinks = append(i.InlineLinks, &IndexNode[*ReferencedLink]{
+ Node: link,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexReferencedCallback(ctx context.Context, loc Locations, callback *ReferencedCallback) {
+ if callback == nil {
+ return
+ }
+
+ if callback.IsReference() && !callback.IsResolved() {
+ resolveAndValidateReference(i, ctx, callback)
+ }
+
+ if callback.IsReference() {
+ i.CallbackReferences = append(i.CallbackReferences, &IndexNode[*ReferencedCallback]{
+ Node: callback,
+ Location: loc,
+ })
+ return
+ }
+
+ if isTopLevelComponent(loc, "callbacks") {
+ i.ComponentCallbacks = append(i.ComponentCallbacks, &IndexNode[*ReferencedCallback]{
+ Node: callback,
+ Location: loc,
+ })
+ return
+ }
+
+ i.InlineCallbacks = append(i.InlineCallbacks, &IndexNode[*ReferencedCallback]{
+ Node: callback,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexReferencedSecurityScheme(ctx context.Context, loc Locations, ss *ReferencedSecurityScheme) {
+ if ss == nil {
+ return
+ }
+
+ if ss.IsReference() && !ss.IsResolved() {
+ resolveAndValidateReference(i, ctx, ss)
+ }
+
+ if ss.IsReference() {
+ i.SecuritySchemeReferences = append(i.SecuritySchemeReferences, &IndexNode[*ReferencedSecurityScheme]{
+ Node: ss,
+ Location: loc,
+ })
+ return
+ }
+
+ // SecuritySchemes are always components (no inline security schemes)
+ i.ComponentSecuritySchemes = append(i.ComponentSecuritySchemes, &IndexNode[*ReferencedSecurityScheme]{
+ Node: ss,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexSecurityRequirement(_ context.Context, loc Locations, req *SecurityRequirement) {
+ if req == nil {
+ return
+ }
+
+ i.SecurityRequirements = append(i.SecurityRequirements, &IndexNode[*SecurityRequirement]{
+ Node: req,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexDiscriminator(_ context.Context, loc Locations, discriminator *oas3.Discriminator) {
+ if discriminator == nil {
+ return
+ }
+ i.Discriminators = append(i.Discriminators, &IndexNode[*oas3.Discriminator]{
+ Node: discriminator,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexXML(_ context.Context, loc Locations, xml *oas3.XML) {
+ if xml == nil {
+ return
+ }
+ i.XMLs = append(i.XMLs, &IndexNode[*oas3.XML]{
+ Node: xml,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexMediaType(_ context.Context, loc Locations, mediaType *MediaType) {
+ if mediaType == nil {
+ return
+ }
+ i.MediaTypes = append(i.MediaTypes, &IndexNode[*MediaType]{
+ Node: mediaType,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexEncoding(_ context.Context, loc Locations, encoding *Encoding) {
+ if encoding == nil {
+ return
+ }
+ i.Encodings = append(i.Encodings, &IndexNode[*Encoding]{
+ Node: encoding,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexOAuthFlows(_ context.Context, loc Locations, flows *OAuthFlows) {
+ if flows == nil {
+ return
+ }
+ i.OAuthFlows = append(i.OAuthFlows, &IndexNode[*OAuthFlows]{
+ Node: flows,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexOAuthFlow(_ context.Context, loc Locations, flow *OAuthFlow) {
+ if flow == nil {
+ return
+ }
+ i.OAuthFlowItems = append(i.OAuthFlowItems, &IndexNode[*OAuthFlow]{
+ Node: flow,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexDescriptionNode(_ context.Context, loc Locations, d Descriptioner) {
+ if d == nil {
+ return
+ }
+ i.DescriptionNodes = append(i.DescriptionNodes, &IndexNode[Descriptioner]{
+ Node: d,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexSummaryNode(_ context.Context, loc Locations, s Summarizer) {
+ if s == nil {
+ return
+ }
+ i.SummaryNodes = append(i.SummaryNodes, &IndexNode[Summarizer]{
+ Node: s,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexDescriptionAndSummaryNode(_ context.Context, loc Locations, ds DescriptionAndSummary) {
+ if ds == nil {
+ return
+ }
+ i.DescriptionAndSummaryNodes = append(i.DescriptionAndSummaryNodes, &IndexNode[DescriptionAndSummary]{
+ Node: ds,
+ Location: loc,
+ })
+}
+
+func (i *Index) documentPathForSchema(schema *oas3.JSONSchemaReferenceable) string {
+ if i == nil || schema == nil {
+ return ""
+ }
+
+ if info := schema.GetReferenceResolutionInfo(); info != nil {
+ if info.AbsoluteDocumentPath != i.resolveOpts.TargetLocation {
+ return info.AbsoluteDocumentPath
+ }
+ if len(i.currentDocumentStack) > 0 {
+ current := i.currentDocumentStack[len(i.currentDocumentStack)-1]
+ if current != i.resolveOpts.TargetLocation {
+ return current
+ }
+ }
+ return ""
+ }
+
+ if len(i.currentDocumentStack) > 0 {
+ current := i.currentDocumentStack[len(i.currentDocumentStack)-1]
+ if current != i.resolveOpts.TargetLocation {
+ return current
+ }
+ return ""
+ }
+
+ return ""
+}
+
+func (i *Index) applyDocumentLocation(errs []error, documentPath string) []error {
+ if len(errs) == 0 || documentPath == "" {
+ return errs
+ }
+
+ updated := make([]error, 0, len(errs))
+ for _, err := range errs {
+ if err == nil {
+ continue
+ }
+ var vErr *validation.Error
+ if errors.As(err, &vErr) && vErr != nil {
+ if vErr.DocumentLocation == "" {
+ vErr.DocumentLocation = documentPath
+ }
+ updated = append(updated, vErr)
+ continue
+ }
+ updated = append(updated, err)
+ }
+
+ return updated
+}
+
+func (i *Index) referenceValidationOptions() []validation.Option {
+ if i == nil || i.Doc == nil {
+ return nil
+ }
+
+ return []validation.Option{
+ validation.WithContextObject(i.Doc),
+ validation.WithContextObject(&oas3.ParentDocumentVersion{OpenAPI: pointer.From(i.Doc.OpenAPI)}),
+ }
+}
+
+func documentPathForReference[T any, V interfaces.Validator[T], C marshaller.CoreModeler](i *Index, ref *Reference[T, V, C]) string {
+ if i == nil || ref == nil {
+ return ""
+ }
+
+ if info := ref.GetReferenceResolutionInfo(); info != nil {
+ if info.AbsoluteDocumentPath != i.resolveOpts.TargetLocation {
+ return info.AbsoluteDocumentPath
+ }
+ return ""
+ }
+
+ return ""
+}
+
+func resolveAndValidateReference[T any, V interfaces.Validator[T], C marshaller.CoreModeler](i *Index, ctx context.Context, ref *Reference[T, V, C]) {
+ if i == nil || ref == nil {
+ return
+ }
+
+ if _, err := ref.Resolve(ctx, i.resolveOpts); err != nil {
+ i.resolutionErrs = append(i.resolutionErrs, validation.NewValidationErrorWithDocumentLocation(
+ validation.SeverityError,
+ "resolution-openapi-reference",
+ err,
+ nil,
+ documentPathForReference(i, ref),
+ ))
+ return
+ }
+
+ obj := ref.GetObject()
+ if obj == nil || i.Doc == nil {
+ return
+ }
+
+ var validator V
+ if v, ok := any(obj).(V); ok {
+ validator = v
+ validationErrs := validator.Validate(ctx, i.referenceValidationOptions()...)
+ i.validationErrs = append(i.validationErrs, i.applyDocumentLocation(validationErrs, documentPathForReference(i, ref))...)
+ }
+}
+
+// isTopLevelComponent checks if the location represents a top-level component definition.
+// A top-level component has the path: /components/{componentType}/{name}
+func isTopLevelComponent(loc Locations, componentType string) bool {
+ // Location should be exactly: /components/{componentType}/{name}
+ // Length 2: [components context, {componentType}/{name} context]
+ if len(loc) != 2 {
+ return false
+ }
+
+ // First element: ParentField = "components"
+ if loc[0].ParentField != "components" {
+ return false
+ }
+
+ // Second element: ParentField = componentType, ParentKey = name
+ if loc[1].ParentField != componentType || loc[1].ParentKey == nil {
+ return false
+ }
+
+ return true
+}
+
+// getParentSchema extracts the parent schema from a LocationContext using the ParentMatchFunc.
+func getParentSchema(loc LocationContext) *oas3.Schema {
+ var parentSchema *oas3.Schema
+
+ // Use the ParentMatchFunc to capture the parent node
+ _ = loc.ParentMatchFunc(Matcher{
+ Schema: func(s *oas3.JSONSchemaReferenceable) error {
+ if s == nil {
+ return nil
+ }
+ if !s.IsBool() && !s.IsReference() {
+ parentSchema = s.GetSchema()
+ } else if s.IsReference() {
+ // For references, get the resolved schema
+ if resolved := s.GetResolvedSchema(); resolved != nil && !resolved.IsBool() {
+ parentSchema = resolved.GetSchema()
+ }
+ }
+ return nil
+ },
+ })
+
+ return parentSchema
+}
+
// buildPathSegment creates a CircularPathSegment with constraint info from the parent schema.
// The segment records where in the parent the child schema hangs (field,
// property name, branch index) plus the constraints relevant to circular
// reference termination: nullability, required-ness, minItems, minProperties.
func buildPathSegment(loc LocationContext) CircularPathSegment {
	segment := CircularPathSegment{
		Field: loc.ParentField,
	}

	if loc.ParentKey != nil {
		segment.PropertyName = *loc.ParentKey
	}
	if loc.ParentIndex != nil {
		segment.BranchIndex = *loc.ParentIndex
	}

	// Without a resolvable parent schema we can only report positional info.
	parent := getParentSchema(loc)
	if parent == nil {
		return segment
	}

	// Check if parent schema is nullable (termination point)
	segment.IsNullable = isNullable(parent)

	// Extract constraints based on field type
	switch loc.ParentField {
	case "properties":
		if loc.ParentKey != nil {
			// Check if property is required
			for _, req := range parent.GetRequired() {
				if req == *loc.ParentKey {
					segment.IsRequired = true
					break
				}
			}
		}
	case "items":
		segment.ArrayMinItems = parent.GetMinItems() // Returns 0 if nil (default)
	case "additionalProperties":
		if minProps := parent.GetMinProperties(); minProps != nil {
			segment.MinProperties = *minProps
		}
		// Default is 0 (empty object allowed)
	}

	return segment
}
+
+// isNullable checks if a schema allows null values (termination point for circular refs).
+func isNullable(schema *oas3.Schema) bool {
+ if schema == nil {
+ return false
+ }
+
+ // OAS 3.0 style: nullable: true
+ if schema.GetNullable() {
+ return true
+ }
+
+ // OAS 3.1 style: type includes "null"
+ types := schema.GetType()
+ for _, t := range types {
+ if t == oas3.SchemaTypeNull {
+ return true
+ }
+ }
+
+ return false
+}
+
// classifyCircularPath determines if the path allows termination.
// Returns (classification, polymorphicInfo) where polymorphicInfo is set if pending.
// Segments are examined in order; the FIRST segment that provides a
// termination point (nullable, optional property, empty-array/object default)
// makes the cycle valid. oneOf/anyOf defer the decision to post-processing
// since only the union of all branches decides validity.
func (i *Index) classifyCircularPath(schema *oas3.JSONSchemaReferenceable, segments []CircularPathSegment, loc Locations) (CircularClassification, *PolymorphicCircularRef) {
	// Check if any segment allows termination
	for segIdx, segment := range segments {
		// Check nullable at any point in the path
		if segment.IsNullable {
			return CircularValid, nil
		}

		switch segment.Field {
		case "properties":
			// Optional property = valid termination
			if !segment.IsRequired {
				return CircularValid, nil
			}

		case "items":
			// Empty array terminates if minItems == 0 (default)
			if segment.ArrayMinItems == 0 {
				return CircularValid, nil
			}

		case "additionalProperties":
			// Empty object terminates if minProperties == 0 (default)
			if segment.MinProperties == 0 {
				return CircularValid, nil
			}

		case "oneOf", "anyOf":
			// Mark for post-processing - need to check ALL branches
			// Create polymorphic tracking info
			// Map the segment index back to an index into loc; clamp at 0 when
			// the stack contributed more segments than loc has contexts.
			parentLocLen := len(loc) - len(segments) + segIdx
			if parentLocLen < 0 {
				parentLocLen = 0
			}
			parentLoc := copyLocations(loc[:parentLocLen])

			polymorphicInfo := &PolymorphicCircularRef{
				ParentSchema: schema,
				ParentLocation: parentLoc,
				Field: segment.Field,
				BranchResults: make(map[int]CircularClassification),
				TotalBranches: countPolymorphicBranches(schema, segment.Field),
			}
			// Record this branch as potentially invalid (recurses)
			polymorphicInfo.BranchResults[segment.BranchIndex] = CircularInvalid
			return CircularPending, polymorphicInfo

		case "allOf":
			// For allOf, if ANY branch has invalid circular ref, the whole thing is invalid
			// because ALL branches must be satisfied
			// Check if rest of path allows termination
			remaining := segments[segIdx+1:]
			if !pathAllowsTermination(remaining) {
				return CircularInvalid, nil
			}
		}
	}

	// No termination point found in non-polymorphic path
	return CircularInvalid, nil
}
+
+// countPolymorphicBranches counts the number of branches in a oneOf/anyOf schema.
+func countPolymorphicBranches(schema *oas3.JSONSchemaReferenceable, field string) int {
+ if schema == nil || schema.IsBool() {
+ return 0
+ }
+
+ innerSchema := schema.GetSchema()
+ if innerSchema == nil {
+ return 0
+ }
+
+ switch field {
+ case "oneOf":
+ if oneOf := innerSchema.GetOneOf(); oneOf != nil {
+ return len(oneOf)
+ }
+ case "anyOf":
+ if anyOf := innerSchema.GetAnyOf(); anyOf != nil {
+ return len(anyOf)
+ }
+ case "allOf":
+ if allOf := innerSchema.GetAllOf(); allOf != nil {
+ return len(allOf)
+ }
+ }
+
+ return 0
+}
+
+// pathAllowsTermination checks if any segment in the remaining path allows termination.
+func pathAllowsTermination(segments []CircularPathSegment) bool {
+ for _, seg := range segments {
+ if seg.IsNullable {
+ return true
+ }
+
+ switch seg.Field {
+ case "properties":
+ if !seg.IsRequired {
+ return true
+ }
+ case "items":
+ if seg.ArrayMinItems == 0 {
+ return true
+ }
+ case "additionalProperties":
+ if seg.MinProperties == 0 {
+ return true
+ }
+ case "oneOf", "anyOf":
+ // Assume polymorphic branches might provide termination
+ return true
+ }
+ }
+ return false
+}
+
// joinReferenceChainWithArrows renders a reference chain as "a -> b -> c" for
// use in circular-reference error messages. An empty chain yields "".
func joinReferenceChainWithArrows(chain []string) string {
	// strings.Join already handles the empty and single-element cases, so the
	// hand-rolled Builder loop is unnecessary.
	return strings.Join(chain, " -> ")
}
+
+// recordPolymorphicBranch records a polymorphic branch for post-processing.
+func (i *Index) recordPolymorphicBranch(info *PolymorphicCircularRef) {
+ if info == nil {
+ return
+ }
+ i.polymorphicRefs = append(i.polymorphicRefs, info)
+}
+
// finalizePolymorphicCirculars is called after all walking completes.
// It analyzes polymorphic schemas to determine if ALL branches recurse.
func (i *Index) finalizePolymorphicCirculars() {
	// Group by parent schema
	grouped := make(map[*oas3.JSONSchemaReferenceable]*PolymorphicCircularRef)

	for _, ref := range i.polymorphicRefs {
		existing, found := grouped[ref.ParentSchema]
		if found {
			// Merge branch results
			// NOTE: the first record's Field/TotalBranches/ParentLocation win;
			// later records for the same parent only contribute their
			// per-branch classifications.
			for idx, classification := range ref.BranchResults {
				existing.BranchResults[idx] = classification
			}
		} else {
			grouped[ref.ParentSchema] = ref
		}
	}

	// Analyze each polymorphic schema
	for _, ref := range grouped {
		switch ref.Field {
		case "oneOf", "anyOf":
			// Invalid only if ALL branches have invalid circular refs
			allInvalid := true
			for branchIdx := 0; branchIdx < ref.TotalBranches; branchIdx++ {
				classification, found := ref.BranchResults[branchIdx]
				if !found || classification != CircularInvalid {
					// This branch either doesn't recurse or has valid termination
					allInvalid = false
					break
				}
			}

			if allInvalid && ref.TotalBranches > 0 {
				i.circularErrs = append(i.circularErrs, validation.NewValidationErrorWithDocumentLocation(
					validation.SeverityError,
					"circular-reference-invalid",
					fmt.Errorf("non-terminating circular reference: all %s branches recurse with no base case", ref.Field),
					getSchemaErrorNode(ref.ParentSchema),
					i.documentPathForSchema(ref.ParentSchema),
				))
			}

		case "allOf":
			// Invalid if ANY branch has invalid circular ref (already handled inline in classifyCircularPath)
			// This case is here for completeness if we need cross-branch tracking
		}
	}
}
+
+// copyLocations creates a copy of the Locations slice.
+func copyLocations(loc Locations) Locations {
+ if loc == nil {
+ return nil
+ }
+ result := make(Locations, len(loc))
+ copy(result, loc)
+ return result
+}
+
+// getRefTarget extracts the absolute $ref target string from a schema reference.
+// Uses the resolved AbsoluteReference from resolution cache for normalization.
+func getRefTarget(schema *oas3.JSONSchemaReferenceable) string {
+ if schema == nil || !schema.IsReference() {
+ return ""
+ }
+
+ if !schema.IsResolved() {
+ panic("getRefTarget called on unresolved schema reference")
+ }
+
+ info := schema.GetReferenceResolutionInfo()
+ if info == nil {
+ return ""
+ }
+
+ return info.AbsoluteReference.String()
+}
+
+// getSchemaErrorNode returns an appropriate YAML node for error reporting.
+func getSchemaErrorNode(schema *oas3.JSONSchemaReferenceable) *yaml.Node {
+ if schema == nil {
+ return nil
+ }
+ if schema.IsBool() {
+ return nil
+ }
+ innerSchema := schema.GetSchema()
+ if innerSchema == nil {
+ return nil
+ }
+ // Try to get the $ref node if it's a reference
+ if core := innerSchema.GetCore(); core != nil && core.Ref.Present {
+ return core.Ref.GetKeyNodeOrRoot(innerSchema.GetRootNode())
+ }
+ return innerSchema.GetRootNode()
+}
diff --git a/openapi/index_external_test.go b/openapi/index_external_test.go
new file mode 100644
index 0000000..8ff1be5
--- /dev/null
+++ b/openapi/index_external_test.go
@@ -0,0 +1,478 @@
+package openapi_test
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "io/fs"
+ "net/http"
+ "strings"
+ "testing"
+
+ "github.com/speakeasy-api/openapi/openapi"
+ "github.com/speakeasy-api/openapi/references"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+// MockVirtualFS implements system.VirtualFS for testing external file references.
+// It serves file contents from an in-memory map keyed by exact path string.
+type MockVirtualFS struct {
+	// files maps path -> file content
+	files map[string]string
+}
+
+// NewMockVirtualFS returns an empty in-memory filesystem ready for AddFile calls.
+func NewMockVirtualFS() *MockVirtualFS {
+	return &MockVirtualFS{
+		files: make(map[string]string),
+	}
+}
+
+// AddFile registers content to be served when path is opened.
+func (m *MockVirtualFS) AddFile(path, content string) {
+	m.files[path] = content
+}
+
+// Open returns the registered content wrapped as an fs.File, or an error for
+// paths that were never added (simulating a missing file).
+func (m *MockVirtualFS) Open(name string) (fs.File, error) {
+	content, exists := m.files[name]
+	if !exists {
+		return nil, fmt.Errorf("file not found: %s", name)
+	}
+	return &MockFile{content: content}, nil
+}
+
+// MockFile implements fs.File over an in-memory string.
+type MockFile struct {
+	// content is the full file body
+	content string
+	// pos is the current read offset into content
+	pos int
+}
+
+// Read copies bytes from the current offset into p and advances the offset.
+// Returns io.EOF once the entire content has been consumed.
+func (m *MockFile) Read(p []byte) (n int, err error) {
+	if m.pos >= len(m.content) {
+		return 0, io.EOF
+	}
+	n = copy(p, m.content[m.pos:])
+	m.pos += n
+	return n, nil
+}
+
+// Close is a no-op; there is no underlying resource to release.
+func (m *MockFile) Close() error {
+	return nil
+}
+
+// Stat is not exercised by the code under test and always returns an error.
+func (m *MockFile) Stat() (fs.FileInfo, error) {
+	return nil, errors.New("not implemented")
+}
+
+// MockHTTPClient implements system.Client for testing external HTTP references.
+// It maps exact request URL strings to canned response bodies.
+type MockHTTPClient struct {
+	// responses maps URL -> response body
+	responses map[string]string
+}
+
+// NewMockHTTPClient returns a client with no responses configured.
+func NewMockHTTPClient() *MockHTTPClient {
+	return &MockHTTPClient{
+		responses: make(map[string]string),
+	}
+}
+
+// AddResponse registers body to be returned for requests to url.
+func (m *MockHTTPClient) AddResponse(url, body string) {
+	m.responses[url] = body
+}
+
+// Do serves the canned body for the request's URL with a 200 status.
+// Unconfigured URLs produce a transport-level error (not a 404 response).
+func (m *MockHTTPClient) Do(req *http.Request) (*http.Response, error) {
+	url := req.URL.String()
+	body, exists := m.responses[url]
+	if !exists {
+		return nil, fmt.Errorf("no response configured for URL: %s", url)
+	}
+	return &http.Response{
+		StatusCode: http.StatusOK,
+		Body: io.NopCloser(strings.NewReader(body)),
+		Header: make(http.Header),
+	}, nil
+}
+
+// setupComprehensiveExternalRefs creates a complete test environment with:
+// - File-based external references
+// - HTTP-based external references
+// - Valid and invalid circular references
+// - Referenced and unreferenced schemas
+func setupComprehensiveExternalRefs(t *testing.T) (*openapi.Index, *MockVirtualFS, *MockHTTPClient) {
+ t.Helper()
+ ctx := t.Context()
+
+ vfs := NewMockVirtualFS()
+ httpClient := NewMockHTTPClient()
+
+ // Expected index counts (verified by tests):
+ // ExternalDocumentation: 2 (main doc + users tag)
+ // Tags: 2 (users, products)
+ // Servers: 2 (production, staging)
+ // ServerVariables: 1 (version variable)
+ // BooleanSchemas: 2 (true, false from additionalProperties)
+ // InlineSchemas: 10 (9 from external + 1 from LocalSchema.id property)
+ // ComponentSchemas: 2 (LocalSchema, AnotherLocal)
+ // ExternalSchemas: 6 (UserResponse, User, Address, Product, Category, TreeNode)
+ // SchemaReferences: 9 (all $ref pointers including circulars)
+ // CircularErrors: 1 (Product<->Category invalid circular)
+
+ // TODO: PathItems indexing (currently marked TODO in buildIndex)
+
+ // Main API document
+ vfs.AddFile("/api/openapi.yaml", `
+openapi: "3.1.0"
+info:
+ title: Comprehensive API
+ version: 1.0.0
+externalDocs:
+ url: https://docs.example.com
+ description: Main API Documentation
+tags:
+ - name: users
+ description: User operations
+ externalDocs:
+ url: https://docs.example.com/users
+ - name: products
+ description: Product operations
+servers:
+ - url: https://api.example.com/{version}
+ description: Production server
+ variables:
+ version:
+ default: v1
+ enum: [v1, v2]
+ - url: https://staging.example.com
+ description: Staging server
+paths:
+ /users:
+ get:
+ operationId: getUsers
+ responses:
+ "200":
+ description: Users response
+ content:
+ application/json:
+ schema:
+ $ref: 'schemas/user.yaml#/UserResponse'
+ /products:
+ get:
+ operationId: getProducts
+ responses:
+ "200":
+ description: Products response
+ content:
+ application/json:
+ schema:
+ $ref: 'https://schemas.example.com/product.yaml#/Product'
+ /trees:
+ get:
+ operationId: getTrees
+ responses:
+ "200":
+ description: Trees response
+ content:
+ application/json:
+ schema:
+ $ref: 'schemas/tree.yaml#/TreeNode'
+components:
+ schemas:
+ LocalSchema:
+ type: object
+ additionalProperties: true
+ properties:
+ id:
+ type: integer
+ AnotherLocal:
+ type: object
+ additionalProperties: false
+`)
+
+ // External file: User schemas with valid circular (optional property)
+ vfs.AddFile("/api/schemas/user.yaml", `
+UserResponse:
+ type: object
+ properties:
+ user:
+ $ref: '#/User'
+User:
+ type: object
+ required: [id, name]
+ properties:
+ id:
+ type: integer
+ name:
+ type: string
+ address:
+ $ref: '#/Address'
+Address:
+ type: object
+ properties:
+ street:
+ type: string
+ user:
+ $ref: '#/User'
+# Unreferenced schema in external file
+UnreferencedUser:
+ type: object
+ properties:
+ neverUsed:
+ type: string
+`)
+
+ // External file: Tree with valid self-reference (array with minItems=0)
+ vfs.AddFile("/api/schemas/tree.yaml", `
+TreeNode:
+ type: object
+ properties:
+ value:
+ type: string
+ children:
+ type: array
+ items:
+ $ref: '#/TreeNode'
+# Another unreferenced schema
+UnusedTreeType:
+ type: object
+ properties:
+ unusedProp:
+ type: boolean
+`)
+
+ // Unreferenced file - nothing from here should appear in index
+ vfs.AddFile("/api/schemas/completely-unreferenced.yaml", `
+TotallyUnused:
+ type: object
+ properties:
+ shouldNotAppear:
+ type: string
+`)
+
+ // External HTTP: Product with invalid circular (required + minItems)
+ httpClient.AddResponse("https://schemas.example.com/product.yaml", `
+Product:
+ type: object
+ required: [id, category]
+ properties:
+ id:
+ type: integer
+ name:
+ type: string
+ category:
+ $ref: '#/Category'
+Category:
+ type: object
+ required: [products]
+ properties:
+ name:
+ type: string
+ products:
+ type: array
+ minItems: 1
+ items:
+ $ref: '#/Product'
+# Unreferenced in HTTP document
+UnreferencedCategory:
+ type: object
+ properties:
+ alsoNeverUsed:
+ type: integer
+`)
+
+ // Unmarshal and build index
+ doc, validationErrs, err := openapi.Unmarshal(ctx, strings.NewReader(vfs.files["/api/openapi.yaml"]))
+ require.NoError(t, err)
+ require.Empty(t, validationErrs)
+
+ resolveOpts := references.ResolveOptions{
+ TargetLocation: "/api/openapi.yaml",
+ RootDocument: doc,
+ TargetDocument: doc,
+ VirtualFS: vfs,
+ HTTPClient: httpClient,
+ }
+ idx := openapi.BuildIndex(ctx, doc, resolveOpts)
+ require.NotNil(t, idx)
+
+ return idx, vfs, httpClient
+}
+
+func TestBuildIndex_ExternalReferences_Comprehensive(t *testing.T) {
+ t.Parallel()
+
+ idx, _, _ := setupComprehensiveExternalRefs(t)
+
+ tests := []struct {
+ name string
+ assertion func(t *testing.T, idx *openapi.Index)
+ }{
+ {
+ name: "external schemas count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // External schemas: UserResponse, User, Address, Product, Category, TreeNode (6)
+ assert.Len(t, idx.ExternalSchemas, 6, "should have exactly 6 external schemas")
+ },
+ },
+ {
+ name: "external documentation count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // ExternalDocs: main doc + users tag
+ assert.Len(t, idx.ExternalDocumentation, 2, "should have exactly 2 external documentation")
+ },
+ },
+ {
+ name: "tags count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // Tags: users, products
+ assert.Len(t, idx.Tags, 2, "should have exactly 2 tags")
+ },
+ },
+ {
+ name: "servers count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // Servers: production, staging
+ assert.Len(t, idx.Servers, 2, "should have exactly 2 servers")
+ },
+ },
+ {
+ name: "server variables count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // ServerVariables: version
+ assert.Len(t, idx.ServerVariables, 1, "should have exactly 1 server variable")
+ },
+ },
+ {
+ name: "boolean schemas count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // BooleanSchemas: true, false from additionalProperties
+ assert.Len(t, idx.BooleanSchemas, 2, "should have exactly 2 boolean schemas")
+ },
+ },
+ {
+ name: "component schemas count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // ComponentSchemas: LocalSchema, AnotherLocal
+ assert.Len(t, idx.ComponentSchemas, 2, "should have exactly 2 component schemas")
+ },
+ },
+ {
+ name: "schema references count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // Schema references: 9 $ref pointers total
+ assert.Len(t, idx.SchemaReferences, 9, "should have exactly 9 schema references")
+ },
+ },
+ {
+ name: "inline property schemas count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // Inline schemas: 9 from external + 1 from LocalSchema.id
+ assert.Len(t, idx.InlineSchemas, 10, "should have exactly 10 inline schemas")
+ },
+ },
+ {
+ name: "inline path items count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // InlinePathItems: /users, /products, /trees
+ assert.Len(t, idx.InlinePathItems, 3, "should have exactly 3 inline path items")
+ },
+ },
+ {
+ name: "operations count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // Operations: getUsers, getProducts, getTrees
+ assert.Len(t, idx.Operations, 3, "should have exactly 3 operations")
+ },
+ },
+ {
+ name: "inline responses count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // InlineResponses: 200 response for each operation
+ assert.Len(t, idx.InlineResponses, 3, "should have exactly 3 inline responses")
+ },
+ },
+ {
+ name: "circular error count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // Should detect 1 invalid circular: Product<->Category
+ assert.Len(t, idx.GetCircularReferenceErrors(), 1, "should have exactly 1 circular error")
+ },
+ },
+ {
+ name: "no errors for valid references",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // Should have no resolution errors
+ assert.Empty(t, idx.GetResolutionErrors(), "should have no resolution errors")
+ assert.Empty(t, idx.GetValidationErrors(), "should have no validation errors")
+ },
+ },
+ {
+ name: "unreferenced schemas in external files not indexed",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ for _, schema := range idx.GetAllSchemas() {
+ loc := string(schema.Location.ToJSONPointer())
+ assert.NotContains(t, loc, "UnreferencedUser", "UnreferencedUser should not be indexed")
+ assert.NotContains(t, loc, "UnusedTreeType", "UnusedTreeType should not be indexed")
+ assert.NotContains(t, loc, "TotallyUnused", "TotallyUnused should not be indexed")
+ assert.NotContains(t, loc, "UnreferencedCategory", "UnreferencedCategory should not be indexed")
+ }
+ },
+ },
+ {
+ name: "valid circular reference via optional property",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ circularErrs := idx.GetCircularReferenceErrors()
+ for _, err := range circularErrs {
+ errStr := err.Error()
+ // User<->Address should not have circular error (address is optional)
+ if strings.Contains(errStr, "User") && strings.Contains(errStr, "Address") {
+ t.Errorf("User<->Address circular via optional property should be valid, got error: %v", err)
+ }
+ }
+ },
+ },
+ {
+ name: "valid circular reference via array minItems=0",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ circularErrs := idx.GetCircularReferenceErrors()
+ for _, err := range circularErrs {
+ errStr := err.Error()
+ // TreeNode self-reference should not have circular error
+ if strings.Contains(errStr, "TreeNode") {
+ t.Errorf("TreeNode self-reference via array should be valid, got error: %v", err)
+ }
+ }
+ },
+ },
+ {
+ name: "schema references tracked with locations",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ assert.NotEmpty(t, idx.SchemaReferences, "should have schema references")
+ for _, ref := range idx.SchemaReferences {
+ assert.NotNil(t, ref.Location, "reference should have location")
+ assert.NotNil(t, ref.Node, "reference should have node")
+ }
+ },
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ tt.assertion(t, idx)
+ })
+ }
+}
diff --git a/openapi/index_test.go b/openapi/index_test.go
new file mode 100644
index 0000000..5b5eab6
--- /dev/null
+++ b/openapi/index_test.go
@@ -0,0 +1,1142 @@
+package openapi_test
+
+import (
+ "context"
+ "strings"
+ "testing"
+
+ "github.com/speakeasy-api/openapi/openapi"
+ "github.com/speakeasy-api/openapi/references"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+// unmarshalOpenAPI parses yaml into an OpenAPI document, failing the test
+// immediately on any unmarshal error or validation error.
+// NOTE(review): by convention ctx would be the first parameter; left as-is
+// here to avoid churning every call site in this file.
+func unmarshalOpenAPI(t *testing.T, ctx context.Context, yaml string) *openapi.OpenAPI {
+	t.Helper()
+	o, validationErrs, err := openapi.Unmarshal(ctx, strings.NewReader(yaml))
+	require.NoError(t, err, "unmarshal should succeed")
+	require.Empty(t, validationErrs, "should have no validation errors")
+	return o
+}
+
+// TestBuildIndex_EmptyDoc_Success verifies that indexing a minimal document
+// with no paths, components, tags, or servers yields an empty, error-free index.
+func TestBuildIndex_EmptyDoc_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Empty API
+  version: 1.0.0
+paths: {}
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument: doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+	assert.Empty(t, idx.GetAllSchemas(), "should have no schemas")
+	assert.Empty(t, idx.GetAllPathItems(), "should have no path items")
+	assert.False(t, idx.HasErrors(), "should have no errors")
+}
+
+func TestBuildIndex_ComponentSchemas_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ User:
+ type: object
+ properties:
+ id:
+ type: integer
+ name:
+ type: string
+ Pet:
+ type: object
+ properties:
+ name:
+ type: string
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Should have component schemas indexed
+ assert.Len(t, idx.ComponentSchemas, 2, "should have 2 component schemas")
+
+ // Should have inline schemas within the components
+ assert.Len(t, idx.InlineSchemas, 3, "should have 3 inline schemas from properties")
+}
+
+func TestBuildIndex_InlineSchemas_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users:
+ get:
+ operationId: getUsers
+ responses:
+ "200":
+ description: Success
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ type: object
+ properties:
+ id:
+ type: integer
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Should have inline schemas: array, object (items), integer (id property)
+ assert.Len(t, idx.InlineSchemas, 3, "should have 3 inline schemas")
+ assert.Empty(t, idx.ComponentSchemas, "should have no component schemas")
+ assert.Empty(t, idx.SchemaReferences, "should have no schema references")
+}
+
+func TestBuildIndex_SchemaReferences_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users:
+ get:
+ operationId: getUsers
+ responses:
+ "200":
+ description: Success
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/User'
+components:
+ schemas:
+ User:
+ type: object
+ properties:
+ id:
+ type: integer
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // $ref to User schema
+ assert.Len(t, idx.SchemaReferences, 1, "should have 1 schema reference")
+ // User component schema
+ assert.Len(t, idx.ComponentSchemas, 1, "should have 1 component schema")
+ // id property inline schema
+ assert.Len(t, idx.InlineSchemas, 1, "should have 1 inline schema")
+}
+
+func TestBuildIndex_BooleanSchemas_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ AnyValue:
+ type: object
+ additionalProperties: true
+ NoAdditional:
+ type: object
+ additionalProperties: false
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Two boolean schemas (true and false for additionalProperties)
+ assert.Len(t, idx.BooleanSchemas, 2, "should have 2 boolean schemas")
+ // Two component schemas (AnyValue and NoAdditional)
+ assert.Len(t, idx.ComponentSchemas, 2, "should have 2 component schemas")
+ assert.Empty(t, idx.InlineSchemas, "should have no inline schemas")
+}
+
+func TestBuildIndex_Servers_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+servers:
+ - url: https://api.example.com
+ description: Production
+ variables:
+ version:
+ default: v1
+ enum: [v1, v2]
+ - url: https://staging.example.com
+ description: Staging
+paths: {}
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ assert.Len(t, idx.Servers, 2, "should have 2 servers")
+ assert.Len(t, idx.ServerVariables, 1, "should have 1 server variable")
+}
+
+func TestBuildIndex_Tags_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+tags:
+ - name: users
+ description: User operations
+ - name: pets
+ description: Pet operations
+paths: {}
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ assert.Len(t, idx.Tags, 2, "should have 2 tags")
+}
+
+func TestBuildIndex_ExternalDocs_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+externalDocs:
+ url: https://docs.example.com
+ description: API Documentation
+tags:
+ - name: users
+ externalDocs:
+ url: https://docs.example.com/users
+paths: {}
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ assert.Len(t, idx.ExternalDocumentation, 2, "should have 2 external docs")
+}
+
+// TestBuildIndex_GetAllSchemas_Success verifies that GetAllSchemas returns the
+// union of every per-category schema bucket (boolean, inline, component,
+// external, and reference schemas) with no omissions or double-counting.
+func TestBuildIndex_GetAllSchemas_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths:
+  /users:
+    get:
+      operationId: getUsers
+      responses:
+        "200":
+          description: Success
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/User'
+components:
+  schemas:
+    User:
+      type: object
+      additionalProperties: true
+      properties:
+        id:
+          type: integer
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument: doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+
+	allSchemas := idx.GetAllSchemas()
+	assert.NotEmpty(t, allSchemas, "should have schemas")
+
+	// Should include boolean, inline, component, external, and reference schemas
+	totalExpected := len(idx.BooleanSchemas) + len(idx.InlineSchemas) +
+		len(idx.ComponentSchemas) + len(idx.ExternalSchemas) + len(idx.SchemaReferences)
+	assert.Len(t, allSchemas, totalExpected, "GetAllSchemas should return all schema types")
+}
+
+// TestBuildIndex_NilIndex_Methods_Success verifies that every accessor method
+// is safe to call on a nil *Index receiver, returning nil slices / false
+// instead of panicking.
+func TestBuildIndex_NilIndex_Methods_Success(t *testing.T) {
+	t.Parallel()
+
+	var idx *openapi.Index
+
+	assert.Nil(t, idx.GetAllSchemas(), "nil index GetAllSchemas should return nil")
+	assert.Nil(t, idx.GetAllPathItems(), "nil index GetAllPathItems should return nil")
+	assert.Nil(t, idx.GetValidationErrors(), "nil index GetValidationErrors should return nil")
+	assert.Nil(t, idx.GetResolutionErrors(), "nil index GetResolutionErrors should return nil")
+	assert.Nil(t, idx.GetCircularReferenceErrors(), "nil index GetCircularReferenceErrors should return nil")
+	assert.Nil(t, idx.GetAllErrors(), "nil index GetAllErrors should return nil")
+	assert.False(t, idx.HasErrors(), "nil index HasErrors should return false")
+}
+
+// Tests for circular reference detection
+
+func TestBuildIndex_CircularRef_OptionalProperty_Valid(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ // Optional property recursion - VALID (not required means {} is valid)
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ Node:
+ type: object
+ properties:
+ next:
+ $ref: '#/components/schemas/Node'
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+
+ // Optional property circular refs should be VALID (no error)
+ circularErrs := idx.GetCircularReferenceErrors()
+ assert.Empty(t, circularErrs, "optional property circular ref should be valid (no error)")
+}
+
+// TestBuildIndex_CircularRef_RequiredProperty_Invalid verifies that a mutual
+// A<->B cycle through *required* properties is flagged as an invalid circular
+// reference: with both links required there is no finite instance that can
+// satisfy either schema.
+func TestBuildIndex_CircularRef_RequiredProperty_Invalid(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	// Required property recursion - INVALID (no base case)
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    A:
+      type: object
+      required: [b]
+      properties:
+        b:
+          $ref: '#/components/schemas/B'
+    B:
+      type: object
+      required: [a]
+      properties:
+        a:
+          $ref: '#/components/schemas/A'
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument: doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+
+	// Required property circular refs should be INVALID
+	circularErrs := idx.GetCircularReferenceErrors()
+	assert.NotEmpty(t, circularErrs, "required property circular ref should be invalid")
+}
+
+func TestBuildIndex_CircularRef_ArrayMinItemsZero_Valid(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ // Array with default minItems (0) - VALID (empty array terminates)
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ Category:
+ type: object
+ required: [children]
+ properties:
+ children:
+ type: array
+ items:
+ $ref: '#/components/schemas/Category'
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+
+ // Array with minItems=0 circular refs should be VALID
+ circularErrs := idx.GetCircularReferenceErrors()
+ assert.Empty(t, circularErrs, "array with minItems=0 circular ref should be valid")
+}
+
+func TestBuildIndex_CircularRef_ArrayMinItemsOne_Invalid(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ // Array with minItems=1 - INVALID (can't have empty array)
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ Node:
+ type: object
+ required: [children]
+ properties:
+ children:
+ type: array
+ minItems: 1
+ items:
+ $ref: '#/components/schemas/Node'
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+
+ // Array with minItems>=1 circular refs should be INVALID
+ circularErrs := idx.GetCircularReferenceErrors()
+ assert.NotEmpty(t, circularErrs, "array with minItems>=1 circular ref should be invalid")
+}
+
+func TestBuildIndex_CircularRef_Nullable_Valid(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ // Nullable type union - VALID (null is a base case)
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ Node:
+ type: [object, "null"]
+ required: [next]
+ properties:
+ next:
+ $ref: '#/components/schemas/Node'
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+
+ // Nullable circular refs should be VALID
+ circularErrs := idx.GetCircularReferenceErrors()
+ assert.Empty(t, circularErrs, "nullable circular ref should be valid")
+}
+
+func TestBuildIndex_CircularRef_AdditionalPropertiesMinZero_Valid(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ // AdditionalProperties with default minProperties (0) - VALID
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ TrieNode:
+ type: object
+ required: [children]
+ properties:
+ children:
+ type: object
+ additionalProperties:
+ $ref: '#/components/schemas/TrieNode'
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+
+ // AdditionalProperties with minProperties=0 should be VALID
+ circularErrs := idx.GetCircularReferenceErrors()
+ assert.Empty(t, circularErrs, "additionalProperties with minProperties=0 should be valid")
+}
+
+func TestBuildIndex_CircularRef_AdditionalPropertiesMinOne_Invalid(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ // AdditionalProperties with minProperties>=1 - INVALID
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ Node:
+ type: object
+ required: [children]
+ properties:
+ children:
+ type: object
+ minProperties: 1
+ additionalProperties:
+ $ref: '#/components/schemas/Node'
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+
+ // AdditionalProperties with minProperties>=1 should be INVALID
+ circularErrs := idx.GetCircularReferenceErrors()
+ assert.NotEmpty(t, circularErrs, "additionalProperties with minProperties>=1 should be invalid")
+}
+
+func TestBuildIndex_CircularRef_OneOfWithNonRecursiveBranch_Valid(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ // oneOf with at least one non-recursive branch - VALID
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ Expr:
+ oneOf:
+ - $ref: '#/components/schemas/Literal'
+ - $ref: '#/components/schemas/BinaryExpr'
+ Literal:
+ type: object
+ properties:
+ value:
+ type: string
+ BinaryExpr:
+ type: object
+ required: [left, right]
+ properties:
+ left:
+ $ref: '#/components/schemas/Expr'
+ right:
+ $ref: '#/components/schemas/Expr'
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+
+ // oneOf with a non-recursive branch should be VALID
+ circularErrs := idx.GetCircularReferenceErrors()
+ assert.Empty(t, circularErrs, "oneOf with non-recursive branch should be valid")
+}
+
+func TestBuildIndex_CircularRef_DirectSelfRef_Optional_Valid(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ // Direct self-reference through optional property - VALID
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ LinkedNode:
+ type: object
+ properties:
+ value:
+ type: string
+ next:
+ $ref: '#/components/schemas/LinkedNode'
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+
+ circularErrs := idx.GetCircularReferenceErrors()
+ assert.Empty(t, circularErrs, "direct self-ref through optional should be valid")
+}
+
+func TestBuildIndex_CircularRef_DirectSelfRef_Required_Invalid(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ // Direct self-reference through required property - INVALID
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ InfiniteNode:
+ type: object
+ required: [self]
+ properties:
+ self:
+ $ref: '#/components/schemas/InfiniteNode'
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+
+ circularErrs := idx.GetCircularReferenceErrors()
+ assert.NotEmpty(t, circularErrs, "direct self-ref through required should be invalid")
+}
+
+func TestBuildIndex_NoCircularRef_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ // No circular reference - just regular refs
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users:
+ get:
+ operationId: getUsers
+ responses:
+ "200":
+ description: Success
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/User'
+components:
+ schemas:
+ User:
+ type: object
+ properties:
+ id:
+ type: integer
+ address:
+ $ref: '#/components/schemas/Address'
+ Address:
+ type: object
+ properties:
+ street:
+ type: string
+ city:
+ type: string
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+ assert.Empty(t, idx.GetCircularReferenceErrors(), "should have no circular reference errors")
+}
+
+// TestBuildIndex_LocationInfo_Success verifies that indexed component schemas
+// carry location information that converts to a non-empty JSON pointer.
+func TestBuildIndex_LocationInfo_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    User:
+      type: object
+      properties:
+        id:
+          type: integer
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument: doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+
+	// Guard against a vacuous pass: without this, an empty ComponentSchemas
+	// slice would skip the loop below and the test would succeed while
+	// asserting nothing.
+	require.NotEmpty(t, idx.ComponentSchemas, "should have indexed component schemas")
+
+	// Check that schemas have location information
+	for _, schema := range idx.ComponentSchemas {
+		assert.NotNil(t, schema.Location, "schema should have location")
+		jp := schema.Location.ToJSONPointer()
+		assert.NotEmpty(t, jp, "location should produce JSON pointer")
+	}
+}
+
+func TestBuildIndex_Operations_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users:
+ get:
+ operationId: getUsers
+ summary: Get users
+ responses:
+ "200":
+ description: Success
+ post:
+ operationId: createUser
+ summary: Create user
+ responses:
+ "201":
+ description: Created
+ /products:
+ get:
+ operationId: getProducts
+ summary: Get products
+ responses:
+ "200":
+ description: Success
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Should have 3 operations indexed
+ assert.Len(t, idx.Operations, 3, "should have 3 operations")
+ // Should have 2 inline path items
+ assert.Len(t, idx.InlinePathItems, 2, "should have 2 inline path items")
+}
+
+func TestBuildIndex_Parameters_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users/{id}:
+ parameters:
+ - name: id
+ in: path
+ required: true
+ schema:
+ type: integer
+ get:
+ operationId: getUser
+ responses:
+ "200":
+ description: Success
+ parameters:
+ - $ref: '#/components/parameters/PageSize'
+components:
+ parameters:
+ PageSize:
+ name: pageSize
+ in: query
+ schema:
+ type: integer
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Should have 1 component parameter (PageSize)
+ assert.Len(t, idx.ComponentParameters, 1, "should have 1 component parameter")
+ // Should have 1 inline parameter (id in path)
+ assert.Len(t, idx.InlineParameters, 1, "should have 1 inline parameter")
+ // Should have 1 parameter reference ($ref to PageSize)
+ assert.Len(t, idx.ParameterReferences, 1, "should have 1 parameter reference")
+}
+
+func TestBuildIndex_Responses_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users:
+ get:
+ operationId: getUsers
+ responses:
+ "200":
+ description: Success
+ content:
+ application/json:
+ schema:
+ type: array
+ "404":
+ $ref: '#/components/responses/NotFound'
+components:
+ responses:
+ NotFound:
+ description: Not found
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Should have 1 component response (NotFound)
+ assert.Len(t, idx.ComponentResponses, 1, "should have 1 component response")
+ // Should have 1 inline response (200)
+ assert.Len(t, idx.InlineResponses, 1, "should have 1 inline response")
+ // Should have 1 response reference ($ref to NotFound)
+ assert.Len(t, idx.ResponseReferences, 1, "should have 1 response reference")
+}
+
+func TestBuildIndex_RequestBodies_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users:
+ post:
+ operationId: createUser
+ requestBody:
+ description: User to create
+ content:
+ application/json:
+ schema:
+ type: object
+ responses:
+ "201":
+ description: Created
+ put:
+ operationId: updateUser
+ requestBody:
+ $ref: '#/components/requestBodies/UserBody'
+ responses:
+ "200":
+ description: Updated
+components:
+ requestBodies:
+ UserBody:
+ description: User body
+ content:
+ application/json:
+ schema:
+ type: object
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Should have 1 component request body (UserBody)
+ assert.Len(t, idx.ComponentRequestBodies, 1, "should have 1 component request body")
+ // Should have 1 inline request body (POST)
+ assert.Len(t, idx.InlineRequestBodies, 1, "should have 1 inline request body")
+ // Should have 1 request body reference ($ref to UserBody)
+ assert.Len(t, idx.RequestBodyReferences, 1, "should have 1 request body reference")
+}
+
+func TestBuildIndex_MediaTypes_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users:
+ post:
+ operationId: createUser
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ application/xml:
+ schema:
+ type: object
+ responses:
+ "200":
+ description: Success
+ content:
+ application/json:
+ schema:
+ type: object
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Should have 3 media types (2 in request, 1 in response)
+ assert.Len(t, idx.MediaTypes, 3, "should have 3 media types")
+}
+
+func TestBuildIndex_Discriminator_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ Pet:
+ type: object
+ discriminator:
+ propertyName: petType
+ mapping:
+ dog: '#/components/schemas/Dog'
+ cat: '#/components/schemas/Cat'
+ properties:
+ petType:
+ type: string
+ Dog:
+ type: object
+ Cat:
+ type: object
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Should have 1 discriminator
+ assert.Len(t, idx.Discriminators, 1, "should have 1 discriminator")
+}
+
+func TestBuildIndex_SecuritySchemes_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ securitySchemes:
+ apiKey:
+ type: apiKey
+ in: header
+ name: X-API-Key
+ oauth2:
+ type: oauth2
+ flows:
+ implicit:
+ authorizationUrl: https://example.com/oauth/authorize
+ scopes:
+ read: Read access
+ write: Write access
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Should have 2 component security schemes
+ assert.Len(t, idx.ComponentSecuritySchemes, 2, "should have 2 component security schemes")
+ // Should have 1 OAuth flows container
+ assert.Len(t, idx.OAuthFlows, 1, "should have 1 OAuth flows")
+ // Should have 1 OAuth flow item (implicit)
+ assert.Len(t, idx.OAuthFlowItems, 1, "should have 1 OAuth flow item")
+}
diff --git a/openapi/info.go b/openapi/info.go
index 7ac8a58..3c1ad1d 100644
--- a/openapi/info.go
+++ b/openapi/info.go
@@ -2,6 +2,8 @@ package openapi
import (
"context"
+ "errors"
+ "fmt"
"net/mail"
"net/url"
@@ -106,16 +108,16 @@ func (i *Info) Validate(ctx context.Context, opts ...validation.Option) []error
errs := []error{}
if core.Title.Present && i.Title == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("info.title is required"), core, core.Title))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("info.title is required"), core, core.Title))
}
if core.Version.Present && i.Version == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("info.version is required"), core, core.Version))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("info.version is required"), core, core.Version))
}
if core.TermsOfService.Present {
if _, err := url.Parse(*i.TermsOfService); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("info.termsOfService is not a valid uri: %s", err), core, core.TermsOfService))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("info.termsOfService is not a valid uri: %w", err), core, core.TermsOfService))
}
}
@@ -186,13 +188,13 @@ func (c *Contact) Validate(ctx context.Context, opts ...validation.Option) []err
if core.URL.Present {
if _, err := url.Parse(*c.URL); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("contact.url is not a valid uri: %s", err), core, core.URL))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("contact.url is not a valid uri: %w", err), core, core.URL))
}
}
if core.Email.Present {
if _, err := mail.ParseAddress(*c.Email); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("contact.email is not a valid email address: %s", err), core, core.Email))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("contact.email is not a valid email address: %w", err), core, core.Email))
}
}
@@ -255,12 +257,12 @@ func (l *License) Validate(ctx context.Context, opts ...validation.Option) []err
errs := []error{}
if core.Name.Present && l.Name == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("license.name is required"), core, core.Name))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("license.name is required"), core, core.Name))
}
if core.URL.Present {
if _, err := url.Parse(*l.URL); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("license.url is not a valid uri: %s", err), core, core.URL))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("license.url is not a valid uri: %w", err), core, core.URL))
}
}
diff --git a/openapi/info_validate_test.go b/openapi/info_validate_test.go
index b469342..a434c63 100644
--- a/openapi/info_validate_test.go
+++ b/openapi/info_validate_test.go
@@ -108,7 +108,7 @@ func TestInfo_Validate_Error(t *testing.T) {
yml: `
version: 1.0.0
`,
- wantErrs: []string{"[2:1] info.title is missing"},
+ wantErrs: []string{"[2:1] error validation-required-field info.title is required"},
},
{
name: "empty title",
@@ -116,14 +116,14 @@ version: 1.0.0
title: ""
version: 1.0.0
`,
- wantErrs: []string{"[2:8] info.title is required"},
+ wantErrs: []string{"[2:8] error validation-required-field info.title is required"},
},
{
name: "missing version",
yml: `
title: Test API
`,
- wantErrs: []string{"[2:1] info.version is missing"},
+ wantErrs: []string{"[2:1] error validation-required-field info.version is required"},
},
{
name: "empty version",
@@ -131,7 +131,7 @@ title: Test API
title: Test API
version: ""
`,
- wantErrs: []string{"[3:10] info.version is required"},
+ wantErrs: []string{"[3:10] error validation-required-field info.version is required"},
},
{
name: "invalid termsOfService URI",
@@ -140,7 +140,7 @@ title: Test API
version: 1.0.0
termsOfService: ":invalid"
`,
- wantErrs: []string{"[4:17] info.termsOfService is not a valid uri: parse \":invalid\": missing protocol scheme"},
+ wantErrs: []string{"[4:17] error validation-invalid-format info.termsOfService is not a valid uri: parse \":invalid\": missing protocol scheme"},
},
{
name: "invalid contact URL",
@@ -151,7 +151,7 @@ contact:
name: Support
url: ":invalid"
`,
- wantErrs: []string{"[6:8] contact.url is not a valid uri: parse \":invalid\": missing protocol scheme"},
+ wantErrs: []string{"[6:8] error validation-invalid-format contact.url is not a valid uri: parse \":invalid\": missing protocol scheme"},
},
{
name: "invalid contact email",
@@ -162,7 +162,7 @@ contact:
name: Support
email: "not-an-email"
`,
- wantErrs: []string{"[6:10] contact.email is not a valid email address: mail: missing '@' or angle-addr"},
+ wantErrs: []string{"[6:10] error validation-invalid-format contact.email is not a valid email address: mail: missing '@' or angle-addr"},
},
{
name: "invalid license URL",
@@ -173,7 +173,7 @@ license:
name: MIT
url: ":invalid"
`,
- wantErrs: []string{"[6:8] license.url is not a valid uri: parse \":invalid\": missing protocol scheme"},
+ wantErrs: []string{"[6:8] error validation-invalid-format license.url is not a valid uri: parse \":invalid\": missing protocol scheme"},
},
{
name: "missing license name",
@@ -183,7 +183,7 @@ version: 1.0.0
license:
url: https://opensource.org/licenses/MIT
`,
- wantErrs: []string{"[5:3] license.name is missing"},
+ wantErrs: []string{"[5:3] error validation-required-field license.name is required"},
},
{
name: "multiple validation errors",
@@ -196,10 +196,10 @@ license:
name: ""
`,
wantErrs: []string{
- "[2:8] info.title is required",
- "[3:10] info.version is required",
- "[5:10] contact.email is not a valid email address: mail: missing '@' or angle-addr",
- "[7:9] license.name is required",
+ "[2:8] error validation-required-field info.title is required",
+ "[3:10] error validation-required-field info.version is required",
+ "[5:10] error validation-invalid-format contact.email is not a valid email address: mail: missing '@' or angle-addr",
+ "[7:9] error validation-required-field license.name is required",
},
},
}
@@ -326,7 +326,7 @@ func TestContact_Validate_Error(t *testing.T) {
name: Support
url: ":invalid"
`,
- wantErrs: []string{"[3:6] contact.url is not a valid uri: parse \":invalid\": missing protocol scheme"},
+ wantErrs: []string{"[3:6] error validation-invalid-format contact.url is not a valid uri: parse \":invalid\": missing protocol scheme"},
},
{
name: "invalid email",
@@ -334,7 +334,7 @@ url: ":invalid"
name: Support
email: "not-an-email"
`,
- wantErrs: []string{"[3:8] contact.email is not a valid email address: mail: missing '@' or angle-addr"},
+ wantErrs: []string{"[3:8] error validation-invalid-format contact.email is not a valid email address: mail: missing '@' or angle-addr"},
},
{
name: "invalid URL with spaces",
@@ -342,7 +342,7 @@ email: "not-an-email"
name: Support
url: ":invalid url"
`,
- wantErrs: []string{"[3:6] contact.url is not a valid uri: parse \":invalid url\": missing protocol scheme"},
+ wantErrs: []string{"[3:6] error validation-invalid-format contact.url is not a valid uri: parse \":invalid url\": missing protocol scheme"},
},
{
name: "invalid email missing @",
@@ -350,7 +350,7 @@ url: ":invalid url"
name: Support
email: "supportexample.com"
`,
- wantErrs: []string{"[3:8] contact.email is not a valid email address: mail: missing '@' or angle-addr"},
+ wantErrs: []string{"[3:8] error validation-invalid-format contact.email is not a valid email address: mail: missing '@' or angle-addr"},
},
{
name: "multiple validation errors",
@@ -360,8 +360,8 @@ url: ":invalid"
email: "invalid-email"
`,
wantErrs: []string{
- "[3:6] contact.url is not a valid uri: parse \":invalid\": missing protocol scheme",
- "[4:8] contact.email is not a valid email address: mail: missing '@' or angle-addr",
+ "[3:6] error validation-invalid-format contact.url is not a valid uri: parse \":invalid\": missing protocol scheme",
+ "[4:8] error validation-invalid-format contact.email is not a valid email address: mail: missing '@' or angle-addr",
},
},
}
@@ -476,7 +476,7 @@ func TestLicense_Validate_Error(t *testing.T) {
yml: `
url: https://opensource.org/licenses/MIT
`,
- wantErrs: []string{"[2:1] license.name is missing"},
+ wantErrs: []string{"[2:1] error validation-required-field license.name is required"},
},
{
name: "empty name",
@@ -484,7 +484,7 @@ url: https://opensource.org/licenses/MIT
name: ""
url: https://opensource.org/licenses/MIT
`,
- wantErrs: []string{"[2:7] license.name is required"},
+ wantErrs: []string{"[2:7] error validation-required-field license.name is required"},
},
{
name: "invalid URL",
@@ -492,7 +492,7 @@ url: https://opensource.org/licenses/MIT
name: MIT
url: ":invalid"
`,
- wantErrs: []string{"[3:6] license.url is not a valid uri: parse \":invalid\": missing protocol scheme"},
+ wantErrs: []string{"[3:6] error validation-invalid-format license.url is not a valid uri: parse \":invalid\": missing protocol scheme"},
},
{
name: "invalid URL with spaces",
@@ -500,7 +500,7 @@ url: ":invalid"
name: MIT
url: ":invalid url"
`,
- wantErrs: []string{"[3:6] license.url is not a valid uri: parse \":invalid url\": missing protocol scheme"},
+ wantErrs: []string{"[3:6] error validation-invalid-format license.url is not a valid uri: parse \":invalid url\": missing protocol scheme"},
},
{
name: "multiple validation errors",
@@ -509,8 +509,8 @@ name: ""
url: ":invalid"
`,
wantErrs: []string{
- "[2:7] license.name is required",
- "[3:6] license.url is not a valid uri: parse \":invalid\": missing protocol scheme",
+ "[2:7] error validation-required-field license.name is required",
+ "[3:6] error validation-invalid-format license.url is not a valid uri: parse \":invalid\": missing protocol scheme",
},
},
}
diff --git a/openapi/inline.go b/openapi/inline.go
index a16650d..096569d 100644
--- a/openapi/inline.go
+++ b/openapi/inline.go
@@ -349,7 +349,7 @@ func inlineReference[T any, V interfaces.Validator[T], C marshaller.CoreModeler]
recursiveOpts := ResolveOptions{
RootDocument: opts.RootDocument,
TargetDocument: targetDocInfo.ResolvedDocument,
- TargetLocation: targetDocInfo.AbsoluteReference,
+ TargetLocation: targetDocInfo.AbsoluteDocumentPath,
}
if err := inlineObject(ctx, ref, doc, recursiveOpts, collectedDefs, defHashes); err != nil {
return fmt.Errorf("failed to inline nested references in %s: %w", ref.GetReference(), err)
diff --git a/openapi/links.go b/openapi/links.go
index f9c13d7..b38bbc7 100644
--- a/openapi/links.go
+++ b/openapi/links.go
@@ -2,6 +2,8 @@ package openapi
import (
"context"
+ "errors"
+ "fmt"
"net/url"
"github.com/speakeasy-api/openapi/expression"
@@ -105,7 +107,7 @@ func (l *Link) Validate(ctx context.Context, opts ...validation.Option) []error
o := validation.GetContextObject[OpenAPI](op)
if core.OperationID.Present && core.OperationRef.Present {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("operationID and operationRef are mutually exclusive"), core, core.OperationID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("operationID and operationRef are mutually exclusive"), core, core.OperationID))
}
if l.OperationID != nil {
@@ -135,36 +137,36 @@ func (l *Link) Validate(ctx context.Context, opts ...validation.Option) []error
}
if !foundOp {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("link.operationId value %s does not exist in document", *l.OperationID), core, core.OperationID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationOperationNotFound, fmt.Errorf("link.operationId value %s does not exist in document", *l.OperationID), core, core.OperationID))
}
}
// TODO should we validate the reference resolves here? Or as part of the resolution operation? Or make it optional?
if l.OperationRef != nil {
if _, err := url.Parse(*l.OperationRef); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("link.operationRef is not a valid uri: %s", err), core, core.OperationRef))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("link.operationRef is not a valid uri: %w", err), core, core.OperationRef))
}
}
for key, exp := range l.GetParameters().All() {
_, expression, err := expression.GetValueOrExpressionValue(exp)
if err != nil {
- errs = append(errs, validation.NewMapValueError(validation.NewValueValidationError("link.parameters expression is invalid: %s", err.Error()), core, core.Parameters, key))
+ errs = append(errs, validation.NewMapValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("link.parameters expression is invalid: %w", err), core, core.Parameters, key))
}
if expression != nil {
if err := expression.Validate(); err != nil {
- errs = append(errs, validation.NewMapValueError(validation.NewValueValidationError("link.parameters expression is invalid: %s", err.Error()), core, core.Parameters, key))
+ errs = append(errs, validation.NewMapValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("link.parameters expression is invalid: %w", err), core, core.Parameters, key))
}
}
}
_, rbe, err := expression.GetValueOrExpressionValue(l.RequestBody)
if err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("link.requestBody expression is invalid: %s", err.Error()), core, core.RequestBody))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("link.requestBody expression is invalid: %w", err), core, core.RequestBody))
}
if rbe != nil {
if err := rbe.Validate(); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("link.requestBody expression is invalid: %s", err.Error()), core, core.RequestBody))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("link.requestBody expression is invalid: %w", err), core, core.RequestBody))
}
}
diff --git a/openapi/links_validate_test.go b/openapi/links_validate_test.go
index ce2e2fe..5bedbde 100644
--- a/openapi/links_validate_test.go
+++ b/openapi/links_validate_test.go
@@ -177,7 +177,7 @@ server:
description: Invalid server without URL
description: Link with invalid server
`,
- wantErrs: []string{"[4:3] server.url is missing"},
+ wantErrs: []string{"[4:3] error validation-required-field server.url is required"},
},
{
name: "invalid_operation_ref_uri",
diff --git a/openapi/linter/README.md b/openapi/linter/README.md
new file mode 100644
index 0000000..d335540
--- /dev/null
+++ b/openapi/linter/README.md
@@ -0,0 +1,125 @@
+# OpenAPI Linter
+
+The OpenAPI linter validates OpenAPI specifications for style, consistency,
+and best practices beyond basic spec validation.
+
+## Quick Start
+
+### CLI
+
+```bash
+# Lint an OpenAPI specification
+openapi spec lint api.yaml
+
+# Output as JSON
+openapi spec lint --format json api.yaml
+
+# Disable specific rules
+openapi spec lint --disable style-path-params api.yaml
+```
+
+### Go API
+
+```go
+import (
+ "context"
+ "fmt"
+ "os"
+
+ "github.com/speakeasy-api/openapi/linter"
+ "github.com/speakeasy-api/openapi/openapi"
+ openapiLinter "github.com/speakeasy-api/openapi/openapi/linter"
+)
+
+func main() {
+ ctx := context.Background()
+
+ // Load your OpenAPI document
+ f, _ := os.Open("api.yaml")
+ doc, validationErrors, _ := openapi.Unmarshal(ctx, f)
+
+ // Create linter with default configuration
+ config := linter.NewConfig()
+ lint := openapiLinter.NewLinter(config)
+
+ // Run linting
+ output, _ := lint.Lint(ctx, doc, validationErrors)
+
+ // Print results
+ fmt.Println(output.FormatText())
+}
+```
+
+## Available Rules
+
+
+
+| Rule | Severity | Description |
+|------|----------|-------------|
+| `oas-schema-check` | error | Schemas must use type-appropriate constraints and have valid constraint values. For example, string types should use minLength/maxLength/pattern, numbers should use minimum/maximum/multipleOf, and constraint values must be logically valid (e.g., maxLength >= minLength). |
+| `oas3-example-missing` | hint | Schemas, parameters, headers, and media types should include example values to illustrate expected data formats. Examples improve documentation quality, help developers understand how to use the API correctly, and enable better testing and validation. |
+| `oas3-no-nullable` | warning | The nullable keyword is not supported in OpenAPI 3.1+ and should be replaced with a type array that includes null (e.g., type: [string, null]). This change aligns OpenAPI 3.1 with JSON Schema Draft 2020-12, which uses type arrays to express nullable values. |
+| `owasp-additional-properties-constrained` | hint | Schemas with additionalProperties set to true or a schema should define maxProperties to limit object size. Without size limits, APIs are vulnerable to resource exhaustion attacks where clients send excessively large objects. |
+| `owasp-array-limit` | error | Array schemas must specify maxItems to prevent resource exhaustion attacks. Without array size limits, malicious clients could send extremely large arrays that consume excessive memory or processing time. |
+| `owasp-auth-insecure-schemes` | error | Authentication schemes using outdated or insecure methods must be avoided or upgraded. Insecure authentication schemes like API keys in query parameters or HTTP Basic over HTTP expose credentials and create security vulnerabilities. |
+| `owasp-define-error-responses-401` | warning | Operations should define a 401 Unauthorized response with a proper schema to handle authentication failures. Documenting authentication error responses helps clients implement proper error handling and understand when credentials are invalid or missing. |
+| `owasp-define-error-responses-429` | warning | Operations should define a 429 Too Many Requests response with a proper schema to indicate rate limiting. Rate limit responses help clients understand when they've exceeded usage thresholds and need to slow down requests. |
+| `owasp-define-error-responses-500` | warning | Operations should define a 500 Internal Server Error response with a proper schema to handle unexpected failures. Documenting server error responses helps clients distinguish between client-side and server-side problems. |
+| `owasp-define-error-validation` | warning | Operations should define validation error responses (400, 422, or 4XX) to indicate request data problems. Validation error responses help clients understand when and why their request data is invalid or malformed. |
+| `owasp-integer-format` | error | Integer schemas must specify a format of int32 or int64 to define the expected size and range. Explicit integer formats prevent overflow vulnerabilities and ensure clients and servers agree on numeric boundaries. |
+| `owasp-integer-limit` | error | Integer schemas must specify minimum and maximum values (or exclusive variants) to prevent unbounded inputs. Without numeric limits, APIs are vulnerable to overflow attacks and unexpected behavior from extreme values. |
+| `owasp-jwt-best-practices` | error | Security schemes using OAuth2 or JWT must explicitly declare support for RFC8725 (JWT Best Current Practices) in the description. RFC8725 compliance ensures JWTs are validated properly and protected against common attacks like algorithm confusion. |
+| `owasp-no-additional-properties` | error | Object schemas must not allow arbitrary additional properties (set additionalProperties to false or omit it). Allowing unexpected properties can lead to mass assignment vulnerabilities where attackers inject unintended fields. |
+| `owasp-no-api-keys-in-url` | error | API keys must not be passed via URL parameters (query or path) as they are logged and cached. URL-based API keys appear in browser history, server logs, and proxy caches, creating security exposure. |
+| `owasp-no-credentials-in-url` | error | URL parameters must not contain credentials like API keys, passwords, or secrets. Credentials in URLs are logged by servers, proxies, and browsers, creating significant security risks. |
+| `owasp-no-http-basic` | error | Security schemes must not use HTTP Basic authentication without additional security layers. HTTP Basic sends credentials in easily-decoded base64 encoding, making it vulnerable to interception without HTTPS. |
+| `owasp-no-numeric-ids` | error | Resource identifiers must use random values like UUIDs instead of sequential numeric IDs. Sequential IDs enable enumeration attacks where attackers can guess valid IDs and access unauthorized resources. |
+| `owasp-protection-global-safe` | hint | Safe operations (GET, HEAD) should be protected by security schemes or explicitly marked as public. Unprotected read operations may expose sensitive data to unauthorized users. |
+| `owasp-protection-global-unsafe` | error | Unsafe operations (POST, PUT, PATCH, DELETE) must be protected by security schemes to prevent unauthorized modifications. Write operations without authentication create serious security vulnerabilities allowing data tampering. |
+| `owasp-protection-global-unsafe-strict` | hint | Unsafe operations (POST, PUT, PATCH, DELETE) must be protected by non-empty security schemes without explicit opt-outs. Strict authentication requirements ensure write operations cannot bypass security even with empty security arrays. |
+| `owasp-rate-limit` | error | 2XX and 4XX responses must define rate limiting headers (X-RateLimit-Limit, X-RateLimit-Remaining) to prevent API overload. Rate limit headers help clients manage their usage and avoid hitting limits. |
+| `owasp-rate-limit-retry-after` | error | 429 Too Many Requests responses must include a Retry-After header indicating when clients can retry. Retry-After headers prevent thundering herd problems by telling clients exactly when to resume requests. |
+| `owasp-security-hosts-https-oas3` | error | Server URLs must begin with https:// as the only permitted protocol. Using HTTPS is essential for protecting API traffic from interception, tampering, and eavesdropping attacks. |
+| `owasp-string-limit` | error | String schemas must specify maxLength, const, or enum to prevent unbounded data. Without string length limits, APIs are vulnerable to resource exhaustion from extremely long inputs. |
+| `owasp-string-restricted` | error | String schemas must specify format, const, enum, or pattern to restrict content. String restrictions prevent injection attacks and ensure data conforms to expected formats. |
+| `semantic-duplicated-enum` | warning | Enum arrays should not contain duplicate values. Duplicate enum values are redundant and can cause confusion or unexpected behavior in client code generation and validation. |
+| `semantic-no-ambiguous-paths` | error | Path definitions must be unambiguous and distinguishable from each other to ensure correct request routing. Ambiguous paths like `/users/{id}` and `/users/{name}` can cause runtime routing conflicts since both match the same URL pattern. |
+| `semantic-no-eval-in-markdown` | error | Markdown descriptions must not contain eval() statements, which pose serious security risks. Including eval() in documentation could enable code injection attacks if the documentation is rendered in contexts that execute JavaScript. |
+| `semantic-no-script-tags-in-markdown` | error | Markdown descriptions must not contain `<script>` tags, which pose serious security risks. Script tags in documentation could enable cross-site scripting attacks if the documentation is rendered in contexts that execute JavaScript. |
+paths:
+ /users:
+ get:
+ description: "safe"
+ responses:
+ '200':
+ description: ok
+ /admin:
+ get:
+ description: ""
+ responses:
+ '200':
+ description: ok
+`
+
+ expectedErrors := []string{
+ "[6:16] error semantic-no-script-tags-in-markdown description contains content with `