vendorify
diff --git a/go/vendor/github.com/go-openapi/analysis/.gitignore b/go/vendor/github.com/go-openapi/analysis/.gitignore
new file mode 100644
index 0000000..87c3bd3
--- /dev/null
+++ b/go/vendor/github.com/go-openapi/analysis/.gitignore
@@ -0,0 +1,5 @@
+secrets.yml
+coverage.out
+coverage.txt
+*.cov
+.idea
diff --git a/go/vendor/github.com/go-openapi/analysis/.golangci.yml b/go/vendor/github.com/go-openapi/analysis/.golangci.yml
new file mode 100644
index 0000000..922278c
--- /dev/null
+++ b/go/vendor/github.com/go-openapi/analysis/.golangci.yml
@@ -0,0 +1,19 @@
+linters-settings:
+  govet:
+    check-shadowing: true
+  golint:
+    min-confidence: 0
+  gocyclo:
+    min-complexity: 30
+  maligned:
+    suggest-new: true
+  dupl:
+    threshold: 100
+  goconst:
+    min-len: 2
+    min-occurrences: 4
+
+linters:
+  enable-all: true
+  disable:
+    - maligned
diff --git a/go/vendor/github.com/go-openapi/analysis/.travis.yml b/go/vendor/github.com/go-openapi/analysis/.travis.yml
new file mode 100644
index 0000000..9243555
--- /dev/null
+++ b/go/vendor/github.com/go-openapi/analysis/.travis.yml
@@ -0,0 +1,24 @@
+after_success:
+- bash <(curl -s https://codecov.io/bash)
+go:
+- '1.9'
+- 1.10.x
+- 1.11.x
+install:
+- go get -u github.com/axw/gocov/gocov
+- go get -u gopkg.in/matm/v1/gocov-html
+- go get -u github.com/cee-dub/go-junit-report
+- go get -u github.com/docker/go-units
+- go get -u github.com/stretchr/testify/assert
+- go get -u gopkg.in/yaml.v2
+- go get -u github.com/go-openapi/swag
+- go get -u github.com/go-openapi/jsonpointer
+- go get -u github.com/go-openapi/spec
+- go get -u github.com/go-openapi/strfmt
+- go get -u github.com/go-openapi/loads/fmts
+language: go
+notifications:
+  slack:
+    secure: Sf7kZf7ZGbnwWUMpffHwMu5A0cHkLK2MYY32LNTPj4+/3qC3Ghl7+9v4TSLOqOlCwdRNjOGblAq7s+GDJed6/xgRQl1JtCi1klzZNrYX4q01pgTPvvGcwbBkIYgeMaPeIRcK9OZnud7sRXdttozgTOpytps2U6Js32ip7uj5mHSg2ub0FwoSJwlS6dbezZ8+eDhoha0F/guY99BEwx8Bd+zROrT2TFGsSGOFGN6wFc7moCqTHO/YkWib13a2QNXqOxCCVBy/lt76Wp+JkeFppjHlzs/2lP3EAk13RIUAaesdEUHvIHrzCyNJEd3/+KO2DzsWOYfpktd+KBCvgaYOsoo7ubdT3IROeAegZdCgo/6xgCEsmFc9ZcqCfN5yNx2A+BZ2Vwmpws+bQ1E1+B5HDzzaiLcYfG4X2O210QVGVDLWsv1jqD+uPYeHY2WRfh5ZsIUFvaqgUEnwHwrK44/8REAhQavt1QAj5uJpsRd7CkRVPWRNK+yIky+wgbVUFEchRNmS55E7QWf+W4+4QZkQi7vUTMc9nbTUu2Es9NfvfudOpM2wZbn98fjpb/qq/nRv6Bk+ca+7XD5/IgNLMbWp2ouDdzbiHLCOfDUiHiDJhLfFZx9Bwo7ZwfzeOlbrQX66bx7xRKYmOe4DLrXhNcpbsMa8qbfxlZRCmYbubB/Y8h4=
+script:
+- hack/coverage
diff --git a/go/vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md b/go/vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..9322b06
--- /dev/null
+++ b/go/vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md
@@ -0,0 +1,74 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of experience,
+nationality, personal appearance, race, religion, or sexual identity and
+orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+  address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+  professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at ivan+abuse@flanders.co.nz. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/go/vendor/github.com/go-openapi/analysis/LICENSE b/go/vendor/github.com/go-openapi/analysis/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/go/vendor/github.com/go-openapi/analysis/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/go/vendor/github.com/go-openapi/analysis/README.md b/go/vendor/github.com/go-openapi/analysis/README.md
new file mode 100644
index 0000000..3724bfc
--- /dev/null
+++ b/go/vendor/github.com/go-openapi/analysis/README.md
@@ -0,0 +1,9 @@
+# OpenAPI initiative analysis [![Build Status](https://travis-ci.org/go-openapi/analysis.svg?branch=master)](https://travis-ci.org/go-openapi/analysis) [![codecov](https://codecov.io/gh/go-openapi/analysis/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/analysis) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
+
+[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/analysis/master/LICENSE)
+[![GoDoc](https://godoc.org/github.com/go-openapi/analysis?status.svg)](http://godoc.org/github.com/go-openapi/analysis)
+[![GolangCI](https://golangci.com/badges/github.com/go-openapi/analysis.svg)](https://golangci.com)
+[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/analysis)](https://goreportcard.com/report/github.com/go-openapi/analysis)
+
+
+A foundational library to analyze an OAI specification document for easier reasoning about the content.
diff --git a/go/vendor/github.com/go-openapi/analysis/analyzer.go b/go/vendor/github.com/go-openapi/analysis/analyzer.go
new file mode 100644
index 0000000..81dc18f
--- /dev/null
+++ b/go/vendor/github.com/go-openapi/analysis/analyzer.go
@@ -0,0 +1,892 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analysis
+
+import (
+	"fmt"
+	slashpath "path"
+	"strconv"
+	"strings"
+
+	"github.com/go-openapi/jsonpointer"
+	"github.com/go-openapi/spec"
+	"github.com/go-openapi/swag"
+)
+
+type referenceAnalysis struct {
+	schemas        map[string]spec.Ref
+	responses      map[string]spec.Ref
+	parameters     map[string]spec.Ref
+	items          map[string]spec.Ref
+	headerItems    map[string]spec.Ref
+	parameterItems map[string]spec.Ref
+	allRefs        map[string]spec.Ref
+	pathItems      map[string]spec.Ref
+}
+
+func (r *referenceAnalysis) addRef(key string, ref spec.Ref) {
+	r.allRefs["#"+key] = ref
+}
+
+func (r *referenceAnalysis) addItemsRef(key string, items *spec.Items, location string) {
+	r.items["#"+key] = items.Ref
+	r.addRef(key, items.Ref)
+	if location == "header" {
+		// NOTE: in swagger 2.0, headers and parameters (but not body param schemas) are simple schemas
+		// and $ref are not supported here. However it is possible to analyze this.
+		r.headerItems["#"+key] = items.Ref
+	} else {
+		r.parameterItems["#"+key] = items.Ref
+	}
+}
+
+func (r *referenceAnalysis) addSchemaRef(key string, ref SchemaRef) {
+	r.schemas["#"+key] = ref.Schema.Ref
+	r.addRef(key, ref.Schema.Ref)
+}
+
+func (r *referenceAnalysis) addResponseRef(key string, resp *spec.Response) {
+	r.responses["#"+key] = resp.Ref
+	r.addRef(key, resp.Ref)
+}
+
+func (r *referenceAnalysis) addParamRef(key string, param *spec.Parameter) {
+	r.parameters["#"+key] = param.Ref
+	r.addRef(key, param.Ref)
+}
+
+func (r *referenceAnalysis) addPathItemRef(key string, pathItem *spec.PathItem) {
+	r.pathItems["#"+key] = pathItem.Ref
+	r.addRef(key, pathItem.Ref)
+}
+
+type patternAnalysis struct {
+	parameters  map[string]string
+	headers     map[string]string
+	items       map[string]string
+	schemas     map[string]string
+	allPatterns map[string]string
+}
+
+func (p *patternAnalysis) addPattern(key, pattern string) {
+	p.allPatterns["#"+key] = pattern
+}
+
+func (p *patternAnalysis) addParameterPattern(key, pattern string) {
+	p.parameters["#"+key] = pattern
+	p.addPattern(key, pattern)
+}
+
+func (p *patternAnalysis) addHeaderPattern(key, pattern string) {
+	p.headers["#"+key] = pattern
+	p.addPattern(key, pattern)
+}
+
+func (p *patternAnalysis) addItemsPattern(key, pattern string) {
+	p.items["#"+key] = pattern
+	p.addPattern(key, pattern)
+}
+
+func (p *patternAnalysis) addSchemaPattern(key, pattern string) {
+	p.schemas["#"+key] = pattern
+	p.addPattern(key, pattern)
+}
+
+// New takes a swagger spec object and returns an analyzed spec document.
+// The analyzed document contains a number of indices that make it easier to
+// reason about semantics of a swagger specification for use in code generation
+// or validation etc.
+func New(doc *spec.Swagger) *Spec {
+	a := &Spec{
+		spec:        doc,
+		consumes:    make(map[string]struct{}, 150),
+		produces:    make(map[string]struct{}, 150),
+		authSchemes: make(map[string]struct{}, 150),
+		operations:  make(map[string]map[string]*spec.Operation, 150),
+		allSchemas:  make(map[string]SchemaRef, 150),
+		allOfs:      make(map[string]SchemaRef, 150),
+		references: referenceAnalysis{
+			schemas:        make(map[string]spec.Ref, 150),
+			pathItems:      make(map[string]spec.Ref, 150),
+			responses:      make(map[string]spec.Ref, 150),
+			parameters:     make(map[string]spec.Ref, 150),
+			items:          make(map[string]spec.Ref, 150),
+			headerItems:    make(map[string]spec.Ref, 150),
+			parameterItems: make(map[string]spec.Ref, 150),
+			allRefs:        make(map[string]spec.Ref, 150),
+		},
+		patterns: patternAnalysis{
+			parameters:  make(map[string]string, 150),
+			headers:     make(map[string]string, 150),
+			items:       make(map[string]string, 150),
+			schemas:     make(map[string]string, 150),
+			allPatterns: make(map[string]string, 150),
+		},
+	}
+	a.initialize()
+	return a
+}
+
+// Spec is an analyzed specification object. It takes a swagger spec object and turns it into a registry
+// with a bunch of utility methods to act on the information in the spec.
+type Spec struct {
+	spec        *spec.Swagger
+	consumes    map[string]struct{}
+	produces    map[string]struct{}
+	authSchemes map[string]struct{}
+	operations  map[string]map[string]*spec.Operation
+	references  referenceAnalysis
+	patterns    patternAnalysis
+	allSchemas  map[string]SchemaRef
+	allOfs      map[string]SchemaRef
+}
+
+func (s *Spec) reset() {
+	s.consumes = make(map[string]struct{}, 150)
+	s.produces = make(map[string]struct{}, 150)
+	s.authSchemes = make(map[string]struct{}, 150)
+	s.operations = make(map[string]map[string]*spec.Operation, 150)
+	s.allSchemas = make(map[string]SchemaRef, 150)
+	s.allOfs = make(map[string]SchemaRef, 150)
+	s.references.schemas = make(map[string]spec.Ref, 150)
+	s.references.pathItems = make(map[string]spec.Ref, 150)
+	s.references.responses = make(map[string]spec.Ref, 150)
+	s.references.parameters = make(map[string]spec.Ref, 150)
+	s.references.items = make(map[string]spec.Ref, 150)
+	s.references.headerItems = make(map[string]spec.Ref, 150)
+	s.references.parameterItems = make(map[string]spec.Ref, 150)
+	s.references.allRefs = make(map[string]spec.Ref, 150)
+	s.patterns.parameters = make(map[string]string, 150)
+	s.patterns.headers = make(map[string]string, 150)
+	s.patterns.items = make(map[string]string, 150)
+	s.patterns.schemas = make(map[string]string, 150)
+	s.patterns.allPatterns = make(map[string]string, 150)
+}
+
+func (s *Spec) reload() {
+	s.reset()
+	s.initialize()
+}
+
+func (s *Spec) initialize() {
+	for _, c := range s.spec.Consumes {
+		s.consumes[c] = struct{}{}
+	}
+	for _, c := range s.spec.Produces {
+		s.produces[c] = struct{}{}
+	}
+	for _, ss := range s.spec.Security {
+		for k := range ss {
+			s.authSchemes[k] = struct{}{}
+		}
+	}
+	for path, pathItem := range s.AllPaths() {
+		s.analyzeOperations(path, &pathItem)
+	}
+
+	for name, parameter := range s.spec.Parameters {
+		refPref := slashpath.Join("/parameters", jsonpointer.Escape(name))
+		if parameter.Items != nil {
+			s.analyzeItems("items", parameter.Items, refPref, "parameter")
+		}
+		if parameter.In == "body" && parameter.Schema != nil {
+			s.analyzeSchema("schema", *parameter.Schema, refPref)
+		}
+		if parameter.Pattern != "" {
+			s.patterns.addParameterPattern(refPref, parameter.Pattern)
+		}
+	}
+
+	for name, response := range s.spec.Responses {
+		refPref := slashpath.Join("/responses", jsonpointer.Escape(name))
+		for k, v := range response.Headers {
+			hRefPref := slashpath.Join(refPref, "headers", k)
+			if v.Items != nil {
+				s.analyzeItems("items", v.Items, hRefPref, "header")
+			}
+			if v.Pattern != "" {
+				s.patterns.addHeaderPattern(hRefPref, v.Pattern)
+			}
+		}
+		if response.Schema != nil {
+			s.analyzeSchema("schema", *response.Schema, refPref)
+		}
+	}
+
+	for name, schema := range s.spec.Definitions {
+		s.analyzeSchema(name, schema, "/definitions")
+	}
+	// TODO: after analyzing all things and flattening schemas etc
+	// resolve all the collected references to their final representations
+	// best put in a separate method because this could get expensive
+}
+
+func (s *Spec) analyzeOperations(path string, pi *spec.PathItem) {
+	// TODO: resolve refs here?
+	// Currently, operations declared via pathItem $ref are known only after expansion
+	op := pi
+	if pi.Ref.String() != "" {
+		key := slashpath.Join("/paths", jsonpointer.Escape(path))
+		s.references.addPathItemRef(key, pi)
+	}
+	s.analyzeOperation("GET", path, op.Get)
+	s.analyzeOperation("PUT", path, op.Put)
+	s.analyzeOperation("POST", path, op.Post)
+	s.analyzeOperation("PATCH", path, op.Patch)
+	s.analyzeOperation("DELETE", path, op.Delete)
+	s.analyzeOperation("HEAD", path, op.Head)
+	s.analyzeOperation("OPTIONS", path, op.Options)
+	for i, param := range op.Parameters {
+		refPref := slashpath.Join("/paths", jsonpointer.Escape(path), "parameters", strconv.Itoa(i))
+		if param.Ref.String() != "" {
+			s.references.addParamRef(refPref, &param)
+		}
+		if param.Pattern != "" {
+			s.patterns.addParameterPattern(refPref, param.Pattern)
+		}
+		if param.Items != nil {
+			s.analyzeItems("items", param.Items, refPref, "parameter")
+		}
+		if param.Schema != nil {
+			s.analyzeSchema("schema", *param.Schema, refPref)
+		}
+	}
+}
+
+func (s *Spec) analyzeItems(name string, items *spec.Items, prefix, location string) {
+	if items == nil {
+		return
+	}
+	refPref := slashpath.Join(prefix, name)
+	s.analyzeItems(name, items.Items, refPref, location)
+	if items.Ref.String() != "" {
+		s.references.addItemsRef(refPref, items, location)
+	}
+	if items.Pattern != "" {
+		s.patterns.addItemsPattern(refPref, items.Pattern)
+	}
+}
+
+func (s *Spec) analyzeOperation(method, path string, op *spec.Operation) {
+	if op == nil {
+		return
+	}
+
+	for _, c := range op.Consumes {
+		s.consumes[c] = struct{}{}
+	}
+	for _, c := range op.Produces {
+		s.produces[c] = struct{}{}
+	}
+	for _, ss := range op.Security {
+		for k := range ss {
+			s.authSchemes[k] = struct{}{}
+		}
+	}
+	if _, ok := s.operations[method]; !ok {
+		s.operations[method] = make(map[string]*spec.Operation)
+	}
+	s.operations[method][path] = op
+	prefix := slashpath.Join("/paths", jsonpointer.Escape(path), strings.ToLower(method))
+	for i, param := range op.Parameters {
+		refPref := slashpath.Join(prefix, "parameters", strconv.Itoa(i))
+		if param.Ref.String() != "" {
+			s.references.addParamRef(refPref, &param)
+		}
+		if param.Pattern != "" {
+			s.patterns.addParameterPattern(refPref, param.Pattern)
+		}
+		s.analyzeItems("items", param.Items, refPref, "parameter")
+		if param.In == "body" && param.Schema != nil {
+			s.analyzeSchema("schema", *param.Schema, refPref)
+		}
+	}
+	if op.Responses != nil {
+		if op.Responses.Default != nil {
+			refPref := slashpath.Join(prefix, "responses", "default")
+			if op.Responses.Default.Ref.String() != "" {
+				s.references.addResponseRef(refPref, op.Responses.Default)
+			}
+			for k, v := range op.Responses.Default.Headers {
+				hRefPref := slashpath.Join(refPref, "headers", k)
+				s.analyzeItems("items", v.Items, hRefPref, "header")
+				if v.Pattern != "" {
+					s.patterns.addHeaderPattern(hRefPref, v.Pattern)
+				}
+			}
+			if op.Responses.Default.Schema != nil {
+				s.analyzeSchema("schema", *op.Responses.Default.Schema, refPref)
+			}
+		}
+		for k, res := range op.Responses.StatusCodeResponses {
+			refPref := slashpath.Join(prefix, "responses", strconv.Itoa(k))
+			if res.Ref.String() != "" {
+				s.references.addResponseRef(refPref, &res)
+			}
+			for k, v := range res.Headers {
+				hRefPref := slashpath.Join(refPref, "headers", k)
+				s.analyzeItems("items", v.Items, hRefPref, "header")
+				if v.Pattern != "" {
+					s.patterns.addHeaderPattern(hRefPref, v.Pattern)
+				}
+			}
+			if res.Schema != nil {
+				s.analyzeSchema("schema", *res.Schema, refPref)
+			}
+		}
+	}
+}
+
+func (s *Spec) analyzeSchema(name string, schema spec.Schema, prefix string) {
+	refURI := slashpath.Join(prefix, jsonpointer.Escape(name))
+	schRef := SchemaRef{
+		Name:     name,
+		Schema:   &schema,
+		Ref:      spec.MustCreateRef("#" + refURI),
+		TopLevel: prefix == "/definitions",
+	}
+
+	s.allSchemas["#"+refURI] = schRef
+
+	if schema.Ref.String() != "" {
+		s.references.addSchemaRef(refURI, schRef)
+	}
+	if schema.Pattern != "" {
+		s.patterns.addSchemaPattern(refURI, schema.Pattern)
+	}
+
+	for k, v := range schema.Definitions {
+		s.analyzeSchema(k, v, slashpath.Join(refURI, "definitions"))
+	}
+	for k, v := range schema.Properties {
+		s.analyzeSchema(k, v, slashpath.Join(refURI, "properties"))
+	}
+	for k, v := range schema.PatternProperties {
+		// NOTE: swagger 2.0 does not support PatternProperties.
+		// However it is possible to analyze this in a schema
+		s.analyzeSchema(k, v, slashpath.Join(refURI, "patternProperties"))
+	}
+	for i, v := range schema.AllOf {
+		s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "allOf"))
+	}
+	if len(schema.AllOf) > 0 {
+		s.allOfs["#"+refURI] = schRef
+	}
+	for i, v := range schema.AnyOf {
+		// NOTE: swagger 2.0 does not support anyOf constructs.
+		// However it is possible to analyze this in a schema
+		s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "anyOf"))
+	}
+	for i, v := range schema.OneOf {
+		// NOTE: swagger 2.0 does not support oneOf constructs.
+		// However it is possible to analyze this in a schema
+		s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "oneOf"))
+	}
+	if schema.Not != nil {
+		// NOTE: swagger 2.0 does not support "not" constructs.
+		// However it is possible to analyze this in a schema
+		s.analyzeSchema("not", *schema.Not, refURI)
+	}
+	if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil {
+		s.analyzeSchema("additionalProperties", *schema.AdditionalProperties.Schema, refURI)
+	}
+	if schema.AdditionalItems != nil && schema.AdditionalItems.Schema != nil {
+		// NOTE: swagger 2.0 does not support AdditionalItems.
+		// However it is possible to analyze this in a schema
+		s.analyzeSchema("additionalItems", *schema.AdditionalItems.Schema, refURI)
+	}
+	if schema.Items != nil {
+		if schema.Items.Schema != nil {
+			s.analyzeSchema("items", *schema.Items.Schema, refURI)
+		}
+		for i, sch := range schema.Items.Schemas {
+			s.analyzeSchema(strconv.Itoa(i), sch, slashpath.Join(refURI, "items"))
+		}
+	}
+}
+
+// SecurityRequirement is a representation of a security requirement for an operation
+type SecurityRequirement struct {
+	Name   string
+	Scopes []string
+}
+
+// SecurityRequirementsFor gets the security requirements for the operation
+func (s *Spec) SecurityRequirementsFor(operation *spec.Operation) [][]SecurityRequirement {
+	if s.spec.Security == nil && operation.Security == nil {
+		return nil
+	}
+
+	schemes := s.spec.Security
+	if operation.Security != nil {
+		schemes = operation.Security
+	}
+
+	result := [][]SecurityRequirement{}
+	for _, scheme := range schemes {
+		if len(scheme) == 0 {
+			// append a zero object for anonymous
+			result = append(result, []SecurityRequirement{{}})
+			continue
+		}
+		var reqs []SecurityRequirement
+		for k, v := range scheme {
+			if v == nil {
+				v = []string{}
+			}
+			reqs = append(reqs, SecurityRequirement{Name: k, Scopes: v})
+		}
+		result = append(result, reqs)
+	}
+	return result
+}
+
+// SecurityDefinitionsForRequirements gets the matching security definitions for a set of requirements
+func (s *Spec) SecurityDefinitionsForRequirements(requirements []SecurityRequirement) map[string]spec.SecurityScheme {
+	result := make(map[string]spec.SecurityScheme)
+
+	for _, v := range requirements {
+		if definition, ok := s.spec.SecurityDefinitions[v.Name]; ok {
+			if definition != nil {
+				result[v.Name] = *definition
+			}
+		}
+	}
+	return result
+}
+
+// SecurityDefinitionsFor gets the matching security definitions for a set of requirements
+func (s *Spec) SecurityDefinitionsFor(operation *spec.Operation) map[string]spec.SecurityScheme {
+	requirements := s.SecurityRequirementsFor(operation)
+	if len(requirements) == 0 {
+		return nil
+	}
+
+	result := make(map[string]spec.SecurityScheme)
+	for _, reqs := range requirements {
+		for _, v := range reqs {
+			if v.Name == "" {
+				// optional requirement
+				continue
+			}
+			if _, ok := result[v.Name]; ok {
+				// duplicate requirement
+				continue
+			}
+			if definition, ok := s.spec.SecurityDefinitions[v.Name]; ok {
+				if definition != nil {
+					result[v.Name] = *definition
+				}
+			}
+		}
+	}
+	return result
+}
+
+// ConsumesFor gets the mediatypes for the operation
+func (s *Spec) ConsumesFor(operation *spec.Operation) []string {
+
+	if len(operation.Consumes) == 0 {
+		cons := make(map[string]struct{}, len(s.spec.Consumes))
+		for _, k := range s.spec.Consumes {
+			cons[k] = struct{}{}
+		}
+		return s.structMapKeys(cons)
+	}
+
+	cons := make(map[string]struct{}, len(operation.Consumes))
+	for _, c := range operation.Consumes {
+		cons[c] = struct{}{}
+	}
+	return s.structMapKeys(cons)
+}
+
+// ProducesFor gets the mediatypes for the operation
+func (s *Spec) ProducesFor(operation *spec.Operation) []string {
+	if len(operation.Produces) == 0 {
+		prod := make(map[string]struct{}, len(s.spec.Produces))
+		for _, k := range s.spec.Produces {
+			prod[k] = struct{}{}
+		}
+		return s.structMapKeys(prod)
+	}
+
+	prod := make(map[string]struct{}, len(operation.Produces))
+	for _, c := range operation.Produces {
+		prod[c] = struct{}{}
+	}
+	return s.structMapKeys(prod)
+}
+
+func mapKeyFromParam(param *spec.Parameter) string {
+	return fmt.Sprintf("%s#%s", param.In, fieldNameFromParam(param))
+}
+
+func fieldNameFromParam(param *spec.Parameter) string {
+	// TODO: this should be x-go-name
+	if nm, ok := param.Extensions.GetString("go-name"); ok {
+		return nm
+	}
+	return swag.ToGoName(param.Name)
+}
+
+// ErrorOnParamFunc is a callback function to be invoked
+// whenever an error is encountered while resolving references
+// on parameters.
+//
+// This function takes as input the spec.Parameter which triggered the
+// error and the error itself.
+//
+// If the callback function returns false, the calling function should bail.
+//
+// If it returns true, the calling function should continue evaluating parameters.
+// A nil ErrorOnParamFunc must be evaluated as equivalent to panic().
+type ErrorOnParamFunc func(spec.Parameter, error) bool
+
+func (s *Spec) paramsAsMap(parameters []spec.Parameter, res map[string]spec.Parameter, callmeOnError ErrorOnParamFunc) {
+	for _, param := range parameters {
+		pr := param
+		if pr.Ref.String() != "" {
+			obj, _, err := pr.Ref.GetPointer().Get(s.spec)
+			if err != nil {
+				if callmeOnError != nil {
+					if callmeOnError(param, fmt.Errorf("invalid reference: %q", pr.Ref.String())) {
+						continue
+					}
+					break
+				} else {
+					panic(fmt.Sprintf("invalid reference: %q", pr.Ref.String()))
+				}
+			}
+			if objAsParam, ok := obj.(spec.Parameter); ok {
+				pr = objAsParam
+			} else {
+				if callmeOnError != nil {
+					if callmeOnError(param, fmt.Errorf("resolved reference is not a parameter: %q", pr.Ref.String())) {
+						continue
+					}
+					break
+				} else {
+					panic(fmt.Sprintf("resolved reference is not a parameter: %q", pr.Ref.String()))
+				}
+			}
+		}
+		res[mapKeyFromParam(&pr)] = pr
+	}
+}
+
+// ParametersFor the specified operation id.
+//
+// Assumes parameters properly resolve references if any and that
+// such references actually resolve to a parameter object.
+// Otherwise, panics.
+func (s *Spec) ParametersFor(operationID string) []spec.Parameter {
+	return s.SafeParametersFor(operationID, nil)
+}
+
+// SafeParametersFor the specified operation id.
+//
+// Does not assume parameters properly resolve references or that
+// such references actually resolve to a parameter object.
+//
+// Upon error, invoke a ErrorOnParamFunc callback with the erroneous
+// parameters. If the callback is set to nil, panics upon errors.
+func (s *Spec) SafeParametersFor(operationID string, callmeOnError ErrorOnParamFunc) []spec.Parameter {
+	gatherParams := func(pi *spec.PathItem, op *spec.Operation) []spec.Parameter {
+		bag := make(map[string]spec.Parameter)
+		s.paramsAsMap(pi.Parameters, bag, callmeOnError)
+		s.paramsAsMap(op.Parameters, bag, callmeOnError)
+
+		var res []spec.Parameter
+		for _, v := range bag {
+			res = append(res, v)
+		}
+		return res
+	}
+	for _, pi := range s.spec.Paths.Paths {
+		if pi.Get != nil && pi.Get.ID == operationID {
+			return gatherParams(&pi, pi.Get)
+		}
+		if pi.Head != nil && pi.Head.ID == operationID {
+			return gatherParams(&pi, pi.Head)
+		}
+		if pi.Options != nil && pi.Options.ID == operationID {
+			return gatherParams(&pi, pi.Options)
+		}
+		if pi.Post != nil && pi.Post.ID == operationID {
+			return gatherParams(&pi, pi.Post)
+		}
+		if pi.Patch != nil && pi.Patch.ID == operationID {
+			return gatherParams(&pi, pi.Patch)
+		}
+		if pi.Put != nil && pi.Put.ID == operationID {
+			return gatherParams(&pi, pi.Put)
+		}
+		if pi.Delete != nil && pi.Delete.ID == operationID {
+			return gatherParams(&pi, pi.Delete)
+		}
+	}
+	return nil
+}
+
+// ParamsFor the specified method and path. Aggregates them with the defaults etc, so it's all the params that
+// apply for the method and path.
+//
+// Assumes parameters properly resolve references if any and that
+// such references actually resolve to a parameter object.
+// Otherwise, panics.
+func (s *Spec) ParamsFor(method, path string) map[string]spec.Parameter {
+	return s.SafeParamsFor(method, path, nil)
+}
+
+// SafeParamsFor the specified method and path. Aggregates them with the defaults etc, so it's all the params that
+// apply for the method and path.
+//
+// Does not assume parameters properly resolve references or that
+// such references actually resolve to a parameter object.
+//
+// Upon error, invoke a ErrorOnParamFunc callback with the erroneous
+// parameters. If the callback is set to nil, panics upon errors.
+func (s *Spec) SafeParamsFor(method, path string, callmeOnError ErrorOnParamFunc) map[string]spec.Parameter {
+	res := make(map[string]spec.Parameter)
+	if pi, ok := s.spec.Paths.Paths[path]; ok {
+		s.paramsAsMap(pi.Parameters, res, callmeOnError)
+		s.paramsAsMap(s.operations[strings.ToUpper(method)][path].Parameters, res, callmeOnError)
+	}
+	return res
+}
+
+// OperationForName gets the operation for the given id
+func (s *Spec) OperationForName(operationID string) (string, string, *spec.Operation, bool) {
+	for method, pathItem := range s.operations {
+		for path, op := range pathItem {
+			if operationID == op.ID {
+				return method, path, op, true
+			}
+		}
+	}
+	return "", "", nil, false
+}
+
+// OperationFor the given method and path
+func (s *Spec) OperationFor(method, path string) (*spec.Operation, bool) {
+	if mp, ok := s.operations[strings.ToUpper(method)]; ok {
+		op, fn := mp[path]
+		return op, fn
+	}
+	return nil, false
+}
+
+// Operations gathers all the operations specified in the spec document
+func (s *Spec) Operations() map[string]map[string]*spec.Operation {
+	return s.operations
+}
+
+func (s *Spec) structMapKeys(mp map[string]struct{}) []string {
+	if len(mp) == 0 {
+		return nil
+	}
+
+	result := make([]string, 0, len(mp))
+	for k := range mp {
+		result = append(result, k)
+	}
+	return result
+}
+
+// AllPaths returns all the paths in the swagger spec
+func (s *Spec) AllPaths() map[string]spec.PathItem {
+	if s.spec == nil || s.spec.Paths == nil {
+		return nil
+	}
+	return s.spec.Paths.Paths
+}
+
+// OperationIDs gets all the operation ids based on method an dpath
+func (s *Spec) OperationIDs() []string {
+	if len(s.operations) == 0 {
+		return nil
+	}
+	result := make([]string, 0, len(s.operations))
+	for method, v := range s.operations {
+		for p, o := range v {
+			if o.ID != "" {
+				result = append(result, o.ID)
+			} else {
+				result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p))
+			}
+		}
+	}
+	return result
+}
+
+// OperationMethodPaths gets all the operation ids based on method an dpath
+func (s *Spec) OperationMethodPaths() []string {
+	if len(s.operations) == 0 {
+		return nil
+	}
+	result := make([]string, 0, len(s.operations))
+	for method, v := range s.operations {
+		for p := range v {
+			result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p))
+		}
+	}
+	return result
+}
+
+// RequiredConsumes gets all the distinct consumes that are specified in the specification document
+func (s *Spec) RequiredConsumes() []string {
+	return s.structMapKeys(s.consumes)
+}
+
+// RequiredProduces gets all the distinct produces that are specified in the specification document
+func (s *Spec) RequiredProduces() []string {
+	return s.structMapKeys(s.produces)
+}
+
+// RequiredSecuritySchemes gets all the distinct security schemes that are specified in the swagger spec
+func (s *Spec) RequiredSecuritySchemes() []string {
+	return s.structMapKeys(s.authSchemes)
+}
+
+// SchemaRef is a reference to a schema
+type SchemaRef struct {
+	Name     string
+	Ref      spec.Ref
+	Schema   *spec.Schema
+	TopLevel bool
+}
+
+// SchemasWithAllOf returns schema references to all schemas that are defined
+// with an allOf key
+func (s *Spec) SchemasWithAllOf() (result []SchemaRef) {
+	for _, v := range s.allOfs {
+		result = append(result, v)
+	}
+	return
+}
+
+// AllDefinitions returns schema references for all the definitions that were discovered
+func (s *Spec) AllDefinitions() (result []SchemaRef) {
+	for _, v := range s.allSchemas {
+		result = append(result, v)
+	}
+	return
+}
+
+// AllDefinitionReferences returns json refs for all the discovered schemas
+func (s *Spec) AllDefinitionReferences() (result []string) {
+	for _, v := range s.references.schemas {
+		result = append(result, v.String())
+	}
+	return
+}
+
+// AllParameterReferences returns json refs for all the discovered parameters
+func (s *Spec) AllParameterReferences() (result []string) {
+	for _, v := range s.references.parameters {
+		result = append(result, v.String())
+	}
+	return
+}
+
+// AllResponseReferences returns json refs for all the discovered responses
+func (s *Spec) AllResponseReferences() (result []string) {
+	for _, v := range s.references.responses {
+		result = append(result, v.String())
+	}
+	return
+}
+
+// AllPathItemReferences returns the references for all the items
+func (s *Spec) AllPathItemReferences() (result []string) {
+	for _, v := range s.references.pathItems {
+		result = append(result, v.String())
+	}
+	return
+}
+
+// AllItemsReferences returns the references for all the items in simple schemas (parameters or headers).
+//
+// NOTE: since Swagger 2.0 forbids $ref in simple params, this should always yield an empty slice for a valid
+// Swagger 2.0 spec.
+func (s *Spec) AllItemsReferences() (result []string) {
+	for _, v := range s.references.items {
+		result = append(result, v.String())
+	}
+	return
+}
+
+// AllReferences returns all the references found in the document, with possible duplicates
+func (s *Spec) AllReferences() (result []string) {
+	for _, v := range s.references.allRefs {
+		result = append(result, v.String())
+	}
+	return
+}
+
+// AllRefs returns all the unique references found in the document
+func (s *Spec) AllRefs() (result []spec.Ref) {
+	set := make(map[string]struct{})
+	for _, v := range s.references.allRefs {
+		a := v.String()
+		if a == "" {
+			continue
+		}
+		if _, ok := set[a]; !ok {
+			set[a] = struct{}{}
+			result = append(result, v)
+		}
+	}
+	return
+}
+
+func cloneStringMap(source map[string]string) map[string]string {
+	res := make(map[string]string, len(source))
+	for k, v := range source {
+		res[k] = v
+	}
+	return res
+}
+
+// ParameterPatterns returns all the patterns found in parameters
+// the map is cloned to avoid accidental changes
+func (s *Spec) ParameterPatterns() map[string]string {
+	return cloneStringMap(s.patterns.parameters)
+}
+
+// HeaderPatterns returns all the patterns found in response headers
+// the map is cloned to avoid accidental changes
+func (s *Spec) HeaderPatterns() map[string]string {
+	return cloneStringMap(s.patterns.headers)
+}
+
+// ItemsPatterns returns all the patterns found in simple array items
+// the map is cloned to avoid accidental changes
+func (s *Spec) ItemsPatterns() map[string]string {
+	return cloneStringMap(s.patterns.items)
+}
+
+// SchemaPatterns returns all the patterns found in schemas
+// the map is cloned to avoid accidental changes
+func (s *Spec) SchemaPatterns() map[string]string {
+	return cloneStringMap(s.patterns.schemas)
+}
+
+// AllPatterns returns all the patterns found in the spec
+// the map is cloned to avoid accidental changes
+func (s *Spec) AllPatterns() map[string]string {
+	return cloneStringMap(s.patterns.allPatterns)
+}
diff --git a/go/vendor/github.com/go-openapi/analysis/debug.go b/go/vendor/github.com/go-openapi/analysis/debug.go
new file mode 100644
index 0000000..84cc4e5
--- /dev/null
+++ b/go/vendor/github.com/go-openapi/analysis/debug.go
@@ -0,0 +1,47 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analysis
+
+import (
+	"fmt"
+	"log"
+	"os"
+	"path/filepath"
+	"runtime"
+)
+
+var (
+	// Debug is true when the SWAGGER_DEBUG env var is not empty.
+	// It enables a more verbose logging of the spec analyzer.
+	Debug = os.Getenv("SWAGGER_DEBUG") != ""
+	// analysisLogger is a debug logger for this package
+	analysisLogger *log.Logger
+)
+
+func init() {
+	debugOptions()
+}
+
+func debugOptions() {
+	analysisLogger = log.New(os.Stdout, "analysis:", log.LstdFlags)
+}
+
+func debugLog(msg string, args ...interface{}) {
+	// A private, trivial trace logger, based on go-openapi/spec/expander.go:debugLog()
+	if Debug {
+		_, file1, pos1, _ := runtime.Caller(1)
+		analysisLogger.Printf("%s:%d: %s", filepath.Base(file1), pos1, fmt.Sprintf(msg, args...))
+	}
+}
diff --git a/go/vendor/github.com/go-openapi/analysis/doc.go b/go/vendor/github.com/go-openapi/analysis/doc.go
new file mode 100644
index 0000000..d5294c0
--- /dev/null
+++ b/go/vendor/github.com/go-openapi/analysis/doc.go
@@ -0,0 +1,43 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/*
+Package analysis provides methods to work with a Swagger specification document from
+package go-openapi/spec.
+
+Analyzing a specification
+
+An analysed specification object (type Spec) provides methods to work with swagger definition.
+
+Flattening or expanding a specification
+
+Flattening a specification bundles all remote $ref in the main spec document.
+Depending on flattening options, additional preprocessing may take place:
+  - full flattening: replacing all inline complex constructs by a named entry in #/definitions
+  - expand: replace all $ref's in the document by their expanded content
+
+Merging several specifications
+
+Mixin several specifications merges all Swagger constructs, and warns about found conflicts.
+
+Fixing a specification
+
+Unmarshalling a specification with golang json unmarshalling may lead to
+some unwanted result on present but empty fields.
+
+Analyzing a Swagger schema
+
+Swagger schemas are analyzed to determine their complexity and qualify their content.
+*/
+package analysis
diff --git a/go/vendor/github.com/go-openapi/analysis/fixer.go b/go/vendor/github.com/go-openapi/analysis/fixer.go
new file mode 100644
index 0000000..bfe014c
--- /dev/null
+++ b/go/vendor/github.com/go-openapi/analysis/fixer.go
@@ -0,0 +1,76 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analysis
+
+import "github.com/go-openapi/spec"
+
+// FixEmptyResponseDescriptions replaces empty ("") response
+// descriptions in the input with "(empty)" to ensure that the
+// resulting Swagger is stays valid.  The problem appears to arise
+// from reading in valid specs that have a explicit response
+// description of "" (valid, response.description is required), but
+// due to zero values being omitted upon re-serializing (omitempty) we
+// lose them unless we stick some chars in there.
+func FixEmptyResponseDescriptions(s *spec.Swagger) {
+	if s.Paths != nil {
+		for _, v := range s.Paths.Paths {
+			if v.Get != nil {
+				FixEmptyDescs(v.Get.Responses)
+			}
+			if v.Put != nil {
+				FixEmptyDescs(v.Put.Responses)
+			}
+			if v.Post != nil {
+				FixEmptyDescs(v.Post.Responses)
+			}
+			if v.Delete != nil {
+				FixEmptyDescs(v.Delete.Responses)
+			}
+			if v.Options != nil {
+				FixEmptyDescs(v.Options.Responses)
+			}
+			if v.Head != nil {
+				FixEmptyDescs(v.Head.Responses)
+			}
+			if v.Patch != nil {
+				FixEmptyDescs(v.Patch.Responses)
+			}
+		}
+	}
+	for k, v := range s.Responses {
+		FixEmptyDesc(&v)
+		s.Responses[k] = v
+	}
+}
+
+// FixEmptyDescs adds "(empty)" as the description for any Response in
+// the given Responses object that doesn't already have one.
+func FixEmptyDescs(rs *spec.Responses) {
+	FixEmptyDesc(rs.Default)
+	for k, v := range rs.StatusCodeResponses {
+		FixEmptyDesc(&v)
+		rs.StatusCodeResponses[k] = v
+	}
+}
+
+// FixEmptyDesc adds "(empty)" as the description to the given
+// Response object if it doesn't already have one and isn't a
+// ref. No-op on nil input.
+func FixEmptyDesc(rs *spec.Response) {
+	if rs == nil || rs.Description != "" || rs.Ref.Ref.GetURL() != nil {
+		return
+	}
+	rs.Description = "(empty)"
+}
diff --git a/go/vendor/github.com/go-openapi/analysis/flatten.go b/go/vendor/github.com/go-openapi/analysis/flatten.go
new file mode 100644
index 0000000..5d98f21
--- /dev/null
+++ b/go/vendor/github.com/go-openapi/analysis/flatten.go
@@ -0,0 +1,1500 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analysis
+
+import (
+	"fmt"
+	"log"
+	"net/http"
+	"net/url"
+	"os"
+	slashpath "path"
+	"path/filepath"
+	"sort"
+	"strings"
+
+	"strconv"
+
+	"github.com/go-openapi/analysis/internal"
+	"github.com/go-openapi/jsonpointer"
+	swspec "github.com/go-openapi/spec"
+	"github.com/go-openapi/swag"
+)
+
+// FlattenOpts configuration for flattening a swagger specification.
+type FlattenOpts struct {
+	Spec           *Spec    // The analyzed spec to work with
+	flattenContext *context // Internal context to track flattening activity
+
+	BasePath string
+
+	// Flattening options
+	Expand       bool // If Expand is true, we skip flattening the spec and expand it instead
+	Minimal      bool
+	Verbose      bool
+	RemoveUnused bool
+
+	/* Extra keys */
+	_ struct{} // require keys
+}
+
+// ExpandOpts creates a spec.ExpandOptions to configure expanding a specification document.
+func (f *FlattenOpts) ExpandOpts(skipSchemas bool) *swspec.ExpandOptions {
+	return &swspec.ExpandOptions{RelativeBase: f.BasePath, SkipSchemas: skipSchemas}
+}
+
+// Swagger gets the swagger specification for this flatten operation
+func (f *FlattenOpts) Swagger() *swspec.Swagger {
+	return f.Spec.spec
+}
+
+// newRef stores information about refs created during the flattening process
+type newRef struct {
+	key      string
+	newName  string
+	path     string
+	isOAIGen bool
+	resolved bool
+	schema   *swspec.Schema
+	parents  []string
+}
+
+// context stores intermediary results from flatten
+type context struct {
+	newRefs  map[string]*newRef
+	warnings []string
+}
+
+func newContext() *context {
+	return &context{
+		newRefs:  make(map[string]*newRef, 150),
+		warnings: make([]string, 0),
+	}
+}
+
+// Flatten an analyzed spec and produce a self-contained spec bundle.
+//
+// There is a minimal and a full flattening mode.
+//
+// Minimally flattening a spec means:
+//  - Expanding parameters, responses, path items, parameter items and header items (references to schemas are left
+//    unscathed)
+//  - Importing external (http, file) references so they become internal to the document
+//  - Moving every JSON pointer to a $ref to a named definition (i.e. the reworked spec does not contain pointers
+//    like "$ref": "#/definitions/myObject/allOfs/1")
+//
+// A minimally flattened spec thus guarantees the following properties:
+//  - all $refs point to a local definition (i.e. '#/definitions/...')
+//  - definitions are unique
+//
+// NOTE: arbitrary JSON pointers (other than $refs to top level definitions) are rewritten as definitions if they
+// represent a complex schema or express commonality in the spec.
+// Otherwise, they are simply expanded.
+//
+// Minimal flattening is necessary and sufficient for codegen rendering using go-swagger.
+//
+// Fully flattening a spec means:
+//  - Moving every complex inline schema to be a definition with an auto-generated name in a depth-first fashion.
+//
+// By complex, we mean every JSON object with some properties.
+// Arrays, when they do not define a tuple,
+// or empty objects with or without additionalProperties, are not considered complex and remain inline.
+//
+// NOTE: rewritten schemas get a vendor extension x-go-gen-location so we know from which part of the spec definitions
+// have been created.
+//
+// Available flattening options:
+//  - Minimal: stops flattening after minimal $ref processing, leaving schema constructs untouched
+//  - Expand: expand all $ref's in the document (inoperant if Minimal set to true)
+//  - Verbose: croaks about name conflicts detected
+//  - RemoveUnused: removes unused parameters, responses and definitions after expansion/flattening
+//
+// NOTE: expansion removes all $ref save circular $ref, which remain in place
+//
+// TODO: additional options
+//  - ProgagateNameExtensions: ensure that created entries properly follow naming rules when their parent have set a
+//    x-go-name extension
+//  - LiftAllOfs:
+//     - limit the flattening of allOf members when simple objects
+//     - merge allOf with validation only
+//     - merge allOf with extensions only
+//     - ...
+//
+func Flatten(opts FlattenOpts) error {
+	// Make sure opts.BasePath is an absolute path
+	if !filepath.IsAbs(opts.BasePath) {
+		cwd, _ := os.Getwd()
+		opts.BasePath = filepath.Join(cwd, opts.BasePath)
+	}
+
+	opts.flattenContext = newContext()
+
+	// recursively expand responses, parameters, path items and items in simple schemas
+	// TODO: we should not expand discriminated types
+	if err := swspec.ExpandSpec(opts.Swagger(), opts.ExpandOpts(!opts.Expand)); err != nil {
+		return err
+	}
+
+	// strip current file from $ref's, so we can recognize them as proper definitions
+	// In particular, this works around for issue go-openapi/spec#76: leading absolute file in $ref is stripped
+	if err := normalizeRef(&opts); err != nil {
+		return err
+	}
+
+	if opts.RemoveUnused {
+		// optionally removes shared parameters and responses already expanded (now unused)
+		// default parameters (i.e. under paths) remain.
+		opts.Swagger().Parameters = nil
+		opts.Swagger().Responses = nil
+	}
+
+	opts.Spec.reload() // re-analyze
+
+	// at this point there are no other references left but schemas
+	if err := importExternalReferences(&opts); err != nil {
+		return err
+	}
+	opts.Spec.reload() // re-analyze
+
+	if !opts.Minimal && !opts.Expand {
+		// full flattening: rewrite inline schemas (schemas that aren't simple types or arrays or maps)
+		if err := nameInlinedSchemas(&opts); err != nil {
+			return err
+		}
+
+		opts.Spec.reload() // re-analyze
+	}
+
+	// rewrite JSON pointers other than $ref to named definitions
+	// and attempts to resolve conflicting names
+	if err := stripPointersAndOAIGen(&opts); err != nil {
+		return err
+	}
+
+	if opts.RemoveUnused {
+		// remove unused definitions
+		expected := make(map[string]struct{})
+		for k := range opts.Swagger().Definitions {
+			expected[slashpath.Join(definitionsPath, jsonpointer.Escape(k))] = struct{}{}
+		}
+		for _, k := range opts.Spec.AllDefinitionReferences() {
+			if _, ok := expected[k]; ok {
+				delete(expected, k)
+			}
+		}
+		for k := range expected {
+			debugLog("removing unused definition %s", slashpath.Base(k))
+			if opts.Verbose {
+				log.Printf("info: removing unused definition: %s", slashpath.Base(k))
+			}
+			delete(opts.Swagger().Definitions, slashpath.Base(k))
+		}
+		opts.Spec.reload() // re-analyze
+	}
+
+	// TODO: simplify known schema patterns to flat objects with properties
+	// examples:
+	//  - lift simple allOf object,
+	//  - empty allOf with validation only or extensions only
+	//  - rework allOf arrays
+	//  - rework allOf additionalProperties
+
+	if opts.Verbose {
+		// issue notifications
+		croak(&opts)
+	}
+	return nil
+}
+
+// isAnalyzedAsComplex determines if an analyzed schema is eligible to flattening (i.e. it is "complex").
+//
+// Complex means the schema is any of:
+//  - a simple type (primitive)
+//  - an array of something (items are possibly complex ; if this is the case, items will generate a definition)
+//  - a map of something (additionalProperties are possibly complex ; if this is the case, additionalProperties will
+//    generate a definition)
+func isAnalyzedAsComplex(asch *AnalyzedSchema) bool {
+	if !asch.IsSimpleSchema && !asch.IsArray && !asch.IsMap {
+		return true
+	}
+	return false
+}
+
+// nameInlinedSchemas replaces every complex inline construct by a named definition.
+func nameInlinedSchemas(opts *FlattenOpts) error {
+	debugLog("nameInlinedSchemas")
+	namer := &inlineSchemaNamer{
+		Spec:           opts.Swagger(),
+		Operations:     opRefsByRef(gatherOperations(opts.Spec, nil)),
+		flattenContext: opts.flattenContext,
+		opts:           opts,
+	}
+	depthFirst := sortDepthFirst(opts.Spec.allSchemas)
+	for _, key := range depthFirst {
+		sch := opts.Spec.allSchemas[key]
+		if sch.Schema != nil && sch.Schema.Ref.String() == "" && !sch.TopLevel { // inline schema
+			asch, err := Schema(SchemaOpts{Schema: sch.Schema, Root: opts.Swagger(), BasePath: opts.BasePath})
+			if err != nil {
+				return fmt.Errorf("schema analysis [%s]: %v", key, err)
+			}
+
+			if isAnalyzedAsComplex(asch) { // move complex schemas to definitions
+				if err := namer.Name(key, sch.Schema, asch); err != nil {
+					return err
+				}
+			}
+		}
+	}
+	return nil
+}
+
+var depthGroupOrder = []string{
+	"sharedParam", "sharedResponse", "sharedOpParam", "opParam", "codeResponse", "defaultResponse", "definition",
+}
+
+func sortDepthFirst(data map[string]SchemaRef) []string {
+	// group by category (shared params, op param, statuscode response, default response, definitions)
+	// sort groups internally by number of parts in the key and lexical names
+	// flatten groups into a single list of keys
+	sorted := make([]string, 0, len(data))
+	grouped := make(map[string]keys, len(data))
+	for k := range data {
+		split := keyParts(k)
+		var pk string
+		if split.IsSharedOperationParam() {
+			pk = "sharedOpParam"
+		}
+		if split.IsOperationParam() {
+			pk = "opParam"
+		}
+		if split.IsStatusCodeResponse() {
+			pk = "codeResponse"
+		}
+		if split.IsDefaultResponse() {
+			pk = "defaultResponse"
+		}
+		if split.IsDefinition() {
+			pk = "definition"
+		}
+		if split.IsSharedParam() {
+			pk = "sharedParam"
+		}
+		if split.IsSharedResponse() {
+			pk = "sharedResponse"
+		}
+		grouped[pk] = append(grouped[pk], key{Segments: len(split), Key: k})
+	}
+
+	for _, pk := range depthGroupOrder {
+		res := grouped[pk]
+		sort.Sort(res)
+		for _, v := range res {
+			sorted = append(sorted, v.Key)
+		}
+	}
+	return sorted
+}
+
+type key struct {
+	Segments int
+	Key      string
+}
+type keys []key
+
+func (k keys) Len() int      { return len(k) }
+func (k keys) Swap(i, j int) { k[i], k[j] = k[j], k[i] }
+func (k keys) Less(i, j int) bool {
+	return k[i].Segments > k[j].Segments || (k[i].Segments == k[j].Segments && k[i].Key < k[j].Key)
+}
+
+type inlineSchemaNamer struct {
+	Spec           *swspec.Swagger
+	Operations     map[string]opRef
+	flattenContext *context
+	opts           *FlattenOpts
+}
+
+func opRefsByRef(oprefs map[string]opRef) map[string]opRef {
+	result := make(map[string]opRef, len(oprefs))
+	for _, v := range oprefs {
+		result[v.Ref.String()] = v
+	}
+	return result
+}
+
+func (isn *inlineSchemaNamer) Name(key string, schema *swspec.Schema, aschema *AnalyzedSchema) error {
+	debugLog("naming inlined schema at %s", key)
+
+	parts := keyParts(key)
+	for _, name := range namesFromKey(parts, aschema, isn.Operations) {
+		if name != "" {
+			// create unique name
+			newName, isOAIGen := uniqifyName(isn.Spec.Definitions, swag.ToJSONName(name))
+
+			// clone schema
+			sch, err := cloneSchema(schema)
+			if err != nil {
+				return err
+			}
+
+			// replace values on schema
+			if err := rewriteSchemaToRef(isn.Spec, key,
+				swspec.MustCreateRef(slashpath.Join(definitionsPath, newName))); err != nil {
+				return fmt.Errorf("error while creating definition %q from inline schema: %v", newName, err)
+			}
+
+			// rewrite any dependent $ref pointing to this place,
+			// when not already pointing to a top-level definition.
+			// NOTE: this is important if such referers use arbitrary JSON pointers.
+			an := New(isn.Spec)
+			for k, v := range an.references.allRefs {
+				r, _, erd := deepestRef(isn.opts, v)
+				if erd != nil {
+					return fmt.Errorf("at %s, %v", k, erd)
+				}
+				if r.String() == key ||
+					r.String() == slashpath.Join(definitionsPath, newName) &&
+						slashpath.Dir(v.String()) != definitionsPath {
+					debugLog("found a $ref to a rewritten schema: %s points to %s", k, v.String())
+					// rewrite $ref to the new target
+					if err := updateRef(isn.Spec, k,
+						swspec.MustCreateRef(slashpath.Join(definitionsPath, newName))); err != nil {
+						return err
+					}
+				}
+			}
+
+			// NOTE: this extension is currently not used by go-swagger (provided for information only)
+			sch.AddExtension("x-go-gen-location", genLocation(parts))
+			// save cloned schema to definitions
+			saveSchema(isn.Spec, newName, sch)
+
+			// keep track of created refs
+			if isn.flattenContext != nil {
+				debugLog("track created ref: key=%s, newName=%s, isOAIGen=%t", key, newName, isOAIGen)
+				resolved := false
+				if _, ok := isn.flattenContext.newRefs[key]; ok {
+					resolved = isn.flattenContext.newRefs[key].resolved
+				}
+				isn.flattenContext.newRefs[key] = &newRef{
+					key:      key,
+					newName:  newName,
+					path:     slashpath.Join(definitionsPath, newName),
+					isOAIGen: isOAIGen,
+					resolved: resolved,
+					schema:   sch,
+				}
+			}
+		}
+	}
+	return nil
+}
+
+// genLocation indicates from which section of the specification (models or operations) a definition has been created.
+// This is reflected in the output spec with a "x-go-gen-location" extension. At the moment, this is is provided
+// for information only.
+func genLocation(parts splitKey) string {
+	if parts.IsOperation() {
+		return "operations"
+	}
+	if parts.IsDefinition() {
+		return "models"
+	}
+	return ""
+}
+
+func uniqifyName(definitions swspec.Definitions, name string) (string, bool) {
+	isOAIGen := false
+	if name == "" {
+		name = "oaiGen"
+		isOAIGen = true
+	}
+	if len(definitions) == 0 {
+		return name, isOAIGen
+	}
+
+	unq := true
+	for k := range definitions {
+		if strings.ToLower(k) == strings.ToLower(name) {
+			unq = false
+			break
+		}
+	}
+
+	if unq {
+		return name, isOAIGen
+	}
+
+	name += "OAIGen"
+	isOAIGen = true
+	var idx int
+	unique := name
+	_, known := definitions[unique]
+	for known {
+		idx++
+		unique = fmt.Sprintf("%s%d", name, idx)
+		_, known = definitions[unique]
+	}
+	return unique, isOAIGen
+}
+
+func namesFromKey(parts splitKey, aschema *AnalyzedSchema, operations map[string]opRef) []string {
+	var baseNames [][]string
+	var startIndex int
+	if parts.IsOperation() {
+		// params
+		if parts.IsOperationParam() || parts.IsSharedOperationParam() {
+			piref := parts.PathItemRef()
+			if piref.String() != "" && parts.IsOperationParam() {
+				if op, ok := operations[piref.String()]; ok {
+					startIndex = 5
+					baseNames = append(baseNames, []string{op.ID, "params", "body"})
+				}
+			} else if parts.IsSharedOperationParam() {
+				pref := parts.PathRef()
+				for k, v := range operations {
+					if strings.HasPrefix(k, pref.String()) {
+						startIndex = 4
+						baseNames = append(baseNames, []string{v.ID, "params", "body"})
+					}
+				}
+			}
+		}
+		// responses
+		if parts.IsOperationResponse() {
+			piref := parts.PathItemRef()
+			if piref.String() != "" {
+				if op, ok := operations[piref.String()]; ok {
+					startIndex = 6
+					baseNames = append(baseNames, []string{op.ID, parts.ResponseName(), "body"})
+				}
+			}
+		}
+	}
+
+	// definitions
+	if parts.IsDefinition() {
+		nm := parts.DefinitionName()
+		if nm != "" {
+			startIndex = 2
+			baseNames = append(baseNames, []string{parts.DefinitionName()})
+		}
+	}
+
+	var result []string
+	for _, segments := range baseNames {
+		nm := parts.BuildName(segments, startIndex, aschema)
+		if nm != "" {
+			result = append(result, nm)
+		}
+	}
+	sort.Strings(result)
+	return result
+}
+
+const (
+	paths           = "paths"
+	responses       = "responses"
+	parameters      = "parameters"
+	definitions     = "definitions"
+	definitionsPath = "#/definitions"
+)
+
+var ignoredKeys map[string]struct{}
+
+func init() {
+	ignoredKeys = map[string]struct{}{
+		"schema":     {},
+		"properties": {},
+		"not":        {},
+		"anyOf":      {},
+		"oneOf":      {},
+	}
+}
+
+type splitKey []string
+
+func (s splitKey) IsDefinition() bool {
+	return len(s) > 1 && s[0] == definitions
+}
+
+func (s splitKey) DefinitionName() string {
+	if !s.IsDefinition() {
+		return ""
+	}
+	return s[1]
+}
+
+func (s splitKey) isKeyName(i int) bool {
+	if i <= 0 {
+		return false
+	}
+	count := 0
+	for idx := i - 1; idx > 0; idx-- {
+		if s[idx] != "properties" {
+			break
+		}
+		count++
+	}
+
+	return count%2 != 0
+}
+
+func (s splitKey) BuildName(segments []string, startIndex int, aschema *AnalyzedSchema) string {
+	for i, part := range s[startIndex:] {
+		if _, ignored := ignoredKeys[part]; !ignored || s.isKeyName(startIndex+i) {
+			if part == "items" || part == "additionalItems" {
+				if aschema.IsTuple || aschema.IsTupleWithExtra {
+					segments = append(segments, "tuple")
+				} else {
+					segments = append(segments, "items")
+				}
+				if part == "additionalItems" {
+					segments = append(segments, part)
+				}
+				continue
+			}
+			segments = append(segments, part)
+		}
+	}
+	return strings.Join(segments, " ")
+}
+
+func (s splitKey) IsOperation() bool {
+	return len(s) > 1 && s[0] == paths
+}
+
+func (s splitKey) IsSharedOperationParam() bool {
+	return len(s) > 2 && s[0] == paths && s[2] == parameters
+}
+
+func (s splitKey) IsSharedParam() bool {
+	return len(s) > 1 && s[0] == parameters
+}
+
+func (s splitKey) IsOperationParam() bool {
+	return len(s) > 3 && s[0] == paths && s[3] == parameters
+}
+
+func (s splitKey) IsOperationResponse() bool {
+	return len(s) > 3 && s[0] == paths && s[3] == responses
+}
+
+func (s splitKey) IsSharedResponse() bool {
+	return len(s) > 1 && s[0] == responses
+}
+
+func (s splitKey) IsDefaultResponse() bool {
+	return len(s) > 4 && s[0] == paths && s[3] == responses && s[4] == "default"
+}
+
+func (s splitKey) IsStatusCodeResponse() bool {
+	isInt := func() bool {
+		_, err := strconv.Atoi(s[4])
+		return err == nil
+	}
+	return len(s) > 4 && s[0] == paths && s[3] == responses && isInt()
+}
+
+func (s splitKey) ResponseName() string {
+	if s.IsStatusCodeResponse() {
+		code, _ := strconv.Atoi(s[4])
+		return http.StatusText(code)
+	}
+	if s.IsDefaultResponse() {
+		return "Default"
+	}
+	return ""
+}
+
+var validMethods map[string]struct{}
+
+func init() {
+	validMethods = map[string]struct{}{
+		"GET":     {},
+		"HEAD":    {},
+		"OPTIONS": {},
+		"PATCH":   {},
+		"POST":    {},
+		"PUT":     {},
+		"DELETE":  {},
+	}
+}
+
+func (s splitKey) PathItemRef() swspec.Ref {
+	if len(s) < 3 {
+		return swspec.Ref{}
+	}
+	pth, method := s[1], s[2]
+	if _, validMethod := validMethods[strings.ToUpper(method)]; !validMethod && !strings.HasPrefix(method, "x-") {
+		return swspec.Ref{}
+	}
+	return swspec.MustCreateRef("#" + slashpath.Join("/", paths, jsonpointer.Escape(pth), strings.ToUpper(method)))
+}
+
+func (s splitKey) PathRef() swspec.Ref {
+	if !s.IsOperation() {
+		return swspec.Ref{}
+	}
+	return swspec.MustCreateRef("#" + slashpath.Join("/", paths, jsonpointer.Escape(s[1])))
+}
+
+func keyParts(key string) splitKey {
+	var res []string
+	for _, part := range strings.Split(key[1:], "/") {
+		if part != "" {
+			res = append(res, jsonpointer.Unescape(part))
+		}
+	}
+	return res
+}
+
+func rewriteSchemaToRef(spec *swspec.Swagger, key string, ref swspec.Ref) error {
+	debugLog("rewriting schema to ref for %s with %s", key, ref.String())
+	_, value, err := getPointerFromKey(spec, key)
+	if err != nil {
+		return err
+	}
+
+	switch refable := value.(type) {
+	case *swspec.Schema:
+		return rewriteParentRef(spec, key, ref)
+
+	case swspec.Schema:
+		return rewriteParentRef(spec, key, ref)
+
+	case *swspec.SchemaOrArray:
+		if refable.Schema != nil {
+			refable.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
+		}
+
+	case *swspec.SchemaOrBool:
+		if refable.Schema != nil {
+			refable.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
+		}
+	default:
+		return fmt.Errorf("no schema with ref found at %s for %T", key, value)
+	}
+
+	return nil
+}
+
+func rewriteParentRef(spec *swspec.Swagger, key string, ref swspec.Ref) error {
+	parent, entry, pvalue, err := getParentFromKey(spec, key)
+	if err != nil {
+		return err
+	}
+
+	debugLog("rewriting holder for %T", pvalue)
+	switch container := pvalue.(type) {
+	case swspec.Response:
+		if err := rewriteParentRef(spec, "#"+parent, ref); err != nil {
+			return err
+		}
+
+	case *swspec.Response:
+		container.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
+
+	case *swspec.Responses:
+		statusCode, err := strconv.Atoi(entry)
+		if err != nil {
+			return fmt.Errorf("%s not a number: %v", key[1:], err)
+		}
+		resp := container.StatusCodeResponses[statusCode]
+		resp.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
+		container.StatusCodeResponses[statusCode] = resp
+
+	case map[string]swspec.Response:
+		resp := container[entry]
+		resp.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
+		container[entry] = resp
+
+	case swspec.Parameter:
+		if err := rewriteParentRef(spec, "#"+parent, ref); err != nil {
+			return err
+		}
+
+	case map[string]swspec.Parameter:
+		param := container[entry]
+		param.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
+		container[entry] = param
+
+	case []swspec.Parameter:
+		idx, err := strconv.Atoi(entry)
+		if err != nil {
+			return fmt.Errorf("%s not a number: %v", key[1:], err)
+		}
+		param := container[idx]
+		param.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
+		container[idx] = param
+
+	case swspec.Definitions:
+		container[entry] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
+
+	case map[string]swspec.Schema:
+		container[entry] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
+
+	case []swspec.Schema:
+		idx, err := strconv.Atoi(entry)
+		if err != nil {
+			return fmt.Errorf("%s not a number: %v", key[1:], err)
+		}
+		container[idx] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
+
+	case *swspec.SchemaOrArray:
+		// NOTE: this is necessarily an array - otherwise, the parent would be *Schema
+		idx, err := strconv.Atoi(entry)
+		if err != nil {
+			return fmt.Errorf("%s not a number: %v", key[1:], err)
+		}
+		container.Schemas[idx] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
+
+	// NOTE: can't have case *swspec.SchemaOrBool = parent in this case is *Schema
+
+	default:
+		return fmt.Errorf("unhandled parent schema rewrite %s (%T)", key, pvalue)
+	}
+	return nil
+}
+
+func cloneSchema(schema *swspec.Schema) (*swspec.Schema, error) {
+	var sch swspec.Schema
+	if err := swag.FromDynamicJSON(schema, &sch); err != nil {
+		return nil, fmt.Errorf("cannot clone schema: %v", err)
+	}
+	return &sch, nil
+}
+
+func importExternalReferences(opts *FlattenOpts) error {
+	groupedRefs := reverseIndexForSchemaRefs(opts)
+	sortedRefStr := make([]string, 0, len(groupedRefs))
+
+	// sort $ref resolution to ensure deterministic name conflict resolution
+	for refStr := range groupedRefs {
+		sortedRefStr = append(sortedRefStr, refStr)
+	}
+	sort.Strings(sortedRefStr)
+
+	for _, refStr := range sortedRefStr {
+		entry := groupedRefs[refStr]
+		if !entry.Ref.HasFragmentOnly {
+			debugLog("importing external schema for [%s] from %s", strings.Join(entry.Keys, ", "), refStr)
+			// resolve to actual schema
+			sch := new(swspec.Schema)
+			sch.Ref = entry.Ref
+			if err := swspec.ExpandSchemaWithBasePath(sch, nil, opts.ExpandOpts(false)); err != nil {
+				return err
+			}
+			if sch == nil {
+				return fmt.Errorf("no schema found at %s for [%s]", refStr, strings.Join(entry.Keys, ", "))
+			}
+			debugLog("importing external schema for [%s] from %s", strings.Join(entry.Keys, ", "), refStr)
+
+			// generate a unique name - isOAIGen means that a naming conflict was resolved by changing the name
+			newName, isOAIGen := uniqifyName(opts.Swagger().Definitions, nameFromRef(entry.Ref))
+			debugLog("new name for [%s]: %s - with name conflict:%t",
+				strings.Join(entry.Keys, ", "), newName, isOAIGen)
+
+			// rewrite the external refs to local ones
+			for _, key := range entry.Keys {
+				if err := updateRef(opts.Swagger(), key,
+					swspec.MustCreateRef(slashpath.Join(definitionsPath, newName))); err != nil {
+					return err
+				}
+
+				// keep track of created refs
+				if opts.flattenContext != nil {
+					resolved := false
+					if _, ok := opts.flattenContext.newRefs[key]; ok {
+						resolved = opts.flattenContext.newRefs[key].resolved
+					}
+					opts.flattenContext.newRefs[key] = &newRef{
+						key:      key,
+						newName:  newName,
+						path:     slashpath.Join(definitionsPath, newName),
+						isOAIGen: isOAIGen,
+						resolved: resolved,
+						schema:   sch,
+					}
+				}
+			}
+
+			// add the resolved schema to the definitions
+			saveSchema(opts.Swagger(), newName, sch)
+		}
+	}
+	return nil
+}
+
+type refRevIdx struct {
+	Ref  swspec.Ref
+	Keys []string
+}
+
+// normalizePath renders absolute path on remote file refs
+func normalizePath(ref swspec.Ref, opts *FlattenOpts) (normalizedPath string) {
+	if ref.HasFragmentOnly || filepath.IsAbs(ref.String()) {
+		normalizedPath = ref.String()
+		return
+	}
+
+	refURL, _ := url.Parse(ref.String())
+	if refURL.Host != "" {
+		normalizedPath = ref.String()
+		return
+	}
+
+	parts := strings.Split(ref.String(), "#")
+	parts[0] = filepath.Join(filepath.Dir(opts.BasePath), parts[0])
+	normalizedPath = strings.Join(parts, "#")
+	return
+}
+
+func reverseIndexForSchemaRefs(opts *FlattenOpts) map[string]refRevIdx {
+	collected := make(map[string]refRevIdx)
+	for key, schRef := range opts.Spec.references.schemas {
+		// normalize paths before sorting,
+		// so we get together keys in same external file
+		normalizedPath := normalizePath(schRef, opts)
+		if entry, ok := collected[normalizedPath]; ok {
+			entry.Keys = append(entry.Keys, key)
+			collected[normalizedPath] = entry
+		} else {
+			collected[normalizedPath] = refRevIdx{
+				Ref:  schRef,
+				Keys: []string{key},
+			}
+		}
+	}
+	return collected
+}
+
+func nameFromRef(ref swspec.Ref) string {
+	u := ref.GetURL()
+	if u.Fragment != "" {
+		return swag.ToJSONName(slashpath.Base(u.Fragment))
+	}
+	if u.Path != "" {
+		bn := slashpath.Base(u.Path)
+		if bn != "" && bn != "/" {
+			ext := slashpath.Ext(bn)
+			if ext != "" {
+				return swag.ToJSONName(bn[:len(bn)-len(ext)])
+			}
+			return swag.ToJSONName(bn)
+		}
+	}
+	return swag.ToJSONName(strings.Replace(u.Host, ".", " ", -1))
+}
+
+func saveSchema(spec *swspec.Swagger, name string, schema *swspec.Schema) {
+	if schema == nil {
+		return
+	}
+	if spec.Definitions == nil {
+		spec.Definitions = make(map[string]swspec.Schema, 150)
+	}
+	spec.Definitions[name] = *schema
+}
+
+// getPointerFromKey retrieves the content of the JSON pointer "key"
+func getPointerFromKey(spec *swspec.Swagger, key string) (string, interface{}, error) {
+	// unescape chars in key, e.g. "{}" from path params
+	pth, _ := internal.PathUnescape(key[1:])
+	ptr, err := jsonpointer.New(pth)
+	if err != nil {
+		return "", nil, err
+	}
+
+	value, _, err := ptr.Get(spec)
+	if err != nil {
+		debugLog("error when getting key: %s with path: %s", key, pth)
+		return "", nil, err
+	}
+	return pth, value, nil
+}
+
+// getParentFromKey retrieves the container of the JSON pointer "key"
+func getParentFromKey(spec *swspec.Swagger, key string) (string, string, interface{}, error) {
+	// unescape chars in key, e.g. "{}" from path params
+	pth, _ := internal.PathUnescape(key[1:])
+
+	parent, entry := slashpath.Dir(pth), slashpath.Base(pth)
+	debugLog("getting schema holder at: %s, with entry: %s", parent, entry)
+
+	pptr, err := jsonpointer.New(parent)
+	if err != nil {
+		return "", "", nil, err
+	}
+	pvalue, _, err := pptr.Get(spec)
+	if err != nil {
+		return "", "", nil, fmt.Errorf("can't get parent for %s: %v", parent, err)
+	}
+	return parent, entry, pvalue, nil
+}
+
+// updateRef replaces a ref by another one
+func updateRef(spec *swspec.Swagger, key string, ref swspec.Ref) error {
+	debugLog("updating ref for %s with %s", key, ref.String())
+	pth, value, err := getPointerFromKey(spec, key)
+	if err != nil {
+		return err
+	}
+
+	switch refable := value.(type) {
+	case *swspec.Schema:
+		refable.Ref = ref
+	case *swspec.SchemaOrArray:
+		if refable.Schema != nil {
+			refable.Schema.Ref = ref
+		}
+	case *swspec.SchemaOrBool:
+		if refable.Schema != nil {
+			refable.Schema.Ref = ref
+		}
+	case swspec.Schema:
+		debugLog("rewriting holder for %T", refable)
+		_, entry, pvalue, erp := getParentFromKey(spec, key)
+		if erp != nil {
+			return err
+		}
+		switch container := pvalue.(type) {
+		case swspec.Definitions:
+			container[entry] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
+
+		case map[string]swspec.Schema:
+			container[entry] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
+
+		case []swspec.Schema:
+			idx, err := strconv.Atoi(entry)
+			if err != nil {
+				return fmt.Errorf("%s not a number: %v", pth, err)
+			}
+			container[idx] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
+
+		case *swspec.SchemaOrArray:
+			// NOTE: this is necessarily an array - otherwise, the parent would be *Schema
+			idx, err := strconv.Atoi(entry)
+			if err != nil {
+				return fmt.Errorf("%s not a number: %v", pth, err)
+			}
+			container.Schemas[idx] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
+
+		// NOTE: can't have case *swspec.SchemaOrBool = parent in this case is *Schema
+
+		default:
+			return fmt.Errorf("unhandled container type at %s: %T", key, value)
+		}
+
+	default:
+		return fmt.Errorf("no schema with ref found at %s for %T", key, value)
+	}
+
+	return nil
+}
+
+// updateRefWithSchema replaces a ref with a schema (i.e. re-inline schema)
+func updateRefWithSchema(spec *swspec.Swagger, key string, sch *swspec.Schema) error {
+	debugLog("updating ref for %s with schema", key)
+	pth, value, err := getPointerFromKey(spec, key)
+	if err != nil {
+		return err
+	}
+
+	switch refable := value.(type) {
+	case *swspec.Schema:
+		*refable = *sch
+	case swspec.Schema:
+		_, entry, pvalue, erp := getParentFromKey(spec, key)
+		if erp != nil {
+			return err
+		}
+		switch container := pvalue.(type) {
+		case swspec.Definitions:
+			container[entry] = *sch
+
+		case map[string]swspec.Schema:
+			container[entry] = *sch
+
+		case []swspec.Schema:
+			idx, err := strconv.Atoi(entry)
+			if err != nil {
+				return fmt.Errorf("%s not a number: %v", pth, err)
+			}
+			container[idx] = *sch
+
+		case *swspec.SchemaOrArray:
+			// NOTE: this is necessarily an array - otherwise, the parent would be *Schema
+			idx, err := strconv.Atoi(entry)
+			if err != nil {
+				return fmt.Errorf("%s not a number: %v", pth, err)
+			}
+			container.Schemas[idx] = *sch
+
+		// NOTE: can't have case *swspec.SchemaOrBool = parent in this case is *Schema
+
+		default:
+			return fmt.Errorf("unhandled type for parent of [%s]: %T", key, value)
+		}
+	case *swspec.SchemaOrArray:
+		*refable.Schema = *sch
+	// NOTE: can't have case *swspec.SchemaOrBool = parent in this case is *Schema
+	case *swspec.SchemaOrBool:
+		*refable.Schema = *sch
+	default:
+		return fmt.Errorf("no schema with ref found at %s for %T", key, value)
+	}
+
+	return nil
+}
+
+func containsString(names []string, name string) bool {
+	for _, nm := range names {
+		if nm == name {
+			return true
+		}
+	}
+	return false
+}
+
+type opRef struct {
+	Method string
+	Path   string
+	Key    string
+	ID     string
+	Op     *swspec.Operation
+	Ref    swspec.Ref
+}
+
+type opRefs []opRef
+
+func (o opRefs) Len() int           { return len(o) }
+func (o opRefs) Swap(i, j int)      { o[i], o[j] = o[j], o[i] }
+func (o opRefs) Less(i, j int) bool { return o[i].Key < o[j].Key }
+
+func gatherOperations(specDoc *Spec, operationIDs []string) map[string]opRef {
+	var oprefs opRefs
+
+	for method, pathItem := range specDoc.Operations() {
+		for pth, operation := range pathItem {
+			vv := *operation
+			oprefs = append(oprefs, opRef{
+				Key:    swag.ToGoName(strings.ToLower(method) + " " + pth),
+				Method: method,
+				Path:   pth,
+				ID:     vv.ID,
+				Op:     &vv,
+				Ref:    swspec.MustCreateRef("#" + slashpath.Join("/paths", jsonpointer.Escape(pth), method)),
+			})
+		}
+	}
+
+	sort.Sort(oprefs)
+
+	operations := make(map[string]opRef)
+	for _, opr := range oprefs {
+		nm := opr.ID
+		if nm == "" {
+			nm = opr.Key
+		}
+
+		oo, found := operations[nm]
+		if found && oo.Method != opr.Method && oo.Path != opr.Path {
+			nm = opr.Key
+		}
+		if len(operationIDs) == 0 || containsString(operationIDs, opr.ID) || containsString(operationIDs, nm) {
+			opr.ID = nm
+			opr.Op.ID = nm
+			operations[nm] = opr
+		}
+	}
+	return operations
+}
+
+// stripPointersAndOAIGen removes anonymous JSON pointers from spec and chain with name conflicts handler.
+// This loops until the spec has no such pointer and all name conflicts have been reduced as much as possible.
+func stripPointersAndOAIGen(opts *FlattenOpts) error {
+	// name all JSON pointers to anonymous documents
+	if err := namePointers(opts); err != nil {
+		return err
+	}
+
+	// remove unnecessary OAIGen ref (created when flattening external refs creates name conflicts)
+	hasIntroducedPointerOrInline, ers := stripOAIGen(opts)
+	if ers != nil {
+		return ers
+	}
+
+	// iterate as pointer or OAIGen resolution may introduce inline schemas or pointers
+	for hasIntroducedPointerOrInline {
+		if !opts.Minimal {
+			if err := nameInlinedSchemas(opts); err != nil {
+				return err
+			}
+		}
+
+		if err := namePointers(opts); err != nil {
+			return err
+		}
+
+		// restrip
+		if hasIntroducedPointerOrInline, ers = stripOAIGen(opts); ers != nil {
+			return ers
+		}
+	}
+	return nil
+}
+
+// stripOAIGen strips the spec from unnecessary OAIGen constructs, initially created to dedupe flattened definitions.
+// A dedupe is deemed unnecessary whenever:
+//  - the only conflict is with its (single) parent: OAIGen is merged into its parent
+//  - there is a conflict with multiple parents: merge OAIGen in first parent, the rewrite other parents to point to
+//    the first parent.
+//
+// This function returns a true bool whenever it re-inlined a complex schema, so the caller may chose to iterate
+// flattening again.
+//
+// NOTE: the OAIGen definition cannot be itself a $ref.
+func stripOAIGen(opts *FlattenOpts) (bool, error) {
+	debugLog("stripOAIGen")
+	replacedWithComplex := false
+	for k, v := range opts.Spec.references.allRefs {
+		// figure out referers of OAIGen definitions
+		for _, r := range opts.flattenContext.newRefs {
+			if r.isOAIGen && !r.resolved && r.path == v.String() { // bail on already resolved entries (avoid looping)
+				r.parents = append(r.parents, k)
+			}
+		}
+	}
+
+	for _, r := range opts.flattenContext.newRefs {
+		if r.isOAIGen && len(r.parents) >= 1 && r.schema.Ref.String() == "" {
+			pr := r.parents
+			sort.Strings(pr)
+			// rewrite first parent schema in lexicographical order
+			debugLog("rewrite first parent %s with schema", pr[0])
+			if err := updateRefWithSchema(opts.Swagger(), pr[0], r.schema); err != nil {
+				return false, err
+			}
+			// rewrite other parents to point to first parent
+			if len(pr) > 1 {
+				for _, p := range pr[1:] {
+					replacingRef := swspec.MustCreateRef(pr[0])
+					// Set complex when replacing ref is an anonymous jsonpointer: further processing may be required
+					replacedWithComplex = replacedWithComplex ||
+						slashpath.Dir(replacingRef.String()) != definitionsPath
+					debugLog("rewrite parent with ref: %s", replacingRef.String())
+					// NOTE: it is possible at this stage to introduce json pointers (to non-definitions places).
+					// Those are stripped later on.
+					if err := updateRef(opts.Swagger(), p, replacingRef); err != nil {
+						return false, err
+					}
+				}
+			}
+			// remove OAIGen definition
+			debugLog("removing definition %s", slashpath.Base(r.path))
+			delete(opts.Swagger().Definitions, slashpath.Base(r.path))
+			// mark naming conflict as resolved
+			opts.flattenContext.newRefs[r.key].isOAIGen = false
+			opts.flattenContext.newRefs[r.key].resolved = true
+
+			// determine if the previous substitution did inline a complex schema
+			if r.schema != nil && r.schema.Ref.String() == "" { // inline schema
+				asch, err := Schema(SchemaOpts{Schema: r.schema, Root: opts.Swagger(), BasePath: opts.BasePath})
+				if err != nil {
+					return false, err
+				}
+				debugLog("re-inline schema: parent: %s, %t", pr[0], isAnalyzedAsComplex(asch))
+				replacedWithComplex = replacedWithComplex ||
+					!(slashpath.Dir(pr[0]) == definitionsPath) && isAnalyzedAsComplex(asch)
+			}
+		}
+	}
+	opts.Spec.reload() // re-analyze
+	return replacedWithComplex, nil
+}
+
+// croak logs notifications and warnings about valid, but possibly unwanted constructs resulting
+// from flattening a spec
+func croak(opts *FlattenOpts) {
+	reported := make(map[string]bool, len(opts.flattenContext.newRefs))
+	for _, v := range opts.Spec.references.allRefs {
+		// warns about duplicate handling
+		for _, r := range opts.flattenContext.newRefs {
+			if r.isOAIGen && r.path == v.String() {
+				reported[r.newName] = true
+			}
+		}
+	}
+	for k := range reported {
+		log.Printf("warning: duplicate flattened definition name resolved as %s", k)
+	}
+	// warns about possible type mismatches
+	uniqueMsg := make(map[string]bool)
+	for _, msg := range opts.flattenContext.warnings {
+		if _, ok := uniqueMsg[msg]; ok {
+			continue
+		}
+		log.Printf("warning: %s", msg)
+		uniqueMsg[msg] = true
+	}
+}
+
+// namePointers replaces all JSON pointers to anonymous documents by a $ref to a new named definitions.
+//
+// This is carried on depth-first. Pointers to $refs which are top level definitions are replaced by the $ref itself.
+// Pointers to simple types are expanded, unless they express commonality (i.e. several such $ref are used).
+func namePointers(opts *FlattenOpts) error {
+	debugLog("name pointers")
+	refsToReplace := make(map[string]SchemaRef, len(opts.Spec.references.schemas))
+	//for k, ref := range opts.Spec.references.schemas {
+	for k, ref := range opts.Spec.references.allRefs {
+		if slashpath.Dir(ref.String()) == definitionsPath {
+			// this a ref to a top-level definition: ok
+			continue
+		}
+		replacingRef, sch, erd := deepestRef(opts, ref)
+		if erd != nil {
+			return fmt.Errorf("at %s, %v", k, erd)
+		}
+		debugLog("planning pointer to replace at %s: %s, resolved to: %s", k, ref.String(), replacingRef.String())
+		refsToReplace[k] = SchemaRef{
+			Name:     k,            // caller
+			Ref:      replacingRef, // callee
+			Schema:   sch,
+			TopLevel: slashpath.Dir(replacingRef.String()) == definitionsPath,
+		}
+	}
+	depthFirst := sortDepthFirst(refsToReplace)
+	namer := &inlineSchemaNamer{
+		Spec:           opts.Swagger(),
+		Operations:     opRefsByRef(gatherOperations(opts.Spec, nil)),
+		flattenContext: opts.flattenContext,
+		opts:           opts,
+	}
+
+	for _, key := range depthFirst {
+		v := refsToReplace[key]
+		// update current replacement, which may have been updated by previous changes of deeper elements
+		replacingRef, sch, erd := deepestRef(opts, v.Ref)
+		if erd != nil {
+			return fmt.Errorf("at %s, %v", key, erd)
+		}
+		v.Ref = replacingRef
+		v.Schema = sch
+		v.TopLevel = slashpath.Dir(replacingRef.String()) == definitionsPath
+		debugLog("replacing pointer at %s: resolved to: %s", key, v.Ref.String())
+
+		if v.TopLevel {
+			debugLog("replace pointer %s by canonical definition: %s", key, v.Ref.String())
+			// if the schema is a $ref to a top level definition, just rewrite the pointer to this $ref
+			if err := updateRef(opts.Swagger(), key, v.Ref); err != nil {
+				return err
+			}
+		} else {
+			// this is a JSON pointer to an anonymous document (internal or external):
+			// create a definition for this schema when:
+			// - it is a complex schema
+			// - or it is pointed by more than one $ref (i.e. expresses commonality)
+			// otherwise, expand the pointer (single reference to a simple type)
+			//
+			// The named definition for this follows the target's key, not the caller's
+			debugLog("namePointers at %s for %s", key, v.Ref.String())
+
+			// qualify the expanded schema
+			asch, ers := Schema(SchemaOpts{Schema: v.Schema, Root: opts.Swagger(), BasePath: opts.BasePath})
+			if ers != nil {
+				return fmt.Errorf("schema analysis [%s]: %v", key, ers)
+			}
+			callers := make([]string, 0, 64)
+
+			debugLog("looking for callers")
+			an := New(opts.Swagger())
+			for k, w := range an.references.allRefs {
+				r, _, erd := deepestRef(opts, w)
+				if erd != nil {
+					return fmt.Errorf("at %s, %v", key, erd)
+				}
+				if r.String() == v.Ref.String() {
+					callers = append(callers, k)
+				}
+			}
+			debugLog("callers for %s: %d", v.Ref.String(), len(callers))
+			if len(callers) == 0 {
+				// has already been updated and resolved
+				continue
+			}
+
+			parts := keyParts(v.Ref.String())
+			debugLog("number of callers for %s: %d", v.Ref.String(), len(callers))
+			// identifying edge case when the namer did nothing because we point to a non-schema object
+			// no definition is created and we expand the $ref for all callers
+			if (!asch.IsSimpleSchema || len(callers) > 1) && !parts.IsSharedParam() && !parts.IsSharedResponse() {
+				debugLog("replace JSON pointer at [%s] by definition: %s", key, v.Ref.String())
+				if err := namer.Name(v.Ref.String(), v.Schema, asch); err != nil {
+					return err
+				}
+
+				// regular case: we named the $ref as a definition, and we move all callers to this new $ref
+				for _, caller := range callers {
+					if caller != key {
+						// move $ref for next to resolve
+						debugLog("identified caller of %s at [%s]", v.Ref.String(), caller)
+						c := refsToReplace[caller]
+						c.Ref = v.Ref
+						refsToReplace[caller] = c
+					}
+				}
+			} else {
+				debugLog("expand JSON pointer for key=%s", key)
+				if err := updateRefWithSchema(opts.Swagger(), key, v.Schema); err != nil {
+					return err
+				}
+				// NOTE: there is no other caller to update
+			}
+		}
+	}
+	opts.Spec.reload() // re-analyze
+	return nil
+}
+
+// deepestRef finds the first definition ref, from a cascade of nested refs which are not definitions.
+//  - if no definition is found, returns the deepest ref.
+//  - pointers to external files are expanded
+//
+// NOTE: all external $ref's are assumed to be already expanded at this stage.
+func deepestRef(opts *FlattenOpts, ref swspec.Ref) (swspec.Ref, *swspec.Schema, error) {
+	if !ref.HasFragmentOnly {
+		// does nothing on external $refs
+		return ref, nil, nil
+	}
+	currentRef := ref
+	// visited guards against cyclic chains of $ref pointers
+	visited := make(map[string]bool, 64)
+DOWNREF:
+	for currentRef.String() != "" {
+		if slashpath.Dir(currentRef.String()) == definitionsPath {
+			// this is a top-level definition: stop here and return this ref
+			return currentRef, nil, nil
+		}
+		if _, beenThere := visited[currentRef.String()]; beenThere {
+			return swspec.Ref{}, nil,
+				fmt.Errorf("cannot resolve cyclic chain of pointers under %s", currentRef.String())
+		}
+		visited[currentRef.String()] = true
+		// resolve the JSON pointer against the in-memory spec document
+		value, _, err := currentRef.GetPointer().Get(opts.Swagger())
+		if err != nil {
+			return swspec.Ref{}, nil, err
+		}
+		// descend one level: follow the nested $ref of whatever the pointer
+		// resolved to, or stop (break DOWNREF) when there is no further $ref
+		switch refable := value.(type) {
+		case *swspec.Schema:
+			if refable.Ref.String() == "" {
+				break DOWNREF
+			}
+			currentRef = refable.Ref
+
+		case swspec.Schema:
+			if refable.Ref.String() == "" {
+				break DOWNREF
+			}
+			currentRef = refable.Ref
+
+		case *swspec.SchemaOrArray:
+			if refable.Schema == nil || refable.Schema != nil && refable.Schema.Ref.String() == "" {
+				break DOWNREF
+			}
+			currentRef = refable.Schema.Ref
+
+		case *swspec.SchemaOrBool:
+			if refable.Schema == nil || refable.Schema != nil && refable.Schema.Ref.String() == "" {
+				break DOWNREF
+			}
+			currentRef = refable.Schema.Ref
+
+		case swspec.Response:
+			// a pointer points to a schema initially marshalled in responses section...
+			// Attempt to convert this to a schema. If this fails, the spec is invalid
+			// NOTE(review): the error message below contains a typo ("a schema a");
+			// left untouched here since it is a runtime string.
+			asJSON, _ := refable.MarshalJSON()
+			var asSchema swspec.Schema
+			err := asSchema.UnmarshalJSON(asJSON)
+			if err != nil {
+				return swspec.Ref{}, nil,
+					fmt.Errorf("invalid type for resolved JSON pointer %s. Expected a schema a, got: %T",
+						currentRef.String(), value)
+
+			}
+			opts.flattenContext.warnings = append(opts.flattenContext.warnings,
+				fmt.Sprintf("found $ref %q (response) interpreted as schema", currentRef.String()))
+
+			if asSchema.Ref.String() == "" {
+				break DOWNREF
+			}
+			currentRef = asSchema.Ref
+
+		case swspec.Parameter:
+			// a pointer points to a schema initially marshalled in parameters section...
+			// Attempt to convert this to a schema. If this fails, the spec is invalid
+			asJSON, _ := refable.MarshalJSON()
+			var asSchema swspec.Schema
+			err := asSchema.UnmarshalJSON(asJSON)
+			if err != nil {
+				return swspec.Ref{}, nil,
+					fmt.Errorf("invalid type for resolved JSON pointer %s. Expected a schema a, got: %T",
+						currentRef.String(), value)
+
+			}
+			opts.flattenContext.warnings = append(opts.flattenContext.warnings,
+				fmt.Sprintf("found $ref %q (parameter) interpreted as schema", currentRef.String()))
+
+			if asSchema.Ref.String() == "" {
+				break DOWNREF
+			}
+			currentRef = asSchema.Ref
+
+		default:
+			return swspec.Ref{}, nil,
+				fmt.Errorf("unhandled type to resolve JSON pointer %s. Expected a Schema, got: %T",
+					currentRef.String(), value)
+
+		}
+	}
+	// assess what schema we're ending with
+	sch, erv := swspec.ResolveRefWithBase(opts.Swagger(), &currentRef, opts.ExpandOpts(false))
+	if erv != nil {
+		return swspec.Ref{}, nil, erv
+	}
+	if sch == nil {
+		return swspec.Ref{}, nil, fmt.Errorf("no schema found at %s", currentRef.String())
+	}
+	return currentRef, sch, nil
+}
+
+// normalizeRef strips the current file from any $ref. This works around issue go-openapi/spec#76:
+// leading absolute file in $ref is stripped
+func normalizeRef(opts *FlattenOpts) error {
+	debugLog("normalizeRef")
+	opts.Spec.reload() // re-analyze
+	prefix := opts.BasePath + definitionsPath // may be a mix of / and \, depending on OS
+	for key, ref := range opts.Spec.references.allRefs {
+		pointer := ref.String()
+		if !strings.HasPrefix(pointer, prefix) {
+			continue
+		}
+		// strip base path from definition
+		debugLog("stripping absolute path for: %s", pointer)
+		rewritten := swspec.MustCreateRef(slashpath.Join(definitionsPath, slashpath.Base(pointer)))
+		if err := updateRef(opts.Swagger(), key, rewritten); err != nil {
+			return err
+		}
+	}
+	opts.Spec.reload() // re-analyze
+	return nil
+}
diff --git a/go/vendor/github.com/go-openapi/analysis/go.mod b/go/vendor/github.com/go-openapi/analysis/go.mod
new file mode 100644
index 0000000..1b03929
--- /dev/null
+++ b/go/vendor/github.com/go-openapi/analysis/go.mod
@@ -0,0 +1,10 @@
+module github.com/go-openapi/analysis
+
+require (
+	github.com/go-openapi/jsonpointer v0.17.0
+	github.com/go-openapi/loads v0.17.0
+	github.com/go-openapi/spec v0.17.0
+	github.com/go-openapi/strfmt v0.17.0
+	github.com/go-openapi/swag v0.17.0
+	github.com/stretchr/testify v1.2.2
+)
diff --git a/go/vendor/github.com/go-openapi/analysis/go.sum b/go/vendor/github.com/go-openapi/analysis/go.sum
new file mode 100644
index 0000000..5302ec9
--- /dev/null
+++ b/go/vendor/github.com/go-openapi/analysis/go.sum
@@ -0,0 +1,37 @@
+github.com/PuerkitoBio/purell v1.1.0 h1:rmGxhojJlM0tuKtfdvliR84CFHljx9ag64t2xmVkjK4=
+github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
+github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M=
+github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
+github.com/asaskevich/govalidator v0.0.0-20180720115003-f9ffefc3facf h1:eg0MeVzsP1G42dRafH3vf+al2vQIJU0YHX+1Tw87oco=
+github.com/asaskevich/govalidator v0.0.0-20180720115003-f9ffefc3facf/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/globalsign/mgo v0.0.0-20180905125535-1ca0a4f7cbcb h1:D4uzjWwKYQ5XnAvUbuvHW93esHg7F8N/OYeBBcJoTr0=
+github.com/globalsign/mgo v0.0.0-20180905125535-1ca0a4f7cbcb/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q=
+github.com/go-openapi/analysis v0.0.0-20180825180245-b006789cd277/go.mod h1:k70tL6pCuVxPJOHXQ+wIac1FUrvNkHolPie/cLEU6hI=
+github.com/go-openapi/errors v0.17.0 h1:47T+LqPrQUxFXQnB22aLBfsTRFSqWp5y4OiFgQm+/Lw=
+github.com/go-openapi/errors v0.17.0/go.mod h1:La0D2x9HoXenv7MDEiAv6vWoe84CXFo0PQRk/jdQlww=
+github.com/go-openapi/jsonpointer v0.17.0 h1:Bpl2DtZ6k7wKqfFs7e+4P08+M9I3FQgn09a1UsRUQbk=
+github.com/go-openapi/jsonpointer v0.17.0/go.mod h1:+35s3my2LFTysnkMfxsJBAMHj/DoqoB9knIWoYG/Vk0=
+github.com/go-openapi/jsonreference v0.17.0 h1:d/o7/fsLWWQZACbihvZxcyLQ59jfUVs7WOJv/ak7T7A=
+github.com/go-openapi/jsonreference v0.17.0/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg=
+github.com/go-openapi/loads v0.17.0 h1:H22nMs3GDQk4SwAaFQ+jLNw+0xoFeCueawhZlv8MBYs=
+github.com/go-openapi/loads v0.17.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU=
+github.com/go-openapi/spec v0.17.0 h1:MM5YaXBdBOEcjGHW5WayrAY5Ze2ydNyy71JHeTi7xUc=
+github.com/go-openapi/spec v0.17.0/go.mod h1:J8+jY1nAiCcj+friV/PDoE1/3eeccG9LYBs0tYvLOWc=
+github.com/go-openapi/strfmt v0.17.0 h1:79+bCyGHowS3rkr6z8RcG5jVzdKpeKXlDuW6yqE50TM=
+github.com/go-openapi/strfmt v0.17.0/go.mod h1:/bCWipNKhC9QMhD8HRe2EGbU8G0D4Yvh0G6X4k1Xwvg=
+github.com/go-openapi/swag v0.17.0 h1:7wu+dZ5k83kvUWeAb+WUkFiUhDzwGqzTR/NhWzeo1JU=
+github.com/go-openapi/swag v0.17.0/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I=
+github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329 h1:2gxZ0XQIU/5z3Z3bUBu+FXuk2pFbkN6tcwi/pjyaDic=
+github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
+github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE=
+github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+golang.org/x/net v0.0.0-20181005035420-146acd28ed58 h1:otZG8yDCO4LVps5+9bxOeNiCvgmOyt96J3roHTYs7oE=
+golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
diff --git a/go/vendor/github.com/go-openapi/analysis/internal/post_go18.go b/go/vendor/github.com/go-openapi/analysis/internal/post_go18.go
new file mode 100644
index 0000000..f96f55c
--- /dev/null
+++ b/go/vendor/github.com/go-openapi/analysis/internal/post_go18.go
@@ -0,0 +1,29 @@
+// +build go1.8
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package internal
+
+import "net/url"
+
+// PathUnescape provides url.PathUnescape(), with seamless
+// go version support for pre-go1.8
+//
+// TODO: this function is currently defined in go-openapi/swag,
+// but unexported. We might choose to export it, or simply phase
+// out pre-go1.8 support.
+func PathUnescape(path string) (string, error) {
+	return url.PathUnescape(path)
+}
diff --git a/go/vendor/github.com/go-openapi/analysis/internal/pre_go18.go b/go/vendor/github.com/go-openapi/analysis/internal/pre_go18.go
new file mode 100644
index 0000000..4cc6441
--- /dev/null
+++ b/go/vendor/github.com/go-openapi/analysis/internal/pre_go18.go
@@ -0,0 +1,29 @@
+// +build !go1.8
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package internal
+
+import "net/url"
+
+// PathUnescape provides url.PathUnescape(), with seamless
+// go version support for pre-go1.8
+//
+// NOTE(review): url.QueryUnescape is not a strict substitute for
+// url.PathUnescape — it additionally decodes '+' as a space. Paths
+// containing a literal '+' will therefore unescape differently on
+// pre-go1.8 builds; presumably acceptable here, but worth confirming.
+//
+// TODO: this function is currently defined in go-openapi/swag,
+// but unexported. We might choose to export it, or simply phase
+// out pre-go1.8 support.
+func PathUnescape(path string) (string, error) {
+	return url.QueryUnescape(path)
+}
diff --git a/go/vendor/github.com/go-openapi/analysis/mixin.go b/go/vendor/github.com/go-openapi/analysis/mixin.go
new file mode 100644
index 0000000..49806b4
--- /dev/null
+++ b/go/vendor/github.com/go-openapi/analysis/mixin.go
@@ -0,0 +1,334 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analysis
+
+import (
+	"fmt"
+	"reflect"
+
+	"github.com/go-openapi/spec"
+)
+
+// Mixin modifies the primary swagger spec by adding the paths and
+// definitions from the mixin specs. Top level parameters and
+// responses from the mixins are also carried over. Operation id
+// collisions are avoided by appending "Mixin<N>" but only if
+// needed.
+//
+// The following parts of primary are never modified by merging:
+//   - Info
+//   - BasePath
+//   - Host
+//   - ExternalDocs
+//
+// Consider calling FixEmptyResponseDescriptions() on the modified primary
+// if you read them from storage and they are valid to start with.
+//
+// Entries in "paths", "definitions", "parameters" and "responses" are
+// added to the primary in the order of the given mixins. If the entry
+// already exists in primary it is skipped with a warning message.
+//
+// The count of skipped entries (from collisions) is returned so any
+// deviation from the number expected can flag a warning in your build
+// scripts. Carefully review the collisions before accepting them;
+// consider renaming things if possible.
+//
+// No key normalization takes place (paths, type defs,
+// etc). Ensure they are canonical if your downstream tools do
+// key normalization of any form.
+//
+// Merging schemes (http, https), and consumers/producers do not account for
+// collisions.
+func Mixin(primary *spec.Swagger, mixins ...*spec.Swagger) []string {
+	skipped := make([]string, 0, len(mixins))
+	opIds := getOpIds(primary)
+	initPrimary(primary)
+
+	// mergers that only need the primary spec and one mixin,
+	// applied in priority order
+	simpleMergers := []func(*spec.Swagger, *spec.Swagger) []string{
+		mergeConsumes,
+		mergeProduces,
+		mergeTags,
+		mergeSchemes,
+		mergeSecurityDefinitions,
+		mergeSecurityRequirements,
+		mergeDefinitions,
+	}
+
+	for i, m := range mixins {
+		for _, merge := range simpleMergers {
+			skipped = append(skipped, merge(primary, m)...)
+		}
+
+		// merging paths requires a map of operationIDs to work with
+		skipped = append(skipped, mergePaths(primary, m, opIds, i)...)
+
+		skipped = append(skipped, mergeParameters(primary, m)...)
+
+		skipped = append(skipped, mergeResponses(primary, m)...)
+	}
+	return skipped
+}
+
+// getOpIds extracts all the paths.<path>.operationIds from the given
+// spec and returns them as the keys in a map with 'true' values.
+func getOpIds(s *spec.Swagger) map[string]bool {
+	ids := make(map[string]bool)
+	if s.Paths == nil {
+		return ids
+	}
+	for _, pathItem := range s.Paths.Paths {
+		for _, op := range pathItemOps(pathItem) {
+			ids[op.ID] = true
+		}
+	}
+	return ids
+}
+
+// pathItemOps collects the non-nil operations attached to a path item.
+// NOTE(review): Options operations are not collected — confirm this is
+// intentional before relying on it for exhaustive traversal.
+func pathItemOps(p spec.PathItem) []*spec.Operation {
+	var ops []*spec.Operation
+	for _, op := range []*spec.Operation{p.Get, p.Put, p.Post, p.Delete, p.Head, p.Patch} {
+		ops = appendOp(ops, op)
+	}
+	return ops
+}
+
+// appendOp appends op to ops, skipping nil operations.
+func appendOp(ops []*spec.Operation, op *spec.Operation) []*spec.Operation {
+	if op != nil {
+		ops = append(ops, op)
+	}
+	return ops
+}
+
+// mergeSecurityDefinitions adds the security definitions from m that are not
+// already present in primary; collisions are reported in skipped.
+func mergeSecurityDefinitions(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+	for name, scheme := range m.SecurityDefinitions {
+		if _, exists := primary.SecurityDefinitions[name]; exists {
+			skipped = append(skipped, fmt.Sprintf(
+				"SecurityDefinitions entry '%v' already exists in primary or higher priority mixin, skipping\n", name))
+			continue
+		}
+		primary.SecurityDefinitions[name] = scheme
+	}
+	return
+}
+
+// mergeSecurityRequirements appends the security requirements from m that are
+// not already present (by deep equality) in primary; duplicates are reported
+// in skipped.
+func mergeSecurityRequirements(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+	for _, requirement := range m.Security {
+		duplicate := false
+		for _, existing := range primary.Security {
+			if reflect.DeepEqual(requirement, existing) {
+				duplicate = true
+				break
+			}
+		}
+		if duplicate {
+			skipped = append(skipped, fmt.Sprintf(
+				"Security requirement: '%v' already exists in primary or higher priority mixin, skipping\n", requirement))
+			continue
+		}
+		primary.Security = append(primary.Security, requirement)
+	}
+	return
+}
+
+// mergeDefinitions adds the definitions from m that are not already present
+// in primary; collisions are reported in skipped.
+func mergeDefinitions(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+	for name, schema := range m.Definitions {
+		// assume name collisions represent IDENTICAL type. careful.
+		if _, exists := primary.Definitions[name]; exists {
+			skipped = append(skipped, fmt.Sprintf(
+				"definitions entry '%v' already exists in primary or higher priority mixin, skipping\n", name))
+			continue
+		}
+		primary.Definitions[name] = schema
+	}
+	return
+}
+
+// mergePaths adds the paths from m that are not already present in primary;
+// collisions on a path are reported in skipped. mixIndex is the position of m
+// among the mixins and is used to build unique operationIds on collision.
+// primary.Paths is guaranteed non-nil by initPrimary().
+func mergePaths(primary *spec.Swagger, m *spec.Swagger, opIds map[string]bool, mixIndex int) (skipped []string) {
+	if m.Paths != nil {
+		for k, v := range m.Paths.Paths {
+			if _, exists := primary.Paths.Paths[k]; exists {
+				warn := fmt.Sprintf(
+					"paths entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
+				skipped = append(skipped, warn)
+				continue
+			}
+
+			// Swagger requires that operationIds be
+			// unique within a spec. If we find a
+			// collision we append "Mixin0" to the
+			// operationId we are adding, where 0 is mixin
+			// index.  We assume that operationIds with
+			// all the provided specs are already unique.
+			piops := pathItemOps(v)
+			for _, piop := range piops {
+				if opIds[piop.ID] {
+					piop.ID = fmt.Sprintf("%v%v%v", piop.ID, "Mixin", mixIndex)
+				}
+				opIds[piop.ID] = true
+			}
+			primary.Paths.Paths[k] = v
+		}
+	}
+	return
+}
+
+// mergeParameters adds the top level parameters from m that are not already
+// present in primary; collisions are reported in skipped.
+func mergeParameters(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+	// could try to rename on conflict but would
+	// have to fix $refs in the mixin. Complain
+	// for now
+	for name, param := range m.Parameters {
+		if _, exists := primary.Parameters[name]; exists {
+			skipped = append(skipped, fmt.Sprintf(
+				"top level parameters entry '%v' already exists in primary or higher priority mixin, skipping\n", name))
+			continue
+		}
+		primary.Parameters[name] = param
+	}
+	return
+}
+
+// mergeResponses adds the top level responses from m that are not already
+// present in primary; collisions are reported in skipped.
+func mergeResponses(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+	// could try to rename on conflict but would
+	// have to fix $refs in the mixin. Complain
+	// for now
+	for name, response := range m.Responses {
+		if _, exists := primary.Responses[name]; exists {
+			skipped = append(skipped, fmt.Sprintf(
+				"top level responses entry '%v' already exists in primary or higher priority mixin, skipping\n", name))
+			continue
+		}
+		primary.Responses[name] = response
+	}
+	return
+}
+
+// mergeConsumes appends the consumes MIME types from m that primary does not
+// already declare. Duplicates are skipped silently (no warning emitted), so
+// skipped is always empty; the return type matches the other mergers.
+func mergeConsumes(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+MIME:
+	for _, mime := range m.Consumes {
+		for _, existing := range primary.Consumes {
+			if existing == mime {
+				// no warning here: we just skip it
+				continue MIME
+			}
+		}
+		primary.Consumes = append(primary.Consumes, mime)
+	}
+	return
+}
+
+// mergeProduces appends the produces MIME types from m that primary does not
+// already declare. Duplicates are skipped silently (no warning emitted), so
+// skipped is always empty; the return type matches the other mergers.
+func mergeProduces(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+MIME:
+	for _, mime := range m.Produces {
+		for _, existing := range primary.Produces {
+			if existing == mime {
+				// no warning here: we just skip it
+				continue MIME
+			}
+		}
+		primary.Produces = append(primary.Produces, mime)
+	}
+	return
+}
+
+// mergeTags appends the top level tags from m whose name is not already used
+// in primary; name collisions are reported in skipped.
+func mergeTags(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+TAG:
+	for _, tag := range m.Tags {
+		for _, existing := range primary.Tags {
+			if existing.Name == tag.Name {
+				skipped = append(skipped, fmt.Sprintf(
+					"top level tags entry with name '%v' already exists in primary or higher priority mixin, skipping\n",
+					tag.Name))
+				continue TAG
+			}
+		}
+		primary.Tags = append(primary.Tags, tag)
+	}
+	return
+}
+
+// mergeSchemes appends the schemes (http, https, ...) from m that primary does
+// not already declare. Duplicates are skipped silently (no warning emitted),
+// so skipped is always empty; the return type matches the other mergers.
+func mergeSchemes(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+SCHEME:
+	for _, scheme := range m.Schemes {
+		for _, existing := range primary.Schemes {
+			if existing == scheme {
+				// no warning here: we just skip it
+				continue SCHEME
+			}
+		}
+		primary.Schemes = append(primary.Schemes, scheme)
+	}
+	return
+}
+
+// initPrimary allocates every collection the merge functions write to, so
+// they can insert entries without nil checks. Note the ordering dependency:
+// primary.Paths must be allocated before primary.Paths.Paths is tested.
+func initPrimary(primary *spec.Swagger) {
+	if primary.SecurityDefinitions == nil {
+		primary.SecurityDefinitions = make(map[string]*spec.SecurityScheme)
+	}
+	if primary.Security == nil {
+		primary.Security = make([]map[string][]string, 0, 10)
+	}
+	if primary.Produces == nil {
+		primary.Produces = make([]string, 0, 10)
+	}
+	if primary.Consumes == nil {
+		primary.Consumes = make([]string, 0, 10)
+	}
+	if primary.Tags == nil {
+		primary.Tags = make([]spec.Tag, 0, 10)
+	}
+	if primary.Schemes == nil {
+		primary.Schemes = make([]string, 0, 10)
+	}
+	if primary.Paths == nil {
+		primary.Paths = &spec.Paths{Paths: make(map[string]spec.PathItem)}
+	}
+	if primary.Paths.Paths == nil {
+		primary.Paths.Paths = make(map[string]spec.PathItem)
+	}
+	if primary.Definitions == nil {
+		primary.Definitions = make(spec.Definitions)
+	}
+	if primary.Parameters == nil {
+		primary.Parameters = make(map[string]spec.Parameter)
+	}
+	if primary.Responses == nil {
+		primary.Responses = make(map[string]spec.Response)
+	}
+}
diff --git a/go/vendor/github.com/go-openapi/analysis/schema.go b/go/vendor/github.com/go-openapi/analysis/schema.go
new file mode 100644
index 0000000..c0b77c5
--- /dev/null
+++ b/go/vendor/github.com/go-openapi/analysis/schema.go
@@ -0,0 +1,234 @@
+package analysis
+
+import (
+	"github.com/go-openapi/spec"
+	"github.com/go-openapi/strfmt"
+)
+
+// SchemaOpts configures the schema analyzer
+type SchemaOpts struct {
+	Schema   *spec.Schema // the schema to analyze
+	Root     interface{}  // root document, used to resolve $ref in the schema
+	BasePath string       // base path used when resolving $ref
+	_        struct{}     // enforces keyed construction of this options struct
+}
+
+// Schema analysis, will classify the schema according to known
+// patterns.
+//
+// The analysis runs in a fixed order: raw flags, known type, enum and base
+// type detection, then map/array/tuple classification, resolution of $ref
+// targets, and finally the IsSimpleSchema summary flag.
+func Schema(opts SchemaOpts) (*AnalyzedSchema, error) {
+	a := &AnalyzedSchema{
+		schema:   opts.Schema,
+		root:     opts.Root,
+		basePath: opts.BasePath,
+	}
+
+	a.initializeFlags()
+	a.inferKnownType()
+	a.inferEnum()
+	a.inferBaseType()
+
+	if err := a.inferMap(); err != nil {
+		return nil, err
+	}
+	if err := a.inferArray(); err != nil {
+		return nil, err
+	}
+
+	if err := a.inferTuple(); err != nil {
+		// NOTE(fredbi): currently, inferTuple() never returns an error
+		return nil, err
+	}
+
+	if err := a.inferFromRef(); err != nil {
+		return nil, err
+	}
+
+	a.inferSimpleSchema()
+	return a, nil
+}
+
+// AnalyzedSchema indicates what the schema represents
+type AnalyzedSchema struct {
+	schema   *spec.Schema
+	root     interface{}
+	basePath string
+
+	// raw facts about the schema, collected by initializeFlags()
+	hasProps           bool
+	hasAllOf           bool
+	hasItems           bool
+	hasAdditionalProps bool
+	hasAdditionalItems bool
+	hasRef             bool
+
+	// classification flags derived from the raw facts by the infer* methods
+	IsKnownType      bool
+	IsSimpleSchema   bool
+	IsArray          bool
+	IsSimpleArray    bool
+	IsMap            bool
+	IsSimpleMap      bool
+	IsExtendedObject bool
+	IsTuple          bool
+	IsTupleWithExtra bool
+	IsBaseType       bool
+	IsEnum           bool
+}
+
+// inherits copies the flag fields from other onto this schema.
+// The receiver's schema, root and basePath fields are left untouched.
+func (a *AnalyzedSchema) inherits(other *AnalyzedSchema) {
+	if other == nil {
+		return
+	}
+	a.hasProps = other.hasProps
+	a.hasAllOf = other.hasAllOf
+	a.hasItems = other.hasItems
+	a.hasAdditionalItems = other.hasAdditionalItems
+	a.hasAdditionalProps = other.hasAdditionalProps
+	a.hasRef = other.hasRef
+
+	a.IsKnownType = other.IsKnownType
+	a.IsSimpleSchema = other.IsSimpleSchema
+	a.IsArray = other.IsArray
+	a.IsSimpleArray = other.IsSimpleArray
+	a.IsMap = other.IsMap
+	a.IsSimpleMap = other.IsSimpleMap
+	a.IsExtendedObject = other.IsExtendedObject
+	a.IsTuple = other.IsTuple
+	a.IsTupleWithExtra = other.IsTupleWithExtra
+	a.IsBaseType = other.IsBaseType
+	a.IsEnum = other.IsEnum
+}
+
+// inferFromRef expands the schema's $ref target against the root document,
+// analyzes it, and copies the resulting flags onto this schema.
+//
+// Fix: the original guarded the nested analysis with `if sch != nil`, which
+// is always true since sch is allocated with new() above — the dead branch
+// is removed; behavior is unchanged.
+func (a *AnalyzedSchema) inferFromRef() error {
+	if !a.hasRef {
+		return nil
+	}
+	sch := new(spec.Schema)
+	sch.Ref = a.schema.Ref
+	// NOTE(fredbi): currently the only cause for error is an unresolved ref.
+	// Since spec.ExpandSchema() expands the schema recursively, there is no
+	// chance to get an error from the nested analysis below, until we add
+	// more causes for error in this schema analysis.
+	if err := spec.ExpandSchema(sch, a.root, nil); err != nil {
+		return err
+	}
+	rsch, err := Schema(SchemaOpts{
+		Schema:   sch,
+		Root:     a.root,
+		BasePath: a.basePath,
+	})
+	if err != nil {
+		return err
+	}
+	a.inherits(rsch)
+	return nil
+}
+
+// inferSimpleSchema summarizes the analysis: a schema is "simple" when it is
+// a known scalar type, a simple array, or a simple map.
+func (a *AnalyzedSchema) inferSimpleSchema() {
+	simple := a.IsKnownType
+	simple = simple || a.IsSimpleArray
+	simple = simple || a.IsSimpleMap
+	a.IsSimpleSchema = simple
+}
+
+// inferKnownType flags schemas that map directly to a well-known type:
+// a primitive JSON type, a registered string format, or a bare object with
+// no properties, composition or additional items/properties.
+func (a *AnalyzedSchema) inferKnownType() {
+	tpe, format := a.schema.Type, a.schema.Format
+	switch {
+	case tpe.Contains("boolean"), tpe.Contains("integer"), tpe.Contains("number"), tpe.Contains("string"):
+		a.IsKnownType = true
+	case format != "" && strfmt.Default.ContainsName(format):
+		a.IsKnownType = true
+	case a.isObjectType() && !a.hasProps && !a.hasAllOf && !a.hasAdditionalProps && !a.hasAdditionalItems:
+		a.IsKnownType = true
+	default:
+		a.IsKnownType = false
+	}
+}
+
+// inferMap classifies object schemas that carry additionalProperties:
+// a pure map (additionalProperties only) vs. an extended object
+// (properties/allOf combined with additionalProperties). A map is "simple"
+// when its additionalProperties schema is itself simple, or when any
+// property is allowed.
+func (a *AnalyzedSchema) inferMap() error {
+	if !a.isObjectType() {
+		return nil
+	}
+	hasExtra := a.hasProps || a.hasAllOf
+	a.IsMap = a.hasAdditionalProps && !hasExtra
+	a.IsExtendedObject = a.hasAdditionalProps && hasExtra
+	if !a.IsMap {
+		return nil
+	}
+	// IsMap implies hasAdditionalProps, hence AdditionalProperties is non-nil
+	switch {
+	case a.schema.AdditionalProperties.Schema != nil:
+		msch, err := Schema(SchemaOpts{
+			Schema:   a.schema.AdditionalProperties.Schema,
+			Root:     a.root,
+			BasePath: a.basePath,
+		})
+		if err != nil {
+			return err
+		}
+		a.IsSimpleMap = msch.IsSimpleSchema
+	case a.schema.AdditionalProperties.Allows:
+		a.IsSimpleMap = true
+	}
+	return nil
+}
+
+// inferArray classifies JSON arrays and decides whether they are "simple".
+//
+// An array has Items defined as an object schema, otherwise we qualify this
+// JSON array as a tuple (yes, even if the Items array contains only one
+// element). Arrays in JSON schema may be unrestricted (i.e no Items
+// specified). Note that arrays in Swagger MUST have Items. Nonetheless, we
+// analyze unrestricted arrays.
+//
+// NOTE: the spec package misses the distinction between:
+// items: [] and items: {}, so we consider both arrays here.
+func (a *AnalyzedSchema) inferArray() error {
+	a.IsArray = a.isArrayType() && (a.schema.Items == nil || a.schema.Items.Schemas == nil)
+	if !a.IsArray {
+		return nil
+	}
+	if !a.hasItems {
+		// unrestricted array: considered simple
+		a.IsSimpleArray = true
+		return nil
+	}
+	if a.schema.Items.Schema == nil {
+		return nil
+	}
+	itsch, err := Schema(SchemaOpts{
+		Schema:   a.schema.Items.Schema,
+		Root:     a.root,
+		BasePath: a.basePath,
+	})
+	if err != nil {
+		return err
+	}
+	a.IsSimpleArray = itsch.IsSimpleSchema
+	return nil
+}
+
+// inferTuple flags arrays declared with a collection of item schemas
+// (items: [...]), with or without additionalItems.
+// NOTE: no error may currently occur here; the error return is kept for
+// symmetry with the other infer* methods.
+func (a *AnalyzedSchema) inferTuple() error {
+	isTuple := a.hasItems && a.schema.Items.Schemas != nil
+	a.IsTuple = isTuple && !a.hasAdditionalItems
+	a.IsTupleWithExtra = isTuple && a.hasAdditionalItems
+	return nil
+}
+
+// inferBaseType flags polymorphic object schemas, i.e. those declaring a
+// discriminator.
+func (a *AnalyzedSchema) inferBaseType() {
+	if !a.isObjectType() {
+		return
+	}
+	a.IsBaseType = a.schema.Discriminator != ""
+}
+
+// inferEnum flags schemas declaring an enumeration of allowed values.
+func (a *AnalyzedSchema) inferEnum() {
+	a.IsEnum = len(a.schema.Enum) != 0
+}
+
+// initializeFlags computes the raw has* facts about the schema, consumed by
+// the subsequent infer* steps.
+//
+// Fix: hasAdditionalProps used to re-test AdditionalProperties != nil inside
+// the parentheses — a tautology under the enclosing &&. It now checks the
+// Schema field (mirroring hasAdditionalItems), so that
+// "additionalProperties": false is no longer reported as having additional
+// properties.
+func (a *AnalyzedSchema) initializeFlags() {
+	a.hasProps = len(a.schema.Properties) > 0
+	a.hasAllOf = len(a.schema.AllOf) > 0
+	a.hasRef = a.schema.Ref.String() != ""
+
+	a.hasItems = a.schema.Items != nil &&
+		(a.schema.Items.Schema != nil || len(a.schema.Items.Schemas) > 0)
+
+	a.hasAdditionalProps = a.schema.AdditionalProperties != nil &&
+		(a.schema.AdditionalProperties.Schema != nil || a.schema.AdditionalProperties.Allows)
+
+	a.hasAdditionalItems = a.schema.AdditionalItems != nil &&
+		(a.schema.AdditionalItems.Schema != nil || a.schema.AdditionalItems.Allows)
+}
+
+// isObjectType tells if the (unreferenced) schema is an object: either no
+// type is declared, or the type list contains "object" or the empty type.
+func (a *AnalyzedSchema) isObjectType() bool {
+	if a.hasRef {
+		return false
+	}
+	tpe := a.schema.Type
+	return tpe == nil || tpe.Contains("") || tpe.Contains("object")
+}
+
+// isArrayType tells if the (unreferenced) schema explicitly declares the
+// "array" type.
+func (a *AnalyzedSchema) isArrayType() bool {
+	if a.hasRef {
+		return false
+	}
+	return a.schema.Type != nil && a.schema.Type.Contains("array")
+}