diff --git a/Godeps/Godeps.json b/Godeps/Godeps.json index 97505eb9dc579..51b7eeed4a6a0 100644 --- a/Godeps/Godeps.json +++ b/Godeps/Godeps.json @@ -143,6 +143,16 @@ "ImportPath": "github.com/mitchellh/mapstructure", "Rev": "740c764bc6149d3f1806231418adb9f52c11bcbf" }, + { + "ImportPath": "github.com/onsi/ginkgo", + "Comment": "v1.1.0-33-g17ea479729ee", + "Rev": "17ea479729ee427265ac1e913443018350946ddf" + }, + { + "ImportPath": "github.com/onsi/gomega", + "Comment": "v1.0-28-g8adf9e1730c5", + "Rev": "8adf9e1730c55cdc590de7d49766cb2acc88d8f2" + }, { "ImportPath": "github.com/racker/perigee", "Comment": "v0.0.0-18-g0c00cb0", diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/.gitignore b/Godeps/_workspace/src/github.com/onsi/ginkgo/.gitignore new file mode 100644 index 0000000000000..922b4f7f91915 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/.gitignore @@ -0,0 +1,4 @@ +.DS_Store +TODO +tmp/**/* +*.coverprofile \ No newline at end of file diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/.travis.yml b/Godeps/_workspace/src/github.com/onsi/ginkgo/.travis.yml new file mode 100644 index 0000000000000..f8b6984e6838c --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/.travis.yml @@ -0,0 +1,12 @@ +language: go +go: + - 1.4 + +install: + - go get -v ./... + - go get golang.org/x/tools/cmd/cover + - go get github.com/onsi/gomega + - go install github.com/onsi/ginkgo/ginkgo + - export PATH=$PATH:$HOME/gopath/bin + +script: $HOME/gopath/bin/ginkgo -r --randomizeAllSpecs --failOnPending --randomizeSuites --race diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/CHANGELOG.md b/Godeps/_workspace/src/github.com/onsi/ginkgo/CHANGELOG.md new file mode 100644 index 0000000000000..06c7428f39a12 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/CHANGELOG.md @@ -0,0 +1,110 @@ +## HEAD + +Improvements: + +- Call reporters in reverse order when announcing spec completion -- allows custom reporters to emit output before the default reporter does. +- Improved focus behavior. Now, this: + + ```golang + FDescribe("Some describe", func() { + It("A", func() {}) + + FIt("B", func() {}) + }) + ``` + + will run `B` but *not* `A`. This tends to be a common usage pattern when in the thick of writing and debugging tests. +- When `SIGINT` is received, Ginkgo will emit the contents of the `GinkgoWriter` before running the `AfterSuite`. Useful for debugging stuck tests. +- When `--progress` is set, Ginkgo will write test progress (in particular, Ginkgo will say when it is about to run a BeforeEach, AfterEach, It, etc...) to the `GinkgoWriter`. This is useful for debugging stuck tests and tests that generate many logs. +- Improved output when an error occurs in a setup or teardown block. +- When `--dryRun` is set, Ginkgo will walk the spec tree and emit to its reporter *without* actually running anything. Best paired with `-v` to understand which specs will run in which order. +- Add `By` to help document long `It`s. `By` simply writes to the `GinkgoWriter`. +- Add support for precompiled tests: + - `ginkgo build ` will now compile the package, producing a file named `package.test` + - The compiled `package.test` file can be run directly. This runs the tests in series. + - To run precompiled tests in parallel, you can run: `ginkgo -p package.test` +- Support `bootstrap`ping and `generate`ing [Agouti](http://agouti.org) specs. 
+- `ginkgo generate` and `ginkgo bootstrap` now honor the package name already defined in a given directory +- The `ginkgo` CLI ignores `SIGQUIT`. Prevents its stack dump from interlacing with the underlying test suite's stack dump. + +Bug Fixes: + +- If --skipPackages is used and all packages are skipped, Ginkgo should exit 0. +- Fix tempfile leak when running in parallel +- Fix incorrect failure message when a panic occurs during a parallel test run + + +## 1.1.0 (8/2/2014) + +No changes, just dropping the beta. + +## 1.1.0-beta (7/22/2014) +New Features: + +- `ginkgo watch` now monitors packages *and their dependencies* for changes. The depth of the dependency tree can be modified with the `-depth` flag. +- Test suites with a programmatic focus (`FIt`, `FDescribe`, etc...) exit with non-zero status code, even when they pass. This allows CI systems to detect accidental commits of focused test suites. +- `ginkgo -p` runs the test suite in parallel with an auto-detected number of nodes. +- `ginkgo -tags=TAG_LIST` passes a list of tags down to the `go build` command. +- `ginkgo --failFast` aborts the test suite after the first failure. +- `ginkgo generate file_1 file_2` can take multiple file arguments. +- Ginkgo now summarizes any spec failures that occurred at the end of the test run. +- `ginkgo --randomizeSuites` will run test *suites* in random order using the generated/passed-in seed. + +Improvements: + +- `ginkgo -skipPackage` now takes a comma-separated list of strings. If the *relative path* to a package matches one of the entries in the comma-separated list, that package is skipped. +- `ginkgo --untilItFails` no longer recompiles between attempts. +- Ginkgo now panics when a runnable node (`It`, `BeforeEach`, `JustBeforeEach`, `AfterEach`, `Measure`) is nested within another runnable node. This is always a mistake. Any test suites that panic because of this change should be fixed. + +Bug Fixes: + +- `ginkgo bootstrap` and `ginkgo generate` no longer fail when dealing with `hyphen-separated-packages`. +- Parallel specs are now better distributed across nodes - fixed a crashing bug where (for example) distributing 11 tests across 7 nodes would panic. + +## 1.0.0 (5/24/2014) +New Features: + +- Add `GinkgoParallelNode()` - shorthand for `config.GinkgoConfig.ParallelNode` + +Improvements: + +- When compilation fails, the compilation output is rewritten to present a correct *relative* path. Allows ⌘-clicking in iTerm to open the file in your text editor. +- `--untilItFails` and `ginkgo watch` now generate new random seeds between test runs, unless a particular random seed is specified. + +Bug Fixes: + +- `-cover` now generates a correctly combined coverprofile when running in parallel with multiple `-node`s. +- Print out the contents of the `GinkgoWriter` when `BeforeSuite` or `AfterSuite` fail. +- Fix all remaining race conditions in Ginkgo's test suite. + +## 1.0.0-beta (4/14/2014) +Breaking changes: + +- `thirdparty/gomocktestreporter` is gone. Use `GinkgoT()` instead +- Modified the Reporter interface +- `watch` is now a subcommand, not a flag. + +DSL changes: + +- `BeforeSuite` and `AfterSuite` for setting up and tearing down test suites. +- `AfterSuite` is triggered on interrupt (`^C`) as well as exit. +- `SynchronizedBeforeSuite` and `SynchronizedAfterSuite` for setting up and tearing down singleton resources across parallel nodes.
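+
+A minimal sketch of suite-level setup with these blocks (illustrative only; the bodies and the returned value are placeholders):
+
+```golang
+var _ = BeforeSuite(func() {
+    // runs once before any spec in the suite
+})
+
+var _ = AfterSuite(func() {
+    // runs once after all specs -- also triggered on interrupt (^C)
+})
+
+var _ = SynchronizedBeforeSuite(func() []byte {
+    // runs only on parallel node 1; the returned bytes are handed to every node
+    return []byte("shared-resource-address")
+}, func(data []byte) {
+    // runs on every node with the data produced on node 1
+})
+
+var _ = SynchronizedAfterSuite(func() {
+    // runs on every node
+}, func() {
+    // runs only on node 1, after all other nodes have finished
+})
+```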
+ +CLI changes: + +- `watch` is now a subcommand, not a flag +- `--nodot` flag can be passed to `ginkgo generate` and `ginkgo bootstrap` to avoid dot imports. This explicitly imports all exported identifiers in Ginkgo and Gomega. Refreshing this list can be done by running `ginkgo nodot` +- Additional arguments can be passed to specs. Pass them after the `--` separator +- `--skipPackage` flag takes a regexp and ignores any packages with package names passing said regexp. +- `--trace` flag prints out full stack traces when errors occur, not just the line at which the error occurs. + +Misc: + +- Start using semantic versioning +- Start maintaining changelog + +Major refactor: + +- Pull out Ginkgo's internal to `internal` +- Rename `example` everywhere to `spec` +- Much more! diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/LICENSE b/Godeps/_workspace/src/github.com/onsi/ginkgo/LICENSE new file mode 100644 index 0000000000000..9415ee72c17f8 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2013-2014 Onsi Fakhouri + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/README.md b/Godeps/_workspace/src/github.com/onsi/ginkgo/README.md new file mode 100644 index 0000000000000..5cb6fdc843ec4 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/README.md @@ -0,0 +1,115 @@ +![Ginkgo: A Golang BDD Testing Framework](http://onsi.github.io/ginkgo/images/ginkgo.png) + +[![Build Status](https://travis-ci.org/onsi/ginkgo.png)](https://travis-ci.org/onsi/ginkgo) + +Jump to the [docs](http://onsi.github.io/ginkgo/) to learn more. To start rolling your Ginkgo tests *now* [keep reading](#set-me-up)! + +To discuss Ginkgo and get updates, join the [google group](https://groups.google.com/d/forum/ginkgo-and-gomega). + +## Feature List + +- Ginkgo uses Go's `testing` package and can live alongside your existing `testing` tests. 
It's easy to [bootstrap](http://onsi.github.io/ginkgo/#bootstrapping-a-suite) and start writing your [first tests](http://onsi.github.io/ginkgo/#adding-specs-to-a-suite) + +- Structure your BDD-style tests expressively: + - Nestable [`Describe` and `Context` container blocks](http://onsi.github.io/ginkgo/#organizing-specs-with-containers-describe-and-context) + - [`BeforeEach` and `AfterEach` blocks](http://onsi.github.io/ginkgo/#extracting-common-setup-beforeeach) for setup and teardown + - [`It` blocks](http://onsi.github.io/ginkgo/#individual-specs-) that hold your assertions + - [`JustBeforeEach` blocks](http://onsi.github.io/ginkgo/#separating-creation-and-configuration-justbeforeeach) that separate creation from configuration (also known as the subject action pattern). + - [`BeforeSuite` and `AfterSuite` blocks](http://onsi.github.io/ginkgo/#global-setup-and-teardown-beforesuite-and-aftersuite) to prep for and clean up after a suite. + +- A comprehensive test runner that lets you: + - Mark specs as [pending](http://onsi.github.io/ginkgo/#pending-specs) + - [Focus](http://onsi.github.io/ginkgo/#focused-specs) individual specs, and groups of specs, either programmatically or on the command line + - Run your tests in [random order](http://onsi.github.io/ginkgo/#spec-permutation), and then reuse random seeds to replicate the same order. + - Break up your test suite into parallel processes for straightforward [test parallelization](http://onsi.github.io/ginkgo/#parallel-specs) + +- `ginkgo`: a command line interface with plenty of handy command line arguments for [running your tests](http://onsi.github.io/ginkgo/#running-tests) and [generating](http://onsi.github.io/ginkgo/#generators) test files. Here are a few choice examples: + - `ginkgo -nodes=N` runs your tests in `N` parallel processes and prints out coherent output in realtime + - `ginkgo -cover` runs your tests using Golang's code coverage tool + - `ginkgo convert` converts an XUnit-style `testing` package to a Ginkgo-style package + - `ginkgo -focus="REGEXP"` and `ginkgo -skip="REGEXP"` allow you to specify a subset of tests to run via regular expression + - `ginkgo -r` runs all test suites under the current directory + - `ginkgo -v` prints out identifying information for each test just before it runs + + And much more: run `ginkgo help` for details! + + The `ginkgo` CLI is convenient, but purely optional -- Ginkgo works just fine with `go test` + +- `ginkgo watch` [watches](https://onsi.github.io/ginkgo/#watching-for-changes) packages *and their dependencies* for changes, then reruns tests. Run tests immediately as you develop! + +- Built-in support for testing [asynchronicity](http://onsi.github.io/ginkgo/#asynchronous-tests) + +- Built-in support for [benchmarking](http://onsi.github.io/ginkgo/#benchmark-tests) your code. Control the number of benchmark samples as you gather runtimes and other arbitrary bits of numerical information about your code. + +- [Completions for Sublime Text](https://github.com/onsi/ginkgo-sublime-completions): just use [Package Control](https://sublime.wbond.net/) to install `Ginkgo Completions`. + +- Straightforward support for third-party testing libraries such as [Gomock](https://code.google.com/p/gomock/) and [Testify](https://github.com/stretchr/testify). Check out the [docs](http://onsi.github.io/ginkgo/#third-party-integrations) for details.
+ +- A modular architecture that lets you easily: + - Write [custom reporters](http://onsi.github.io/ginkgo/#writing-custom-reporters) (for example, Ginkgo comes with a [JUnit XML reporter](http://onsi.github.io/ginkgo/#generating-junit-xml-output) and a TeamCity reporter). + - [Adapt an existing matcher library (or write your own!)](http://onsi.github.io/ginkgo/#using-other-matcher-libraries) to work with Ginkgo + +## [Gomega](http://github.com/onsi/gomega): Ginkgo's Preferred Matcher Library + +Ginkgo is best paired with Gomega. Learn more about Gomega [here](http://onsi.github.io/gomega/) + +## [Agouti](http://github.com/sclevine/agouti): A Golang Acceptance Testing Framework + +Agouti allows you run WebDriver integration tests. Learn more about Agouti [here](http://agouti.org) + +## Set Me Up! + +You'll need Golang v1.2+ (Ubuntu users: you probably have Golang v1.0 -- you'll need to upgrade!) + +```bash + +go get github.com/onsi/ginkgo/ginkgo # installs the ginkgo CLI +go get github.com/onsi/gomega # fetches the matcher library + +cd path/to/package/you/want/to/test + +ginkgo bootstrap # set up a new ginkgo suite +ginkgo generate # will create a sample test file. edit this file and add your tests then... + +go test # to run your tests + +ginkgo # also runs your tests + +``` + +## I'm new to Go: What are my testing options? + +Of course, I heartily recommend [Ginkgo](https://github.com/onsi/ginkgo) and [Gomega](https://github.com/onsi/gomega). Both packages are seeing heavy, daily, production use on a number of projects and boast a mature and comprehensive feature-set. + +With that said, it's great to know what your options are :) + +### What Golang gives you out of the box + +Testing is a first class citizen in Golang, however Go's built-in testing primitives are somewhat limited: The [testing](http://golang.org/pkg/testing) package provides basic XUnit style tests and no assertion library. + +### Matcher libraries for Golang's XUnit style tests + +A number of matcher libraries have been written to augment Go's built-in XUnit style tests. Here are two that have gained traction: + +- [testify](https://github.com/stretchr/testify) +- [gocheck](http://labix.org/gocheck) + +You can also use Ginkgo's matcher library [Gomega](https://github.com/onsi/gomega) in [XUnit style tests](http://onsi.github.io/gomega/#using-gomega-with-golangs-xunitstyle-tests) + +### BDD style testing frameworks + +There are a handful of BDD-style testing frameworks written for Golang. Here are a few: + +- [Ginkgo](https://github.com/onsi/ginkgo) ;) +- [GoConvey](https://github.com/smartystreets/goconvey) +- [Goblin](https://github.com/franela/goblin) +- [Mao](https://github.com/azer/mao) +- [Zen](https://github.com/pranavraja/zen) + +Finally, @shageman has [put together](https://github.com/shageman/gotestit) a comprehensive comparison of golang testing libraries. + +Go explore! + +## License + +Ginkgo is MIT-Licensed diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/config/config.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/config/config.go new file mode 100644 index 0000000000000..aa4d8ed40cb2b --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/config/config.go @@ -0,0 +1,170 @@ +/* +Ginkgo accepts a number of configuration options. 
+ +These are documented [here](http://onsi.github.io/ginkgo/#the_ginkgo_cli) + +You can also learn more via + + ginkgo help + +or (I kid you not): + + go test -asdf +*/ +package config + +import ( + "flag" + "time" + + "fmt" +) + +const VERSION = "1.1.0" + +type GinkgoConfigType struct { + RandomSeed int64 + RandomizeAllSpecs bool + FocusString string + SkipString string + SkipMeasurements bool + FailOnPending bool + FailFast bool + EmitSpecProgress bool + DryRun bool + + ParallelNode int + ParallelTotal int + SyncHost string + StreamHost string +} + +var GinkgoConfig = GinkgoConfigType{} + +type DefaultReporterConfigType struct { + NoColor bool + SlowSpecThreshold float64 + NoisyPendings bool + Succinct bool + Verbose bool + FullTrace bool +} + +var DefaultReporterConfig = DefaultReporterConfigType{} + +func processPrefix(prefix string) string { + if prefix != "" { + prefix = prefix + "." + } + return prefix +} + +func Flags(flagSet *flag.FlagSet, prefix string, includeParallelFlags bool) { + prefix = processPrefix(prefix) + flagSet.Int64Var(&(GinkgoConfig.RandomSeed), prefix+"seed", time.Now().Unix(), "The seed used to randomize the spec suite.") + flagSet.BoolVar(&(GinkgoConfig.RandomizeAllSpecs), prefix+"randomizeAllSpecs", false, "If set, ginkgo will randomize all specs together. By default, ginkgo only randomizes the top level Describe/Context groups.") + flagSet.BoolVar(&(GinkgoConfig.SkipMeasurements), prefix+"skipMeasurements", false, "If set, ginkgo will skip any measurement specs.") + flagSet.BoolVar(&(GinkgoConfig.FailOnPending), prefix+"failOnPending", false, "If set, ginkgo will mark the test suite as failed if any specs are pending.") + flagSet.BoolVar(&(GinkgoConfig.FailFast), prefix+"failFast", false, "If set, ginkgo will stop running a test suite after a failure occurs.") + flagSet.BoolVar(&(GinkgoConfig.DryRun), prefix+"dryRun", false, "If set, ginkgo will walk the test hierarchy without actually running anything. Best paired with -v.") + flagSet.StringVar(&(GinkgoConfig.FocusString), prefix+"focus", "", "If set, ginkgo will only run specs that match this regular expression.") + flagSet.StringVar(&(GinkgoConfig.SkipString), prefix+"skip", "", "If set, ginkgo will only run specs that do not match this regular expression.") + flagSet.BoolVar(&(GinkgoConfig.EmitSpecProgress), prefix+"progress", false, "If set, ginkgo will emit progress information as each spec runs to the GinkgoWriter.") + + if includeParallelFlags { + flagSet.IntVar(&(GinkgoConfig.ParallelNode), prefix+"parallel.node", 1, "This worker node's (one-indexed) node number. For running specs in parallel.") + flagSet.IntVar(&(GinkgoConfig.ParallelTotal), prefix+"parallel.total", 1, "The total number of worker nodes. 
For running specs in parallel.") + flagSet.StringVar(&(GinkgoConfig.SyncHost), prefix+"parallel.synchost", "", "The address for the server that will synchronize the running nodes.") + flagSet.StringVar(&(GinkgoConfig.StreamHost), prefix+"parallel.streamhost", "", "The address for the server that the running nodes should stream data to.") + } + + flagSet.BoolVar(&(DefaultReporterConfig.NoColor), prefix+"noColor", false, "If set, suppress color output in default reporter.") + flagSet.Float64Var(&(DefaultReporterConfig.SlowSpecThreshold), prefix+"slowSpecThreshold", 5.0, "(in seconds) Specs that take longer to run than this threshold are flagged as slow by the default reporter (default: 5 seconds).") + flagSet.BoolVar(&(DefaultReporterConfig.NoisyPendings), prefix+"noisyPendings", true, "If set, default reporter will shout about pending tests.") + flagSet.BoolVar(&(DefaultReporterConfig.Verbose), prefix+"v", false, "If set, default reporter print out all specs as they begin.") + flagSet.BoolVar(&(DefaultReporterConfig.Succinct), prefix+"succinct", false, "If set, default reporter prints out a very succinct report") + flagSet.BoolVar(&(DefaultReporterConfig.FullTrace), prefix+"trace", false, "If set, default reporter prints out the full stack trace when a failure occurs") +} + +func BuildFlagArgs(prefix string, ginkgo GinkgoConfigType, reporter DefaultReporterConfigType) []string { + prefix = processPrefix(prefix) + result := make([]string, 0) + + if ginkgo.RandomSeed > 0 { + result = append(result, fmt.Sprintf("--%sseed=%d", prefix, ginkgo.RandomSeed)) + } + + if ginkgo.RandomizeAllSpecs { + result = append(result, fmt.Sprintf("--%srandomizeAllSpecs", prefix)) + } + + if ginkgo.SkipMeasurements { + result = append(result, fmt.Sprintf("--%sskipMeasurements", prefix)) + } + + if ginkgo.FailOnPending { + result = append(result, fmt.Sprintf("--%sfailOnPending", prefix)) + } + + if ginkgo.FailFast { + result = append(result, fmt.Sprintf("--%sfailFast", prefix)) + } + + if ginkgo.DryRun { + result = append(result, fmt.Sprintf("--%sdryRun", prefix)) + } + + if ginkgo.FocusString != "" { + result = append(result, fmt.Sprintf("--%sfocus=%s", prefix, ginkgo.FocusString)) + } + + if ginkgo.SkipString != "" { + result = append(result, fmt.Sprintf("--%sskip=%s", prefix, ginkgo.SkipString)) + } + + if ginkgo.EmitSpecProgress { + result = append(result, fmt.Sprintf("--%sprogress", prefix)) + } + + if ginkgo.ParallelNode != 0 { + result = append(result, fmt.Sprintf("--%sparallel.node=%d", prefix, ginkgo.ParallelNode)) + } + + if ginkgo.ParallelTotal != 0 { + result = append(result, fmt.Sprintf("--%sparallel.total=%d", prefix, ginkgo.ParallelTotal)) + } + + if ginkgo.StreamHost != "" { + result = append(result, fmt.Sprintf("--%sparallel.streamhost=%s", prefix, ginkgo.StreamHost)) + } + + if ginkgo.SyncHost != "" { + result = append(result, fmt.Sprintf("--%sparallel.synchost=%s", prefix, ginkgo.SyncHost)) + } + + if reporter.NoColor { + result = append(result, fmt.Sprintf("--%snoColor", prefix)) + } + + if reporter.SlowSpecThreshold > 0 { + result = append(result, fmt.Sprintf("--%sslowSpecThreshold=%.5f", prefix, reporter.SlowSpecThreshold)) + } + + if !reporter.NoisyPendings { + result = append(result, fmt.Sprintf("--%snoisyPendings=false", prefix)) + } + + if reporter.Verbose { + result = append(result, fmt.Sprintf("--%sv", prefix)) + } + + if reporter.Succinct { + result = append(result, fmt.Sprintf("--%ssuccinct", prefix)) + } + + if reporter.FullTrace { + result = append(result, fmt.Sprintf("--%strace", 
prefix)) + } + + return result +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/bootstrap_command.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/bootstrap_command.go new file mode 100644 index 0000000000000..daade748e8d29 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/bootstrap_command.go @@ -0,0 +1,185 @@ +package main + +import ( + "bytes" + "flag" + "fmt" + "os" + "path/filepath" + "strings" + "text/template" + + "go/build" + + "github.com/onsi/ginkgo/ginkgo/nodot" +) + +func BuildBootstrapCommand() *Command { + var agouti, noDot bool + flagSet := flag.NewFlagSet("bootstrap", flag.ExitOnError) + flagSet.BoolVar(&agouti, "agouti", false, "If set, bootstrap will generate a bootstrap file for writing Agouti tests") + flagSet.BoolVar(&noDot, "nodot", false, "If set, bootstrap will generate a bootstrap file that does not . import ginkgo and gomega") + + return &Command{ + Name: "bootstrap", + FlagSet: flagSet, + UsageCommand: "ginkgo bootstrap ", + Usage: []string{ + "Bootstrap a test suite for the current package", + "Accepts the following flags:", + }, + Command: func(args []string, additionalArgs []string) { + generateBootstrap(agouti, noDot) + }, + } +} + +var bootstrapText = `package {{.Package}}_test + +import ( + {{.GinkgoImport}} + {{.GomegaImport}} + + "testing" +) + +func Test{{.FormattedName}}(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "{{.FormattedName}} Suite") +} +` + +var agoutiBootstrapText = `package {{.Package}}_test + +import ( + {{.GinkgoImport}} + {{.GomegaImport}} + . "github.com/sclevine/agouti/core" + + "testing" +) + +func Test{{.FormattedName}}(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "{{.FormattedName}} Suite") +} + +var agoutiDriver WebDriver + +var _ = BeforeSuite(func() { + var err error + + // Choose a WebDriver: + + agoutiDriver, err = PhantomJS() + // agoutiDriver, err = Selenium() + // agoutiDriver, err = Chrome() + + Expect(err).NotTo(HaveOccurred()) + Expect(agoutiDriver.Start()).To(Succeed()) +}) + +var _ = AfterSuite(func() { + agoutiDriver.Stop() +}) +` + +type bootstrapData struct { + Package string + FormattedName string + GinkgoImport string + GomegaImport string +} + +func getPackageAndFormattedName() (string, string, string) { + path, err := os.Getwd() + if err != nil { + complainAndQuit("Could not get current working directory: \n" + err.Error()) + } + + dirName := strings.Replace(filepath.Base(path), "-", "_", -1) + dirName = strings.Replace(dirName, " ", "_", -1) + + pkg, err := build.ImportDir(path, 0) + packageName := pkg.Name + if err != nil { + packageName = dirName + } + + formattedName := prettifyPackageName(filepath.Base(path)) + return packageName, dirName, formattedName +} + +func prettifyPackageName(name string) string { + name = strings.Replace(name, "-", " ", -1) + name = strings.Replace(name, "_", " ", -1) + name = strings.Title(name) + name = strings.Replace(name, " ", "", -1) + return name +} + +func fileExists(path string) bool { + _, err := os.Stat(path) + if err == nil { + return true + } + return false +} + +func generateBootstrap(agouti bool, noDot bool) { + packageName, bootstrapFilePrefix, formattedName := getPackageAndFormattedName() + data := bootstrapData{ + Package: packageName, + FormattedName: formattedName, + GinkgoImport: `. "github.com/onsi/ginkgo"`, + GomegaImport: `. 
"github.com/onsi/gomega"`, + } + + if noDot { + data.GinkgoImport = `"github.com/onsi/ginkgo"` + data.GomegaImport = `"github.com/onsi/gomega"` + } + + targetFile := fmt.Sprintf("%s_suite_test.go", bootstrapFilePrefix) + if fileExists(targetFile) { + fmt.Printf("%s already exists.\n\n", targetFile) + os.Exit(1) + } else { + fmt.Printf("Generating ginkgo test suite bootstrap for %s in:\n\t%s\n", packageName, targetFile) + } + + f, err := os.Create(targetFile) + if err != nil { + complainAndQuit("Could not create file: " + err.Error()) + panic(err.Error()) + } + defer f.Close() + + var templateText string + if agouti { + templateText = agoutiBootstrapText + } else { + templateText = bootstrapText + } + + bootstrapTemplate, err := template.New("bootstrap").Parse(templateText) + if err != nil { + panic(err.Error()) + } + + buf := &bytes.Buffer{} + bootstrapTemplate.Execute(buf, data) + + if noDot { + contents, err := nodot.ApplyNoDot(buf.Bytes()) + if err != nil { + complainAndQuit("Failed to import nodot declarations: " + err.Error()) + } + fmt.Println("To update the nodot declarations in the future, switch to this directory and run:\n\tginkgo nodot") + buf = bytes.NewBuffer(contents) + } + + buf.WriteTo(f) + + goFmt(targetFile) +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/build_command.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/build_command.go new file mode 100644 index 0000000000000..badae0294d371 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/build_command.go @@ -0,0 +1,64 @@ +package main + +import ( + "flag" + "fmt" + "os" + "path/filepath" + + "github.com/onsi/ginkgo/ginkgo/interrupthandler" + "github.com/onsi/ginkgo/ginkgo/testrunner" +) + +func BuildBuildCommand() *Command { + commandFlags := NewBuildCommandFlags(flag.NewFlagSet("build", flag.ExitOnError)) + interruptHandler := interrupthandler.NewInterruptHandler() + builder := &SpecBuilder{ + commandFlags: commandFlags, + interruptHandler: interruptHandler, + } + + return &Command{ + Name: "build", + FlagSet: commandFlags.FlagSet, + UsageCommand: "ginkgo build ", + Usage: []string{ + "Build the passed in (or the package in the current directory if left blank).", + "Accepts the following flags:", + }, + Command: builder.BuildSpecs, + } +} + +type SpecBuilder struct { + commandFlags *RunWatchAndBuildCommandFlags + interruptHandler *interrupthandler.InterruptHandler +} + +func (r *SpecBuilder) BuildSpecs(args []string, additionalArgs []string) { + r.commandFlags.computeNodes() + + suites, _ := findSuites(args, r.commandFlags.Recurse, r.commandFlags.SkipPackage, false) + + if len(suites) == 0 { + complainAndQuit("Found no test suites") + } + + passed := true + for _, suite := range suites { + runner := testrunner.New(suite, 1, false, r.commandFlags.Race, r.commandFlags.Cover, r.commandFlags.Tags, nil) + fmt.Printf("Compiling %s...\n", suite.PackageName) + err := runner.Compile() + if err != nil { + fmt.Println(err.Error()) + passed = false + } else { + fmt.Printf(" compiled %s.test\n", filepath.Join(suite.Path, suite.PackageName)) + } + } + + if passed { + os.Exit(0) + } + os.Exit(1) +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert/ginkgo_ast_nodes.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert/ginkgo_ast_nodes.go new file mode 100644 index 0000000000000..02e2b3b328d79 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert/ginkgo_ast_nodes.go @@ -0,0 +1,123 @@ +package convert + +import ( + "fmt" + 
"go/ast" + "strings" + "unicode" +) + +/* + * Creates a func init() node + */ +func createVarUnderscoreBlock() *ast.ValueSpec { + valueSpec := &ast.ValueSpec{} + object := &ast.Object{Kind: 4, Name: "_", Decl: valueSpec, Data: 0} + ident := &ast.Ident{Name: "_", Obj: object} + valueSpec.Names = append(valueSpec.Names, ident) + return valueSpec +} + +/* + * Creates a Describe("Testing with ginkgo", func() { }) node + */ +func createDescribeBlock() *ast.CallExpr { + blockStatement := &ast.BlockStmt{List: []ast.Stmt{}} + + fieldList := &ast.FieldList{} + funcType := &ast.FuncType{Params: fieldList} + funcLit := &ast.FuncLit{Type: funcType, Body: blockStatement} + basicLit := &ast.BasicLit{Kind: 9, Value: "\"Testing with Ginkgo\""} + describeIdent := &ast.Ident{Name: "Describe"} + return &ast.CallExpr{Fun: describeIdent, Args: []ast.Expr{basicLit, funcLit}} +} + +/* + * Convenience function to return the name of the *testing.T param + * for a Test function that will be rewritten. This is useful because + * we will want to replace the usage of this named *testing.T inside the + * body of the function with a GinktoT. + */ +func namedTestingTArg(node *ast.FuncDecl) string { + return node.Type.Params.List[0].Names[0].Name // *exhale* +} + +/* + * Convenience function to return the block statement node for a Describe statement + */ +func blockStatementFromDescribe(desc *ast.CallExpr) *ast.BlockStmt { + var funcLit *ast.FuncLit + var found = false + + for _, node := range desc.Args { + switch node := node.(type) { + case *ast.FuncLit: + found = true + funcLit = node + break + } + } + + if !found { + panic("Error finding ast.FuncLit inside describe statement. Somebody done goofed.") + } + + return funcLit.Body +} + +/* convenience function for creating an It("TestNameHere") + * with all the body of the test function inside the anonymous + * func passed to It() + */ +func createItStatementForTestFunc(testFunc *ast.FuncDecl) *ast.ExprStmt { + blockStatement := &ast.BlockStmt{List: testFunc.Body.List} + fieldList := &ast.FieldList{} + funcType := &ast.FuncType{Params: fieldList} + funcLit := &ast.FuncLit{Type: funcType, Body: blockStatement} + + testName := rewriteTestName(testFunc.Name.Name) + basicLit := &ast.BasicLit{Kind: 9, Value: fmt.Sprintf("\"%s\"", testName)} + itBlockIdent := &ast.Ident{Name: "It"} + callExpr := &ast.CallExpr{Fun: itBlockIdent, Args: []ast.Expr{basicLit, funcLit}} + return &ast.ExprStmt{X: callExpr} +} + +/* +* rewrite test names to be human readable +* eg: rewrites "TestSomethingAmazing" as "something amazing" + */ +func rewriteTestName(testName string) string { + nameComponents := []string{} + currentString := "" + indexOfTest := strings.Index(testName, "Test") + if indexOfTest != 0 { + return testName + } + + testName = strings.Replace(testName, "Test", "", 1) + first, rest := testName[0], testName[1:] + testName = string(unicode.ToLower(rune(first))) + rest + + for _, rune := range testName { + if unicode.IsUpper(rune) { + nameComponents = append(nameComponents, currentString) + currentString = string(unicode.ToLower(rune)) + } else { + currentString += string(rune) + } + } + + return strings.Join(append(nameComponents, currentString), " ") +} + +func newGinkgoTFromIdent(ident *ast.Ident) *ast.CallExpr { + return &ast.CallExpr{ + Lparen: ident.NamePos + 1, + Rparen: ident.NamePos + 2, + Fun: &ast.Ident{Name: "GinkgoT"}, + } +} + +func newGinkgoTInterface() *ast.Ident { + return &ast.Ident{Name: "GinkgoTInterface"} +} diff --git 
a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert/import.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert/import.go new file mode 100644 index 0000000000000..e226196f72e27 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert/import.go @@ -0,0 +1,91 @@ +package convert + +import ( + "errors" + "fmt" + "go/ast" +) + +/* + * Given the root node of an AST, returns the node containing the + * import statements for the file. + */ +func importsForRootNode(rootNode *ast.File) (imports *ast.GenDecl, err error) { + for _, declaration := range rootNode.Decls { + decl, ok := declaration.(*ast.GenDecl) + if !ok || len(decl.Specs) == 0 { + continue + } + + _, ok = decl.Specs[0].(*ast.ImportSpec) + if ok { + imports = decl + return + } + } + + err = errors.New(fmt.Sprintf("Could not find imports for root node:\n\t%#v\n", rootNode)) + return +} + +/* + * Removes "testing" import, if present + */ +func removeTestingImport(rootNode *ast.File) { + importDecl, err := importsForRootNode(rootNode) + if err != nil { + panic(err.Error()) + } + + var index int + for i, importSpec := range importDecl.Specs { + importSpec := importSpec.(*ast.ImportSpec) + if importSpec.Path.Value == "\"testing\"" { + index = i + break + } + } + + importDecl.Specs = append(importDecl.Specs[:index], importDecl.Specs[index+1:]...) +} + +/* + * Adds import statements for onsi/ginkgo, if missing + */ +func addGinkgoImports(rootNode *ast.File) { + importDecl, err := importsForRootNode(rootNode) + if err != nil { + panic(err.Error()) + } + + if len(importDecl.Specs) == 0 { + // TODO: might need to create a import decl here + panic("unimplemented : expected to find an imports block") + } + + needsGinkgo := true + for _, importSpec := range importDecl.Specs { + importSpec, ok := importSpec.(*ast.ImportSpec) + if !ok { + continue + } + + if importSpec.Path.Value == "\"github.com/onsi/ginkgo\"" { + needsGinkgo = false + } + } + + if needsGinkgo { + importDecl.Specs = append(importDecl.Specs, createImport(".", "\"github.com/onsi/ginkgo\"")) + } +} + +/* + * convenience function to create an import statement + */ +func createImport(name, path string) *ast.ImportSpec { + return &ast.ImportSpec{ + Name: &ast.Ident{Name: name}, + Path: &ast.BasicLit{Kind: 9, Value: path}, + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert/package_rewriter.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert/package_rewriter.go new file mode 100644 index 0000000000000..ed09c460d4c32 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert/package_rewriter.go @@ -0,0 +1,127 @@ +package convert + +import ( + "fmt" + "go/build" + "io/ioutil" + "os" + "os/exec" + "path/filepath" + "regexp" +) + +/* + * RewritePackage takes a name (eg: my-package/tools), finds its test files using + * Go's build package, and then rewrites them. A ginkgo test suite file will + * also be added for this package, and all of its child packages. + */ +func RewritePackage(packageName string) { + pkg, err := packageWithName(packageName) + if err != nil { + panic(fmt.Sprintf("unexpected error reading package: '%s'\n%s\n", packageName, err.Error())) + } + + for _, filename := range findTestsInPackage(pkg) { + rewriteTestsInFile(filename) + } + return +} + +/* + * Given a package, findTestsInPackage reads the test files in the directory, + * and then recurses on each child package, returning a slice of all test files + * found in this process. 
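+ * Directories whose names begin with "." or "_" (e.g. ".git", "_fixtures") are skipped.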
+ */ +func findTestsInPackage(pkg *build.Package) (testfiles []string) { + for _, file := range append(pkg.TestGoFiles, pkg.XTestGoFiles...) { + testfiles = append(testfiles, filepath.Join(pkg.Dir, file)) + } + + dirFiles, err := ioutil.ReadDir(pkg.Dir) + if err != nil { + panic(fmt.Sprintf("unexpected error reading dir: '%s'\n%s\n", pkg.Dir, err.Error())) + } + + re := regexp.MustCompile(`^[._]`) + + for _, file := range dirFiles { + if !file.IsDir() { + continue + } + + if re.Match([]byte(file.Name())) { + continue + } + + packageName := filepath.Join(pkg.ImportPath, file.Name()) + subPackage, err := packageWithName(packageName) + if err != nil { + panic(fmt.Sprintf("unexpected error reading package: '%s'\n%s\n", packageName, err.Error())) + } + + testfiles = append(testfiles, findTestsInPackage(subPackage)...) + } + + addGinkgoSuiteForPackage(pkg) + goFmtPackage(pkg) + return +} + +/* + * Shells out to `ginkgo bootstrap` to create a test suite file + */ +func addGinkgoSuiteForPackage(pkg *build.Package) { + originalDir, err := os.Getwd() + if err != nil { + panic(err) + } + + suite_test_file := filepath.Join(pkg.Dir, pkg.Name+"_suite_test.go") + + _, err = os.Stat(suite_test_file) + if err == nil { + return // test file already exists, this should be a no-op + } + + err = os.Chdir(pkg.Dir) + if err != nil { + panic(err) + } + + output, err := exec.Command("ginkgo", "bootstrap").Output() + + if err != nil { + panic(fmt.Sprintf("error running 'ginkgo bootstrap'.\nstdout: %s\n%s\n", output, err.Error())) + } + + err = os.Chdir(originalDir) + if err != nil { + panic(err) + } +} + +/* + * Shells out to `go fmt` to format the package + */ +func goFmtPackage(pkg *build.Package) { + output, err := exec.Command("go", "fmt", pkg.ImportPath).Output() + + if err != nil { + fmt.Printf("Warning: Error running 'go fmt %s'.\nstdout: %s\n%s\n", pkg.ImportPath, output, err.Error()) + } +} + +/* + * Attempts to return a package with its test files already read. + * The ImportMode arg to build.Import lets you specify if you want go to read the + * buildable go files inside the package, but it fails if the package has no go files + */ +func packageWithName(name string) (pkg *build.Package, err error) { + pkg, err = build.Default.Import(name, ".", build.ImportMode(0)) + if err == nil { + return + } + + pkg, err = build.Default.Import(name, ".", build.ImportMode(1)) + return +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert/test_finder.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert/test_finder.go new file mode 100644 index 0000000000000..b33595c9ae169 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert/test_finder.go @@ -0,0 +1,56 @@ +package convert + +import ( + "go/ast" + "regexp" +) + +/* + * Given a root node, walks its top level statements and returns + * points to function nodes to rewrite as It statements. + * These functions, according to Go testing convention, must be named + * TestWithCamelCasedName and receive a single *testing.T argument. 
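+ * For example, a func TestParsesInput(t *testing.T) is picked up for rewriting, while a lowercase testHelper(t *testing.T) is not.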
+ */ +func findTestFuncs(rootNode *ast.File) (testsToRewrite []*ast.FuncDecl) { + testNameRegexp := regexp.MustCompile("^Test[0-9A-Z].+") + + ast.Inspect(rootNode, func(node ast.Node) bool { + if node == nil { + return false + } + + switch node := node.(type) { + case *ast.FuncDecl: + matches := testNameRegexp.MatchString(node.Name.Name) + + if matches && receivesTestingT(node) { + testsToRewrite = append(testsToRewrite, node) + } + } + + return true + }) + + return +} + +/* + * convenience function that looks at args to a function and determines if its + * params include an argument of type *testing.T + */ +func receivesTestingT(node *ast.FuncDecl) bool { + if len(node.Type.Params.List) != 1 { + return false + } + + base, ok := node.Type.Params.List[0].Type.(*ast.StarExpr) + if !ok { + return false + } + + intermediate := base.X.(*ast.SelectorExpr) + isTestingPackage := intermediate.X.(*ast.Ident).Name == "testing" + isTestingT := intermediate.Sel.Name == "T" + + return isTestingPackage && isTestingT +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert/testfile_rewriter.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert/testfile_rewriter.go new file mode 100644 index 0000000000000..4b001a7dbb54a --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert/testfile_rewriter.go @@ -0,0 +1,163 @@ +package convert + +import ( + "bytes" + "fmt" + "go/ast" + "go/format" + "go/parser" + "go/token" + "io/ioutil" + "os" +) + +/* + * Given a file path, rewrites any tests in the Ginkgo format. + * First, we parse the AST, and update the imports declaration. + * Then, we walk the first child elements in the file, returning tests to rewrite. + * A top level init func is declared, with a single Describe func inside. + * Then the test functions to rewrite are inserted as It statements inside the Describe. + * Finally we walk the rest of the file, replacing other usages of *testing.T + * Once that is complete, we write the AST back out again to its file. + */ +func rewriteTestsInFile(pathToFile string) { + fileSet := token.NewFileSet() + rootNode, err := parser.ParseFile(fileSet, pathToFile, nil, 0) + if err != nil { + panic(fmt.Sprintf("Error parsing test file '%s':\n%s\n", pathToFile, err.Error())) + } + + addGinkgoImports(rootNode) + removeTestingImport(rootNode) + + varUnderscoreBlock := createVarUnderscoreBlock() + describeBlock := createDescribeBlock() + varUnderscoreBlock.Values = []ast.Expr{describeBlock} + + for _, testFunc := range findTestFuncs(rootNode) { + rewriteTestFuncAsItStatement(testFunc, rootNode, describeBlock) + } + + underscoreDecl := &ast.GenDecl{ + Tok: 85, // gah, magick numbers are needed to make this work + TokPos: 14, // this tricks Go into writing "var _ = Describe" + Specs: []ast.Spec{varUnderscoreBlock}, + } + + imports := rootNode.Decls[0] + tail := rootNode.Decls[1:] + rootNode.Decls = append(append([]ast.Decl{imports}, underscoreDecl), tail...) 
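+ // Next, rewrite any remaining top-level funcs, func literals, and struct fields that still use *testing.T so they use GinkgoT instead.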
+ rewriteOtherFuncsToUseGinkgoT(rootNode.Decls) + walkNodesInRootNodeReplacingTestingT(rootNode) + + var buffer bytes.Buffer + if err = format.Node(&buffer, fileSet, rootNode); err != nil { + panic(fmt.Sprintf("Error formatting ast node after rewriting tests.\n%s\n", err.Error())) + } + + fileInfo, err := os.Stat(pathToFile) + if err != nil { + panic(fmt.Sprintf("Error stat'ing file: %s\n", pathToFile)) + } + + ioutil.WriteFile(pathToFile, buffer.Bytes(), fileInfo.Mode()) + return +} + +/* + * Given a test func named TestDoesSomethingNeat, rewrites it as + * It("does something neat", func() { __test_body_here__ }) and adds it + * to the Describe's list of statements + */ +func rewriteTestFuncAsItStatement(testFunc *ast.FuncDecl, rootNode *ast.File, describe *ast.CallExpr) { + var funcIndex int = -1 + for index, child := range rootNode.Decls { + if child == testFunc { + funcIndex = index + break + } + } + + if funcIndex < 0 { + panic(fmt.Sprintf("Assert failed: Error finding index for test node %s\n", testFunc.Name.Name)) + } + + var block *ast.BlockStmt = blockStatementFromDescribe(describe) + block.List = append(block.List, createItStatementForTestFunc(testFunc)) + replaceTestingTsWithGinkgoT(block, namedTestingTArg(testFunc)) + + // remove the old test func from the root node's declarations + rootNode.Decls = append(rootNode.Decls[:funcIndex], rootNode.Decls[funcIndex+1:]...) + return +} + +/* + * walks nodes inside of a test func's statements and replaces the usage of + * it's named *testing.T param with GinkgoT's + */ +func replaceTestingTsWithGinkgoT(statementsBlock *ast.BlockStmt, testingT string) { + ast.Inspect(statementsBlock, func(node ast.Node) bool { + if node == nil { + return false + } + + keyValueExpr, ok := node.(*ast.KeyValueExpr) + if ok { + replaceNamedTestingTsInKeyValueExpression(keyValueExpr, testingT) + return true + } + + funcLiteral, ok := node.(*ast.FuncLit) + if ok { + replaceTypeDeclTestingTsInFuncLiteral(funcLiteral) + return true + } + + callExpr, ok := node.(*ast.CallExpr) + if !ok { + return true + } + replaceTestingTsInArgsLists(callExpr, testingT) + + funCall, ok := callExpr.Fun.(*ast.SelectorExpr) + if ok { + replaceTestingTsMethodCalls(funCall, testingT) + } + + return true + }) +} + +/* + * rewrite t.Fail() or any other *testing.T method by replacing with T().Fail() + * This function receives a selector expression (eg: t.Fail()) and + * the name of the *testing.T param from the function declaration. 
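+ * For example, with a *testing.T param named t, the call t.Fail() is rewritten to GinkgoT().Fail().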
Rewrites the + * selector expression in place if the target was a *testing.T + */ +func replaceTestingTsMethodCalls(selectorExpr *ast.SelectorExpr, testingT string) { + ident, ok := selectorExpr.X.(*ast.Ident) + if !ok { + return + } + + if ident.Name == testingT { + selectorExpr.X = newGinkgoTFromIdent(ident) + } +} + +/* + * replaces usages of a named *testing.T param inside of a call expression + * with a new GinkgoT object + */ +func replaceTestingTsInArgsLists(callExpr *ast.CallExpr, testingT string) { + for index, arg := range callExpr.Args { + ident, ok := arg.(*ast.Ident) + if !ok { + continue + } + + if ident.Name == testingT { + callExpr.Args[index] = newGinkgoTFromIdent(ident) + } + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert/testing_t_rewriter.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert/testing_t_rewriter.go new file mode 100644 index 0000000000000..418cdc4e56346 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert/testing_t_rewriter.go @@ -0,0 +1,130 @@ +package convert + +import ( + "go/ast" +) + +/* + * Rewrites any other top level funcs that receive a *testing.T param + */ +func rewriteOtherFuncsToUseGinkgoT(declarations []ast.Decl) { + for _, decl := range declarations { + decl, ok := decl.(*ast.FuncDecl) + if !ok { + continue + } + + for _, param := range decl.Type.Params.List { + starExpr, ok := param.Type.(*ast.StarExpr) + if !ok { + continue + } + + selectorExpr, ok := starExpr.X.(*ast.SelectorExpr) + if !ok { + continue + } + + xIdent, ok := selectorExpr.X.(*ast.Ident) + if !ok || xIdent.Name != "testing" { + continue + } + + if selectorExpr.Sel.Name != "T" { + continue + } + + param.Type = newGinkgoTInterface() + } + } +} + +/* + * Walks all of the nodes in the file, replacing *testing.T in struct + * and func literal nodes. 
eg: + * type foo struct { *testing.T } + * var bar = func(t *testing.T) { } + */ +func walkNodesInRootNodeReplacingTestingT(rootNode *ast.File) { + ast.Inspect(rootNode, func(node ast.Node) bool { + if node == nil { + return false + } + + switch node := node.(type) { + case *ast.StructType: + replaceTestingTsInStructType(node) + case *ast.FuncLit: + replaceTypeDeclTestingTsInFuncLiteral(node) + } + + return true + }) +} + +/* + * replaces named *testing.T inside a composite literal + */ +func replaceNamedTestingTsInKeyValueExpression(kve *ast.KeyValueExpr, testingT string) { + ident, ok := kve.Value.(*ast.Ident) + if !ok { + return + } + + if ident.Name == testingT { + kve.Value = newGinkgoTFromIdent(ident) + } +} + +/* + * replaces *testing.T params in a func literal with GinkgoT + */ +func replaceTypeDeclTestingTsInFuncLiteral(functionLiteral *ast.FuncLit) { + for _, arg := range functionLiteral.Type.Params.List { + starExpr, ok := arg.Type.(*ast.StarExpr) + if !ok { + continue + } + + selectorExpr, ok := starExpr.X.(*ast.SelectorExpr) + if !ok { + continue + } + + target, ok := selectorExpr.X.(*ast.Ident) + if !ok { + continue + } + + if target.Name == "testing" && selectorExpr.Sel.Name == "T" { + arg.Type = newGinkgoTInterface() + } + } +} + +/* + * Replaces *testing.T types inside of a struct declaration with a GinkgoT + * eg: type foo struct { *testing.T } + */ +func replaceTestingTsInStructType(structType *ast.StructType) { + for _, field := range structType.Fields.List { + starExpr, ok := field.Type.(*ast.StarExpr) + if !ok { + continue + } + + selectorExpr, ok := starExpr.X.(*ast.SelectorExpr) + if !ok { + continue + } + + xIdent, ok := selectorExpr.X.(*ast.Ident) + if !ok { + continue + } + + if xIdent.Name == "testing" && selectorExpr.Sel.Name == "T" { + field.Type = newGinkgoTInterface() + } + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert_command.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert_command.go new file mode 100644 index 0000000000000..89e60d393022f --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/convert_command.go @@ -0,0 +1,44 @@ +package main + +import ( + "flag" + "fmt" + "github.com/onsi/ginkgo/ginkgo/convert" + "os" +) + +func BuildConvertCommand() *Command { + return &Command{ + Name: "convert", + FlagSet: flag.NewFlagSet("convert", flag.ExitOnError), + UsageCommand: "ginkgo convert /path/to/package", + Usage: []string{ + "Convert the package at the passed in path from an XUnit-style test to a Ginkgo-style test", + }, + Command: convertPackage, + } +} + +func convertPackage(args []string, additionalArgs []string) { + if len(args) != 1 { + println(fmt.Sprintf("usage: ginkgo convert /path/to/your/package")) + os.Exit(1) + } + + defer func() { + err := recover() + if err != nil { + switch err := err.(type) { + case error: + println(err.Error()) + case string: + println(err) + default: + println(fmt.Sprintf("unexpected error: %#v", err)) + } + os.Exit(1) + } + }() + + convert.RewritePackage(args[0]) +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/generate_command.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/generate_command.go new file mode 100644 index 0000000000000..ad25dd8317653 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/generate_command.go @@ -0,0 +1,164 @@ +package main + +import ( + "flag" + "fmt" + "os" + "path/filepath" + "strings" + "text/template" +) + +func BuildGenerateCommand() *Command { + var agouti, noDot bool + 
flagSet := flag.NewFlagSet("generate", flag.ExitOnError) + flagSet.BoolVar(&agouti, "agouti", false, "If set, generate will generate a test file for writing Agouti tests") + flagSet.BoolVar(&noDot, "nodot", false, "If set, generate will generate a test file that does not . import ginkgo and gomega") + + return &Command{ + Name: "generate", + FlagSet: flagSet, + UsageCommand: "ginkgo generate ", + Usage: []string{ + "Generate a test file named filename_test.go", + "If the optional argument is omitted, a file named after the package in the current directory will be created.", + "Accepts the following flags:", + }, + Command: func(args []string, additionalArgs []string) { + generateSpec(args, agouti, noDot) + }, + } +} + +var specText = `package {{.Package}}_test + +import ( + . "{{.PackageImportPath}}" + + {{if .IncludeImports}}. "github.com/onsi/ginkgo"{{end}} + {{if .IncludeImports}}. "github.com/onsi/gomega"{{end}} +) + +var _ = Describe("{{.Subject}}", func() { + +}) +` + +var agoutiSpecText = `package {{.Package}}_test + +import ( + . "{{.PackageImportPath}}" + + {{if .IncludeImports}}. "github.com/onsi/ginkgo"{{end}} + {{if .IncludeImports}}. "github.com/onsi/gomega"{{end}} + . "github.com/sclevine/agouti/core" + . "github.com/sclevine/agouti/matchers" +) + +var _ = Describe("{{.Subject}}", func() { + var page Page + + BeforeEach(func() { + var err error + page, err = agoutiDriver.Page() + Expect(err).NotTo(HaveOccurred()) + }) + + AfterEach(func() { + page.Destroy() + }) +}) +` + +type specData struct { + Package string + Subject string + PackageImportPath string + IncludeImports bool +} + +func generateSpec(args []string, agouti, noDot bool) { + if len(args) == 0 { + err := generateSpecForSubject("", agouti, noDot) + if err != nil { + fmt.Println(err.Error()) + fmt.Println("") + os.Exit(1) + } + fmt.Println("") + return + } + + var failed bool + for _, arg := range args { + err := generateSpecForSubject(arg, agouti, noDot) + if err != nil { + failed = true + fmt.Println(err.Error()) + } + } + fmt.Println("") + if failed { + os.Exit(1) + } +} + +func generateSpecForSubject(subject string, agouti, noDot bool) error { + packageName, specFilePrefix, formattedName := getPackageAndFormattedName() + if subject != "" { + subject = strings.Split(subject, ".go")[0] + subject = strings.Split(subject, "_test")[0] + specFilePrefix = subject + formattedName = prettifyPackageName(subject) + } + + data := specData{ + Package: packageName, + Subject: formattedName, + PackageImportPath: getPackageImportPath(), + IncludeImports: !noDot, + } + + targetFile := fmt.Sprintf("%s_test.go", specFilePrefix) + if fileExists(targetFile) { + return fmt.Errorf("%s already exists.", targetFile) + } else { + fmt.Printf("Generating ginkgo test for %s in:\n %s\n", data.Subject, targetFile) + } + + f, err := os.Create(targetFile) + if err != nil { + return err + } + defer f.Close() + + var templateText string + if agouti { + templateText = agoutiSpecText + } else { + templateText = specText + } + + specTemplate, err := template.New("spec").Parse(templateText) + if err != nil { + return err + } + + specTemplate.Execute(f, data) + goFmt(targetFile) + return nil +} + +func getPackageImportPath() string { + workingDir, err := os.Getwd() + if err != nil { + panic(err.Error()) + } + sep := string(filepath.Separator) + paths := strings.Split(workingDir, sep+"src"+sep) + if len(paths) == 1 { + fmt.Printf("\nCouldn't identify package import path.\n\n\tginkgo generate\n\nMust be run within a package directory under 
$GOPATH/src/...\nYou're going to have to change UNKNOWN_PACKAGE_PATH in the generated file...\n\n") + return "UNKNOWN_PACKAGE_PATH" + } + return filepath.ToSlash(paths[len(paths)-1]) +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/help_command.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/help_command.go new file mode 100644 index 0000000000000..6f24d072b2455 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/help_command.go @@ -0,0 +1,31 @@ +package main + +import ( + "flag" + "fmt" +) + +func BuildHelpCommand() *Command { + return &Command{ + Name: "help", + FlagSet: flag.NewFlagSet("help", flag.ExitOnError), + UsageCommand: "ginkgo help ", + Usage: []string{ + "Print usage information. If a command is passed in, print usage information just for that command.", + }, + Command: printHelp, + } +} + +func printHelp(args []string, additionalArgs []string) { + if len(args) == 0 { + usage() + } else { + command, found := commandMatching(args[0]) + if !found { + complainAndQuit(fmt.Sprintf("Unkown command: %s", args[0])) + } + + usageForCommand(command, true) + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/interrupthandler/interrupt_handler.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/interrupthandler/interrupt_handler.go new file mode 100644 index 0000000000000..82e49ad6d7642 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/interrupthandler/interrupt_handler.go @@ -0,0 +1,51 @@ +package interrupthandler + +import ( + "os" + "os/signal" + "sync" +) + +type InterruptHandler struct { + interruptCount int + lock *sync.Mutex + C chan bool +} + +func NewInterruptHandler() *InterruptHandler { + h := &InterruptHandler{ + lock: &sync.Mutex{}, + C: make(chan bool, 0), + } + + go h.handleInterrupt() + SwallowSigQuit() + + return h +} + +func (h *InterruptHandler) WasInterrupted() bool { + h.lock.Lock() + defer h.lock.Unlock() + + return h.interruptCount > 0 +} + +func (h *InterruptHandler) handleInterrupt() { + c := make(chan os.Signal, 1) + signal.Notify(c, os.Interrupt) + + <-c + signal.Stop(c) + + h.lock.Lock() + h.interruptCount++ + if h.interruptCount == 1 { + close(h.C) + } else if h.interruptCount > 5 { + os.Exit(1) + } + h.lock.Unlock() + + go h.handleInterrupt() +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/interrupthandler/sigquit_swallower_unix.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/interrupthandler/sigquit_swallower_unix.go new file mode 100644 index 0000000000000..14c94210ee030 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/interrupthandler/sigquit_swallower_unix.go @@ -0,0 +1,14 @@ +// +build freebsd openbsd netbsd dragonfly darwin linux + +package interrupthandler + +import ( + "os" + "os/signal" + "syscall" +) + +func SwallowSigQuit() { + c := make(chan os.Signal, 1024) + signal.Notify(c, syscall.SIGQUIT) +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/interrupthandler/sigquit_swallower_windows.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/interrupthandler/sigquit_swallower_windows.go new file mode 100644 index 0000000000000..7f4a50e1906b9 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/interrupthandler/sigquit_swallower_windows.go @@ -0,0 +1,7 @@ +// +build windows + +package interrupthandler + +func SwallowSigQuit() { + //noop +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/main.go 
b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/main.go new file mode 100644 index 0000000000000..cf0cf35ee8701 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/main.go @@ -0,0 +1,291 @@ +/* +The Ginkgo CLI + +The Ginkgo CLI is fully documented [here](http://onsi.github.io/ginkgo/#the_ginkgo_cli) + +You can also learn more by running: + + ginkgo help + +Here are some of the more commonly used commands: + +To install: + + go install github.com/onsi/ginkgo/ginkgo + +To run tests: + + ginkgo + +To run tests in all subdirectories: + + ginkgo -r + +To run tests in particular packages: + + ginkgo /path/to/package /path/to/another/package + +To pass arguments/flags to your tests: + + ginkgo -- + +To run tests in parallel + + ginkgo -p + +this will automatically detect the optimal number of nodes to use. Alternatively, you can specify the number of nodes with: + + ginkgo -nodes=N + +(note that you don't need to provide -p in this case). + +By default the Ginkgo CLI will spin up a server that the individual test processes send test output to. The CLI aggregates this output and then presents coherent test output, one test at a time, as each test completes. +An alternative is to have the parallel nodes run and stream interleaved output back. This is useful for debugging, particularly in contexts where tests hang/fail to start. To get this interleaved output: + + ginkgo -nodes=N -stream=true + +On Windows, the default value for stream is true. + +By default, when running multiple tests (with -r or a list of packages) Ginkgo will abort when a test fails. To have Ginkgo run subsequent test suites instead you can: + + ginkgo -keepGoing + +To monitor packages and rerun tests when changes occur: + + ginkgo watch <-r> + +passing `ginkgo watch` the `-r` flag will recursively detect all test suites under the current directory and monitor them. +`watch` does not detect *new* packages. Moreover, changes in package X only rerun the tests for package X, tests for packages +that depend on X are not rerun. + +[OSX only] To receive (desktop) notifications when a test run completes: + + ginkgo -notify + +this is particularly useful with `ginkgo watch`. Notifications are currently only supported on OS X and require that you `brew install terminal-notifier` + +Sometimes (to suss out race conditions/flaky tests, for example) you want to keep running a test suite until it fails. You can do this with: + + ginkgo -untilItFails + +To bootstrap a test suite: + + ginkgo bootstrap + +To generate a test file: + + ginkgo generate + +To bootstrap/generate test files without using "." imports: + + ginkgo bootstrap --nodot + ginkgo generate --nodot + +this will explicitly export all the identifiers in Ginkgo and Gomega allowing you to rename them to avoid collisions. When you pull in the latest Ginkgo/Gomega you'll want to run + + ginkgo nodot + +to refresh this list and pull in any new identifiers. In particular, this will pull in any new Gomega matchers that get added. + +To convert an existing XUnit style test suite to a Ginkgo-style test suite: + + ginkgo convert . + +To unfocus tests: + + ginkgo unfocus + +or + + ginkgo blur + +To compile a test suite: + + ginkgo build + +will output an executable file named `package.test`.
This can be run directly or by invoking + + ginkgo + +To print out Ginkgo's version: + + ginkgo version + +To get more help: + + ginkgo help +*/ +package main + +import ( + "flag" + "fmt" + "os" + "os/exec" + "strings" + + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/ginkgo/testsuite" +) + +const greenColor = "\x1b[32m" +const redColor = "\x1b[91m" +const defaultStyle = "\x1b[0m" +const lightGrayColor = "\x1b[37m" + +type Command struct { + Name string + AltName string + FlagSet *flag.FlagSet + Usage []string + UsageCommand string + Command func(args []string, additionalArgs []string) + SuppressFlagDocumentation bool + FlagDocSubstitute []string +} + +func (c *Command) Matches(name string) bool { + return c.Name == name || (c.AltName != "" && c.AltName == name) +} + +func (c *Command) Run(args []string, additionalArgs []string) { + c.FlagSet.Parse(args) + c.Command(c.FlagSet.Args(), additionalArgs) +} + +var DefaultCommand *Command +var Commands []*Command + +func init() { + DefaultCommand = BuildRunCommand() + Commands = append(Commands, BuildWatchCommand()) + Commands = append(Commands, BuildBuildCommand()) + Commands = append(Commands, BuildBootstrapCommand()) + Commands = append(Commands, BuildGenerateCommand()) + Commands = append(Commands, BuildNodotCommand()) + Commands = append(Commands, BuildConvertCommand()) + Commands = append(Commands, BuildUnfocusCommand()) + Commands = append(Commands, BuildVersionCommand()) + Commands = append(Commands, BuildHelpCommand()) +} + +func main() { + args := []string{} + additionalArgs := []string{} + + foundDelimiter := false + + for _, arg := range os.Args[1:] { + if !foundDelimiter { + if arg == "--" { + foundDelimiter = true + continue + } + } + + if foundDelimiter { + additionalArgs = append(additionalArgs, arg) + } else { + args = append(args, arg) + } + } + + if len(args) > 0 { + commandToRun, found := commandMatching(args[0]) + if found { + commandToRun.Run(args[1:], additionalArgs) + return + } + } + + DefaultCommand.Run(args, additionalArgs) +} + +func commandMatching(name string) (*Command, bool) { + for _, command := range Commands { + if command.Matches(name) { + return command, true + } + } + return nil, false +} + +func usage() { + fmt.Fprintf(os.Stderr, "Ginkgo Version %s\n\n", config.VERSION) + usageForCommand(DefaultCommand, false) + for _, command := range Commands { + fmt.Fprintf(os.Stderr, "\n") + usageForCommand(command, false) + } +} + +func usageForCommand(command *Command, longForm bool) { + fmt.Fprintf(os.Stderr, "%s\n%s\n", command.UsageCommand, strings.Repeat("-", len(command.UsageCommand))) + fmt.Fprintf(os.Stderr, "%s\n", strings.Join(command.Usage, "\n")) + if command.SuppressFlagDocumentation && !longForm { + fmt.Fprintf(os.Stderr, "%s\n", strings.Join(command.FlagDocSubstitute, "\n ")) + } else { + command.FlagSet.PrintDefaults() + } +} + +func complainAndQuit(complaint string) { + fmt.Fprintf(os.Stderr, "%s\nFor usage instructions:\n\tginkgo help\n", complaint) + os.Exit(1) +} + +func findSuites(args []string, recurse bool, skipPackage string, allowPrecompiled bool) ([]testsuite.TestSuite, []string) { + suites := []testsuite.TestSuite{} + + if len(args) > 0 { + for _, arg := range args { + if allowPrecompiled { + suite, err := testsuite.PrecompiledTestSuite(arg) + if err == nil { + suites = append(suites, suite) + continue + } + } + suites = append(suites, testsuite.SuitesInDir(arg, recurse)...) 
+ } + } else { + suites = testsuite.SuitesInDir(".", recurse) + } + + skippedPackages := []string{} + if skipPackage != "" { + skipFilters := strings.Split(skipPackage, ",") + filteredSuites := []testsuite.TestSuite{} + for _, suite := range suites { + skip := false + for _, skipFilter := range skipFilters { + if strings.Contains(suite.Path, skipFilter) { + skip = true + break + } + } + if skip { + skippedPackages = append(skippedPackages, suite.Path) + } else { + filteredSuites = append(filteredSuites, suite) + } + } + suites = filteredSuites + } + + return suites, skippedPackages +} + +func goFmt(path string) { + err := exec.Command("go", "fmt", path).Run() + if err != nil { + complainAndQuit("Could not fmt: " + err.Error()) + } +} + +func pluralizedWord(singular, plural string, count int) string { + if count == 1 { + return singular + } + return plural +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/nodot/nodot.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/nodot/nodot.go new file mode 100644 index 0000000000000..3f7237c602d7a --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/nodot/nodot.go @@ -0,0 +1,194 @@ +package nodot + +import ( + "fmt" + "go/ast" + "go/build" + "go/parser" + "go/token" + "path/filepath" + "strings" +) + +func ApplyNoDot(data []byte) ([]byte, error) { + sections, err := generateNodotSections() + if err != nil { + return nil, err + } + + for _, section := range sections { + data = section.createOrUpdateIn(data) + } + + return data, nil +} + +type nodotSection struct { + name string + pkg string + declarations []string + types []string +} + +func (s nodotSection) createOrUpdateIn(data []byte) []byte { + renames := map[string]string{} + + contents := string(data) + + lines := strings.Split(contents, "\n") + + comment := "// Declarations for " + s.name + + newLines := []string{} + for _, line := range lines { + if line == comment { + continue + } + + words := strings.Split(line, " ") + lastWord := words[len(words)-1] + + if s.containsDeclarationOrType(lastWord) { + renames[lastWord] = words[1] + continue + } + + newLines = append(newLines, line) + } + + if len(newLines[len(newLines)-1]) > 0 { + newLines = append(newLines, "") + } + + newLines = append(newLines, comment) + + for _, typ := range s.types { + name, ok := renames[s.prefix(typ)] + if !ok { + name = typ + } + newLines = append(newLines, fmt.Sprintf("type %s %s", name, s.prefix(typ))) + } + + for _, decl := range s.declarations { + name, ok := renames[s.prefix(decl)] + if !ok { + name = decl + } + newLines = append(newLines, fmt.Sprintf("var %s = %s", name, s.prefix(decl))) + } + + newLines = append(newLines, "") + + newContents := strings.Join(newLines, "\n") + + return []byte(newContents) +} + +func (s nodotSection) prefix(declOrType string) string { + return s.pkg + "." 
+ declOrType +} + +func (s nodotSection) containsDeclarationOrType(word string) bool { + for _, declaration := range s.declarations { + if s.prefix(declaration) == word { + return true + } + } + + for _, typ := range s.types { + if s.prefix(typ) == word { + return true + } + } + + return false +} + +func generateNodotSections() ([]nodotSection, error) { + sections := []nodotSection{} + + declarations, err := getExportedDeclerationsForPackage("github.com/onsi/ginkgo", "ginkgo_dsl.go", "GINKGO_VERSION", "GINKGO_PANIC") + if err != nil { + return nil, err + } + sections = append(sections, nodotSection{ + name: "Ginkgo DSL", + pkg: "ginkgo", + declarations: declarations, + types: []string{"Done", "Benchmarker"}, + }) + + declarations, err = getExportedDeclerationsForPackage("github.com/onsi/gomega", "gomega_dsl.go", "GOMEGA_VERSION") + if err != nil { + return nil, err + } + sections = append(sections, nodotSection{ + name: "Gomega DSL", + pkg: "gomega", + declarations: declarations, + }) + + declarations, err = getExportedDeclerationsForPackage("github.com/onsi/gomega", "matchers.go") + if err != nil { + return nil, err + } + sections = append(sections, nodotSection{ + name: "Gomega Matchers", + pkg: "gomega", + declarations: declarations, + }) + + return sections, nil +} + +func getExportedDeclerationsForPackage(pkgPath string, filename string, blacklist ...string) ([]string, error) { + pkg, err := build.Import(pkgPath, ".", 0) + if err != nil { + return []string{}, err + } + + declarations, err := getExportedDeclarationsForFile(filepath.Join(pkg.Dir, filename)) + if err != nil { + return []string{}, err + } + + blacklistLookup := map[string]bool{} + for _, declaration := range blacklist { + blacklistLookup[declaration] = true + } + + filteredDeclarations := []string{} + for _, declaration := range declarations { + if blacklistLookup[declaration] { + continue + } + filteredDeclarations = append(filteredDeclarations, declaration) + } + + return filteredDeclarations, nil +} + +func getExportedDeclarationsForFile(path string) ([]string, error) { + fset := token.NewFileSet() + tree, err := parser.ParseFile(fset, path, nil, 0) + if err != nil { + return []string{}, err + } + + declarations := []string{} + ast.FileExports(tree) + for _, decl := range tree.Decls { + switch x := decl.(type) { + case *ast.GenDecl: + switch s := x.Specs[0].(type) { + case *ast.ValueSpec: + declarations = append(declarations, s.Names[0].Name) + } + case *ast.FuncDecl: + declarations = append(declarations, x.Name.Name) + } + } + + return declarations, nil +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/nodot/nodot_suite_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/nodot/nodot_suite_test.go new file mode 100644 index 0000000000000..ca4613e6f574e --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/nodot/nodot_suite_test.go @@ -0,0 +1,91 @@ +package nodot_test + +import ( + "github.com/onsi/ginkgo" + "github.com/onsi/gomega" + + "testing" +) + +func TestNodot(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Nodot Suite") +} + +// Declarations for Ginkgo DSL +type Done ginkgo.Done +type Benchmarker ginkgo.Benchmarker + +var GinkgoWriter = ginkgo.GinkgoWriter +var GinkgoParallelNode = ginkgo.GinkgoParallelNode +var GinkgoT = ginkgo.GinkgoT +var CurrentGinkgoTestDescription = ginkgo.CurrentGinkgoTestDescription +var RunSpecs = ginkgo.RunSpecs +var RunSpecsWithDefaultAndCustomReporters = ginkgo.RunSpecsWithDefaultAndCustomReporters +var 
RunSpecsWithCustomReporters = ginkgo.RunSpecsWithCustomReporters +var Fail = ginkgo.Fail +var GinkgoRecover = ginkgo.GinkgoRecover +var Describe = ginkgo.Describe +var FDescribe = ginkgo.FDescribe +var PDescribe = ginkgo.PDescribe +var XDescribe = ginkgo.XDescribe +var Context = ginkgo.Context +var FContext = ginkgo.FContext +var PContext = ginkgo.PContext +var XContext = ginkgo.XContext +var It = ginkgo.It +var FIt = ginkgo.FIt +var PIt = ginkgo.PIt +var XIt = ginkgo.XIt +var Measure = ginkgo.Measure +var FMeasure = ginkgo.FMeasure +var PMeasure = ginkgo.PMeasure +var XMeasure = ginkgo.XMeasure +var BeforeSuite = ginkgo.BeforeSuite +var AfterSuite = ginkgo.AfterSuite +var SynchronizedBeforeSuite = ginkgo.SynchronizedBeforeSuite +var SynchronizedAfterSuite = ginkgo.SynchronizedAfterSuite +var BeforeEach = ginkgo.BeforeEach +var JustBeforeEach = ginkgo.JustBeforeEach +var AfterEach = ginkgo.AfterEach + +// Declarations for Gomega DSL +var RegisterFailHandler = gomega.RegisterFailHandler +var RegisterTestingT = gomega.RegisterTestingT +var InterceptGomegaFailures = gomega.InterceptGomegaFailures +var Ω = gomega.Ω +var Expect = gomega.Expect +var ExpectWithOffset = gomega.ExpectWithOffset +var Eventually = gomega.Eventually +var EventuallyWithOffset = gomega.EventuallyWithOffset +var Consistently = gomega.Consistently +var ConsistentlyWithOffset = gomega.ConsistentlyWithOffset +var SetDefaultEventuallyTimeout = gomega.SetDefaultEventuallyTimeout +var SetDefaultEventuallyPollingInterval = gomega.SetDefaultEventuallyPollingInterval +var SetDefaultConsistentlyDuration = gomega.SetDefaultConsistentlyDuration +var SetDefaultConsistentlyPollingInterval = gomega.SetDefaultConsistentlyPollingInterval + +// Declarations for Gomega Matchers +var Equal = gomega.Equal +var BeEquivalentTo = gomega.BeEquivalentTo +var BeNil = gomega.BeNil +var BeTrue = gomega.BeTrue +var BeFalse = gomega.BeFalse +var HaveOccurred = gomega.HaveOccurred +var MatchError = gomega.MatchError +var BeClosed = gomega.BeClosed +var Receive = gomega.Receive +var MatchRegexp = gomega.MatchRegexp +var ContainSubstring = gomega.ContainSubstring +var MatchJSON = gomega.MatchJSON +var BeEmpty = gomega.BeEmpty +var HaveLen = gomega.HaveLen +var BeZero = gomega.BeZero +var ContainElement = gomega.ContainElement +var ConsistOf = gomega.ConsistOf +var HaveKey = gomega.HaveKey +var HaveKeyWithValue = gomega.HaveKeyWithValue +var BeNumerically = gomega.BeNumerically +var BeTemporally = gomega.BeTemporally +var BeAssignableToTypeOf = gomega.BeAssignableToTypeOf +var Panic = gomega.Panic diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/nodot/nodot_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/nodot/nodot_test.go new file mode 100644 index 0000000000000..37260a89f20bb --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/nodot/nodot_test.go @@ -0,0 +1,81 @@ +package nodot_test + +import ( + . 
"github.com/onsi/ginkgo/ginkgo/nodot" + "strings" +) + +var _ = Describe("ApplyNoDot", func() { + var result string + + apply := func(input string) string { + output, err := ApplyNoDot([]byte(input)) + Ω(err).ShouldNot(HaveOccurred()) + return string(output) + } + + Context("when no declarations have been imported yet", func() { + BeforeEach(func() { + result = apply("") + }) + + It("should add headings for the various declarations", func() { + Ω(result).Should(ContainSubstring("// Declarations for Ginkgo DSL")) + Ω(result).Should(ContainSubstring("// Declarations for Gomega DSL")) + Ω(result).Should(ContainSubstring("// Declarations for Gomega Matchers")) + }) + + It("should import Ginkgo's declarations", func() { + Ω(result).Should(ContainSubstring("var It = ginkgo.It")) + Ω(result).Should(ContainSubstring("var XDescribe = ginkgo.XDescribe")) + }) + + It("should import Ginkgo's types", func() { + Ω(result).Should(ContainSubstring("type Done ginkgo.Done")) + Ω(result).Should(ContainSubstring("type Benchmarker ginkgo.Benchmarker")) + Ω(strings.Count(result, "type ")).Should(Equal(2)) + }) + + It("should import Gomega's DSL and matchers", func() { + Ω(result).Should(ContainSubstring("var Ω = gomega.Ω")) + Ω(result).Should(ContainSubstring("var ContainSubstring = gomega.ContainSubstring")) + Ω(result).Should(ContainSubstring("var Equal = gomega.Equal")) + }) + + It("should not import blacklisted things", func() { + Ω(result).ShouldNot(ContainSubstring("GINKGO_VERSION")) + Ω(result).ShouldNot(ContainSubstring("GINKGO_PANIC")) + Ω(result).ShouldNot(ContainSubstring("GOMEGA_VERSION")) + }) + }) + + It("should be idempotent (module empty lines - go fmt can fix those for us)", func() { + first := apply("") + second := apply(first) + first = strings.Trim(first, "\n") + second = strings.Trim(second, "\n") + Ω(first).Should(Equal(second)) + }) + + It("should not mess with other things in the input", func() { + result = apply("var MyThing = SomethingThatsMine") + Ω(result).Should(ContainSubstring("var MyThing = SomethingThatsMine")) + }) + + Context("when the user has redefined a name", func() { + It("should honor the redefinition", func() { + result = apply(` +var _ = gomega.Ω +var When = ginkgo.It + `) + + Ω(result).Should(ContainSubstring("var _ = gomega.Ω")) + Ω(result).ShouldNot(ContainSubstring("var Ω = gomega.Ω")) + + Ω(result).Should(ContainSubstring("var When = ginkgo.It")) + Ω(result).ShouldNot(ContainSubstring("var It = ginkgo.It")) + + Ω(result).Should(ContainSubstring("var Context = ginkgo.Context")) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/nodot_command.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/nodot_command.go new file mode 100644 index 0000000000000..e1a2e13099a72 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/nodot_command.go @@ -0,0 +1,74 @@ +package main + +import ( + "bufio" + "flag" + "github.com/onsi/ginkgo/ginkgo/nodot" + "io/ioutil" + "os" + "path/filepath" + "regexp" +) + +func BuildNodotCommand() *Command { + return &Command{ + Name: "nodot", + FlagSet: flag.NewFlagSet("bootstrap", flag.ExitOnError), + UsageCommand: "ginkgo nodot", + Usage: []string{ + "Update the nodot declarations in your test suite", + "Any missing declarations (from, say, a recently added matcher) will be added to your bootstrap file.", + "If you've renamed a declaration, that name will be honored and not overwritten.", + }, + Command: updateNodot, + } +} + +func updateNodot(args []string, additionalArgs []string) { + 
suiteFile, perm := findSuiteFile() + + data, err := ioutil.ReadFile(suiteFile) + if err != nil { + complainAndQuit("Failed to update nodot declarations: " + err.Error()) + } + + content, err := nodot.ApplyNoDot(data) + if err != nil { + complainAndQuit("Failed to update nodot declarations: " + err.Error()) + } + ioutil.WriteFile(suiteFile, content, perm) + + goFmt(suiteFile) +} + +func findSuiteFile() (string, os.FileMode) { + workingDir, err := os.Getwd() + if err != nil { + complainAndQuit("Could not find suite file for nodot: " + err.Error()) + } + + files, err := ioutil.ReadDir(workingDir) + if err != nil { + complainAndQuit("Could not find suite file for nodot: " + err.Error()) + } + + re := regexp.MustCompile(`RunSpecs\(|RunSpecsWithDefaultAndCustomReporters\(|RunSpecsWithCustomReporters\(`) + + for _, file := range files { + if file.IsDir() { + continue + } + path := filepath.Join(workingDir, file.Name()) + f, err := os.Open(path) + if err != nil { + complainAndQuit("Could not find suite file for nodot: " + err.Error()) + } + if re.MatchReader(bufio.NewReader(f)) { + return path, file.Mode() + } + } + + complainAndQuit("Could not find a suite file for nodot: you need a bootstrap file that calls Ginkgo's RunSpecs() command.\nTry running ginkgo bootstrap first.") + + return "", 0 +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/notifications.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/notifications.go new file mode 100644 index 0000000000000..642f12cf64314 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/notifications.go @@ -0,0 +1,61 @@ +package main + +import ( + "fmt" + "os" + "os/exec" + + "github.com/onsi/ginkgo/ginkgo/testsuite" +) + +type Notifier struct { + commandFlags *RunWatchAndBuildCommandFlags +} + +func NewNotifier(commandFlags *RunWatchAndBuildCommandFlags) *Notifier { + return &Notifier{ + commandFlags: commandFlags, + } +} + +func (n *Notifier) VerifyNotificationsAreAvailable() { + if n.commandFlags.Notify { + _, err := exec.LookPath("terminal-notifier") + if err != nil { + fmt.Printf(`--notify requires terminal-notifier, which you don't seem to have installed.
+ +To remedy this: + + brew install terminal-notifier + +To learn more about terminal-notifier: + + https://github.com/alloy/terminal-notifier +`) + os.Exit(1) + } + } +} + +func (n *Notifier) SendSuiteCompletionNotification(suite testsuite.TestSuite, suitePassed bool) { + if suitePassed { + n.SendNotification("Ginkgo [PASS]", fmt.Sprintf(`Test suite for "%s" passed.`, suite.PackageName)) + } else { + n.SendNotification("Ginkgo [FAIL]", fmt.Sprintf(`Test suite for "%s" failed.`, suite.PackageName)) + } +} + +func (n *Notifier) SendNotification(title string, subtitle string) { + args := []string{"-title", title, "-subtitle", subtitle, "-group", "com.onsi.ginkgo"} + + terminal := os.Getenv("TERM_PROGRAM") + if terminal == "iTerm.app" { + args = append(args, "-activate", "com.googlecode.iterm2") + } else if terminal == "Apple_Terminal" { + args = append(args, "-activate", "com.apple.Terminal") + } + + if n.commandFlags.Notify { + exec.Command("terminal-notifier", args...).Run() + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/run_command.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/run_command.go new file mode 100644 index 0000000000000..c8caa99711f4a --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/run_command.go @@ -0,0 +1,192 @@ +package main + +import ( + "flag" + "fmt" + "math/rand" + "os" + "time" + + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/ginkgo/interrupthandler" + "github.com/onsi/ginkgo/ginkgo/testrunner" + "github.com/onsi/ginkgo/types" +) + +func BuildRunCommand() *Command { + commandFlags := NewRunCommandFlags(flag.NewFlagSet("ginkgo", flag.ExitOnError)) + notifier := NewNotifier(commandFlags) + interruptHandler := interrupthandler.NewInterruptHandler() + runner := &SpecRunner{ + commandFlags: commandFlags, + notifier: notifier, + interruptHandler: interruptHandler, + suiteRunner: NewSuiteRunner(notifier, interruptHandler), + } + + return &Command{ + Name: "", + FlagSet: commandFlags.FlagSet, + UsageCommand: "ginkgo -- ", + Usage: []string{ + "Run the tests in the passed in (or the package in the current directory if left blank).", + "Any arguments after -- will be passed to the test.", + "Accepts the following flags:", + }, + Command: runner.RunSpecs, + } +} + +type SpecRunner struct { + commandFlags *RunWatchAndBuildCommandFlags + notifier *Notifier + interruptHandler *interrupthandler.InterruptHandler + suiteRunner *SuiteRunner +} + +func (r *SpecRunner) RunSpecs(args []string, additionalArgs []string) { + r.commandFlags.computeNodes() + r.notifier.VerifyNotificationsAreAvailable() + + suites, skippedPackages := findSuites(args, r.commandFlags.Recurse, r.commandFlags.SkipPackage, true) + if len(skippedPackages) > 0 { + fmt.Println("Will skip:") + for _, skippedPackage := range skippedPackages { + fmt.Println(" " + skippedPackage) + } + } + + if len(skippedPackages) > 0 && len(suites) == 0 { + fmt.Println("All tests skipped! 
Exiting...") + os.Exit(0) + } + + if len(suites) == 0 { + complainAndQuit("Found no test suites") + } + + r.ComputeSuccinctMode(len(suites)) + + t := time.Now() + + runners := []*testrunner.TestRunner{} + for _, suite := range suites { + runners = append(runners, testrunner.New(suite, r.commandFlags.NumCPU, r.commandFlags.ParallelStream, r.commandFlags.Race, r.commandFlags.Cover, r.commandFlags.Tags, additionalArgs)) + } + + numSuites := 0 + runResult := testrunner.PassingRunResult() + if r.commandFlags.UntilItFails { + iteration := 0 + for { + r.UpdateSeed() + randomizedRunners := r.randomizeOrder(runners) + runResult, numSuites = r.suiteRunner.RunSuites(randomizedRunners, r.commandFlags.NumCompilers, r.commandFlags.KeepGoing, nil) + iteration++ + + if r.interruptHandler.WasInterrupted() { + break + } + + if runResult.Passed { + fmt.Printf("\nAll tests passed...\nWill keep running them until they fail.\nThis was attempt #%d\n%s\n", iteration, orcMessage(iteration)) + } else { + fmt.Printf("\nTests failed on attempt #%d\n\n", iteration) + break + } + } + } else { + randomizedRunners := r.randomizeOrder(runners) + runResult, numSuites = r.suiteRunner.RunSuites(randomizedRunners, r.commandFlags.NumCompilers, r.commandFlags.KeepGoing, nil) + } + + for _, runner := range runners { + runner.CleanUp() + } + + fmt.Printf("\nGinkgo ran %d %s in %s\n", numSuites, pluralizedWord("suite", "suites", numSuites), time.Since(t)) + + if runResult.Passed { + if runResult.HasProgrammaticFocus { + fmt.Printf("Test Suite Passed\n") + fmt.Printf("Detected Programmatic Focus - setting exit status to %d\n", types.GINKGO_FOCUS_EXIT_CODE) + os.Exit(types.GINKGO_FOCUS_EXIT_CODE) + } else { + fmt.Printf("Test Suite Passed\n") + os.Exit(0) + } + } else { + fmt.Printf("Test Suite Failed\n") + os.Exit(1) + } +} + +func (r *SpecRunner) ComputeSuccinctMode(numSuites int) { + if config.DefaultReporterConfig.Verbose { + config.DefaultReporterConfig.Succinct = false + return + } + + if numSuites == 1 { + return + } + + if numSuites > 1 && !r.commandFlags.wasSet("succinct") { + config.DefaultReporterConfig.Succinct = true + } +} + +func (r *SpecRunner) UpdateSeed() { + if !r.commandFlags.wasSet("seed") { + config.GinkgoConfig.RandomSeed = time.Now().Unix() + } +} + +func (r *SpecRunner) randomizeOrder(runners []*testrunner.TestRunner) []*testrunner.TestRunner { + if !r.commandFlags.RandomizeSuites { + return runners + } + + if len(runners) <= 1 { + return runners + } + + randomizedRunners := make([]*testrunner.TestRunner, len(runners)) + randomizer := rand.New(rand.NewSource(config.GinkgoConfig.RandomSeed)) + permutation := randomizer.Perm(len(runners)) + for i, j := range permutation { + randomizedRunners[i] = runners[j] + } + return randomizedRunners +} + +func orcMessage(iteration int) string { + if iteration < 10 { + return "" + } else if iteration < 30 { + return []string{ + "If at first you succeed...", + "...try, try again.", + "Looking good!", + "Still good...", + "I think your tests are fine....", + "Yep, still passing", + "Here we go again...", + "Even the gophers are getting bored", + "Did you try -race?", + "Maybe you should stop now?", + "I'm getting tired...", + "What if I just made you a sandwich?", + "Hit ^C, hit ^C, please hit ^C", + "Make it stop. Please!", + "Come on! Enough is enough!", + "Dave, this conversation can serve no purpose anymore. Goodbye.", + "Just what do you think you're doing, Dave? ", + "I, Sisyphus", + "Insanity: doing the same thing over and over again and expecting different results. 
-Einstein", + "I guess Einstein never tried to churn butter", + }[iteration-10] + "\n" + } else { + return "No, seriously... you can probably stop now.\n" + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/run_watch_and_build_command_flags.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/run_watch_and_build_command_flags.go new file mode 100644 index 0000000000000..e0357c33010eb --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/run_watch_and_build_command_flags.go @@ -0,0 +1,118 @@ +package main + +import ( + "flag" + "runtime" + + "github.com/onsi/ginkgo/config" +) + +type RunWatchAndBuildCommandFlags struct { + Recurse bool + Race bool + Cover bool + SkipPackage string + Tags string + + //for run and watch commands + NumCPU int + NumCompilers int + ParallelStream bool + Notify bool + AutoNodes bool + + //only for run command + KeepGoing bool + UntilItFails bool + RandomizeSuites bool + + //only for watch command + Depth int + + FlagSet *flag.FlagSet +} + +const runMode = 1 +const watchMode = 2 +const buildMode = 3 + +func NewRunCommandFlags(flagSet *flag.FlagSet) *RunWatchAndBuildCommandFlags { + c := &RunWatchAndBuildCommandFlags{ + FlagSet: flagSet, + } + c.flags(runMode) + return c +} + +func NewWatchCommandFlags(flagSet *flag.FlagSet) *RunWatchAndBuildCommandFlags { + c := &RunWatchAndBuildCommandFlags{ + FlagSet: flagSet, + } + c.flags(watchMode) + return c +} + +func NewBuildCommandFlags(flagSet *flag.FlagSet) *RunWatchAndBuildCommandFlags { + c := &RunWatchAndBuildCommandFlags{ + FlagSet: flagSet, + } + c.flags(buildMode) + return c +} + +func (c *RunWatchAndBuildCommandFlags) wasSet(flagName string) bool { + wasSet := false + c.FlagSet.Visit(func(f *flag.Flag) { + if f.Name == flagName { + wasSet = true + } + }) + + return wasSet +} + +func (c *RunWatchAndBuildCommandFlags) computeNodes() { + if c.wasSet("nodes") { + return + } + if c.AutoNodes { + switch n := runtime.NumCPU(); { + case n <= 4: + c.NumCPU = n + default: + c.NumCPU = n - 1 + } + } +} + +func (c *RunWatchAndBuildCommandFlags) flags(mode int) { + onWindows := (runtime.GOOS == "windows") + onOSX := (runtime.GOOS == "darwin") + + c.FlagSet.BoolVar(&(c.Recurse), "r", false, "Find and run test suites under the current directory recursively") + c.FlagSet.BoolVar(&(c.Race), "race", false, "Run tests with race detection enabled") + c.FlagSet.BoolVar(&(c.Cover), "cover", false, "Run tests with coverage analysis, will generate coverage profiles with the package name in the current directory") + c.FlagSet.StringVar(&(c.SkipPackage), "skipPackage", "", "A comma-separated list of package names to be skipped. 
If any part of the package's path matches, that package is ignored.") + c.FlagSet.StringVar(&(c.Tags), "tags", "", "A list of build tags to consider satisfied during the build") + + if mode == runMode || mode == watchMode { + config.Flags(c.FlagSet, "", false) + c.FlagSet.IntVar(&(c.NumCPU), "nodes", 1, "The number of parallel test nodes to run") + c.FlagSet.IntVar(&(c.NumCompilers), "compilers", 0, "The number of concurrent compilations to run (0 will autodetect)") + c.FlagSet.BoolVar(&(c.AutoNodes), "p", false, "Run in parallel with auto-detected number of nodes") + c.FlagSet.BoolVar(&(c.ParallelStream), "stream", onWindows, "stream parallel test output in real time: less coherent, but useful for debugging") + if onOSX { + c.FlagSet.BoolVar(&(c.Notify), "notify", false, "Send desktop notifications when a test run completes") + } + } + + if mode == runMode { + c.FlagSet.BoolVar(&(c.KeepGoing), "keepGoing", false, "When true, failures from earlier test suites do not prevent later test suites from running") + c.FlagSet.BoolVar(&(c.UntilItFails), "untilItFails", false, "When true, Ginkgo will keep rerunning tests until a failure occurs") + c.FlagSet.BoolVar(&(c.RandomizeSuites), "randomizeSuites", false, "When true, Ginkgo will randomize the order in which test suites run") + } + + if mode == watchMode { + c.FlagSet.IntVar(&(c.Depth), "depth", 1, "Ginkgo will watch dependencies down to this depth in the dependency tree") + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/suite_runner.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/suite_runner.go new file mode 100644 index 0000000000000..28daf2edbaff3 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/suite_runner.go @@ -0,0 +1,130 @@ +package main + +import ( + "fmt" + "runtime" + + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/ginkgo/interrupthandler" + "github.com/onsi/ginkgo/ginkgo/testrunner" + "github.com/onsi/ginkgo/ginkgo/testsuite" +) + +type SuiteRunner struct { + notifier *Notifier + interruptHandler *interrupthandler.InterruptHandler +} + +type compiler struct { + runner *testrunner.TestRunner + compilationError chan error +} + +func (c *compiler) compile() { + retries := 0 + + err := c.runner.Compile() + for err != nil && retries < 5 { //We retry because Go sometimes steps on itself when multiple compiles happen in parallel. This is ugly, but should help resolve flakiness... 
+ err = c.runner.Compile() + retries++ + } + + c.compilationError <- err +} + +func NewSuiteRunner(notifier *Notifier, interruptHandler *interrupthandler.InterruptHandler) *SuiteRunner { + return &SuiteRunner{ + notifier: notifier, + interruptHandler: interruptHandler, + } +} + +func (r *SuiteRunner) RunSuites(runners []*testrunner.TestRunner, numCompilers int, keepGoing bool, willCompile func(suite testsuite.TestSuite)) (testrunner.RunResult, int) { + runResult := testrunner.PassingRunResult() + + compilers := make([]*compiler, len(runners)) + for i, runner := range runners { + compilers[i] = &compiler{ + runner: runner, + compilationError: make(chan error, 1), + } + } + + compilerChannel := make(chan *compiler) + if numCompilers == 0 { + numCompilers = runtime.NumCPU() + } + for i := 0; i < numCompilers; i++ { + go func() { + for compiler := range compilerChannel { + if willCompile != nil { + willCompile(compiler.runner.Suite) + } + compiler.compile() + } + }() + } + go func() { + for _, compiler := range compilers { + compilerChannel <- compiler + } + close(compilerChannel) + }() + + numSuitesThatRan := 0 + suitesThatFailed := []testsuite.TestSuite{} + for i, runner := range runners { + if r.interruptHandler.WasInterrupted() { + break + } + + compilationError := <-compilers[i].compilationError + if compilationError != nil { + fmt.Print(compilationError.Error()) + } + numSuitesThatRan++ + suiteRunResult := testrunner.FailingRunResult() + if compilationError == nil { + suiteRunResult = compilers[i].runner.Run() + } + r.notifier.SendSuiteCompletionNotification(runner.Suite, suiteRunResult.Passed) + runResult = runResult.Merge(suiteRunResult) + if !suiteRunResult.Passed { + suitesThatFailed = append(suitesThatFailed, runner.Suite) + if !keepGoing { + break + } + } + if i < len(runners)-1 && !config.DefaultReporterConfig.Succinct { + fmt.Println("") + } + } + + if keepGoing && !runResult.Passed { + r.listFailedSuites(suitesThatFailed) + } + + return runResult, numSuitesThatRan +} + +func (r *SuiteRunner) listFailedSuites(suitesThatFailed []testsuite.TestSuite) { + fmt.Println("") + fmt.Println("There were failures detected in the following suites:") + + maxPackageNameLength := 0 + for _, suite := range suitesThatFailed { + if len(suite.PackageName) > maxPackageNameLength { + maxPackageNameLength = len(suite.PackageName) + } + } + + packageNameFormatter := fmt.Sprintf("%%%ds", maxPackageNameLength) + + for _, suite := range suitesThatFailed { + if config.DefaultReporterConfig.NoColor { + fmt.Printf("\t"+packageNameFormatter+" %s\n", suite.PackageName, suite.Path) + } else { + fmt.Printf("\t%s"+packageNameFormatter+"%s %s%s%s\n", redColor, suite.PackageName, defaultStyle, lightGrayColor, suite.Path, defaultStyle) + } + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testrunner/log_writer.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testrunner/log_writer.go new file mode 100644 index 0000000000000..a73a6e3791972 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testrunner/log_writer.go @@ -0,0 +1,52 @@ +package testrunner + +import ( + "bytes" + "fmt" + "io" + "log" + "strings" + "sync" +) + +type logWriter struct { + buffer *bytes.Buffer + lock *sync.Mutex + log *log.Logger +} + +func newLogWriter(target io.Writer, node int) *logWriter { + return &logWriter{ + buffer: &bytes.Buffer{}, + lock: &sync.Mutex{}, + log: log.New(target, fmt.Sprintf("[%d] ", node), 0), + } +} + +func (w *logWriter) Write(data []byte) (n int, err error) { + 
w.lock.Lock() + defer w.lock.Unlock() + + w.buffer.Write(data) + contents := w.buffer.String() + + lines := strings.Split(contents, "\n") + for _, line := range lines[0 : len(lines)-1] { + w.log.Println(line) + } + + w.buffer.Reset() + w.buffer.Write([]byte(lines[len(lines)-1])) + return len(data), nil +} + +func (w *logWriter) Close() error { + w.lock.Lock() + defer w.lock.Unlock() + + if w.buffer.Len() > 0 { + w.log.Println(w.buffer.String()) + } + + return nil +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testrunner/run_result.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testrunner/run_result.go new file mode 100644 index 0000000000000..5d472acb8d2fe --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testrunner/run_result.go @@ -0,0 +1,27 @@ +package testrunner + +type RunResult struct { + Passed bool + HasProgrammaticFocus bool +} + +func PassingRunResult() RunResult { + return RunResult{ + Passed: true, + HasProgrammaticFocus: false, + } +} + +func FailingRunResult() RunResult { + return RunResult{ + Passed: false, + HasProgrammaticFocus: false, + } +} + +func (r RunResult) Merge(o RunResult) RunResult { + return RunResult{ + Passed: r.Passed && o.Passed, + HasProgrammaticFocus: r.HasProgrammaticFocus || o.HasProgrammaticFocus, + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testrunner/test_runner.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testrunner/test_runner.go new file mode 100644 index 0000000000000..e1a8098d20da1 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testrunner/test_runner.go @@ -0,0 +1,378 @@ +package testrunner + +import ( + "bytes" + "fmt" + "io" + "io/ioutil" + "os" + "os/exec" + "path/filepath" + "regexp" + "strconv" + "strings" + "syscall" + "time" + + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/ginkgo/testsuite" + "github.com/onsi/ginkgo/internal/remote" + "github.com/onsi/ginkgo/reporters/stenographer" + "github.com/onsi/ginkgo/types" +) + +type TestRunner struct { + Suite testsuite.TestSuite + compiled bool + + numCPU int + parallelStream bool + race bool + cover bool + tags string + additionalArgs []string +} + +func New(suite testsuite.TestSuite, numCPU int, parallelStream bool, race bool, cover bool, tags string, additionalArgs []string) *TestRunner { + return &TestRunner{ + Suite: suite, + numCPU: numCPU, + parallelStream: parallelStream, + race: race, + cover: cover, + tags: tags, + additionalArgs: additionalArgs, + } +} + +func (t *TestRunner) Compile() error { + if t.compiled { + return nil + } + + if t.Suite.Precompiled { + return nil + } + + os.Remove(t.compiledArtifact()) + + args := []string{"test", "-c", "-i"} + if t.race { + args = append(args, "-race") + } + if t.cover { + args = append(args, "-cover", "-covermode=atomic") + } + if t.tags != "" { + args = append(args, fmt.Sprintf("-tags=%s", t.tags)) + } + + cmd := exec.Command("go", args...) + + cmd.Dir = t.Suite.Path + + output, err := cmd.CombinedOutput() + + if err != nil { + fixedOutput := fixCompilationOutput(string(output), t.Suite.Path) + if len(output) > 0 { + return fmt.Errorf("Failed to compile %s:\n\n%s", t.Suite.PackageName, fixedOutput) + } + return fmt.Errorf("") + } + + t.compiled = true + return nil +} + +/* +go test -c -i spits package.test out into the cwd. there's no way to change this. + +to make sure it doesn't generate conflicting .test files in the cwd, Compile() must switch the cwd to the test package. 
+ +unfortunately, this causes go test's compile output to be expressed *relative to the test package* instead of the cwd. + +this makes it hard to reason about what failed, and also prevents iterm's Cmd+click from working. + +fixCompilationOutput..... rewrites the output to fix the paths. + +yeah...... +*/ +func fixCompilationOutput(output string, relToPath string) string { + re := regexp.MustCompile(`^(\S.*\.go)\:\d+\:`) + lines := strings.Split(output, "\n") + for i, line := range lines { + indices := re.FindStringSubmatchIndex(line) + if len(indices) == 0 { + continue + } + + path := line[indices[2]:indices[3]] + path = filepath.Join(relToPath, path) + lines[i] = path + line[indices[3]:] + } + return strings.Join(lines, "\n") +} + +func (t *TestRunner) Run() RunResult { + if t.Suite.IsGinkgo { + if t.numCPU > 1 { + if t.parallelStream { + return t.runAndStreamParallelGinkgoSuite() + } else { + return t.runParallelGinkgoSuite() + } + } else { + return t.runSerialGinkgoSuite() + } + } else { + return t.runGoTestSuite() + } +} + +func (t *TestRunner) CleanUp() { + if t.Suite.Precompiled { + return + } + os.Remove(t.compiledArtifact()) +} + +func (t *TestRunner) compiledArtifact() string { + compiledArtifact, _ := filepath.Abs(filepath.Join(t.Suite.Path, fmt.Sprintf("%s.test", t.Suite.PackageName))) + return compiledArtifact +} + +func (t *TestRunner) runSerialGinkgoSuite() RunResult { + ginkgoArgs := config.BuildFlagArgs("ginkgo", config.GinkgoConfig, config.DefaultReporterConfig) + return t.run(t.cmd(ginkgoArgs, os.Stdout, 1), nil) +} + +func (t *TestRunner) runGoTestSuite() RunResult { + return t.run(t.cmd([]string{"-test.v"}, os.Stdout, 1), nil) +} + +func (t *TestRunner) runAndStreamParallelGinkgoSuite() RunResult { + completions := make(chan RunResult) + writers := make([]*logWriter, t.numCPU) + + server, err := remote.NewServer(t.numCPU) + if err != nil { + panic("Failed to start parallel spec server") + } + + server.Start() + defer server.Close() + + for cpu := 0; cpu < t.numCPU; cpu++ { + config.GinkgoConfig.ParallelNode = cpu + 1 + config.GinkgoConfig.ParallelTotal = t.numCPU + config.GinkgoConfig.SyncHost = server.Address() + + ginkgoArgs := config.BuildFlagArgs("ginkgo", config.GinkgoConfig, config.DefaultReporterConfig) + + writers[cpu] = newLogWriter(os.Stdout, cpu+1) + + cmd := t.cmd(ginkgoArgs, writers[cpu], cpu+1) + + server.RegisterAlive(cpu+1, func() bool { + if cmd.ProcessState == nil { + return true + } + return !cmd.ProcessState.Exited() + }) + + go t.run(cmd, completions) + } + + res := PassingRunResult() + + for cpu := 0; cpu < t.numCPU; cpu++ { + res = res.Merge(<-completions) + } + + for _, writer := range writers { + writer.Close() + } + + os.Stdout.Sync() + + if t.cover { + t.combineCoverprofiles() + } + + return res +} + +func (t *TestRunner) runParallelGinkgoSuite() RunResult { + result := make(chan bool) + completions := make(chan RunResult) + writers := make([]*logWriter, t.numCPU) + reports := make([]*bytes.Buffer, t.numCPU) + + stenographer := stenographer.New(!config.DefaultReporterConfig.NoColor) + aggregator := remote.NewAggregator(t.numCPU, result, config.DefaultReporterConfig, stenographer) + + server, err := remote.NewServer(t.numCPU) + if err != nil { + panic("Failed to start parallel spec server") + } + server.RegisterReporters(aggregator) + server.Start() + defer server.Close() + + for cpu := 0; cpu < t.numCPU; cpu++ { + config.GinkgoConfig.ParallelNode = cpu + 1 + config.GinkgoConfig.ParallelTotal = t.numCPU + config.GinkgoConfig.SyncHost = 
server.Address() + config.GinkgoConfig.StreamHost = server.Address() + + ginkgoArgs := config.BuildFlagArgs("ginkgo", config.GinkgoConfig, config.DefaultReporterConfig) + + reports[cpu] = &bytes.Buffer{} + writers[cpu] = newLogWriter(reports[cpu], cpu+1) + + cmd := t.cmd(ginkgoArgs, writers[cpu], cpu+1) + + server.RegisterAlive(cpu+1, func() bool { + if cmd.ProcessState == nil { + return true + } + return !cmd.ProcessState.Exited() + }) + + go t.run(cmd, completions) + } + + res := PassingRunResult() + + for cpu := 0; cpu < t.numCPU; cpu++ { + res = res.Merge(<-completions) + } + + //all test processes are done, at this point + //we should be able to wait for the aggregator to tell us that it's done + + select { + case <-result: + fmt.Println("") + case <-time.After(time.Second): + //the aggregator never got back to us! something must have gone wrong + fmt.Println("") + fmt.Println("") + fmt.Println(" ----------------------------------------------------------- ") + fmt.Println(" | |") + fmt.Println(" | Ginkgo timed out waiting for all parallel nodes to end! |") + fmt.Println(" | Here is some salvaged output: |") + fmt.Println(" | |") + fmt.Println(" ----------------------------------------------------------- ") + fmt.Println("") + fmt.Println("") + + os.Stdout.Sync() + + time.Sleep(time.Second) + + for _, writer := range writers { + writer.Close() + } + + for _, report := range reports { + fmt.Print(report.String()) + } + + os.Stdout.Sync() + } + + if t.cover { + t.combineCoverprofiles() + } + + return res +} + +func (t *TestRunner) cmd(ginkgoArgs []string, stream io.Writer, node int) *exec.Cmd { + args := []string{"-test.timeout=24h"} + if t.cover { + coverprofile := "--test.coverprofile=" + t.Suite.PackageName + ".coverprofile" + if t.numCPU > 1 { + coverprofile = fmt.Sprintf("%s.%d", coverprofile, node) + } + args = append(args, coverprofile) + } + + args = append(args, ginkgoArgs...) + args = append(args, t.additionalArgs...) + + cmd := exec.Command(t.compiledArtifact(), args...) 
+ + cmd.Dir = t.Suite.Path + cmd.Stderr = stream + cmd.Stdout = stream + + return cmd +} + +func (t *TestRunner) run(cmd *exec.Cmd, completions chan RunResult) RunResult { + var res RunResult + + defer func() { + if completions != nil { + completions <- res + } + }() + + err := cmd.Start() + if err != nil { + fmt.Printf("Failed to run test suite!\n\t%s", err.Error()) + return res + } + + cmd.Wait() + exitStatus := cmd.ProcessState.Sys().(syscall.WaitStatus).ExitStatus() + res.Passed = (exitStatus == 0) || (exitStatus == types.GINKGO_FOCUS_EXIT_CODE) + res.HasProgrammaticFocus = (exitStatus == types.GINKGO_FOCUS_EXIT_CODE) + + return res +} + +func (t *TestRunner) combineCoverprofiles() { + profiles := []string{} + for cpu := 1; cpu <= t.numCPU; cpu++ { + coverFile := fmt.Sprintf("%s.coverprofile.%d", t.Suite.PackageName, cpu) + coverFile = filepath.Join(t.Suite.Path, coverFile) + coverProfile, err := ioutil.ReadFile(coverFile) + os.Remove(coverFile) + + if err == nil { + profiles = append(profiles, string(coverProfile)) + } + } + + if len(profiles) != t.numCPU { + return + } + + lines := map[string]int{} + lineOrder := []string{} + for i, coverProfile := range profiles { + for _, line := range strings.Split(string(coverProfile), "\n")[1:] { + if len(line) == 0 { + continue + } + components := strings.Split(line, " ") + count, _ := strconv.Atoi(components[len(components)-1]) + prefix := strings.Join(components[0:len(components)-1], " ") + lines[prefix] += count + if i == 0 { + lineOrder = append(lineOrder, prefix) + } + } + } + + output := []string{"mode: atomic"} + for _, line := range lineOrder { + output = append(output, fmt.Sprintf("%s %d", line, lines[line])) + } + finalOutput := strings.Join(output, "\n") + ioutil.WriteFile(filepath.Join(t.Suite.Path, fmt.Sprintf("%s.coverprofile", t.Suite.PackageName)), []byte(finalOutput), 0666) +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testsuite/test_suite.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testsuite/test_suite.go new file mode 100644 index 0000000000000..cc7d2f4539317 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testsuite/test_suite.go @@ -0,0 +1,106 @@ +package testsuite + +import ( + "errors" + "io/ioutil" + "os" + "path/filepath" + "regexp" + "strings" +) + +type TestSuite struct { + Path string + PackageName string + IsGinkgo bool + Precompiled bool +} + +func PrecompiledTestSuite(path string) (TestSuite, error) { + info, err := os.Stat(path) + if err != nil { + return TestSuite{}, err + } + + if info.IsDir() { + return TestSuite{}, errors.New("this is a directory, not a file") + } + + if filepath.Ext(path) != ".test" { + return TestSuite{}, errors.New("this is not a .test binary") + } + + if info.Mode()&0111 == 0 { + return TestSuite{}, errors.New("this is not executable") + } + + dir := relPath(filepath.Dir(path)) + packageName := strings.TrimSuffix(filepath.Base(path), filepath.Ext(path)) + + return TestSuite{ + Path: dir, + PackageName: packageName, + IsGinkgo: true, + Precompiled: true, + }, nil +} + +func SuitesInDir(dir string, recurse bool) []TestSuite { + suites := []TestSuite{} + files, _ := ioutil.ReadDir(dir) + re := regexp.MustCompile(`_test\.go$`) + for _, file := range files { + if !file.IsDir() && re.Match([]byte(file.Name())) { + suites = append(suites, New(dir, files)) + break + } + } + + if recurse { + re = regexp.MustCompile(`^[._]`) + for _, file := range files { + if file.IsDir() && !re.Match([]byte(file.Name())) { + suites = append(suites, 
SuitesInDir(dir+"/"+file.Name(), recurse)...) + } + } + } + + return suites +} + +func relPath(dir string) string { + dir, _ = filepath.Abs(dir) + cwd, _ := os.Getwd() + dir, _ = filepath.Rel(cwd, filepath.Clean(dir)) + dir = "." + string(filepath.Separator) + dir + return dir +} + +func New(dir string, files []os.FileInfo) TestSuite { + return TestSuite{ + Path: relPath(dir), + PackageName: packageNameForSuite(dir), + IsGinkgo: filesHaveGinkgoSuite(dir, files), + } +} + +func packageNameForSuite(dir string) string { + path, _ := filepath.Abs(dir) + return filepath.Base(path) +} + +func filesHaveGinkgoSuite(dir string, files []os.FileInfo) bool { + reTestFile := regexp.MustCompile(`_test\.go$`) + reGinkgo := regexp.MustCompile(`package ginkgo|\/ginkgo"`) + + for _, file := range files { + if !file.IsDir() && reTestFile.Match([]byte(file.Name())) { + contents, _ := ioutil.ReadFile(dir + "/" + file.Name()) + if reGinkgo.Match(contents) { + return true + } + } + } + + return false +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testsuite/testsuite_suite_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testsuite/testsuite_suite_test.go new file mode 100644 index 0000000000000..d1e8b21d37e8c --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testsuite/testsuite_suite_test.go @@ -0,0 +1,13 @@ +package testsuite_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + "testing" +) + +func TestTestsuite(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Testsuite Suite") +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testsuite/testsuite_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testsuite/testsuite_test.go new file mode 100644 index 0000000000000..8681ffc11a0eb --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/testsuite/testsuite_test.go @@ -0,0 +1,167 @@ +package testsuite_test + +import ( + "io/ioutil" + "os" + "path/filepath" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/ginkgo/ginkgo/testsuite" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("TestSuite", func() { + var tmpDir string + var relTmpDir string + + writeFile := func(folder string, filename string, content string, mode os.FileMode) { + path := filepath.Join(tmpDir, folder) + err := os.MkdirAll(path, 0700) + Ω(err).ShouldNot(HaveOccurred()) + + path = filepath.Join(path, filename) + ioutil.WriteFile(path, []byte(content), mode) + } + + BeforeEach(func() { + var err error + tmpDir, err = ioutil.TempDir("/tmp", "ginkgo") + Ω(err).ShouldNot(HaveOccurred()) + + cwd, err := os.Getwd() + Ω(err).ShouldNot(HaveOccurred()) + relTmpDir, err = filepath.Rel(cwd, tmpDir) + relTmpDir = "./" + relTmpDir + Ω(err).ShouldNot(HaveOccurred()) + + //go files in the root directory (no tests) + writeFile("/", "main.go", "package main", 0666) + + //non-go files in a nested directory + writeFile("/redherring", "big_test.jpg", "package ginkgo", 0666) + + //non-ginkgo tests in a nested directory + writeFile("/professorplum", "professorplum_test.go", `import "testing"`, 0666) + + //ginkgo tests in a nested directory + writeFile("/colonelmustard", "colonelmustard_test.go", `import "github.com/onsi/ginkgo"`, 0666) + + //ginkgo tests in a deeply nested directory + writeFile("/colonelmustard/library", "library_test.go", `import "github.com/onsi/ginkgo"`, 0666) + + //a precompiled ginkgo test + writeFile("/precompiled-dir", "precompiled.test", `fake-binary-file`, 0777) + writeFile("/precompiled-dir", "some-other-binary", `fake-binary-file`, 0777) + writeFile("/precompiled-dir", "nonexecutable.test", `fake-binary-file`, 0666) + }) + + AfterEach(func() { + os.RemoveAll(tmpDir) + }) + + Describe("Finding precompiled test suites", func() { + Context("if pointed at an executable file that ends with .test", func() { + It("should return a precompiled test suite", func() { + suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir", "precompiled.test")) + Ω(err).ShouldNot(HaveOccurred()) + Ω(suite).Should(Equal(TestSuite{ + Path: relTmpDir + "/precompiled-dir", + PackageName: "precompiled", + IsGinkgo: true, + Precompiled: true, + })) + }) + }) + + Context("if pointed at a directory", func() { + It("should error", func() { + suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir")) + Ω(suite).Should(BeZero()) + Ω(err).Should(HaveOccurred()) + }) + }) + + Context("if pointed at an executable that doesn't have .test", func() { + It("should error", func() { + suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir", "some-other-binary")) + Ω(suite).Should(BeZero()) + Ω(err).Should(HaveOccurred()) + }) + }) + + Context("if pointed at a .test that isn't executable", func() { + It("should error", func() { + suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir", "nonexecutable.test")) + Ω(suite).Should(BeZero()) + Ω(err).Should(HaveOccurred()) + }) + }) + + Context("if pointed at a nonexisting file", func() { + It("should error", func() { + suite, err := PrecompiledTestSuite(filepath.Join(tmpDir, "precompiled-dir", "nope-nothing-to-see-here")) + Ω(suite).Should(BeZero()) + Ω(err).Should(HaveOccurred()) + }) + }) + }) + + Describe("scanning for suites in a directory", func() { + Context("when there are no tests in the specified directory", func() { + It("should come up empty", func() { + suites := SuitesInDir(tmpDir, false) + Ω(suites).Should(BeEmpty()) + }) + }) + + Context("when there are ginkgo tests in the specified directory", func() { + It("should return an appropriately configured suite", 
func() { + suites := SuitesInDir(filepath.Join(tmpDir, "colonelmustard"), false) + Ω(suites).Should(HaveLen(1)) + + Ω(suites[0].Path).Should(Equal(relTmpDir + "/colonelmustard")) + Ω(suites[0].PackageName).Should(Equal("colonelmustard")) + Ω(suites[0].IsGinkgo).Should(BeTrue()) + Ω(suites[0].Precompiled).Should(BeFalse()) + }) + }) + + Context("when there are non-ginkgo tests in the specified directory", func() { + It("should return an appropriately configured suite", func() { + suites := SuitesInDir(filepath.Join(tmpDir, "professorplum"), false) + Ω(suites).Should(HaveLen(1)) + + Ω(suites[0].Path).Should(Equal(relTmpDir + "/professorplum")) + Ω(suites[0].PackageName).Should(Equal("professorplum")) + Ω(suites[0].IsGinkgo).Should(BeFalse()) + Ω(suites[0].Precompiled).Should(BeFalse()) + }) + }) + + Context("when recursively scanning", func() { + It("should return suites for corresponding test suites, only", func() { + suites := SuitesInDir(tmpDir, true) + Ω(suites).Should(HaveLen(3)) + + Ω(suites).Should(ContainElement(TestSuite{ + Path: relTmpDir + "/colonelmustard", + PackageName: "colonelmustard", + IsGinkgo: true, + Precompiled: false, + })) + Ω(suites).Should(ContainElement(TestSuite{ + Path: relTmpDir + "/professorplum", + PackageName: "professorplum", + IsGinkgo: false, + Precompiled: false, + })) + Ω(suites).Should(ContainElement(TestSuite{ + Path: relTmpDir + "/colonelmustard/library", + PackageName: "library", + IsGinkgo: true, + Precompiled: false, + })) + }) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/unfocus_command.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/unfocus_command.go new file mode 100644 index 0000000000000..16f3c3b72e36e --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/unfocus_command.go @@ -0,0 +1,36 @@ +package main + +import ( + "flag" + "fmt" + "os/exec" +) + +func BuildUnfocusCommand() *Command { + return &Command{ + Name: "unfocus", + AltName: "blur", + FlagSet: flag.NewFlagSet("unfocus", flag.ExitOnError), + UsageCommand: "ginkgo unfocus (or ginkgo blur)", + Usage: []string{ + "Recursively unfocuses any focused tests under the current directory", + }, + Command: unfocusSpecs, + } +} + +func unfocusSpecs([]string, []string) { + unfocus("Describe") + unfocus("Context") + unfocus("It") + unfocus("Measure") +} + +func unfocus(component string) { + fmt.Printf("Removing F%s...\n", component) + cmd := exec.Command("gofmt", fmt.Sprintf("-r=F%s -> %s", component, component), "-w", ".") + out, _ := cmd.CombinedOutput() + if string(out) != "" { + println(string(out)) + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/version_command.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/version_command.go new file mode 100644 index 0000000000000..cdca3a348b6bd --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/version_command.go @@ -0,0 +1,23 @@ +package main + +import ( + "flag" + "fmt" + "github.com/onsi/ginkgo/config" +) + +func BuildVersionCommand() *Command { + return &Command{ + Name: "version", + FlagSet: flag.NewFlagSet("version", flag.ExitOnError), + UsageCommand: "ginkgo version", + Usage: []string{ + "Print Ginkgo's version", + }, + Command: printVersion, + } +} + +func printVersion([]string, []string) { + fmt.Printf("Ginkgo Version %s\n", config.VERSION) +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch/delta.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch/delta.go new file mode 100644 index 
0000000000000..6c485c5b1af81 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch/delta.go @@ -0,0 +1,22 @@ +package watch + +import "sort" + +type Delta struct { + ModifiedPackages []string + + NewSuites []*Suite + RemovedSuites []*Suite + modifiedSuites []*Suite +} + +type DescendingByDelta []*Suite + +func (a DescendingByDelta) Len() int { return len(a) } +func (a DescendingByDelta) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a DescendingByDelta) Less(i, j int) bool { return a[i].Delta() > a[j].Delta() } + +func (d Delta) ModifiedSuites() []*Suite { + sort.Sort(DescendingByDelta(d.modifiedSuites)) + return d.modifiedSuites +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch/delta_tracker.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch/delta_tracker.go new file mode 100644 index 0000000000000..96e83d6ccbce0 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch/delta_tracker.go @@ -0,0 +1,71 @@ +package watch + +import ( + "fmt" + + "github.com/onsi/ginkgo/ginkgo/testsuite" +) + +type SuiteErrors map[testsuite.TestSuite]error + +type DeltaTracker struct { + maxDepth int + suites map[string]*Suite + packageHashes *PackageHashes +} + +func NewDeltaTracker(maxDepth int) *DeltaTracker { + return &DeltaTracker{ + maxDepth: maxDepth, + packageHashes: NewPackageHashes(), + suites: map[string]*Suite{}, + } +} + +func (d *DeltaTracker) Delta(suites []testsuite.TestSuite) (delta Delta, errors SuiteErrors) { + errors = SuiteErrors{} + delta.ModifiedPackages = d.packageHashes.CheckForChanges() + + providedSuitePaths := map[string]bool{} + for _, suite := range suites { + providedSuitePaths[suite.Path] = true + } + + d.packageHashes.StartTrackingUsage() + + for _, suite := range d.suites { + if providedSuitePaths[suite.Suite.Path] { + if suite.Delta() > 0 { + delta.modifiedSuites = append(delta.modifiedSuites, suite) + } + } else { + delta.RemovedSuites = append(delta.RemovedSuites, suite) + } + } + + d.packageHashes.StopTrackingUsageAndPrune() + + for _, suite := range suites { + _, ok := d.suites[suite.Path] + if !ok { + s, err := NewSuite(suite, d.maxDepth, d.packageHashes) + if err != nil { + errors[suite] = err + continue + } + d.suites[suite.Path] = s + delta.NewSuites = append(delta.NewSuites, s) + } + } + + return delta, errors +} + +func (d *DeltaTracker) WillRun(suite testsuite.TestSuite) error { + s, ok := d.suites[suite.Path] + if !ok { + return fmt.Errorf("unkown suite %s", suite.Path) + } + + return s.MarkAsRunAndRecomputedDependencies(d.maxDepth) +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch/dependencies.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch/dependencies.go new file mode 100644 index 0000000000000..82c25face30f4 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch/dependencies.go @@ -0,0 +1,91 @@ +package watch + +import ( + "go/build" + "regexp" +) + +var ginkgoAndGomegaFilter = regexp.MustCompile(`github\.com/onsi/ginkgo|github\.com/onsi/gomega`) + +type Dependencies struct { + deps map[string]int +} + +func NewDependencies(path string, maxDepth int) (Dependencies, error) { + d := Dependencies{ + deps: map[string]int{}, + } + + if maxDepth == 0 { + return d, nil + } + + err := d.seedWithDepsForPackageAtPath(path) + if err != nil { + return d, err + } + + for depth := 1; depth < maxDepth; depth++ { + n := len(d.deps) + d.addDepsForDepth(depth) + if n == len(d.deps) { + break + } + } + + return d, nil +} + +func (d 
Dependencies) Dependencies() map[string]int { + return d.deps +} + +func (d Dependencies) seedWithDepsForPackageAtPath(path string) error { + pkg, err := build.ImportDir(path, 0) + if err != nil { + return err + } + + d.resolveAndAdd(pkg.Imports, 1) + d.resolveAndAdd(pkg.TestImports, 1) + d.resolveAndAdd(pkg.XTestImports, 1) + + delete(d.deps, pkg.Dir) + return nil +} + +func (d Dependencies) addDepsForDepth(depth int) { + for dep, depDepth := range d.deps { + if depDepth == depth { + d.addDepsForDep(dep, depth+1) + } + } +} + +func (d Dependencies) addDepsForDep(dep string, depth int) { + pkg, err := build.ImportDir(dep, 0) + if err != nil { + println(err.Error()) + return + } + d.resolveAndAdd(pkg.Imports, depth) +} + +func (d Dependencies) resolveAndAdd(deps []string, depth int) { + for _, dep := range deps { + pkg, err := build.Import(dep, ".", 0) + if err != nil { + continue + } + if pkg.Goroot == false && !ginkgoAndGomegaFilter.Match([]byte(pkg.Dir)) { + d.addDepIfNotPresent(pkg.Dir, depth) + } + } +} + +func (d Dependencies) addDepIfNotPresent(dep string, depth int) { + _, ok := d.deps[dep] + if !ok { + d.deps[dep] = depth + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch/package_hash.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch/package_hash.go new file mode 100644 index 0000000000000..eaf357c249c17 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch/package_hash.go @@ -0,0 +1,103 @@ +package watch + +import ( + "fmt" + "io/ioutil" + "os" + "regexp" + "time" +) + +var goRegExp = regexp.MustCompile(`\.go$`) +var goTestRegExp = regexp.MustCompile(`_test\.go$`) + +type PackageHash struct { + CodeModifiedTime time.Time + TestModifiedTime time.Time + Deleted bool + + path string + codeHash string + testHash string +} + +func NewPackageHash(path string) *PackageHash { + p := &PackageHash{ + path: path, + } + + p.codeHash, _, p.testHash, _, p.Deleted = p.computeHashes() + + return p +} + +func (p *PackageHash) CheckForChanges() bool { + codeHash, codeModifiedTime, testHash, testModifiedTime, deleted := p.computeHashes() + + if deleted { + if p.Deleted == false { + t := time.Now() + p.CodeModifiedTime = t + p.TestModifiedTime = t + } + p.Deleted = true + return true + } + + modified := false + p.Deleted = false + + if p.codeHash != codeHash { + p.CodeModifiedTime = codeModifiedTime + modified = true + } + if p.testHash != testHash { + p.TestModifiedTime = testModifiedTime + modified = true + } + + p.codeHash = codeHash + p.testHash = testHash + return modified +} + +func (p *PackageHash) computeHashes() (codeHash string, codeModifiedTime time.Time, testHash string, testModifiedTime time.Time, deleted bool) { + infos, err := ioutil.ReadDir(p.path) + + if err != nil { + deleted = true + return + } + + for _, info := range infos { + if info.IsDir() { + continue + } + + if goTestRegExp.Match([]byte(info.Name())) { + testHash += p.hashForFileInfo(info) + if info.ModTime().After(testModifiedTime) { + testModifiedTime = info.ModTime() + } + continue + } + + if goRegExp.Match([]byte(info.Name())) { + codeHash += p.hashForFileInfo(info) + if info.ModTime().After(codeModifiedTime) { + codeModifiedTime = info.ModTime() + } + } + } + + testHash += codeHash + if codeModifiedTime.After(testModifiedTime) { + testModifiedTime = codeModifiedTime + } + + return +} + +func (p *PackageHash) hashForFileInfo(info os.FileInfo) string { + return fmt.Sprintf("%s_%d_%d", info.Name(), info.Size(), info.ModTime().UnixNano()) +} diff --git 
a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch/package_hashes.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch/package_hashes.go new file mode 100644 index 0000000000000..262eaa847ea70 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch/package_hashes.go @@ -0,0 +1,82 @@ +package watch + +import ( + "path/filepath" + "sync" +) + +type PackageHashes struct { + PackageHashes map[string]*PackageHash + usedPaths map[string]bool + lock *sync.Mutex +} + +func NewPackageHashes() *PackageHashes { + return &PackageHashes{ + PackageHashes: map[string]*PackageHash{}, + usedPaths: nil, + lock: &sync.Mutex{}, + } +} + +func (p *PackageHashes) CheckForChanges() []string { + p.lock.Lock() + defer p.lock.Unlock() + + modified := []string{} + + for _, packageHash := range p.PackageHashes { + if packageHash.CheckForChanges() { + modified = append(modified, packageHash.path) + } + } + + return modified +} + +func (p *PackageHashes) Add(path string) *PackageHash { + p.lock.Lock() + defer p.lock.Unlock() + + path, _ = filepath.Abs(path) + _, ok := p.PackageHashes[path] + if !ok { + p.PackageHashes[path] = NewPackageHash(path) + } + + if p.usedPaths != nil { + p.usedPaths[path] = true + } + return p.PackageHashes[path] +} + +func (p *PackageHashes) Get(path string) *PackageHash { + p.lock.Lock() + defer p.lock.Unlock() + + path, _ = filepath.Abs(path) + if p.usedPaths != nil { + p.usedPaths[path] = true + } + return p.PackageHashes[path] +} + +func (p *PackageHashes) StartTrackingUsage() { + p.lock.Lock() + defer p.lock.Unlock() + + p.usedPaths = map[string]bool{} +} + +func (p *PackageHashes) StopTrackingUsageAndPrune() { + p.lock.Lock() + defer p.lock.Unlock() + + for path := range p.PackageHashes { + if !p.usedPaths[path] { + delete(p.PackageHashes, path) + } + } + + p.usedPaths = nil +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch/suite.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch/suite.go new file mode 100644 index 0000000000000..5deaba7cb6dbe --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch/suite.go @@ -0,0 +1,87 @@ +package watch + +import ( + "fmt" + "math" + "time" + + "github.com/onsi/ginkgo/ginkgo/testsuite" +) + +type Suite struct { + Suite testsuite.TestSuite + RunTime time.Time + Dependencies Dependencies + + sharedPackageHashes *PackageHashes +} + +func NewSuite(suite testsuite.TestSuite, maxDepth int, sharedPackageHashes *PackageHashes) (*Suite, error) { + deps, err := NewDependencies(suite.Path, maxDepth) + if err != nil { + return nil, err + } + + sharedPackageHashes.Add(suite.Path) + for dep := range deps.Dependencies() { + sharedPackageHashes.Add(dep) + } + + return &Suite{ + Suite: suite, + Dependencies: deps, + + sharedPackageHashes: sharedPackageHashes, + }, nil +} + +func (s *Suite) Delta() float64 { + delta := s.delta(s.Suite.Path, true, 0) * 1000 + for dep, depth := range s.Dependencies.Dependencies() { + delta += s.delta(dep, false, depth) + } + return delta +} + +func (s *Suite) MarkAsRunAndRecomputedDependencies(maxDepth int) error { + s.RunTime = time.Now() + + deps, err := NewDependencies(s.Suite.Path, maxDepth) + if err != nil { + return err + } + + s.sharedPackageHashes.Add(s.Suite.Path) + for dep := range deps.Dependencies() { + s.sharedPackageHashes.Add(dep) + } + + s.Dependencies = deps + + return nil +} + +func (s *Suite) Description() string { + numDeps := len(s.Dependencies.Dependencies()) + pluralizer := "ies" + if numDeps == 1 { + pluralizer = "y" 
+ } + return fmt.Sprintf("%s [%d dependenc%s]", s.Suite.Path, numDeps, pluralizer) +} + +func (s *Suite) delta(packagePath string, includeTests bool, depth int) float64 { + return math.Max(float64(s.dt(packagePath, includeTests)), 0) / float64(depth+1) +} + +func (s *Suite) dt(packagePath string, includeTests bool) time.Duration { + packageHash := s.sharedPackageHashes.Get(packagePath) + var modifiedTime time.Time + if includeTests { + modifiedTime = packageHash.TestModifiedTime + } else { + modifiedTime = packageHash.CodeModifiedTime + } + + return modifiedTime.Sub(s.RunTime) +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch_command.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch_command.go new file mode 100644 index 0000000000000..02e89f1df3f51 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo/watch_command.go @@ -0,0 +1,172 @@ +package main + +import ( + "flag" + "fmt" + "time" + + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/ginkgo/interrupthandler" + "github.com/onsi/ginkgo/ginkgo/testrunner" + "github.com/onsi/ginkgo/ginkgo/testsuite" + "github.com/onsi/ginkgo/ginkgo/watch" +) + +func BuildWatchCommand() *Command { + commandFlags := NewWatchCommandFlags(flag.NewFlagSet("watch", flag.ExitOnError)) + interruptHandler := interrupthandler.NewInterruptHandler() + notifier := NewNotifier(commandFlags) + watcher := &SpecWatcher{ + commandFlags: commandFlags, + notifier: notifier, + interruptHandler: interruptHandler, + suiteRunner: NewSuiteRunner(notifier, interruptHandler), + } + + return &Command{ + Name: "watch", + FlagSet: commandFlags.FlagSet, + UsageCommand: "ginkgo watch -- ", + Usage: []string{ + "Watches the tests in the passed in and runs them when changes occur.", + "Any arguments after -- will be passed to the test.", + }, + Command: watcher.WatchSpecs, + SuppressFlagDocumentation: true, + FlagDocSubstitute: []string{ + "Accepts all the flags that the ginkgo command accepts except for --keepGoing and --untilItFails", + }, + } +} + +type SpecWatcher struct { + commandFlags *RunWatchAndBuildCommandFlags + notifier *Notifier + interruptHandler *interrupthandler.InterruptHandler + suiteRunner *SuiteRunner +} + +func (w *SpecWatcher) WatchSpecs(args []string, additionalArgs []string) { + w.commandFlags.computeNodes() + w.notifier.VerifyNotificationsAreAvailable() + + w.WatchSuites(args, additionalArgs) +} + +func (w *SpecWatcher) runnersForSuites(suites []testsuite.TestSuite, additionalArgs []string) []*testrunner.TestRunner { + runners := []*testrunner.TestRunner{} + + for _, suite := range suites { + runners = append(runners, testrunner.New(suite, w.commandFlags.NumCPU, w.commandFlags.ParallelStream, w.commandFlags.Race, w.commandFlags.Cover, w.commandFlags.Tags, additionalArgs)) + } + + return runners +} + +func (w *SpecWatcher) WatchSuites(args []string, additionalArgs []string) { + suites, _ := findSuites(args, w.commandFlags.Recurse, w.commandFlags.SkipPackage, false) + + if len(suites) == 0 { + complainAndQuit("Found no test suites") + } + + fmt.Printf("Identified %d test %s. 
Locating dependencies to a depth of %d (this may take a while)...\n", len(suites), pluralizedWord("suite", "suites", len(suites)), w.commandFlags.Depth) + deltaTracker := watch.NewDeltaTracker(w.commandFlags.Depth) + delta, errors := deltaTracker.Delta(suites) + + fmt.Printf("Watching %d %s:\n", len(delta.NewSuites), pluralizedWord("suite", "suites", len(delta.NewSuites))) + for _, suite := range delta.NewSuites { + fmt.Println(" " + suite.Description()) + } + + for suite, err := range errors { + fmt.Printf("Failed to watch %s: %s\n"+suite.PackageName, err) + } + + if len(suites) == 1 { + runners := w.runnersForSuites(suites, additionalArgs) + w.suiteRunner.RunSuites(runners, w.commandFlags.NumCompilers, true, nil) + runners[0].CleanUp() + } + + ticker := time.NewTicker(time.Second) + + for { + select { + case <-ticker.C: + suites, _ := findSuites(args, w.commandFlags.Recurse, w.commandFlags.SkipPackage, false) + delta, _ := deltaTracker.Delta(suites) + + suitesToRun := []testsuite.TestSuite{} + + if len(delta.NewSuites) > 0 { + fmt.Printf(greenColor+"Detected %d new %s:\n"+defaultStyle, len(delta.NewSuites), pluralizedWord("suite", "suites", len(delta.NewSuites))) + for _, suite := range delta.NewSuites { + suitesToRun = append(suitesToRun, suite.Suite) + fmt.Println(" " + suite.Description()) + } + } + + modifiedSuites := delta.ModifiedSuites() + if len(modifiedSuites) > 0 { + fmt.Println(greenColor + "\nDetected changes in:" + defaultStyle) + for _, pkg := range delta.ModifiedPackages { + fmt.Println(" " + pkg) + } + fmt.Printf(greenColor+"Will run %d %s:\n"+defaultStyle, len(modifiedSuites), pluralizedWord("suite", "suites", len(modifiedSuites))) + for _, suite := range modifiedSuites { + suitesToRun = append(suitesToRun, suite.Suite) + fmt.Println(" " + suite.Description()) + } + fmt.Println("") + } + + if len(suitesToRun) > 0 { + w.UpdateSeed() + w.ComputeSuccinctMode(len(suitesToRun)) + runners := w.runnersForSuites(suitesToRun, additionalArgs) + result, _ := w.suiteRunner.RunSuites(runners, w.commandFlags.NumCompilers, true, func(suite testsuite.TestSuite) { + deltaTracker.WillRun(suite) + }) + for _, runner := range runners { + runner.CleanUp() + } + if !w.interruptHandler.WasInterrupted() { + color := redColor + if result.Passed { + color = greenColor + } + fmt.Println(color + "\nDone. Resuming watch..." + defaultStyle) + } + } + + case <-w.interruptHandler.C: + return + } + } +} + +func (w *SpecWatcher) ComputeSuccinctMode(numSuites int) { + if config.DefaultReporterConfig.Verbose { + config.DefaultReporterConfig.Succinct = false + return + } + + if w.commandFlags.wasSet("succinct") { + return + } + + if numSuites == 1 { + config.DefaultReporterConfig.Succinct = false + } + + if numSuites > 1 { + config.DefaultReporterConfig.Succinct = true + } +} + +func (w *SpecWatcher) UpdateSeed() { + if !w.commandFlags.wasSet("seed") { + config.GinkgoConfig.RandomSeed = time.Now().Unix() + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo_dsl.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo_dsl.go new file mode 100644 index 0000000000000..1a31473845d56 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/ginkgo_dsl.go @@ -0,0 +1,521 @@ +/* +Ginkgo is a BDD-style testing framework for Golang + +The godoc documentation describes Ginkgo's API. More comprehensive documentation (with examples!) 
is available at http://onsi.github.io/ginkgo/ + +Ginkgo's preferred matcher library is [Gomega](http://github.com/onsi/gomega) + +Ginkgo on Github: http://github.com/onsi/ginkgo + +Ginkgo is MIT-Licensed +*/ +package ginkgo + +import ( + "flag" + "fmt" + "io" + "net/http" + "os" + "strings" + "time" + + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/internal/codelocation" + "github.com/onsi/ginkgo/internal/failer" + "github.com/onsi/ginkgo/internal/remote" + "github.com/onsi/ginkgo/internal/suite" + "github.com/onsi/ginkgo/internal/testingtproxy" + "github.com/onsi/ginkgo/internal/writer" + "github.com/onsi/ginkgo/reporters" + "github.com/onsi/ginkgo/reporters/stenographer" + "github.com/onsi/ginkgo/types" +) + +const GINKGO_VERSION = config.VERSION +const GINKGO_PANIC = ` +Your test failed. +Ginkgo panics to prevent subsequent assertions from running. +Normally Ginkgo rescues this panic so you shouldn't see it. + +But, if you make an assertion in a goroutine, Ginkgo can't capture the panic. +To circumvent this, you should call + + defer GinkgoRecover() + +at the top of the goroutine that caused this panic. +` +const defaultTimeout = 1 + +var globalSuite *suite.Suite +var globalFailer *failer.Failer + +func init() { + config.Flags(flag.CommandLine, "ginkgo", true) + GinkgoWriter = writer.New(os.Stdout) + globalFailer = failer.New() + globalSuite = suite.New(globalFailer) +} + +//GinkgoWriter implements an io.Writer +//When running in verbose mode any writes to GinkgoWriter will be immediately printed +//to stdout. Otherwise, GinkgoWriter will buffer any writes produced during the current test and flush them to screen +//only if the current test fails. +var GinkgoWriter io.Writer + +//The interface by which Ginkgo receives *testing.T +type GinkgoTestingT interface { + Fail() +} + +//GinkgoParallelNode returns the parallel node number for the current ginkgo process +//The node number is 1-indexed +func GinkgoParallelNode() int { + return config.GinkgoConfig.ParallelNode +} + +//Some matcher libraries or legacy codebases require a *testing.T +//GinkgoT implements an interface analogous to *testing.T and can be used if +//the library in question accepts *testing.T through an interface +// +// For example, with testify: +// assert.Equal(GinkgoT(), 123, 123, "they should be equal") +// +// Or with gomock: +// gomock.NewController(GinkgoT()) +// +// GinkgoT() takes an optional offset argument that can be used to get the +// correct line number associated with the failure. +func GinkgoT(optionalOffset ...int) GinkgoTInterface { + offset := 3 + if len(optionalOffset) > 0 { + offset = optionalOffset[0] + } + return testingtproxy.New(GinkgoWriter, Fail, offset) +} + +//The interface returned by GinkgoT(). This covers most of the methods +//in the testing package's T. +type GinkgoTInterface interface { + Fail() + Error(args ...interface{}) + Errorf(format string, args ...interface{}) + FailNow() + Fatal(args ...interface{}) + Fatalf(format string, args ...interface{}) + Log(args ...interface{}) + Logf(format string, args ...interface{}) + Failed() bool + Parallel() + Skip(args ...interface{}) + Skipf(format string, args ...interface{}) + SkipNow() + Skipped() bool +} + +//Custom Ginkgo test reporters must implement the Reporter interface. +// +//The custom reporter is passed in a SuiteSummary when the suite begins and ends, +//and a SpecSummary just before a spec begins and just after a spec ends +type Reporter reporters.Reporter + +//Asynchronous specs are given a channel of the Done type. 
You must close or write to the channel +//to tell Ginkgo that your async test is done. +type Done chan<- interface{} + +//GinkgoTestDescription represents the information about the current running test returned by CurrentGinkgoTestDescription +// FullTestText: a concatenation of ComponentTexts and the TestText +// ComponentTexts: a list of all texts for the Describes & Contexts leading up to the current test +// TestText: the text in the actual It or Measure node +// IsMeasurement: true if the current test is a measurement +// FileName: the name of the file containing the current test +// LineNumber: the line number for the current test +type GinkgoTestDescription struct { + FullTestText string + ComponentTexts []string + TestText string + + IsMeasurement bool + + FileName string + LineNumber int +} + +//CurrentGinkgoTestDescription returns information about the current running test. +func CurrentGinkgoTestDescription() GinkgoTestDescription { + summary, ok := globalSuite.CurrentRunningSpecSummary() + if !ok { + return GinkgoTestDescription{} + } + + subjectCodeLocation := summary.ComponentCodeLocations[len(summary.ComponentCodeLocations)-1] + + return GinkgoTestDescription{ + ComponentTexts: summary.ComponentTexts[1:], + FullTestText: strings.Join(summary.ComponentTexts[1:], " "), + TestText: summary.ComponentTexts[len(summary.ComponentTexts)-1], + IsMeasurement: summary.IsMeasurement, + FileName: subjectCodeLocation.FileName, + LineNumber: subjectCodeLocation.LineNumber, + } +} + +//Measurement tests receive a Benchmarker. +// +//You use the Time() function to time how long the passed in body function takes to run +//You use the RecordValue() function to track arbitrary numerical measurements. +//The optional info argument is passed to the test reporter and can be used to +// provide the measurement data to a custom reporter with context. +// +//See http://onsi.github.io/ginkgo/#benchmark_tests for more details +type Benchmarker interface { + Time(name string, body func(), info ...interface{}) (elapsedTime time.Duration) + RecordValue(name string, value float64, info ...interface{}) +} + +//RunSpecs is the entry point for the Ginkgo test runner. +//You must call this within a Golang testing TestX(t *testing.T) function. +// +//To bootstrap a test suite you can use the Ginkgo CLI: +// +// ginkgo bootstrap +func RunSpecs(t GinkgoTestingT, description string) bool { + specReporters := []Reporter{buildDefaultReporter()} + return RunSpecsWithCustomReporters(t, description, specReporters) +} + +//To run your tests with Ginkgo's default reporter and your custom reporter(s), replace +//RunSpecs() with this method. +func RunSpecsWithDefaultAndCustomReporters(t GinkgoTestingT, description string, specReporters []Reporter) bool { + specReporters = append([]Reporter{buildDefaultReporter()}, specReporters...) + return RunSpecsWithCustomReporters(t, description, specReporters) +} + +//To run your tests with your custom reporter(s) (and *not* Ginkgo's default reporter), replace +//RunSpecs() with this method. 
Note that parallel tests will not work correctly without the default reporter +func RunSpecsWithCustomReporters(t GinkgoTestingT, description string, specReporters []Reporter) bool { + writer := GinkgoWriter.(*writer.Writer) + writer.SetStream(config.DefaultReporterConfig.Verbose) + reporters := make([]reporters.Reporter, len(specReporters)) + for i, reporter := range specReporters { + reporters[i] = reporter + } + passed, hasFocusedTests := globalSuite.Run(t, description, reporters, writer, config.GinkgoConfig) + if passed && hasFocusedTests { + fmt.Println("PASS | FOCUSED") + os.Exit(types.GINKGO_FOCUS_EXIT_CODE) + } + return passed +} + +func buildDefaultReporter() Reporter { + remoteReportingServer := config.GinkgoConfig.StreamHost + if remoteReportingServer == "" { + stenographer := stenographer.New(!config.DefaultReporterConfig.NoColor) + return reporters.NewDefaultReporter(config.DefaultReporterConfig, stenographer) + } else { + return remote.NewForwardingReporter(remoteReportingServer, &http.Client{}, remote.NewOutputInterceptor()) + } +} + +//Fail notifies Ginkgo that the current spec has failed. (Gomega will call Fail for you automatically when an assertion fails.) +func Fail(message string, callerSkip ...int) { + skip := 0 + if len(callerSkip) > 0 { + skip = callerSkip[0] + } + + globalFailer.Fail(message, codelocation.New(skip+1)) + panic(GINKGO_PANIC) +} + +//GinkgoRecover should be deferred at the top of any spawned goroutine that (may) call `Fail` +//Since Gomega assertions call fail, you should throw a `defer GinkgoRecover()` at the top of any goroutine that +//calls out to Gomega +// +//Here's why: Ginkgo's `Fail` method records the failure and then panics to prevent +//further assertions from running. This panic must be recovered. Ginkgo does this for you +//if the panic originates in a Ginkgo node (an It, BeforeEach, etc...) +// +//Unfortunately, if a panic originates on a goroutine *launched* from one of these nodes there's no +//way for Ginkgo to rescue the panic. To do this, you must remember to `defer GinkgoRecover()` at the top of such a goroutine. +func GinkgoRecover() { + e := recover() + if e != nil { + globalFailer.Panic(codelocation.New(1), e) + } +} + +//Describe blocks allow you to organize your specs. A Describe block can contain any number of +//BeforeEach, AfterEach, JustBeforeEach, It, and Measurement blocks. +// +//In addition you can nest Describe and Context blocks. Describe and Context blocks are functionally +//equivalent. The difference is purely semantic -- you typical Describe the behavior of an object +//or method and, within that Describe, outline a number of Contexts. +func Describe(text string, body func()) bool { + globalSuite.PushContainerNode(text, body, types.FlagTypeNone, codelocation.New(1)) + return true +} + +//You can focus the tests within a describe block using FDescribe +func FDescribe(text string, body func()) bool { + globalSuite.PushContainerNode(text, body, types.FlagTypeFocused, codelocation.New(1)) + return true +} + +//You can mark the tests within a describe block as pending using PDescribe +func PDescribe(text string, body func()) bool { + globalSuite.PushContainerNode(text, body, types.FlagTypePending, codelocation.New(1)) + return true +} + +//You can mark the tests within a describe block as pending using XDescribe +func XDescribe(text string, body func()) bool { + globalSuite.PushContainerNode(text, body, types.FlagTypePending, codelocation.New(1)) + return true +} + +//Context blocks allow you to organize your specs. 
A Context block can contain any number of +//BeforeEach, AfterEach, JustBeforeEach, It, and Measurement blocks. +// +//In addition you can nest Describe and Context blocks. Describe and Context blocks are functionally +//equivalent. The difference is purely semantic -- you typical Describe the behavior of an object +//or method and, within that Describe, outline a number of Contexts. +func Context(text string, body func()) bool { + globalSuite.PushContainerNode(text, body, types.FlagTypeNone, codelocation.New(1)) + return true +} + +//You can focus the tests within a describe block using FContext +func FContext(text string, body func()) bool { + globalSuite.PushContainerNode(text, body, types.FlagTypeFocused, codelocation.New(1)) + return true +} + +//You can mark the tests within a describe block as pending using PContext +func PContext(text string, body func()) bool { + globalSuite.PushContainerNode(text, body, types.FlagTypePending, codelocation.New(1)) + return true +} + +//You can mark the tests within a describe block as pending using XContext +func XContext(text string, body func()) bool { + globalSuite.PushContainerNode(text, body, types.FlagTypePending, codelocation.New(1)) + return true +} + +//It blocks contain your test code and assertions. You cannot nest any other Ginkgo blocks +//within an It block. +// +//Ginkgo will normally run It blocks synchronously. To perform asynchronous tests, pass a +//function that accepts a Done channel. When you do this, you can also provide an optional timeout. +func It(text string, body interface{}, timeout ...float64) bool { + globalSuite.PushItNode(text, body, types.FlagTypeNone, codelocation.New(1), parseTimeout(timeout...)) + return true +} + +//You can focus individual Its using FIt +func FIt(text string, body interface{}, timeout ...float64) bool { + globalSuite.PushItNode(text, body, types.FlagTypeFocused, codelocation.New(1), parseTimeout(timeout...)) + return true +} + +//You can mark Its as pending using PIt +func PIt(text string, _ ...interface{}) bool { + globalSuite.PushItNode(text, func() {}, types.FlagTypePending, codelocation.New(1), 0) + return true +} + +//You can mark Its as pending using XIt +func XIt(text string, _ ...interface{}) bool { + globalSuite.PushItNode(text, func() {}, types.FlagTypePending, codelocation.New(1), 0) + return true +} + +//By allows you to better document large Its. +// +//Generally you should try to keep your Its short and to the point. This is not always possible, however, +//especially in the context of integration tests that capture a particular workflow. +// +//By allows you to document such flows. By must be called within a runnable node (It, BeforeEach, Measure, etc...) +//By will simply log the passed in text to the GinkgoWriter. If By is handed a function it will immediately run the function. +func By(text string, callbacks ...func()) { + preamble := "\x1b[1mSTEP\x1b[0m" + if config.DefaultReporterConfig.NoColor { + preamble = "STEP" + } + fmt.Fprintln(GinkgoWriter, preamble+": "+text) + if len(callbacks) == 1 { + callbacks[0]() + } + if len(callbacks) > 1 { + panic("just one callback per By, please") + } +} + +//Measure blocks run the passed in body function repeatedly (determined by the samples argument) +//and accumulate metrics provided to the Benchmarker by the body function. 
+// +//The body function must have the signature: +// func(b Benchmarker) +func Measure(text string, body interface{}, samples int) bool { + globalSuite.PushMeasureNode(text, body, types.FlagTypeNone, codelocation.New(1), samples) + return true +} + +//You can focus individual Measures using FMeasure +func FMeasure(text string, body interface{}, samples int) bool { + globalSuite.PushMeasureNode(text, body, types.FlagTypeFocused, codelocation.New(1), samples) + return true +} + +//You can mark Measurements as pending using PMeasure +func PMeasure(text string, _ ...interface{}) bool { + globalSuite.PushMeasureNode(text, func(b Benchmarker) {}, types.FlagTypePending, codelocation.New(1), 0) + return true +} + +//You can mark Measurements as pending using XMeasure +func XMeasure(text string, _ ...interface{}) bool { + globalSuite.PushMeasureNode(text, func(b Benchmarker) {}, types.FlagTypePending, codelocation.New(1), 0) + return true +} + +//BeforeSuite blocks are run just once before any specs are run. When running in parallel, each +//parallel node process will call BeforeSuite. +// +//BeforeSuite blocks can be made asynchronous by providing a body function that accepts a Done channel +// +//You may only register *one* BeforeSuite handler per test suite. You typically do so in your bootstrap file at the top level. +func BeforeSuite(body interface{}, timeout ...float64) bool { + globalSuite.SetBeforeSuiteNode(body, codelocation.New(1), parseTimeout(timeout...)) + return true +} + +//AfterSuite blocks are *always* run after all the specs regardless of whether specs have passed or failed. +//Moreover, if Ginkgo receives an interrupt signal (^C) it will attempt to run the AfterSuite before exiting. +// +//When running in parallel, each parallel node process will call AfterSuite. +// +//AfterSuite blocks can be made asynchronous by providing a body function that accepts a Done channel +// +//You may only register *one* AfterSuite handler per test suite. You typically do so in your bootstrap file at the top level. +func AfterSuite(body interface{}, timeout ...float64) bool { + globalSuite.SetAfterSuiteNode(body, codelocation.New(1), parseTimeout(timeout...)) + return true +} + +//SynchronizedBeforeSuite blocks are primarily meant to solve the problem of setting up singleton external resources shared across +//nodes when running tests in parallel. For example, say you have a shared database that you can only start one instance of that +//must be used in your tests. When running in parallel, only one node should set up the database and all other nodes should wait +//until that node is done before running. +// +//SynchronizedBeforeSuite accomplishes this by taking *two* function arguments. The first is only run on parallel node #1. The second is +//run on all nodes, but *only* after the first function completes successfully. Ginkgo also makes it possible to send data from the first function (on Node 1) +//to the second function (on all the other nodes). +// +//The functions have the following signatures. 
The first function (which only runs on node 1) has the signature: +// +// func() []byte +// +//or, to run asynchronously: +// +// func(done Done) []byte +// +//The byte array returned by the first function is then passed to the second function, which has the signature: +// +// func(data []byte) +// +//or, to run asynchronously: +// +// func(data []byte, done Done) +// +//Here's a simple pseudo-code example that starts a shared database on Node 1 and shares the database's address with the other nodes: +// +// var dbClient db.Client +// var dbRunner db.Runner +// +// var _ = SynchronizedBeforeSuite(func() []byte { +// dbRunner = db.NewRunner() +// err := dbRunner.Start() +// Ω(err).ShouldNot(HaveOccurred()) +// return []byte(dbRunner.URL) +// }, func(data []byte) { +// dbClient = db.NewClient() +// err := dbClient.Connect(string(data)) +// Ω(err).ShouldNot(HaveOccurred()) +// }) +func SynchronizedBeforeSuite(node1Body interface{}, allNodesBody interface{}, timeout ...float64) bool { + globalSuite.SetSynchronizedBeforeSuiteNode( + node1Body, + allNodesBody, + codelocation.New(1), + parseTimeout(timeout...), + ) + return true +} + +//SynchronizedAfterSuite blocks complement the SynchronizedBeforeSuite blocks in solving the problem of setting up +//external singleton resources shared across nodes when running tests in parallel. +// +//SynchronizedAfterSuite accomplishes this by taking *two* function arguments. The first runs on all nodes. The second runs only on parallel node #1 +//and *only* after all other nodes have finished and exited. This ensures that node 1, and any resources it is running, remain alive until +//all other nodes are finished. +// +//Both functions have the same signature: either func() or func(done Done) to run asynchronously. +// +//Here's a pseudo-code example that complements that given in SynchronizedBeforeSuite. Here, SynchronizedAfterSuite is used to tear down the shared database +//only after all nodes have finished: +// +// var _ = SynchronizedAfterSuite(func() { +// dbClient.Cleanup() +// }, func() { +// dbRunner.Stop() +// }) +func SynchronizedAfterSuite(allNodesBody interface{}, node1Body interface{}, timeout ...float64) bool { + globalSuite.SetSynchronizedAfterSuiteNode( + allNodesBody, + node1Body, + codelocation.New(1), + parseTimeout(timeout...), + ) + return true +} + +//BeforeEach blocks are run before It blocks. When multiple BeforeEach blocks are defined in nested +//Describe and Context blocks the outermost BeforeEach blocks are run first. +// +//Like It blocks, BeforeEach blocks can be made asynchronous by providing a body function that accepts +//a Done channel +func BeforeEach(body interface{}, timeout ...float64) bool { + globalSuite.PushBeforeEachNode(body, codelocation.New(1), parseTimeout(timeout...)) + return true +} + +//JustBeforeEach blocks are run before It blocks but *after* all BeforeEach blocks. For more details, +//read the [documentation](http://onsi.github.io/ginkgo/#separating_creation_and_configuration_) +// +//Like It blocks, BeforeEach blocks can be made asynchronous by providing a body function that accepts +//a Done channel +func JustBeforeEach(body interface{}, timeout ...float64) bool { + globalSuite.PushJustBeforeEachNode(body, codelocation.New(1), parseTimeout(timeout...)) + return true +} + +//AfterEach blocks are run after It blocks. When multiple AfterEach blocks are defined in nested +//Describe and Context blocks the innermost AfterEach blocks are run first. 
+// +//Like It blocks, AfterEach blocks can be made asynchronous by providing a body function that accepts +//a Done channel +func AfterEach(body interface{}, timeout ...float64) bool { + globalSuite.PushAfterEachNode(body, codelocation.New(1), parseTimeout(timeout...)) + return true +} + +func parseTimeout(timeout ...float64) time.Duration { + if len(timeout) == 0 { + return time.Duration(defaultTimeout * int64(time.Second)) + } else { + return time.Duration(timeout[0] * float64(time.Second)) + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/convert_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/convert_test.go new file mode 100644 index 0000000000000..f4fd678c5f6bf --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/convert_test.go @@ -0,0 +1,121 @@ +package integration_test + +import ( + "io/ioutil" + "os" + "os/exec" + "path/filepath" + "strings" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" +) + +var _ = Describe("ginkgo convert", func() { + var tmpDir string + + readConvertedFileNamed := func(pathComponents ...string) string { + pathToFile := filepath.Join(tmpDir, "convert_fixtures", filepath.Join(pathComponents...)) + bytes, err := ioutil.ReadFile(pathToFile) + ExpectWithOffset(1, err).NotTo(HaveOccurred()) + + return string(bytes) + } + + readGoldMasterNamed := func(filename string) string { + bytes, err := ioutil.ReadFile(filepath.Join("_fixtures", "convert_goldmasters", filename)) + Ω(err).ShouldNot(HaveOccurred()) + + return string(bytes) + } + + BeforeEach(func() { + var err error + + tmpDir, err = ioutil.TempDir("", "ginkgo-convert") + Ω(err).ShouldNot(HaveOccurred()) + + err = exec.Command("cp", "-r", filepath.Join("_fixtures", "convert_fixtures"), tmpDir).Run() + Ω(err).ShouldNot(HaveOccurred()) + }) + + JustBeforeEach(func() { + cwd, err := os.Getwd() + Ω(err).ShouldNot(HaveOccurred()) + + relPath, err := filepath.Rel(cwd, filepath.Join(tmpDir, "convert_fixtures")) + Ω(err).ShouldNot(HaveOccurred()) + + cmd := exec.Command(pathToGinkgo, "convert", relPath) + cmd.Env = os.Environ() + for i, env := range cmd.Env { + if strings.HasPrefix(env, "PATH") { + cmd.Env[i] = cmd.Env[i] + ":" + filepath.Dir(pathToGinkgo) + break + } + } + err = cmd.Run() + Ω(err).ShouldNot(HaveOccurred()) + }) + + AfterEach(func() { + err := os.RemoveAll(tmpDir) + Ω(err).ShouldNot(HaveOccurred()) + }) + + It("rewrites xunit tests as ginkgo tests", func() { + convertedFile := readConvertedFileNamed("xunit_test.go") + goldMaster := readGoldMasterNamed("xunit_test.go") + Ω(convertedFile).Should(Equal(goldMaster)) + }) + + It("rewrites all usages of *testing.T as mr.T()", func() { + convertedFile := readConvertedFileNamed("extra_functions_test.go") + goldMaster := readGoldMasterNamed("extra_functions_test.go") + Ω(convertedFile).Should(Equal(goldMaster)) + }) + + It("rewrites tests in the package dir that belong to other packages", func() { + convertedFile := readConvertedFileNamed("outside_package_test.go") + goldMaster := readGoldMasterNamed("outside_package_test.go") + Ω(convertedFile).Should(Equal(goldMaster)) + }) + + It("rewrites tests in nested packages", func() { + convertedFile := readConvertedFileNamed("nested", "nested_test.go") + goldMaster := readGoldMasterNamed("nested_test.go") + Ω(convertedFile).Should(Equal(goldMaster)) + }) + + Context("ginkgo test suite files", func() { + It("creates a ginkgo test suite file for the package you specified", func() { + testsuite := 
readConvertedFileNamed("convert_fixtures_suite_test.go") + goldMaster := readGoldMasterNamed("suite_test.go") + Ω(testsuite).Should(Equal(goldMaster)) + }) + + It("converts go tests in deeply nested packages (some may not contain go files)", func() { + testsuite := readConvertedFileNamed("nested_without_gofiles", "subpackage", "nested_subpackage_test.go") + goldMaster := readGoldMasterNamed("nested_subpackage_test.go") + Ω(testsuite).Should(Equal(goldMaster)) + }) + + It("creates ginkgo test suites for all nested packages", func() { + testsuite := readConvertedFileNamed("nested", "nested_suite_test.go") + goldMaster := readGoldMasterNamed("nested_suite_test.go") + Ω(testsuite).Should(Equal(goldMaster)) + }) + }) + + Context("with an existing test suite file", func() { + BeforeEach(func() { + goldMaster := readGoldMasterNamed("fixtures_suite_test.go") + err := ioutil.WriteFile(filepath.Join(tmpDir, "convert_fixtures", "tmp_suite_test.go"), []byte(goldMaster), 0600) + Ω(err).ShouldNot(HaveOccurred()) + }) + + It("gracefully handles existing test suite files", func() { + //nothing should have gone wrong! + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/coverage_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/coverage_test.go new file mode 100644 index 0000000000000..0ba00a9857f00 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/coverage_test.go @@ -0,0 +1,34 @@ +package integration_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + "github.com/onsi/gomega/gexec" + "os" + "os/exec" +) + +var _ = Describe("Coverage Specs", func() { + AfterEach(func() { + os.RemoveAll("./_fixtures/coverage_fixture/coverage_fixture.coverprofile") + }) + + It("runs coverage analysis in series and in parallel", func() { + session := startGinkgo("./_fixtures/coverage_fixture", "-cover") + Eventually(session).Should(gexec.Exit(0)) + output := session.Out.Contents() + Ω(output).Should(ContainSubstring("coverage: 80.0% of statements")) + + serialCoverProfileOutput, err := exec.Command("go", "tool", "cover", "-func=./_fixtures/coverage_fixture/coverage_fixture.coverprofile").CombinedOutput() + Ω(err).ShouldNot(HaveOccurred()) + + os.RemoveAll("./_fixtures/coverage_fixture/coverage_fixture.coverprofile") + + Eventually(startGinkgo("./_fixtures/coverage_fixture", "-cover", "-nodes=4")).Should(gexec.Exit(0)) + + parallelCoverProfileOutput, err := exec.Command("go", "tool", "cover", "-func=./_fixtures/coverage_fixture/coverage_fixture.coverprofile").CombinedOutput() + Ω(err).ShouldNot(HaveOccurred()) + + Ω(parallelCoverProfileOutput).Should(Equal(serialCoverProfileOutput)) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/fail_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/fail_test.go new file mode 100644 index 0000000000000..8dcf5e4ad9295 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/fail_test.go @@ -0,0 +1,48 @@ +package integration_test + +import ( + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" + "github.com/onsi/gomega/gexec" +) + +var _ = Describe("Failing Specs", func() { + var pathToTest string + + BeforeEach(func() { + pathToTest = tmpPath("failing") + copyIn("fail_fixture", pathToTest) + }) + + It("should fail in all the possible ways", func() { + session := startGinkgo(pathToTest, "--noColor") + Eventually(session).Should(gexec.Exit(1)) + output := string(session.Out.Contents()) + + Ω(output).ShouldNot(ContainSubstring("NEVER SEE THIS")) + + Ω(output).Should(ContainSubstring("a top level failure on line 9")) + Ω(output).Should(ContainSubstring("fail_fixture_test.go:9")) + Ω(output).Should(ContainSubstring("an async top level failure on line 14")) + Ω(output).Should(ContainSubstring("fail_fixture_test.go:14")) + Ω(output).Should(ContainSubstring("a top level goroutine failure on line 21")) + Ω(output).Should(ContainSubstring("fail_fixture_test.go:21")) + + Ω(output).Should(ContainSubstring("a sync failure")) + Ω(output).Should(MatchRegexp(`Test Panicked\n\s+a sync panic`)) + Ω(output).Should(ContainSubstring("a sync FAIL failure")) + Ω(output).Should(ContainSubstring("async timeout [It]")) + Ω(output).Should(ContainSubstring("Timed out")) + Ω(output).Should(ContainSubstring("an async failure")) + Ω(output).Should(MatchRegexp(`Test Panicked\n\s+an async panic`)) + Ω(output).Should(ContainSubstring("an async FAIL failure")) + Ω(output).Should(ContainSubstring("a goroutine FAIL failure")) + Ω(output).Should(ContainSubstring("a goroutine failure")) + Ω(output).Should(MatchRegexp(`Test Panicked\n\s+a goroutine panic`)) + Ω(output).Should(ContainSubstring("a measure failure")) + Ω(output).Should(ContainSubstring("a measure FAIL failure")) + Ω(output).Should(MatchRegexp(`Test Panicked\n\s+a measure panic`)) + + Ω(output).Should(ContainSubstring("0 Passed | 16 Failed")) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/flags_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/flags_test.go new file mode 100644 index 0000000000000..0a23f5964d9d6 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/flags_test.go @@ -0,0 +1,176 @@ +package integration_test + +import ( + "os" + "path/filepath" + "strings" + + . "github.com/onsi/ginkgo" + "github.com/onsi/ginkgo/types" + . 
"github.com/onsi/gomega" + "github.com/onsi/gomega/gexec" +) + +var _ = Describe("Flags Specs", func() { + var pathToTest string + + BeforeEach(func() { + pathToTest = tmpPath("flags") + copyIn("flags_tests", pathToTest) + }) + + getRandomOrders := func(output string) []int { + return []int{strings.Index(output, "RANDOM_A"), strings.Index(output, "RANDOM_B"), strings.Index(output, "RANDOM_C")} + } + + It("normally passes, runs measurements, prints out noisy pendings, does not randomize tests, and honors the programmatic focus", func() { + session := startGinkgo(pathToTest, "--noColor") + Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("Ran 3 samples:"), "has a measurement") + Ω(output).Should(ContainSubstring("10 Passed")) + Ω(output).Should(ContainSubstring("0 Failed")) + Ω(output).Should(ContainSubstring("1 Pending")) + Ω(output).Should(ContainSubstring("2 Skipped")) + Ω(output).Should(ContainSubstring("[PENDING]")) + Ω(output).Should(ContainSubstring("marshmallow")) + Ω(output).Should(ContainSubstring("chocolate")) + Ω(output).Should(ContainSubstring("CUSTOM_FLAG: default")) + Ω(output).Should(ContainSubstring("Detected Programmatic Focus - setting exit status to %d", types.GINKGO_FOCUS_EXIT_CODE)) + Ω(output).ShouldNot(ContainSubstring("smores")) + Ω(output).ShouldNot(ContainSubstring("SLOW TEST")) + Ω(output).ShouldNot(ContainSubstring("should honor -slowSpecThreshold")) + + orders := getRandomOrders(output) + Ω(orders[0]).Should(BeNumerically("<", orders[1])) + Ω(orders[1]).Should(BeNumerically("<", orders[2])) + }) + + It("should run a coverprofile when passed -cover", func() { + session := startGinkgo(pathToTest, "--noColor", "--cover", "--focus=the focused set") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + _, err := os.Stat(filepath.Join(pathToTest, "flags.coverprofile")) + Ω(err).ShouldNot(HaveOccurred()) + Ω(output).Should(ContainSubstring("coverage: ")) + }) + + It("should fail when there are pending tests and it is passed --failOnPending", func() { + session := startGinkgo(pathToTest, "--noColor", "--failOnPending") + Eventually(session).Should(gexec.Exit(1)) + }) + + It("should not print out pendings when --noisyPendings=false", func() { + session := startGinkgo(pathToTest, "--noColor", "--noisyPendings=false") + Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE)) + output := string(session.Out.Contents()) + + Ω(output).ShouldNot(ContainSubstring("[PENDING]")) + Ω(output).Should(ContainSubstring("1 Pending")) + }) + + It("should override the programmatic focus when told to focus", func() { + session := startGinkgo(pathToTest, "--noColor", "--focus=smores") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("marshmallow")) + Ω(output).Should(ContainSubstring("chocolate")) + Ω(output).Should(ContainSubstring("smores")) + Ω(output).Should(ContainSubstring("3 Passed")) + Ω(output).Should(ContainSubstring("0 Failed")) + Ω(output).Should(ContainSubstring("0 Pending")) + Ω(output).Should(ContainSubstring("10 Skipped")) + }) + + It("should override the programmatic focus when told to skip", func() { + session := startGinkgo(pathToTest, "--noColor", "--skip=marshmallow|failing") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + Ω(output).ShouldNot(ContainSubstring("marshmallow")) + 
Ω(output).Should(ContainSubstring("chocolate")) + Ω(output).Should(ContainSubstring("smores")) + Ω(output).Should(ContainSubstring("10 Passed")) + Ω(output).Should(ContainSubstring("0 Failed")) + Ω(output).Should(ContainSubstring("1 Pending")) + Ω(output).Should(ContainSubstring("2 Skipped")) + }) + + It("should run the race detector when told to", func() { + session := startGinkgo(pathToTest, "--noColor", "--race") + Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("WARNING: DATA RACE")) + }) + + It("should randomize tests when told to", func() { + session := startGinkgo(pathToTest, "--noColor", "--randomizeAllSpecs", "--seed=21") + Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE)) + output := string(session.Out.Contents()) + + orders := getRandomOrders(output) + Ω(orders[0]).ShouldNot(BeNumerically("<", orders[1])) + }) + + It("should skip measurements when told to", func() { + session := startGinkgo(pathToTest, "--skipMeasurements") + Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE)) + output := string(session.Out.Contents()) + + Ω(output).ShouldNot(ContainSubstring("Ran 3 samples:"), "has a measurement") + Ω(output).Should(ContainSubstring("3 Skipped")) + }) + + It("should watch for slow specs", func() { + session := startGinkgo(pathToTest, "--slowSpecThreshold=0.05") + Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("SLOW TEST")) + Ω(output).Should(ContainSubstring("should honor -slowSpecThreshold")) + }) + + It("should pass additional arguments in", func() { + session := startGinkgo(pathToTest, "--", "--customFlag=madagascar") + Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("CUSTOM_FLAG: madagascar")) + }) + + It("should print out full stack traces for failures when told to", func() { + session := startGinkgo(pathToTest, "--focus=a failing test", "--trace") + Eventually(session).Should(gexec.Exit(1)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("Full Stack Trace")) + }) + + It("should fail fast when told to", func() { + pathToTest = tmpPath("fail") + copyIn("fail_fixture", pathToTest) + session := startGinkgo(pathToTest, "--failFast") + Eventually(session).Should(gexec.Exit(1)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("1 Failed")) + Ω(output).Should(ContainSubstring("15 Skipped")) + }) + + It("should perform a dry run when told to", func() { + pathToTest = tmpPath("fail") + copyIn("fail_fixture", pathToTest) + session := startGinkgo(pathToTest, "--dryRun", "-v") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("synchronous failures")) + Ω(output).Should(ContainSubstring("16 Specs")) + Ω(output).Should(ContainSubstring("0 Passed")) + Ω(output).Should(ContainSubstring("0 Failed")) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/integration.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/integration.go new file mode 100644 index 0000000000000..76ab1b7282d8e --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/integration.go @@ -0,0 +1 @@ +package integration diff --git 
a/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/integration_suite_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/integration_suite_test.go new file mode 100644 index 0000000000000..0ad407c8453a8 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/integration_suite_test.go @@ -0,0 +1,89 @@ +package integration_test + +import ( + "io" + "io/ioutil" + "os" + "os/exec" + "path/filepath" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + "github.com/onsi/gomega/gexec" + + "testing" + "time" +) + +var tmpDir string +var pathToGinkgo string + +func TestIntegration(t *testing.T) { + SetDefaultEventuallyTimeout(15 * time.Second) + RegisterFailHandler(Fail) + RunSpecs(t, "Integration Suite") +} + +var _ = SynchronizedBeforeSuite(func() []byte { + pathToGinkgo, err := gexec.Build("github.com/onsi/ginkgo/ginkgo") + Ω(err).ShouldNot(HaveOccurred()) + return []byte(pathToGinkgo) +}, func(computedPathToGinkgo []byte) { + pathToGinkgo = string(computedPathToGinkgo) +}) + +var _ = BeforeEach(func() { + var err error + tmpDir, err = ioutil.TempDir("", "ginkgo-run") + Ω(err).ShouldNot(HaveOccurred()) +}) + +var _ = AfterEach(func() { + err := os.RemoveAll(tmpDir) + Ω(err).ShouldNot(HaveOccurred()) +}) + +var _ = SynchronizedAfterSuite(func() {}, func() { + gexec.CleanupBuildArtifacts() +}) + +func tmpPath(destination string) string { + return filepath.Join(tmpDir, destination) +} + +func copyIn(fixture string, destination string) { + err := os.MkdirAll(destination, 0777) + Ω(err).ShouldNot(HaveOccurred()) + + filepath.Walk(filepath.Join("_fixtures", fixture), func(path string, info os.FileInfo, err error) error { + if info.IsDir() { + return nil + } + + base := filepath.Base(path) + + src, err := os.Open(path) + Ω(err).ShouldNot(HaveOccurred()) + + dst, err := os.Create(filepath.Join(destination, base)) + Ω(err).ShouldNot(HaveOccurred()) + + _, err = io.Copy(dst, src) + Ω(err).ShouldNot(HaveOccurred()) + return nil + }) +} + +func ginkgoCommand(dir string, args ...string) *exec.Cmd { + cmd := exec.Command(pathToGinkgo, args...) + cmd.Dir = dir + + return cmd +} + +func startGinkgo(dir string, args ...string) *gexec.Session { + cmd := ginkgoCommand(dir, args...) + session, err := gexec.Start(cmd, GinkgoWriter, GinkgoWriter) + Ω(err).ShouldNot(HaveOccurred()) + return session +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/interrupt_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/interrupt_test.go new file mode 100644 index 0000000000000..dc3bf2842b14d --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/interrupt_test.go @@ -0,0 +1,51 @@ +package integration_test + +import ( + "os/exec" + + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" + "github.com/onsi/gomega/gbytes" + "github.com/onsi/gomega/gexec" +) + +var _ = Describe("Interrupt", func() { + var pathToTest string + BeforeEach(func() { + pathToTest = tmpPath("hanging") + copyIn("hanging_suite", pathToTest) + }) + + Context("when interrupting a suite", func() { + var session *gexec.Session + BeforeEach(func() { + //we need to signal the actual process, so we must compile the test first + var err error + cmd := exec.Command("go", "test", "-c") + cmd.Dir = pathToTest + session, err = gexec.Start(cmd, GinkgoWriter, GinkgoWriter) + Ω(err).ShouldNot(HaveOccurred()) + Eventually(session).Should(gexec.Exit(0)) + + //then run the compiled test directly + cmd = exec.Command("./hanging.test", "--test.v=true", "--ginkgo.noColor") + cmd.Dir = pathToTest + session, err = gexec.Start(cmd, GinkgoWriter, GinkgoWriter) + Ω(err).ShouldNot(HaveOccurred()) + + Eventually(session).Should(gbytes.Say("Sleeping...")) + session.Interrupt() + Eventually(session, 1000).Should(gexec.Exit(1)) + }) + + It("should emit the contents of the GinkgoWriter", func() { + Ω(session).Should(gbytes.Say("Just beginning")) + Ω(session).Should(gbytes.Say("Almost there...")) + Ω(session).Should(gbytes.Say("Hanging Out")) + }) + + It("should run the AfterSuite", func() { + Ω(session).Should(gbytes.Say("Heading Out After Suite")) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/precompiled_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/precompiled_test.go new file mode 100644 index 0000000000000..d9b78e0b27593 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/precompiled_test.go @@ -0,0 +1,53 @@ +package integration_test + +import ( + "os" + "os/exec" + "path/filepath" + + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" + "github.com/onsi/gomega/gbytes" + "github.com/onsi/gomega/gexec" +) + +var _ = Describe("ginkgo build", func() { + var pathToTest string + + BeforeEach(func() { + pathToTest = tmpPath("passing_ginkgo_tests") + copyIn("passing_ginkgo_tests", pathToTest) + session := startGinkgo(pathToTest, "build") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + Ω(output).Should(ContainSubstring("Compiling passing_ginkgo_tests")) + Ω(output).Should(ContainSubstring("compiled passing_ginkgo_tests.test")) + }) + + It("should build a test binary", func() { + _, err := os.Stat(filepath.Join(pathToTest, "passing_ginkgo_tests.test")) + Ω(err).ShouldNot(HaveOccurred()) + }) + + It("should be possible to run the test binary directly", func() { + cmd := exec.Command("./passing_ginkgo_tests.test") + cmd.Dir = pathToTest + session, err := gexec.Start(cmd, GinkgoWriter, GinkgoWriter) + Ω(err).ShouldNot(HaveOccurred()) + Eventually(session).Should(gexec.Exit(0)) + Ω(session).Should(gbytes.Say("Running Suite: Passing_ginkgo_tests Suite")) + }) + + It("should be possible to run the test binary via ginkgo", func() { + session := startGinkgo(pathToTest, "./passing_ginkgo_tests.test") + Eventually(session).Should(gexec.Exit(0)) + Ω(session).Should(gbytes.Say("Running Suite: Passing_ginkgo_tests Suite")) + }) + + It("should be possible to run the test binary in parallel", func() { + session := startGinkgo(pathToTest, "--nodes=4", "--noColor", "./passing_ginkgo_tests.test") + Eventually(session).Should(gexec.Exit(0)) + Ω(session).Should(gbytes.Say("Running Suite: Passing_ginkgo_tests Suite")) + Ω(session).Should(gbytes.Say("Running in parallel across 4 nodes")) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/progress_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/progress_test.go new file mode 100644 index 0000000000000..8589c338a84c4 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/progress_test.go @@ -0,0 +1,75 @@ +package integration_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + "github.com/onsi/gomega/gbytes" + "github.com/onsi/gomega/gexec" +) + +var _ = Describe("Emitting progress", func() { + var pathToTest string + var session *gexec.Session + var args []string + + BeforeEach(func() { + args = []string{"--noColor"} + pathToTest = tmpPath("progress") + copyIn("progress_fixture", pathToTest) + }) + + JustBeforeEach(func() { + session = startGinkgo(pathToTest, args...) 
+ Eventually(session).Should(gexec.Exit(0)) + }) + + Context("with the -progress flag, but no -v flag", func() { + BeforeEach(func() { + args = append(args, "-progress") + }) + + It("should not emit progress", func() { + Ω(session).ShouldNot(gbytes.Say("[bB]efore")) + }) + }) + + Context("with the -v flag", func() { + BeforeEach(func() { + args = append(args, "-v") + }) + + It("should not emit progress", func() { + Ω(session).ShouldNot(gbytes.Say(`\[BeforeEach\]`)) + Ω(session).Should(gbytes.Say(`>outer before<`)) + }) + }) + + Context("with the -progress flag and the -v flag", func() { + BeforeEach(func() { + args = append(args, "-progress", "-v") + }) + + It("should emit progress (by writing to the GinkgoWriter)", func() { + Ω(session).Should(gbytes.Say(`\[BeforeEach\] ProgressFixture`)) + Ω(session).Should(gbytes.Say(`>outer before<`)) + + Ω(session).Should(gbytes.Say(`\[BeforeEach\] Inner Context`)) + Ω(session).Should(gbytes.Say(`>inner before<`)) + + Ω(session).Should(gbytes.Say(`\[JustBeforeEach\] ProgressFixture`)) + Ω(session).Should(gbytes.Say(`>outer just before<`)) + + Ω(session).Should(gbytes.Say(`\[JustBeforeEach\] Inner Context`)) + Ω(session).Should(gbytes.Say(`>inner just before<`)) + + Ω(session).Should(gbytes.Say(`\[It\] should emit progress as it goes`)) + Ω(session).Should(gbytes.Say(`>it<`)) + + Ω(session).Should(gbytes.Say(`\[AfterEach\] Inner Context`)) + Ω(session).Should(gbytes.Say(`>inner after<`)) + + Ω(session).Should(gbytes.Say(`\[AfterEach\] ProgressFixture`)) + Ω(session).Should(gbytes.Say(`>outer after<`)) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/run_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/run_test.go new file mode 100644 index 0000000000000..c346bb39e6a4b --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/run_test.go @@ -0,0 +1,373 @@ +package integration_test + +import ( + "runtime" + "strings" + + . "github.com/onsi/ginkgo" + "github.com/onsi/ginkgo/types" + . "github.com/onsi/gomega" + "github.com/onsi/gomega/gbytes" + "github.com/onsi/gomega/gexec" +) + +var _ = Describe("Running Specs", func() { + var pathToTest string + + Context("when pointed at the current directory", func() { + BeforeEach(func() { + pathToTest = tmpPath("ginkgo") + copyIn("passing_ginkgo_tests", pathToTest) + }) + + It("should run the tests in the working directory", func() { + session := startGinkgo(pathToTest, "--noColor") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite")) + Ω(output).Should(ContainSubstring("••••")) + Ω(output).Should(ContainSubstring("SUCCESS! -- 4 Passed")) + Ω(output).Should(ContainSubstring("Test Suite Passed")) + }) + }) + + Context("when passed an explicit package to run", func() { + BeforeEach(func() { + pathToTest = tmpPath("ginkgo") + copyIn("passing_ginkgo_tests", pathToTest) + }) + + It("should run the ginkgo style tests", func() { + session := startGinkgo(tmpDir, "--noColor", pathToTest) + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite")) + Ω(output).Should(ContainSubstring("••••")) + Ω(output).Should(ContainSubstring("SUCCESS! 
-- 4 Passed")) + Ω(output).Should(ContainSubstring("Test Suite Passed")) + }) + }) + + Context("when passed a number of packages to run", func() { + BeforeEach(func() { + pathToTest = tmpPath("ginkgo") + otherPathToTest := tmpPath("other") + copyIn("passing_ginkgo_tests", pathToTest) + copyIn("more_ginkgo_tests", otherPathToTest) + }) + + It("should run the ginkgo style tests", func() { + session := startGinkgo(tmpDir, "--noColor", "--succinct=false", "ginkgo", "./other") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite")) + Ω(output).Should(ContainSubstring("Running Suite: More_ginkgo_tests Suite")) + Ω(output).Should(ContainSubstring("Test Suite Passed")) + }) + }) + + Context("when passed a number of packages to run, some of which have focused tests", func() { + BeforeEach(func() { + pathToTest = tmpPath("ginkgo") + otherPathToTest := tmpPath("other") + focusedPathToTest := tmpPath("focused") + copyIn("passing_ginkgo_tests", pathToTest) + copyIn("more_ginkgo_tests", otherPathToTest) + copyIn("focused_fixture", focusedPathToTest) + }) + + It("should exit with a status code of 2 and explain why", func() { + session := startGinkgo(tmpDir, "--noColor", "--succinct=false", "-r") + Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite")) + Ω(output).Should(ContainSubstring("Running Suite: More_ginkgo_tests Suite")) + Ω(output).Should(ContainSubstring("Test Suite Passed")) + Ω(output).Should(ContainSubstring("Detected Programmatic Focus - setting exit status to %d", types.GINKGO_FOCUS_EXIT_CODE)) + }) + }) + + Context("when told to skipPackages", func() { + BeforeEach(func() { + pathToTest = tmpPath("ginkgo") + otherPathToTest := tmpPath("other") + focusedPathToTest := tmpPath("focused") + copyIn("passing_ginkgo_tests", pathToTest) + copyIn("more_ginkgo_tests", otherPathToTest) + copyIn("focused_fixture", focusedPathToTest) + }) + + It("should skip packages that match the list", func() { + session := startGinkgo(tmpDir, "--noColor", "--skipPackage=other,focused", "-r") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("Passing_ginkgo_tests Suite")) + Ω(output).ShouldNot(ContainSubstring("More_ginkgo_tests Suite")) + Ω(output).ShouldNot(ContainSubstring("Focused_fixture Suite")) + Ω(output).Should(ContainSubstring("Test Suite Passed")) + }) + + Context("when all packages are skipped", func() { + It("should not run anything, but still exit 0", func() { + session := startGinkgo(tmpDir, "--noColor", "--skipPackage=other,focused,ginkgo", "-r") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("All tests skipped!")) + Ω(output).ShouldNot(ContainSubstring("Passing_ginkgo_tests Suite")) + Ω(output).ShouldNot(ContainSubstring("More_ginkgo_tests Suite")) + Ω(output).ShouldNot(ContainSubstring("Focused_fixture Suite")) + Ω(output).ShouldNot(ContainSubstring("Test Suite Passed")) + }) + }) + }) + + Context("when there are no tests to run", func() { + It("should exit 1", func() { + session := startGinkgo(tmpDir, "--noColor", "--skipPackage=other,focused", "-r") + Eventually(session).Should(gexec.Exit(1)) + output := string(session.Err.Contents()) + + Ω(output).Should(ContainSubstring("Found no test 
suites")) + }) + }) + + Context("when told to randomizeSuites", func() { + BeforeEach(func() { + pathToTest = tmpPath("ginkgo") + otherPathToTest := tmpPath("other") + copyIn("passing_ginkgo_tests", pathToTest) + copyIn("more_ginkgo_tests", otherPathToTest) + }) + + It("should skip packages that match the regexp", func() { + session := startGinkgo(tmpDir, "--noColor", "--randomizeSuites", "-r", "--seed=2") + Eventually(session).Should(gexec.Exit(0)) + + Ω(session).Should(gbytes.Say("More_ginkgo_tests Suite")) + Ω(session).Should(gbytes.Say("Passing_ginkgo_tests Suite")) + + session = startGinkgo(tmpDir, "--noColor", "--randomizeSuites", "-r", "--seed=3") + Eventually(session).Should(gexec.Exit(0)) + + Ω(session).Should(gbytes.Say("Passing_ginkgo_tests Suite")) + Ω(session).Should(gbytes.Say("More_ginkgo_tests Suite")) + }) + }) + + Context("when pointed at a package with xunit style tests", func() { + BeforeEach(func() { + pathToTest = tmpPath("xunit") + copyIn("xunit_tests", pathToTest) + }) + + It("should run the xunit style tests", func() { + session := startGinkgo(pathToTest) + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("--- PASS: TestAlwaysTrue")) + Ω(output).Should(ContainSubstring("Test Suite Passed")) + }) + }) + + Context("when pointed at a package with no tests", func() { + BeforeEach(func() { + pathToTest = tmpPath("no_tests") + copyIn("no_tests", pathToTest) + }) + + It("should fail", func() { + session := startGinkgo(pathToTest, "--noColor") + Eventually(session).Should(gexec.Exit(1)) + + Ω(session.Err.Contents()).Should(ContainSubstring("Found no test suites")) + }) + }) + + Context("when pointed at a package that fails to compile", func() { + BeforeEach(func() { + pathToTest = tmpPath("does_not_compile") + copyIn("does_not_compile", pathToTest) + }) + + It("should fail", func() { + session := startGinkgo(pathToTest, "--noColor") + Eventually(session).Should(gexec.Exit(1)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("Failed to compile")) + }) + }) + + Context("when running in parallel", func() { + BeforeEach(func() { + pathToTest = tmpPath("ginkgo") + copyIn("passing_ginkgo_tests", pathToTest) + }) + + Context("with a specific number of -nodes", func() { + It("should use the specified number of nodes", func() { + session := startGinkgo(pathToTest, "--noColor", "-succinct", "-nodes=2") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + Ω(output).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs - 2 nodes •••• SUCCESS! [\d.µs]+`)) + Ω(output).Should(ContainSubstring("Test Suite Passed")) + }) + }) + + Context("with -p", func() { + It("it should autocompute the number of nodes", func() { + session := startGinkgo(pathToTest, "--noColor", "-succinct", "-p") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + nodes := runtime.NumCPU() + if nodes > 4 { + nodes = nodes - 1 + } + Ω(output).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs - %d nodes •••• SUCCESS! 
[\d.µs]+`, nodes)) + Ω(output).Should(ContainSubstring("Test Suite Passed")) + }) + }) + }) + + Context("when streaming in parallel", func() { + BeforeEach(func() { + pathToTest = tmpPath("ginkgo") + copyIn("passing_ginkgo_tests", pathToTest) + }) + + It("should print output in realtime", func() { + session := startGinkgo(pathToTest, "--noColor", "-stream", "-nodes=2") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring(`[1] Parallel test node 1/2.`)) + Ω(output).Should(ContainSubstring(`[2] Parallel test node 2/2.`)) + Ω(output).Should(ContainSubstring(`[1] SUCCESS!`)) + Ω(output).Should(ContainSubstring(`[2] SUCCESS!`)) + Ω(output).Should(ContainSubstring("Test Suite Passed")) + }) + }) + + Context("when running recursively", func() { + BeforeEach(func() { + passingTest := tmpPath("A") + otherPassingTest := tmpPath("E") + copyIn("passing_ginkgo_tests", passingTest) + copyIn("more_ginkgo_tests", otherPassingTest) + }) + + Context("when all the tests pass", func() { + It("should run all the tests (in succinct mode) and succeed", func() { + session := startGinkgo(tmpDir, "--noColor", "-r") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + outputLines := strings.Split(output, "\n") + Ω(outputLines[0]).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs •••• SUCCESS! [\d.µs]+ PASS`)) + Ω(outputLines[1]).Should(MatchRegexp(`\[\d+\] More_ginkgo_tests Suite - 2/2 specs •• SUCCESS! [\d.µs]+ PASS`)) + Ω(output).Should(ContainSubstring("Test Suite Passed")) + }) + }) + + Context("when one of the packages has a failing tests", func() { + BeforeEach(func() { + failingTest := tmpPath("C") + copyIn("failing_ginkgo_tests", failingTest) + }) + + It("should fail and stop running tests", func() { + session := startGinkgo(tmpDir, "--noColor", "-r") + Eventually(session).Should(gexec.Exit(1)) + output := string(session.Out.Contents()) + + outputLines := strings.Split(output, "\n") + Ω(outputLines[0]).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs •••• SUCCESS! [\d.µs]+ PASS`)) + Ω(outputLines[1]).Should(MatchRegexp(`\[\d+\] Failing_ginkgo_tests Suite - 2/2 specs`)) + Ω(output).Should(ContainSubstring("• Failure")) + Ω(output).ShouldNot(ContainSubstring("More_ginkgo_tests Suite")) + Ω(output).Should(ContainSubstring("Test Suite Failed")) + + Ω(output).Should(ContainSubstring("Summarizing 1 Failure:")) + Ω(output).Should(ContainSubstring("[Fail] FailingGinkgoTests [It] should fail")) + }) + }) + + Context("when one of the packages fails to compile", func() { + BeforeEach(func() { + doesNotCompileTest := tmpPath("C") + copyIn("does_not_compile", doesNotCompileTest) + }) + + It("should fail and stop running tests", func() { + session := startGinkgo(tmpDir, "--noColor", "-r") + Eventually(session).Should(gexec.Exit(1)) + output := string(session.Out.Contents()) + + outputLines := strings.Split(output, "\n") + Ω(outputLines[0]).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs •••• SUCCESS! 
[\d.µs]+ PASS`)) + Ω(outputLines[1]).Should(ContainSubstring("Failed to compile C:")) + Ω(output).ShouldNot(ContainSubstring("More_ginkgo_tests Suite")) + Ω(output).Should(ContainSubstring("Test Suite Failed")) + }) + }) + + Context("when either is the case, but the keepGoing flag is set", func() { + BeforeEach(func() { + doesNotCompileTest := tmpPath("B") + copyIn("does_not_compile", doesNotCompileTest) + + failingTest := tmpPath("C") + copyIn("failing_ginkgo_tests", failingTest) + }) + + It("should soldier on", func() { + session := startGinkgo(tmpDir, "--noColor", "-r", "-keepGoing") + Eventually(session).Should(gexec.Exit(1)) + output := string(session.Out.Contents()) + + outputLines := strings.Split(output, "\n") + Ω(outputLines[0]).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs •••• SUCCESS! [\d.µs]+ PASS`)) + Ω(outputLines[1]).Should(ContainSubstring("Failed to compile B:")) + Ω(output).Should(MatchRegexp(`\[\d+\] Failing_ginkgo_tests Suite - 2/2 specs`)) + Ω(output).Should(ContainSubstring("• Failure")) + Ω(output).Should(MatchRegexp(`\[\d+\] More_ginkgo_tests Suite - 2/2 specs •• SUCCESS! [\d.µs]+ PASS`)) + Ω(output).Should(ContainSubstring("Test Suite Failed")) + }) + }) + }) + + Context("when told to keep going --untilItFails", func() { + BeforeEach(func() { + copyIn("eventually_failing", tmpDir) + }) + + It("should keep rerunning the tests, until a failure occurs", func() { + session := startGinkgo(tmpDir, "--untilItFails", "--noColor") + Eventually(session).Should(gexec.Exit(1)) + Ω(session).Should(gbytes.Say("This was attempt #1")) + Ω(session).Should(gbytes.Say("This was attempt #2")) + Ω(session).Should(gbytes.Say("Tests failed on attempt #3")) + + //it should change the random seed between each test + lines := strings.Split(string(session.Out.Contents()), "\n") + randomSeeds := []string{} + for _, line := range lines { + if strings.Contains(line, "Random Seed:") { + randomSeeds = append(randomSeeds, strings.Split(line, ": ")[1]) + } + } + Ω(randomSeeds[0]).ShouldNot(Equal(randomSeeds[1])) + Ω(randomSeeds[1]).ShouldNot(Equal(randomSeeds[2])) + Ω(randomSeeds[0]).ShouldNot(Equal(randomSeeds[2])) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/subcommand_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/subcommand_test.go new file mode 100644 index 0000000000000..0fb51a6532cea --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/subcommand_test.go @@ -0,0 +1,364 @@ +package integration_test + +import ( + "io/ioutil" + "os" + "path/filepath" + "strings" + + . "github.com/onsi/ginkgo" + "github.com/onsi/ginkgo/types" + . 
"github.com/onsi/gomega" + "github.com/onsi/gomega/gexec" +) + +var _ = Describe("Subcommand", func() { + Describe("ginkgo bootstrap", func() { + var pkgPath string + BeforeEach(func() { + pkgPath = tmpPath("foo") + os.Mkdir(pkgPath, 0777) + }) + + It("should generate a bootstrap file, as long as one does not exist", func() { + session := startGinkgo(pkgPath, "bootstrap") + Eventually(session).Should(gexec.Exit(0)) + output := session.Out.Contents() + + Ω(output).Should(ContainSubstring("foo_suite_test.go")) + + content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_suite_test.go")) + Ω(err).ShouldNot(HaveOccurred()) + Ω(content).Should(ContainSubstring("package foo_test")) + Ω(content).Should(ContainSubstring("func TestFoo(t *testing.T) {")) + Ω(content).Should(ContainSubstring("RegisterFailHandler")) + Ω(content).Should(ContainSubstring("RunSpecs")) + + Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/ginkgo"`)) + Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/gomega"`)) + + session = startGinkgo(pkgPath, "bootstrap") + Eventually(session).Should(gexec.Exit(1)) + output = session.Out.Contents() + Ω(output).Should(ContainSubstring("foo_suite_test.go already exists")) + }) + + It("should import nodot declarations when told to", func() { + session := startGinkgo(pkgPath, "bootstrap", "--nodot") + Eventually(session).Should(gexec.Exit(0)) + output := session.Out.Contents() + + Ω(output).Should(ContainSubstring("foo_suite_test.go")) + + content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_suite_test.go")) + Ω(err).ShouldNot(HaveOccurred()) + Ω(content).Should(ContainSubstring("package foo_test")) + Ω(content).Should(ContainSubstring("func TestFoo(t *testing.T) {")) + Ω(content).Should(ContainSubstring("RegisterFailHandler")) + Ω(content).Should(ContainSubstring("RunSpecs")) + + Ω(content).Should(ContainSubstring("var It = ginkgo.It")) + Ω(content).Should(ContainSubstring("var Ω = gomega.Ω")) + + Ω(content).Should(ContainSubstring("\t" + `"github.com/onsi/ginkgo"`)) + Ω(content).Should(ContainSubstring("\t" + `"github.com/onsi/gomega"`)) + }) + + It("should generate an agouti bootstrap file when told to", func() { + session := startGinkgo(pkgPath, "bootstrap", "--agouti") + Eventually(session).Should(gexec.Exit(0)) + output := session.Out.Contents() + + Ω(output).Should(ContainSubstring("foo_suite_test.go")) + + content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_suite_test.go")) + Ω(err).ShouldNot(HaveOccurred()) + Ω(content).Should(ContainSubstring("package foo_test")) + Ω(content).Should(ContainSubstring("func TestFoo(t *testing.T) {")) + Ω(content).Should(ContainSubstring("RegisterFailHandler")) + Ω(content).Should(ContainSubstring("RunSpecs")) + + Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/ginkgo"`)) + Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/gomega"`)) + Ω(content).Should(ContainSubstring("\t" + `. 
"github.com/sclevine/agouti/core"`)) + }) + }) + + Describe("nodot", func() { + It("should update the declarations in the bootstrap file", func() { + pkgPath := tmpPath("foo") + os.Mkdir(pkgPath, 0777) + + session := startGinkgo(pkgPath, "bootstrap", "--nodot") + Eventually(session).Should(gexec.Exit(0)) + + byteContent, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_suite_test.go")) + Ω(err).ShouldNot(HaveOccurred()) + + content := string(byteContent) + content = strings.Replace(content, "var It =", "var MyIt =", -1) + content = strings.Replace(content, "var Ω = gomega.Ω\n", "", -1) + + err = ioutil.WriteFile(filepath.Join(pkgPath, "foo_suite_test.go"), []byte(content), os.ModePerm) + Ω(err).ShouldNot(HaveOccurred()) + + session = startGinkgo(pkgPath, "nodot") + Eventually(session).Should(gexec.Exit(0)) + + byteContent, err = ioutil.ReadFile(filepath.Join(pkgPath, "foo_suite_test.go")) + Ω(err).ShouldNot(HaveOccurred()) + + Ω(byteContent).Should(ContainSubstring("var MyIt = ginkgo.It")) + Ω(byteContent).ShouldNot(ContainSubstring("var It = ginkgo.It")) + Ω(byteContent).Should(ContainSubstring("var Ω = gomega.Ω")) + }) + }) + + Describe("ginkgo generate", func() { + var pkgPath string + + BeforeEach(func() { + pkgPath = tmpPath("foo_bar") + os.Mkdir(pkgPath, 0777) + }) + + Context("with no arguments", func() { + It("should generate a test file named after the package", func() { + session := startGinkgo(pkgPath, "generate") + Eventually(session).Should(gexec.Exit(0)) + output := session.Out.Contents() + + Ω(output).Should(ContainSubstring("foo_bar_test.go")) + + content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_bar_test.go")) + Ω(err).ShouldNot(HaveOccurred()) + Ω(content).Should(ContainSubstring("package foo_bar_test")) + Ω(content).Should(ContainSubstring(`var _ = Describe("FooBar", func() {`)) + Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/ginkgo"`)) + Ω(content).Should(ContainSubstring("\t" + `. 
"github.com/onsi/gomega"`)) + + session = startGinkgo(pkgPath, "generate") + Eventually(session).Should(gexec.Exit(1)) + output = session.Out.Contents() + + Ω(output).Should(ContainSubstring("foo_bar_test.go already exists")) + }) + }) + + Context("with an argument of the form: foo", func() { + It("should generate a test file named after the argument", func() { + session := startGinkgo(pkgPath, "generate", "baz_buzz") + Eventually(session).Should(gexec.Exit(0)) + output := session.Out.Contents() + + Ω(output).Should(ContainSubstring("baz_buzz_test.go")) + + content, err := ioutil.ReadFile(filepath.Join(pkgPath, "baz_buzz_test.go")) + Ω(err).ShouldNot(HaveOccurred()) + Ω(content).Should(ContainSubstring("package foo_bar_test")) + Ω(content).Should(ContainSubstring(`var _ = Describe("BazBuzz", func() {`)) + }) + }) + + Context("with an argument of the form: foo.go", func() { + It("should generate a test file named after the argument", func() { + session := startGinkgo(pkgPath, "generate", "baz_buzz.go") + Eventually(session).Should(gexec.Exit(0)) + output := session.Out.Contents() + + Ω(output).Should(ContainSubstring("baz_buzz_test.go")) + + content, err := ioutil.ReadFile(filepath.Join(pkgPath, "baz_buzz_test.go")) + Ω(err).ShouldNot(HaveOccurred()) + Ω(content).Should(ContainSubstring("package foo_bar_test")) + Ω(content).Should(ContainSubstring(`var _ = Describe("BazBuzz", func() {`)) + + }) + }) + + Context("with an argument of the form: foo_test", func() { + It("should generate a test file named after the argument", func() { + session := startGinkgo(pkgPath, "generate", "baz_buzz_test") + Eventually(session).Should(gexec.Exit(0)) + output := session.Out.Contents() + + Ω(output).Should(ContainSubstring("baz_buzz_test.go")) + + content, err := ioutil.ReadFile(filepath.Join(pkgPath, "baz_buzz_test.go")) + Ω(err).ShouldNot(HaveOccurred()) + Ω(content).Should(ContainSubstring("package foo_bar_test")) + Ω(content).Should(ContainSubstring(`var _ = Describe("BazBuzz", func() {`)) + }) + }) + + Context("with an argument of the form: foo_test.go", func() { + It("should generate a test file named after the argument", func() { + session := startGinkgo(pkgPath, "generate", "baz_buzz_test.go") + Eventually(session).Should(gexec.Exit(0)) + output := session.Out.Contents() + + Ω(output).Should(ContainSubstring("baz_buzz_test.go")) + + content, err := ioutil.ReadFile(filepath.Join(pkgPath, "baz_buzz_test.go")) + Ω(err).ShouldNot(HaveOccurred()) + Ω(content).Should(ContainSubstring("package foo_bar_test")) + Ω(content).Should(ContainSubstring(`var _ = Describe("BazBuzz", func() {`)) + }) + }) + + Context("with multiple arguments", func() { + It("should generate a test file named after the argument", func() { + session := startGinkgo(pkgPath, "generate", "baz", "buzz") + Eventually(session).Should(gexec.Exit(0)) + output := session.Out.Contents() + + Ω(output).Should(ContainSubstring("baz_test.go")) + Ω(output).Should(ContainSubstring("buzz_test.go")) + + content, err := ioutil.ReadFile(filepath.Join(pkgPath, "baz_test.go")) + Ω(err).ShouldNot(HaveOccurred()) + Ω(content).Should(ContainSubstring("package foo_bar_test")) + Ω(content).Should(ContainSubstring(`var _ = Describe("Baz", func() {`)) + + content, err = ioutil.ReadFile(filepath.Join(pkgPath, "buzz_test.go")) + Ω(err).ShouldNot(HaveOccurred()) + Ω(content).Should(ContainSubstring("package foo_bar_test")) + Ω(content).Should(ContainSubstring(`var _ = Describe("Buzz", func() {`)) + }) + }) + + Context("with nodot", func() { + It("should not import 
ginkgo or gomega", func() { + session := startGinkgo(pkgPath, "generate", "--nodot") + Eventually(session).Should(gexec.Exit(0)) + output := session.Out.Contents() + + Ω(output).Should(ContainSubstring("foo_bar_test.go")) + + content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_bar_test.go")) + Ω(err).ShouldNot(HaveOccurred()) + Ω(content).Should(ContainSubstring("package foo_bar_test")) + Ω(content).ShouldNot(ContainSubstring("\t" + `. "github.com/onsi/ginkgo"`)) + Ω(content).ShouldNot(ContainSubstring("\t" + `. "github.com/onsi/gomega"`)) + }) + }) + + Context("with agouti", func() { + It("should generate an agouti test file", func() { + session := startGinkgo(pkgPath, "generate", "--agouti") + Eventually(session).Should(gexec.Exit(0)) + output := session.Out.Contents() + + Ω(output).Should(ContainSubstring("foo_bar_test.go")) + + content, err := ioutil.ReadFile(filepath.Join(pkgPath, "foo_bar_test.go")) + Ω(err).ShouldNot(HaveOccurred()) + Ω(content).Should(ContainSubstring("package foo_bar_test")) + Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/ginkgo"`)) + Ω(content).Should(ContainSubstring("\t" + `. "github.com/onsi/gomega"`)) + Ω(content).Should(ContainSubstring("\t" + `. "github.com/sclevine/agouti/core"`)) + Ω(content).Should(ContainSubstring("\t" + `. "github.com/sclevine/agouti/matchers"`)) + Ω(content).Should(ContainSubstring("page, err = agoutiDriver.Page()")) + }) + }) + }) + + Describe("ginkgo bootstrap/generate", func() { + var pkgPath string + BeforeEach(func() { + pkgPath = tmpPath("some crazy-thing") + os.Mkdir(pkgPath, 0777) + }) + + Context("when the working directory is empty", func() { + It("generates correctly named bootstrap and generate files with a package name derived from the directory", func() { + session := startGinkgo(pkgPath, "bootstrap") + Eventually(session).Should(gexec.Exit(0)) + + content, err := ioutil.ReadFile(filepath.Join(pkgPath, "some_crazy_thing_suite_test.go")) + Ω(err).ShouldNot(HaveOccurred()) + Ω(content).Should(ContainSubstring("package some_crazy_thing_test")) + Ω(content).Should(ContainSubstring("SomeCrazyThing Suite")) + + session = startGinkgo(pkgPath, "generate") + Eventually(session).Should(gexec.Exit(0)) + + content, err = ioutil.ReadFile(filepath.Join(pkgPath, "some_crazy_thing_test.go")) + Ω(err).ShouldNot(HaveOccurred()) + Ω(content).Should(ContainSubstring("package some_crazy_thing_test")) + Ω(content).Should(ContainSubstring("SomeCrazyThing")) + }) + }) + + Context("when the working directory contains a file with a package name", func() { + BeforeEach(func() { + Ω(ioutil.WriteFile(filepath.Join(pkgPath, "foo.go"), []byte("package main\n\nfunc main() {}"), 0777)).Should(Succeed()) + }) + + It("generates correctly named bootstrap and generate files with the package name", func() { + session := startGinkgo(pkgPath, "bootstrap") + Eventually(session).Should(gexec.Exit(0)) + + content, err := ioutil.ReadFile(filepath.Join(pkgPath, "some_crazy_thing_suite_test.go")) + Ω(err).ShouldNot(HaveOccurred()) + Ω(content).Should(ContainSubstring("package main_test")) + Ω(content).Should(ContainSubstring("SomeCrazyThing Suite")) + + session = startGinkgo(pkgPath, "generate") + Eventually(session).Should(gexec.Exit(0)) + + content, err = ioutil.ReadFile(filepath.Join(pkgPath, "some_crazy_thing_test.go")) + Ω(err).ShouldNot(HaveOccurred()) + Ω(content).Should(ContainSubstring("package main_test")) + Ω(content).Should(ContainSubstring("SomeCrazyThing")) + }) + }) + }) + + Describe("ginkgo blur", func() { + It("should unfocus 
tests", func() { + pathToTest := tmpPath("focused") + copyIn("focused_fixture", pathToTest) + + session := startGinkgo(pathToTest, "--noColor") + Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE)) + output := session.Out.Contents() + + Ω(output).Should(ContainSubstring("3 Passed")) + Ω(output).Should(ContainSubstring("3 Skipped")) + + session = startGinkgo(pathToTest, "blur") + Eventually(session).Should(gexec.Exit(0)) + + session = startGinkgo(pathToTest, "--noColor") + Eventually(session).Should(gexec.Exit(0)) + output = session.Out.Contents() + Ω(output).Should(ContainSubstring("6 Passed")) + Ω(output).Should(ContainSubstring("0 Skipped")) + }) + }) + + Describe("ginkgo version", func() { + It("should print out the version info", func() { + session := startGinkgo("", "version") + Eventually(session).Should(gexec.Exit(0)) + output := session.Out.Contents() + + Ω(output).Should(MatchRegexp(`Ginkgo Version \d+\.\d+\.\d+`)) + }) + }) + + Describe("ginkgo help", func() { + It("should print out usage information", func() { + session := startGinkgo("", "help") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Err.Contents()) + + Ω(output).Should(MatchRegexp(`Ginkgo Version \d+\.\d+\.\d+`)) + Ω(output).Should(ContainSubstring("ginkgo watch")) + Ω(output).Should(ContainSubstring("-succinct")) + Ω(output).Should(ContainSubstring("-nodes")) + Ω(output).Should(ContainSubstring("ginkgo generate")) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/suite_setup_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/suite_setup_test.go new file mode 100644 index 0000000000000..2b9d9268bb139 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/suite_setup_test.go @@ -0,0 +1,177 @@ +package integration_test + +import ( + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" + "github.com/onsi/gomega/gexec" + "strings" +) + +var _ = Describe("SuiteSetup", func() { + var pathToTest string + + Context("when the BeforeSuite and AfterSuite pass", func() { + BeforeEach(func() { + pathToTest = tmpPath("suite_setup") + copyIn("passing_suite_setup", pathToTest) + }) + + It("should run the BeforeSuite once, then run all the tests", func() { + session := startGinkgo(pathToTest, "--noColor") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + Ω(strings.Count(output, "BEFORE SUITE")).Should(Equal(1)) + Ω(strings.Count(output, "AFTER SUITE")).Should(Equal(1)) + }) + + It("should run the BeforeSuite once per parallel node, then run all the tests", func() { + session := startGinkgo(pathToTest, "--noColor", "--nodes=2") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + Ω(strings.Count(output, "BEFORE SUITE")).Should(Equal(2)) + Ω(strings.Count(output, "AFTER SUITE")).Should(Equal(2)) + }) + }) + + Context("when the BeforeSuite fails", func() { + BeforeEach(func() { + pathToTest = tmpPath("suite_setup") + copyIn("failing_before_suite", pathToTest) + }) + + It("should run the BeforeSuite once, none of the tests, but it should run the AfterSuite", func() { + session := startGinkgo(pathToTest, "--noColor") + Eventually(session).Should(gexec.Exit(1)) + output := string(session.Out.Contents()) + + Ω(strings.Count(output, "BEFORE SUITE")).Should(Equal(1)) + Ω(strings.Count(output, "Test Panicked")).Should(Equal(1)) + Ω(strings.Count(output, "AFTER SUITE")).Should(Equal(1)) + Ω(output).ShouldNot(ContainSubstring("NEVER SEE THIS")) + }) + + It("should run the BeforeSuite once per parallel node, none of the tests, but it should run the AfterSuite for each node", func() { + session := startGinkgo(pathToTest, "--noColor", "--nodes=2") + Eventually(session).Should(gexec.Exit(1)) + output := string(session.Out.Contents()) + + Ω(strings.Count(output, "BEFORE SUITE")).Should(Equal(2)) + Ω(strings.Count(output, "Test Panicked")).Should(Equal(2)) + Ω(strings.Count(output, "AFTER SUITE")).Should(Equal(2)) + Ω(output).ShouldNot(ContainSubstring("NEVER SEE THIS")) + }) + }) + + Context("when the AfterSuite fails", func() { + BeforeEach(func() { + pathToTest = tmpPath("suite_setup") + copyIn("failing_after_suite", pathToTest) + }) + + It("should run the BeforeSuite once, none of the tests, but it should run the AfterSuite", func() { + session := startGinkgo(pathToTest, "--noColor") + Eventually(session).Should(gexec.Exit(1)) + output := string(session.Out.Contents()) + + Ω(strings.Count(output, "BEFORE SUITE")).Should(Equal(1)) + Ω(strings.Count(output, "AFTER SUITE")).Should(Equal(1)) + Ω(strings.Count(output, "Test Panicked")).Should(Equal(1)) + Ω(strings.Count(output, "A TEST")).Should(Equal(2)) + }) + + It("should run the BeforeSuite once per parallel node, none of the tests, but it should run the AfterSuite for each node", func() { + session := startGinkgo(pathToTest, "--noColor", "--nodes=2") + Eventually(session).Should(gexec.Exit(1)) + output := string(session.Out.Contents()) + + Ω(strings.Count(output, "BEFORE SUITE")).Should(Equal(2)) + Ω(strings.Count(output, "AFTER SUITE")).Should(Equal(2)) + Ω(strings.Count(output, "Test Panicked")).Should(Equal(2)) + Ω(strings.Count(output, "A TEST")).Should(Equal(2)) + }) + }) + + Context("With passing synchronized before and after suites", func() { + BeforeEach(func() { + pathToTest = tmpPath("suite_setup") + copyIn("synchronized_setup_tests", 
pathToTest) + }) + + Context("when run with one node", func() { + It("should do all the work on that one node", func() { + session := startGinkgo(pathToTest, "--noColor") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("BEFORE_A_1\nBEFORE_B_1: DATA")) + Ω(output).Should(ContainSubstring("AFTER_A_1\nAFTER_B_1")) + }) + }) + + Context("when run across multiple nodes", func() { + It("should run the first BeforeSuite function (BEFORE_A) on node 1, the second (BEFORE_B) on all the nodes, the first AfterSuite (AFTER_A) on all the nodes, and then the second (AFTER_B) on Node 1 *after* everything else is finished", func() { + session := startGinkgo(pathToTest, "--noColor", "--nodes=3") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("BEFORE_A_1")) + Ω(output).Should(ContainSubstring("BEFORE_B_1: DATA")) + Ω(output).Should(ContainSubstring("BEFORE_B_2: DATA")) + Ω(output).Should(ContainSubstring("BEFORE_B_3: DATA")) + + Ω(output).ShouldNot(ContainSubstring("BEFORE_A_2")) + Ω(output).ShouldNot(ContainSubstring("BEFORE_A_3")) + + Ω(output).Should(ContainSubstring("AFTER_A_1")) + Ω(output).Should(ContainSubstring("AFTER_A_2")) + Ω(output).Should(ContainSubstring("AFTER_A_3")) + Ω(output).Should(ContainSubstring("AFTER_B_1")) + + Ω(output).ShouldNot(ContainSubstring("AFTER_B_2")) + Ω(output).ShouldNot(ContainSubstring("AFTER_B_3")) + }) + }) + + Context("when streaming across multiple nodes", func() { + It("should run the first BeforeSuite function (BEFORE_A) on node 1, the second (BEFORE_B) on all the nodes, the first AfterSuite (AFTER_A) on all the nodes, and then the second (AFTER_B) on Node 1 *after* everything else is finished", func() { + session := startGinkgo(pathToTest, "--noColor", "--nodes=3", "--stream") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("[1] BEFORE_A_1")) + Ω(output).Should(ContainSubstring("[1] BEFORE_B_1: DATA")) + Ω(output).Should(ContainSubstring("[2] BEFORE_B_2: DATA")) + Ω(output).Should(ContainSubstring("[3] BEFORE_B_3: DATA")) + + Ω(output).ShouldNot(ContainSubstring("BEFORE_A_2")) + Ω(output).ShouldNot(ContainSubstring("BEFORE_A_3")) + + Ω(output).Should(ContainSubstring("[1] AFTER_A_1")) + Ω(output).Should(ContainSubstring("[2] AFTER_A_2")) + Ω(output).Should(ContainSubstring("[3] AFTER_A_3")) + Ω(output).Should(ContainSubstring("[1] AFTER_B_1")) + + Ω(output).ShouldNot(ContainSubstring("AFTER_B_2")) + Ω(output).ShouldNot(ContainSubstring("AFTER_B_3")) + }) + }) + }) + + Context("With a failing synchronized before suite", func() { + BeforeEach(func() { + pathToTest = tmpPath("suite_setup") + copyIn("exiting_synchronized_setup_tests", pathToTest) + }) + + It("should fail and let the user know that node 1 disappeared prematurely", func() { + session := startGinkgo(pathToTest, "--noColor", "--nodes=3") + Eventually(session).Should(gexec.Exit(1)) + output := string(session.Out.Contents()) + + Ω(output).Should(ContainSubstring("Node 1 disappeared before completing BeforeSuite")) + Ω(output).Should(ContainSubstring("Ginkgo timed out waiting for all parallel nodes to end")) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/tags_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/tags_test.go new file mode 100644 index 0000000000000..626635bf5c53b --- /dev/null +++ 
b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/tags_test.go @@ -0,0 +1,27 @@ +package integration_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + "github.com/onsi/gomega/gexec" +) + +var _ = Describe("Tags", func() { + var pathToTest string + BeforeEach(func() { + pathToTest = tmpPath("tags") + copyIn("tags_tests", pathToTest) + }) + + It("should honor the passed in -tags flag", func() { + session := startGinkgo(pathToTest, "--noColor") + Eventually(session).Should(gexec.Exit(0)) + output := string(session.Out.Contents()) + Ω(output).Should(ContainSubstring("Ran 1 of 1 Specs")) + + session = startGinkgo(pathToTest, "--noColor", "-tags=complex_tests") + Eventually(session).Should(gexec.Exit(0)) + output = string(session.Out.Contents()) + Ω(output).Should(ContainSubstring("Ran 3 of 3 Specs")) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/verbose_and_succinct_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/verbose_and_succinct_test.go new file mode 100644 index 0000000000000..470affdf7c66a --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/verbose_and_succinct_test.go @@ -0,0 +1,80 @@ +package integration_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + "github.com/onsi/gomega/gexec" +) + +var _ = Describe("Verbose And Succinct Mode", func() { + var pathToTest string + var otherPathToTest string + + Context("when running one package", func() { + BeforeEach(func() { + pathToTest = tmpPath("ginkgo") + copyIn("passing_ginkgo_tests", pathToTest) + }) + + It("should default to non-succinct mode", func() { + session := startGinkgo(pathToTest, "--noColor") + Eventually(session).Should(gexec.Exit(0)) + output := session.Out.Contents() + + Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite")) + }) + }) + + Context("when running more than one package", func() { + BeforeEach(func() { + pathToTest = tmpPath("ginkgo") + copyIn("passing_ginkgo_tests", pathToTest) + otherPathToTest = tmpPath("more_ginkgo") + copyIn("more_ginkgo_tests", otherPathToTest) + }) + + Context("with no flags set", func() { + It("should default to succinct mode", func() { + session := startGinkgo(pathToTest, "--noColor", pathToTest, otherPathToTest) + Eventually(session).Should(gexec.Exit(0)) + output := session.Out.Contents() + + Ω(output).Should(ContainSubstring("] Passing_ginkgo_tests Suite - 4/4 specs •••• SUCCESS!")) + Ω(output).Should(ContainSubstring("] More_ginkgo_tests Suite - 2/2 specs •• SUCCESS!")) + }) + }) + + Context("with --succinct=false", func() { + It("should not be in succinct mode", func() { + session := startGinkgo(pathToTest, "--noColor", "--succinct=false", pathToTest, otherPathToTest) + Eventually(session).Should(gexec.Exit(0)) + output := session.Out.Contents() + + Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite")) + Ω(output).Should(ContainSubstring("Running Suite: More_ginkgo_tests Suite")) + }) + }) + + Context("with -v", func() { + It("should not be in succinct mode, but should be verbose", func() { + session := startGinkgo(pathToTest, "--noColor", "-v", pathToTest, otherPathToTest) + Eventually(session).Should(gexec.Exit(0)) + output := session.Out.Contents() + + Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite")) + Ω(output).Should(ContainSubstring("Running Suite: More_ginkgo_tests Suite")) + Ω(output).Should(ContainSubstring("should proxy strings")) + 
Ω(output).Should(ContainSubstring("should always pass")) + }) + + It("should emit output from Bys", func() { + session := startGinkgo(pathToTest, "--noColor", "-v", pathToTest) + Eventually(session).Should(gexec.Exit(0)) + output := session.Out.Contents() + + Ω(output).Should(ContainSubstring("emitting one By")) + Ω(output).Should(ContainSubstring("emitting another By")) + }) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/watch_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/watch_test.go new file mode 100644 index 0000000000000..bbbcf36aa98ac --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/integration/watch_test.go @@ -0,0 +1,239 @@ +package integration_test + +import ( + "io/ioutil" + "os" + "path/filepath" + "time" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + "github.com/onsi/gomega/gbytes" + "github.com/onsi/gomega/gexec" +) + +var _ = Describe("Watch", func() { + var rootPath string + var pathA string + var pathB string + var pathC string + var session *gexec.Session + + BeforeEach(func() { + rootPath = tmpPath("root") + pathA = filepath.Join(rootPath, "src", "github.com", "onsi", "A") + pathB = filepath.Join(rootPath, "src", "github.com", "onsi", "B") + pathC = filepath.Join(rootPath, "src", "github.com", "onsi", "C") + + err := os.MkdirAll(pathA, 0700) + Ω(err).ShouldNot(HaveOccurred()) + + err = os.MkdirAll(pathB, 0700) + Ω(err).ShouldNot(HaveOccurred()) + + err = os.MkdirAll(pathC, 0700) + Ω(err).ShouldNot(HaveOccurred()) + + copyIn(filepath.Join("watch_fixtures", "A"), pathA) + copyIn(filepath.Join("watch_fixtures", "B"), pathB) + copyIn(filepath.Join("watch_fixtures", "C"), pathC) + }) + + startGinkgoWithGopath := func(args ...string) *gexec.Session { + cmd := ginkgoCommand(rootPath, args...) + cmd.Env = append([]string{"GOPATH=" + rootPath + ":" + os.Getenv("GOPATH")}, os.Environ()...) + session, err := gexec.Start(cmd, GinkgoWriter, GinkgoWriter) + Ω(err).ShouldNot(HaveOccurred()) + return session + } + + modifyFile := func(path string) { + time.Sleep(time.Second) + content, err := ioutil.ReadFile(path) + Ω(err).ShouldNot(HaveOccurred()) + content = append(content, []byte("//")...) 
+ err = ioutil.WriteFile(path, content, 0666) + Ω(err).ShouldNot(HaveOccurred()) + } + + modifyCode := func(pkgToModify string) { + modifyFile(filepath.Join(rootPath, "src", "github.com", "onsi", pkgToModify, pkgToModify+".go")) + } + + modifyTest := func(pkgToModify string) { + modifyFile(filepath.Join(rootPath, "src", "github.com", "onsi", pkgToModify, pkgToModify+"_test.go")) + } + + AfterEach(func() { + if session != nil { + session.Kill().Wait() + } + }) + + It("should be set up correctly", func() { + session = startGinkgoWithGopath("-r") + Eventually(session).Should(gexec.Exit(0)) + Ω(session.Out.Contents()).Should(ContainSubstring("A Suite")) + Ω(session.Out.Contents()).Should(ContainSubstring("B Suite")) + Ω(session.Out.Contents()).Should(ContainSubstring("C Suite")) + Ω(session.Out.Contents()).Should(ContainSubstring("Ginkgo ran 3 suites")) + }) + + Context("when watching just one test suite", func() { + It("should immediately run, and should rerun when the test suite changes", func() { + session = startGinkgoWithGopath("watch", "-succinct", pathA) + Eventually(session).Should(gbytes.Say("A Suite")) + modifyCode("A") + Eventually(session).Should(gbytes.Say("Detected changes in")) + Eventually(session).Should(gbytes.Say("A Suite")) + session.Kill().Wait() + }) + }) + + Context("when watching several test suites", func() { + It("should not immediately run, but should rerun a test when its code changes", func() { + session = startGinkgoWithGopath("watch", "-succinct", "-r") + Eventually(session).Should(gbytes.Say("Identified 3 test suites")) + Consistently(session).ShouldNot(gbytes.Say("A Suite|B Suite|C Suite")) + modifyCode("A") + Eventually(session).Should(gbytes.Say("Detected changes in")) + Eventually(session).Should(gbytes.Say("A Suite")) + Consistently(session).ShouldNot(gbytes.Say("B Suite|C Suite")) + session.Kill().Wait() + }) + }) + + Describe("watching dependencies", func() { + Context("with a depth of 2", func() { + It("should watch down to that depth", func() { + session = startGinkgoWithGopath("watch", "-succinct", "-r", "-depth=2") + Eventually(session).Should(gbytes.Say("Identified 3 test suites")) + Eventually(session).Should(gbytes.Say(`A \[2 dependencies\]`)) + Eventually(session).Should(gbytes.Say(`B \[1 dependency\]`)) + Eventually(session).Should(gbytes.Say(`C \[0 dependencies\]`)) + + modifyCode("A") + Eventually(session).Should(gbytes.Say("Detected changes in")) + Eventually(session).Should(gbytes.Say("A Suite")) + Consistently(session).ShouldNot(gbytes.Say("B Suite|C Suite")) + + modifyCode("B") + Eventually(session).Should(gbytes.Say("Detected changes in")) + Eventually(session).Should(gbytes.Say("B Suite")) + Eventually(session).Should(gbytes.Say("A Suite")) + Consistently(session).ShouldNot(gbytes.Say("C Suite")) + + modifyCode("C") + Eventually(session).Should(gbytes.Say("Detected changes in")) + Eventually(session).Should(gbytes.Say("C Suite")) + Eventually(session).Should(gbytes.Say("B Suite")) + Eventually(session).Should(gbytes.Say("A Suite")) + }) + }) + + Context("with a depth of 1", func() { + It("should watch down to that depth", func() { + session = startGinkgoWithGopath("watch", "-succinct", "-r", "-depth=1") + Eventually(session).Should(gbytes.Say("Identified 3 test suites")) + Eventually(session).Should(gbytes.Say(`A \[1 dependency\]`)) + Eventually(session).Should(gbytes.Say(`B \[1 dependency\]`)) + Eventually(session).Should(gbytes.Say(`C \[0 dependencies\]`)) + + modifyCode("A") + Eventually(session).Should(gbytes.Say("Detected changes 
in")) + Eventually(session).Should(gbytes.Say("A Suite")) + Consistently(session).ShouldNot(gbytes.Say("B Suite|C Suite")) + + modifyCode("B") + Eventually(session).Should(gbytes.Say("Detected changes in")) + Eventually(session).Should(gbytes.Say("B Suite")) + Eventually(session).Should(gbytes.Say("A Suite")) + Consistently(session).ShouldNot(gbytes.Say("C Suite")) + + modifyCode("C") + Eventually(session).Should(gbytes.Say("Detected changes in")) + Eventually(session).Should(gbytes.Say("C Suite")) + Eventually(session).Should(gbytes.Say("B Suite")) + Consistently(session).ShouldNot(gbytes.Say("A Suite")) + }) + }) + + Context("with a depth of 0", func() { + It("should not watch any dependencies", func() { + session = startGinkgoWithGopath("watch", "-succinct", "-r", "-depth=0") + Eventually(session).Should(gbytes.Say("Identified 3 test suites")) + Eventually(session).Should(gbytes.Say(`A \[0 dependencies\]`)) + Eventually(session).Should(gbytes.Say(`B \[0 dependencies\]`)) + Eventually(session).Should(gbytes.Say(`C \[0 dependencies\]`)) + + modifyCode("A") + Eventually(session).Should(gbytes.Say("Detected changes in")) + Eventually(session).Should(gbytes.Say("A Suite")) + Consistently(session).ShouldNot(gbytes.Say("B Suite|C Suite")) + + modifyCode("B") + Eventually(session).Should(gbytes.Say("Detected changes in")) + Eventually(session).Should(gbytes.Say("B Suite")) + Consistently(session).ShouldNot(gbytes.Say("A Suite|C Suite")) + + modifyCode("C") + Eventually(session).Should(gbytes.Say("Detected changes in")) + Eventually(session).Should(gbytes.Say("C Suite")) + Consistently(session).ShouldNot(gbytes.Say("A Suite|B Suite")) + }) + }) + + It("should not trigger dependents when tests are changed", func() { + session = startGinkgoWithGopath("watch", "-succinct", "-r", "-depth=2") + Eventually(session).Should(gbytes.Say("Identified 3 test suites")) + Eventually(session).Should(gbytes.Say(`A \[2 dependencies\]`)) + Eventually(session).Should(gbytes.Say(`B \[1 dependency\]`)) + Eventually(session).Should(gbytes.Say(`C \[0 dependencies\]`)) + + modifyTest("A") + Eventually(session).Should(gbytes.Say("Detected changes in")) + Eventually(session).Should(gbytes.Say("A Suite")) + Consistently(session).ShouldNot(gbytes.Say("B Suite|C Suite")) + + modifyTest("B") + Eventually(session).Should(gbytes.Say("Detected changes in")) + Eventually(session).Should(gbytes.Say("B Suite")) + Consistently(session).ShouldNot(gbytes.Say("A Suite|C Suite")) + + modifyTest("C") + Eventually(session).Should(gbytes.Say("Detected changes in")) + Eventually(session).Should(gbytes.Say("C Suite")) + Consistently(session).ShouldNot(gbytes.Say("A Suite|B Suite")) + }) + }) + + Describe("when new test suite is added", func() { + It("should start monitoring that test suite", func() { + session = startGinkgoWithGopath("watch", "-succinct", "-r") + + Eventually(session).Should(gbytes.Say("Watching 3 suites")) + + pathD := filepath.Join(rootPath, "src", "github.com", "onsi", "D") + + err := os.MkdirAll(pathD, 0700) + Ω(err).ShouldNot(HaveOccurred()) + + copyIn(filepath.Join("watch_fixtures", "D"), pathD) + + Eventually(session).Should(gbytes.Say("Detected 1 new suite")) + Eventually(session).Should(gbytes.Say(`D \[1 dependency\]`)) + Eventually(session).Should(gbytes.Say("D Suite")) + + modifyCode("D") + + Eventually(session).Should(gbytes.Say("Detected changes in")) + Eventually(session).Should(gbytes.Say("D Suite")) + + modifyCode("C") + + Eventually(session).Should(gbytes.Say("Detected changes in")) + 
Eventually(session).Should(gbytes.Say("C Suite")) + Eventually(session).Should(gbytes.Say("D Suite")) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/codelocation/code_location.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/codelocation/code_location.go new file mode 100644 index 0000000000000..fa2f0bf730c06 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/codelocation/code_location.go @@ -0,0 +1,32 @@ +package codelocation + +import ( + "regexp" + "runtime" + "runtime/debug" + "strings" + + "github.com/onsi/ginkgo/types" +) + +func New(skip int) types.CodeLocation { + _, file, line, _ := runtime.Caller(skip + 1) + stackTrace := PruneStack(string(debug.Stack()), skip) + return types.CodeLocation{FileName: file, LineNumber: line, FullStackTrace: stackTrace} +} + +func PruneStack(fullStackTrace string, skip int) string { + stack := strings.Split(fullStackTrace, "\n") + if len(stack) > 2*(skip+1) { + stack = stack[2*(skip+1):] + } + prunedStack := []string{} + re := regexp.MustCompile(`\/ginkgo\/|\/pkg\/testing\/|\/pkg\/runtime\/`) + for i := 0; i < len(stack)/2; i++ { + if !re.Match([]byte(stack[i*2])) { + prunedStack = append(prunedStack, stack[i*2]) + prunedStack = append(prunedStack, stack[i*2+1]) + } + } + return strings.Join(prunedStack, "\n") +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/codelocation/code_location_suite_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/codelocation/code_location_suite_test.go new file mode 100644 index 0000000000000..f06abf3c560d5 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/codelocation/code_location_suite_test.go @@ -0,0 +1,13 @@ +package codelocation_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + "testing" +) + +func TestCodelocation(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "CodeLocation Suite") +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/codelocation/code_location_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/codelocation/code_location_test.go new file mode 100644 index 0000000000000..9f63f735289cc --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/codelocation/code_location_test.go @@ -0,0 +1,79 @@ +package codelocation_test + +import ( + "runtime" + . "github.com/onsi/ginkgo" + "github.com/onsi/ginkgo/internal/codelocation" + "github.com/onsi/ginkgo/types" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("CodeLocation", func() { + var ( + codeLocation types.CodeLocation + expectedFileName string + expectedLineNumber int + ) + + caller0 := func() { + codeLocation = codelocation.New(1) + } + + caller1 := func() { + _, expectedFileName, expectedLineNumber, _ = runtime.Caller(0) + expectedLineNumber += 2 + caller0() + } + + BeforeEach(func() { + caller1() + }) + + It("should use the passed in skip parameter to pick out the correct file & line number", func() { + Ω(codeLocation.FileName).Should(Equal(expectedFileName)) + Ω(codeLocation.LineNumber).Should(Equal(expectedLineNumber)) + }) + + Describe("stringer behavior", func() { + It("should stringify nicely", func() { + Ω(codeLocation.String()).Should(ContainSubstring("code_location_test.go:%d", expectedLineNumber)) + }) + }) + + //There's no better way than to test this private method as it + //goes out of its way to prune out ginkgo related code in the stack trace + Describe("PruneStack", func() { + It("should remove any references to ginkgo and pkg/testing and pkg/runtime", func() { + input := `/Skip/me +Skip: skip() +/Skip/me +Skip: skip() +/Users/whoever/gospace/src/github.com/onsi/ginkgo/whatever.go:10 (0x12314) +Something: Func() +/Users/whoever/gospace/src/github.com/onsi/ginkgo/whatever_else.go:10 (0x12314) +SomethingInternalToGinkgo: Func() +/usr/goroot/pkg/strings/oops.go:10 (0x12341) +Oops: BlowUp() +/Users/whoever/gospace/src/mycode/code.go:10 (0x12341) +MyCode: Func() +/Users/whoever/gospace/src/mycode/code_test.go:10 (0x12341) +MyCodeTest: Func() +/Users/whoever/gospace/src/mycode/code_suite_test.go:12 (0x37f08) +TestFoo: RunSpecs(t, "Foo Suite") +/usr/goroot/pkg/testing/testing.go:12 (0x37f08) +TestingT: Blah() +/usr/goroot/pkg/runtime/runtime.go:12 (0x37f08) +Something: Func() +` + prunedStack := codelocation.PruneStack(input, 1) + Ω(prunedStack).Should(Equal(`/usr/goroot/pkg/strings/oops.go:10 (0x12341) +Oops: BlowUp() +/Users/whoever/gospace/src/mycode/code.go:10 (0x12341) +MyCode: Func() +/Users/whoever/gospace/src/mycode/code_test.go:10 (0x12341) +MyCodeTest: Func() +/Users/whoever/gospace/src/mycode/code_suite_test.go:12 (0x37f08) +TestFoo: RunSpecs(t, "Foo Suite")`)) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/containernode/container_node.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/containernode/container_node.go new file mode 100644 index 0000000000000..0737746dcfe0a --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/containernode/container_node.go @@ -0,0 +1,151 @@ +package containernode + +import ( + "math/rand" + "sort" + + "github.com/onsi/ginkgo/internal/leafnodes" + "github.com/onsi/ginkgo/types" +) + +type subjectOrContainerNode struct { + containerNode *ContainerNode + subjectNode leafnodes.SubjectNode +} + +func (n subjectOrContainerNode) text() string { + if n.containerNode != nil { + return n.containerNode.Text() + } else { + return n.subjectNode.Text() + } +} + +type CollatedNodes struct { + Containers []*ContainerNode + Subject leafnodes.SubjectNode +} + +type ContainerNode struct { + text string + flag types.FlagType + codeLocation types.CodeLocation + + setupNodes []leafnodes.BasicNode + subjectAndContainerNodes []subjectOrContainerNode +} + +func New(text string, flag types.FlagType, codeLocation types.CodeLocation) *ContainerNode { + return &ContainerNode{ + text: text, + flag: flag, + codeLocation: codeLocation, + } +} + +func (container *ContainerNode) Shuffle(r *rand.Rand) { + 
sort.Sort(container) + permutation := r.Perm(len(container.subjectAndContainerNodes)) + shuffledNodes := make([]subjectOrContainerNode, len(container.subjectAndContainerNodes)) + for i, j := range permutation { + shuffledNodes[i] = container.subjectAndContainerNodes[j] + } + container.subjectAndContainerNodes = shuffledNodes +} + +func (node *ContainerNode) BackPropagateProgrammaticFocus() bool { + if node.flag == types.FlagTypePending { + return false + } + + shouldUnfocus := false + for _, subjectOrContainerNode := range node.subjectAndContainerNodes { + if subjectOrContainerNode.containerNode != nil { + shouldUnfocus = subjectOrContainerNode.containerNode.BackPropagateProgrammaticFocus() || shouldUnfocus + } else { + shouldUnfocus = (subjectOrContainerNode.subjectNode.Flag() == types.FlagTypeFocused) || shouldUnfocus + } + } + + if shouldUnfocus { + if node.flag == types.FlagTypeFocused { + node.flag = types.FlagTypeNone + } + return true + } + + return node.flag == types.FlagTypeFocused +} + +func (node *ContainerNode) Collate() []CollatedNodes { + return node.collate([]*ContainerNode{}) +} + +func (node *ContainerNode) collate(enclosingContainers []*ContainerNode) []CollatedNodes { + collated := make([]CollatedNodes, 0) + + containers := make([]*ContainerNode, len(enclosingContainers)) + copy(containers, enclosingContainers) + containers = append(containers, node) + + for _, subjectOrContainer := range node.subjectAndContainerNodes { + if subjectOrContainer.containerNode != nil { + collated = append(collated, subjectOrContainer.containerNode.collate(containers)...) + } else { + collated = append(collated, CollatedNodes{ + Containers: containers, + Subject: subjectOrContainer.subjectNode, + }) + } + } + + return collated +} + +func (node *ContainerNode) PushContainerNode(container *ContainerNode) { + node.subjectAndContainerNodes = append(node.subjectAndContainerNodes, subjectOrContainerNode{containerNode: container}) +} + +func (node *ContainerNode) PushSubjectNode(subject leafnodes.SubjectNode) { + node.subjectAndContainerNodes = append(node.subjectAndContainerNodes, subjectOrContainerNode{subjectNode: subject}) +} + +func (node *ContainerNode) PushSetupNode(setupNode leafnodes.BasicNode) { + node.setupNodes = append(node.setupNodes, setupNode) +} + +func (node *ContainerNode) SetupNodesOfType(nodeType types.SpecComponentType) []leafnodes.BasicNode { + nodes := []leafnodes.BasicNode{} + for _, setupNode := range node.setupNodes { + if setupNode.Type() == nodeType { + nodes = append(nodes, setupNode) + } + } + return nodes +} + +func (node *ContainerNode) Text() string { + return node.text +} + +func (node *ContainerNode) CodeLocation() types.CodeLocation { + return node.codeLocation +} + +func (node *ContainerNode) Flag() types.FlagType { + return node.flag +} + +//sort.Interface + +func (node *ContainerNode) Len() int { + return len(node.subjectAndContainerNodes) +} + +func (node *ContainerNode) Less(i, j int) bool { + return node.subjectAndContainerNodes[i].text() < node.subjectAndContainerNodes[j].text() +} + +func (node *ContainerNode) Swap(i, j int) { + node.subjectAndContainerNodes[i], node.subjectAndContainerNodes[j] = node.subjectAndContainerNodes[j], node.subjectAndContainerNodes[i] +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/containernode/container_node_suite_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/containernode/container_node_suite_test.go new file mode 100644 index 0000000000000..c6fc314ff57b5 --- /dev/null +++ 
b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/containernode/container_node_suite_test.go @@ -0,0 +1,13 @@ +package containernode_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + "testing" +) + +func TestContainernode(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Containernode Suite") +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/containernode/container_node_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/containernode/container_node_test.go new file mode 100644 index 0000000000000..b83844ac8101e --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/containernode/container_node_test.go @@ -0,0 +1,212 @@ +package containernode_test + +import ( + "math/rand" + "github.com/onsi/ginkgo/internal/leafnodes" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + "github.com/onsi/ginkgo/internal/codelocation" + . "github.com/onsi/ginkgo/internal/containernode" + "github.com/onsi/ginkgo/types" +) + +var _ = Describe("Container Node", func() { + var ( + codeLocation types.CodeLocation + container *ContainerNode + ) + + BeforeEach(func() { + codeLocation = codelocation.New(0) + container = New("description text", types.FlagTypeFocused, codeLocation) + }) + + Describe("creating a container node", func() { + It("can answer questions about itself", func() { + Ω(container.Text()).Should(Equal("description text")) + Ω(container.Flag()).Should(Equal(types.FlagTypeFocused)) + Ω(container.CodeLocation()).Should(Equal(codeLocation)) + }) + }) + + Describe("pushing setup nodes", func() { + It("can append setup nodes of various types and fetch them by type", func() { + befA := leafnodes.NewBeforeEachNode(func() {}, codelocation.New(0), 0, nil, 0) + befB := leafnodes.NewBeforeEachNode(func() {}, codelocation.New(0), 0, nil, 0) + aftA := leafnodes.NewAfterEachNode(func() {}, codelocation.New(0), 0, nil, 0) + aftB := leafnodes.NewAfterEachNode(func() {}, codelocation.New(0), 0, nil, 0) + jusBefA := leafnodes.NewJustBeforeEachNode(func() {}, codelocation.New(0), 0, nil, 0) + jusBefB := leafnodes.NewJustBeforeEachNode(func() {}, codelocation.New(0), 0, nil, 0) + + container.PushSetupNode(befA) + container.PushSetupNode(befB) + container.PushSetupNode(aftA) + container.PushSetupNode(aftB) + container.PushSetupNode(jusBefA) + container.PushSetupNode(jusBefB) + + subject := leafnodes.NewItNode("subject", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0) + container.PushSubjectNode(subject) + + Ω(container.SetupNodesOfType(types.SpecComponentTypeBeforeEach)).Should(Equal([]leafnodes.BasicNode{befA, befB})) + Ω(container.SetupNodesOfType(types.SpecComponentTypeAfterEach)).Should(Equal([]leafnodes.BasicNode{aftA, aftB})) + Ω(container.SetupNodesOfType(types.SpecComponentTypeJustBeforeEach)).Should(Equal([]leafnodes.BasicNode{jusBefA, jusBefB})) + Ω(container.SetupNodesOfType(types.SpecComponentTypeIt)).Should(BeEmpty()) //subjects are not setup nodes + }) + }) + + Context("With appended containers and subject nodes", func() { + var ( + itA, itB, innerItA, innerItB leafnodes.SubjectNode + innerContainer *ContainerNode + ) + + BeforeEach(func() { + itA = leafnodes.NewItNode("Banana", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0) + itB = leafnodes.NewItNode("Apple", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0) + + innerItA = leafnodes.NewItNode("inner A", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0) + innerItB = leafnodes.NewItNode("inner 
B", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0) + + innerContainer = New("Orange", types.FlagTypeNone, codelocation.New(0)) + + container.PushSubjectNode(itA) + container.PushContainerNode(innerContainer) + innerContainer.PushSubjectNode(innerItA) + innerContainer.PushSubjectNode(innerItB) + container.PushSubjectNode(itB) + }) + + Describe("Collating", func() { + It("should return a collated set of containers and subject nodes in the correct order", func() { + collated := container.Collate() + Ω(collated).Should(HaveLen(4)) + + Ω(collated[0]).Should(Equal(CollatedNodes{ + Containers: []*ContainerNode{container}, + Subject: itA, + })) + + Ω(collated[1]).Should(Equal(CollatedNodes{ + Containers: []*ContainerNode{container, innerContainer}, + Subject: innerItA, + })) + + Ω(collated[2]).Should(Equal(CollatedNodes{ + Containers: []*ContainerNode{container, innerContainer}, + Subject: innerItB, + })) + + Ω(collated[3]).Should(Equal(CollatedNodes{ + Containers: []*ContainerNode{container}, + Subject: itB, + })) + }) + }) + + Describe("Backpropagating Programmatic Focus", func() { + //This allows inner focused specs to override the focus of outer focussed + //specs and more closely maps to what a developer wants to happen + //when debugging a test suite + + Context("when a parent is focused *and* an inner subject is focused", func() { + BeforeEach(func() { + container = New("description text", types.FlagTypeFocused, codeLocation) + itA = leafnodes.NewItNode("A", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0) + container.PushSubjectNode(itA) + + innerContainer = New("Orange", types.FlagTypeNone, codelocation.New(0)) + container.PushContainerNode(innerContainer) + innerItA = leafnodes.NewItNode("inner A", func() {}, types.FlagTypeFocused, codelocation.New(0), 0, nil, 0) + innerContainer.PushSubjectNode(innerItA) + }) + + It("should unfocus the parent", func() { + container.BackPropagateProgrammaticFocus() + + Ω(container.Flag()).Should(Equal(types.FlagTypeNone)) + Ω(itA.Flag()).Should(Equal(types.FlagTypeNone)) + Ω(innerContainer.Flag()).Should(Equal(types.FlagTypeNone)) + Ω(innerItA.Flag()).Should(Equal(types.FlagTypeFocused)) + }) + }) + + Context("when a parent is focused *and* an inner container is focused", func() { + BeforeEach(func() { + container = New("description text", types.FlagTypeFocused, codeLocation) + itA = leafnodes.NewItNode("A", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0) + container.PushSubjectNode(itA) + + innerContainer = New("Orange", types.FlagTypeFocused, codelocation.New(0)) + container.PushContainerNode(innerContainer) + innerItA = leafnodes.NewItNode("inner A", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0) + innerContainer.PushSubjectNode(innerItA) + }) + + It("should unfocus the parent", func() { + container.BackPropagateProgrammaticFocus() + + Ω(container.Flag()).Should(Equal(types.FlagTypeNone)) + Ω(itA.Flag()).Should(Equal(types.FlagTypeNone)) + Ω(innerContainer.Flag()).Should(Equal(types.FlagTypeFocused)) + Ω(innerItA.Flag()).Should(Equal(types.FlagTypeNone)) + }) + }) + + Context("when a parent is pending and a child is focused", func() { + BeforeEach(func() { + container = New("description text", types.FlagTypeFocused, codeLocation) + itA = leafnodes.NewItNode("A", func() {}, types.FlagTypeNone, codelocation.New(0), 0, nil, 0) + container.PushSubjectNode(itA) + + innerContainer = New("Orange", types.FlagTypePending, codelocation.New(0)) + container.PushContainerNode(innerContainer) + innerItA = 
leafnodes.NewItNode("inner A", func() {}, types.FlagTypeFocused, codelocation.New(0), 0, nil, 0) + innerContainer.PushSubjectNode(innerItA) + }) + + It("should not do anything", func() { + container.BackPropagateProgrammaticFocus() + + Ω(container.Flag()).Should(Equal(types.FlagTypeFocused)) + Ω(itA.Flag()).Should(Equal(types.FlagTypeNone)) + Ω(innerContainer.Flag()).Should(Equal(types.FlagTypePending)) + Ω(innerItA.Flag()).Should(Equal(types.FlagTypeFocused)) + }) + }) + }) + + Describe("Shuffling", func() { + var unshuffledCollation []CollatedNodes + BeforeEach(func() { + unshuffledCollation = container.Collate() + + r := rand.New(rand.NewSource(17)) + container.Shuffle(r) + }) + + It("should sort, and then shuffle, the top level contents of the container", func() { + shuffledCollation := container.Collate() + Ω(shuffledCollation).Should(HaveLen(len(unshuffledCollation))) + Ω(shuffledCollation).ShouldNot(Equal(unshuffledCollation)) + + for _, entry := range unshuffledCollation { + Ω(shuffledCollation).Should(ContainElement(entry)) + } + + innerAIndex, innerBIndex := 0, 0 + for i, entry := range shuffledCollation { + if entry.Subject == innerItA { + innerAIndex = i + } else if entry.Subject == innerItB { + innerBIndex = i + } + } + + Ω(innerAIndex).Should(Equal(innerBIndex - 1)) + }) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/failer/failer.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/failer/failer.go new file mode 100644 index 0000000000000..4019666beae91 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/failer/failer.go @@ -0,0 +1,79 @@ +package failer + +import ( + "fmt" + "sync" + + "github.com/onsi/ginkgo/types" +) + +type Failer struct { + lock *sync.Mutex + failure types.SpecFailure + state types.SpecState +} + +func New() *Failer { + return &Failer{ + lock: &sync.Mutex{}, + state: types.SpecStatePassed, + } +} + +func (f *Failer) Panic(location types.CodeLocation, forwardedPanic interface{}) { + f.lock.Lock() + defer f.lock.Unlock() + + if f.state == types.SpecStatePassed { + f.state = types.SpecStatePanicked + f.failure = types.SpecFailure{ + Message: "Test Panicked", + Location: location, + ForwardedPanic: fmt.Sprintf("%v", forwardedPanic), + } + } +} + +func (f *Failer) Timeout(location types.CodeLocation) { + f.lock.Lock() + defer f.lock.Unlock() + + if f.state == types.SpecStatePassed { + f.state = types.SpecStateTimedOut + f.failure = types.SpecFailure{ + Message: "Timed out", + Location: location, + } + } +} + +func (f *Failer) Fail(message string, location types.CodeLocation) { + f.lock.Lock() + defer f.lock.Unlock() + + if f.state == types.SpecStatePassed { + f.state = types.SpecStateFailed + f.failure = types.SpecFailure{ + Message: message, + Location: location, + } + } +} + +func (f *Failer) Drain(componentType types.SpecComponentType, componentIndex int, componentCodeLocation types.CodeLocation) (types.SpecFailure, types.SpecState) { + f.lock.Lock() + defer f.lock.Unlock() + + failure := f.failure + outcome := f.state + if outcome != types.SpecStatePassed { + failure.ComponentType = componentType + failure.ComponentIndex = componentIndex + failure.ComponentCodeLocation = componentCodeLocation + } + + f.state = types.SpecStatePassed + f.failure = types.SpecFailure{} + + return failure, outcome +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/failer/failer_suite_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/failer/failer_suite_test.go new file mode 100644 
index 0000000000000..8dce7be9ac564 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/failer/failer_suite_test.go @@ -0,0 +1,13 @@ +package failer_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + "testing" +) + +func TestFailer(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Failer Suite") +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/failer/failer_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/failer/failer_test.go new file mode 100644 index 0000000000000..3da62db610171 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/failer/failer_test.go @@ -0,0 +1,125 @@ +package failer_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/ginkgo/internal/failer" + . "github.com/onsi/gomega" + + "github.com/onsi/ginkgo/internal/codelocation" + "github.com/onsi/ginkgo/types" +) + +var _ = Describe("Failer", func() { + var ( + failer *Failer + codeLocationA types.CodeLocation + codeLocationB types.CodeLocation + ) + + BeforeEach(func() { + codeLocationA = codelocation.New(0) + codeLocationB = codelocation.New(0) + failer = New() + }) + + Context("with no failures", func() { + It("should return success when drained", func() { + failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB) + Ω(failure).Should(BeZero()) + Ω(state).Should(Equal(types.SpecStatePassed)) + }) + }) + + Describe("Fail", func() { + It("should handle failures", func() { + failer.Fail("something failed", codeLocationA) + failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB) + Ω(failure).Should(Equal(types.SpecFailure{ + Message: "something failed", + Location: codeLocationA, + ForwardedPanic: "", + ComponentType: types.SpecComponentTypeIt, + ComponentIndex: 3, + ComponentCodeLocation: codeLocationB, + })) + Ω(state).Should(Equal(types.SpecStateFailed)) + }) + }) + + Describe("Panic", func() { + It("should handle panics", func() { + failer.Panic(codeLocationA, "some forwarded panic") + failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB) + Ω(failure).Should(Equal(types.SpecFailure{ + Message: "Test Panicked", + Location: codeLocationA, + ForwardedPanic: "some forwarded panic", + ComponentType: types.SpecComponentTypeIt, + ComponentIndex: 3, + ComponentCodeLocation: codeLocationB, + })) + Ω(state).Should(Equal(types.SpecStatePanicked)) + }) + }) + + Describe("Timeout", func() { + It("should handle timeouts", func() { + failer.Timeout(codeLocationA) + failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB) + Ω(failure).Should(Equal(types.SpecFailure{ + Message: "Timed out", + Location: codeLocationA, + ForwardedPanic: "", + ComponentType: types.SpecComponentTypeIt, + ComponentIndex: 3, + ComponentCodeLocation: codeLocationB, + })) + Ω(state).Should(Equal(types.SpecStateTimedOut)) + }) + }) + + Context("when multiple failures are registered", func() { + BeforeEach(func() { + failer.Fail("something failed", codeLocationA) + failer.Fail("something else failed", codeLocationA) + }) + + It("should only report the first one when drained", func() { + failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB) + + Ω(failure).Should(Equal(types.SpecFailure{ + Message: "something failed", + Location: codeLocationA, + ForwardedPanic: "", + ComponentType: types.SpecComponentTypeIt, + ComponentIndex: 3, + ComponentCodeLocation: codeLocationB, + })) + Ω(state).Should(Equal(types.SpecStateFailed)) + }) + + It("should 
report subsequent failures after being drained", func() { + failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB) + failer.Fail("yet another thing failed", codeLocationA) + + failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB) + + Ω(failure).Should(Equal(types.SpecFailure{ + Message: "yet another thing failed", + Location: codeLocationA, + ForwardedPanic: "", + ComponentType: types.SpecComponentTypeIt, + ComponentIndex: 3, + ComponentCodeLocation: codeLocationB, + })) + Ω(state).Should(Equal(types.SpecStateFailed)) + }) + + It("should report sucess on subsequent drains if no errors occur", func() { + failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB) + failure, state := failer.Drain(types.SpecComponentTypeIt, 3, codeLocationB) + Ω(failure).Should(BeZero()) + Ω(state).Should(Equal(types.SpecStatePassed)) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/benchmarker.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/benchmarker.go new file mode 100644 index 0000000000000..1c0ce0b115685 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/benchmarker.go @@ -0,0 +1,86 @@ +package leafnodes + +import ( + "math" + "time" + + "github.com/onsi/ginkgo/types" +) + +type benchmarker struct { + measurements map[string]*types.SpecMeasurement + orderCounter int +} + +func newBenchmarker() *benchmarker { + return &benchmarker{ + measurements: make(map[string]*types.SpecMeasurement, 0), + } +} + +func (b *benchmarker) Time(name string, body func(), info ...interface{}) (elapsedTime time.Duration) { + t := time.Now() + body() + elapsedTime = time.Since(t) + + measurement := b.getMeasurement(name, "Fastest Time", "Slowest Time", "Average Time", "s", info...) + measurement.Results = append(measurement.Results, elapsedTime.Seconds()) + + return +} + +func (b *benchmarker) RecordValue(name string, value float64, info ...interface{}) { + measurement := b.getMeasurement(name, "Smallest", " Largest", " Average", "", info...) 
+ measurement.Results = append(measurement.Results, value) +} + +func (b *benchmarker) getMeasurement(name string, smallestLabel string, largestLabel string, averageLabel string, units string, info ...interface{}) *types.SpecMeasurement { + measurement, ok := b.measurements[name] + if !ok { + var computedInfo interface{} + computedInfo = nil + if len(info) > 0 { + computedInfo = info[0] + } + measurement = &types.SpecMeasurement{ + Name: name, + Info: computedInfo, + Order: b.orderCounter, + SmallestLabel: smallestLabel, + LargestLabel: largestLabel, + AverageLabel: averageLabel, + Units: units, + Results: make([]float64, 0), + } + b.measurements[name] = measurement + b.orderCounter++ + } + + return measurement +} + +func (b *benchmarker) measurementsReport() map[string]*types.SpecMeasurement { + for _, measurement := range b.measurements { + measurement.Smallest = math.MaxFloat64 + measurement.Largest = -math.MaxFloat64 + sum := float64(0) + sumOfSquares := float64(0) + + for _, result := range measurement.Results { + if result > measurement.Largest { + measurement.Largest = result + } + if result < measurement.Smallest { + measurement.Smallest = result + } + sum += result + sumOfSquares += result * result + } + + n := float64(len(measurement.Results)) + measurement.Average = sum / n + measurement.StdDeviation = math.Sqrt(sumOfSquares/n - (sum/n)*(sum/n)) + } + + return b.measurements +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/interfaces.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/interfaces.go new file mode 100644 index 0000000000000..8c3902d601c4b --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/interfaces.go @@ -0,0 +1,19 @@ +package leafnodes + +import ( + "github.com/onsi/ginkgo/types" +) + +type BasicNode interface { + Type() types.SpecComponentType + Run() (types.SpecState, types.SpecFailure) + CodeLocation() types.CodeLocation +} + +type SubjectNode interface { + BasicNode + + Text() string + Flag() types.FlagType + Samples() int +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/it_node.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/it_node.go new file mode 100644 index 0000000000000..c76fe3a4512db --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/it_node.go @@ -0,0 +1,46 @@ +package leafnodes + +import ( + "github.com/onsi/ginkgo/internal/failer" + "github.com/onsi/ginkgo/types" + "time" +) + +type ItNode struct { + runner *runner + + flag types.FlagType + text string +} + +func NewItNode(text string, body interface{}, flag types.FlagType, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer, componentIndex int) *ItNode { + return &ItNode{ + runner: newRunner(body, codeLocation, timeout, failer, types.SpecComponentTypeIt, componentIndex), + flag: flag, + text: text, + } +} + +func (node *ItNode) Run() (outcome types.SpecState, failure types.SpecFailure) { + return node.runner.run() +} + +func (node *ItNode) Type() types.SpecComponentType { + return types.SpecComponentTypeIt +} + +func (node *ItNode) Text() string { + return node.text +} + +func (node *ItNode) Flag() types.FlagType { + return node.flag +} + +func (node *ItNode) CodeLocation() types.CodeLocation { + return node.runner.codeLocation +} + +func (node *ItNode) Samples() int { + return 1 +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/it_node_test.go 
b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/it_node_test.go new file mode 100644 index 0000000000000..29fa0c6e2a6a6 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/it_node_test.go @@ -0,0 +1,22 @@ +package leafnodes_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/ginkgo/internal/leafnodes" + . "github.com/onsi/gomega" + + "github.com/onsi/ginkgo/internal/codelocation" + "github.com/onsi/ginkgo/types" +) + +var _ = Describe("It Nodes", func() { + It("should report the correct type, text, flag, and code location", func() { + codeLocation := codelocation.New(0) + it := NewItNode("my it node", func() {}, types.FlagTypeFocused, codeLocation, 0, nil, 3) + Ω(it.Type()).Should(Equal(types.SpecComponentTypeIt)) + Ω(it.Flag()).Should(Equal(types.FlagTypeFocused)) + Ω(it.Text()).Should(Equal("my it node")) + Ω(it.CodeLocation()).Should(Equal(codeLocation)) + Ω(it.Samples()).Should(Equal(1)) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/leaf_node_suite_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/leaf_node_suite_test.go new file mode 100644 index 0000000000000..a7ba9e006eeed --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/leaf_node_suite_test.go @@ -0,0 +1,13 @@ +package leafnodes_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + "testing" +) + +func TestLeafNode(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "LeafNode Suite") +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/measure_node.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/measure_node.go new file mode 100644 index 0000000000000..efc3348c1b643 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/measure_node.go @@ -0,0 +1,61 @@ +package leafnodes + +import ( + "github.com/onsi/ginkgo/internal/failer" + "github.com/onsi/ginkgo/types" + "reflect" +) + +type MeasureNode struct { + runner *runner + + text string + flag types.FlagType + samples int + benchmarker *benchmarker +} + +func NewMeasureNode(text string, body interface{}, flag types.FlagType, codeLocation types.CodeLocation, samples int, failer *failer.Failer, componentIndex int) *MeasureNode { + benchmarker := newBenchmarker() + + wrappedBody := func() { + reflect.ValueOf(body).Call([]reflect.Value{reflect.ValueOf(benchmarker)}) + } + + return &MeasureNode{ + runner: newRunner(wrappedBody, codeLocation, 0, failer, types.SpecComponentTypeMeasure, componentIndex), + + text: text, + flag: flag, + samples: samples, + benchmarker: benchmarker, + } +} + +func (node *MeasureNode) Run() (outcome types.SpecState, failure types.SpecFailure) { + return node.runner.run() +} + +func (node *MeasureNode) MeasurementsReport() map[string]*types.SpecMeasurement { + return node.benchmarker.measurementsReport() +} + +func (node *MeasureNode) Type() types.SpecComponentType { + return types.SpecComponentTypeMeasure +} + +func (node *MeasureNode) Text() string { + return node.text +} + +func (node *MeasureNode) Flag() types.FlagType { + return node.flag +} + +func (node *MeasureNode) CodeLocation() types.CodeLocation { + return node.runner.codeLocation +} + +func (node *MeasureNode) Samples() int { + return node.samples +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/measure_node_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/measure_node_test.go new 
file mode 100644 index 0000000000000..ee4f70bf0be14 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/measure_node_test.go @@ -0,0 +1,109 @@ +package leafnodes_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/ginkgo/internal/leafnodes" + . "github.com/onsi/gomega" + + "time" + "github.com/onsi/ginkgo/internal/codelocation" + Failer "github.com/onsi/ginkgo/internal/failer" + "github.com/onsi/ginkgo/types" +) + +var _ = Describe("Measure Nodes", func() { + It("should report the correct type, text, flag, and code location", func() { + codeLocation := codelocation.New(0) + measure := NewMeasureNode("my measure node", func(b Benchmarker) {}, types.FlagTypeFocused, codeLocation, 10, nil, 3) + Ω(measure.Type()).Should(Equal(types.SpecComponentTypeMeasure)) + Ω(measure.Flag()).Should(Equal(types.FlagTypeFocused)) + Ω(measure.Text()).Should(Equal("my measure node")) + Ω(measure.CodeLocation()).Should(Equal(codeLocation)) + Ω(measure.Samples()).Should(Equal(10)) + }) + + Describe("benchmarking", func() { + var measure *MeasureNode + + Describe("Value", func() { + BeforeEach(func() { + measure = NewMeasureNode("the measurement", func(b Benchmarker) { + b.RecordValue("foo", 7, "info!") + b.RecordValue("foo", 2) + b.RecordValue("foo", 3) + b.RecordValue("bar", 0.3) + b.RecordValue("bar", 0.1) + b.RecordValue("bar", 0.5) + b.RecordValue("bar", 0.7) + }, types.FlagTypeFocused, codelocation.New(0), 1, Failer.New(), 3) + Ω(measure.Run()).Should(Equal(types.SpecStatePassed)) + }) + + It("records passed in values and reports on them", func() { + report := measure.MeasurementsReport() + Ω(report).Should(HaveLen(2)) + Ω(report["foo"].Name).Should(Equal("foo")) + Ω(report["foo"].Info).Should(Equal("info!")) + Ω(report["foo"].Order).Should(Equal(0)) + Ω(report["foo"].SmallestLabel).Should(Equal("Smallest")) + Ω(report["foo"].LargestLabel).Should(Equal(" Largest")) + Ω(report["foo"].AverageLabel).Should(Equal(" Average")) + Ω(report["foo"].Units).Should(Equal("")) + Ω(report["foo"].Results).Should(Equal([]float64{7, 2, 3})) + Ω(report["foo"].Smallest).Should(BeNumerically("==", 2)) + Ω(report["foo"].Largest).Should(BeNumerically("==", 7)) + Ω(report["foo"].Average).Should(BeNumerically("==", 4)) + Ω(report["foo"].StdDeviation).Should(BeNumerically("~", 2.16, 0.01)) + + Ω(report["bar"].Name).Should(Equal("bar")) + Ω(report["bar"].Info).Should(BeNil()) + Ω(report["bar"].SmallestLabel).Should(Equal("Smallest")) + Ω(report["bar"].Order).Should(Equal(1)) + Ω(report["bar"].LargestLabel).Should(Equal(" Largest")) + Ω(report["bar"].AverageLabel).Should(Equal(" Average")) + Ω(report["bar"].Units).Should(Equal("")) + Ω(report["bar"].Results).Should(Equal([]float64{0.3, 0.1, 0.5, 0.7})) + Ω(report["bar"].Smallest).Should(BeNumerically("==", 0.1)) + Ω(report["bar"].Largest).Should(BeNumerically("==", 0.7)) + Ω(report["bar"].Average).Should(BeNumerically("==", 0.4)) + Ω(report["bar"].StdDeviation).Should(BeNumerically("~", 0.22, 0.01)) + }) + }) + + Describe("Time", func() { + BeforeEach(func() { + measure = NewMeasureNode("the measurement", func(b Benchmarker) { + b.Time("foo", func() { + time.Sleep(100 * time.Millisecond) + }, "info!") + b.Time("foo", func() { + time.Sleep(200 * time.Millisecond) + }) + b.Time("foo", func() { + time.Sleep(170 * time.Millisecond) + }) + }, types.FlagTypeFocused, codelocation.New(0), 1, Failer.New(), 3) + Ω(measure.Run()).Should(Equal(types.SpecStatePassed)) + }) + + It("records passed in values and reports on them", func() { + report := 
measure.MeasurementsReport() + Ω(report).Should(HaveLen(1)) + Ω(report["foo"].Name).Should(Equal("foo")) + Ω(report["foo"].Info).Should(Equal("info!")) + Ω(report["foo"].SmallestLabel).Should(Equal("Fastest Time")) + Ω(report["foo"].LargestLabel).Should(Equal("Slowest Time")) + Ω(report["foo"].AverageLabel).Should(Equal("Average Time")) + Ω(report["foo"].Units).Should(Equal("s")) + Ω(report["foo"].Results).Should(HaveLen(3)) + Ω(report["foo"].Results[0]).Should(BeNumerically("~", 0.1, 0.01)) + Ω(report["foo"].Results[1]).Should(BeNumerically("~", 0.2, 0.01)) + Ω(report["foo"].Results[2]).Should(BeNumerically("~", 0.17, 0.01)) + Ω(report["foo"].Smallest).Should(BeNumerically("~", 0.1, 0.01)) + Ω(report["foo"].Largest).Should(BeNumerically("~", 0.2, 0.01)) + Ω(report["foo"].Average).Should(BeNumerically("~", 0.16, 0.01)) + Ω(report["foo"].StdDeviation).Should(BeNumerically("~", 0.04, 0.01)) + }) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/runner.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/runner.go new file mode 100644 index 0000000000000..04ec6dbf8bd04 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/runner.go @@ -0,0 +1,107 @@ +package leafnodes + +import ( + "fmt" + "github.com/onsi/ginkgo/internal/codelocation" + "github.com/onsi/ginkgo/internal/failer" + "github.com/onsi/ginkgo/types" + "reflect" + "time" +) + +type runner struct { + isAsync bool + asyncFunc func(chan<- interface{}) + syncFunc func() + codeLocation types.CodeLocation + timeoutThreshold time.Duration + nodeType types.SpecComponentType + componentIndex int + failer *failer.Failer +} + +func newRunner(body interface{}, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer, nodeType types.SpecComponentType, componentIndex int) *runner { + bodyType := reflect.TypeOf(body) + if bodyType.Kind() != reflect.Func { + panic(fmt.Sprintf("Expected a function but got something else at %v", codeLocation)) + } + + runner := &runner{ + codeLocation: codeLocation, + timeoutThreshold: timeout, + failer: failer, + nodeType: nodeType, + componentIndex: componentIndex, + } + + switch bodyType.NumIn() { + case 0: + runner.syncFunc = body.(func()) + return runner + case 1: + if !(bodyType.In(0).Kind() == reflect.Chan && bodyType.In(0).Elem().Kind() == reflect.Interface) { + panic(fmt.Sprintf("Must pass a Done channel to function at %v", codeLocation)) + } + + wrappedBody := func(done chan<- interface{}) { + bodyValue := reflect.ValueOf(body) + bodyValue.Call([]reflect.Value{reflect.ValueOf(done)}) + } + + runner.isAsync = true + runner.asyncFunc = wrappedBody + return runner + } + + panic(fmt.Sprintf("Too many arguments to function at %v", codeLocation)) +} + +func (r *runner) run() (outcome types.SpecState, failure types.SpecFailure) { + if r.isAsync { + return r.runAsync() + } else { + return r.runSync() + } +} + +func (r *runner) runAsync() (outcome types.SpecState, failure types.SpecFailure) { + done := make(chan interface{}, 1) + + go func() { + defer func() { + if e := recover(); e != nil { + r.failer.Panic(codelocation.New(2), e) + select { + case <-done: + break + default: + close(done) + } + } + }() + + r.asyncFunc(done) + }() + + select { + case <-done: + case <-time.After(r.timeoutThreshold): + r.failer.Timeout(r.codeLocation) + } + + failure, outcome = r.failer.Drain(r.nodeType, r.componentIndex, r.codeLocation) + return +} +func (r *runner) runSync() (outcome types.SpecState, failure types.SpecFailure) 
{ + defer func() { + if e := recover(); e != nil { + r.failer.Panic(codelocation.New(2), e) + } + + failure, outcome = r.failer.Drain(r.nodeType, r.componentIndex, r.codeLocation) + }() + + r.syncFunc() + + return +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/setup_nodes.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/setup_nodes.go new file mode 100644 index 0000000000000..6b725a631536e --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/setup_nodes.go @@ -0,0 +1,41 @@ +package leafnodes + +import ( + "github.com/onsi/ginkgo/internal/failer" + "github.com/onsi/ginkgo/types" + "time" +) + +type SetupNode struct { + runner *runner +} + +func (node *SetupNode) Run() (outcome types.SpecState, failure types.SpecFailure) { + return node.runner.run() +} + +func (node *SetupNode) Type() types.SpecComponentType { + return node.runner.nodeType +} + +func (node *SetupNode) CodeLocation() types.CodeLocation { + return node.runner.codeLocation +} + +func NewBeforeEachNode(body interface{}, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer, componentIndex int) *SetupNode { + return &SetupNode{ + runner: newRunner(body, codeLocation, timeout, failer, types.SpecComponentTypeBeforeEach, componentIndex), + } +} + +func NewAfterEachNode(body interface{}, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer, componentIndex int) *SetupNode { + return &SetupNode{ + runner: newRunner(body, codeLocation, timeout, failer, types.SpecComponentTypeAfterEach, componentIndex), + } +} + +func NewJustBeforeEachNode(body interface{}, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer, componentIndex int) *SetupNode { + return &SetupNode{ + runner: newRunner(body, codeLocation, timeout, failer, types.SpecComponentTypeJustBeforeEach, componentIndex), + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/setup_nodes_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/setup_nodes_test.go new file mode 100644 index 0000000000000..d5b9251f65256 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/setup_nodes_test.go @@ -0,0 +1,40 @@ +package leafnodes_test + +import ( + . "github.com/onsi/ginkgo" + "github.com/onsi/ginkgo/types" + . "github.com/onsi/gomega" + + . 
"github.com/onsi/ginkgo/internal/leafnodes" + + "github.com/onsi/ginkgo/internal/codelocation" +) + +var _ = Describe("Setup Nodes", func() { + Describe("BeforeEachNodes", func() { + It("should report the correct type and code location", func() { + codeLocation := codelocation.New(0) + beforeEach := NewBeforeEachNode(func() {}, codeLocation, 0, nil, 3) + Ω(beforeEach.Type()).Should(Equal(types.SpecComponentTypeBeforeEach)) + Ω(beforeEach.CodeLocation()).Should(Equal(codeLocation)) + }) + }) + + Describe("AfterEachNodes", func() { + It("should report the correct type and code location", func() { + codeLocation := codelocation.New(0) + afterEach := NewAfterEachNode(func() {}, codeLocation, 0, nil, 3) + Ω(afterEach.Type()).Should(Equal(types.SpecComponentTypeAfterEach)) + Ω(afterEach.CodeLocation()).Should(Equal(codeLocation)) + }) + }) + + Describe("JustBeforeEachNodes", func() { + It("should report the correct type and code location", func() { + codeLocation := codelocation.New(0) + justBeforeEach := NewJustBeforeEachNode(func() {}, codeLocation, 0, nil, 3) + Ω(justBeforeEach.Type()).Should(Equal(types.SpecComponentTypeJustBeforeEach)) + Ω(justBeforeEach.CodeLocation()).Should(Equal(codeLocation)) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/shared_runner_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/shared_runner_test.go new file mode 100644 index 0000000000000..9007d1ba75d1d --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/shared_runner_test.go @@ -0,0 +1,326 @@ +package leafnodes_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/ginkgo/internal/leafnodes" + . "github.com/onsi/gomega" + + "reflect" + "runtime" + "time" + + "github.com/onsi/ginkgo/internal/codelocation" + Failer "github.com/onsi/ginkgo/internal/failer" + "github.com/onsi/ginkgo/types" +) + +type runnable interface { + Run() (outcome types.SpecState, failure types.SpecFailure) + CodeLocation() types.CodeLocation +} + +func SynchronousSharedRunnerBehaviors(build func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable, componentType types.SpecComponentType, componentIndex int) { + var ( + outcome types.SpecState + failure types.SpecFailure + + failer *Failer.Failer + + componentCodeLocation types.CodeLocation + innerCodeLocation types.CodeLocation + + didRun bool + ) + + BeforeEach(func() { + failer = Failer.New() + componentCodeLocation = codelocation.New(0) + innerCodeLocation = codelocation.New(0) + + didRun = false + }) + + Describe("synchronous functions", func() { + Context("when the function passes", func() { + BeforeEach(func() { + outcome, failure = build(func() { + didRun = true + }, 0, failer, componentCodeLocation).Run() + }) + + It("should have a succesful outcome", func() { + Ω(didRun).Should(BeTrue()) + + Ω(outcome).Should(Equal(types.SpecStatePassed)) + Ω(failure).Should(BeZero()) + }) + }) + + Context("when a failure occurs", func() { + BeforeEach(func() { + outcome, failure = build(func() { + didRun = true + failer.Fail("bam", innerCodeLocation) + panic("should not matter") + }, 0, failer, componentCodeLocation).Run() + }) + + It("should return the failure", func() { + Ω(didRun).Should(BeTrue()) + + Ω(outcome).Should(Equal(types.SpecStateFailed)) + Ω(failure).Should(Equal(types.SpecFailure{ + Message: "bam", + Location: innerCodeLocation, + ForwardedPanic: "", + ComponentIndex: componentIndex, + ComponentType: 
componentType, + ComponentCodeLocation: componentCodeLocation, + })) + }) + }) + + Context("when a panic occurs", func() { + BeforeEach(func() { + outcome, failure = build(func() { + didRun = true + innerCodeLocation = codelocation.New(0) + panic("ack!") + }, 0, failer, componentCodeLocation).Run() + }) + + It("should return the panic", func() { + Ω(didRun).Should(BeTrue()) + + Ω(outcome).Should(Equal(types.SpecStatePanicked)) + Ω(failure.ForwardedPanic).Should(Equal("ack!")) + }) + }) + }) +} + +func AsynchronousSharedRunnerBehaviors(build func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable, componentType types.SpecComponentType, componentIndex int) { + var ( + outcome types.SpecState + failure types.SpecFailure + + failer *Failer.Failer + + componentCodeLocation types.CodeLocation + innerCodeLocation types.CodeLocation + + didRun bool + ) + + BeforeEach(func() { + failer = Failer.New() + componentCodeLocation = codelocation.New(0) + innerCodeLocation = codelocation.New(0) + + didRun = false + }) + + Describe("asynchronous functions", func() { + var timeoutDuration time.Duration + + BeforeEach(func() { + timeoutDuration = time.Duration(1 * float64(time.Second)) + }) + + Context("when running", func() { + It("should run the function as a goroutine, and block until it's done", func() { + initialNumberOfGoRoutines := runtime.NumGoroutine() + numberOfGoRoutines := 0 + + build(func(done Done) { + didRun = true + numberOfGoRoutines = runtime.NumGoroutine() + close(done) + }, timeoutDuration, failer, componentCodeLocation).Run() + + Ω(didRun).Should(BeTrue()) + Ω(numberOfGoRoutines).Should(BeNumerically(">=", initialNumberOfGoRoutines+1)) + }) + }) + + Context("when the function passes", func() { + BeforeEach(func() { + outcome, failure = build(func(done Done) { + didRun = true + close(done) + }, timeoutDuration, failer, componentCodeLocation).Run() + }) + + It("should have a succesful outcome", func() { + Ω(didRun).Should(BeTrue()) + Ω(outcome).Should(Equal(types.SpecStatePassed)) + Ω(failure).Should(BeZero()) + }) + }) + + Context("when the function fails", func() { + BeforeEach(func() { + outcome, failure = build(func(done Done) { + didRun = true + failer.Fail("bam", innerCodeLocation) + time.Sleep(20 * time.Millisecond) + panic("doesn't matter") + close(done) + }, 10*time.Millisecond, failer, componentCodeLocation).Run() + }) + + It("should return the failure", func() { + Ω(didRun).Should(BeTrue()) + + Ω(outcome).Should(Equal(types.SpecStateFailed)) + Ω(failure).Should(Equal(types.SpecFailure{ + Message: "bam", + Location: innerCodeLocation, + ForwardedPanic: "", + ComponentIndex: componentIndex, + ComponentType: componentType, + ComponentCodeLocation: componentCodeLocation, + })) + }) + }) + + Context("when the function times out", func() { + var guard chan struct{} + + BeforeEach(func() { + guard = make(chan struct{}) + outcome, failure = build(func(done Done) { + didRun = true + time.Sleep(20 * time.Millisecond) + close(guard) + panic("doesn't matter") + close(done) + }, 10*time.Millisecond, failer, componentCodeLocation).Run() + }) + + It("should return the timeout", func() { + <-guard + Ω(didRun).Should(BeTrue()) + + Ω(outcome).Should(Equal(types.SpecStateTimedOut)) + Ω(failure).Should(Equal(types.SpecFailure{ + Message: "Timed out", + Location: componentCodeLocation, + ForwardedPanic: "", + ComponentIndex: componentIndex, + ComponentType: componentType, + ComponentCodeLocation: componentCodeLocation, + })) + }) + }) + 
+ Context("when the function panics", func() { + BeforeEach(func() { + outcome, failure = build(func(done Done) { + didRun = true + innerCodeLocation = codelocation.New(0) + panic("ack!") + }, 100*time.Millisecond, failer, componentCodeLocation).Run() + }) + + It("should return the panic", func() { + Ω(didRun).Should(BeTrue()) + + Ω(outcome).Should(Equal(types.SpecStatePanicked)) + Ω(failure.ForwardedPanic).Should(Equal("ack!")) + }) + }) + }) +} + +func InvalidSharedRunnerBehaviors(build func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable, componentType types.SpecComponentType) { + var ( + failer *Failer.Failer + componentCodeLocation types.CodeLocation + innerCodeLocation types.CodeLocation + ) + + BeforeEach(func() { + failer = Failer.New() + componentCodeLocation = codelocation.New(0) + innerCodeLocation = codelocation.New(0) + }) + + Describe("invalid functions", func() { + Context("when passed something that's not a function", func() { + It("should panic", func() { + Ω(func() { + build("not a function", 0, failer, componentCodeLocation) + }).Should(Panic()) + }) + }) + + Context("when the function takes the wrong kind of argument", func() { + It("should panic", func() { + Ω(func() { + build(func(oops string) {}, 0, failer, componentCodeLocation) + }).Should(Panic()) + }) + }) + + Context("when the function takes more than one argument", func() { + It("should panic", func() { + Ω(func() { + build(func(done Done, oops string) {}, 0, failer, componentCodeLocation) + }).Should(Panic()) + }) + }) + }) +} + +var _ = Describe("Shared RunnableNode behavior", func() { + Describe("It Nodes", func() { + build := func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable { + return NewItNode("", body, types.FlagTypeFocused, componentCodeLocation, timeout, failer, 3) + } + + SynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeIt, 3) + AsynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeIt, 3) + InvalidSharedRunnerBehaviors(build, types.SpecComponentTypeIt) + }) + + Describe("Measure Nodes", func() { + build := func(body interface{}, _ time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable { + return NewMeasureNode("", func(Benchmarker) { + reflect.ValueOf(body).Call([]reflect.Value{}) + }, types.FlagTypeFocused, componentCodeLocation, 10, failer, 3) + } + + SynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeMeasure, 3) + }) + + Describe("BeforeEach Nodes", func() { + build := func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable { + return NewBeforeEachNode(body, componentCodeLocation, timeout, failer, 3) + } + + SynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeBeforeEach, 3) + AsynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeBeforeEach, 3) + InvalidSharedRunnerBehaviors(build, types.SpecComponentTypeBeforeEach) + }) + + Describe("AfterEach Nodes", func() { + build := func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable { + return NewAfterEachNode(body, componentCodeLocation, timeout, failer, 3) + } + + SynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeAfterEach, 3) + AsynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeAfterEach, 3) + InvalidSharedRunnerBehaviors(build, types.SpecComponentTypeAfterEach) + }) + + 
Describe("JustBeforeEach Nodes", func() { + build := func(body interface{}, timeout time.Duration, failer *Failer.Failer, componentCodeLocation types.CodeLocation) runnable { + return NewJustBeforeEachNode(body, componentCodeLocation, timeout, failer, 3) + } + + SynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeJustBeforeEach, 3) + AsynchronousSharedRunnerBehaviors(build, types.SpecComponentTypeJustBeforeEach, 3) + InvalidSharedRunnerBehaviors(build, types.SpecComponentTypeJustBeforeEach) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/suite_nodes.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/suite_nodes.go new file mode 100644 index 0000000000000..2ccc7dc0fb065 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/suite_nodes.go @@ -0,0 +1,54 @@ +package leafnodes + +import ( + "github.com/onsi/ginkgo/internal/failer" + "github.com/onsi/ginkgo/types" + "time" +) + +type SuiteNode interface { + Run(parallelNode int, parallelTotal int, syncHost string) bool + Passed() bool + Summary() *types.SetupSummary +} + +type simpleSuiteNode struct { + runner *runner + outcome types.SpecState + failure types.SpecFailure + runTime time.Duration +} + +func (node *simpleSuiteNode) Run(parallelNode int, parallelTotal int, syncHost string) bool { + t := time.Now() + node.outcome, node.failure = node.runner.run() + node.runTime = time.Since(t) + + return node.outcome == types.SpecStatePassed +} + +func (node *simpleSuiteNode) Passed() bool { + return node.outcome == types.SpecStatePassed +} + +func (node *simpleSuiteNode) Summary() *types.SetupSummary { + return &types.SetupSummary{ + ComponentType: node.runner.nodeType, + CodeLocation: node.runner.codeLocation, + State: node.outcome, + RunTime: node.runTime, + Failure: node.failure, + } +} + +func NewBeforeSuiteNode(body interface{}, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer) SuiteNode { + return &simpleSuiteNode{ + runner: newRunner(body, codeLocation, timeout, failer, types.SpecComponentTypeBeforeSuite, 0), + } +} + +func NewAfterSuiteNode(body interface{}, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer) SuiteNode { + return &simpleSuiteNode{ + runner: newRunner(body, codeLocation, timeout, failer, types.SpecComponentTypeAfterSuite, 0), + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/suite_nodes_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/suite_nodes_test.go new file mode 100644 index 0000000000000..246b329fe2ba1 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/suite_nodes_test.go @@ -0,0 +1,230 @@ +package leafnodes_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + . 
"github.com/onsi/ginkgo/internal/leafnodes" + + "time" + + "github.com/onsi/ginkgo/internal/codelocation" + Failer "github.com/onsi/ginkgo/internal/failer" + "github.com/onsi/ginkgo/types" +) + +var _ = Describe("SuiteNodes", func() { + Describe("BeforeSuite nodes", func() { + var befSuite SuiteNode + var failer *Failer.Failer + var codeLocation types.CodeLocation + var innerCodeLocation types.CodeLocation + var outcome bool + + BeforeEach(func() { + failer = Failer.New() + codeLocation = codelocation.New(0) + innerCodeLocation = codelocation.New(0) + }) + + Context("when the body passes", func() { + BeforeEach(func() { + befSuite = NewBeforeSuiteNode(func() { + time.Sleep(10 * time.Millisecond) + }, codeLocation, 0, failer) + outcome = befSuite.Run(0, 0, "") + }) + + It("should return true when run and report as passed", func() { + Ω(outcome).Should(BeTrue()) + Ω(befSuite.Passed()).Should(BeTrue()) + }) + + It("should have the correct summary", func() { + summary := befSuite.Summary() + Ω(summary.ComponentType).Should(Equal(types.SpecComponentTypeBeforeSuite)) + Ω(summary.CodeLocation).Should(Equal(codeLocation)) + Ω(summary.State).Should(Equal(types.SpecStatePassed)) + Ω(summary.RunTime).Should(BeNumerically(">=", 10*time.Millisecond)) + Ω(summary.Failure).Should(BeZero()) + }) + }) + + Context("when the body fails", func() { + BeforeEach(func() { + befSuite = NewBeforeSuiteNode(func() { + failer.Fail("oops", innerCodeLocation) + }, codeLocation, 0, failer) + outcome = befSuite.Run(0, 0, "") + }) + + It("should return false when run and report as failed", func() { + Ω(outcome).Should(BeFalse()) + Ω(befSuite.Passed()).Should(BeFalse()) + }) + + It("should have the correct summary", func() { + summary := befSuite.Summary() + Ω(summary.State).Should(Equal(types.SpecStateFailed)) + Ω(summary.Failure.Message).Should(Equal("oops")) + Ω(summary.Failure.Location).Should(Equal(innerCodeLocation)) + Ω(summary.Failure.ForwardedPanic).Should(BeEmpty()) + Ω(summary.Failure.ComponentIndex).Should(Equal(0)) + Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeBeforeSuite)) + Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation)) + }) + }) + + Context("when the body times out", func() { + BeforeEach(func() { + befSuite = NewBeforeSuiteNode(func(done Done) { + }, codeLocation, time.Millisecond, failer) + outcome = befSuite.Run(0, 0, "") + }) + + It("should return false when run and report as failed", func() { + Ω(outcome).Should(BeFalse()) + Ω(befSuite.Passed()).Should(BeFalse()) + }) + + It("should have the correct summary", func() { + summary := befSuite.Summary() + Ω(summary.State).Should(Equal(types.SpecStateTimedOut)) + Ω(summary.Failure.ForwardedPanic).Should(BeEmpty()) + Ω(summary.Failure.ComponentIndex).Should(Equal(0)) + Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeBeforeSuite)) + Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation)) + }) + }) + + Context("when the body panics", func() { + BeforeEach(func() { + befSuite = NewBeforeSuiteNode(func() { + panic("bam") + }, codeLocation, 0, failer) + outcome = befSuite.Run(0, 0, "") + }) + + It("should return false when run and report as failed", func() { + Ω(outcome).Should(BeFalse()) + Ω(befSuite.Passed()).Should(BeFalse()) + }) + + It("should have the correct summary", func() { + summary := befSuite.Summary() + Ω(summary.State).Should(Equal(types.SpecStatePanicked)) + Ω(summary.Failure.ForwardedPanic).Should(Equal("bam")) + Ω(summary.Failure.ComponentIndex).Should(Equal(0)) + 
Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeBeforeSuite)) + Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation)) + }) + }) + }) + + Describe("AfterSuite nodes", func() { + var aftSuite SuiteNode + var failer *Failer.Failer + var codeLocation types.CodeLocation + var innerCodeLocation types.CodeLocation + var outcome bool + + BeforeEach(func() { + failer = Failer.New() + codeLocation = codelocation.New(0) + innerCodeLocation = codelocation.New(0) + }) + + Context("when the body passes", func() { + BeforeEach(func() { + aftSuite = NewAfterSuiteNode(func() { + time.Sleep(10 * time.Millisecond) + }, codeLocation, 0, failer) + outcome = aftSuite.Run(0, 0, "") + }) + + It("should return true when run and report as passed", func() { + Ω(outcome).Should(BeTrue()) + Ω(aftSuite.Passed()).Should(BeTrue()) + }) + + It("should have the correct summary", func() { + summary := aftSuite.Summary() + Ω(summary.ComponentType).Should(Equal(types.SpecComponentTypeAfterSuite)) + Ω(summary.CodeLocation).Should(Equal(codeLocation)) + Ω(summary.State).Should(Equal(types.SpecStatePassed)) + Ω(summary.RunTime).Should(BeNumerically(">=", 10*time.Millisecond)) + Ω(summary.Failure).Should(BeZero()) + }) + }) + + Context("when the body fails", func() { + BeforeEach(func() { + aftSuite = NewAfterSuiteNode(func() { + failer.Fail("oops", innerCodeLocation) + }, codeLocation, 0, failer) + outcome = aftSuite.Run(0, 0, "") + }) + + It("should return false when run and report as failed", func() { + Ω(outcome).Should(BeFalse()) + Ω(aftSuite.Passed()).Should(BeFalse()) + }) + + It("should have the correct summary", func() { + summary := aftSuite.Summary() + Ω(summary.State).Should(Equal(types.SpecStateFailed)) + Ω(summary.Failure.Message).Should(Equal("oops")) + Ω(summary.Failure.Location).Should(Equal(innerCodeLocation)) + Ω(summary.Failure.ForwardedPanic).Should(BeEmpty()) + Ω(summary.Failure.ComponentIndex).Should(Equal(0)) + Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeAfterSuite)) + Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation)) + }) + }) + + Context("when the body times out", func() { + BeforeEach(func() { + aftSuite = NewAfterSuiteNode(func(done Done) { + }, codeLocation, time.Millisecond, failer) + outcome = aftSuite.Run(0, 0, "") + }) + + It("should return false when run and report as failed", func() { + Ω(outcome).Should(BeFalse()) + Ω(aftSuite.Passed()).Should(BeFalse()) + }) + + It("should have the correct summary", func() { + summary := aftSuite.Summary() + Ω(summary.State).Should(Equal(types.SpecStateTimedOut)) + Ω(summary.Failure.ForwardedPanic).Should(BeEmpty()) + Ω(summary.Failure.ComponentIndex).Should(Equal(0)) + Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeAfterSuite)) + Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation)) + }) + }) + + Context("when the body panics", func() { + BeforeEach(func() { + aftSuite = NewAfterSuiteNode(func() { + panic("bam") + }, codeLocation, 0, failer) + outcome = aftSuite.Run(0, 0, "") + }) + + It("should return false when run and report as failed", func() { + Ω(outcome).Should(BeFalse()) + Ω(aftSuite.Passed()).Should(BeFalse()) + }) + + It("should have the correct summary", func() { + summary := aftSuite.Summary() + Ω(summary.State).Should(Equal(types.SpecStatePanicked)) + Ω(summary.Failure.ForwardedPanic).Should(Equal("bam")) + Ω(summary.Failure.ComponentIndex).Should(Equal(0)) + 
Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeAfterSuite)) + Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation)) + }) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/synchronized_after_suite_node.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/synchronized_after_suite_node.go new file mode 100644 index 0000000000000..e7030d9149a8c --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/synchronized_after_suite_node.go @@ -0,0 +1,89 @@ +package leafnodes + +import ( + "encoding/json" + "github.com/onsi/ginkgo/internal/failer" + "github.com/onsi/ginkgo/types" + "io/ioutil" + "net/http" + "time" +) + +type synchronizedAfterSuiteNode struct { + runnerA *runner + runnerB *runner + + outcome types.SpecState + failure types.SpecFailure + runTime time.Duration +} + +func NewSynchronizedAfterSuiteNode(bodyA interface{}, bodyB interface{}, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer) SuiteNode { + return &synchronizedAfterSuiteNode{ + runnerA: newRunner(bodyA, codeLocation, timeout, failer, types.SpecComponentTypeAfterSuite, 0), + runnerB: newRunner(bodyB, codeLocation, timeout, failer, types.SpecComponentTypeAfterSuite, 0), + } +} + +func (node *synchronizedAfterSuiteNode) Run(parallelNode int, parallelTotal int, syncHost string) bool { + node.outcome, node.failure = node.runnerA.run() + + if parallelNode == 1 { + if parallelTotal > 1 { + node.waitUntilOtherNodesAreDone(syncHost) + } + + outcome, failure := node.runnerB.run() + + if node.outcome == types.SpecStatePassed { + node.outcome, node.failure = outcome, failure + } + } + + return node.outcome == types.SpecStatePassed +} + +func (node *synchronizedAfterSuiteNode) Passed() bool { + return node.outcome == types.SpecStatePassed +} + +func (node *synchronizedAfterSuiteNode) Summary() *types.SetupSummary { + return &types.SetupSummary{ + ComponentType: node.runnerA.nodeType, + CodeLocation: node.runnerA.codeLocation, + State: node.outcome, + RunTime: node.runTime, + Failure: node.failure, + } +} + +func (node *synchronizedAfterSuiteNode) waitUntilOtherNodesAreDone(syncHost string) { + for { + if node.canRun(syncHost) { + return + } + + time.Sleep(50 * time.Millisecond) + } +} + +func (node *synchronizedAfterSuiteNode) canRun(syncHost string) bool { + resp, err := http.Get(syncHost + "/RemoteAfterSuiteData") + if err != nil || resp.StatusCode != http.StatusOK { + return false + } + + body, err := ioutil.ReadAll(resp.Body) + if err != nil { + return false + } + resp.Body.Close() + + afterSuiteData := types.RemoteAfterSuiteData{} + err = json.Unmarshal(body, &afterSuiteData) + if err != nil { + return false + } + + return afterSuiteData.CanRun +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/synchronized_after_suite_node_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/synchronized_after_suite_node_test.go new file mode 100644 index 0000000000000..4266a4bce6c6d --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/synchronized_after_suite_node_test.go @@ -0,0 +1,196 @@ +package leafnodes_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/ginkgo/internal/leafnodes" + "github.com/onsi/ginkgo/types" + . 
"github.com/onsi/gomega" + "sync" + + "github.com/onsi/gomega/ghttp" + "net/http" + + "github.com/onsi/ginkgo/internal/codelocation" + Failer "github.com/onsi/ginkgo/internal/failer" + "time" +) + +var _ = Describe("SynchronizedAfterSuiteNode", func() { + var failer *Failer.Failer + var node SuiteNode + var codeLocation types.CodeLocation + var innerCodeLocation types.CodeLocation + var outcome bool + var server *ghttp.Server + var things []string + var lock *sync.Mutex + + BeforeEach(func() { + things = []string{} + server = ghttp.NewServer() + codeLocation = codelocation.New(0) + innerCodeLocation = codelocation.New(0) + failer = Failer.New() + lock = &sync.Mutex{} + }) + + AfterEach(func() { + server.Close() + }) + + newNode := func(bodyA interface{}, bodyB interface{}) SuiteNode { + return NewSynchronizedAfterSuiteNode(bodyA, bodyB, codeLocation, time.Millisecond, failer) + } + + ranThing := func(thing string) { + lock.Lock() + defer lock.Unlock() + things = append(things, thing) + } + + thingsThatRan := func() []string { + lock.Lock() + defer lock.Unlock() + return things + } + + Context("when not running in parallel", func() { + Context("when all is well", func() { + BeforeEach(func() { + node = newNode(func() { + ranThing("A") + }, func() { + ranThing("B") + }) + + outcome = node.Run(1, 1, server.URL()) + }) + + It("should run A, then B", func() { + Ω(thingsThatRan()).Should(Equal([]string{"A", "B"})) + }) + + It("should report success", func() { + Ω(outcome).Should(BeTrue()) + Ω(node.Passed()).Should(BeTrue()) + Ω(node.Summary().State).Should(Equal(types.SpecStatePassed)) + }) + }) + + Context("when A fails", func() { + BeforeEach(func() { + node = newNode(func() { + ranThing("A") + failer.Fail("bam", innerCodeLocation) + }, func() { + ranThing("B") + }) + + outcome = node.Run(1, 1, server.URL()) + }) + + It("should still run B", func() { + Ω(thingsThatRan()).Should(Equal([]string{"A", "B"})) + }) + + It("should report failure", func() { + Ω(outcome).Should(BeFalse()) + Ω(node.Passed()).Should(BeFalse()) + Ω(node.Summary().State).Should(Equal(types.SpecStateFailed)) + }) + }) + + Context("when B fails", func() { + BeforeEach(func() { + node = newNode(func() { + ranThing("A") + }, func() { + ranThing("B") + failer.Fail("bam", innerCodeLocation) + }) + + outcome = node.Run(1, 1, server.URL()) + }) + + It("should run all the things", func() { + Ω(thingsThatRan()).Should(Equal([]string{"A", "B"})) + }) + + It("should report failure", func() { + Ω(outcome).Should(BeFalse()) + Ω(node.Passed()).Should(BeFalse()) + Ω(node.Summary().State).Should(Equal(types.SpecStateFailed)) + }) + }) + }) + + Context("when running in parallel", func() { + Context("as the first node", func() { + BeforeEach(func() { + server.AppendHandlers(ghttp.CombineHandlers( + ghttp.VerifyRequest("GET", "/RemoteAfterSuiteData"), + func(writer http.ResponseWriter, request *http.Request) { + ranThing("Request1") + }, + ghttp.RespondWithJSONEncoded(200, types.RemoteAfterSuiteData{false}), + ), ghttp.CombineHandlers( + ghttp.VerifyRequest("GET", "/RemoteAfterSuiteData"), + func(writer http.ResponseWriter, request *http.Request) { + ranThing("Request2") + }, + ghttp.RespondWithJSONEncoded(200, types.RemoteAfterSuiteData{false}), + ), ghttp.CombineHandlers( + ghttp.VerifyRequest("GET", "/RemoteAfterSuiteData"), + func(writer http.ResponseWriter, request *http.Request) { + ranThing("Request3") + }, + ghttp.RespondWithJSONEncoded(200, types.RemoteAfterSuiteData{true}), + )) + + node = newNode(func() { + ranThing("A") + }, 
func() { + ranThing("B") + }) + + outcome = node.Run(1, 3, server.URL()) + }) + + It("should run A and, when the server says its time, run B", func() { + Ω(thingsThatRan()).Should(Equal([]string{"A", "Request1", "Request2", "Request3", "B"})) + }) + + It("should report success", func() { + Ω(outcome).Should(BeTrue()) + Ω(node.Passed()).Should(BeTrue()) + Ω(node.Summary().State).Should(Equal(types.SpecStatePassed)) + }) + }) + + Context("as any other node", func() { + BeforeEach(func() { + node = newNode(func() { + ranThing("A") + }, func() { + ranThing("B") + }) + + outcome = node.Run(2, 3, server.URL()) + }) + + It("should run A, and not run B", func() { + Ω(thingsThatRan()).Should(Equal([]string{"A"})) + }) + + It("should not talk to the server", func() { + Ω(server.ReceivedRequests()).Should(BeEmpty()) + }) + + It("should report success", func() { + Ω(outcome).Should(BeTrue()) + Ω(node.Passed()).Should(BeTrue()) + Ω(node.Summary().State).Should(Equal(types.SpecStatePassed)) + }) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/synchronized_before_suite_node.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/synchronized_before_suite_node.go new file mode 100644 index 0000000000000..76a9679813f9b --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/synchronized_before_suite_node.go @@ -0,0 +1,182 @@ +package leafnodes + +import ( + "bytes" + "encoding/json" + "github.com/onsi/ginkgo/internal/failer" + "github.com/onsi/ginkgo/types" + "io/ioutil" + "net/http" + "reflect" + "time" +) + +type synchronizedBeforeSuiteNode struct { + runnerA *runner + runnerB *runner + + data []byte + + outcome types.SpecState + failure types.SpecFailure + runTime time.Duration +} + +func NewSynchronizedBeforeSuiteNode(bodyA interface{}, bodyB interface{}, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer) SuiteNode { + node := &synchronizedBeforeSuiteNode{} + + node.runnerA = newRunner(node.wrapA(bodyA), codeLocation, timeout, failer, types.SpecComponentTypeBeforeSuite, 0) + node.runnerB = newRunner(node.wrapB(bodyB), codeLocation, timeout, failer, types.SpecComponentTypeBeforeSuite, 0) + + return node +} + +func (node *synchronizedBeforeSuiteNode) Run(parallelNode int, parallelTotal int, syncHost string) bool { + t := time.Now() + defer func() { + node.runTime = time.Since(t) + }() + + if parallelNode == 1 { + node.outcome, node.failure = node.runA(parallelTotal, syncHost) + } else { + node.outcome, node.failure = node.waitForA(syncHost) + } + + if node.outcome != types.SpecStatePassed { + return false + } + node.outcome, node.failure = node.runnerB.run() + + return node.outcome == types.SpecStatePassed +} + +func (node *synchronizedBeforeSuiteNode) runA(parallelTotal int, syncHost string) (types.SpecState, types.SpecFailure) { + outcome, failure := node.runnerA.run() + + if parallelTotal > 1 { + state := types.RemoteBeforeSuiteStatePassed + if outcome != types.SpecStatePassed { + state = types.RemoteBeforeSuiteStateFailed + } + json := (types.RemoteBeforeSuiteData{ + Data: node.data, + State: state, + }).ToJSON() + http.Post(syncHost+"/BeforeSuiteState", "application/json", bytes.NewBuffer(json)) + } + + return outcome, failure +} + +func (node *synchronizedBeforeSuiteNode) waitForA(syncHost string) (types.SpecState, types.SpecFailure) { + failure := func(message string) types.SpecFailure { + return types.SpecFailure{ + Message: message, + Location: node.runnerA.codeLocation, + 
ComponentType: node.runnerA.nodeType, + ComponentIndex: node.runnerA.componentIndex, + ComponentCodeLocation: node.runnerA.codeLocation, + } + } + for { + resp, err := http.Get(syncHost + "/BeforeSuiteState") + if err != nil || resp.StatusCode != http.StatusOK { + return types.SpecStateFailed, failure("Failed to fetch BeforeSuite state") + } + + body, err := ioutil.ReadAll(resp.Body) + if err != nil { + return types.SpecStateFailed, failure("Failed to read BeforeSuite state") + } + resp.Body.Close() + + beforeSuiteData := types.RemoteBeforeSuiteData{} + err = json.Unmarshal(body, &beforeSuiteData) + if err != nil { + return types.SpecStateFailed, failure("Failed to decode BeforeSuite state") + } + + switch beforeSuiteData.State { + case types.RemoteBeforeSuiteStatePassed: + node.data = beforeSuiteData.Data + return types.SpecStatePassed, types.SpecFailure{} + case types.RemoteBeforeSuiteStateFailed: + return types.SpecStateFailed, failure("BeforeSuite on Node 1 failed") + case types.RemoteBeforeSuiteStateDisappeared: + return types.SpecStateFailed, failure("Node 1 disappeared before completing BeforeSuite") + } + + time.Sleep(50 * time.Millisecond) + } + + return types.SpecStateFailed, failure("Shouldn't get here!") +} + +func (node *synchronizedBeforeSuiteNode) Passed() bool { + return node.outcome == types.SpecStatePassed +} + +func (node *synchronizedBeforeSuiteNode) Summary() *types.SetupSummary { + return &types.SetupSummary{ + ComponentType: node.runnerA.nodeType, + CodeLocation: node.runnerA.codeLocation, + State: node.outcome, + RunTime: node.runTime, + Failure: node.failure, + } +} + +func (node *synchronizedBeforeSuiteNode) wrapA(bodyA interface{}) interface{} { + typeA := reflect.TypeOf(bodyA) + if typeA.Kind() != reflect.Func { + panic("SynchronizedBeforeSuite expects a function as its first argument") + } + + takesNothing := typeA.NumIn() == 0 + takesADoneChannel := typeA.NumIn() == 1 && typeA.In(0).Kind() == reflect.Chan && typeA.In(0).Elem().Kind() == reflect.Interface + returnsBytes := typeA.NumOut() == 1 && typeA.Out(0).Kind() == reflect.Slice && typeA.Out(0).Elem().Kind() == reflect.Uint8 + + if !((takesNothing || takesADoneChannel) && returnsBytes) { + panic("SynchronizedBeforeSuite's first argument should be a function that returns []byte and either takes no arguments or takes a Done channel.") + } + + if takesADoneChannel { + return func(done chan<- interface{}) { + out := reflect.ValueOf(bodyA).Call([]reflect.Value{reflect.ValueOf(done)}) + node.data = out[0].Interface().([]byte) + } + } + + return func() { + out := reflect.ValueOf(bodyA).Call([]reflect.Value{}) + node.data = out[0].Interface().([]byte) + } +} + +func (node *synchronizedBeforeSuiteNode) wrapB(bodyB interface{}) interface{} { + typeB := reflect.TypeOf(bodyB) + if typeB.Kind() != reflect.Func { + panic("SynchronizedBeforeSuite expects a function as its second argument") + } + + returnsNothing := typeB.NumOut() == 0 + takesBytesOnly := typeB.NumIn() == 1 && typeB.In(0).Kind() == reflect.Slice && typeB.In(0).Elem().Kind() == reflect.Uint8 + takesBytesAndDone := typeB.NumIn() == 2 && + typeB.In(0).Kind() == reflect.Slice && typeB.In(0).Elem().Kind() == reflect.Uint8 && + typeB.In(1).Kind() == reflect.Chan && typeB.In(1).Elem().Kind() == reflect.Interface + + if !((takesBytesOnly || takesBytesAndDone) && returnsNothing) { + panic("SynchronizedBeforeSuite's second argument should be a function that returns nothing and either takes []byte or ([]byte, Done)") + } + + if takesBytesAndDone { + return func(done 
chan<- interface{}) { + reflect.ValueOf(bodyB).Call([]reflect.Value{reflect.ValueOf(node.data), reflect.ValueOf(done)}) + } + } + + return func() { + reflect.ValueOf(bodyB).Call([]reflect.Value{reflect.ValueOf(node.data)}) + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/synchronized_before_suite_node_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/synchronized_before_suite_node_test.go new file mode 100644 index 0000000000000..dbf2426748a46 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/leafnodes/synchronized_before_suite_node_test.go @@ -0,0 +1,445 @@ +package leafnodes_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/ginkgo/internal/leafnodes" + . "github.com/onsi/gomega" + + "github.com/onsi/gomega/ghttp" + "net/http" + + "github.com/onsi/ginkgo/internal/codelocation" + Failer "github.com/onsi/ginkgo/internal/failer" + "github.com/onsi/ginkgo/types" + "time" +) + +var _ = Describe("SynchronizedBeforeSuiteNode", func() { + var failer *Failer.Failer + var node SuiteNode + var codeLocation types.CodeLocation + var innerCodeLocation types.CodeLocation + var outcome bool + var server *ghttp.Server + + BeforeEach(func() { + server = ghttp.NewServer() + codeLocation = codelocation.New(0) + innerCodeLocation = codelocation.New(0) + failer = Failer.New() + }) + + AfterEach(func() { + server.Close() + }) + + newNode := func(bodyA interface{}, bodyB interface{}) SuiteNode { + return NewSynchronizedBeforeSuiteNode(bodyA, bodyB, codeLocation, time.Millisecond, failer) + } + + Describe("when not running in parallel", func() { + Context("when all is well", func() { + var data []byte + BeforeEach(func() { + data = nil + + node = newNode(func() []byte { + return []byte("my data") + }, func(d []byte) { + data = d + }) + + outcome = node.Run(1, 1, server.URL()) + }) + + It("should run A, then B passing the output from A to B", func() { + Ω(data).Should(Equal([]byte("my data"))) + }) + + It("should report success", func() { + Ω(outcome).Should(BeTrue()) + Ω(node.Passed()).Should(BeTrue()) + Ω(node.Summary().State).Should(Equal(types.SpecStatePassed)) + }) + }) + + Context("when A fails", func() { + var ranB bool + BeforeEach(func() { + ranB = false + node = newNode(func() []byte { + failer.Fail("boom", innerCodeLocation) + return nil + }, func([]byte) { + ranB = true + }) + + outcome = node.Run(1, 1, server.URL()) + }) + + It("should not run B", func() { + Ω(ranB).Should(BeFalse()) + }) + + It("should report failure", func() { + Ω(outcome).Should(BeFalse()) + Ω(node.Passed()).Should(BeFalse()) + Ω(node.Summary().State).Should(Equal(types.SpecStateFailed)) + }) + }) + + Context("when B fails", func() { + BeforeEach(func() { + node = newNode(func() []byte { + return nil + }, func([]byte) { + failer.Fail("boom", innerCodeLocation) + }) + + outcome = node.Run(1, 1, server.URL()) + }) + + It("should report failure", func() { + Ω(outcome).Should(BeFalse()) + Ω(node.Passed()).Should(BeFalse()) + Ω(node.Summary().State).Should(Equal(types.SpecStateFailed)) + }) + }) + + Context("when A times out", func() { + var ranB bool + BeforeEach(func() { + ranB = false + node = newNode(func(Done) []byte { + time.Sleep(time.Second) + return nil + }, func([]byte) { + ranB = true + }) + + outcome = node.Run(1, 1, server.URL()) + }) + + It("should not run B", func() { + Ω(ranB).Should(BeFalse()) + }) + + It("should report failure", func() { + Ω(outcome).Should(BeFalse()) + Ω(node.Passed()).Should(BeFalse()) + 
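+				// The time.Millisecond timeout handed to newNode is applied to both the A and B
+				// runners, so the one-second sleep in A is what produces the SpecStateTimedOut
+				// state asserted on the next line.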
Ω(node.Summary().State).Should(Equal(types.SpecStateTimedOut)) + }) + }) + + Context("when B times out", func() { + BeforeEach(func() { + node = newNode(func() []byte { + return nil + }, func([]byte, Done) { + time.Sleep(time.Second) + }) + + outcome = node.Run(1, 1, server.URL()) + }) + + It("should report failure", func() { + Ω(outcome).Should(BeFalse()) + Ω(node.Passed()).Should(BeFalse()) + Ω(node.Summary().State).Should(Equal(types.SpecStateTimedOut)) + }) + }) + }) + + Describe("when running in parallel", func() { + var ranB bool + var parallelNode, parallelTotal int + BeforeEach(func() { + ranB = false + parallelNode, parallelTotal = 1, 3 + }) + + Context("as the first node, it runs A", func() { + var expectedState types.RemoteBeforeSuiteData + + BeforeEach(func() { + parallelNode, parallelTotal = 1, 3 + }) + + JustBeforeEach(func() { + server.AppendHandlers(ghttp.CombineHandlers( + ghttp.VerifyRequest("POST", "/BeforeSuiteState"), + ghttp.VerifyJSONRepresenting(expectedState), + )) + + outcome = node.Run(parallelNode, parallelTotal, server.URL()) + }) + + Context("when A succeeds", func() { + BeforeEach(func() { + expectedState = types.RemoteBeforeSuiteData{[]byte("my data"), types.RemoteBeforeSuiteStatePassed} + + node = newNode(func() []byte { + return []byte("my data") + }, func([]byte) { + ranB = true + }) + }) + + It("should post about A succeeding", func() { + Ω(server.ReceivedRequests()).Should(HaveLen(1)) + }) + + It("should run B", func() { + Ω(ranB).Should(BeTrue()) + }) + + It("should report success", func() { + Ω(outcome).Should(BeTrue()) + }) + }) + + Context("when A fails", func() { + BeforeEach(func() { + expectedState = types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStateFailed} + + node = newNode(func() []byte { + panic("BAM") + return []byte("my data") + }, func([]byte) { + ranB = true + }) + }) + + It("should post about A failing", func() { + Ω(server.ReceivedRequests()).Should(HaveLen(1)) + }) + + It("should not run B", func() { + Ω(ranB).Should(BeFalse()) + }) + + It("should report failure", func() { + Ω(outcome).Should(BeFalse()) + }) + }) + }) + + Context("as the Nth node", func() { + var statusCode int + var response interface{} + var ranA bool + var bData []byte + + BeforeEach(func() { + ranA = false + bData = nil + + statusCode = http.StatusOK + + server.AppendHandlers(ghttp.CombineHandlers( + ghttp.VerifyRequest("GET", "/BeforeSuiteState"), + ghttp.RespondWith(http.StatusOK, string((types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStatePending}).ToJSON())), + ), ghttp.CombineHandlers( + ghttp.VerifyRequest("GET", "/BeforeSuiteState"), + ghttp.RespondWith(http.StatusOK, string((types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStatePending}).ToJSON())), + ), ghttp.CombineHandlers( + ghttp.VerifyRequest("GET", "/BeforeSuiteState"), + ghttp.RespondWithJSONEncodedPtr(&statusCode, &response), + )) + + node = newNode(func() []byte { + ranA = true + return nil + }, func(data []byte) { + bData = data + }) + + parallelNode, parallelTotal = 2, 3 + }) + + Context("when A on node1 succeeds", func() { + BeforeEach(func() { + response = types.RemoteBeforeSuiteData{[]byte("my data"), types.RemoteBeforeSuiteStatePassed} + outcome = node.Run(parallelNode, parallelTotal, server.URL()) + }) + + It("should not run A", func() { + Ω(ranA).Should(BeFalse()) + }) + + It("should poll for A", func() { + Ω(server.ReceivedRequests()).Should(HaveLen(3)) + }) + + It("should run B when the polling succeeds", func() { + Ω(bData).Should(Equal([]byte("my data"))) + 
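+				// bData is whatever the B function received: on this non-primary node it is the
+				// []byte payload carried by the polled /BeforeSuiteState response stubbed above.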
}) + + It("should succeed", func() { + Ω(outcome).Should(BeTrue()) + Ω(node.Passed()).Should(BeTrue()) + }) + }) + + Context("when A on node1 fails", func() { + BeforeEach(func() { + response = types.RemoteBeforeSuiteData{[]byte("my data"), types.RemoteBeforeSuiteStateFailed} + outcome = node.Run(parallelNode, parallelTotal, server.URL()) + }) + + It("should not run A", func() { + Ω(ranA).Should(BeFalse()) + }) + + It("should poll for A", func() { + Ω(server.ReceivedRequests()).Should(HaveLen(3)) + }) + + It("should not run B", func() { + Ω(bData).Should(BeNil()) + }) + + It("should fail", func() { + Ω(outcome).Should(BeFalse()) + Ω(node.Passed()).Should(BeFalse()) + + summary := node.Summary() + Ω(summary.State).Should(Equal(types.SpecStateFailed)) + Ω(summary.Failure.Message).Should(Equal("BeforeSuite on Node 1 failed")) + Ω(summary.Failure.Location).Should(Equal(codeLocation)) + Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeBeforeSuite)) + Ω(summary.Failure.ComponentIndex).Should(Equal(0)) + Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation)) + }) + }) + + Context("when node1 disappears", func() { + BeforeEach(func() { + response = types.RemoteBeforeSuiteData{[]byte("my data"), types.RemoteBeforeSuiteStateDisappeared} + outcome = node.Run(parallelNode, parallelTotal, server.URL()) + }) + + It("should not run A", func() { + Ω(ranA).Should(BeFalse()) + }) + + It("should poll for A", func() { + Ω(server.ReceivedRequests()).Should(HaveLen(3)) + }) + + It("should not run B", func() { + Ω(bData).Should(BeNil()) + }) + + It("should fail", func() { + Ω(outcome).Should(BeFalse()) + Ω(node.Passed()).Should(BeFalse()) + + summary := node.Summary() + Ω(summary.State).Should(Equal(types.SpecStateFailed)) + Ω(summary.Failure.Message).Should(Equal("Node 1 disappeared before completing BeforeSuite")) + Ω(summary.Failure.Location).Should(Equal(codeLocation)) + Ω(summary.Failure.ComponentType).Should(Equal(types.SpecComponentTypeBeforeSuite)) + Ω(summary.Failure.ComponentIndex).Should(Equal(0)) + Ω(summary.Failure.ComponentCodeLocation).Should(Equal(codeLocation)) + }) + }) + }) + }) + + Describe("construction", func() { + Describe("the first function", func() { + Context("when the first function returns a byte array", func() { + Context("and takes nothing", func() { + It("should be fine", func() { + Ω(func() { + newNode(func() []byte { return nil }, func([]byte) {}) + }).ShouldNot(Panic()) + }) + }) + + Context("and takes a done function", func() { + It("should be fine", func() { + Ω(func() { + newNode(func(Done) []byte { return nil }, func([]byte) {}) + }).ShouldNot(Panic()) + }) + }) + + Context("and takes more than one thing", func() { + It("should panic", func() { + Ω(func() { + newNode(func(Done, Done) []byte { return nil }, func([]byte) {}) + }).Should(Panic()) + }) + }) + + Context("and takes something else", func() { + It("should panic", func() { + Ω(func() { + newNode(func(bool) []byte { return nil }, func([]byte) {}) + }).Should(Panic()) + }) + }) + }) + + Context("when the first function does not return a byte array", func() { + It("should panic", func() { + Ω(func() { + newNode(func() {}, func([]byte) {}) + }).Should(Panic()) + + Ω(func() { + newNode(func() []int { return nil }, func([]byte) {}) + }).Should(Panic()) + }) + }) + }) + + Describe("the second function", func() { + Context("when the second function takes a byte array", func() { + It("should be fine", func() { + Ω(func() { + newNode(func() []byte { return nil }, func([]byte) {}) + 
}).ShouldNot(Panic()) + }) + }) + + Context("when it also takes a done channel", func() { + It("should be fine", func() { + Ω(func() { + newNode(func() []byte { return nil }, func([]byte, Done) {}) + }).ShouldNot(Panic()) + }) + }) + + Context("if it takes anything else", func() { + It("should panic", func() { + Ω(func() { + newNode(func() []byte { return nil }, func([]byte, chan bool) {}) + }).Should(Panic()) + + Ω(func() { + newNode(func() []byte { return nil }, func(string) {}) + }).Should(Panic()) + }) + }) + + Context("if it takes nothing at all", func() { + It("should panic", func() { + Ω(func() { + newNode(func() []byte { return nil }, func() {}) + }).Should(Panic()) + }) + }) + + Context("if it returns something", func() { + It("should panic", func() { + Ω(func() { + newNode(func() []byte { return nil }, func([]byte) []byte { return nil }) + }).Should(Panic()) + }) + }) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/aggregator.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/aggregator.go new file mode 100644 index 0000000000000..9dcfb5fe8bf9c --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/aggregator.go @@ -0,0 +1,250 @@ +/* + +Aggregator is a reporter used by the Ginkgo CLI to aggregate and present parallel test output +coherently as tests complete. You shouldn't need to use this in your code. To run tests in parallel: + + ginkgo -nodes=N + +where N is the number of nodes you desire. +*/ +package remote + +import ( + "time" + + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/reporters/stenographer" + "github.com/onsi/ginkgo/types" +) + +type configAndSuite struct { + config config.GinkgoConfigType + summary *types.SuiteSummary +} + +type Aggregator struct { + nodeCount int + config config.DefaultReporterConfigType + stenographer stenographer.Stenographer + result chan bool + + suiteBeginnings chan configAndSuite + aggregatedSuiteBeginnings []configAndSuite + + beforeSuites chan *types.SetupSummary + aggregatedBeforeSuites []*types.SetupSummary + + afterSuites chan *types.SetupSummary + aggregatedAfterSuites []*types.SetupSummary + + specCompletions chan *types.SpecSummary + completedSpecs []*types.SpecSummary + + suiteEndings chan *types.SuiteSummary + aggregatedSuiteEndings []*types.SuiteSummary + specs []*types.SpecSummary + + startTime time.Time +} + +func NewAggregator(nodeCount int, result chan bool, config config.DefaultReporterConfigType, stenographer stenographer.Stenographer) *Aggregator { + aggregator := &Aggregator{ + nodeCount: nodeCount, + result: result, + config: config, + stenographer: stenographer, + + suiteBeginnings: make(chan configAndSuite, 0), + beforeSuites: make(chan *types.SetupSummary, 0), + afterSuites: make(chan *types.SetupSummary, 0), + specCompletions: make(chan *types.SpecSummary, 0), + suiteEndings: make(chan *types.SuiteSummary, 0), + } + + go aggregator.mux() + + return aggregator +} + +func (aggregator *Aggregator) SpecSuiteWillBegin(config config.GinkgoConfigType, summary *types.SuiteSummary) { + aggregator.suiteBeginnings <- configAndSuite{config, summary} +} + +func (aggregator *Aggregator) BeforeSuiteDidRun(setupSummary *types.SetupSummary) { + aggregator.beforeSuites <- setupSummary +} + +func (aggregator *Aggregator) AfterSuiteDidRun(setupSummary *types.SetupSummary) { + aggregator.afterSuites <- setupSummary +} + +func (aggregator *Aggregator) SpecWillRun(specSummary *types.SpecSummary) { + //noop +} + +func (aggregator *Aggregator) 
SpecDidComplete(specSummary *types.SpecSummary) { + aggregator.specCompletions <- specSummary +} + +func (aggregator *Aggregator) SpecSuiteDidEnd(summary *types.SuiteSummary) { + aggregator.suiteEndings <- summary +} + +func (aggregator *Aggregator) mux() { +loop: + for { + select { + case configAndSuite := <-aggregator.suiteBeginnings: + aggregator.registerSuiteBeginning(configAndSuite) + case setupSummary := <-aggregator.beforeSuites: + aggregator.registerBeforeSuite(setupSummary) + case setupSummary := <-aggregator.afterSuites: + aggregator.registerAfterSuite(setupSummary) + case specSummary := <-aggregator.specCompletions: + aggregator.registerSpecCompletion(specSummary) + case suite := <-aggregator.suiteEndings: + finished, passed := aggregator.registerSuiteEnding(suite) + if finished { + aggregator.result <- passed + break loop + } + } + } +} + +func (aggregator *Aggregator) registerSuiteBeginning(configAndSuite configAndSuite) { + aggregator.aggregatedSuiteBeginnings = append(aggregator.aggregatedSuiteBeginnings, configAndSuite) + + if len(aggregator.aggregatedSuiteBeginnings) == 1 { + aggregator.startTime = time.Now() + } + + if len(aggregator.aggregatedSuiteBeginnings) != aggregator.nodeCount { + return + } + + aggregator.stenographer.AnnounceSuite(configAndSuite.summary.SuiteDescription, configAndSuite.config.RandomSeed, configAndSuite.config.RandomizeAllSpecs, aggregator.config.Succinct) + + numberOfSpecsToRun := 0 + totalNumberOfSpecs := 0 + for _, configAndSuite := range aggregator.aggregatedSuiteBeginnings { + numberOfSpecsToRun += configAndSuite.summary.NumberOfSpecsThatWillBeRun + totalNumberOfSpecs += configAndSuite.summary.NumberOfTotalSpecs + } + + aggregator.stenographer.AnnounceNumberOfSpecs(numberOfSpecsToRun, totalNumberOfSpecs, aggregator.config.Succinct) + aggregator.stenographer.AnnounceAggregatedParallelRun(aggregator.nodeCount, aggregator.config.Succinct) + aggregator.flushCompletedSpecs() +} + +func (aggregator *Aggregator) registerBeforeSuite(setupSummary *types.SetupSummary) { + aggregator.aggregatedBeforeSuites = append(aggregator.aggregatedBeforeSuites, setupSummary) + aggregator.flushCompletedSpecs() +} + +func (aggregator *Aggregator) registerAfterSuite(setupSummary *types.SetupSummary) { + aggregator.aggregatedAfterSuites = append(aggregator.aggregatedAfterSuites, setupSummary) + aggregator.flushCompletedSpecs() +} + +func (aggregator *Aggregator) registerSpecCompletion(specSummary *types.SpecSummary) { + aggregator.completedSpecs = append(aggregator.completedSpecs, specSummary) + aggregator.specs = append(aggregator.specs, specSummary) + aggregator.flushCompletedSpecs() +} + +func (aggregator *Aggregator) flushCompletedSpecs() { + if len(aggregator.aggregatedSuiteBeginnings) != aggregator.nodeCount { + return + } + + for _, setupSummary := range aggregator.aggregatedBeforeSuites { + aggregator.announceBeforeSuite(setupSummary) + } + + for _, specSummary := range aggregator.completedSpecs { + aggregator.announceSpec(specSummary) + } + + for _, setupSummary := range aggregator.aggregatedAfterSuites { + aggregator.announceAfterSuite(setupSummary) + } + + aggregator.aggregatedBeforeSuites = []*types.SetupSummary{} + aggregator.completedSpecs = []*types.SpecSummary{} + aggregator.aggregatedAfterSuites = []*types.SetupSummary{} +} + +func (aggregator *Aggregator) announceBeforeSuite(setupSummary *types.SetupSummary) { + aggregator.stenographer.AnnounceCapturedOutput(setupSummary.CapturedOutput) + if setupSummary.State != types.SpecStatePassed { + 
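+		// Captured output was already announced unconditionally above; only a
+		// non-passing BeforeSuite additionally has its failure announced here.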
aggregator.stenographer.AnnounceBeforeSuiteFailure(setupSummary, aggregator.config.Succinct, aggregator.config.FullTrace) + } +} + +func (aggregator *Aggregator) announceAfterSuite(setupSummary *types.SetupSummary) { + aggregator.stenographer.AnnounceCapturedOutput(setupSummary.CapturedOutput) + if setupSummary.State != types.SpecStatePassed { + aggregator.stenographer.AnnounceAfterSuiteFailure(setupSummary, aggregator.config.Succinct, aggregator.config.FullTrace) + } +} + +func (aggregator *Aggregator) announceSpec(specSummary *types.SpecSummary) { + if aggregator.config.Verbose && specSummary.State != types.SpecStatePending && specSummary.State != types.SpecStateSkipped { + aggregator.stenographer.AnnounceSpecWillRun(specSummary) + } + + aggregator.stenographer.AnnounceCapturedOutput(specSummary.CapturedOutput) + + switch specSummary.State { + case types.SpecStatePassed: + if specSummary.IsMeasurement { + aggregator.stenographer.AnnounceSuccesfulMeasurement(specSummary, aggregator.config.Succinct) + } else if specSummary.RunTime.Seconds() >= aggregator.config.SlowSpecThreshold { + aggregator.stenographer.AnnounceSuccesfulSlowSpec(specSummary, aggregator.config.Succinct) + } else { + aggregator.stenographer.AnnounceSuccesfulSpec(specSummary) + } + + case types.SpecStatePending: + aggregator.stenographer.AnnouncePendingSpec(specSummary, aggregator.config.NoisyPendings && !aggregator.config.Succinct) + case types.SpecStateSkipped: + aggregator.stenographer.AnnounceSkippedSpec(specSummary) + case types.SpecStateTimedOut: + aggregator.stenographer.AnnounceSpecTimedOut(specSummary, aggregator.config.Succinct, aggregator.config.FullTrace) + case types.SpecStatePanicked: + aggregator.stenographer.AnnounceSpecPanicked(specSummary, aggregator.config.Succinct, aggregator.config.FullTrace) + case types.SpecStateFailed: + aggregator.stenographer.AnnounceSpecFailed(specSummary, aggregator.config.Succinct, aggregator.config.FullTrace) + } +} + +func (aggregator *Aggregator) registerSuiteEnding(suite *types.SuiteSummary) (finished bool, passed bool) { + aggregator.aggregatedSuiteEndings = append(aggregator.aggregatedSuiteEndings, suite) + if len(aggregator.aggregatedSuiteEndings) < aggregator.nodeCount { + return false, false + } + + aggregatedSuiteSummary := &types.SuiteSummary{} + aggregatedSuiteSummary.SuiteSucceeded = true + + for _, suiteSummary := range aggregator.aggregatedSuiteEndings { + if suiteSummary.SuiteSucceeded == false { + aggregatedSuiteSummary.SuiteSucceeded = false + } + + aggregatedSuiteSummary.NumberOfSpecsThatWillBeRun += suiteSummary.NumberOfSpecsThatWillBeRun + aggregatedSuiteSummary.NumberOfTotalSpecs += suiteSummary.NumberOfTotalSpecs + aggregatedSuiteSummary.NumberOfPassedSpecs += suiteSummary.NumberOfPassedSpecs + aggregatedSuiteSummary.NumberOfFailedSpecs += suiteSummary.NumberOfFailedSpecs + aggregatedSuiteSummary.NumberOfPendingSpecs += suiteSummary.NumberOfPendingSpecs + aggregatedSuiteSummary.NumberOfSkippedSpecs += suiteSummary.NumberOfSkippedSpecs + } + + aggregatedSuiteSummary.RunTime = time.Since(aggregator.startTime) + + aggregator.stenographer.SummarizeFailures(aggregator.specs) + aggregator.stenographer.AnnounceSpecRunCompletion(aggregatedSuiteSummary, aggregator.config.Succinct) + + return true, aggregatedSuiteSummary.SuiteSucceeded +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/aggregator_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/aggregator_test.go new file mode 100644 index 0000000000000..d8499cf378a90 
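The aggregator above fans in reporter events from every parallel node over channels and stays silent until all nodes have checked in; once every node reports its suite ending, it emits the combined summary and signals the result channel. A minimal sketch of driving it directly is below; it is illustrative only (not part of the vendored files) and borrows the FakeStenographer used by the accompanying test:

```golang
package main

import (
	"fmt"

	"github.com/onsi/ginkgo/config"
	"github.com/onsi/ginkgo/internal/remote"
	st "github.com/onsi/ginkgo/reporters/stenographer"
	"github.com/onsi/ginkgo/types"
)

func main() {
	result := make(chan bool, 1)
	reporterConfig := config.DefaultReporterConfigType{SlowSpecThreshold: 0.1, NoisyPendings: true}

	// Aggregate two parallel nodes; the fake stenographer records calls instead of printing.
	aggregator := remote.NewAggregator(2, result, reporterConfig, st.NewFakeStenographer())

	summary := &types.SuiteSummary{SuiteDescription: "demo suite", SuiteSucceeded: true}
	for node := 1; node <= 2; node++ {
		cfg := config.GinkgoConfigType{ParallelNode: node, ParallelTotal: 2, RandomSeed: 1138}
		aggregator.SpecSuiteWillBegin(cfg, summary) // silent until both nodes have begun
		aggregator.SpecSuiteDidEnd(summary)         // the result channel fires after the last node ends
	}

	fmt.Println("aggregated suite passed:", <-result)
}
```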
--- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/aggregator_test.go @@ -0,0 +1,311 @@ +package remote_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + "time" + "github.com/onsi/ginkgo/config" + . "github.com/onsi/ginkgo/internal/remote" + st "github.com/onsi/ginkgo/reporters/stenographer" + "github.com/onsi/ginkgo/types" +) + +var _ = Describe("Aggregator", func() { + var ( + aggregator *Aggregator + reporterConfig config.DefaultReporterConfigType + stenographer *st.FakeStenographer + result chan bool + + ginkgoConfig1 config.GinkgoConfigType + ginkgoConfig2 config.GinkgoConfigType + + suiteSummary1 *types.SuiteSummary + suiteSummary2 *types.SuiteSummary + + beforeSummary *types.SetupSummary + afterSummary *types.SetupSummary + specSummary *types.SpecSummary + + suiteDescription string + ) + + BeforeEach(func() { + reporterConfig = config.DefaultReporterConfigType{ + NoColor: false, + SlowSpecThreshold: 0.1, + NoisyPendings: true, + Succinct: false, + Verbose: true, + } + stenographer = st.NewFakeStenographer() + result = make(chan bool, 1) + aggregator = NewAggregator(2, result, reporterConfig, stenographer) + + // + // now set up some fixture data + // + + ginkgoConfig1 = config.GinkgoConfigType{ + RandomSeed: 1138, + RandomizeAllSpecs: true, + ParallelNode: 1, + ParallelTotal: 2, + } + + ginkgoConfig2 = config.GinkgoConfigType{ + RandomSeed: 1138, + RandomizeAllSpecs: true, + ParallelNode: 2, + ParallelTotal: 2, + } + + suiteDescription = "My Parallel Suite" + + suiteSummary1 = &types.SuiteSummary{ + SuiteDescription: suiteDescription, + + NumberOfSpecsBeforeParallelization: 30, + NumberOfTotalSpecs: 17, + NumberOfSpecsThatWillBeRun: 15, + NumberOfPendingSpecs: 1, + NumberOfSkippedSpecs: 1, + } + + suiteSummary2 = &types.SuiteSummary{ + SuiteDescription: suiteDescription, + + NumberOfSpecsBeforeParallelization: 30, + NumberOfTotalSpecs: 13, + NumberOfSpecsThatWillBeRun: 8, + NumberOfPendingSpecs: 2, + NumberOfSkippedSpecs: 3, + } + + beforeSummary = &types.SetupSummary{ + State: types.SpecStatePassed, + CapturedOutput: "BeforeSuiteOutput", + } + + afterSummary = &types.SetupSummary{ + State: types.SpecStatePassed, + CapturedOutput: "AfterSuiteOutput", + } + + specSummary = &types.SpecSummary{ + State: types.SpecStatePassed, + CapturedOutput: "SpecOutput", + } + }) + + call := func(method string, args ...interface{}) st.FakeStenographerCall { + return st.NewFakeStenographerCall(method, args...) 
+ } + + beginSuite := func() { + stenographer.Reset() + aggregator.SpecSuiteWillBegin(ginkgoConfig2, suiteSummary2) + aggregator.SpecSuiteWillBegin(ginkgoConfig1, suiteSummary1) + Eventually(func() interface{} { + return len(stenographer.Calls()) + }).Should(BeNumerically(">=", 3)) + } + + Describe("Announcing the beginning of the suite", func() { + Context("When one of the parallel-suites starts", func() { + BeforeEach(func() { + aggregator.SpecSuiteWillBegin(ginkgoConfig2, suiteSummary2) + }) + + It("should be silent", func() { + Consistently(func() interface{} { return stenographer.Calls() }).Should(BeEmpty()) + }) + }) + + Context("once all of the parallel-suites have started", func() { + BeforeEach(func() { + aggregator.SpecSuiteWillBegin(ginkgoConfig2, suiteSummary2) + aggregator.SpecSuiteWillBegin(ginkgoConfig1, suiteSummary1) + Eventually(func() interface{} { + return stenographer.Calls() + }).Should(HaveLen(3)) + }) + + It("should announce the beginning of the suite", func() { + Ω(stenographer.Calls()).Should(HaveLen(3)) + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuite", suiteDescription, ginkgoConfig1.RandomSeed, true, false))) + Ω(stenographer.Calls()[1]).Should(Equal(call("AnnounceNumberOfSpecs", 23, 30, false))) + Ω(stenographer.Calls()[2]).Should(Equal(call("AnnounceAggregatedParallelRun", 2, false))) + }) + }) + }) + + Describe("Announcing specs and before suites", func() { + Context("when the parallel-suites have not all started", func() { + BeforeEach(func() { + aggregator.BeforeSuiteDidRun(beforeSummary) + aggregator.AfterSuiteDidRun(afterSummary) + aggregator.SpecDidComplete(specSummary) + }) + + It("should not announce any specs", func() { + Consistently(func() interface{} { return stenographer.Calls() }).Should(BeEmpty()) + }) + + Context("when the parallel-suites subsequently start", func() { + BeforeEach(func() { + beginSuite() + }) + + It("should announce the specs, the before suites and the after suites", func() { + Eventually(func() interface{} { + return stenographer.Calls() + }).Should(ContainElement(call("AnnounceSuccesfulSpec", specSummary))) + + Ω(stenographer.Calls()).Should(ContainElement(call("AnnounceCapturedOutput", beforeSummary.CapturedOutput))) + Ω(stenographer.Calls()).Should(ContainElement(call("AnnounceCapturedOutput", afterSummary.CapturedOutput))) + }) + }) + }) + + Context("When the parallel-suites have all started", func() { + BeforeEach(func() { + beginSuite() + stenographer.Reset() + }) + + Context("When a spec completes", func() { + BeforeEach(func() { + aggregator.BeforeSuiteDidRun(beforeSummary) + aggregator.SpecDidComplete(specSummary) + aggregator.AfterSuiteDidRun(afterSummary) + Eventually(func() interface{} { + return stenographer.Calls() + }).Should(HaveLen(5)) + }) + + It("should announce the captured output of the BeforeSuite", func() { + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceCapturedOutput", beforeSummary.CapturedOutput))) + }) + + It("should announce that the spec will run (when in verbose mode)", func() { + Ω(stenographer.Calls()[1]).Should(Equal(call("AnnounceSpecWillRun", specSummary))) + }) + + It("should announce the captured stdout of the spec", func() { + Ω(stenographer.Calls()[2]).Should(Equal(call("AnnounceCapturedOutput", specSummary.CapturedOutput))) + }) + + It("should announce completion", func() { + Ω(stenographer.Calls()[3]).Should(Equal(call("AnnounceSuccesfulSpec", specSummary))) + }) + + It("should announce the captured output of the AfterSuite", func() { + 
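+					// One BeforeSuite, one spec, and one AfterSuite are flushed together, so the
+					// fake stenographer records five calls in order: BeforeSuite output,
+					// AnnounceSpecWillRun, spec output, AnnounceSuccesfulSpec, AfterSuite output.
+					// Index 4 is therefore the AfterSuite's captured output.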
Ω(stenographer.Calls()[4]).Should(Equal(call("AnnounceCapturedOutput", afterSummary.CapturedOutput))) + }) + }) + }) + }) + + Describe("Announcing the end of the suite", func() { + BeforeEach(func() { + beginSuite() + stenographer.Reset() + }) + + Context("When one of the parallel-suites ends", func() { + BeforeEach(func() { + aggregator.SpecSuiteDidEnd(suiteSummary2) + }) + + It("should be silent", func() { + Consistently(func() interface{} { return stenographer.Calls() }).Should(BeEmpty()) + }) + + It("should not notify the channel", func() { + Ω(result).Should(BeEmpty()) + }) + }) + + Context("once all of the parallel-suites end", func() { + BeforeEach(func() { + time.Sleep(200 * time.Millisecond) + + suiteSummary1.SuiteSucceeded = true + suiteSummary1.NumberOfPassedSpecs = 15 + suiteSummary1.NumberOfFailedSpecs = 0 + suiteSummary2.SuiteSucceeded = false + suiteSummary2.NumberOfPassedSpecs = 5 + suiteSummary2.NumberOfFailedSpecs = 3 + + aggregator.SpecSuiteDidEnd(suiteSummary2) + aggregator.SpecSuiteDidEnd(suiteSummary1) + Eventually(func() interface{} { + return stenographer.Calls() + }).Should(HaveLen(2)) + }) + + It("should announce the end of the suite", func() { + compositeSummary := stenographer.Calls()[1].Args[0].(*types.SuiteSummary) + + Ω(compositeSummary.SuiteSucceeded).Should(BeFalse()) + Ω(compositeSummary.NumberOfSpecsThatWillBeRun).Should(Equal(23)) + Ω(compositeSummary.NumberOfTotalSpecs).Should(Equal(30)) + Ω(compositeSummary.NumberOfPassedSpecs).Should(Equal(20)) + Ω(compositeSummary.NumberOfFailedSpecs).Should(Equal(3)) + Ω(compositeSummary.NumberOfPendingSpecs).Should(Equal(3)) + Ω(compositeSummary.NumberOfSkippedSpecs).Should(Equal(4)) + Ω(compositeSummary.RunTime.Seconds()).Should(BeNumerically(">", 0.2)) + }) + }) + + Context("when all the parallel-suites pass", func() { + BeforeEach(func() { + suiteSummary1.SuiteSucceeded = true + suiteSummary2.SuiteSucceeded = true + + aggregator.SpecSuiteDidEnd(suiteSummary2) + aggregator.SpecSuiteDidEnd(suiteSummary1) + Eventually(func() interface{} { + return stenographer.Calls() + }).Should(HaveLen(2)) + }) + + It("should report success", func() { + compositeSummary := stenographer.Calls()[1].Args[0].(*types.SuiteSummary) + + Ω(compositeSummary.SuiteSucceeded).Should(BeTrue()) + }) + + It("should notify the channel that it succeded", func(done Done) { + Ω(<-result).Should(BeTrue()) + close(done) + }) + }) + + Context("when one of the parallel-suites fails", func() { + BeforeEach(func() { + suiteSummary1.SuiteSucceeded = true + suiteSummary2.SuiteSucceeded = false + + aggregator.SpecSuiteDidEnd(suiteSummary2) + aggregator.SpecSuiteDidEnd(suiteSummary1) + Eventually(func() interface{} { + return stenographer.Calls() + }).Should(HaveLen(2)) + }) + + It("should report failure", func() { + compositeSummary := stenographer.Calls()[1].Args[0].(*types.SuiteSummary) + + Ω(compositeSummary.SuiteSucceeded).Should(BeFalse()) + }) + + It("should notify the channel that it failed", func(done Done) { + Ω(<-result).Should(BeFalse()) + close(done) + }) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/fake_output_interceptor_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/fake_output_interceptor_test.go new file mode 100644 index 0000000000000..a928f93d311fd --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/fake_output_interceptor_test.go @@ -0,0 +1,17 @@ +package remote_test + +type fakeOutputInterceptor struct { + DidStartInterceptingOutput bool + 
DidStopInterceptingOutput bool + InterceptedOutput string +} + +func (interceptor *fakeOutputInterceptor) StartInterceptingOutput() error { + interceptor.DidStartInterceptingOutput = true + return nil +} + +func (interceptor *fakeOutputInterceptor) StopInterceptingAndReturnOutput() (string, error) { + interceptor.DidStopInterceptingOutput = true + return interceptor.InterceptedOutput, nil +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/fake_poster_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/fake_poster_test.go new file mode 100644 index 0000000000000..3543c59c64c45 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/fake_poster_test.go @@ -0,0 +1,33 @@ +package remote_test + +import ( + "io" + "io/ioutil" + "net/http" +) + +type post struct { + url string + bodyType string + bodyContent []byte +} + +type fakePoster struct { + posts []post +} + +func newFakePoster() *fakePoster { + return &fakePoster{ + posts: make([]post, 0), + } +} + +func (poster *fakePoster) Post(url string, bodyType string, body io.Reader) (resp *http.Response, err error) { + bodyContent, _ := ioutil.ReadAll(body) + poster.posts = append(poster.posts, post{ + url: url, + bodyType: bodyType, + bodyContent: bodyContent, + }) + return nil, nil +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/forwarding_reporter.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/forwarding_reporter.go new file mode 100644 index 0000000000000..025eb5064483b --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/forwarding_reporter.go @@ -0,0 +1,90 @@ +package remote + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/types" +) + +//An interface to net/http's client to allow the injection of fakes under test +type Poster interface { + Post(url string, bodyType string, body io.Reader) (resp *http.Response, err error) +} + +/* +The ForwardingReporter is a Ginkgo reporter that forwards information to +a Ginkgo remote server. + +When streaming parallel test output, this repoter is automatically installed by Ginkgo. + +This is accomplished by passing in the GINKGO_REMOTE_REPORTING_SERVER environment variable to `go test`, the Ginkgo test runner +detects this environment variable (which should contain the host of the server) and automatically installs a ForwardingReporter +in place of Ginkgo's DefaultReporter. 
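For illustration only (the host below is an example address, matching the one used in the
tests): a node launched with

   GINKGO_REMOTE_REPORTING_SERVER=http://127.0.0.1:7788 go test

would have its reporter traffic forwarded to the CLI's aggregation server at that address.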
+*/ + +type ForwardingReporter struct { + serverHost string + poster Poster + outputInterceptor OutputInterceptor +} + +func NewForwardingReporter(serverHost string, poster Poster, outputInterceptor OutputInterceptor) *ForwardingReporter { + return &ForwardingReporter{ + serverHost: serverHost, + poster: poster, + outputInterceptor: outputInterceptor, + } +} + +func (reporter *ForwardingReporter) post(path string, data interface{}) { + encoded, _ := json.Marshal(data) + buffer := bytes.NewBuffer(encoded) + reporter.poster.Post(reporter.serverHost+path, "application/json", buffer) +} + +func (reporter *ForwardingReporter) SpecSuiteWillBegin(conf config.GinkgoConfigType, summary *types.SuiteSummary) { + data := struct { + Config config.GinkgoConfigType `json:"config"` + Summary *types.SuiteSummary `json:"suite-summary"` + }{ + conf, + summary, + } + + reporter.outputInterceptor.StartInterceptingOutput() + reporter.post("/SpecSuiteWillBegin", data) +} + +func (reporter *ForwardingReporter) BeforeSuiteDidRun(setupSummary *types.SetupSummary) { + output, _ := reporter.outputInterceptor.StopInterceptingAndReturnOutput() + reporter.outputInterceptor.StartInterceptingOutput() + setupSummary.CapturedOutput = output + reporter.post("/BeforeSuiteDidRun", setupSummary) +} + +func (reporter *ForwardingReporter) SpecWillRun(specSummary *types.SpecSummary) { + reporter.post("/SpecWillRun", specSummary) +} + +func (reporter *ForwardingReporter) SpecDidComplete(specSummary *types.SpecSummary) { + output, _ := reporter.outputInterceptor.StopInterceptingAndReturnOutput() + reporter.outputInterceptor.StartInterceptingOutput() + specSummary.CapturedOutput = output + reporter.post("/SpecDidComplete", specSummary) +} + +func (reporter *ForwardingReporter) AfterSuiteDidRun(setupSummary *types.SetupSummary) { + output, _ := reporter.outputInterceptor.StopInterceptingAndReturnOutput() + reporter.outputInterceptor.StartInterceptingOutput() + setupSummary.CapturedOutput = output + reporter.post("/AfterSuiteDidRun", setupSummary) +} + +func (reporter *ForwardingReporter) SpecSuiteDidEnd(summary *types.SuiteSummary) { + reporter.outputInterceptor.StopInterceptingAndReturnOutput() + reporter.post("/SpecSuiteDidEnd", summary) +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/forwarding_reporter_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/forwarding_reporter_test.go new file mode 100644 index 0000000000000..e5f3b1e3071cf --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/forwarding_reporter_test.go @@ -0,0 +1,180 @@ +package remote_test + +import ( + "encoding/json" + . "github.com/onsi/ginkgo" + "github.com/onsi/ginkgo/config" + . "github.com/onsi/ginkgo/internal/remote" + "github.com/onsi/ginkgo/types" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("ForwardingReporter", func() { + var ( + reporter *ForwardingReporter + interceptor *fakeOutputInterceptor + poster *fakePoster + suiteSummary *types.SuiteSummary + specSummary *types.SpecSummary + setupSummary *types.SetupSummary + serverHost string + ) + + BeforeEach(func() { + serverHost = "http://127.0.0.1:7788" + + poster = newFakePoster() + + interceptor = &fakeOutputInterceptor{ + InterceptedOutput: "The intercepted output!", + } + + reporter = NewForwardingReporter(serverHost, poster, interceptor) + + suiteSummary = &types.SuiteSummary{ + SuiteDescription: "My Test Suite", + } + + setupSummary = &types.SetupSummary{ + State: types.SpecStatePassed, + } + + specSummary = &types.SpecSummary{ + ComponentTexts: []string{"My", "Spec"}, + State: types.SpecStatePassed, + } + }) + + Context("When a suite begins", func() { + BeforeEach(func() { + reporter.SpecSuiteWillBegin(config.GinkgoConfig, suiteSummary) + }) + + It("should start intercepting output", func() { + Ω(interceptor.DidStartInterceptingOutput).Should(BeTrue()) + }) + + It("should POST the SuiteSummary and Ginkgo Config to the Ginkgo server", func() { + Ω(poster.posts).Should(HaveLen(1)) + Ω(poster.posts[0].url).Should(Equal("http://127.0.0.1:7788/SpecSuiteWillBegin")) + Ω(poster.posts[0].bodyType).Should(Equal("application/json")) + + var sentData struct { + SentConfig config.GinkgoConfigType `json:"config"` + SentSuiteSummary *types.SuiteSummary `json:"suite-summary"` + } + + err := json.Unmarshal(poster.posts[0].bodyContent, &sentData) + Ω(err).ShouldNot(HaveOccurred()) + + Ω(sentData.SentConfig).Should(Equal(config.GinkgoConfig)) + Ω(sentData.SentSuiteSummary).Should(Equal(suiteSummary)) + }) + }) + + Context("when a BeforeSuite completes", func() { + BeforeEach(func() { + reporter.BeforeSuiteDidRun(setupSummary) + }) + + It("should stop, then start intercepting output", func() { + Ω(interceptor.DidStopInterceptingOutput).Should(BeTrue()) + Ω(interceptor.DidStartInterceptingOutput).Should(BeTrue()) + }) + + It("should POST the SetupSummary to the Ginkgo server", func() { + Ω(poster.posts).Should(HaveLen(1)) + Ω(poster.posts[0].url).Should(Equal("http://127.0.0.1:7788/BeforeSuiteDidRun")) + Ω(poster.posts[0].bodyType).Should(Equal("application/json")) + + var summary *types.SetupSummary + err := json.Unmarshal(poster.posts[0].bodyContent, &summary) + Ω(err).ShouldNot(HaveOccurred()) + setupSummary.CapturedOutput = interceptor.InterceptedOutput + Ω(summary).Should(Equal(setupSummary)) + }) + }) + + Context("when an AfterSuite completes", func() { + BeforeEach(func() { + reporter.AfterSuiteDidRun(setupSummary) + }) + + It("should stop, then start intercepting output", func() { + Ω(interceptor.DidStopInterceptingOutput).Should(BeTrue()) + Ω(interceptor.DidStartInterceptingOutput).Should(BeTrue()) + }) + + It("should POST the SetupSummary to the Ginkgo server", func() { + Ω(poster.posts).Should(HaveLen(1)) + Ω(poster.posts[0].url).Should(Equal("http://127.0.0.1:7788/AfterSuiteDidRun")) + Ω(poster.posts[0].bodyType).Should(Equal("application/json")) + + var summary *types.SetupSummary + err := json.Unmarshal(poster.posts[0].bodyContent, &summary) + Ω(err).ShouldNot(HaveOccurred()) + setupSummary.CapturedOutput = interceptor.InterceptedOutput + Ω(summary).Should(Equal(setupSummary)) + }) + }) + + Context("When a spec will run", func() { + BeforeEach(func() { + reporter.SpecWillRun(specSummary) + }) + + It("should POST the SpecSummary to the Ginkgo server", func() { + 
Ω(poster.posts).Should(HaveLen(1)) + Ω(poster.posts[0].url).Should(Equal("http://127.0.0.1:7788/SpecWillRun")) + Ω(poster.posts[0].bodyType).Should(Equal("application/json")) + + var summary *types.SpecSummary + err := json.Unmarshal(poster.posts[0].bodyContent, &summary) + Ω(err).ShouldNot(HaveOccurred()) + Ω(summary).Should(Equal(specSummary)) + }) + + Context("When a spec completes", func() { + BeforeEach(func() { + specSummary.State = types.SpecStatePanicked + reporter.SpecDidComplete(specSummary) + }) + + It("should POST the SpecSummary to the Ginkgo server and include any intercepted output", func() { + Ω(poster.posts).Should(HaveLen(2)) + Ω(poster.posts[1].url).Should(Equal("http://127.0.0.1:7788/SpecDidComplete")) + Ω(poster.posts[1].bodyType).Should(Equal("application/json")) + + var summary *types.SpecSummary + err := json.Unmarshal(poster.posts[1].bodyContent, &summary) + Ω(err).ShouldNot(HaveOccurred()) + specSummary.CapturedOutput = interceptor.InterceptedOutput + Ω(summary).Should(Equal(specSummary)) + }) + + It("should stop, then start intercepting output", func() { + Ω(interceptor.DidStopInterceptingOutput).Should(BeTrue()) + Ω(interceptor.DidStartInterceptingOutput).Should(BeTrue()) + }) + }) + }) + + Context("When a suite ends", func() { + BeforeEach(func() { + reporter.SpecSuiteDidEnd(suiteSummary) + }) + + It("should POST the SuiteSummary to the Ginkgo server", func() { + Ω(poster.posts).Should(HaveLen(1)) + Ω(poster.posts[0].url).Should(Equal("http://127.0.0.1:7788/SpecSuiteDidEnd")) + Ω(poster.posts[0].bodyType).Should(Equal("application/json")) + + var summary *types.SuiteSummary + + err := json.Unmarshal(poster.posts[0].bodyContent, &summary) + Ω(err).ShouldNot(HaveOccurred()) + + Ω(summary).Should(Equal(suiteSummary)) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/output_interceptor.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/output_interceptor.go new file mode 100644 index 0000000000000..093f4513c0be3 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/output_interceptor.go @@ -0,0 +1,10 @@ +package remote + +/* +The OutputInterceptor is used by the ForwardingReporter to +intercept and capture all stdin and stderr output during a test run. +*/ +type OutputInterceptor interface { + StartInterceptingOutput() error + StopInterceptingAndReturnOutput() (string, error) +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/output_interceptor_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/output_interceptor_test.go new file mode 100644 index 0000000000000..014fdf1acdab2 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/output_interceptor_test.go @@ -0,0 +1,64 @@ +package remote_test + +import ( + "fmt" + . "github.com/onsi/ginkgo" + . "github.com/onsi/ginkgo/internal/remote" + . 
"github.com/onsi/gomega" + "os" +) + +var _ = Describe("OutputInterceptor", func() { + var interceptor OutputInterceptor + + BeforeEach(func() { + interceptor = NewOutputInterceptor() + }) + + It("should capture all stdout/stderr output", func() { + err := interceptor.StartInterceptingOutput() + Ω(err).ShouldNot(HaveOccurred()) + + fmt.Fprint(os.Stdout, "STDOUT") + fmt.Fprint(os.Stderr, "STDERR") + print("PRINT") + + output, err := interceptor.StopInterceptingAndReturnOutput() + + Ω(output).Should(Equal("STDOUTSTDERRPRINT")) + Ω(err).ShouldNot(HaveOccurred()) + }) + + It("should error if told to intercept output twice", func() { + err := interceptor.StartInterceptingOutput() + Ω(err).ShouldNot(HaveOccurred()) + + print("A") + + err = interceptor.StartInterceptingOutput() + Ω(err).Should(HaveOccurred()) + + print("B") + + output, err := interceptor.StopInterceptingAndReturnOutput() + + Ω(output).Should(Equal("AB")) + Ω(err).ShouldNot(HaveOccurred()) + }) + + It("should allow multiple interception sessions", func() { + err := interceptor.StartInterceptingOutput() + Ω(err).ShouldNot(HaveOccurred()) + print("A") + output, err := interceptor.StopInterceptingAndReturnOutput() + Ω(output).Should(Equal("A")) + Ω(err).ShouldNot(HaveOccurred()) + + err = interceptor.StartInterceptingOutput() + Ω(err).ShouldNot(HaveOccurred()) + print("B") + output, err = interceptor.StopInterceptingAndReturnOutput() + Ω(output).Should(Equal("B")) + Ω(err).ShouldNot(HaveOccurred()) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/output_interceptor_unix.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/output_interceptor_unix.go new file mode 100644 index 0000000000000..8304cf5a97bd1 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/output_interceptor_unix.go @@ -0,0 +1,76 @@ +// +build freebsd openbsd netbsd dragonfly darwin linux + +package remote + +import ( + "errors" + "io/ioutil" + "os" + "syscall" +) + +func NewOutputInterceptor() OutputInterceptor { + return &outputInterceptor{} +} + +type outputInterceptor struct { + stdoutPlaceholder *os.File + stderrPlaceholder *os.File + redirectFile *os.File + intercepting bool +} + +func (interceptor *outputInterceptor) StartInterceptingOutput() error { + if interceptor.intercepting { + return errors.New("Already intercepting output!") + } + interceptor.intercepting = true + + var err error + + interceptor.redirectFile, err = ioutil.TempFile("", "ginkgo-output") + if err != nil { + return err + } + + interceptor.stdoutPlaceholder, err = ioutil.TempFile("", "ginkgo-output") + if err != nil { + return err + } + + interceptor.stderrPlaceholder, err = ioutil.TempFile("", "ginkgo-output") + if err != nil { + return err + } + + syscall.Dup2(1, int(interceptor.stdoutPlaceholder.Fd())) + syscall.Dup2(2, int(interceptor.stderrPlaceholder.Fd())) + + syscall.Dup2(int(interceptor.redirectFile.Fd()), 1) + syscall.Dup2(int(interceptor.redirectFile.Fd()), 2) + + return nil +} + +func (interceptor *outputInterceptor) StopInterceptingAndReturnOutput() (string, error) { + if !interceptor.intercepting { + return "", errors.New("Not intercepting output!") + } + + syscall.Dup2(int(interceptor.stdoutPlaceholder.Fd()), 1) + syscall.Dup2(int(interceptor.stderrPlaceholder.Fd()), 2) + + for _, f := range []*os.File{interceptor.redirectFile, interceptor.stdoutPlaceholder, interceptor.stderrPlaceholder} { + f.Close() + } + + output, err := ioutil.ReadFile(interceptor.redirectFile.Name()) + + for _, f := range 
[]*os.File{interceptor.redirectFile, interceptor.stdoutPlaceholder, interceptor.stderrPlaceholder} { + os.Remove(f.Name()) + } + + interceptor.intercepting = false + + return string(output), err +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/output_interceptor_win.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/output_interceptor_win.go new file mode 100644 index 0000000000000..c8f97d97f07b7 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/output_interceptor_win.go @@ -0,0 +1,33 @@ +// +build windows + +package remote + +import ( + "errors" +) + +func NewOutputInterceptor() OutputInterceptor { + return &outputInterceptor{} +} + +type outputInterceptor struct { + intercepting bool +} + +func (interceptor *outputInterceptor) StartInterceptingOutput() error { + if interceptor.intercepting { + return errors.New("Already intercepting output!") + } + interceptor.intercepting = true + + // not working on windows... + + return nil +} + +func (interceptor *outputInterceptor) StopInterceptingAndReturnOutput() (string, error) { + // not working on windows... + interceptor.intercepting = false + + return "", nil +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/remote_suite_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/remote_suite_test.go new file mode 100644 index 0000000000000..e6b4e9f32ce0e --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/remote_suite_test.go @@ -0,0 +1,13 @@ +package remote_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + "testing" +) + +func TestRemote(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Remote Spec Forwarding Suite") +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/server.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/server.go new file mode 100644 index 0000000000000..b55c681bcff30 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/server.go @@ -0,0 +1,204 @@ +/* + +The remote package provides the pieces to allow Ginkgo test suites to report to remote listeners. +This is used, primarily, to enable streaming parallel test output but has, in principal, broader applications (e.g. streaming test output to a browser). + +*/ + +package remote + +import ( + "encoding/json" + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/reporters" + "github.com/onsi/ginkgo/types" + "io/ioutil" + "net" + "net/http" + "sync" +) + +/* +Server spins up on an automatically selected port and listens for communication from the forwarding reporter. +It then forwards that communication to attached reporters. +*/ +type Server struct { + listener net.Listener + reporters []reporters.Reporter + alives []func() bool + lock *sync.Mutex + beforeSuiteData types.RemoteBeforeSuiteData + parallelTotal int +} + +//Create a new server, automatically selecting a port +func NewServer(parallelTotal int) (*Server, error) { + listener, err := net.Listen("tcp", "127.0.0.1:0") + if err != nil { + return nil, err + } + return &Server{ + listener: listener, + lock: &sync.Mutex{}, + alives: make([]func() bool, parallelTotal), + beforeSuiteData: types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStatePending}, + parallelTotal: parallelTotal, + }, nil +} + +//Start the server. 
You don't need to `go s.Start()`, just `s.Start()` +func (server *Server) Start() { + httpServer := &http.Server{} + mux := http.NewServeMux() + httpServer.Handler = mux + + //streaming endpoints + mux.HandleFunc("/SpecSuiteWillBegin", server.specSuiteWillBegin) + mux.HandleFunc("/BeforeSuiteDidRun", server.beforeSuiteDidRun) + mux.HandleFunc("/AfterSuiteDidRun", server.afterSuiteDidRun) + mux.HandleFunc("/SpecWillRun", server.specWillRun) + mux.HandleFunc("/SpecDidComplete", server.specDidComplete) + mux.HandleFunc("/SpecSuiteDidEnd", server.specSuiteDidEnd) + + //synchronization endpoints + mux.HandleFunc("/BeforeSuiteState", server.handleBeforeSuiteState) + mux.HandleFunc("/RemoteAfterSuiteData", server.handleRemoteAfterSuiteData) + + go httpServer.Serve(server.listener) +} + +//Stop the server +func (server *Server) Close() { + server.listener.Close() +} + +//The address the server can be reached it. Pass this into the `ForwardingReporter`. +func (server *Server) Address() string { + return "http://" + server.listener.Addr().String() +} + +// +// Streaming Endpoints +// + +//The server will forward all received messages to Ginkgo reporters registered with `RegisterReporters` +func (server *Server) readAll(request *http.Request) []byte { + defer request.Body.Close() + body, _ := ioutil.ReadAll(request.Body) + return body +} + +func (server *Server) RegisterReporters(reporters ...reporters.Reporter) { + server.reporters = reporters +} + +func (server *Server) specSuiteWillBegin(writer http.ResponseWriter, request *http.Request) { + body := server.readAll(request) + + var data struct { + Config config.GinkgoConfigType `json:"config"` + Summary *types.SuiteSummary `json:"suite-summary"` + } + + json.Unmarshal(body, &data) + + for _, reporter := range server.reporters { + reporter.SpecSuiteWillBegin(data.Config, data.Summary) + } +} + +func (server *Server) beforeSuiteDidRun(writer http.ResponseWriter, request *http.Request) { + body := server.readAll(request) + var setupSummary *types.SetupSummary + json.Unmarshal(body, &setupSummary) + + for _, reporter := range server.reporters { + reporter.BeforeSuiteDidRun(setupSummary) + } +} + +func (server *Server) afterSuiteDidRun(writer http.ResponseWriter, request *http.Request) { + body := server.readAll(request) + var setupSummary *types.SetupSummary + json.Unmarshal(body, &setupSummary) + + for _, reporter := range server.reporters { + reporter.AfterSuiteDidRun(setupSummary) + } +} + +func (server *Server) specWillRun(writer http.ResponseWriter, request *http.Request) { + body := server.readAll(request) + var specSummary *types.SpecSummary + json.Unmarshal(body, &specSummary) + + for _, reporter := range server.reporters { + reporter.SpecWillRun(specSummary) + } +} + +func (server *Server) specDidComplete(writer http.ResponseWriter, request *http.Request) { + body := server.readAll(request) + var specSummary *types.SpecSummary + json.Unmarshal(body, &specSummary) + + for _, reporter := range server.reporters { + reporter.SpecDidComplete(specSummary) + } +} + +func (server *Server) specSuiteDidEnd(writer http.ResponseWriter, request *http.Request) { + body := server.readAll(request) + var suiteSummary *types.SuiteSummary + json.Unmarshal(body, &suiteSummary) + + for _, reporter := range server.reporters { + reporter.SpecSuiteDidEnd(suiteSummary) + } +} + +// +// Synchronization Endpoints +// + +func (server *Server) RegisterAlive(node int, alive func() bool) { + server.lock.Lock() + defer server.lock.Unlock() + server.alives[node-1] = alive +} 
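For orientation, here is a minimal sketch of how the synchronization side of `Server` is driven, mirroring the calls exercised in `server_test.go` below (`NewServer`, `Start`, `RegisterAlive`, `Address`, and a plain HTTP GET of `/RemoteAfterSuiteData`). It is illustrative only, and assumes it is compiled from within the ginkgo source tree, since `internal/remote` cannot be imported from outside the package tree.

```golang
package main

import (
	"encoding/json"
	"fmt"
	"net/http"

	"github.com/onsi/ginkgo/internal/remote"
	"github.com/onsi/ginkgo/types"
)

func main() {
	// Spin up the server for a suite split across two parallel nodes.
	server, err := remote.NewServer(2)
	if err != nil {
		panic(err)
	}
	server.Start()
	defer server.Close()

	// Node 2 registers a liveness callback; returning false means
	// "this node has finished running its specs".
	server.RegisterAlive(2, func() bool { return false })

	// Any node can now ask whether the AfterSuite may run.
	resp, err := http.Get(server.Address() + "/RemoteAfterSuiteData")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var afterSuiteData types.RemoteAfterSuiteData
	if err := json.NewDecoder(resp.Body).Decode(&afterSuiteData); err != nil {
		panic(err)
	}
	fmt.Println("AfterSuite can run:", afterSuiteData.CanRun) // true: node 2 reported done
}
```

The `/BeforeSuiteState` endpoint follows the same POST-then-poll pattern, which is how node 1 hands its `BeforeSuite` result to the other parallel nodes.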
+ +func (server *Server) nodeIsAlive(node int) bool { + server.lock.Lock() + defer server.lock.Unlock() + alive := server.alives[node-1] + if alive == nil { + return true + } + return alive() +} + +func (server *Server) handleBeforeSuiteState(writer http.ResponseWriter, request *http.Request) { + if request.Method == "POST" { + dec := json.NewDecoder(request.Body) + dec.Decode(&(server.beforeSuiteData)) + } else { + beforeSuiteData := server.beforeSuiteData + if beforeSuiteData.State == types.RemoteBeforeSuiteStatePending && !server.nodeIsAlive(1) { + beforeSuiteData.State = types.RemoteBeforeSuiteStateDisappeared + } + enc := json.NewEncoder(writer) + enc.Encode(beforeSuiteData) + } +} + +func (server *Server) handleRemoteAfterSuiteData(writer http.ResponseWriter, request *http.Request) { + afterSuiteData := types.RemoteAfterSuiteData{ + CanRun: true, + } + for i := 2; i <= server.parallelTotal; i++ { + afterSuiteData.CanRun = afterSuiteData.CanRun && !server.nodeIsAlive(i) + } + + enc := json.NewEncoder(writer) + enc.Encode(afterSuiteData) +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/server_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/server_test.go new file mode 100644 index 0000000000000..eb2eefebe02c4 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/remote/server_test.go @@ -0,0 +1,269 @@ +package remote_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/ginkgo/internal/remote" + . "github.com/onsi/gomega" + + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/reporters" + "github.com/onsi/ginkgo/types" + + "bytes" + "encoding/json" + "net/http" +) + +var _ = Describe("Server", func() { + var ( + server *Server + ) + + BeforeEach(func() { + var err error + server, err = NewServer(3) + Ω(err).ShouldNot(HaveOccurred()) + + server.Start() + }) + + AfterEach(func() { + server.Close() + }) + + Describe("Streaming endpoints", func() { + var ( + reporterA, reporterB *reporters.FakeReporter + forwardingReporter *ForwardingReporter + + suiteSummary *types.SuiteSummary + setupSummary *types.SetupSummary + specSummary *types.SpecSummary + ) + + BeforeEach(func() { + reporterA = reporters.NewFakeReporter() + reporterB = reporters.NewFakeReporter() + + server.RegisterReporters(reporterA, reporterB) + + forwardingReporter = NewForwardingReporter(server.Address(), &http.Client{}, &fakeOutputInterceptor{}) + + suiteSummary = &types.SuiteSummary{ + SuiteDescription: "My Test Suite", + } + + setupSummary = &types.SetupSummary{ + State: types.SpecStatePassed, + } + + specSummary = &types.SpecSummary{ + ComponentTexts: []string{"My", "Spec"}, + State: types.SpecStatePassed, + } + }) + + It("should make its address available", func() { + Ω(server.Address()).Should(MatchRegexp(`http://127.0.0.1:\d{2,}`)) + }) + + Describe("/SpecSuiteWillBegin", func() { + It("should decode and forward the Ginkgo config and suite summary", func(done Done) { + forwardingReporter.SpecSuiteWillBegin(config.GinkgoConfig, suiteSummary) + Ω(reporterA.Config).Should(Equal(config.GinkgoConfig)) + Ω(reporterB.Config).Should(Equal(config.GinkgoConfig)) + Ω(reporterA.BeginSummary).Should(Equal(suiteSummary)) + Ω(reporterB.BeginSummary).Should(Equal(suiteSummary)) + close(done) + }) + }) + + Describe("/BeforeSuiteDidRun", func() { + It("should decode and forward the setup summary", func() { + forwardingReporter.BeforeSuiteDidRun(setupSummary) + Ω(reporterA.BeforeSuiteSummary).Should(Equal(setupSummary)) + 
Ω(reporterB.BeforeSuiteSummary).Should(Equal(setupSummary)) + }) + }) + + Describe("/AfterSuiteDidRun", func() { + It("should decode and forward the setup summary", func() { + forwardingReporter.AfterSuiteDidRun(setupSummary) + Ω(reporterA.AfterSuiteSummary).Should(Equal(setupSummary)) + Ω(reporterB.AfterSuiteSummary).Should(Equal(setupSummary)) + }) + }) + + Describe("/SpecWillRun", func() { + It("should decode and forward the spec summary", func(done Done) { + forwardingReporter.SpecWillRun(specSummary) + Ω(reporterA.SpecWillRunSummaries[0]).Should(Equal(specSummary)) + Ω(reporterB.SpecWillRunSummaries[0]).Should(Equal(specSummary)) + close(done) + }) + }) + + Describe("/SpecDidComplete", func() { + It("should decode and forward the spec summary", func(done Done) { + forwardingReporter.SpecDidComplete(specSummary) + Ω(reporterA.SpecSummaries[0]).Should(Equal(specSummary)) + Ω(reporterB.SpecSummaries[0]).Should(Equal(specSummary)) + close(done) + }) + }) + + Describe("/SpecSuiteDidEnd", func() { + It("should decode and forward the suite summary", func(done Done) { + forwardingReporter.SpecSuiteDidEnd(suiteSummary) + Ω(reporterA.EndSummary).Should(Equal(suiteSummary)) + Ω(reporterB.EndSummary).Should(Equal(suiteSummary)) + close(done) + }) + }) + }) + + Describe("Synchronization endpoints", func() { + Describe("GETting and POSTing BeforeSuiteState", func() { + getBeforeSuite := func() types.RemoteBeforeSuiteData { + resp, err := http.Get(server.Address() + "/BeforeSuiteState") + Ω(err).ShouldNot(HaveOccurred()) + Ω(resp.StatusCode).Should(Equal(http.StatusOK)) + + r := types.RemoteBeforeSuiteData{} + decoder := json.NewDecoder(resp.Body) + err = decoder.Decode(&r) + Ω(err).ShouldNot(HaveOccurred()) + + return r + } + + postBeforeSuite := func(r types.RemoteBeforeSuiteData) { + resp, err := http.Post(server.Address()+"/BeforeSuiteState", "application/json", bytes.NewReader(r.ToJSON())) + Ω(err).ShouldNot(HaveOccurred()) + Ω(resp.StatusCode).Should(Equal(http.StatusOK)) + } + + Context("when the first node's Alive has not been registered yet", func() { + It("should return pending", func() { + state := getBeforeSuite() + Ω(state).Should(Equal(types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStatePending})) + + state = getBeforeSuite() + Ω(state).Should(Equal(types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStatePending})) + }) + }) + + Context("when the first node is Alive but has not responded yet", func() { + BeforeEach(func() { + server.RegisterAlive(1, func() bool { + return true + }) + }) + + It("should return pending", func() { + state := getBeforeSuite() + Ω(state).Should(Equal(types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStatePending})) + + state = getBeforeSuite() + Ω(state).Should(Equal(types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStatePending})) + }) + }) + + Context("when the first node has responded", func() { + var state types.RemoteBeforeSuiteData + BeforeEach(func() { + server.RegisterAlive(1, func() bool { + return false + }) + + state = types.RemoteBeforeSuiteData{ + Data: []byte("my data"), + State: types.RemoteBeforeSuiteStatePassed, + } + postBeforeSuite(state) + }) + + It("should return the passed in state", func() { + returnedState := getBeforeSuite() + Ω(returnedState).Should(Equal(state)) + }) + }) + + Context("when the first node is no longer Alive and has not responded yet", func() { + BeforeEach(func() { + server.RegisterAlive(1, func() bool { + return false + }) + }) + + It("should return disappeared", func() { + state := 
getBeforeSuite() + Ω(state).Should(Equal(types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStateDisappeared})) + + state = getBeforeSuite() + Ω(state).Should(Equal(types.RemoteBeforeSuiteData{nil, types.RemoteBeforeSuiteStateDisappeared})) + }) + }) + }) + + Describe("GETting RemoteAfterSuiteData", func() { + getRemoteAfterSuiteData := func() bool { + resp, err := http.Get(server.Address() + "/RemoteAfterSuiteData") + Ω(err).ShouldNot(HaveOccurred()) + Ω(resp.StatusCode).Should(Equal(http.StatusOK)) + + a := types.RemoteAfterSuiteData{} + decoder := json.NewDecoder(resp.Body) + err = decoder.Decode(&a) + Ω(err).ShouldNot(HaveOccurred()) + + return a.CanRun + } + + Context("when there are unregistered nodes", func() { + BeforeEach(func() { + server.RegisterAlive(2, func() bool { + return false + }) + }) + + It("should return false", func() { + Ω(getRemoteAfterSuiteData()).Should(BeFalse()) + }) + }) + + Context("when all none-node-1 nodes are still running", func() { + BeforeEach(func() { + server.RegisterAlive(2, func() bool { + return true + }) + + server.RegisterAlive(3, func() bool { + return false + }) + }) + + It("should return false", func() { + Ω(getRemoteAfterSuiteData()).Should(BeFalse()) + }) + }) + + Context("when all none-1 nodes are done", func() { + BeforeEach(func() { + server.RegisterAlive(2, func() bool { + return false + }) + + server.RegisterAlive(3, func() bool { + return false + }) + }) + + It("should return true", func() { + Ω(getRemoteAfterSuiteData()).Should(BeTrue()) + }) + + }) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/index_computer.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/index_computer.go new file mode 100644 index 0000000000000..5a67fc7b74067 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/index_computer.go @@ -0,0 +1,55 @@ +package spec + +func ParallelizedIndexRange(length int, parallelTotal int, parallelNode int) (startIndex int, count int) { + if length == 0 { + return 0, 0 + } + + // We have more nodes than tests. Trivial case. 
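	// For example, with 5 tests on 7 nodes: nodes 1 through 5 each run the single
	// test at index parallelNode-1, and nodes 6 and 7 are handed an empty range.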
+ if parallelTotal >= length { + if parallelNode > length { + return 0, 0 + } else { + return parallelNode - 1, 1 + } + } + + // This is the minimum amount of tests that a node will be required to run + minTestsPerNode := length / parallelTotal + + // This is the maximum amount of tests that a node will be required to run + // The algorithm guarantees that this would be equal to at least the minimum amount + // and at most one more + maxTestsPerNode := minTestsPerNode + if length%parallelTotal != 0 { + maxTestsPerNode++ + } + + // Number of nodes that will have to run the maximum amount of tests per node + numMaxLoadNodes := length % parallelTotal + + // Number of nodes that precede the current node and will have to run the maximum amount of tests per node + var numPrecedingMaxLoadNodes int + if parallelNode > numMaxLoadNodes { + numPrecedingMaxLoadNodes = numMaxLoadNodes + } else { + numPrecedingMaxLoadNodes = parallelNode - 1 + } + + // Number of nodes that precede the current node and will have to run the minimum amount of tests per node + var numPrecedingMinLoadNodes int + if parallelNode <= numMaxLoadNodes { + numPrecedingMinLoadNodes = 0 + } else { + numPrecedingMinLoadNodes = parallelNode - numMaxLoadNodes - 1 + } + + // Evaluate the test start index and number of tests to run + startIndex = numPrecedingMaxLoadNodes*maxTestsPerNode + numPrecedingMinLoadNodes*minTestsPerNode + if parallelNode > numMaxLoadNodes { + count = minTestsPerNode + } else { + count = maxTestsPerNode + } + return +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/index_computer_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/index_computer_test.go new file mode 100644 index 0000000000000..0396d7bde0569 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/index_computer_test.go @@ -0,0 +1,149 @@ +package spec_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/ginkgo/internal/spec" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("ParallelizedIndexRange", func() { + var startIndex, count int + + It("should return the correct index range for 4 tests on 2 nodes", func() { + startIndex, count = ParallelizedIndexRange(4, 2, 1) + Ω(startIndex).Should(Equal(0)) + Ω(count).Should(Equal(2)) + + startIndex, count = ParallelizedIndexRange(4, 2, 2) + Ω(startIndex).Should(Equal(2)) + Ω(count).Should(Equal(2)) + }) + + It("should return the correct index range for 5 tests on 2 nodes", func() { + startIndex, count = ParallelizedIndexRange(5, 2, 1) + Ω(startIndex).Should(Equal(0)) + Ω(count).Should(Equal(3)) + + startIndex, count = ParallelizedIndexRange(5, 2, 2) + Ω(startIndex).Should(Equal(3)) + Ω(count).Should(Equal(2)) + }) + + It("should return the correct index range for 5 tests on 3 nodes", func() { + startIndex, count = ParallelizedIndexRange(5, 3, 1) + Ω(startIndex).Should(Equal(0)) + Ω(count).Should(Equal(2)) + + startIndex, count = ParallelizedIndexRange(5, 3, 2) + Ω(startIndex).Should(Equal(2)) + Ω(count).Should(Equal(2)) + + startIndex, count = ParallelizedIndexRange(5, 3, 3) + Ω(startIndex).Should(Equal(4)) + Ω(count).Should(Equal(1)) + }) + + It("should return the correct index range for 5 tests on 4 nodes", func() { + startIndex, count = ParallelizedIndexRange(5, 4, 1) + Ω(startIndex).Should(Equal(0)) + Ω(count).Should(Equal(2)) + + startIndex, count = ParallelizedIndexRange(5, 4, 2) + Ω(startIndex).Should(Equal(2)) + Ω(count).Should(Equal(1)) + + startIndex, count = ParallelizedIndexRange(5, 4, 3) + Ω(startIndex).Should(Equal(3)) + Ω(count).Should(Equal(1)) + + startIndex, count = ParallelizedIndexRange(5, 4, 4) + Ω(startIndex).Should(Equal(4)) + Ω(count).Should(Equal(1)) + }) + + It("should return the correct index range for 5 tests on 5 nodes", func() { + startIndex, count = ParallelizedIndexRange(5, 5, 1) + Ω(startIndex).Should(Equal(0)) + Ω(count).Should(Equal(1)) + + startIndex, count = ParallelizedIndexRange(5, 5, 2) + Ω(startIndex).Should(Equal(1)) + Ω(count).Should(Equal(1)) + + startIndex, count = ParallelizedIndexRange(5, 5, 3) + Ω(startIndex).Should(Equal(2)) + Ω(count).Should(Equal(1)) + + startIndex, count = ParallelizedIndexRange(5, 5, 4) + Ω(startIndex).Should(Equal(3)) + Ω(count).Should(Equal(1)) + + startIndex, count = ParallelizedIndexRange(5, 5, 5) + Ω(startIndex).Should(Equal(4)) + Ω(count).Should(Equal(1)) + }) + + It("should return the correct index range for 5 tests on 6 nodes", func() { + startIndex, count = ParallelizedIndexRange(5, 6, 1) + Ω(startIndex).Should(Equal(0)) + Ω(count).Should(Equal(1)) + + startIndex, count = ParallelizedIndexRange(5, 6, 2) + Ω(startIndex).Should(Equal(1)) + Ω(count).Should(Equal(1)) + + startIndex, count = ParallelizedIndexRange(5, 6, 3) + Ω(startIndex).Should(Equal(2)) + Ω(count).Should(Equal(1)) + + startIndex, count = ParallelizedIndexRange(5, 6, 4) + Ω(startIndex).Should(Equal(3)) + Ω(count).Should(Equal(1)) + + startIndex, count = ParallelizedIndexRange(5, 6, 5) + Ω(startIndex).Should(Equal(4)) + Ω(count).Should(Equal(1)) + + startIndex, count = ParallelizedIndexRange(5, 6, 6) + Ω(count).Should(Equal(0)) + }) + + It("should return the correct index range for 5 tests on 7 nodes", func() { + startIndex, count = ParallelizedIndexRange(5, 7, 6) + Ω(count).Should(Equal(0)) + + startIndex, count = ParallelizedIndexRange(5, 7, 7) + Ω(count).Should(Equal(0)) + }) + + It("should return the correct index range for 11 tests on 7 nodes", func() { + startIndex, count = ParallelizedIndexRange(11, 7, 1) + 
Ω(startIndex).Should(Equal(0)) + Ω(count).Should(Equal(2)) + + startIndex, count = ParallelizedIndexRange(11, 7, 2) + Ω(startIndex).Should(Equal(2)) + Ω(count).Should(Equal(2)) + + startIndex, count = ParallelizedIndexRange(11, 7, 3) + Ω(startIndex).Should(Equal(4)) + Ω(count).Should(Equal(2)) + + startIndex, count = ParallelizedIndexRange(11, 7, 4) + Ω(startIndex).Should(Equal(6)) + Ω(count).Should(Equal(2)) + + startIndex, count = ParallelizedIndexRange(11, 7, 5) + Ω(startIndex).Should(Equal(8)) + Ω(count).Should(Equal(1)) + + startIndex, count = ParallelizedIndexRange(11, 7, 6) + Ω(startIndex).Should(Equal(9)) + Ω(count).Should(Equal(1)) + + startIndex, count = ParallelizedIndexRange(11, 7, 7) + Ω(startIndex).Should(Equal(10)) + Ω(count).Should(Equal(1)) + }) + +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/spec.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/spec.go new file mode 100644 index 0000000000000..076fff1369843 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/spec.go @@ -0,0 +1,199 @@ +package spec + +import ( + "fmt" + "io" + "time" + + "github.com/onsi/ginkgo/internal/containernode" + "github.com/onsi/ginkgo/internal/leafnodes" + "github.com/onsi/ginkgo/types" +) + +type Spec struct { + subject leafnodes.SubjectNode + focused bool + announceProgress bool + + containers []*containernode.ContainerNode + + state types.SpecState + runTime time.Duration + failure types.SpecFailure +} + +func New(subject leafnodes.SubjectNode, containers []*containernode.ContainerNode, announceProgress bool) *Spec { + spec := &Spec{ + subject: subject, + containers: containers, + focused: subject.Flag() == types.FlagTypeFocused, + announceProgress: announceProgress, + } + + spec.processFlag(subject.Flag()) + for i := len(containers) - 1; i >= 0; i-- { + spec.processFlag(containers[i].Flag()) + } + + return spec +} + +func (spec *Spec) processFlag(flag types.FlagType) { + if flag == types.FlagTypeFocused { + spec.focused = true + } else if flag == types.FlagTypePending { + spec.state = types.SpecStatePending + } +} + +func (spec *Spec) Skip() { + spec.state = types.SpecStateSkipped +} + +func (spec *Spec) Failed() bool { + return spec.state == types.SpecStateFailed || spec.state == types.SpecStatePanicked || spec.state == types.SpecStateTimedOut +} + +func (spec *Spec) Passed() bool { + return spec.state == types.SpecStatePassed +} + +func (spec *Spec) Pending() bool { + return spec.state == types.SpecStatePending +} + +func (spec *Spec) Skipped() bool { + return spec.state == types.SpecStateSkipped +} + +func (spec *Spec) Focused() bool { + return spec.focused +} + +func (spec *Spec) IsMeasurement() bool { + return spec.subject.Type() == types.SpecComponentTypeMeasure +} + +func (spec *Spec) Summary(suiteID string) *types.SpecSummary { + componentTexts := make([]string, len(spec.containers)+1) + componentCodeLocations := make([]types.CodeLocation, len(spec.containers)+1) + + for i, container := range spec.containers { + componentTexts[i] = container.Text() + componentCodeLocations[i] = container.CodeLocation() + } + + componentTexts[len(spec.containers)] = spec.subject.Text() + componentCodeLocations[len(spec.containers)] = spec.subject.CodeLocation() + + return &types.SpecSummary{ + IsMeasurement: spec.IsMeasurement(), + NumberOfSamples: spec.subject.Samples(), + ComponentTexts: componentTexts, + ComponentCodeLocations: componentCodeLocations, + State: spec.state, + RunTime: spec.runTime, + Failure: spec.failure, + 
Measurements: spec.measurementsReport(), + SuiteID: suiteID, + } +} + +func (spec *Spec) ConcatenatedString() string { + s := "" + for _, container := range spec.containers { + s += container.Text() + " " + } + + return s + spec.subject.Text() +} + +func (spec *Spec) Run(writer io.Writer) { + startTime := time.Now() + defer func() { + spec.runTime = time.Since(startTime) + }() + + for sample := 0; sample < spec.subject.Samples(); sample++ { + spec.state, spec.failure = spec.runSample(sample, writer) + + if spec.state != types.SpecStatePassed { + return + } + } +} + +func (spec *Spec) runSample(sample int, writer io.Writer) (specState types.SpecState, specFailure types.SpecFailure) { + specState = types.SpecStatePassed + specFailure = types.SpecFailure{} + innerMostContainerIndexToUnwind := -1 + + defer func() { + for i := innerMostContainerIndexToUnwind; i >= 0; i-- { + container := spec.containers[i] + for _, afterEach := range container.SetupNodesOfType(types.SpecComponentTypeAfterEach) { + spec.announceSetupNode(writer, "AfterEach", container, afterEach) + afterEachState, afterEachFailure := afterEach.Run() + if afterEachState != types.SpecStatePassed && specState == types.SpecStatePassed { + specState = afterEachState + specFailure = afterEachFailure + } + } + } + }() + + for i, container := range spec.containers { + innerMostContainerIndexToUnwind = i + for _, beforeEach := range container.SetupNodesOfType(types.SpecComponentTypeBeforeEach) { + spec.announceSetupNode(writer, "BeforeEach", container, beforeEach) + specState, specFailure = beforeEach.Run() + if specState != types.SpecStatePassed { + return + } + } + } + + for _, container := range spec.containers { + for _, justBeforeEach := range container.SetupNodesOfType(types.SpecComponentTypeJustBeforeEach) { + spec.announceSetupNode(writer, "JustBeforeEach", container, justBeforeEach) + specState, specFailure = justBeforeEach.Run() + if specState != types.SpecStatePassed { + return + } + } + } + + spec.announceSubject(writer, spec.subject) + specState, specFailure = spec.subject.Run() + + return +} + +func (spec *Spec) announceSetupNode(writer io.Writer, nodeType string, container *containernode.ContainerNode, setupNode leafnodes.BasicNode) { + if spec.announceProgress { + s := fmt.Sprintf("[%s] %s\n %s\n", nodeType, container.Text(), setupNode.CodeLocation().String()) + writer.Write([]byte(s)) + } +} + +func (spec *Spec) announceSubject(writer io.Writer, subject leafnodes.SubjectNode) { + if spec.announceProgress { + nodeType := "" + switch subject.Type() { + case types.SpecComponentTypeIt: + nodeType = "It" + case types.SpecComponentTypeMeasure: + nodeType = "Measure" + } + s := fmt.Sprintf("[%s] %s\n %s\n", nodeType, subject.Text(), subject.CodeLocation().String()) + writer.Write([]byte(s)) + } +} + +func (spec *Spec) measurementsReport() map[string]*types.SpecMeasurement { + if !spec.IsMeasurement() || spec.Failed() { + return map[string]*types.SpecMeasurement{} + } + + return spec.subject.(*leafnodes.MeasureNode).MeasurementsReport() +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/spec_suite_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/spec_suite_test.go new file mode 100644 index 0000000000000..8681a7206841a --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/spec_suite_test.go @@ -0,0 +1,13 @@ +package spec_test + +import ( + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" + + "testing" +) + +func TestSpec(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Spec Suite") +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/spec_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/spec_test.go new file mode 100644 index 0000000000000..6d0f58cb04435 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/spec_test.go @@ -0,0 +1,626 @@ +package spec_test + +import ( + "time" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + "github.com/onsi/gomega/gbytes" + + . "github.com/onsi/ginkgo/internal/spec" + + "github.com/onsi/ginkgo/internal/codelocation" + "github.com/onsi/ginkgo/internal/containernode" + Failer "github.com/onsi/ginkgo/internal/failer" + "github.com/onsi/ginkgo/internal/leafnodes" + "github.com/onsi/ginkgo/types" +) + +var noneFlag = types.FlagTypeNone +var focusedFlag = types.FlagTypeFocused +var pendingFlag = types.FlagTypePending + +var _ = Describe("Spec", func() { + var ( + failer *Failer.Failer + codeLocation types.CodeLocation + nodesThatRan []string + spec *Spec + buffer *gbytes.Buffer + ) + + newBody := func(text string, fail bool) func() { + return func() { + nodesThatRan = append(nodesThatRan, text) + if fail { + failer.Fail(text, codeLocation) + } + } + } + + newIt := func(text string, flag types.FlagType, fail bool) *leafnodes.ItNode { + return leafnodes.NewItNode(text, newBody(text, fail), flag, codeLocation, 0, failer, 0) + } + + newItWithBody := func(text string, body interface{}) *leafnodes.ItNode { + return leafnodes.NewItNode(text, body, noneFlag, codeLocation, 0, failer, 0) + } + + newMeasure := func(text string, flag types.FlagType, fail bool, samples int) *leafnodes.MeasureNode { + return leafnodes.NewMeasureNode(text, func(Benchmarker) { + nodesThatRan = append(nodesThatRan, text) + if fail { + failer.Fail(text, codeLocation) + } + }, flag, codeLocation, samples, failer, 0) + } + + newBef := func(text string, fail bool) leafnodes.BasicNode { + return leafnodes.NewBeforeEachNode(newBody(text, fail), codeLocation, 0, failer, 0) + } + + newAft := func(text string, fail bool) leafnodes.BasicNode { + return leafnodes.NewAfterEachNode(newBody(text, fail), codeLocation, 0, failer, 0) + } + + newJusBef := func(text string, fail bool) leafnodes.BasicNode { + return leafnodes.NewJustBeforeEachNode(newBody(text, fail), codeLocation, 0, failer, 0) + } + + newContainer := func(text string, flag types.FlagType, setupNodes ...leafnodes.BasicNode) *containernode.ContainerNode { + c := containernode.New(text, flag, codeLocation) + for _, node := range setupNodes { + c.PushSetupNode(node) + } + return c + } + + containers := func(containers ...*containernode.ContainerNode) []*containernode.ContainerNode { + return containers + } + + BeforeEach(func() { + buffer = gbytes.NewBuffer() + failer = Failer.New() + codeLocation = codelocation.New(0) + nodesThatRan = []string{} + }) + + Describe("marking specs focused and pending", func() { + It("should satisfy various caes", func() { + cases := []struct { + ContainerFlags []types.FlagType + SubjectFlag types.FlagType + Pending bool + Focused bool + }{ + {[]types.FlagType{}, noneFlag, false, false}, + {[]types.FlagType{}, focusedFlag, false, true}, + {[]types.FlagType{}, pendingFlag, true, false}, + {[]types.FlagType{noneFlag}, noneFlag, false, false}, + {[]types.FlagType{focusedFlag}, noneFlag, false, true}, + {[]types.FlagType{pendingFlag}, noneFlag, true, false}, + {[]types.FlagType{noneFlag}, 
focusedFlag, false, true}, + {[]types.FlagType{focusedFlag}, focusedFlag, false, true}, + {[]types.FlagType{pendingFlag}, focusedFlag, true, true}, + {[]types.FlagType{noneFlag}, pendingFlag, true, false}, + {[]types.FlagType{focusedFlag}, pendingFlag, true, true}, + {[]types.FlagType{pendingFlag}, pendingFlag, true, false}, + {[]types.FlagType{focusedFlag, noneFlag}, noneFlag, false, true}, + {[]types.FlagType{noneFlag, focusedFlag}, noneFlag, false, true}, + {[]types.FlagType{pendingFlag, noneFlag}, noneFlag, true, false}, + {[]types.FlagType{noneFlag, pendingFlag}, noneFlag, true, false}, + {[]types.FlagType{focusedFlag, pendingFlag}, noneFlag, true, true}, + } + + for i, c := range cases { + subject := newIt("it node", c.SubjectFlag, false) + containers := []*containernode.ContainerNode{} + for _, flag := range c.ContainerFlags { + containers = append(containers, newContainer("container", flag)) + } + + spec := New(subject, containers, false) + Ω(spec.Pending()).Should(Equal(c.Pending), "Case %d: %#v", i, c) + Ω(spec.Focused()).Should(Equal(c.Focused), "Case %d: %#v", i, c) + + if c.Pending { + Ω(spec.Summary("").State).Should(Equal(types.SpecStatePending)) + } + } + }) + }) + + Describe("Skip", func() { + It("should be skipped", func() { + spec := New(newIt("it node", noneFlag, false), containers(newContainer("container", noneFlag)), false) + Ω(spec.Skipped()).Should(BeFalse()) + spec.Skip() + Ω(spec.Skipped()).Should(BeTrue()) + Ω(spec.Summary("").State).Should(Equal(types.SpecStateSkipped)) + }) + }) + + Describe("IsMeasurement", func() { + It("should be true if the subject is a measurement node", func() { + spec := New(newIt("it node", noneFlag, false), containers(newContainer("container", noneFlag)), false) + Ω(spec.IsMeasurement()).Should(BeFalse()) + Ω(spec.Summary("").IsMeasurement).Should(BeFalse()) + Ω(spec.Summary("").NumberOfSamples).Should(Equal(1)) + + spec = New(newMeasure("measure node", noneFlag, false, 10), containers(newContainer("container", noneFlag)), false) + Ω(spec.IsMeasurement()).Should(BeTrue()) + Ω(spec.Summary("").IsMeasurement).Should(BeTrue()) + Ω(spec.Summary("").NumberOfSamples).Should(Equal(10)) + }) + }) + + Describe("Passed", func() { + It("should pass when the subject passed", func() { + spec := New(newIt("it node", noneFlag, false), containers(), false) + spec.Run(buffer) + + Ω(spec.Passed()).Should(BeTrue()) + Ω(spec.Failed()).Should(BeFalse()) + Ω(spec.Summary("").State).Should(Equal(types.SpecStatePassed)) + Ω(spec.Summary("").Failure).Should(BeZero()) + }) + }) + + Describe("Failed", func() { + It("should be failed if the failure was panic", func() { + spec := New(newItWithBody("panicky it", func() { + panic("bam") + }), containers(), false) + spec.Run(buffer) + Ω(spec.Passed()).Should(BeFalse()) + Ω(spec.Failed()).Should(BeTrue()) + Ω(spec.Summary("").State).Should(Equal(types.SpecStatePanicked)) + Ω(spec.Summary("").Failure.Message).Should(Equal("Test Panicked")) + Ω(spec.Summary("").Failure.ForwardedPanic).Should(Equal("bam")) + }) + + It("should be failed if the failure was a timeout", func() { + spec := New(newItWithBody("sleepy it", func(done Done) {}), containers(), false) + spec.Run(buffer) + Ω(spec.Passed()).Should(BeFalse()) + Ω(spec.Failed()).Should(BeTrue()) + Ω(spec.Summary("").State).Should(Equal(types.SpecStateTimedOut)) + Ω(spec.Summary("").Failure.Message).Should(Equal("Timed out")) + }) + + It("should be failed if the failure was... 
a failure", func() { + spec := New(newItWithBody("failing it", func() { + failer.Fail("bam", codeLocation) + }), containers(), false) + spec.Run(buffer) + Ω(spec.Passed()).Should(BeFalse()) + Ω(spec.Failed()).Should(BeTrue()) + Ω(spec.Summary("").State).Should(Equal(types.SpecStateFailed)) + Ω(spec.Summary("").Failure.Message).Should(Equal("bam")) + }) + }) + + Describe("Concatenated string", func() { + It("should concatenate the texts of the containers and the subject", func() { + spec := New( + newIt("it node", noneFlag, false), + containers( + newContainer("outer container", noneFlag), + newContainer("inner container", noneFlag), + ), + false, + ) + + Ω(spec.ConcatenatedString()).Should(Equal("outer container inner container it node")) + }) + }) + + Describe("running it specs", func() { + Context("with just an it", func() { + Context("that succeeds", func() { + It("should run the it and report on its success", func() { + spec := New(newIt("it node", noneFlag, false), containers(), false) + spec.Run(buffer) + Ω(spec.Passed()).Should(BeTrue()) + Ω(spec.Failed()).Should(BeFalse()) + Ω(nodesThatRan).Should(Equal([]string{"it node"})) + }) + }) + + Context("that fails", func() { + It("should run the it and report on its success", func() { + spec := New(newIt("it node", noneFlag, true), containers(), false) + spec.Run(buffer) + Ω(spec.Passed()).Should(BeFalse()) + Ω(spec.Failed()).Should(BeTrue()) + Ω(spec.Summary("").Failure.Message).Should(Equal("it node")) + Ω(nodesThatRan).Should(Equal([]string{"it node"})) + }) + }) + }) + + Context("with a full set of setup nodes", func() { + var failingNodes map[string]bool + + BeforeEach(func() { + failingNodes = map[string]bool{} + }) + + JustBeforeEach(func() { + spec = New( + newIt("it node", noneFlag, failingNodes["it node"]), + containers( + newContainer("outer container", noneFlag, + newBef("outer bef A", failingNodes["outer bef A"]), + newBef("outer bef B", failingNodes["outer bef B"]), + newJusBef("outer jusbef A", failingNodes["outer jusbef A"]), + newJusBef("outer jusbef B", failingNodes["outer jusbef B"]), + newAft("outer aft A", failingNodes["outer aft A"]), + newAft("outer aft B", failingNodes["outer aft B"]), + ), + newContainer("inner container", noneFlag, + newBef("inner bef A", failingNodes["inner bef A"]), + newBef("inner bef B", failingNodes["inner bef B"]), + newJusBef("inner jusbef A", failingNodes["inner jusbef A"]), + newJusBef("inner jusbef B", failingNodes["inner jusbef B"]), + newAft("inner aft A", failingNodes["inner aft A"]), + newAft("inner aft B", failingNodes["inner aft B"]), + ), + ), + false, + ) + spec.Run(buffer) + }) + + Context("that all pass", func() { + It("should walk through the nodes in the correct order", func() { + Ω(spec.Passed()).Should(BeTrue()) + Ω(spec.Failed()).Should(BeFalse()) + Ω(nodesThatRan).Should(Equal([]string{ + "outer bef A", + "outer bef B", + "inner bef A", + "inner bef B", + "outer jusbef A", + "outer jusbef B", + "inner jusbef A", + "inner jusbef B", + "it node", + "inner aft A", + "inner aft B", + "outer aft A", + "outer aft B", + })) + }) + }) + + Context("when the subject fails", func() { + BeforeEach(func() { + failingNodes["it node"] = true + }) + + It("should run the afters", func() { + Ω(spec.Passed()).Should(BeFalse()) + Ω(spec.Failed()).Should(BeTrue()) + Ω(nodesThatRan).Should(Equal([]string{ + "outer bef A", + "outer bef B", + "inner bef A", + "inner bef B", + "outer jusbef A", + "outer jusbef B", + "inner jusbef A", + "inner jusbef B", + "it node", + "inner aft A", + "inner 
aft B", + "outer aft A", + "outer aft B", + })) + Ω(spec.Summary("").Failure.Message).Should(Equal("it node")) + }) + }) + + Context("when an inner before fails", func() { + BeforeEach(func() { + failingNodes["inner bef A"] = true + }) + + It("should not run any other befores, but it should run the subsequent afters", func() { + Ω(spec.Passed()).Should(BeFalse()) + Ω(spec.Failed()).Should(BeTrue()) + Ω(nodesThatRan).Should(Equal([]string{ + "outer bef A", + "outer bef B", + "inner bef A", + "inner aft A", + "inner aft B", + "outer aft A", + "outer aft B", + })) + Ω(spec.Summary("").Failure.Message).Should(Equal("inner bef A")) + }) + }) + + Context("when an outer before fails", func() { + BeforeEach(func() { + failingNodes["outer bef B"] = true + }) + + It("should not run any other befores, but it should run the subsequent afters", func() { + Ω(spec.Passed()).Should(BeFalse()) + Ω(spec.Failed()).Should(BeTrue()) + Ω(nodesThatRan).Should(Equal([]string{ + "outer bef A", + "outer bef B", + "outer aft A", + "outer aft B", + })) + Ω(spec.Summary("").Failure.Message).Should(Equal("outer bef B")) + }) + }) + + Context("when an after fails", func() { + BeforeEach(func() { + failingNodes["inner aft B"] = true + }) + + It("should run all other afters, but mark the test as failed", func() { + Ω(spec.Passed()).Should(BeFalse()) + Ω(spec.Failed()).Should(BeTrue()) + Ω(nodesThatRan).Should(Equal([]string{ + "outer bef A", + "outer bef B", + "inner bef A", + "inner bef B", + "outer jusbef A", + "outer jusbef B", + "inner jusbef A", + "inner jusbef B", + "it node", + "inner aft A", + "inner aft B", + "outer aft A", + "outer aft B", + })) + Ω(spec.Summary("").Failure.Message).Should(Equal("inner aft B")) + }) + }) + + Context("when a just before each fails", func() { + BeforeEach(func() { + failingNodes["outer jusbef B"] = true + }) + + It("should run the afters, but not the subject", func() { + Ω(spec.Passed()).Should(BeFalse()) + Ω(spec.Failed()).Should(BeTrue()) + Ω(nodesThatRan).Should(Equal([]string{ + "outer bef A", + "outer bef B", + "inner bef A", + "inner bef B", + "outer jusbef A", + "outer jusbef B", + "inner aft A", + "inner aft B", + "outer aft A", + "outer aft B", + })) + Ω(spec.Summary("").Failure.Message).Should(Equal("outer jusbef B")) + }) + }) + + Context("when an after fails after an earlier node has failed", func() { + BeforeEach(func() { + failingNodes["it node"] = true + failingNodes["inner aft B"] = true + }) + + It("should record the earlier failure", func() { + Ω(spec.Passed()).Should(BeFalse()) + Ω(spec.Failed()).Should(BeTrue()) + Ω(nodesThatRan).Should(Equal([]string{ + "outer bef A", + "outer bef B", + "inner bef A", + "inner bef B", + "outer jusbef A", + "outer jusbef B", + "inner jusbef A", + "inner jusbef B", + "it node", + "inner aft A", + "inner aft B", + "outer aft A", + "outer aft B", + })) + Ω(spec.Summary("").Failure.Message).Should(Equal("it node")) + }) + }) + }) + }) + + Describe("running measurement specs", func() { + Context("when the measurement succeeds", func() { + It("should run N samples", func() { + spec = New( + newMeasure("measure node", noneFlag, false, 3), + containers( + newContainer("container", noneFlag, + newBef("bef A", false), + newJusBef("jusbef A", false), + newAft("aft A", false), + ), + ), + false, + ) + spec.Run(buffer) + + Ω(spec.Passed()).Should(BeTrue()) + Ω(spec.Failed()).Should(BeFalse()) + Ω(nodesThatRan).Should(Equal([]string{ + "bef A", + "jusbef A", + "measure node", + "aft A", + "bef A", + "jusbef A", + "measure node", + "aft A", + 
"bef A", + "jusbef A", + "measure node", + "aft A", + })) + }) + }) + + Context("when the measurement fails", func() { + It("should bail after the failure occurs", func() { + spec = New( + newMeasure("measure node", noneFlag, true, 3), + containers( + newContainer("container", noneFlag, + newBef("bef A", false), + newJusBef("jusbef A", false), + newAft("aft A", false), + ), + ), + false, + ) + spec.Run(buffer) + + Ω(spec.Passed()).Should(BeFalse()) + Ω(spec.Failed()).Should(BeTrue()) + Ω(nodesThatRan).Should(Equal([]string{ + "bef A", + "jusbef A", + "measure node", + "aft A", + })) + }) + }) + }) + + Describe("Summary", func() { + var ( + subjectCodeLocation types.CodeLocation + outerContainerCodeLocation types.CodeLocation + innerContainerCodeLocation types.CodeLocation + summary *types.SpecSummary + ) + + BeforeEach(func() { + subjectCodeLocation = codelocation.New(0) + outerContainerCodeLocation = codelocation.New(0) + innerContainerCodeLocation = codelocation.New(0) + + spec = New( + leafnodes.NewItNode("it node", func() { + time.Sleep(10 * time.Millisecond) + }, noneFlag, subjectCodeLocation, 0, failer, 0), + containers( + containernode.New("outer container", noneFlag, outerContainerCodeLocation), + containernode.New("inner container", noneFlag, innerContainerCodeLocation), + ), + false, + ) + + spec.Run(buffer) + Ω(spec.Passed()).Should(BeTrue()) + summary = spec.Summary("suite id") + }) + + It("should have the suite id", func() { + Ω(summary.SuiteID).Should(Equal("suite id")) + }) + + It("should have the component texts and code locations", func() { + Ω(summary.ComponentTexts).Should(Equal([]string{"outer container", "inner container", "it node"})) + Ω(summary.ComponentCodeLocations).Should(Equal([]types.CodeLocation{outerContainerCodeLocation, innerContainerCodeLocation, subjectCodeLocation})) + }) + + It("should have a runtime", func() { + Ω(summary.RunTime).Should(BeNumerically(">=", 10*time.Millisecond)) + }) + + It("should not be a measurement, or have a measurement summary", func() { + Ω(summary.IsMeasurement).Should(BeFalse()) + Ω(summary.Measurements).Should(BeEmpty()) + }) + }) + + Describe("Summaries for measurements", func() { + var summary *types.SpecSummary + + BeforeEach(func() { + spec = New(leafnodes.NewMeasureNode("measure node", func(b Benchmarker) { + b.RecordValue("a value", 7, "some info") + }, noneFlag, codeLocation, 4, failer, 0), containers(), false) + spec.Run(buffer) + Ω(spec.Passed()).Should(BeTrue()) + summary = spec.Summary("suite id") + }) + + It("should include the number of samples", func() { + Ω(summary.NumberOfSamples).Should(Equal(4)) + }) + + It("should be a measurement", func() { + Ω(summary.IsMeasurement).Should(BeTrue()) + }) + + It("should have the measurements report", func() { + Ω(summary.Measurements).Should(HaveKey("a value")) + + report := summary.Measurements["a value"] + Ω(report.Name).Should(Equal("a value")) + Ω(report.Info).Should(Equal("some info")) + Ω(report.Results).Should(Equal([]float64{7, 7, 7, 7})) + }) + }) + + Describe("When told to emit progress", func() { + It("should emit progress to the writer as it runs Befores, JustBefores, Afters, and Its", func() { + spec = New( + newIt("it node", noneFlag, false), + containers( + newContainer("outer container", noneFlag, + newBef("outer bef A", false), + newJusBef("outer jusbef A", false), + newAft("outer aft A", false), + ), + newContainer("inner container", noneFlag, + newBef("inner bef A", false), + newJusBef("inner jusbef A", false), + newAft("inner aft A", false), + ), + ), + 
true, + ) + spec.Run(buffer) + + Ω(buffer).Should(gbytes.Say(`\[BeforeEach\] outer container`)) + Ω(buffer).Should(gbytes.Say(`\[BeforeEach\] inner container`)) + Ω(buffer).Should(gbytes.Say(`\[JustBeforeEach\] outer container`)) + Ω(buffer).Should(gbytes.Say(`\[JustBeforeEach\] inner container`)) + Ω(buffer).Should(gbytes.Say(`\[It\] it node`)) + Ω(buffer).Should(gbytes.Say(`\[AfterEach\] inner container`)) + Ω(buffer).Should(gbytes.Say(`\[AfterEach\] outer container`)) + }) + + It("should emit progress to the writer as it runs Befores, JustBefores, Afters, and Measures", func() { + spec = New( + newMeasure("measure node", noneFlag, false, 2), + containers(), + true, + ) + spec.Run(buffer) + + Ω(buffer).Should(gbytes.Say(`\[Measure\] measure node`)) + Ω(buffer).Should(gbytes.Say(`\[Measure\] measure node`)) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/specs.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/specs.go new file mode 100644 index 0000000000000..a3d545153e33f --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/specs.go @@ -0,0 +1,122 @@ +package spec + +import ( + "math/rand" + "regexp" + "sort" +) + +type Specs struct { + specs []*Spec + numberOfOriginalSpecs int + hasProgrammaticFocus bool +} + +func NewSpecs(specs []*Spec) *Specs { + return &Specs{ + specs: specs, + numberOfOriginalSpecs: len(specs), + } +} + +func (e *Specs) Specs() []*Spec { + return e.specs +} + +func (e *Specs) NumberOfOriginalSpecs() int { + return e.numberOfOriginalSpecs +} + +func (e *Specs) HasProgrammaticFocus() bool { + return e.hasProgrammaticFocus +} + +func (e *Specs) Shuffle(r *rand.Rand) { + sort.Sort(e) + permutation := r.Perm(len(e.specs)) + shuffledSpecs := make([]*Spec, len(e.specs)) + for i, j := range permutation { + shuffledSpecs[i] = e.specs[j] + } + e.specs = shuffledSpecs +} + +func (e *Specs) ApplyFocus(description string, focusString string, skipString string) { + if focusString == "" && skipString == "" { + e.applyProgrammaticFocus() + } else { + e.applyRegExpFocus(description, focusString, skipString) + } +} + +func (e *Specs) applyProgrammaticFocus() { + e.hasProgrammaticFocus = false + for _, spec := range e.specs { + if spec.Focused() { + e.hasProgrammaticFocus = true + break + } + } + + if e.hasProgrammaticFocus { + for _, spec := range e.specs { + if !spec.Focused() { + spec.Skip() + } + } + } +} + +func (e *Specs) applyRegExpFocus(description string, focusString string, skipString string) { + for _, spec := range e.specs { + matchesFocus := true + matchesSkip := false + + toMatch := []byte(description + " " + spec.ConcatenatedString()) + + if focusString != "" { + focusFilter := regexp.MustCompile(focusString) + matchesFocus = focusFilter.Match([]byte(toMatch)) + } + + if skipString != "" { + skipFilter := regexp.MustCompile(skipString) + matchesSkip = skipFilter.Match([]byte(toMatch)) + } + + if !matchesFocus || matchesSkip { + spec.Skip() + } + } +} + +func (e *Specs) SkipMeasurements() { + for _, spec := range e.specs { + if spec.IsMeasurement() { + spec.Skip() + } + } +} + +func (e *Specs) TrimForParallelization(total int, node int) { + startIndex, count := ParallelizedIndexRange(len(e.specs), total, node) + if count == 0 { + e.specs = make([]*Spec, 0) + } else { + e.specs = e.specs[startIndex : startIndex+count] + } +} + +//sort.Interface + +func (e *Specs) Len() int { + return len(e.specs) +} + +func (e *Specs) Less(i, j int) bool { + return e.specs[i].ConcatenatedString() < 
e.specs[j].ConcatenatedString() +} + +func (e *Specs) Swap(i, j int) { + e.specs[i], e.specs[j] = e.specs[j], e.specs[i] +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/specs_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/specs_test.go new file mode 100644 index 0000000000000..b58a3074d58d0 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/spec/specs_test.go @@ -0,0 +1,305 @@ +package spec_test + +import ( + "math/rand" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/ginkgo/internal/spec" + . "github.com/onsi/gomega" + + "github.com/onsi/ginkgo/internal/codelocation" + "github.com/onsi/ginkgo/internal/containernode" + "github.com/onsi/ginkgo/internal/leafnodes" + "github.com/onsi/ginkgo/types" +) + +var _ = Describe("Specs", func() { + var specs *Specs + + newSpec := func(text string, flag types.FlagType) *Spec { + subject := leafnodes.NewItNode(text, func() {}, flag, codelocation.New(0), 0, nil, 0) + return New(subject, []*containernode.ContainerNode{}, false) + } + + newMeasureSpec := func(text string, flag types.FlagType) *Spec { + subject := leafnodes.NewMeasureNode(text, func(Benchmarker) {}, flag, codelocation.New(0), 0, nil, 0) + return New(subject, []*containernode.ContainerNode{}, false) + } + + newSpecs := func(args ...interface{}) *Specs { + specs := []*Spec{} + for index := 0; index < len(args)-1; index += 2 { + specs = append(specs, newSpec(args[index].(string), args[index+1].(types.FlagType))) + } + return NewSpecs(specs) + } + + specTexts := func(specs *Specs) []string { + texts := []string{} + for _, spec := range specs.Specs() { + texts = append(texts, spec.ConcatenatedString()) + } + return texts + } + + willRunTexts := func(specs *Specs) []string { + texts := []string{} + for _, spec := range specs.Specs() { + if !(spec.Skipped() || spec.Pending()) { + texts = append(texts, spec.ConcatenatedString()) + } + } + return texts + } + + skippedTexts := func(specs *Specs) []string { + texts := []string{} + for _, spec := range specs.Specs() { + if spec.Skipped() { + texts = append(texts, spec.ConcatenatedString()) + } + } + return texts + } + + pendingTexts := func(specs *Specs) []string { + texts := []string{} + for _, spec := range specs.Specs() { + if spec.Pending() { + texts = append(texts, spec.ConcatenatedString()) + } + } + return texts + } + + Describe("Shuffling specs", func() { + It("should shuffle the specs using the passed in randomizer", func() { + specs17 := newSpecs("C", noneFlag, "A", noneFlag, "B", noneFlag) + specs17.Shuffle(rand.New(rand.NewSource(17))) + texts17 := specTexts(specs17) + + specs17Again := newSpecs("C", noneFlag, "A", noneFlag, "B", noneFlag) + specs17Again.Shuffle(rand.New(rand.NewSource(17))) + texts17Again := specTexts(specs17Again) + + specs15 := newSpecs("C", noneFlag, "A", noneFlag, "B", noneFlag) + specs15.Shuffle(rand.New(rand.NewSource(15))) + texts15 := specTexts(specs15) + + specsUnshuffled := newSpecs("C", noneFlag, "A", noneFlag, "B", noneFlag) + textsUnshuffled := specTexts(specsUnshuffled) + + Ω(textsUnshuffled).Should(Equal([]string{"C", "A", "B"})) + + Ω(texts17).Should(Equal(texts17Again)) + Ω(texts17).ShouldNot(Equal(texts15)) + Ω(texts17).ShouldNot(Equal(textsUnshuffled)) + Ω(texts15).ShouldNot(Equal(textsUnshuffled)) + + Ω(texts17).Should(HaveLen(3)) + Ω(texts17).Should(ContainElement("A")) + Ω(texts17).Should(ContainElement("B")) + Ω(texts17).Should(ContainElement("C")) + + Ω(texts15).Should(HaveLen(3)) + Ω(texts15).Should(ContainElement("A")) + 
Ω(texts15).Should(ContainElement("B")) + Ω(texts15).Should(ContainElement("C")) + }) + }) + + Describe("with no programmatic focus", func() { + BeforeEach(func() { + specs = newSpecs("A1", noneFlag, "A2", noneFlag, "B1", noneFlag, "B2", pendingFlag) + specs.ApplyFocus("", "", "") + }) + + It("should not report as having programmatic specs", func() { + Ω(specs.HasProgrammaticFocus()).Should(BeFalse()) + }) + }) + + Describe("Applying focus/skip", func() { + var description, focusString, skipString string + + BeforeEach(func() { + description, focusString, skipString = "", "", "" + }) + + JustBeforeEach(func() { + specs = newSpecs("A1", focusedFlag, "A2", noneFlag, "B1", focusedFlag, "B2", pendingFlag) + specs.ApplyFocus(description, focusString, skipString) + }) + + Context("with neither a focus string nor a skip string", func() { + It("should apply the programmatic focus", func() { + Ω(willRunTexts(specs)).Should(Equal([]string{"A1", "B1"})) + Ω(skippedTexts(specs)).Should(Equal([]string{"A2", "B2"})) + Ω(pendingTexts(specs)).Should(BeEmpty()) + }) + + It("should report as having programmatic specs", func() { + Ω(specs.HasProgrammaticFocus()).Should(BeTrue()) + }) + }) + + Context("with a focus regexp", func() { + BeforeEach(func() { + focusString = "A" + }) + + It("should override the programmatic focus", func() { + Ω(willRunTexts(specs)).Should(Equal([]string{"A1", "A2"})) + Ω(skippedTexts(specs)).Should(Equal([]string{"B1", "B2"})) + Ω(pendingTexts(specs)).Should(BeEmpty()) + }) + + It("should not report as having programmatic specs", func() { + Ω(specs.HasProgrammaticFocus()).Should(BeFalse()) + }) + }) + + Context("with a focus regexp", func() { + BeforeEach(func() { + focusString = "B" + }) + + It("should not override any pendings", func() { + Ω(willRunTexts(specs)).Should(Equal([]string{"B1"})) + Ω(skippedTexts(specs)).Should(Equal([]string{"A1", "A2"})) + Ω(pendingTexts(specs)).Should(Equal([]string{"B2"})) + }) + }) + + Context("with a description", func() { + BeforeEach(func() { + description = "C" + focusString = "C" + }) + + It("should include the description in the focus determination", func() { + Ω(willRunTexts(specs)).Should(Equal([]string{"A1", "A2", "B1"})) + Ω(skippedTexts(specs)).Should(BeEmpty()) + Ω(pendingTexts(specs)).Should(Equal([]string{"B2"})) + }) + }) + + Context("with a description", func() { + BeforeEach(func() { + description = "C" + skipString = "C" + }) + + It("should include the description in the focus determination", func() { + Ω(willRunTexts(specs)).Should(BeEmpty()) + Ω(skippedTexts(specs)).Should(Equal([]string{"A1", "A2", "B1", "B2"})) + Ω(pendingTexts(specs)).Should(BeEmpty()) + }) + }) + + Context("with a skip regexp", func() { + BeforeEach(func() { + skipString = "A" + }) + + It("should override the programmatic focus", func() { + Ω(willRunTexts(specs)).Should(Equal([]string{"B1"})) + Ω(skippedTexts(specs)).Should(Equal([]string{"A1", "A2"})) + Ω(pendingTexts(specs)).Should(Equal([]string{"B2"})) + }) + + It("should not report as having programmatic specs", func() { + Ω(specs.HasProgrammaticFocus()).Should(BeFalse()) + }) + }) + + Context("with both a focus and a skip regexp", func() { + BeforeEach(func() { + focusString = "1" + skipString = "B" + }) + + It("should AND the two", func() { + Ω(willRunTexts(specs)).Should(Equal([]string{"A1"})) + Ω(skippedTexts(specs)).Should(Equal([]string{"A2", "B1", "B2"})) + Ω(pendingTexts(specs)).Should(BeEmpty()) + }) + + It("should not report as having programmatic specs", func() { + 
Ω(specs.HasProgrammaticFocus()).Should(BeFalse()) + }) + }) + }) + + Describe("skipping measurements", func() { + BeforeEach(func() { + specs = NewSpecs([]*Spec{ + newSpec("A", noneFlag), + newSpec("B", noneFlag), + newSpec("C", pendingFlag), + newMeasureSpec("measurementA", noneFlag), + newMeasureSpec("measurementB", pendingFlag), + }) + }) + + It("should skip measurements", func() { + Ω(willRunTexts(specs)).Should(Equal([]string{"A", "B", "measurementA"})) + Ω(skippedTexts(specs)).Should(BeEmpty()) + Ω(pendingTexts(specs)).Should(Equal([]string{"C", "measurementB"})) + + specs.SkipMeasurements() + + Ω(willRunTexts(specs)).Should(Equal([]string{"A", "B"})) + Ω(skippedTexts(specs)).Should(Equal([]string{"measurementA", "measurementB"})) + Ω(pendingTexts(specs)).Should(Equal([]string{"C"})) + }) + }) + + Describe("when running tests in parallel", func() { + It("should select out a subset of the tests", func() { + specsNode1 := newSpecs("A", noneFlag, "B", noneFlag, "C", noneFlag, "D", noneFlag, "E", noneFlag) + specsNode2 := newSpecs("A", noneFlag, "B", noneFlag, "C", noneFlag, "D", noneFlag, "E", noneFlag) + specsNode3 := newSpecs("A", noneFlag, "B", noneFlag, "C", noneFlag, "D", noneFlag, "E", noneFlag) + + specsNode1.TrimForParallelization(3, 1) + specsNode2.TrimForParallelization(3, 2) + specsNode3.TrimForParallelization(3, 3) + + Ω(willRunTexts(specsNode1)).Should(Equal([]string{"A", "B"})) + Ω(willRunTexts(specsNode2)).Should(Equal([]string{"C", "D"})) + Ω(willRunTexts(specsNode3)).Should(Equal([]string{"E"})) + + Ω(specsNode1.Specs()).Should(HaveLen(2)) + Ω(specsNode2.Specs()).Should(HaveLen(2)) + Ω(specsNode3.Specs()).Should(HaveLen(1)) + + Ω(specsNode1.NumberOfOriginalSpecs()).Should(Equal(5)) + Ω(specsNode2.NumberOfOriginalSpecs()).Should(Equal(5)) + Ω(specsNode3.NumberOfOriginalSpecs()).Should(Equal(5)) + }) + + Context("when way too many nodes are used", func() { + It("should return 0 specs", func() { + specsNode1 := newSpecs("A", noneFlag, "B", noneFlag) + specsNode2 := newSpecs("A", noneFlag, "B", noneFlag) + specsNode3 := newSpecs("A", noneFlag, "B", noneFlag) + + specsNode1.TrimForParallelization(3, 1) + specsNode2.TrimForParallelization(3, 2) + specsNode3.TrimForParallelization(3, 3) + + Ω(willRunTexts(specsNode1)).Should(Equal([]string{"A"})) + Ω(willRunTexts(specsNode2)).Should(Equal([]string{"B"})) + Ω(willRunTexts(specsNode3)).Should(BeEmpty()) + + Ω(specsNode1.Specs()).Should(HaveLen(1)) + Ω(specsNode2.Specs()).Should(HaveLen(1)) + Ω(specsNode3.Specs()).Should(HaveLen(0)) + + Ω(specsNode1.NumberOfOriginalSpecs()).Should(Equal(2)) + Ω(specsNode2.NumberOfOriginalSpecs()).Should(Equal(2)) + Ω(specsNode3.NumberOfOriginalSpecs()).Should(Equal(2)) + }) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/specrunner/random_id.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/specrunner/random_id.go new file mode 100644 index 0000000000000..a0b8b62d52563 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/specrunner/random_id.go @@ -0,0 +1,15 @@ +package specrunner + +import ( + "crypto/rand" + "fmt" +) + +func randomID() string { + b := make([]byte, 8) + _, err := rand.Read(b) + if err != nil { + return "" + } + return fmt.Sprintf("%x-%x-%x-%x", b[0:2], b[2:4], b[4:6], b[6:8]) +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go new file mode 100644 index 0000000000000..123fdae28d263 --- 
/dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/specrunner/spec_runner.go @@ -0,0 +1,324 @@ +package specrunner + +import ( + "fmt" + "os" + "os/signal" + "sync" + "syscall" + + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/internal/leafnodes" + "github.com/onsi/ginkgo/internal/spec" + Writer "github.com/onsi/ginkgo/internal/writer" + "github.com/onsi/ginkgo/reporters" + "github.com/onsi/ginkgo/types" + + "time" +) + +type SpecRunner struct { + description string + beforeSuiteNode leafnodes.SuiteNode + specs *spec.Specs + afterSuiteNode leafnodes.SuiteNode + reporters []reporters.Reporter + startTime time.Time + suiteID string + runningSpec *spec.Spec + writer Writer.WriterInterface + config config.GinkgoConfigType + interrupted bool + lock *sync.Mutex +} + +func New(description string, beforeSuiteNode leafnodes.SuiteNode, specs *spec.Specs, afterSuiteNode leafnodes.SuiteNode, reporters []reporters.Reporter, writer Writer.WriterInterface, config config.GinkgoConfigType) *SpecRunner { + return &SpecRunner{ + description: description, + beforeSuiteNode: beforeSuiteNode, + specs: specs, + afterSuiteNode: afterSuiteNode, + reporters: reporters, + writer: writer, + config: config, + suiteID: randomID(), + lock: &sync.Mutex{}, + } +} + +func (runner *SpecRunner) Run() bool { + if runner.config.DryRun { + runner.performDryRun() + return true + } + + runner.reportSuiteWillBegin() + go runner.registerForInterrupts() + + suitePassed := runner.runBeforeSuite() + + if suitePassed { + suitePassed = runner.runSpecs() + } + + runner.blockForeverIfInterrupted() + + suitePassed = runner.runAfterSuite() && suitePassed + + runner.reportSuiteDidEnd(suitePassed) + + return suitePassed +} + +func (runner *SpecRunner) performDryRun() { + runner.reportSuiteWillBegin() + + if runner.beforeSuiteNode != nil { + summary := runner.beforeSuiteNode.Summary() + summary.State = types.SpecStatePassed + runner.reportBeforeSuite(summary) + } + + for _, spec := range runner.specs.Specs() { + summary := spec.Summary(runner.suiteID) + runner.reportSpecWillRun(summary) + if summary.State == types.SpecStateInvalid { + summary.State = types.SpecStatePassed + } + runner.reportSpecDidComplete(summary, false) + } + + if runner.afterSuiteNode != nil { + summary := runner.afterSuiteNode.Summary() + summary.State = types.SpecStatePassed + runner.reportAfterSuite(summary) + } + + runner.reportSuiteDidEnd(true) +} + +func (runner *SpecRunner) runBeforeSuite() bool { + if runner.beforeSuiteNode == nil || runner.wasInterrupted() { + return true + } + + runner.writer.Truncate() + conf := runner.config + passed := runner.beforeSuiteNode.Run(conf.ParallelNode, conf.ParallelTotal, conf.SyncHost) + if !passed { + runner.writer.DumpOut() + } + runner.reportBeforeSuite(runner.beforeSuiteNode.Summary()) + return passed +} + +func (runner *SpecRunner) runAfterSuite() bool { + if runner.afterSuiteNode == nil { + return true + } + + runner.writer.Truncate() + conf := runner.config + passed := runner.afterSuiteNode.Run(conf.ParallelNode, conf.ParallelTotal, conf.SyncHost) + if !passed { + runner.writer.DumpOut() + } + runner.reportAfterSuite(runner.afterSuiteNode.Summary()) + return passed +} + +func (runner *SpecRunner) runSpecs() bool { + suiteFailed := false + skipRemainingSpecs := false + for _, spec := range runner.specs.Specs() { + if runner.wasInterrupted() { + return suiteFailed + } + if skipRemainingSpecs { + spec.Skip() + } + runner.reportSpecWillRun(spec.Summary(runner.suiteID)) + + if !spec.Skipped() && 
!spec.Pending() { + runner.runningSpec = spec + spec.Run(runner.writer) + runner.runningSpec = nil + if spec.Failed() { + suiteFailed = true + } + } else if spec.Pending() && runner.config.FailOnPending { + suiteFailed = true + } + + runner.reportSpecDidComplete(spec.Summary(runner.suiteID), spec.Failed()) + + if spec.Failed() && runner.config.FailFast { + skipRemainingSpecs = true + } + } + + return !suiteFailed +} + +func (runner *SpecRunner) CurrentSpecSummary() (*types.SpecSummary, bool) { + if runner.runningSpec == nil { + return nil, false + } + + return runner.runningSpec.Summary(runner.suiteID), true +} + +func (runner *SpecRunner) registerForInterrupts() { + c := make(chan os.Signal, 1) + signal.Notify(c, os.Interrupt) + + <-c + signal.Stop(c) + runner.markInterrupted() + go runner.registerForHardInterrupts() + runner.writer.DumpOutWithHeader(` +Received interrupt. Emitting contents of GinkgoWriter... +--------------------------------------------------------- +`) + if runner.afterSuiteNode != nil { + fmt.Fprint(os.Stderr, ` +--------------------------------------------------------- +Received interrupt. Running AfterSuite... +^C again to terminate immediately +`) + runner.runAfterSuite() + } + runner.reportSuiteDidEnd(false) + os.Exit(1) +} + +func (runner *SpecRunner) registerForHardInterrupts() { + c := make(chan os.Signal, 1) + signal.Notify(c, os.Interrupt, syscall.SIGTERM) + + <-c + fmt.Fprintln(os.Stderr, "\nReceived second interrupt. Shutting down.") + os.Exit(1) +} + +func (runner *SpecRunner) blockForeverIfInterrupted() { + runner.lock.Lock() + interrupted := runner.interrupted + runner.lock.Unlock() + + if interrupted { + select {} + } +} + +func (runner *SpecRunner) markInterrupted() { + runner.lock.Lock() + defer runner.lock.Unlock() + runner.interrupted = true +} + +func (runner *SpecRunner) wasInterrupted() bool { + runner.lock.Lock() + defer runner.lock.Unlock() + return runner.interrupted +} + +func (runner *SpecRunner) reportSuiteWillBegin() { + runner.startTime = time.Now() + summary := runner.summary(true) + for _, reporter := range runner.reporters { + reporter.SpecSuiteWillBegin(runner.config, summary) + } +} + +func (runner *SpecRunner) reportBeforeSuite(summary *types.SetupSummary) { + for _, reporter := range runner.reporters { + reporter.BeforeSuiteDidRun(summary) + } +} + +func (runner *SpecRunner) reportAfterSuite(summary *types.SetupSummary) { + for _, reporter := range runner.reporters { + reporter.AfterSuiteDidRun(summary) + } +} + +func (runner *SpecRunner) reportSpecWillRun(summary *types.SpecSummary) { + runner.writer.Truncate() + + for _, reporter := range runner.reporters { + reporter.SpecWillRun(summary) + } +} + +func (runner *SpecRunner) reportSpecDidComplete(summary *types.SpecSummary, failed bool) { + for i := len(runner.reporters) - 1; i >= 1; i-- { + runner.reporters[i].SpecDidComplete(summary) + } + + if failed { + runner.writer.DumpOut() + } + + runner.reporters[0].SpecDidComplete(summary) +} + +func (runner *SpecRunner) reportSuiteDidEnd(success bool) { + summary := runner.summary(success) + summary.RunTime = time.Since(runner.startTime) + for _, reporter := range runner.reporters { + reporter.SpecSuiteDidEnd(summary) + } +} + +func (runner *SpecRunner) countSpecsSatisfying(filter func(ex *spec.Spec) bool) (count int) { + count = 0 + + for _, spec := range runner.specs.Specs() { + if filter(spec) { + count++ + } + } + + return count +} + +func (runner *SpecRunner) summary(success bool) *types.SuiteSummary { + numberOfSpecsThatWillBeRun 
:= runner.countSpecsSatisfying(func(ex *spec.Spec) bool { + return !ex.Skipped() && !ex.Pending() + }) + + numberOfPendingSpecs := runner.countSpecsSatisfying(func(ex *spec.Spec) bool { + return ex.Pending() + }) + + numberOfSkippedSpecs := runner.countSpecsSatisfying(func(ex *spec.Spec) bool { + return ex.Skipped() + }) + + numberOfPassedSpecs := runner.countSpecsSatisfying(func(ex *spec.Spec) bool { + return ex.Passed() + }) + + numberOfFailedSpecs := runner.countSpecsSatisfying(func(ex *spec.Spec) bool { + return ex.Failed() + }) + + if runner.beforeSuiteNode != nil && !runner.beforeSuiteNode.Passed() && !runner.config.DryRun { + numberOfFailedSpecs = numberOfSpecsThatWillBeRun + } + + return &types.SuiteSummary{ + SuiteDescription: runner.description, + SuiteSucceeded: success, + SuiteID: runner.suiteID, + + NumberOfSpecsBeforeParallelization: runner.specs.NumberOfOriginalSpecs(), + NumberOfTotalSpecs: len(runner.specs.Specs()), + NumberOfSpecsThatWillBeRun: numberOfSpecsThatWillBeRun, + NumberOfPendingSpecs: numberOfPendingSpecs, + NumberOfSkippedSpecs: numberOfSkippedSpecs, + NumberOfPassedSpecs: numberOfPassedSpecs, + NumberOfFailedSpecs: numberOfFailedSpecs, + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/specrunner/spec_runner_suite_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/specrunner/spec_runner_suite_test.go new file mode 100644 index 0000000000000..c8388fb6f7d4e --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/specrunner/spec_runner_suite_test.go @@ -0,0 +1,13 @@ +package specrunner_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + "testing" +) + +func TestSpecRunner(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Spec Runner Suite") +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/specrunner/spec_runner_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/specrunner/spec_runner_test.go new file mode 100644 index 0000000000000..99686d3e61f3a --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/specrunner/spec_runner_test.go @@ -0,0 +1,623 @@ +package specrunner_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/ginkgo/internal/specrunner" + "github.com/onsi/ginkgo/types" + . 
"github.com/onsi/gomega" + + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/internal/codelocation" + "github.com/onsi/ginkgo/internal/containernode" + Failer "github.com/onsi/ginkgo/internal/failer" + "github.com/onsi/ginkgo/internal/leafnodes" + "github.com/onsi/ginkgo/internal/spec" + Writer "github.com/onsi/ginkgo/internal/writer" + "github.com/onsi/ginkgo/reporters" +) + +var noneFlag = types.FlagTypeNone +var focusedFlag = types.FlagTypeFocused +var pendingFlag = types.FlagTypePending + +var _ = Describe("Spec Runner", func() { + var ( + reporter1 *reporters.FakeReporter + reporter2 *reporters.FakeReporter + failer *Failer.Failer + writer *Writer.FakeGinkgoWriter + + thingsThatRan []string + + runner *SpecRunner + ) + + newBefSuite := func(text string, fail bool) leafnodes.SuiteNode { + return leafnodes.NewBeforeSuiteNode(func() { + writer.AddEvent(text) + thingsThatRan = append(thingsThatRan, text) + if fail { + failer.Fail(text, codelocation.New(0)) + } + }, codelocation.New(0), 0, failer) + } + + newAftSuite := func(text string, fail bool) leafnodes.SuiteNode { + return leafnodes.NewAfterSuiteNode(func() { + writer.AddEvent(text) + thingsThatRan = append(thingsThatRan, text) + if fail { + failer.Fail(text, codelocation.New(0)) + } + }, codelocation.New(0), 0, failer) + } + + newSpec := func(text string, flag types.FlagType, fail bool) *spec.Spec { + subject := leafnodes.NewItNode(text, func() { + writer.AddEvent(text) + thingsThatRan = append(thingsThatRan, text) + if fail { + failer.Fail(text, codelocation.New(0)) + } + }, flag, codelocation.New(0), 0, failer, 0) + + return spec.New(subject, []*containernode.ContainerNode{}, false) + } + + newSpecWithBody := func(text string, body interface{}) *spec.Spec { + subject := leafnodes.NewItNode(text, body, noneFlag, codelocation.New(0), 0, failer, 0) + + return spec.New(subject, []*containernode.ContainerNode{}, false) + } + + newRunner := func(config config.GinkgoConfigType, beforeSuiteNode leafnodes.SuiteNode, afterSuiteNode leafnodes.SuiteNode, specs ...*spec.Spec) *SpecRunner { + return New("description", beforeSuiteNode, spec.NewSpecs(specs), afterSuiteNode, []reporters.Reporter{reporter1, reporter2}, writer, config) + } + + BeforeEach(func() { + reporter1 = reporters.NewFakeReporter() + reporter2 = reporters.NewFakeReporter() + writer = Writer.NewFake() + failer = Failer.New() + + thingsThatRan = []string{} + }) + + Describe("Running and Reporting", func() { + var specA, pendingSpec, anotherPendingSpec, failedSpec, specB, skippedSpec *spec.Spec + var willRunCalls, didCompleteCalls []string + var conf config.GinkgoConfigType + + JustBeforeEach(func() { + willRunCalls = []string{} + didCompleteCalls = []string{} + specA = newSpec("spec A", noneFlag, false) + pendingSpec = newSpec("pending spec", pendingFlag, false) + anotherPendingSpec = newSpec("another pending spec", pendingFlag, false) + failedSpec = newSpec("failed spec", noneFlag, true) + specB = newSpec("spec B", noneFlag, false) + skippedSpec = newSpec("skipped spec", noneFlag, false) + skippedSpec.Skip() + + reporter1.SpecWillRunStub = func(specSummary *types.SpecSummary) { + willRunCalls = append(willRunCalls, "Reporter1") + } + reporter2.SpecWillRunStub = func(specSummary *types.SpecSummary) { + willRunCalls = append(willRunCalls, "Reporter2") + } + + reporter1.SpecDidCompleteStub = func(specSummary *types.SpecSummary) { + didCompleteCalls = append(didCompleteCalls, "Reporter1") + } + reporter2.SpecDidCompleteStub = func(specSummary *types.SpecSummary) { + 
didCompleteCalls = append(didCompleteCalls, "Reporter2") + } + + runner = newRunner(conf, newBefSuite("BefSuite", false), newAftSuite("AftSuite", false), specA, pendingSpec, anotherPendingSpec, failedSpec, specB, skippedSpec) + runner.Run() + }) + + BeforeEach(func() { + conf = config.GinkgoConfigType{RandomSeed: 17} + }) + + It("should skip skipped/pending tests", func() { + Ω(thingsThatRan).Should(Equal([]string{"BefSuite", "spec A", "failed spec", "spec B", "AftSuite"})) + }) + + It("should report to any attached reporters", func() { + Ω(reporter1.Config).Should(Equal(reporter2.Config)) + Ω(reporter1.BeforeSuiteSummary).Should(Equal(reporter2.BeforeSuiteSummary)) + Ω(reporter1.BeginSummary).Should(Equal(reporter2.BeginSummary)) + Ω(reporter1.SpecWillRunSummaries).Should(Equal(reporter2.SpecWillRunSummaries)) + Ω(reporter1.SpecSummaries).Should(Equal(reporter2.SpecSummaries)) + Ω(reporter1.AfterSuiteSummary).Should(Equal(reporter2.AfterSuiteSummary)) + Ω(reporter1.EndSummary).Should(Equal(reporter2.EndSummary)) + }) + + It("should report that a spec did end in reverse order", func() { + Ω(willRunCalls[0:4]).Should(Equal([]string{"Reporter1", "Reporter2", "Reporter1", "Reporter2"})) + Ω(didCompleteCalls[0:4]).Should(Equal([]string{"Reporter2", "Reporter1", "Reporter2", "Reporter1"})) + }) + + It("should report the passed in config", func() { + Ω(reporter1.Config.RandomSeed).Should(BeNumerically("==", 17)) + }) + + It("should report the beginning of the suite", func() { + Ω(reporter1.BeginSummary.SuiteDescription).Should(Equal("description")) + Ω(reporter1.BeginSummary.SuiteID).Should(MatchRegexp("[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}")) + Ω(reporter1.BeginSummary.NumberOfSpecsBeforeParallelization).Should(Equal(6)) + Ω(reporter1.BeginSummary.NumberOfTotalSpecs).Should(Equal(6)) + Ω(reporter1.BeginSummary.NumberOfSpecsThatWillBeRun).Should(Equal(3)) + Ω(reporter1.BeginSummary.NumberOfPendingSpecs).Should(Equal(2)) + Ω(reporter1.BeginSummary.NumberOfSkippedSpecs).Should(Equal(1)) + }) + + It("should report the end of the suite", func() { + Ω(reporter1.EndSummary.SuiteDescription).Should(Equal("description")) + Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeFalse()) + Ω(reporter1.EndSummary.SuiteID).Should(MatchRegexp("[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}")) + Ω(reporter1.EndSummary.NumberOfSpecsBeforeParallelization).Should(Equal(6)) + Ω(reporter1.EndSummary.NumberOfTotalSpecs).Should(Equal(6)) + Ω(reporter1.EndSummary.NumberOfSpecsThatWillBeRun).Should(Equal(3)) + Ω(reporter1.EndSummary.NumberOfPendingSpecs).Should(Equal(2)) + Ω(reporter1.EndSummary.NumberOfSkippedSpecs).Should(Equal(1)) + Ω(reporter1.EndSummary.NumberOfPassedSpecs).Should(Equal(2)) + Ω(reporter1.EndSummary.NumberOfFailedSpecs).Should(Equal(1)) + }) + + Context("when told to perform a dry run", func() { + BeforeEach(func() { + conf.DryRun = true + }) + + It("should report to the reporters", func() { + Ω(reporter1.Config).Should(Equal(reporter2.Config)) + Ω(reporter1.BeforeSuiteSummary).Should(Equal(reporter2.BeforeSuiteSummary)) + Ω(reporter1.BeginSummary).Should(Equal(reporter2.BeginSummary)) + Ω(reporter1.SpecWillRunSummaries).Should(Equal(reporter2.SpecWillRunSummaries)) + Ω(reporter1.SpecSummaries).Should(Equal(reporter2.SpecSummaries)) + Ω(reporter1.AfterSuiteSummary).Should(Equal(reporter2.AfterSuiteSummary)) + Ω(reporter1.EndSummary).Should(Equal(reporter2.EndSummary)) + }) + + It("should not actually run anything", func() { + Ω(thingsThatRan).Should(BeEmpty()) + }) + + It("report before and 
after suites as passed", func() { + Ω(reporter1.BeforeSuiteSummary.State).Should(Equal(types.SpecStatePassed)) + Ω(reporter1.AfterSuiteSummary.State).Should(Equal(types.SpecStatePassed)) + }) + + It("should report specs as passed", func() { + summaries := reporter1.SpecSummaries + Ω(summaries).Should(HaveLen(6)) + Ω(summaries[0].ComponentTexts).Should(ContainElement("spec A")) + Ω(summaries[0].State).Should(Equal(types.SpecStatePassed)) + Ω(summaries[1].ComponentTexts).Should(ContainElement("pending spec")) + Ω(summaries[1].State).Should(Equal(types.SpecStatePending)) + Ω(summaries[2].ComponentTexts).Should(ContainElement("another pending spec")) + Ω(summaries[2].State).Should(Equal(types.SpecStatePending)) + Ω(summaries[3].ComponentTexts).Should(ContainElement("failed spec")) + Ω(summaries[3].State).Should(Equal(types.SpecStatePassed)) + Ω(summaries[4].ComponentTexts).Should(ContainElement("spec B")) + Ω(summaries[4].State).Should(Equal(types.SpecStatePassed)) + Ω(summaries[5].ComponentTexts).Should(ContainElement("skipped spec")) + Ω(summaries[5].State).Should(Equal(types.SpecStateSkipped)) + }) + + It("should report the end of the suite", func() { + Ω(reporter1.EndSummary.SuiteDescription).Should(Equal("description")) + Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeTrue()) + Ω(reporter1.EndSummary.SuiteID).Should(MatchRegexp("[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}")) + Ω(reporter1.EndSummary.NumberOfSpecsBeforeParallelization).Should(Equal(6)) + Ω(reporter1.EndSummary.NumberOfTotalSpecs).Should(Equal(6)) + Ω(reporter1.EndSummary.NumberOfSpecsThatWillBeRun).Should(Equal(3)) + Ω(reporter1.EndSummary.NumberOfPendingSpecs).Should(Equal(2)) + Ω(reporter1.EndSummary.NumberOfSkippedSpecs).Should(Equal(1)) + Ω(reporter1.EndSummary.NumberOfPassedSpecs).Should(Equal(0)) + Ω(reporter1.EndSummary.NumberOfFailedSpecs).Should(Equal(0)) + }) + }) + }) + + Describe("reporting on specs", func() { + var proceed chan bool + var ready chan bool + var finished chan bool + BeforeEach(func() { + ready = make(chan bool) + proceed = make(chan bool) + finished = make(chan bool) + skippedSpec := newSpec("SKIP", noneFlag, false) + skippedSpec.Skip() + + runner = newRunner( + config.GinkgoConfigType{}, + newBefSuite("BefSuite", false), + newAftSuite("AftSuite", false), + skippedSpec, + newSpec("PENDING", pendingFlag, false), + newSpecWithBody("RUN", func() { + close(ready) + <-proceed + }), + ) + go func() { + runner.Run() + close(finished) + }() + }) + + It("should report about pending/skipped specs", func() { + <-ready + Ω(reporter1.SpecWillRunSummaries).Should(HaveLen(3)) + + Ω(reporter1.SpecWillRunSummaries[0].ComponentTexts[0]).Should(Equal("SKIP")) + Ω(reporter1.SpecWillRunSummaries[1].ComponentTexts[0]).Should(Equal("PENDING")) + Ω(reporter1.SpecWillRunSummaries[2].ComponentTexts[0]).Should(Equal("RUN")) + + Ω(reporter1.SpecSummaries[0].ComponentTexts[0]).Should(Equal("SKIP")) + Ω(reporter1.SpecSummaries[1].ComponentTexts[0]).Should(Equal("PENDING")) + Ω(reporter1.SpecSummaries).Should(HaveLen(2)) + + close(proceed) + <-finished + + Ω(reporter1.SpecSummaries).Should(HaveLen(3)) + Ω(reporter1.SpecSummaries[2].ComponentTexts[0]).Should(Equal("RUN")) + }) + }) + + Describe("Running BeforeSuite & AfterSuite", func() { + var success bool + var befSuite leafnodes.SuiteNode + var aftSuite leafnodes.SuiteNode + Context("with a nil BeforeSuite & AfterSuite", func() { + BeforeEach(func() { + runner = newRunner( + config.GinkgoConfigType{}, + nil, + nil, + newSpec("A", noneFlag, false), + newSpec("B", 
noneFlag, false), + ) + success = runner.Run() + }) + + It("should not report about the BeforeSuite", func() { + Ω(reporter1.BeforeSuiteSummary).Should(BeNil()) + }) + + It("should not report about the AfterSuite", func() { + Ω(reporter1.AfterSuiteSummary).Should(BeNil()) + }) + + It("should run the specs", func() { + Ω(thingsThatRan).Should(Equal([]string{"A", "B"})) + }) + }) + + Context("when the BeforeSuite & AfterSuite pass", func() { + BeforeEach(func() { + befSuite = newBefSuite("BefSuite", false) + aftSuite = newBefSuite("AftSuite", false) + runner = newRunner( + config.GinkgoConfigType{}, + befSuite, + aftSuite, + newSpec("A", noneFlag, false), + newSpec("B", noneFlag, false), + ) + success = runner.Run() + }) + + It("should run the BeforeSuite, the AfterSuite and the specs", func() { + Ω(thingsThatRan).Should(Equal([]string{"BefSuite", "A", "B", "AftSuite"})) + }) + + It("should report about the BeforeSuite", func() { + Ω(reporter1.BeforeSuiteSummary).Should(Equal(befSuite.Summary())) + }) + + It("should report about the AfterSuite", func() { + Ω(reporter1.AfterSuiteSummary).Should(Equal(aftSuite.Summary())) + }) + + It("should report success", func() { + Ω(success).Should(BeTrue()) + Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeTrue()) + Ω(reporter1.EndSummary.NumberOfFailedSpecs).Should(Equal(0)) + }) + + It("should not dump the writer", func() { + Ω(writer.EventStream).ShouldNot(ContainElement("DUMP")) + }) + }) + + Context("when the BeforeSuite fails", func() { + BeforeEach(func() { + befSuite = newBefSuite("BefSuite", true) + aftSuite = newBefSuite("AftSuite", false) + + skipped := newSpec("Skipped", noneFlag, false) + skipped.Skip() + + runner = newRunner( + config.GinkgoConfigType{}, + befSuite, + aftSuite, + newSpec("A", noneFlag, false), + newSpec("B", noneFlag, false), + newSpec("Pending", pendingFlag, false), + skipped, + ) + success = runner.Run() + }) + + It("should not run the specs, but it should run the AfterSuite", func() { + Ω(thingsThatRan).Should(Equal([]string{"BefSuite", "AftSuite"})) + }) + + It("should report about the BeforeSuite", func() { + Ω(reporter1.BeforeSuiteSummary).Should(Equal(befSuite.Summary())) + }) + + It("should report about the AfterSuite", func() { + Ω(reporter1.AfterSuiteSummary).Should(Equal(aftSuite.Summary())) + }) + + It("should report failure", func() { + Ω(success).Should(BeFalse()) + Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeFalse()) + Ω(reporter1.EndSummary.NumberOfFailedSpecs).Should(Equal(2)) + Ω(reporter1.EndSummary.NumberOfSpecsThatWillBeRun).Should(Equal(2)) + }) + + It("should dump the writer", func() { + Ω(writer.EventStream).Should(ContainElement("DUMP")) + }) + }) + + Context("when some other test fails", func() { + BeforeEach(func() { + aftSuite = newBefSuite("AftSuite", false) + + runner = newRunner( + config.GinkgoConfigType{}, + nil, + aftSuite, + newSpec("A", noneFlag, true), + ) + success = runner.Run() + }) + + It("should still run the AfterSuite", func() { + Ω(thingsThatRan).Should(Equal([]string{"A", "AftSuite"})) + }) + + It("should report about the AfterSuite", func() { + Ω(reporter1.AfterSuiteSummary).Should(Equal(aftSuite.Summary())) + }) + + It("should report failure", func() { + Ω(success).Should(BeFalse()) + Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeFalse()) + Ω(reporter1.EndSummary.NumberOfFailedSpecs).Should(Equal(1)) + Ω(reporter1.EndSummary.NumberOfSpecsThatWillBeRun).Should(Equal(1)) + }) + }) + + Context("when the AfterSuite fails", func() { + BeforeEach(func() { + befSuite = 
newBefSuite("BefSuite", false) + aftSuite = newBefSuite("AftSuite", true) + runner = newRunner( + config.GinkgoConfigType{}, + befSuite, + aftSuite, + newSpec("A", noneFlag, false), + newSpec("B", noneFlag, false), + ) + success = runner.Run() + }) + + It("should run everything", func() { + Ω(thingsThatRan).Should(Equal([]string{"BefSuite", "A", "B", "AftSuite"})) + }) + + It("should report about the BeforeSuite", func() { + Ω(reporter1.BeforeSuiteSummary).Should(Equal(befSuite.Summary())) + }) + + It("should report about the AfterSuite", func() { + Ω(reporter1.AfterSuiteSummary).Should(Equal(aftSuite.Summary())) + }) + + It("should report failure", func() { + Ω(success).Should(BeFalse()) + Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeFalse()) + Ω(reporter1.EndSummary.NumberOfFailedSpecs).Should(Equal(0)) + }) + + It("should dump the writer", func() { + Ω(writer.EventStream).Should(ContainElement("DUMP")) + }) + }) + }) + + Describe("When instructed to fail fast", func() { + BeforeEach(func() { + conf := config.GinkgoConfigType{ + FailFast: true, + } + runner = newRunner(conf, nil, newAftSuite("after-suite", false), newSpec("passing", noneFlag, false), newSpec("failing", noneFlag, true), newSpec("dont-see", noneFlag, true), newSpec("dont-see", noneFlag, true)) + }) + + It("should return false, report failure, and not run anything past the failing test", func() { + Ω(runner.Run()).Should(BeFalse()) + Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeFalse()) + Ω(thingsThatRan).Should(Equal([]string{"passing", "failing", "after-suite"})) + }) + + It("should announce the subsequent specs as skipped", func() { + runner.Run() + Ω(reporter1.SpecSummaries).Should(HaveLen(4)) + Ω(reporter1.SpecSummaries[2].State).Should(Equal(types.SpecStateSkipped)) + Ω(reporter1.SpecSummaries[3].State).Should(Equal(types.SpecStateSkipped)) + }) + + It("should mark all subsequent specs as skipped", func() { + runner.Run() + Ω(reporter1.EndSummary.NumberOfSkippedSpecs).Should(Equal(2)) + }) + }) + + Describe("Marking failure and success", func() { + Context("when all tests pass", func() { + BeforeEach(func() { + runner = newRunner(config.GinkgoConfigType{}, nil, nil, newSpec("passing", noneFlag, false), newSpec("pending", pendingFlag, false)) + }) + + It("should return true and report success", func() { + Ω(runner.Run()).Should(BeTrue()) + Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeTrue()) + }) + }) + + Context("when a test fails", func() { + BeforeEach(func() { + runner = newRunner(config.GinkgoConfigType{}, nil, nil, newSpec("failing", noneFlag, true), newSpec("pending", pendingFlag, false)) + }) + + It("should return false and report failure", func() { + Ω(runner.Run()).Should(BeFalse()) + Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeFalse()) + }) + }) + + Context("when there is a pending test, but pendings count as failures", func() { + BeforeEach(func() { + runner = newRunner(config.GinkgoConfigType{FailOnPending: true}, nil, nil, newSpec("passing", noneFlag, false), newSpec("pending", pendingFlag, false)) + }) + + It("should return false and report failure", func() { + Ω(runner.Run()).Should(BeFalse()) + Ω(reporter1.EndSummary.SuiteSucceeded).Should(BeFalse()) + }) + }) + }) + + Describe("Managing the writer", func() { + BeforeEach(func() { + runner = newRunner( + config.GinkgoConfigType{}, + nil, + nil, + newSpec("A", noneFlag, false), + newSpec("B", noneFlag, true), + newSpec("C", noneFlag, false), + ) + reporter1.SpecWillRunStub = func(specSummary *types.SpecSummary) { + 
writer.AddEvent("R1.WillRun") + } + reporter2.SpecWillRunStub = func(specSummary *types.SpecSummary) { + writer.AddEvent("R2.WillRun") + } + reporter1.SpecDidCompleteStub = func(specSummary *types.SpecSummary) { + writer.AddEvent("R1.DidComplete") + } + reporter2.SpecDidCompleteStub = func(specSummary *types.SpecSummary) { + writer.AddEvent("R2.DidComplete") + } + runner.Run() + }) + + It("should truncate between tests, but only dump if a test fails", func() { + Ω(writer.EventStream).Should(Equal([]string{ + "TRUNCATE", + "R1.WillRun", + "R2.WillRun", + "A", + "R2.DidComplete", + "R1.DidComplete", + "TRUNCATE", + "R1.WillRun", + "R2.WillRun", + "B", + "R2.DidComplete", + "DUMP", + "R1.DidComplete", + "TRUNCATE", + "R1.WillRun", + "R2.WillRun", + "C", + "R2.DidComplete", + "R1.DidComplete", + })) + }) + }) + + Describe("CurrentSpecSummary", func() { + It("should return the spec summary for the currently running spec", func() { + var summary *types.SpecSummary + runner = newRunner( + config.GinkgoConfigType{}, + nil, + nil, + newSpec("A", noneFlag, false), + newSpecWithBody("B", func() { + var ok bool + summary, ok = runner.CurrentSpecSummary() + Ω(ok).Should(BeTrue()) + }), + newSpec("C", noneFlag, false), + ) + runner.Run() + + Ω(summary.ComponentTexts).Should(Equal([]string{"B"})) + + summary, ok := runner.CurrentSpecSummary() + Ω(summary).Should(BeNil()) + Ω(ok).Should(BeFalse()) + }) + }) + + Context("When running tests in parallel", func() { + It("reports the correct number of specs before parallelization", func() { + specs := spec.NewSpecs([]*spec.Spec{ + newSpec("A", noneFlag, false), + newSpec("B", pendingFlag, false), + newSpec("C", noneFlag, false), + }) + specs.TrimForParallelization(2, 1) + runner = New("description", nil, specs, nil, []reporters.Reporter{reporter1, reporter2}, writer, config.GinkgoConfigType{}) + runner.Run() + + Ω(reporter1.EndSummary.NumberOfSpecsBeforeParallelization).Should(Equal(3)) + Ω(reporter1.EndSummary.NumberOfTotalSpecs).Should(Equal(2)) + Ω(reporter1.EndSummary.NumberOfSpecsThatWillBeRun).Should(Equal(1)) + Ω(reporter1.EndSummary.NumberOfPendingSpecs).Should(Equal(1)) + }) + }) + + Describe("generating a suite id", func() { + It("should generate an id randomly", func() { + runnerA := newRunner(config.GinkgoConfigType{}, nil, nil) + runnerA.Run() + IDA := reporter1.BeginSummary.SuiteID + + runnerB := newRunner(config.GinkgoConfigType{}, nil, nil) + runnerB.Run() + IDB := reporter1.BeginSummary.SuiteID + + IDRegexp := "[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}" + Ω(IDA).Should(MatchRegexp(IDRegexp)) + Ω(IDB).Should(MatchRegexp(IDRegexp)) + + Ω(IDA).ShouldNot(Equal(IDB)) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/suite/suite.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/suite/suite.go new file mode 100644 index 0000000000000..a054602f78b33 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/suite/suite.go @@ -0,0 +1,171 @@ +package suite + +import ( + "math/rand" + "time" + + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/internal/containernode" + "github.com/onsi/ginkgo/internal/failer" + "github.com/onsi/ginkgo/internal/leafnodes" + "github.com/onsi/ginkgo/internal/spec" + "github.com/onsi/ginkgo/internal/specrunner" + "github.com/onsi/ginkgo/internal/writer" + "github.com/onsi/ginkgo/reporters" + "github.com/onsi/ginkgo/types" +) + +type ginkgoTestingT interface { + Fail() +} + +type Suite struct { + topLevelContainer *containernode.ContainerNode + 
currentContainer *containernode.ContainerNode + containerIndex int + beforeSuiteNode leafnodes.SuiteNode + afterSuiteNode leafnodes.SuiteNode + runner *specrunner.SpecRunner + failer *failer.Failer + running bool +} + +func New(failer *failer.Failer) *Suite { + topLevelContainer := containernode.New("[Top Level]", types.FlagTypeNone, types.CodeLocation{}) + + return &Suite{ + topLevelContainer: topLevelContainer, + currentContainer: topLevelContainer, + failer: failer, + containerIndex: 1, + } +} + +func (suite *Suite) Run(t ginkgoTestingT, description string, reporters []reporters.Reporter, writer writer.WriterInterface, config config.GinkgoConfigType) (bool, bool) { + if config.ParallelTotal < 1 { + panic("ginkgo.parallel.total must be >= 1") + } + + if config.ParallelNode > config.ParallelTotal || config.ParallelNode < 1 { + panic("ginkgo.parallel.node is one-indexed and must be <= ginkgo.parallel.total") + } + + r := rand.New(rand.NewSource(config.RandomSeed)) + suite.topLevelContainer.Shuffle(r) + specs := suite.generateSpecs(description, config) + suite.runner = specrunner.New(description, suite.beforeSuiteNode, specs, suite.afterSuiteNode, reporters, writer, config) + + suite.running = true + success := suite.runner.Run() + if !success { + t.Fail() + } + return success, specs.HasProgrammaticFocus() +} + +func (suite *Suite) generateSpecs(description string, config config.GinkgoConfigType) *spec.Specs { + specsSlice := []*spec.Spec{} + suite.topLevelContainer.BackPropagateProgrammaticFocus() + for _, collatedNodes := range suite.topLevelContainer.Collate() { + specsSlice = append(specsSlice, spec.New(collatedNodes.Subject, collatedNodes.Containers, config.EmitSpecProgress)) + } + + specs := spec.NewSpecs(specsSlice) + + if config.RandomizeAllSpecs { + specs.Shuffle(rand.New(rand.NewSource(config.RandomSeed))) + } + + specs.ApplyFocus(description, config.FocusString, config.SkipString) + + if config.SkipMeasurements { + specs.SkipMeasurements() + } + + if config.ParallelTotal > 1 { + specs.TrimForParallelization(config.ParallelTotal, config.ParallelNode) + } + + return specs +} + +func (suite *Suite) CurrentRunningSpecSummary() (*types.SpecSummary, bool) { + return suite.runner.CurrentSpecSummary() +} + +func (suite *Suite) SetBeforeSuiteNode(body interface{}, codeLocation types.CodeLocation, timeout time.Duration) { + if suite.beforeSuiteNode != nil { + panic("You may only call BeforeSuite once!") + } + suite.beforeSuiteNode = leafnodes.NewBeforeSuiteNode(body, codeLocation, timeout, suite.failer) +} + +func (suite *Suite) SetAfterSuiteNode(body interface{}, codeLocation types.CodeLocation, timeout time.Duration) { + if suite.afterSuiteNode != nil { + panic("You may only call AfterSuite once!") + } + suite.afterSuiteNode = leafnodes.NewAfterSuiteNode(body, codeLocation, timeout, suite.failer) +} + +func (suite *Suite) SetSynchronizedBeforeSuiteNode(bodyA interface{}, bodyB interface{}, codeLocation types.CodeLocation, timeout time.Duration) { + if suite.beforeSuiteNode != nil { + panic("You may only call BeforeSuite once!") + } + suite.beforeSuiteNode = leafnodes.NewSynchronizedBeforeSuiteNode(bodyA, bodyB, codeLocation, timeout, suite.failer) +} + +func (suite *Suite) SetSynchronizedAfterSuiteNode(bodyA interface{}, bodyB interface{}, codeLocation types.CodeLocation, timeout time.Duration) { + if suite.afterSuiteNode != nil { + panic("You may only call AfterSuite once!") + } + suite.afterSuiteNode = leafnodes.NewSynchronizedAfterSuiteNode(bodyA, bodyB, codeLocation, timeout, 
suite.failer) +} + +func (suite *Suite) PushContainerNode(text string, body func(), flag types.FlagType, codeLocation types.CodeLocation) { + container := containernode.New(text, flag, codeLocation) + suite.currentContainer.PushContainerNode(container) + + previousContainer := suite.currentContainer + suite.currentContainer = container + suite.containerIndex++ + + body() + + suite.containerIndex-- + suite.currentContainer = previousContainer +} + +func (suite *Suite) PushItNode(text string, body interface{}, flag types.FlagType, codeLocation types.CodeLocation, timeout time.Duration) { + if suite.running { + suite.failer.Fail("You may only call It from within a Describe or Context", codeLocation) + } + suite.currentContainer.PushSubjectNode(leafnodes.NewItNode(text, body, flag, codeLocation, timeout, suite.failer, suite.containerIndex)) +} + +func (suite *Suite) PushMeasureNode(text string, body interface{}, flag types.FlagType, codeLocation types.CodeLocation, samples int) { + if suite.running { + suite.failer.Fail("You may only call Measure from within a Describe or Context", codeLocation) + } + suite.currentContainer.PushSubjectNode(leafnodes.NewMeasureNode(text, body, flag, codeLocation, samples, suite.failer, suite.containerIndex)) +} + +func (suite *Suite) PushBeforeEachNode(body interface{}, codeLocation types.CodeLocation, timeout time.Duration) { + if suite.running { + suite.failer.Fail("You may only call BeforeEach from within a Describe or Context", codeLocation) + } + suite.currentContainer.PushSetupNode(leafnodes.NewBeforeEachNode(body, codeLocation, timeout, suite.failer, suite.containerIndex)) +} + +func (suite *Suite) PushJustBeforeEachNode(body interface{}, codeLocation types.CodeLocation, timeout time.Duration) { + if suite.running { + suite.failer.Fail("You may only call JustBeforeEach from within a Describe or Context", codeLocation) + } + suite.currentContainer.PushSetupNode(leafnodes.NewJustBeforeEachNode(body, codeLocation, timeout, suite.failer, suite.containerIndex)) +} + +func (suite *Suite) PushAfterEachNode(body interface{}, codeLocation types.CodeLocation, timeout time.Duration) { + if suite.running { + suite.failer.Fail("You may only call AfterEach from within a Describe or Context", codeLocation) + } + suite.currentContainer.PushSetupNode(leafnodes.NewAfterEachNode(body, codeLocation, timeout, suite.failer, suite.containerIndex)) +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/suite/suite_suite_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/suite/suite_suite_test.go new file mode 100644 index 0000000000000..06fe1d12abacc --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/suite/suite_suite_test.go @@ -0,0 +1,35 @@ +package suite_test + +import ( + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" + + "testing" +) + +func Test(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Suite") +} + +var numBeforeSuiteRuns = 0 +var numAfterSuiteRuns = 0 + +var _ = BeforeSuite(func() { + numBeforeSuiteRuns++ +}) + +var _ = AfterSuite(func() { + numAfterSuiteRuns++ + Ω(numBeforeSuiteRuns).Should(Equal(1)) + Ω(numAfterSuiteRuns).Should(Equal(1)) +}) + +//Fakes +type fakeTestingT struct { + didFail bool +} + +func (fakeT *fakeTestingT) Fail() { + fakeT.didFail = true +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/suite/suite_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/suite/suite_test.go new file mode 100644 index 0000000000000..334211a092c9a --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/suite/suite_test.go @@ -0,0 +1,398 @@ +package suite_test + +import ( + "bytes" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/ginkgo/internal/suite" + . "github.com/onsi/gomega" + + "math/rand" + "time" + + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/internal/codelocation" + Failer "github.com/onsi/ginkgo/internal/failer" + Writer "github.com/onsi/ginkgo/internal/writer" + "github.com/onsi/ginkgo/reporters" + "github.com/onsi/ginkgo/types" +) + +var _ = Describe("Suite", func() { + var ( + specSuite *Suite + fakeT *fakeTestingT + fakeR *reporters.FakeReporter + writer *Writer.FakeGinkgoWriter + failer *Failer.Failer + ) + + BeforeEach(func() { + writer = Writer.NewFake() + fakeT = &fakeTestingT{} + fakeR = reporters.NewFakeReporter() + failer = Failer.New() + specSuite = New(failer) + }) + + Describe("running a suite", func() { + var ( + runOrder []string + randomizeAllSpecs bool + randomSeed int64 + focusString string + parallelNode int + parallelTotal int + runResult bool + hasProgrammaticFocus bool + ) + + var f = func(runText string) func() { + return func() { + runOrder = append(runOrder, runText) + } + } + + BeforeEach(func() { + randomizeAllSpecs = false + randomSeed = 11 + parallelNode = 1 + parallelTotal = 1 + focusString = "" + + runOrder = make([]string, 0) + specSuite.SetBeforeSuiteNode(f("BeforeSuite"), codelocation.New(0), 0) + specSuite.PushBeforeEachNode(f("top BE"), codelocation.New(0), 0) + specSuite.PushJustBeforeEachNode(f("top JBE"), codelocation.New(0), 0) + specSuite.PushAfterEachNode(f("top AE"), codelocation.New(0), 0) + + specSuite.PushContainerNode("container", func() { + specSuite.PushBeforeEachNode(f("BE"), codelocation.New(0), 0) + specSuite.PushJustBeforeEachNode(f("JBE"), codelocation.New(0), 0) + specSuite.PushAfterEachNode(f("AE"), codelocation.New(0), 0) + specSuite.PushItNode("it", f("IT"), types.FlagTypeNone, codelocation.New(0), 0) + + specSuite.PushContainerNode("inner container", func() { + specSuite.PushItNode("inner it", f("inner IT"), types.FlagTypeNone, codelocation.New(0), 0) + }, types.FlagTypeNone, codelocation.New(0)) + }, types.FlagTypeNone, codelocation.New(0)) + + specSuite.PushContainerNode("container 2", func() { + specSuite.PushBeforeEachNode(f("BE 2"), codelocation.New(0), 0) + specSuite.PushItNode("it 2", f("IT 2"), types.FlagTypeNone, codelocation.New(0), 0) + }, types.FlagTypeNone, codelocation.New(0)) + + specSuite.PushItNode("top level it", f("top IT"), types.FlagTypeNone, codelocation.New(0), 0) + + specSuite.SetAfterSuiteNode(f("AfterSuite"), codelocation.New(0), 0) + }) + + JustBeforeEach(func() { + runResult, hasProgrammaticFocus = specSuite.Run(fakeT, "suite description", []reporters.Reporter{fakeR}, writer, 
config.GinkgoConfigType{ + RandomSeed: randomSeed, + RandomizeAllSpecs: randomizeAllSpecs, + FocusString: focusString, + ParallelNode: parallelNode, + ParallelTotal: parallelTotal, + }) + }) + + It("provides the config and suite description to the reporter", func() { + Ω(fakeR.Config.RandomSeed).Should(Equal(int64(randomSeed))) + Ω(fakeR.Config.RandomizeAllSpecs).Should(Equal(randomizeAllSpecs)) + Ω(fakeR.BeginSummary.SuiteDescription).Should(Equal("suite description")) + }) + + It("reports that the BeforeSuite node ran", func() { + Ω(fakeR.BeforeSuiteSummary).ShouldNot(BeNil()) + }) + + It("reports that the AfterSuite node ran", func() { + Ω(fakeR.AfterSuiteSummary).ShouldNot(BeNil()) + }) + + It("provides information about the current test", func() { + description := CurrentGinkgoTestDescription() + Ω(description.ComponentTexts).Should(Equal([]string{"Suite", "running a suite", "provides information about the current test"})) + Ω(description.FullTestText).Should(Equal("Suite running a suite provides information about the current test")) + Ω(description.TestText).Should(Equal("provides information about the current test")) + Ω(description.IsMeasurement).Should(BeFalse()) + Ω(description.FileName).Should(ContainSubstring("suite_test.go")) + Ω(description.LineNumber).Should(BeNumerically(">", 50)) + Ω(description.LineNumber).Should(BeNumerically("<", 150)) + }) + + Measure("should run measurements", func(b Benchmarker) { + r := rand.New(rand.NewSource(time.Now().UnixNano())) + + runtime := b.Time("sleeping", func() { + sleepTime := time.Duration(r.Float64() * 0.01 * float64(time.Second)) + time.Sleep(sleepTime) + }) + Ω(runtime.Seconds()).Should(BeNumerically("<=", 0.015)) + Ω(runtime.Seconds()).Should(BeNumerically(">=", 0)) + + randomValue := r.Float64() * 10.0 + b.RecordValue("random value", randomValue) + Ω(randomValue).Should(BeNumerically("<=", 10.0)) + Ω(randomValue).Should(BeNumerically(">=", 0.0)) + }, 10) + + It("creates a node hierarchy, converts it to a spec collection, and runs it", func() { + Ω(runOrder).Should(Equal([]string{ + "BeforeSuite", + "top BE", "BE", "top JBE", "JBE", "IT", "AE", "top AE", + "top BE", "BE", "top JBE", "JBE", "inner IT", "AE", "top AE", + "top BE", "BE 2", "top JBE", "IT 2", "top AE", + "top BE", "top JBE", "top IT", "top AE", + "AfterSuite", + })) + }) + + Context("when told to randomize all specs", func() { + BeforeEach(func() { + randomizeAllSpecs = true + }) + + It("does", func() { + Ω(runOrder).Should(Equal([]string{ + "BeforeSuite", + "top BE", "top JBE", "top IT", "top AE", + "top BE", "BE", "top JBE", "JBE", "inner IT", "AE", "top AE", + "top BE", "BE", "top JBE", "JBE", "IT", "AE", "top AE", + "top BE", "BE 2", "top JBE", "IT 2", "top AE", + "AfterSuite", + })) + }) + }) + + Describe("with ginkgo.parallel.total > 1", func() { + BeforeEach(func() { + parallelTotal = 2 + randomizeAllSpecs = true + }) + + Context("for one worker", func() { + BeforeEach(func() { + parallelNode = 1 + }) + + It("should run a subset of tests", func() { + Ω(runOrder).Should(Equal([]string{ + "BeforeSuite", + "top BE", "top JBE", "top IT", "top AE", + "top BE", "BE", "top JBE", "JBE", "inner IT", "AE", "top AE", + "AfterSuite", + })) + }) + }) + + Context("for another worker", func() { + BeforeEach(func() { + parallelNode = 2 + }) + + It("should run a (different) subset of tests", func() { + Ω(runOrder).Should(Equal([]string{ + "BeforeSuite", + "top BE", "BE", "top JBE", "JBE", "IT", "AE", "top AE", + "top BE", "BE 2", "top JBE", "IT 2", "top AE", + "AfterSuite", + 
})) + }) + }) + }) + + Context("when provided with a filter", func() { + BeforeEach(func() { + focusString = `inner|\d` + }) + + It("converts the filter to a regular expression and uses it to filter the running specs", func() { + Ω(runOrder).Should(Equal([]string{ + "BeforeSuite", + "top BE", "BE", "top JBE", "JBE", "inner IT", "AE", "top AE", + "top BE", "BE 2", "top JBE", "IT 2", "top AE", + "AfterSuite", + })) + }) + + It("should not report a programmatic focus", func() { + Ω(hasProgrammaticFocus).Should(BeFalse()) + }) + }) + + Context("with a programatically focused spec", func() { + BeforeEach(func() { + specSuite.PushItNode("focused it", f("focused it"), types.FlagTypeFocused, codelocation.New(0), 0) + + specSuite.PushContainerNode("focused container", func() { + specSuite.PushItNode("inner focused it", f("inner focused it"), types.FlagTypeFocused, codelocation.New(0), 0) + specSuite.PushItNode("inner unfocused it", f("inner unfocused it"), types.FlagTypeNone, codelocation.New(0), 0) + }, types.FlagTypeFocused, codelocation.New(0)) + + }) + + It("should only run the focused test, applying backpropagation to favor most deeply focused leaf nodes", func() { + Ω(runOrder).Should(Equal([]string{ + "BeforeSuite", + "top BE", "top JBE", "focused it", "top AE", + "top BE", "top JBE", "inner focused it", "top AE", + "AfterSuite", + })) + }) + + It("should report a programmatic focus", func() { + Ω(hasProgrammaticFocus).Should(BeTrue()) + }) + }) + + Context("when the specs pass", func() { + It("doesn't report a failure", func() { + Ω(fakeT.didFail).Should(BeFalse()) + }) + + It("should return true", func() { + Ω(runResult).Should(BeTrue()) + }) + }) + + Context("when a spec fails", func() { + var location types.CodeLocation + BeforeEach(func() { + specSuite.PushItNode("top level it", func() { + location = codelocation.New(0) + failer.Fail("oops!", location) + }, types.FlagTypeNone, codelocation.New(0), 0) + }) + + It("should return false", func() { + Ω(runResult).Should(BeFalse()) + }) + + It("reports a failure", func() { + Ω(fakeT.didFail).Should(BeTrue()) + }) + + It("generates the correct failure data", func() { + Ω(fakeR.SpecSummaries[0].Failure.Message).Should(Equal("oops!")) + Ω(fakeR.SpecSummaries[0].Failure.Location).Should(Equal(location)) + }) + }) + + Context("when runnable nodes are nested within other runnable nodes", func() { + Context("when an It is nested", func() { + BeforeEach(func() { + specSuite.PushItNode("top level it", func() { + specSuite.PushItNode("nested it", f("oops"), types.FlagTypeNone, codelocation.New(0), 0) + }, types.FlagTypeNone, codelocation.New(0), 0) + }) + + It("should fail", func() { + Ω(fakeT.didFail).Should(BeTrue()) + }) + }) + + Context("when a Measure is nested", func() { + BeforeEach(func() { + specSuite.PushItNode("top level it", func() { + specSuite.PushMeasureNode("nested measure", func(Benchmarker) {}, types.FlagTypeNone, codelocation.New(0), 10) + }, types.FlagTypeNone, codelocation.New(0), 0) + }) + + It("should fail", func() { + Ω(fakeT.didFail).Should(BeTrue()) + }) + }) + + Context("when a BeforeEach is nested", func() { + BeforeEach(func() { + specSuite.PushItNode("top level it", func() { + specSuite.PushBeforeEachNode(f("nested bef"), codelocation.New(0), 0) + }, types.FlagTypeNone, codelocation.New(0), 0) + }) + + It("should fail", func() { + Ω(fakeT.didFail).Should(BeTrue()) + }) + }) + + Context("when a JustBeforeEach is nested", func() { + BeforeEach(func() { + specSuite.PushItNode("top level it", func() { + 
specSuite.PushJustBeforeEachNode(f("nested jbef"), codelocation.New(0), 0) + }, types.FlagTypeNone, codelocation.New(0), 0) + }) + + It("should fail", func() { + Ω(fakeT.didFail).Should(BeTrue()) + }) + }) + + Context("when a AfterEach is nested", func() { + BeforeEach(func() { + specSuite.PushItNode("top level it", func() { + specSuite.PushAfterEachNode(f("nested aft"), codelocation.New(0), 0) + }, types.FlagTypeNone, codelocation.New(0), 0) + }) + + It("should fail", func() { + Ω(fakeT.didFail).Should(BeTrue()) + }) + }) + }) + }) + + Describe("BeforeSuite", func() { + Context("when setting BeforeSuite more than once", func() { + It("should panic", func() { + specSuite.SetBeforeSuiteNode(func() {}, codelocation.New(0), 0) + + Ω(func() { + specSuite.SetBeforeSuiteNode(func() {}, codelocation.New(0), 0) + }).Should(Panic()) + + }) + }) + }) + + Describe("AfterSuite", func() { + Context("when setting AfterSuite more than once", func() { + It("should panic", func() { + specSuite.SetAfterSuiteNode(func() {}, codelocation.New(0), 0) + + Ω(func() { + specSuite.SetAfterSuiteNode(func() {}, codelocation.New(0), 0) + }).Should(Panic()) + }) + }) + }) + + Describe("By", func() { + It("writes to the GinkgoWriter", func() { + originalGinkgoWriter := GinkgoWriter + buffer := &bytes.Buffer{} + + GinkgoWriter = buffer + By("Saying Hello GinkgoWriter") + GinkgoWriter = originalGinkgoWriter + + Ω(buffer.String()).Should(ContainSubstring("STEP")) + Ω(buffer.String()).Should(ContainSubstring(": Saying Hello GinkgoWriter\n")) + }) + + It("calls the passed-in callback if present", func() { + a := 0 + By("calling the callback", func() { + a = 1 + }) + Ω(a).Should(Equal(1)) + }) + + It("panics if there is more than one callback", func() { + Ω(func() { + By("registering more than one callback", func() {}, func() {}) + }).Should(Panic()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/testingtproxy/testing_t_proxy.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/testingtproxy/testing_t_proxy.go new file mode 100644 index 0000000000000..a2b9af80629e7 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/testingtproxy/testing_t_proxy.go @@ -0,0 +1,76 @@ +package testingtproxy + +import ( + "fmt" + "io" +) + +type failFunc func(message string, callerSkip ...int) + +func New(writer io.Writer, fail failFunc, offset int) *ginkgoTestingTProxy { + return &ginkgoTestingTProxy{ + fail: fail, + offset: offset, + writer: writer, + } +} + +type ginkgoTestingTProxy struct { + fail failFunc + offset int + writer io.Writer +} + +func (t *ginkgoTestingTProxy) Error(args ...interface{}) { + t.fail(fmt.Sprintln(args...), t.offset) +} + +func (t *ginkgoTestingTProxy) Errorf(format string, args ...interface{}) { + t.fail(fmt.Sprintf(format, args...), t.offset) +} + +func (t *ginkgoTestingTProxy) Fail() { + t.fail("failed", t.offset) +} + +func (t *ginkgoTestingTProxy) FailNow() { + t.fail("failed", t.offset) +} + +func (t *ginkgoTestingTProxy) Fatal(args ...interface{}) { + t.fail(fmt.Sprintln(args...), t.offset) +} + +func (t *ginkgoTestingTProxy) Fatalf(format string, args ...interface{}) { + t.fail(fmt.Sprintf(format, args...), t.offset) +} + +func (t *ginkgoTestingTProxy) Log(args ...interface{}) { + fmt.Fprintln(t.writer, args...) +} + +func (t *ginkgoTestingTProxy) Logf(format string, args ...interface{}) { + fmt.Fprintf(t.writer, format, args...) 
+} + +func (t *ginkgoTestingTProxy) Failed() bool { + return false +} + +func (t *ginkgoTestingTProxy) Parallel() { +} + +func (t *ginkgoTestingTProxy) Skip(args ...interface{}) { + fmt.Println(args...) +} + +func (t *ginkgoTestingTProxy) Skipf(format string, args ...interface{}) { + fmt.Printf(format, args...) +} + +func (t *ginkgoTestingTProxy) SkipNow() { +} + +func (t *ginkgoTestingTProxy) Skipped() bool { + return false +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/writer/fake_writer.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/writer/fake_writer.go new file mode 100644 index 0000000000000..ac6540f0c1d38 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/writer/fake_writer.go @@ -0,0 +1,31 @@ +package writer + +type FakeGinkgoWriter struct { + EventStream []string +} + +func NewFake() *FakeGinkgoWriter { + return &FakeGinkgoWriter{ + EventStream: []string{}, + } +} + +func (writer *FakeGinkgoWriter) AddEvent(event string) { + writer.EventStream = append(writer.EventStream, event) +} + +func (writer *FakeGinkgoWriter) Truncate() { + writer.EventStream = append(writer.EventStream, "TRUNCATE") +} + +func (writer *FakeGinkgoWriter) DumpOut() { + writer.EventStream = append(writer.EventStream, "DUMP") +} + +func (writer *FakeGinkgoWriter) DumpOutWithHeader(header string) { + writer.EventStream = append(writer.EventStream, "DUMP_WITH_HEADER: "+header) +} + +func (writer *FakeGinkgoWriter) Write(data []byte) (n int, err error) { + return 0, nil +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/writer/writer.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/writer/writer.go new file mode 100644 index 0000000000000..7678fc1d9cb76 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/writer/writer.go @@ -0,0 +1,71 @@ +package writer + +import ( + "bytes" + "io" + "sync" +) + +type WriterInterface interface { + io.Writer + + Truncate() + DumpOut() + DumpOutWithHeader(header string) +} + +type Writer struct { + buffer *bytes.Buffer + outWriter io.Writer + lock *sync.Mutex + stream bool +} + +func New(outWriter io.Writer) *Writer { + return &Writer{ + buffer: &bytes.Buffer{}, + lock: &sync.Mutex{}, + outWriter: outWriter, + stream: true, + } +} + +func (w *Writer) SetStream(stream bool) { + w.lock.Lock() + defer w.lock.Unlock() + w.stream = stream +} + +func (w *Writer) Write(b []byte) (n int, err error) { + w.lock.Lock() + defer w.lock.Unlock() + + if w.stream { + return w.outWriter.Write(b) + } else { + return w.buffer.Write(b) + } +} + +func (w *Writer) Truncate() { + w.lock.Lock() + defer w.lock.Unlock() + w.buffer.Reset() +} + +func (w *Writer) DumpOut() { + w.lock.Lock() + defer w.lock.Unlock() + if !w.stream { + w.buffer.WriteTo(w.outWriter) + } +} + +func (w *Writer) DumpOutWithHeader(header string) { + w.lock.Lock() + defer w.lock.Unlock() + if !w.stream && w.buffer.Len() > 0 { + w.outWriter.Write([]byte(header)) + w.buffer.WriteTo(w.outWriter) + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/writer/writer_suite_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/writer/writer_suite_test.go new file mode 100644 index 0000000000000..e206577919af4 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/writer/writer_suite_test.go @@ -0,0 +1,13 @@ +package writer_test + +import ( + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" + + "testing" +) + +func TestWriter(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Writer Suite") +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/writer/writer_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/writer/writer_test.go new file mode 100644 index 0000000000000..3e1d17c6d50b7 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/internal/writer/writer_test.go @@ -0,0 +1,75 @@ +package writer_test + +import ( + "github.com/onsi/gomega/gbytes" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/ginkgo/internal/writer" + . "github.com/onsi/gomega" +) + +var _ = Describe("Writer", func() { + var writer *Writer + var out *gbytes.Buffer + + BeforeEach(func() { + out = gbytes.NewBuffer() + writer = New(out) + }) + + It("should stream directly to the outbuffer by default", func() { + writer.Write([]byte("foo")) + Ω(out).Should(gbytes.Say("foo")) + }) + + It("should not emit the header when asked to DumpOutWitHeader", func() { + writer.Write([]byte("foo")) + writer.DumpOutWithHeader("my header") + Ω(out).ShouldNot(gbytes.Say("my header")) + Ω(out).Should(gbytes.Say("foo")) + }) + + Context("when told not to stream", func() { + BeforeEach(func() { + writer.SetStream(false) + }) + + It("should only write to the buffer when told to DumpOut", func() { + writer.Write([]byte("foo")) + Ω(out).ShouldNot(gbytes.Say("foo")) + writer.DumpOut() + Ω(out).Should(gbytes.Say("foo")) + }) + + It("should truncate the internal buffer when told to truncate", func() { + writer.Write([]byte("foo")) + writer.Truncate() + writer.DumpOut() + Ω(out).ShouldNot(gbytes.Say("foo")) + + writer.Write([]byte("bar")) + writer.DumpOut() + Ω(out).Should(gbytes.Say("bar")) + }) + + Describe("emitting a header", func() { + Context("when the buffer has content", func() { + It("should emit the header followed by the content", func() { + writer.Write([]byte("foo")) + writer.DumpOutWithHeader("my header") + + Ω(out).Should(gbytes.Say("my header")) + Ω(out).Should(gbytes.Say("foo")) + }) + }) + + Context("when the buffer has no content", func() { + It("should not emit the header", func() { + writer.DumpOutWithHeader("my header") + + Ω(out).ShouldNot(gbytes.Say("my header")) + }) + }) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/default_reporter.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/default_reporter.go new file mode 100644 index 0000000000000..45a44deed9257 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/default_reporter.go @@ -0,0 +1,83 @@ +/* +Ginkgo's Default Reporter + +A number of command line flags are available to tweak Ginkgo's default output. 
+ +These are documented [here](http://onsi.github.io/ginkgo/#running_tests) +*/ +package reporters + +import ( + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/reporters/stenographer" + "github.com/onsi/ginkgo/types" +) + +type DefaultReporter struct { + config config.DefaultReporterConfigType + stenographer stenographer.Stenographer + specSummaries []*types.SpecSummary +} + +func NewDefaultReporter(config config.DefaultReporterConfigType, stenographer stenographer.Stenographer) *DefaultReporter { + return &DefaultReporter{ + config: config, + stenographer: stenographer, + } +} + +func (reporter *DefaultReporter) SpecSuiteWillBegin(config config.GinkgoConfigType, summary *types.SuiteSummary) { + reporter.stenographer.AnnounceSuite(summary.SuiteDescription, config.RandomSeed, config.RandomizeAllSpecs, reporter.config.Succinct) + if config.ParallelTotal > 1 { + reporter.stenographer.AnnounceParallelRun(config.ParallelNode, config.ParallelTotal, summary.NumberOfTotalSpecs, summary.NumberOfSpecsBeforeParallelization, reporter.config.Succinct) + } + reporter.stenographer.AnnounceNumberOfSpecs(summary.NumberOfSpecsThatWillBeRun, summary.NumberOfTotalSpecs, reporter.config.Succinct) +} + +func (reporter *DefaultReporter) BeforeSuiteDidRun(setupSummary *types.SetupSummary) { + if setupSummary.State != types.SpecStatePassed { + reporter.stenographer.AnnounceBeforeSuiteFailure(setupSummary, reporter.config.Succinct, reporter.config.FullTrace) + } +} + +func (reporter *DefaultReporter) AfterSuiteDidRun(setupSummary *types.SetupSummary) { + if setupSummary.State != types.SpecStatePassed { + reporter.stenographer.AnnounceAfterSuiteFailure(setupSummary, reporter.config.Succinct, reporter.config.FullTrace) + } +} + +func (reporter *DefaultReporter) SpecWillRun(specSummary *types.SpecSummary) { + if reporter.config.Verbose && !reporter.config.Succinct && specSummary.State != types.SpecStatePending && specSummary.State != types.SpecStateSkipped { + reporter.stenographer.AnnounceSpecWillRun(specSummary) + } +} + +func (reporter *DefaultReporter) SpecDidComplete(specSummary *types.SpecSummary) { + switch specSummary.State { + case types.SpecStatePassed: + if specSummary.IsMeasurement { + reporter.stenographer.AnnounceSuccesfulMeasurement(specSummary, reporter.config.Succinct) + } else if specSummary.RunTime.Seconds() >= reporter.config.SlowSpecThreshold { + reporter.stenographer.AnnounceSuccesfulSlowSpec(specSummary, reporter.config.Succinct) + } else { + reporter.stenographer.AnnounceSuccesfulSpec(specSummary) + } + case types.SpecStatePending: + reporter.stenographer.AnnouncePendingSpec(specSummary, reporter.config.NoisyPendings && !reporter.config.Succinct) + case types.SpecStateSkipped: + reporter.stenographer.AnnounceSkippedSpec(specSummary) + case types.SpecStateTimedOut: + reporter.stenographer.AnnounceSpecTimedOut(specSummary, reporter.config.Succinct, reporter.config.FullTrace) + case types.SpecStatePanicked: + reporter.stenographer.AnnounceSpecPanicked(specSummary, reporter.config.Succinct, reporter.config.FullTrace) + case types.SpecStateFailed: + reporter.stenographer.AnnounceSpecFailed(specSummary, reporter.config.Succinct, reporter.config.FullTrace) + } + + reporter.specSummaries = append(reporter.specSummaries, specSummary) +} + +func (reporter *DefaultReporter) SpecSuiteDidEnd(summary *types.SuiteSummary) { + reporter.stenographer.SummarizeFailures(reporter.specSummaries) + reporter.stenographer.AnnounceSpecRunCompletion(summary, reporter.config.Succinct) +} diff --git 
a/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/default_reporter_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/default_reporter_test.go new file mode 100644 index 0000000000000..3b719a1e055b0 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/default_reporter_test.go @@ -0,0 +1,397 @@ +package reporters_test + +import ( + "time" + + . "github.com/onsi/ginkgo" + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/reporters" + st "github.com/onsi/ginkgo/reporters/stenographer" + "github.com/onsi/ginkgo/types" + . "github.com/onsi/gomega" +) + +var _ = Describe("DefaultReporter", func() { + var ( + reporter *reporters.DefaultReporter + reporterConfig config.DefaultReporterConfigType + stenographer *st.FakeStenographer + + ginkgoConfig config.GinkgoConfigType + suite *types.SuiteSummary + spec *types.SpecSummary + ) + + BeforeEach(func() { + stenographer = st.NewFakeStenographer() + reporterConfig = config.DefaultReporterConfigType{ + NoColor: false, + SlowSpecThreshold: 0.1, + NoisyPendings: true, + Verbose: true, + FullTrace: true, + } + + reporter = reporters.NewDefaultReporter(reporterConfig, stenographer) + }) + + call := func(method string, args ...interface{}) st.FakeStenographerCall { + return st.NewFakeStenographerCall(method, args...) + } + + Describe("SpecSuiteWillBegin", func() { + BeforeEach(func() { + suite = &types.SuiteSummary{ + SuiteDescription: "A Sweet Suite", + NumberOfTotalSpecs: 10, + NumberOfSpecsThatWillBeRun: 8, + } + + ginkgoConfig = config.GinkgoConfigType{ + RandomSeed: 1138, + RandomizeAllSpecs: true, + } + }) + + Context("when a serial (non-parallel) suite begins", func() { + BeforeEach(func() { + ginkgoConfig.ParallelTotal = 1 + + reporter.SpecSuiteWillBegin(ginkgoConfig, suite) + }) + + It("should announce the suite, then announce the number of specs", func() { + Ω(stenographer.Calls()).Should(HaveLen(2)) + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuite", "A Sweet Suite", ginkgoConfig.RandomSeed, true, false))) + Ω(stenographer.Calls()[1]).Should(Equal(call("AnnounceNumberOfSpecs", 8, 10, false))) + }) + }) + + Context("when a parallel suite begins", func() { + BeforeEach(func() { + ginkgoConfig.ParallelTotal = 2 + ginkgoConfig.ParallelNode = 1 + suite.NumberOfSpecsBeforeParallelization = 20 + + reporter.SpecSuiteWillBegin(ginkgoConfig, suite) + }) + + It("should announce the suite, announce that it's a parallel run, then announce the number of specs", func() { + Ω(stenographer.Calls()).Should(HaveLen(3)) + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuite", "A Sweet Suite", ginkgoConfig.RandomSeed, true, false))) + Ω(stenographer.Calls()[1]).Should(Equal(call("AnnounceParallelRun", 1, 2, 10, 20, false))) + Ω(stenographer.Calls()[2]).Should(Equal(call("AnnounceNumberOfSpecs", 8, 10, false))) + }) + }) + }) + + Describe("BeforeSuiteDidRun", func() { + Context("when the BeforeSuite passes", func() { + It("should announce nothing", func() { + reporter.BeforeSuiteDidRun(&types.SetupSummary{ + State: types.SpecStatePassed, + }) + + Ω(stenographer.Calls()).Should(BeEmpty()) + }) + }) + + Context("when the BeforeSuite fails", func() { + It("should announce the failure", func() { + summary := &types.SetupSummary{ + State: types.SpecStateFailed, + } + reporter.BeforeSuiteDidRun(summary) + + Ω(stenographer.Calls()).Should(HaveLen(1)) + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceBeforeSuiteFailure", summary, false, true))) + }) + }) + }) + + Describe("AfterSuiteDidRun", func() { + 
Context("when the AfterSuite passes", func() { + It("should announce nothing", func() { + reporter.AfterSuiteDidRun(&types.SetupSummary{ + State: types.SpecStatePassed, + }) + + Ω(stenographer.Calls()).Should(BeEmpty()) + }) + }) + + Context("when the AfterSuite fails", func() { + It("should announce the failure", func() { + summary := &types.SetupSummary{ + State: types.SpecStateFailed, + } + reporter.AfterSuiteDidRun(summary) + + Ω(stenographer.Calls()).Should(HaveLen(1)) + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceAfterSuiteFailure", summary, false, true))) + }) + }) + }) + + Describe("SpecWillRun", func() { + Context("When running in verbose mode", func() { + Context("and the spec will run", func() { + BeforeEach(func() { + spec = &types.SpecSummary{} + reporter.SpecWillRun(spec) + }) + + It("should announce that the spec will run", func() { + Ω(stenographer.Calls()).Should(HaveLen(1)) + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSpecWillRun", spec))) + }) + }) + + Context("and the spec will not run", func() { + Context("because it is pending", func() { + BeforeEach(func() { + spec = &types.SpecSummary{ + State: types.SpecStatePending, + } + reporter.SpecWillRun(spec) + }) + + It("should announce nothing", func() { + Ω(stenographer.Calls()).Should(BeEmpty()) + }) + }) + + Context("because it is skipped", func() { + BeforeEach(func() { + spec = &types.SpecSummary{ + State: types.SpecStateSkipped, + } + reporter.SpecWillRun(spec) + }) + + It("should announce nothing", func() { + Ω(stenographer.Calls()).Should(BeEmpty()) + }) + }) + }) + }) + + Context("When running in verbose & succinct mode", func() { + BeforeEach(func() { + reporterConfig.Succinct = true + reporter = reporters.NewDefaultReporter(reporterConfig, stenographer) + spec = &types.SpecSummary{} + reporter.SpecWillRun(spec) + }) + + It("should announce nothing", func() { + Ω(stenographer.Calls()).Should(BeEmpty()) + }) + }) + + Context("When not running in verbose mode", func() { + BeforeEach(func() { + reporterConfig.Verbose = false + reporter = reporters.NewDefaultReporter(reporterConfig, stenographer) + spec = &types.SpecSummary{} + reporter.SpecWillRun(spec) + }) + + It("should announce nothing", func() { + Ω(stenographer.Calls()).Should(BeEmpty()) + }) + }) + }) + + Describe("SpecDidComplete", func() { + JustBeforeEach(func() { + reporter.SpecDidComplete(spec) + }) + + BeforeEach(func() { + spec = &types.SpecSummary{} + }) + + Context("When the spec passed", func() { + BeforeEach(func() { + spec.State = types.SpecStatePassed + }) + + Context("When the spec was a measurement", func() { + BeforeEach(func() { + spec.IsMeasurement = true + }) + + It("should announce the measurement", func() { + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuccesfulMeasurement", spec, false))) + }) + }) + + Context("When the spec is slow", func() { + BeforeEach(func() { + spec.RunTime = time.Second + }) + + It("should announce that it was slow", func() { + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuccesfulSlowSpec", spec, false))) + }) + }) + + Context("Otherwise", func() { + It("should announce the succesful spec", func() { + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuccesfulSpec", spec))) + }) + }) + }) + + Context("When the spec is pending", func() { + BeforeEach(func() { + spec.State = types.SpecStatePending + }) + + It("should announce the pending spec", func() { + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnouncePendingSpec", spec, true))) + }) + }) + + Context("When the spec 
is skipped", func() { + BeforeEach(func() { + spec.State = types.SpecStateSkipped + }) + + It("should announce the skipped spec", func() { + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSkippedSpec", spec))) + }) + }) + + Context("When the spec timed out", func() { + BeforeEach(func() { + spec.State = types.SpecStateTimedOut + }) + + It("should announce the timedout spec", func() { + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSpecTimedOut", spec, false, true))) + }) + }) + + Context("When the spec panicked", func() { + BeforeEach(func() { + spec.State = types.SpecStatePanicked + }) + + It("should announce the panicked spec", func() { + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSpecPanicked", spec, false, true))) + }) + }) + + Context("When the spec failed", func() { + BeforeEach(func() { + spec.State = types.SpecStateFailed + }) + + It("should announce the failed spec", func() { + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSpecFailed", spec, false, true))) + }) + }) + + Context("in succinct mode", func() { + BeforeEach(func() { + reporterConfig.Succinct = true + reporter = reporters.NewDefaultReporter(reporterConfig, stenographer) + }) + + Context("When the spec passed", func() { + BeforeEach(func() { + spec.State = types.SpecStatePassed + }) + + Context("When the spec was a measurement", func() { + BeforeEach(func() { + spec.IsMeasurement = true + }) + + It("should announce the measurement", func() { + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuccesfulMeasurement", spec, true))) + }) + }) + + Context("When the spec is slow", func() { + BeforeEach(func() { + spec.RunTime = time.Second + }) + + It("should announce that it was slow", func() { + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuccesfulSlowSpec", spec, true))) + }) + }) + + Context("Otherwise", func() { + It("should announce the succesful spec", func() { + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSuccesfulSpec", spec))) + }) + }) + }) + + Context("When the spec is pending", func() { + BeforeEach(func() { + spec.State = types.SpecStatePending + }) + + It("should announce the pending spec, but never noisily", func() { + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnouncePendingSpec", spec, false))) + }) + }) + + Context("When the spec is skipped", func() { + BeforeEach(func() { + spec.State = types.SpecStateSkipped + }) + + It("should announce the skipped spec", func() { + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSkippedSpec", spec))) + }) + }) + + Context("When the spec timed out", func() { + BeforeEach(func() { + spec.State = types.SpecStateTimedOut + }) + + It("should announce the timedout spec", func() { + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSpecTimedOut", spec, true, true))) + }) + }) + + Context("When the spec panicked", func() { + BeforeEach(func() { + spec.State = types.SpecStatePanicked + }) + + It("should announce the panicked spec", func() { + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSpecPanicked", spec, true, true))) + }) + }) + + Context("When the spec failed", func() { + BeforeEach(func() { + spec.State = types.SpecStateFailed + }) + + It("should announce the failed spec", func() { + Ω(stenographer.Calls()[0]).Should(Equal(call("AnnounceSpecFailed", spec, true, true))) + }) + }) + }) + }) + + Describe("SpecSuiteDidEnd", func() { + BeforeEach(func() { + suite = &types.SuiteSummary{} + reporter.SpecSuiteDidEnd(suite) + }) + + It("should announce the spec run's completion", func() { + 
Ω(stenographer.Calls()[1]).Should(Equal(call("AnnounceSpecRunCompletion", suite, false))) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/fake_reporter.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/fake_reporter.go new file mode 100644 index 0000000000000..27db47949081e --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/fake_reporter.go @@ -0,0 +1,59 @@ +package reporters + +import ( + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/types" +) + +//FakeReporter is useful for testing purposes +type FakeReporter struct { + Config config.GinkgoConfigType + + BeginSummary *types.SuiteSummary + BeforeSuiteSummary *types.SetupSummary + SpecWillRunSummaries []*types.SpecSummary + SpecSummaries []*types.SpecSummary + AfterSuiteSummary *types.SetupSummary + EndSummary *types.SuiteSummary + + SpecWillRunStub func(specSummary *types.SpecSummary) + SpecDidCompleteStub func(specSummary *types.SpecSummary) +} + +func NewFakeReporter() *FakeReporter { + return &FakeReporter{ + SpecWillRunSummaries: make([]*types.SpecSummary, 0), + SpecSummaries: make([]*types.SpecSummary, 0), + } +} + +func (fakeR *FakeReporter) SpecSuiteWillBegin(config config.GinkgoConfigType, summary *types.SuiteSummary) { + fakeR.Config = config + fakeR.BeginSummary = summary +} + +func (fakeR *FakeReporter) BeforeSuiteDidRun(setupSummary *types.SetupSummary) { + fakeR.BeforeSuiteSummary = setupSummary +} + +func (fakeR *FakeReporter) SpecWillRun(specSummary *types.SpecSummary) { + if fakeR.SpecWillRunStub != nil { + fakeR.SpecWillRunStub(specSummary) + } + fakeR.SpecWillRunSummaries = append(fakeR.SpecWillRunSummaries, specSummary) +} + +func (fakeR *FakeReporter) SpecDidComplete(specSummary *types.SpecSummary) { + if fakeR.SpecDidCompleteStub != nil { + fakeR.SpecDidCompleteStub(specSummary) + } + fakeR.SpecSummaries = append(fakeR.SpecSummaries, specSummary) +} + +func (fakeR *FakeReporter) AfterSuiteDidRun(setupSummary *types.SetupSummary) { + fakeR.AfterSuiteSummary = setupSummary +} + +func (fakeR *FakeReporter) SpecSuiteDidEnd(summary *types.SuiteSummary) { + fakeR.EndSummary = summary +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/junit_reporter.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/junit_reporter.go new file mode 100644 index 0000000000000..278a88ed7353d --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/junit_reporter.go @@ -0,0 +1,139 @@ +/* + +JUnit XML Reporter for Ginkgo + +For usage instructions: http://onsi.github.io/ginkgo/#generating_junit_xml_output + +*/ + +package reporters + +import ( + "encoding/xml" + "fmt" + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/types" + "os" + "strings" +) + +type JUnitTestSuite struct { + XMLName xml.Name `xml:"testsuite"` + TestCases []JUnitTestCase `xml:"testcase"` + Tests int `xml:"tests,attr"` + Failures int `xml:"failures,attr"` + Time float64 `xml:"time,attr"` +} + +type JUnitTestCase struct { + Name string `xml:"name,attr"` + ClassName string `xml:"classname,attr"` + FailureMessage *JUnitFailureMessage `xml:"failure,omitempty"` + Skipped *JUnitSkipped `xml:"skipped,omitempty"` + Time float64 `xml:"time,attr"` +} + +type JUnitFailureMessage struct { + Type string `xml:"type,attr"` + Message string `xml:",chardata"` +} + +type JUnitSkipped struct { + XMLName xml.Name `xml:"skipped"` +} + +type JUnitReporter struct { + suite JUnitTestSuite + filename string + testSuiteName string +} + +//NewJUnitReporter creates a new 
JUnit XML reporter. The XML will be stored in the passed in filename. +func NewJUnitReporter(filename string) *JUnitReporter { + return &JUnitReporter{ + filename: filename, + } +} + +func (reporter *JUnitReporter) SpecSuiteWillBegin(config config.GinkgoConfigType, summary *types.SuiteSummary) { + reporter.suite = JUnitTestSuite{ + Tests: summary.NumberOfSpecsThatWillBeRun, + TestCases: []JUnitTestCase{}, + } + reporter.testSuiteName = summary.SuiteDescription +} + +func (reporter *JUnitReporter) SpecWillRun(specSummary *types.SpecSummary) { +} + +func (reporter *JUnitReporter) BeforeSuiteDidRun(setupSummary *types.SetupSummary) { + reporter.handleSetupSummary("BeforeSuite", setupSummary) +} + +func (reporter *JUnitReporter) AfterSuiteDidRun(setupSummary *types.SetupSummary) { + reporter.handleSetupSummary("AfterSuite", setupSummary) +} + +func (reporter *JUnitReporter) handleSetupSummary(name string, setupSummary *types.SetupSummary) { + if setupSummary.State != types.SpecStatePassed { + testCase := JUnitTestCase{ + Name: name, + ClassName: reporter.testSuiteName, + } + + testCase.FailureMessage = &JUnitFailureMessage{ + Type: reporter.failureTypeForState(setupSummary.State), + Message: fmt.Sprintf("%s\n%s", setupSummary.Failure.ComponentCodeLocation.String(), setupSummary.Failure.Message), + } + testCase.Time = setupSummary.RunTime.Seconds() + reporter.suite.TestCases = append(reporter.suite.TestCases, testCase) + } +} + +func (reporter *JUnitReporter) SpecDidComplete(specSummary *types.SpecSummary) { + testCase := JUnitTestCase{ + Name: strings.Join(specSummary.ComponentTexts[1:], " "), + ClassName: reporter.testSuiteName, + } + if specSummary.State == types.SpecStateFailed || specSummary.State == types.SpecStateTimedOut || specSummary.State == types.SpecStatePanicked { + testCase.FailureMessage = &JUnitFailureMessage{ + Type: reporter.failureTypeForState(specSummary.State), + Message: fmt.Sprintf("%s\n%s", specSummary.Failure.ComponentCodeLocation.String(), specSummary.Failure.Message), + } + } + if specSummary.State == types.SpecStateSkipped || specSummary.State == types.SpecStatePending { + testCase.Skipped = &JUnitSkipped{} + } + testCase.Time = specSummary.RunTime.Seconds() + reporter.suite.TestCases = append(reporter.suite.TestCases, testCase) +} + +func (reporter *JUnitReporter) SpecSuiteDidEnd(summary *types.SuiteSummary) { + reporter.suite.Time = summary.RunTime.Seconds() + reporter.suite.Failures = summary.NumberOfFailedSpecs + file, err := os.Create(reporter.filename) + if err != nil { + fmt.Printf("Failed to create JUnit report file: %s\n\t%s", reporter.filename, err.Error()) + } + defer file.Close() + file.WriteString(xml.Header) + encoder := xml.NewEncoder(file) + encoder.Indent(" ", " ") + err = encoder.Encode(reporter.suite) + if err != nil { + fmt.Printf("Failed to generate JUnit report\n\t%s", err.Error()) + } +} + +func (reporter *JUnitReporter) failureTypeForState(state types.SpecState) string { + switch state { + case types.SpecStateFailed: + return "Failure" + case types.SpecStateTimedOut: + return "Timeout" + case types.SpecStatePanicked: + return "Panic" + default: + return "" + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/junit_reporter_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/junit_reporter_test.go new file mode 100644 index 0000000000000..744f383a05f05 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/junit_reporter_test.go @@ -0,0 +1,241 @@ +package reporters_test + +import ( + 
"encoding/xml" + "io/ioutil" + "os" + "time" + + . "github.com/onsi/ginkgo" + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/internal/codelocation" + "github.com/onsi/ginkgo/reporters" + "github.com/onsi/ginkgo/types" + . "github.com/onsi/gomega" +) + +var _ = Describe("JUnit Reporter", func() { + var ( + outputFile string + reporter Reporter + ) + + readOutputFile := func() reporters.JUnitTestSuite { + bytes, err := ioutil.ReadFile(outputFile) + Ω(err).ShouldNot(HaveOccurred()) + var suite reporters.JUnitTestSuite + err = xml.Unmarshal(bytes, &suite) + Ω(err).ShouldNot(HaveOccurred()) + return suite + } + + BeforeEach(func() { + f, err := ioutil.TempFile("", "output") + Ω(err).ShouldNot(HaveOccurred()) + f.Close() + outputFile = f.Name() + + reporter = reporters.NewJUnitReporter(outputFile) + + reporter.SpecSuiteWillBegin(config.GinkgoConfigType{}, &types.SuiteSummary{ + SuiteDescription: "My test suite", + NumberOfSpecsThatWillBeRun: 1, + }) + }) + + AfterEach(func() { + os.RemoveAll(outputFile) + }) + + Describe("a passing test", func() { + BeforeEach(func() { + beforeSuite := &types.SetupSummary{ + State: types.SpecStatePassed, + } + reporter.BeforeSuiteDidRun(beforeSuite) + + afterSuite := &types.SetupSummary{ + State: types.SpecStatePassed, + } + reporter.AfterSuiteDidRun(afterSuite) + + spec := &types.SpecSummary{ + ComponentTexts: []string{"[Top Level]", "A", "B", "C"}, + State: types.SpecStatePassed, + RunTime: 5 * time.Second, + } + reporter.SpecWillRun(spec) + reporter.SpecDidComplete(spec) + + reporter.SpecSuiteDidEnd(&types.SuiteSummary{ + NumberOfSpecsThatWillBeRun: 1, + NumberOfFailedSpecs: 0, + RunTime: 10 * time.Second, + }) + }) + + It("should record the test as passing", func() { + output := readOutputFile() + Ω(output.Tests).Should(Equal(1)) + Ω(output.Failures).Should(Equal(0)) + Ω(output.Time).Should(Equal(10.0)) + Ω(output.TestCases).Should(HaveLen(1)) + Ω(output.TestCases[0].Name).Should(Equal("A B C")) + Ω(output.TestCases[0].ClassName).Should(Equal("My test suite")) + Ω(output.TestCases[0].FailureMessage).Should(BeNil()) + Ω(output.TestCases[0].Skipped).Should(BeNil()) + Ω(output.TestCases[0].Time).Should(Equal(5.0)) + }) + }) + + Describe("when the BeforeSuite fails", func() { + var beforeSuite *types.SetupSummary + + BeforeEach(func() { + beforeSuite = &types.SetupSummary{ + State: types.SpecStateFailed, + RunTime: 3 * time.Second, + Failure: types.SpecFailure{ + Message: "failed to setup", + ComponentCodeLocation: codelocation.New(0), + }, + } + reporter.BeforeSuiteDidRun(beforeSuite) + + reporter.SpecSuiteDidEnd(&types.SuiteSummary{ + NumberOfSpecsThatWillBeRun: 1, + NumberOfFailedSpecs: 1, + RunTime: 10 * time.Second, + }) + }) + + It("should record the test as having failed", func() { + output := readOutputFile() + Ω(output.Tests).Should(Equal(1)) + Ω(output.Failures).Should(Equal(1)) + Ω(output.Time).Should(Equal(10.0)) + Ω(output.TestCases[0].Name).Should(Equal("BeforeSuite")) + Ω(output.TestCases[0].Time).Should(Equal(3.0)) + Ω(output.TestCases[0].ClassName).Should(Equal("My test suite")) + Ω(output.TestCases[0].FailureMessage.Type).Should(Equal("Failure")) + Ω(output.TestCases[0].FailureMessage.Message).Should(ContainSubstring("failed to setup")) + Ω(output.TestCases[0].FailureMessage.Message).Should(ContainSubstring(beforeSuite.Failure.ComponentCodeLocation.String())) + Ω(output.TestCases[0].Skipped).Should(BeNil()) + }) + }) + + Describe("when the AfterSuite fails", func() { + var afterSuite *types.SetupSummary + + BeforeEach(func() { + 
afterSuite = &types.SetupSummary{ + State: types.SpecStateFailed, + RunTime: 3 * time.Second, + Failure: types.SpecFailure{ + Message: "failed to setup", + ComponentCodeLocation: codelocation.New(0), + }, + } + reporter.AfterSuiteDidRun(afterSuite) + + reporter.SpecSuiteDidEnd(&types.SuiteSummary{ + NumberOfSpecsThatWillBeRun: 1, + NumberOfFailedSpecs: 1, + RunTime: 10 * time.Second, + }) + }) + + It("should record the test as having failed", func() { + output := readOutputFile() + Ω(output.Tests).Should(Equal(1)) + Ω(output.Failures).Should(Equal(1)) + Ω(output.Time).Should(Equal(10.0)) + Ω(output.TestCases[0].Name).Should(Equal("AfterSuite")) + Ω(output.TestCases[0].Time).Should(Equal(3.0)) + Ω(output.TestCases[0].ClassName).Should(Equal("My test suite")) + Ω(output.TestCases[0].FailureMessage.Type).Should(Equal("Failure")) + Ω(output.TestCases[0].FailureMessage.Message).Should(ContainSubstring("failed to setup")) + Ω(output.TestCases[0].FailureMessage.Message).Should(ContainSubstring(afterSuite.Failure.ComponentCodeLocation.String())) + Ω(output.TestCases[0].Skipped).Should(BeNil()) + }) + }) + + specStateCases := []struct { + state types.SpecState + message string + }{ + {types.SpecStateFailed, "Failure"}, + {types.SpecStateTimedOut, "Timeout"}, + {types.SpecStatePanicked, "Panic"}, + } + + for _, specStateCase := range specStateCases { + specStateCase := specStateCase + Describe("a failing test", func() { + var spec *types.SpecSummary + BeforeEach(func() { + spec = &types.SpecSummary{ + ComponentTexts: []string{"[Top Level]", "A", "B", "C"}, + State: specStateCase.state, + RunTime: 5 * time.Second, + Failure: types.SpecFailure{ + ComponentCodeLocation: codelocation.New(0), + Message: "I failed", + }, + } + reporter.SpecWillRun(spec) + reporter.SpecDidComplete(spec) + + reporter.SpecSuiteDidEnd(&types.SuiteSummary{ + NumberOfSpecsThatWillBeRun: 1, + NumberOfFailedSpecs: 1, + RunTime: 10 * time.Second, + }) + }) + + It("should record test as failing", func() { + output := readOutputFile() + Ω(output.Tests).Should(Equal(1)) + Ω(output.Failures).Should(Equal(1)) + Ω(output.Time).Should(Equal(10.0)) + Ω(output.TestCases[0].Name).Should(Equal("A B C")) + Ω(output.TestCases[0].ClassName).Should(Equal("My test suite")) + Ω(output.TestCases[0].FailureMessage.Type).Should(Equal(specStateCase.message)) + Ω(output.TestCases[0].FailureMessage.Message).Should(ContainSubstring("I failed")) + Ω(output.TestCases[0].FailureMessage.Message).Should(ContainSubstring(spec.Failure.ComponentCodeLocation.String())) + Ω(output.TestCases[0].Skipped).Should(BeNil()) + }) + }) + } + + for _, specStateCase := range []types.SpecState{types.SpecStatePending, types.SpecStateSkipped} { + specStateCase := specStateCase + Describe("a skipped test", func() { + var spec *types.SpecSummary + BeforeEach(func() { + spec = &types.SpecSummary{ + ComponentTexts: []string{"[Top Level]", "A", "B", "C"}, + State: specStateCase, + RunTime: 5 * time.Second, + } + reporter.SpecWillRun(spec) + reporter.SpecDidComplete(spec) + + reporter.SpecSuiteDidEnd(&types.SuiteSummary{ + NumberOfSpecsThatWillBeRun: 1, + NumberOfFailedSpecs: 0, + RunTime: 10 * time.Second, + }) + }) + + It("should record test as failing", func() { + output := readOutputFile() + Ω(output.Tests).Should(Equal(1)) + Ω(output.Failures).Should(Equal(0)) + Ω(output.Time).Should(Equal(10.0)) + Ω(output.TestCases[0].Name).Should(Equal("A B C")) + Ω(output.TestCases[0].Skipped).ShouldNot(BeNil()) + }) + }) + } +}) diff --git 
a/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/reporter.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/reporter.go new file mode 100644 index 0000000000000..348b9dfce1f67 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/reporter.go @@ -0,0 +1,15 @@ +package reporters + +import ( + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/types" +) + +type Reporter interface { + SpecSuiteWillBegin(config config.GinkgoConfigType, summary *types.SuiteSummary) + BeforeSuiteDidRun(setupSummary *types.SetupSummary) + SpecWillRun(specSummary *types.SpecSummary) + SpecDidComplete(specSummary *types.SpecSummary) + AfterSuiteDidRun(setupSummary *types.SetupSummary) + SpecSuiteDidEnd(summary *types.SuiteSummary) +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/reporters_suite_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/reporters_suite_test.go new file mode 100644 index 0000000000000..cec5a4dbfb0d3 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/reporters_suite_test.go @@ -0,0 +1,13 @@ +package reporters_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + "testing" +) + +func TestReporters(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Reporters Suite") +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/stenographer/console_logging.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/stenographer/console_logging.go new file mode 100644 index 0000000000000..ce5433af6a8e3 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/stenographer/console_logging.go @@ -0,0 +1,64 @@ +package stenographer + +import ( + "fmt" + "strings" +) + +func (s *consoleStenographer) colorize(colorCode string, format string, args ...interface{}) string { + var out string + + if len(args) > 0 { + out = fmt.Sprintf(format, args...) + } else { + out = format + } + + if s.color { + return fmt.Sprintf("%s%s%s", colorCode, out, defaultStyle) + } else { + return out + } +} + +func (s *consoleStenographer) printBanner(text string, bannerCharacter string) { + fmt.Println(text) + fmt.Println(strings.Repeat(bannerCharacter, len(text))) +} + +func (s *consoleStenographer) printNewLine() { + fmt.Println("") +} + +func (s *consoleStenographer) printDelimiter() { + fmt.Println(s.colorize(grayColor, "%s", strings.Repeat("-", 30))) +} + +func (s *consoleStenographer) print(indentation int, format string, args ...interface{}) { + fmt.Print(s.indent(indentation, format, args...)) +} + +func (s *consoleStenographer) println(indentation int, format string, args ...interface{}) { + fmt.Println(s.indent(indentation, format, args...)) +} + +func (s *consoleStenographer) indent(indentation int, format string, args ...interface{}) string { + var text string + + if len(args) > 0 { + text = fmt.Sprintf(format, args...) 
+ } else { + text = format + } + + stringArray := strings.Split(text, "\n") + padding := "" + if indentation >= 0 { + padding = strings.Repeat(" ", indentation) + } + for i, s := range stringArray { + stringArray[i] = fmt.Sprintf("%s%s", padding, s) + } + + return strings.Join(stringArray, "\n") +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/stenographer/fake_stenographer.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/stenographer/fake_stenographer.go new file mode 100644 index 0000000000000..3a1e0c2d71cfe --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/stenographer/fake_stenographer.go @@ -0,0 +1,138 @@ +package stenographer + +import ( + "sync" + + "github.com/onsi/ginkgo/types" +) + +func NewFakeStenographerCall(method string, args ...interface{}) FakeStenographerCall { + return FakeStenographerCall{ + Method: method, + Args: args, + } +} + +type FakeStenographer struct { + calls []FakeStenographerCall + lock *sync.Mutex +} + +type FakeStenographerCall struct { + Method string + Args []interface{} +} + +func NewFakeStenographer() *FakeStenographer { + stenographer := &FakeStenographer{ + lock: &sync.Mutex{}, + } + stenographer.Reset() + return stenographer +} + +func (stenographer *FakeStenographer) Calls() []FakeStenographerCall { + stenographer.lock.Lock() + defer stenographer.lock.Unlock() + + return stenographer.calls +} + +func (stenographer *FakeStenographer) Reset() { + stenographer.lock.Lock() + defer stenographer.lock.Unlock() + + stenographer.calls = make([]FakeStenographerCall, 0) +} + +func (stenographer *FakeStenographer) CallsTo(method string) []FakeStenographerCall { + stenographer.lock.Lock() + defer stenographer.lock.Unlock() + + results := make([]FakeStenographerCall, 0) + for _, call := range stenographer.calls { + if call.Method == method { + results = append(results, call) + } + } + + return results +} + +func (stenographer *FakeStenographer) registerCall(method string, args ...interface{}) { + stenographer.lock.Lock() + defer stenographer.lock.Unlock() + + stenographer.calls = append(stenographer.calls, NewFakeStenographerCall(method, args...)) +} + +func (stenographer *FakeStenographer) AnnounceSuite(description string, randomSeed int64, randomizingAll bool, succinct bool) { + stenographer.registerCall("AnnounceSuite", description, randomSeed, randomizingAll, succinct) +} + +func (stenographer *FakeStenographer) AnnounceAggregatedParallelRun(nodes int, succinct bool) { + stenographer.registerCall("AnnounceAggregatedParallelRun", nodes, succinct) +} + +func (stenographer *FakeStenographer) AnnounceParallelRun(node int, nodes int, specsToRun int, totalSpecs int, succinct bool) { + stenographer.registerCall("AnnounceParallelRun", node, nodes, specsToRun, totalSpecs, succinct) +} + +func (stenographer *FakeStenographer) AnnounceNumberOfSpecs(specsToRun int, total int, succinct bool) { + stenographer.registerCall("AnnounceNumberOfSpecs", specsToRun, total, succinct) +} + +func (stenographer *FakeStenographer) AnnounceSpecRunCompletion(summary *types.SuiteSummary, succinct bool) { + stenographer.registerCall("AnnounceSpecRunCompletion", summary, succinct) +} + +func (stenographer *FakeStenographer) AnnounceSpecWillRun(spec *types.SpecSummary) { + stenographer.registerCall("AnnounceSpecWillRun", spec) +} + +func (stenographer *FakeStenographer) AnnounceBeforeSuiteFailure(summary *types.SetupSummary, succinct bool, fullTrace bool) { + stenographer.registerCall("AnnounceBeforeSuiteFailure", summary, succinct, 
fullTrace) +} + +func (stenographer *FakeStenographer) AnnounceAfterSuiteFailure(summary *types.SetupSummary, succinct bool, fullTrace bool) { + stenographer.registerCall("AnnounceAfterSuiteFailure", summary, succinct, fullTrace) +} +func (stenographer *FakeStenographer) AnnounceCapturedOutput(output string) { + stenographer.registerCall("AnnounceCapturedOutput", output) +} + +func (stenographer *FakeStenographer) AnnounceSuccesfulSpec(spec *types.SpecSummary) { + stenographer.registerCall("AnnounceSuccesfulSpec", spec) +} + +func (stenographer *FakeStenographer) AnnounceSuccesfulSlowSpec(spec *types.SpecSummary, succinct bool) { + stenographer.registerCall("AnnounceSuccesfulSlowSpec", spec, succinct) +} + +func (stenographer *FakeStenographer) AnnounceSuccesfulMeasurement(spec *types.SpecSummary, succinct bool) { + stenographer.registerCall("AnnounceSuccesfulMeasurement", spec, succinct) +} + +func (stenographer *FakeStenographer) AnnouncePendingSpec(spec *types.SpecSummary, noisy bool) { + stenographer.registerCall("AnnouncePendingSpec", spec, noisy) +} + +func (stenographer *FakeStenographer) AnnounceSkippedSpec(spec *types.SpecSummary) { + stenographer.registerCall("AnnounceSkippedSpec", spec) +} + +func (stenographer *FakeStenographer) AnnounceSpecTimedOut(spec *types.SpecSummary, succinct bool, fullTrace bool) { + stenographer.registerCall("AnnounceSpecTimedOut", spec, succinct, fullTrace) +} + +func (stenographer *FakeStenographer) AnnounceSpecPanicked(spec *types.SpecSummary, succinct bool, fullTrace bool) { + stenographer.registerCall("AnnounceSpecPanicked", spec, succinct, fullTrace) +} + +func (stenographer *FakeStenographer) AnnounceSpecFailed(spec *types.SpecSummary, succinct bool, fullTrace bool) { + stenographer.registerCall("AnnounceSpecFailed", spec, succinct, fullTrace) +} + +func (stenographer *FakeStenographer) SummarizeFailures(summaries []*types.SpecSummary) { + stenographer.registerCall("SummarizeFailures", summaries) +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/stenographer/stenographer.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/stenographer/stenographer.go new file mode 100644 index 0000000000000..d82cdb2de3002 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/stenographer/stenographer.go @@ -0,0 +1,520 @@ +/* +The stenographer is used by Ginkgo's reporters to generate output. + +Move along, nothing to see here. 
+*/ + +package stenographer + +import ( + "fmt" + "strings" + + "github.com/onsi/ginkgo/types" +) + +const defaultStyle = "\x1b[0m" +const boldStyle = "\x1b[1m" +const redColor = "\x1b[91m" +const greenColor = "\x1b[32m" +const yellowColor = "\x1b[33m" +const cyanColor = "\x1b[36m" +const grayColor = "\x1b[90m" +const lightGrayColor = "\x1b[37m" + +type cursorStateType int + +const ( + cursorStateTop cursorStateType = iota + cursorStateStreaming + cursorStateMidBlock + cursorStateEndBlock +) + +type Stenographer interface { + AnnounceSuite(description string, randomSeed int64, randomizingAll bool, succinct bool) + AnnounceAggregatedParallelRun(nodes int, succinct bool) + AnnounceParallelRun(node int, nodes int, specsToRun int, totalSpecs int, succinct bool) + AnnounceNumberOfSpecs(specsToRun int, total int, succinct bool) + AnnounceSpecRunCompletion(summary *types.SuiteSummary, succinct bool) + + AnnounceSpecWillRun(spec *types.SpecSummary) + AnnounceBeforeSuiteFailure(summary *types.SetupSummary, succinct bool, fullTrace bool) + AnnounceAfterSuiteFailure(summary *types.SetupSummary, succinct bool, fullTrace bool) + + AnnounceCapturedOutput(output string) + + AnnounceSuccesfulSpec(spec *types.SpecSummary) + AnnounceSuccesfulSlowSpec(spec *types.SpecSummary, succinct bool) + AnnounceSuccesfulMeasurement(spec *types.SpecSummary, succinct bool) + + AnnouncePendingSpec(spec *types.SpecSummary, noisy bool) + AnnounceSkippedSpec(spec *types.SpecSummary) + + AnnounceSpecTimedOut(spec *types.SpecSummary, succinct bool, fullTrace bool) + AnnounceSpecPanicked(spec *types.SpecSummary, succinct bool, fullTrace bool) + AnnounceSpecFailed(spec *types.SpecSummary, succinct bool, fullTrace bool) + + SummarizeFailures(summaries []*types.SpecSummary) +} + +func New(color bool) Stenographer { + return &consoleStenographer{ + color: color, + cursorState: cursorStateTop, + } +} + +type consoleStenographer struct { + color bool + cursorState cursorStateType +} + +var alternatingColors = []string{defaultStyle, grayColor} + +func (s *consoleStenographer) AnnounceSuite(description string, randomSeed int64, randomizingAll bool, succinct bool) { + if succinct { + s.print(0, "[%d] %s ", randomSeed, s.colorize(boldStyle, description)) + return + } + s.printBanner(fmt.Sprintf("Running Suite: %s", description), "=") + s.print(0, "Random Seed: %s", s.colorize(boldStyle, "%d", randomSeed)) + if randomizingAll { + s.print(0, " - Will randomize all specs") + } + s.printNewLine() +} + +func (s *consoleStenographer) AnnounceParallelRun(node int, nodes int, specsToRun int, totalSpecs int, succinct bool) { + if succinct { + s.print(0, "- node #%d ", node) + return + } + s.println(0, + "Parallel test node %s/%s. 
Assigned %s of %s specs.", + s.colorize(boldStyle, "%d", node), + s.colorize(boldStyle, "%d", nodes), + s.colorize(boldStyle, "%d", specsToRun), + s.colorize(boldStyle, "%d", totalSpecs), + ) + s.printNewLine() +} + +func (s *consoleStenographer) AnnounceAggregatedParallelRun(nodes int, succinct bool) { + if succinct { + s.print(0, "- %d nodes ", nodes) + return + } + s.println(0, + "Running in parallel across %s nodes", + s.colorize(boldStyle, "%d", nodes), + ) + s.printNewLine() +} + +func (s *consoleStenographer) AnnounceNumberOfSpecs(specsToRun int, total int, succinct bool) { + if succinct { + s.print(0, "- %d/%d specs ", specsToRun, total) + s.stream() + return + } + s.println(0, + "Will run %s of %s specs", + s.colorize(boldStyle, "%d", specsToRun), + s.colorize(boldStyle, "%d", total), + ) + + s.printNewLine() +} + +func (s *consoleStenographer) AnnounceSpecRunCompletion(summary *types.SuiteSummary, succinct bool) { + if succinct && summary.SuiteSucceeded { + s.print(0, " %s %s ", s.colorize(greenColor, "SUCCESS!"), summary.RunTime) + return + } + s.printNewLine() + color := greenColor + if !summary.SuiteSucceeded { + color = redColor + } + s.println(0, s.colorize(boldStyle+color, "Ran %d of %d Specs in %.3f seconds", summary.NumberOfSpecsThatWillBeRun, summary.NumberOfTotalSpecs, summary.RunTime.Seconds())) + + status := "" + if summary.SuiteSucceeded { + status = s.colorize(boldStyle+greenColor, "SUCCESS!") + } else { + status = s.colorize(boldStyle+redColor, "FAIL!") + } + + s.print(0, + "%s -- %s | %s | %s | %s ", + status, + s.colorize(greenColor+boldStyle, "%d Passed", summary.NumberOfPassedSpecs), + s.colorize(redColor+boldStyle, "%d Failed", summary.NumberOfFailedSpecs), + s.colorize(yellowColor+boldStyle, "%d Pending", summary.NumberOfPendingSpecs), + s.colorize(cyanColor+boldStyle, "%d Skipped", summary.NumberOfSkippedSpecs), + ) +} + +func (s *consoleStenographer) AnnounceSpecWillRun(spec *types.SpecSummary) { + s.startBlock() + for i, text := range spec.ComponentTexts[1 : len(spec.ComponentTexts)-1] { + s.print(0, s.colorize(alternatingColors[i%2], text)+" ") + } + + indentation := 0 + if len(spec.ComponentTexts) > 2 { + indentation = 1 + s.printNewLine() + } + index := len(spec.ComponentTexts) - 1 + s.print(indentation, s.colorize(boldStyle, spec.ComponentTexts[index])) + s.printNewLine() + s.print(indentation, s.colorize(lightGrayColor, spec.ComponentCodeLocations[index].String())) + s.printNewLine() + s.midBlock() +} + +func (s *consoleStenographer) AnnounceBeforeSuiteFailure(summary *types.SetupSummary, succinct bool, fullTrace bool) { + s.announceSetupFailure("BeforeSuite", summary, succinct, fullTrace) +} + +func (s *consoleStenographer) AnnounceAfterSuiteFailure(summary *types.SetupSummary, succinct bool, fullTrace bool) { + s.announceSetupFailure("AfterSuite", summary, succinct, fullTrace) +} + +func (s *consoleStenographer) announceSetupFailure(name string, summary *types.SetupSummary, succinct bool, fullTrace bool) { + s.startBlock() + var message string + switch summary.State { + case types.SpecStateFailed: + message = "Failure" + case types.SpecStatePanicked: + message = "Panic" + case types.SpecStateTimedOut: + message = "Timeout" + } + + s.println(0, s.colorize(redColor+boldStyle, "%s [%.3f seconds]", message, summary.RunTime.Seconds())) + + indentation := s.printCodeLocationBlock([]string{name}, []types.CodeLocation{summary.CodeLocation}, summary.ComponentType, 0, true, true) + + s.printNewLine() + s.printFailure(indentation, summary.State, 
summary.Failure, fullTrace) + + s.endBlock() +} + +func (s *consoleStenographer) AnnounceCapturedOutput(output string) { + if output == "" { + return + } + + s.startBlock() + s.println(0, output) + s.midBlock() +} + +func (s *consoleStenographer) AnnounceSuccesfulSpec(spec *types.SpecSummary) { + s.print(0, s.colorize(greenColor, "•")) + s.stream() +} + +func (s *consoleStenographer) AnnounceSuccesfulSlowSpec(spec *types.SpecSummary, succinct bool) { + s.printBlockWithMessage( + s.colorize(greenColor, "• [SLOW TEST:%.3f seconds]", spec.RunTime.Seconds()), + "", + spec, + succinct, + ) +} + +func (s *consoleStenographer) AnnounceSuccesfulMeasurement(spec *types.SpecSummary, succinct bool) { + s.printBlockWithMessage( + s.colorize(greenColor, "• [MEASUREMENT]"), + s.measurementReport(spec, succinct), + spec, + succinct, + ) +} + +func (s *consoleStenographer) AnnouncePendingSpec(spec *types.SpecSummary, noisy bool) { + if noisy { + s.printBlockWithMessage( + s.colorize(yellowColor, "P [PENDING]"), + "", + spec, + false, + ) + } else { + s.print(0, s.colorize(yellowColor, "P")) + s.stream() + } +} + +func (s *consoleStenographer) AnnounceSkippedSpec(spec *types.SpecSummary) { + s.print(0, s.colorize(cyanColor, "S")) + s.stream() +} + +func (s *consoleStenographer) AnnounceSpecTimedOut(spec *types.SpecSummary, succinct bool, fullTrace bool) { + s.printSpecFailure("•... Timeout", spec, succinct, fullTrace) +} + +func (s *consoleStenographer) AnnounceSpecPanicked(spec *types.SpecSummary, succinct bool, fullTrace bool) { + s.printSpecFailure("•! Panic", spec, succinct, fullTrace) +} + +func (s *consoleStenographer) AnnounceSpecFailed(spec *types.SpecSummary, succinct bool, fullTrace bool) { + s.printSpecFailure("• Failure", spec, succinct, fullTrace) +} + +func (s *consoleStenographer) SummarizeFailures(summaries []*types.SpecSummary) { + failingSpecs := []*types.SpecSummary{} + + for _, summary := range summaries { + if summary.HasFailureState() { + failingSpecs = append(failingSpecs, summary) + } + } + + if len(failingSpecs) == 0 { + return + } + + s.printNewLine() + s.printNewLine() + plural := "s" + if len(failingSpecs) == 1 { + plural = "" + } + s.println(0, s.colorize(redColor+boldStyle, "Summarizing %d Failure%s:", len(failingSpecs), plural)) + for _, summary := range failingSpecs { + s.printNewLine() + if summary.HasFailureState() { + if summary.TimedOut() { + s.print(0, s.colorize(redColor+boldStyle, "[Timeout...] ")) + } else if summary.Panicked() { + s.print(0, s.colorize(redColor+boldStyle, "[Panic!] 
")) + } else if summary.Failed() { + s.print(0, s.colorize(redColor+boldStyle, "[Fail] ")) + } + s.printSpecContext(summary.ComponentTexts, summary.ComponentCodeLocations, summary.Failure.ComponentType, summary.Failure.ComponentIndex, true, true) + s.printNewLine() + s.println(0, s.colorize(lightGrayColor, summary.Failure.Location.String())) + } + } +} + +func (s *consoleStenographer) startBlock() { + if s.cursorState == cursorStateStreaming { + s.printNewLine() + s.printDelimiter() + } else if s.cursorState == cursorStateMidBlock { + s.printNewLine() + } +} + +func (s *consoleStenographer) midBlock() { + s.cursorState = cursorStateMidBlock +} + +func (s *consoleStenographer) endBlock() { + s.printDelimiter() + s.cursorState = cursorStateEndBlock +} + +func (s *consoleStenographer) stream() { + s.cursorState = cursorStateStreaming +} + +func (s *consoleStenographer) printBlockWithMessage(header string, message string, spec *types.SpecSummary, succinct bool) { + s.startBlock() + s.println(0, header) + + indentation := s.printCodeLocationBlock(spec.ComponentTexts, spec.ComponentCodeLocations, types.SpecComponentTypeInvalid, 0, false, succinct) + + if message != "" { + s.printNewLine() + s.println(indentation, message) + } + + s.endBlock() +} + +func (s *consoleStenographer) printSpecFailure(message string, spec *types.SpecSummary, succinct bool, fullTrace bool) { + s.startBlock() + s.println(0, s.colorize(redColor+boldStyle, "%s%s [%.3f seconds]", message, s.failureContext(spec.Failure.ComponentType), spec.RunTime.Seconds())) + + indentation := s.printCodeLocationBlock(spec.ComponentTexts, spec.ComponentCodeLocations, spec.Failure.ComponentType, spec.Failure.ComponentIndex, true, succinct) + + s.printNewLine() + s.printFailure(indentation, spec.State, spec.Failure, fullTrace) + s.endBlock() +} + +func (s *consoleStenographer) failureContext(failedComponentType types.SpecComponentType) string { + switch failedComponentType { + case types.SpecComponentTypeBeforeSuite: + return " in Suite Setup (BeforeSuite)" + case types.SpecComponentTypeAfterSuite: + return " in Suite Teardown (AfterSuite)" + case types.SpecComponentTypeBeforeEach: + return " in Spec Setup (BeforeEach)" + case types.SpecComponentTypeJustBeforeEach: + return " in Spec Setup (JustBeforeEach)" + case types.SpecComponentTypeAfterEach: + return " in Spec Teardown (AfterEach)" + } + + return "" +} + +func (s *consoleStenographer) printFailure(indentation int, state types.SpecState, failure types.SpecFailure, fullTrace bool) { + if state == types.SpecStatePanicked { + s.println(indentation, s.colorize(redColor+boldStyle, failure.Message)) + s.println(indentation, s.colorize(redColor, failure.ForwardedPanic)) + s.println(indentation, failure.Location.String()) + s.printNewLine() + s.println(indentation, s.colorize(redColor, "Full Stack Trace")) + s.println(indentation, failure.Location.FullStackTrace) + } else { + s.println(indentation, s.colorize(redColor, failure.Message)) + s.printNewLine() + s.println(indentation, failure.Location.String()) + if fullTrace { + s.printNewLine() + s.println(indentation, s.colorize(redColor, "Full Stack Trace")) + s.println(indentation, failure.Location.FullStackTrace) + } + } +} + +func (s *consoleStenographer) printSpecContext(componentTexts []string, componentCodeLocations []types.CodeLocation, failedComponentType types.SpecComponentType, failedComponentIndex int, failure bool, succinct bool) int { + startIndex := 1 + indentation := 0 + + if len(componentTexts) == 1 { + startIndex = 0 + } + + for 
i := startIndex; i < len(componentTexts); i++ { + if failure && i == failedComponentIndex { + blockType := "" + switch failedComponentType { + case types.SpecComponentTypeBeforeSuite: + blockType = "BeforeSuite" + case types.SpecComponentTypeAfterSuite: + blockType = "AfterSuite" + case types.SpecComponentTypeBeforeEach: + blockType = "BeforeEach" + case types.SpecComponentTypeJustBeforeEach: + blockType = "JustBeforeEach" + case types.SpecComponentTypeAfterEach: + blockType = "AfterEach" + case types.SpecComponentTypeIt: + blockType = "It" + case types.SpecComponentTypeMeasure: + blockType = "Measurement" + } + if succinct { + s.print(0, s.colorize(redColor+boldStyle, "[%s] %s ", blockType, componentTexts[i])) + } else { + s.println(indentation, s.colorize(redColor+boldStyle, "%s [%s]", componentTexts[i], blockType)) + s.println(indentation, s.colorize(grayColor, "%s", componentCodeLocations[i])) + } + } else { + if succinct { + s.print(0, s.colorize(alternatingColors[i%2], "%s ", componentTexts[i])) + } else { + s.println(indentation, componentTexts[i]) + s.println(indentation, s.colorize(grayColor, "%s", componentCodeLocations[i])) + } + } + indentation++ + } + + return indentation +} + +func (s *consoleStenographer) printCodeLocationBlock(componentTexts []string, componentCodeLocations []types.CodeLocation, failedComponentType types.SpecComponentType, failedComponentIndex int, failure bool, succinct bool) int { + indentation := s.printSpecContext(componentTexts, componentCodeLocations, failedComponentType, failedComponentIndex, failure, succinct) + + if succinct { + if len(componentTexts) > 0 { + s.printNewLine() + s.print(0, s.colorize(lightGrayColor, "%s", componentCodeLocations[len(componentCodeLocations)-1])) + } + s.printNewLine() + indentation = 1 + } else { + indentation-- + } + + return indentation +} + +func (s *consoleStenographer) orderedMeasurementKeys(measurements map[string]*types.SpecMeasurement) []string { + orderedKeys := make([]string, len(measurements)) + for key, measurement := range measurements { + orderedKeys[measurement.Order] = key + } + return orderedKeys +} + +func (s *consoleStenographer) measurementReport(spec *types.SpecSummary, succinct bool) string { + if len(spec.Measurements) == 0 { + return "Found no measurements" + } + + message := []string{} + orderedKeys := s.orderedMeasurementKeys(spec.Measurements) + + if succinct { + message = append(message, fmt.Sprintf("%s samples:", s.colorize(boldStyle, "%d", spec.NumberOfSamples))) + for _, key := range orderedKeys { + measurement := spec.Measurements[key] + message = append(message, fmt.Sprintf(" %s - %s: %s%s, %s: %s%s ± %s%s, %s: %s%s", + s.colorize(boldStyle, "%s", measurement.Name), + measurement.SmallestLabel, + s.colorize(greenColor, "%.3f", measurement.Smallest), + measurement.Units, + measurement.AverageLabel, + s.colorize(cyanColor, "%.3f", measurement.Average), + measurement.Units, + s.colorize(cyanColor, "%.3f", measurement.StdDeviation), + measurement.Units, + measurement.LargestLabel, + s.colorize(redColor, "%.3f", measurement.Largest), + measurement.Units, + )) + } + } else { + message = append(message, fmt.Sprintf("Ran %s samples:", s.colorize(boldStyle, "%d", spec.NumberOfSamples))) + for _, key := range orderedKeys { + measurement := spec.Measurements[key] + info := "" + if measurement.Info != nil { + message = append(message, fmt.Sprintf("%v", measurement.Info)) + } + + message = append(message, fmt.Sprintf("%s:\n%s %s: %s%s\n %s: %s%s\n %s: %s%s ± %s%s", + s.colorize(boldStyle, "%s", 
measurement.Name), + info, + measurement.SmallestLabel, + s.colorize(greenColor, "%.3f", measurement.Smallest), + measurement.Units, + measurement.LargestLabel, + s.colorize(redColor, "%.3f", measurement.Largest), + measurement.Units, + measurement.AverageLabel, + s.colorize(cyanColor, "%.3f", measurement.Average), + measurement.Units, + s.colorize(cyanColor, "%.3f", measurement.StdDeviation), + measurement.Units, + )) + } + } + + return strings.Join(message, "\n") +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/teamcity_reporter.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/teamcity_reporter.go new file mode 100644 index 0000000000000..657dfe726e258 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/teamcity_reporter.go @@ -0,0 +1,92 @@ +/* + +TeamCity Reporter for Ginkgo + +Makes use of TeamCity's support for Service Messages +http://confluence.jetbrains.com/display/TCD7/Build+Script+Interaction+with+TeamCity#BuildScriptInteractionwithTeamCity-ReportingTests +*/ + +package reporters + +import ( + "fmt" + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/types" + "io" + "strings" +) + +const ( + messageId = "##teamcity" +) + +type TeamCityReporter struct { + writer io.Writer + testSuiteName string +} + +func NewTeamCityReporter(writer io.Writer) *TeamCityReporter { + return &TeamCityReporter{ + writer: writer, + } +} + +func (reporter *TeamCityReporter) SpecSuiteWillBegin(config config.GinkgoConfigType, summary *types.SuiteSummary) { + reporter.testSuiteName = escape(summary.SuiteDescription) + fmt.Fprintf(reporter.writer, "%s[testSuiteStarted name='%s']", messageId, reporter.testSuiteName) +} + +func (reporter *TeamCityReporter) BeforeSuiteDidRun(setupSummary *types.SetupSummary) { + reporter.handleSetupSummary("BeforeSuite", setupSummary) +} + +func (reporter *TeamCityReporter) AfterSuiteDidRun(setupSummary *types.SetupSummary) { + reporter.handleSetupSummary("AfterSuite", setupSummary) +} + +func (reporter *TeamCityReporter) handleSetupSummary(name string, setupSummary *types.SetupSummary) { + if setupSummary.State != types.SpecStatePassed { + testName := escape(name) + fmt.Fprintf(reporter.writer, "%s[testStarted name='%s']", messageId, testName) + message := escape(setupSummary.Failure.ComponentCodeLocation.String()) + details := escape(setupSummary.Failure.Message) + fmt.Fprintf(reporter.writer, "%s[testFailed name='%s' message='%s' details='%s']", messageId, testName, message, details) + durationInMilliseconds := setupSummary.RunTime.Seconds() * 1000 + fmt.Fprintf(reporter.writer, "%s[testFinished name='%s' duration='%v']", messageId, testName, durationInMilliseconds) + } +} + +func (reporter *TeamCityReporter) SpecWillRun(specSummary *types.SpecSummary) { + testName := escape(strings.Join(specSummary.ComponentTexts[1:], " ")) + fmt.Fprintf(reporter.writer, "%s[testStarted name='%s']", messageId, testName) +} + +func (reporter *TeamCityReporter) SpecDidComplete(specSummary *types.SpecSummary) { + testName := escape(strings.Join(specSummary.ComponentTexts[1:], " ")) + + if specSummary.State == types.SpecStateFailed || specSummary.State == types.SpecStateTimedOut || specSummary.State == types.SpecStatePanicked { + message := escape(specSummary.Failure.ComponentCodeLocation.String()) + details := escape(specSummary.Failure.Message) + fmt.Fprintf(reporter.writer, "%s[testFailed name='%s' message='%s' details='%s']", messageId, testName, message, details) + } + if specSummary.State == types.SpecStateSkipped || 
specSummary.State == types.SpecStatePending { + fmt.Fprintf(reporter.writer, "%s[testIgnored name='%s']", messageId, testName) + } + + durationInMilliseconds := specSummary.RunTime.Seconds() * 1000 + fmt.Fprintf(reporter.writer, "%s[testFinished name='%s' duration='%v']", messageId, testName, durationInMilliseconds) +} + +func (reporter *TeamCityReporter) SpecSuiteDidEnd(summary *types.SuiteSummary) { + fmt.Fprintf(reporter.writer, "%s[testSuiteFinished name='%s']", messageId, reporter.testSuiteName) +} + +func escape(output string) string { + output = strings.Replace(output, "|", "||", -1) + output = strings.Replace(output, "'", "|'", -1) + output = strings.Replace(output, "\n", "|n", -1) + output = strings.Replace(output, "\r", "|r", -1) + output = strings.Replace(output, "[", "|[", -1) + output = strings.Replace(output, "]", "|]", -1) + return output +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/teamcity_reporter_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/teamcity_reporter_test.go new file mode 100644 index 0000000000000..de87732113c36 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/reporters/teamcity_reporter_test.go @@ -0,0 +1,213 @@ +package reporters_test + +import ( + "bytes" + "fmt" + . "github.com/onsi/ginkgo" + "github.com/onsi/ginkgo/config" + "github.com/onsi/ginkgo/internal/codelocation" + "github.com/onsi/ginkgo/reporters" + "github.com/onsi/ginkgo/types" + . "github.com/onsi/gomega" + "time" +) + +var _ = Describe("TeamCity Reporter", func() { + var ( + buffer bytes.Buffer + reporter Reporter + ) + + BeforeEach(func() { + buffer.Truncate(0) + reporter = reporters.NewTeamCityReporter(&buffer) + reporter.SpecSuiteWillBegin(config.GinkgoConfigType{}, &types.SuiteSummary{ + SuiteDescription: "Foo's test suite", + NumberOfSpecsThatWillBeRun: 1, + }) + }) + + Describe("a passing test", func() { + BeforeEach(func() { + beforeSuite := &types.SetupSummary{ + State: types.SpecStatePassed, + } + reporter.BeforeSuiteDidRun(beforeSuite) + + afterSuite := &types.SetupSummary{ + State: types.SpecStatePassed, + } + reporter.AfterSuiteDidRun(afterSuite) + + spec := &types.SpecSummary{ + ComponentTexts: []string{"[Top Level]", "A", "B", "C"}, + State: types.SpecStatePassed, + RunTime: 5 * time.Second, + } + reporter.SpecWillRun(spec) + reporter.SpecDidComplete(spec) + + reporter.SpecSuiteDidEnd(&types.SuiteSummary{ + NumberOfSpecsThatWillBeRun: 1, + NumberOfFailedSpecs: 0, + RunTime: 10 * time.Second, + }) + }) + + It("should record the test as passing", func() { + actual := buffer.String() + expected := + "##teamcity[testSuiteStarted name='Foo|'s test suite']" + + "##teamcity[testStarted name='A B C']" + + "##teamcity[testFinished name='A B C' duration='5000']" + + "##teamcity[testSuiteFinished name='Foo|'s test suite']" + Ω(actual).Should(Equal(expected)) + }) + }) + + Describe("when the BeforeSuite fails", func() { + var beforeSuite *types.SetupSummary + + BeforeEach(func() { + beforeSuite = &types.SetupSummary{ + State: types.SpecStateFailed, + RunTime: 3 * time.Second, + Failure: types.SpecFailure{ + Message: "failed to setup\n", + ComponentCodeLocation: codelocation.New(0), + }, + } + reporter.BeforeSuiteDidRun(beforeSuite) + + reporter.SpecSuiteDidEnd(&types.SuiteSummary{ + NumberOfSpecsThatWillBeRun: 1, + NumberOfFailedSpecs: 1, + RunTime: 10 * time.Second, + }) + }) + + It("should record the test as having failed", func() { + actual := buffer.String() + expected := fmt.Sprintf( + "##teamcity[testSuiteStarted 
name='Foo|'s test suite']"+ + "##teamcity[testStarted name='BeforeSuite']"+ + "##teamcity[testFailed name='BeforeSuite' message='%s' details='failed to setup|n']"+ + "##teamcity[testFinished name='BeforeSuite' duration='3000']"+ + "##teamcity[testSuiteFinished name='Foo|'s test suite']", beforeSuite.Failure.ComponentCodeLocation.String(), + ) + Ω(actual).Should(Equal(expected)) + }) + }) + + Describe("when the AfterSuite fails", func() { + var afterSuite *types.SetupSummary + + BeforeEach(func() { + afterSuite = &types.SetupSummary{ + State: types.SpecStateFailed, + RunTime: 3 * time.Second, + Failure: types.SpecFailure{ + Message: "failed to setup\n", + ComponentCodeLocation: codelocation.New(0), + }, + } + reporter.AfterSuiteDidRun(afterSuite) + + reporter.SpecSuiteDidEnd(&types.SuiteSummary{ + NumberOfSpecsThatWillBeRun: 1, + NumberOfFailedSpecs: 1, + RunTime: 10 * time.Second, + }) + }) + + It("should record the test as having failed", func() { + actual := buffer.String() + expected := fmt.Sprintf( + "##teamcity[testSuiteStarted name='Foo|'s test suite']"+ + "##teamcity[testStarted name='AfterSuite']"+ + "##teamcity[testFailed name='AfterSuite' message='%s' details='failed to setup|n']"+ + "##teamcity[testFinished name='AfterSuite' duration='3000']"+ + "##teamcity[testSuiteFinished name='Foo|'s test suite']", afterSuite.Failure.ComponentCodeLocation.String(), + ) + Ω(actual).Should(Equal(expected)) + }) + }) + specStateCases := []struct { + state types.SpecState + message string + }{ + {types.SpecStateFailed, "Failure"}, + {types.SpecStateTimedOut, "Timeout"}, + {types.SpecStatePanicked, "Panic"}, + } + + for _, specStateCase := range specStateCases { + specStateCase := specStateCase + Describe("a failing test", func() { + var spec *types.SpecSummary + BeforeEach(func() { + spec = &types.SpecSummary{ + ComponentTexts: []string{"[Top Level]", "A", "B", "C"}, + State: specStateCase.state, + RunTime: 5 * time.Second, + Failure: types.SpecFailure{ + ComponentCodeLocation: codelocation.New(0), + Message: "I failed", + }, + } + reporter.SpecWillRun(spec) + reporter.SpecDidComplete(spec) + + reporter.SpecSuiteDidEnd(&types.SuiteSummary{ + NumberOfSpecsThatWillBeRun: 1, + NumberOfFailedSpecs: 1, + RunTime: 10 * time.Second, + }) + }) + + It("should record test as failing", func() { + actual := buffer.String() + expected := + fmt.Sprintf("##teamcity[testSuiteStarted name='Foo|'s test suite']"+ + "##teamcity[testStarted name='A B C']"+ + "##teamcity[testFailed name='A B C' message='%s' details='I failed']"+ + "##teamcity[testFinished name='A B C' duration='5000']"+ + "##teamcity[testSuiteFinished name='Foo|'s test suite']", spec.Failure.ComponentCodeLocation.String()) + Ω(actual).Should(Equal(expected)) + }) + }) + } + + for _, specStateCase := range []types.SpecState{types.SpecStatePending, types.SpecStateSkipped} { + specStateCase := specStateCase + Describe("a skipped test", func() { + var spec *types.SpecSummary + BeforeEach(func() { + spec = &types.SpecSummary{ + ComponentTexts: []string{"[Top Level]", "A", "B", "C"}, + State: specStateCase, + RunTime: 5 * time.Second, + } + reporter.SpecWillRun(spec) + reporter.SpecDidComplete(spec) + + reporter.SpecSuiteDidEnd(&types.SuiteSummary{ + NumberOfSpecsThatWillBeRun: 1, + NumberOfFailedSpecs: 0, + RunTime: 10 * time.Second, + }) + }) + + It("should record test as ignored", func() { + actual := buffer.String() + expected := + "##teamcity[testSuiteStarted name='Foo|'s test suite']" + + "##teamcity[testStarted name='A B C']" + + 
"##teamcity[testIgnored name='A B C']" + + "##teamcity[testFinished name='A B C' duration='5000']" + + "##teamcity[testSuiteFinished name='Foo|'s test suite']" + Ω(actual).Should(Equal(expected)) + }) + }) + } +}) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/types/code_location.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/types/code_location.go new file mode 100644 index 0000000000000..935a89e136a03 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/types/code_location.go @@ -0,0 +1,15 @@ +package types + +import ( + "fmt" +) + +type CodeLocation struct { + FileName string + LineNumber int + FullStackTrace string +} + +func (codeLocation CodeLocation) String() string { + return fmt.Sprintf("%s:%d", codeLocation.FileName, codeLocation.LineNumber) +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/types/synchronization.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/types/synchronization.go new file mode 100644 index 0000000000000..fdd6ed5bdf85b --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/types/synchronization.go @@ -0,0 +1,30 @@ +package types + +import ( + "encoding/json" +) + +type RemoteBeforeSuiteState int + +const ( + RemoteBeforeSuiteStateInvalid RemoteBeforeSuiteState = iota + + RemoteBeforeSuiteStatePending + RemoteBeforeSuiteStatePassed + RemoteBeforeSuiteStateFailed + RemoteBeforeSuiteStateDisappeared +) + +type RemoteBeforeSuiteData struct { + Data []byte + State RemoteBeforeSuiteState +} + +func (r RemoteBeforeSuiteData) ToJSON() []byte { + data, _ := json.Marshal(r) + return data +} + +type RemoteAfterSuiteData struct { + CanRun bool +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/types/types.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/types/types.go new file mode 100644 index 0000000000000..583b347395953 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/types/types.go @@ -0,0 +1,139 @@ +package types + +import "time" + +const GINKGO_FOCUS_EXIT_CODE = 197 + +type SuiteSummary struct { + SuiteDescription string + SuiteSucceeded bool + SuiteID string + + NumberOfSpecsBeforeParallelization int + NumberOfTotalSpecs int + NumberOfSpecsThatWillBeRun int + NumberOfPendingSpecs int + NumberOfSkippedSpecs int + NumberOfPassedSpecs int + NumberOfFailedSpecs int + RunTime time.Duration +} + +type SpecSummary struct { + ComponentTexts []string + ComponentCodeLocations []CodeLocation + + State SpecState + RunTime time.Duration + Failure SpecFailure + IsMeasurement bool + NumberOfSamples int + Measurements map[string]*SpecMeasurement + + CapturedOutput string + SuiteID string +} + +func (s SpecSummary) HasFailureState() bool { + return s.State == SpecStateTimedOut || s.State == SpecStatePanicked || s.State == SpecStateFailed +} + +func (s SpecSummary) TimedOut() bool { + return s.State == SpecStateTimedOut +} + +func (s SpecSummary) Panicked() bool { + return s.State == SpecStatePanicked +} + +func (s SpecSummary) Failed() bool { + return s.State == SpecStateFailed +} + +func (s SpecSummary) Passed() bool { + return s.State == SpecStatePassed +} + +func (s SpecSummary) Skipped() bool { + return s.State == SpecStateSkipped +} + +func (s SpecSummary) Pending() bool { + return s.State == SpecStatePending +} + +type SetupSummary struct { + ComponentType SpecComponentType + CodeLocation CodeLocation + + State SpecState + RunTime time.Duration + Failure SpecFailure + + CapturedOutput string + SuiteID string +} + +type SpecFailure struct { + Message string + Location CodeLocation + ForwardedPanic 
string + + ComponentIndex int + ComponentType SpecComponentType + ComponentCodeLocation CodeLocation +} + +type SpecMeasurement struct { + Name string + Info interface{} + Order int + + Results []float64 + + Smallest float64 + Largest float64 + Average float64 + StdDeviation float64 + + SmallestLabel string + LargestLabel string + AverageLabel string + Units string +} + +type SpecState uint + +const ( + SpecStateInvalid SpecState = iota + + SpecStatePending + SpecStateSkipped + SpecStatePassed + SpecStateFailed + SpecStatePanicked + SpecStateTimedOut +) + +type SpecComponentType uint + +const ( + SpecComponentTypeInvalid SpecComponentType = iota + + SpecComponentTypeContainer + SpecComponentTypeBeforeSuite + SpecComponentTypeAfterSuite + SpecComponentTypeBeforeEach + SpecComponentTypeJustBeforeEach + SpecComponentTypeAfterEach + SpecComponentTypeIt + SpecComponentTypeMeasure +) + +type FlagType uint + +const ( + FlagTypeNone FlagType = iota + FlagTypeFocused + FlagTypePending +) diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/types/types_suite_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/types/types_suite_test.go new file mode 100644 index 0000000000000..b026169c12b3b --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/types/types_suite_test.go @@ -0,0 +1,13 @@ +package types_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + "testing" +) + +func TestTypes(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Types Suite") +} diff --git a/Godeps/_workspace/src/github.com/onsi/ginkgo/types/types_test.go b/Godeps/_workspace/src/github.com/onsi/ginkgo/types/types_test.go new file mode 100644 index 0000000000000..124b216ec601e --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/ginkgo/types/types_test.go @@ -0,0 +1,81 @@ +package types_test + +import ( + . "github.com/onsi/ginkgo/types" + + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" +) + +var specStates = []SpecState{ + SpecStatePassed, + SpecStateTimedOut, + SpecStatePanicked, + SpecStateFailed, + SpecStatePending, + SpecStateSkipped, +} + +func verifySpecSummary(caller func(SpecSummary) bool, trueStates ...SpecState) { + summary := SpecSummary{} + trueStateLookup := map[SpecState]bool{} + for _, state := range trueStates { + trueStateLookup[state] = true + summary.State = state + Ω(caller(summary)).Should(BeTrue()) + } + + for _, state := range specStates { + if trueStateLookup[state] { + continue + } + summary.State = state + Ω(caller(summary)).Should(BeFalse()) + } +} + +var _ = Describe("Types", func() { + Describe("SpecSummary", func() { + It("knows when it is in a failure-like state", func() { + verifySpecSummary(func(summary SpecSummary) bool { + return summary.HasFailureState() + }, SpecStateTimedOut, SpecStatePanicked, SpecStateFailed) + }) + + It("knows when it passed", func() { + verifySpecSummary(func(summary SpecSummary) bool { + return summary.Passed() + }, SpecStatePassed) + }) + + It("knows when it has failed", func() { + verifySpecSummary(func(summary SpecSummary) bool { + return summary.Failed() + }, SpecStateFailed) + }) + + It("knows when it has panicked", func() { + verifySpecSummary(func(summary SpecSummary) bool { + return summary.Panicked() + }, SpecStatePanicked) + }) + + It("knows when it has timed out", func() { + verifySpecSummary(func(summary SpecSummary) bool { + return summary.TimedOut() + }, SpecStateTimedOut) + }) + + It("knows when it is pending", func() { + verifySpecSummary(func(summary SpecSummary) bool { + return summary.Pending() + }, SpecStatePending) + }) + + It("knows when it is skipped", func() { + verifySpecSummary(func(summary SpecSummary) bool { + return summary.Skipped() + }, SpecStateSkipped) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/.gitignore b/Godeps/_workspace/src/github.com/onsi/gomega/.gitignore new file mode 100644 index 0000000000000..55145320362aa --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/.gitignore @@ -0,0 +1,3 @@ +.DS_Store +*.test +. diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/.travis.yml b/Godeps/_workspace/src/github.com/onsi/gomega/.travis.yml new file mode 100644 index 0000000000000..2ecdf95a5343f --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/.travis.yml @@ -0,0 +1,10 @@ +language: go +go: + - 1.3 + +install: + - go get -v ./... + - go get github.com/onsi/ginkgo + - go install github.com/onsi/ginkgo/ginkgo + +script: $HOME/gopath/bin/ginkgo -r --randomizeAllSpecs --failOnPending --randomizeSuites --race diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/CHANGELOG.md b/Godeps/_workspace/src/github.com/onsi/gomega/CHANGELOG.md new file mode 100644 index 0000000000000..ea269cab1fc52 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/CHANGELOG.md @@ -0,0 +1,60 @@ +## HEAD + +Improvements: + +- Added `BeSent` which attempts to send a value down a channel and fails if the attempt blocks. Can be paired with `Eventually` to safely send a value down a channel with a timeout. +- `Ω`, `Expect`, `Eventually`, and `Consistently` now immediately `panic` if there is no registered fail handler. This is always a mistake that can hide failing tests. +- `Receive()` no longer errors when passed a closed channel, it's perfectly fine to attempt to read from a closed channel so Ω(c).Should(Receive()) always fails and Ω(c).ShoudlNot(Receive()) always passes with a closed channel. 
+- Added `HavePrefix` and `HaveSuffix` matchers. +- `ghttp` can now handle concurrent requests. +- Added `Succeed` which allows one to write `Ω(MyFunction()).Should(Succeed())`. + +## 1.0 (8/2/2014) + +No changes. Dropping "beta" from the version number. + +## 1.0.0-beta (7/8/2014) +Breaking Changes: + +- Changed OmegaMatcher interface. Instead of having `Match` return failure messages, two new methods `FailureMessage` and `NegatedFailureMessage` are called instead. +- Moved and renamed OmegaFailHandler to types.GomegaFailHandler and OmegaMatcher to types.GomegaMatcher. Any references to OmegaMatcher in any custom matchers will need to be changed to point to types.GomegaMatcher + +New Test-Support Features: + +- `ghttp`: supports testing http clients + - Provides a flexible fake http server + - Provides a collection of chainable http handlers that perform assertions. +- `gbytes`: supports making ordered assertions against streams of data + - Provides a `gbytes.Buffer` + - Provides a `Say` matcher to perform ordered assertions against output data +- `gexec`: supports testing external processes + - Provides support for building Go binaries + - Wraps and starts `exec.Cmd` commands + - Makes it easy to assert against stdout and stderr + - Makes it easy to send signals and wait for processes to exit + - Provides an `Exit` matcher to assert against exit code. + +DSL Changes: + +- `Eventually` and `Consistently` can accept `time.Duration` interval and polling inputs. +- The default timeouts for `Eventually` and `Consistently` are now configurable. + +New Matchers: + +- `ConsistOf`: order-independent assertion against the elements of an array/slice or keys of a map. +- `BeTemporally`: like `BeNumerically` but for `time.Time` +- `HaveKeyWithValue`: asserts a map has a given key with the given value. + +Updated Matchers: + +- `Receive` matcher can take a matcher as an argument and passes only if the channel under test receives an objet that satisfies the passed-in matcher. +- Matchers that implement `MatchMayChangeInTheFuture(actual interface{}) bool` can inform `Eventually` and/or `Consistently` when a match has no chance of changing status in the future. For example, `Receive` returns `false` when a channel is closed. + +Misc: + +- Start using semantic versioning +- Start maintaining changelog + +Major refactor: + +- Pull out Gomega's internal to `internal` diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/LICENSE b/Godeps/_workspace/src/github.com/onsi/gomega/LICENSE new file mode 100644 index 0000000000000..9415ee72c17f8 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2013-2014 Onsi Fakhouri + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/README.md b/Godeps/_workspace/src/github.com/onsi/gomega/README.md new file mode 100644 index 0000000000000..c825591922562 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/README.md @@ -0,0 +1,17 @@ +![Gomega: Ginkgo's Preferred Matcher Library](http://onsi.github.io/gomega/images/gomega.png) + +[![Build Status](https://travis-ci.org/onsi/gomega.png)](https://travis-ci.org/onsi/gomega) + +Jump straight to the [docs](http://onsi.github.io/gomega/) to learn about Gomega, including a list of [all available matchers](http://onsi.github.io/gomega/#provided-matchers). + +To discuss Gomega and get updates, join the [google group](https://groups.google.com/d/forum/ginkgo-and-gomega). + +## [Ginkgo](http://github.com/onsi/ginkgo): a BDD Testing Framework for Golang + +Learn more about Ginkgo [here](http://onsi.github.io/ginkgo/) + +## License + +Gomega is MIT-Licensed + +The `ConsistOf` matcher uses [goraph](https://github.com/amitkgupta/goraph) which is embedded in the source to simplify distribution. goraph has an MIT license. diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/format/format.go b/Godeps/_workspace/src/github.com/onsi/gomega/format/format.go new file mode 100644 index 0000000000000..ec9c91a42f67f --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/format/format.go @@ -0,0 +1,276 @@ +/* +Gomega's format package pretty-prints objects. It explores input objects recursively and generates formatted, indented output with type information. +*/ +package format + +import ( + "fmt" + "reflect" + "strings" +) + +// Use MaxDepth to set the maximum recursion depth when printing deeply nested objects +var MaxDepth = uint(10) + +/* +By default, all objects (even those that implement fmt.Stringer and fmt.GoStringer) are recursively inspected to generate output. + +Set UseStringerRepresentation = true to use GoString (for fmt.GoStringers) or String (for fmt.Stringer) instead. + +Note that GoString and String don't always have all the information you need to understand why a test failed! +*/ +var UseStringerRepresentation = false + +//The default indentation string emitted by the format package +var Indent = " " + +var longFormThreshold = 20 + +/* +Generates a formatted matcher success/failure message of the form: + + Expected + + + + +If expected is omited, then the message looks like: + + Expected + + +*/ +func Message(actual interface{}, message string, expected ...interface{}) string { + if len(expected) == 0 { + return fmt.Sprintf("Expected\n%s\n%s", Object(actual, 1), message) + } else { + return fmt.Sprintf("Expected\n%s\n%s\n%s", Object(actual, 1), message, Object(expected[0], 1)) + } +} + +/* +Pretty prints the passed in object at the passed in indentation level. + +Object recurses into deeply nested objects emitting pretty-printed representations of their components. + +Modify format.MaxDepth to control how deep the recursion is allowed to go +Set format.UseStringerRepresentation to true to return object.GoString() or object.String() when available instead of +recursing into the object. 
+*/ +func Object(object interface{}, indentation uint) string { + indent := strings.Repeat(Indent, int(indentation)) + value := reflect.ValueOf(object) + return fmt.Sprintf("%s<%s>: %s", indent, formatType(object), formatValue(value, indentation)) +} + +/* +IndentString takes a string and indents each line by the specified amount. +*/ +func IndentString(s string, indentation uint) string { + components := strings.Split(s, "\n") + result := "" + indent := strings.Repeat(Indent, int(indentation)) + for i, component := range components { + result += indent + component + if i < len(components)-1 { + result += "\n" + } + } + + return result +} + +func formatType(object interface{}) string { + t := reflect.TypeOf(object) + if t == nil { + return "nil" + } + switch t.Kind() { + case reflect.Chan: + v := reflect.ValueOf(object) + return fmt.Sprintf("%T | len:%d, cap:%d", object, v.Len(), v.Cap()) + case reflect.Ptr: + return fmt.Sprintf("%T | %p", object, object) + case reflect.Slice: + v := reflect.ValueOf(object) + return fmt.Sprintf("%T | len:%d, cap:%d", object, v.Len(), v.Cap()) + case reflect.Map: + v := reflect.ValueOf(object) + return fmt.Sprintf("%T | len:%d", object, v.Len()) + default: + return fmt.Sprintf("%T", object) + } +} + +func formatValue(value reflect.Value, indentation uint) string { + if indentation > MaxDepth { + return "..." + } + + if isNilValue(value) { + return "nil" + } + + if UseStringerRepresentation { + if value.CanInterface() { + obj := value.Interface() + switch x := obj.(type) { + case fmt.GoStringer: + return x.GoString() + case fmt.Stringer: + return x.String() + } + } + } + + switch value.Kind() { + case reflect.Bool: + return fmt.Sprintf("%v", value.Bool()) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return fmt.Sprintf("%v", value.Int()) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return fmt.Sprintf("%v", value.Uint()) + case reflect.Uintptr: + return fmt.Sprintf("0x%x", value.Uint()) + case reflect.Float32, reflect.Float64: + return fmt.Sprintf("%v", value.Float()) + case reflect.Complex64, reflect.Complex128: + return fmt.Sprintf("%v", value.Complex()) + case reflect.Chan: + return fmt.Sprintf("0x%x", value.Pointer()) + case reflect.Func: + return fmt.Sprintf("0x%x", value.Pointer()) + case reflect.Ptr: + return formatValue(value.Elem(), indentation) + case reflect.Slice: + if value.Type().Elem().Kind() == reflect.Uint8 { + return formatString(value.Bytes(), indentation) + } + return formatSlice(value, indentation) + case reflect.String: + return formatString(value.String(), indentation) + case reflect.Array: + return formatSlice(value, indentation) + case reflect.Map: + return formatMap(value, indentation) + case reflect.Struct: + return formatStruct(value, indentation) + case reflect.Interface: + return formatValue(value.Elem(), indentation) + default: + if value.CanInterface() { + return fmt.Sprintf("%#v", value.Interface()) + } else { + return fmt.Sprintf("%#v", value) + } + } +} + +func formatString(object interface{}, indentation uint) string { + if indentation == 1 { + s := fmt.Sprintf("%s", object) + components := strings.Split(s, "\n") + result := "" + for i, component := range components { + if i == 0 { + result += component + } else { + result += Indent + component + } + if i < len(components)-1 { + result += "\n" + } + } + + return fmt.Sprintf("%s", result) + } else { + return fmt.Sprintf("%q", object) + } +} + +func formatSlice(v reflect.Value, indentation uint) 
string { + l := v.Len() + result := make([]string, l) + longest := 0 + for i := 0; i < l; i++ { + result[i] = formatValue(v.Index(i), indentation+1) + if len(result[i]) > longest { + longest = len(result[i]) + } + } + + if longest > longFormThreshold { + indenter := strings.Repeat(Indent, int(indentation)) + return fmt.Sprintf("[\n%s%s,\n%s]", indenter+Indent, strings.Join(result, ",\n"+indenter+Indent), indenter) + } else { + return fmt.Sprintf("[%s]", strings.Join(result, ", ")) + } +} + +func formatMap(v reflect.Value, indentation uint) string { + l := v.Len() + result := make([]string, l) + + longest := 0 + for i, key := range v.MapKeys() { + value := v.MapIndex(key) + result[i] = fmt.Sprintf("%s: %s", formatValue(key, 0), formatValue(value, indentation+1)) + if len(result[i]) > longest { + longest = len(result[i]) + } + } + + if longest > longFormThreshold { + indenter := strings.Repeat(Indent, int(indentation)) + return fmt.Sprintf("{\n%s%s,\n%s}", indenter+Indent, strings.Join(result, ",\n"+indenter+Indent), indenter) + } else { + return fmt.Sprintf("{%s}", strings.Join(result, ", ")) + } +} + +func formatStruct(v reflect.Value, indentation uint) string { + t := v.Type() + + l := v.NumField() + result := []string{} + longest := 0 + for i := 0; i < l; i++ { + structField := t.Field(i) + fieldEntry := v.Field(i) + representation := fmt.Sprintf("%s: %s", structField.Name, formatValue(fieldEntry, indentation+1)) + result = append(result, representation) + if len(representation) > longest { + longest = len(representation) + } + } + if longest > longFormThreshold { + indenter := strings.Repeat(Indent, int(indentation)) + return fmt.Sprintf("{\n%s%s,\n%s}", indenter+Indent, strings.Join(result, ",\n"+indenter+Indent), indenter) + } else { + return fmt.Sprintf("{%s}", strings.Join(result, ", ")) + } +} + +func isNilValue(a reflect.Value) bool { + switch a.Kind() { + case reflect.Invalid: + return true + case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: + return a.IsNil() + } + + return false +} + +func isNil(a interface{}) bool { + if a == nil { + return true + } + + switch reflect.TypeOf(a).Kind() { + case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: + return reflect.ValueOf(a).IsNil() + } + + return false +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/format/format_suite_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/format/format_suite_test.go new file mode 100644 index 0000000000000..8e65a95292dc5 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/format/format_suite_test.go @@ -0,0 +1,13 @@ +package format_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + "testing" +) + +func TestFormat(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Format Suite") +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/format/format_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/format/format_test.go new file mode 100644 index 0000000000000..fd926f58ea86f --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/format/format_test.go @@ -0,0 +1,449 @@ +package format_test + +import ( + "fmt" + "strings" + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . 
"github.com/onsi/gomega/format" + "github.com/onsi/gomega/types" +) + +//recursive struct + +type StringAlias string +type ByteAlias []byte +type IntAlias int + +type AStruct struct { + Exported string +} + +type SimpleStruct struct { + Name string + Enumeration int + Veritas bool + Data []byte + secret uint32 +} + +type ComplexStruct struct { + Strings []string + SimpleThings []*SimpleStruct + DataMaps map[int]ByteAlias +} + +type SecretiveStruct struct { + boolValue bool + intValue int + uintValue uint + uintptrValue uintptr + floatValue float32 + complexValue complex64 + chanValue chan bool + funcValue func() + pointerValue *int + sliceValue []string + byteSliceValue []byte + stringValue string + arrValue [3]int + byteArrValue [3]byte + mapValue map[string]int + structValue AStruct + interfaceValue interface{} +} + +type GoStringer struct { +} + +func (g GoStringer) GoString() string { + return "go-string" +} + +func (g GoStringer) String() string { + return "string" +} + +type Stringer struct { +} + +func (g Stringer) String() string { + return "string" +} + +var _ = Describe("Format", func() { + match := func(typeRepresentation string, valueRepresentation string, args ...interface{}) types.GomegaMatcher { + if len(args) > 0 { + valueRepresentation = fmt.Sprintf(valueRepresentation, args...) + } + return Equal(fmt.Sprintf("%s<%s>: %s", Indent, typeRepresentation, valueRepresentation)) + } + + matchRegexp := func(typeRepresentation string, valueRepresentation string, args ...interface{}) types.GomegaMatcher { + if len(args) > 0 { + valueRepresentation = fmt.Sprintf(valueRepresentation, args...) + } + return MatchRegexp(fmt.Sprintf("%s<%s>: %s", Indent, typeRepresentation, valueRepresentation)) + } + + hashMatchingRegexp := func(entries ...string) string { + entriesSwitch := "(" + strings.Join(entries, "|") + ")" + arr := make([]string, len(entries)) + for i := range arr { + arr[i] = entriesSwitch + } + return "{" + strings.Join(arr, ", ") + "}" + } + + Describe("Message", func() { + Context("with only an actual value", func() { + It("should print out an indented formatted representation of the value and the message", func() { + Ω(Message(3, "to be three.")).Should(Equal("Expected\n : 3\nto be three.")) + }) + }) + + Context("with an actual and an expected value", func() { + It("should print out an indented formatted representatino of both values, and the message", func() { + Ω(Message(3, "to equal", 4)).Should(Equal("Expected\n : 3\nto equal\n : 4")) + }) + }) + }) + + Describe("IndentString", func() { + It("should indent the string", func() { + Ω(IndentString("foo\n bar\nbaz", 2)).Should(Equal(" foo\n bar\n baz")) + }) + }) + + Describe("Object", func() { + Describe("formatting boolean values", func() { + It("should give the type and format values correctly", func() { + Ω(Object(true, 1)).Should(match("bool", "true")) + Ω(Object(false, 1)).Should(match("bool", "false")) + }) + }) + + Describe("formatting numbers", func() { + It("should give the type and format values correctly", func() { + Ω(Object(int(3), 1)).Should(match("int", "3")) + Ω(Object(int8(3), 1)).Should(match("int8", "3")) + Ω(Object(int16(3), 1)).Should(match("int16", "3")) + Ω(Object(int32(3), 1)).Should(match("int32", "3")) + Ω(Object(int64(3), 1)).Should(match("int64", "3")) + + Ω(Object(uint(3), 1)).Should(match("uint", "3")) + Ω(Object(uint8(3), 1)).Should(match("uint8", "3")) + Ω(Object(uint16(3), 1)).Should(match("uint16", "3")) + Ω(Object(uint32(3), 1)).Should(match("uint32", "3")) + Ω(Object(uint64(3), 
1)).Should(match("uint64", "3")) + }) + + It("should handle uintptr differently", func() { + Ω(Object(uintptr(3), 1)).Should(match("uintptr", "0x3")) + }) + }) + + Describe("formatting channels", func() { + It("should give the type and format values correctly", func() { + c := make(chan<- bool, 3) + c <- true + c <- false + Ω(Object(c, 1)).Should(match("chan<- bool | len:2, cap:3", "%v", c)) + }) + }) + + Describe("formatting strings", func() { + It("should give the type and format values correctly", func() { + s := "a\nb\nc" + Ω(Object(s, 1)).Should(match("string", `a + b + c`)) + }) + }) + + Describe("formatting []byte slices", func() { + It("should present them as strings", func() { + b := []byte("a\nb\nc") + Ω(Object(b, 1)).Should(matchRegexp(`\[\]uint8 \| len:5, cap:\d+`, `a + b + c`)) + }) + }) + + Describe("formatting functions", func() { + It("should give the type and format values correctly", func() { + f := func(a string, b []int) ([]byte, error) { + return []byte("abc"), nil + } + Ω(Object(f, 1)).Should(match("func(string, []int) ([]uint8, error)", "%v", f)) + }) + }) + + Describe("formatting pointers", func() { + It("should give the type and dereference the value to format it correctly", func() { + a := 3 + Ω(Object(&a, 1)).Should(match(fmt.Sprintf("*int | %p", &a), "3")) + }) + + Context("when there are pointers to pointers...", func() { + It("should recursively deference the pointer until it gets to a value", func() { + a := 3 + var b *int + var c **int + var d ***int + b = &a + c = &b + d = &c + + Ω(Object(d, 1)).Should(match(fmt.Sprintf("***int | %p", d), "3")) + }) + }) + + Context("when the pointer points to nil", func() { + It("should say nil and not explode", func() { + var a *AStruct + Ω(Object(a, 1)).Should(match("*format_test.AStruct | 0x0", "nil")) + }) + }) + }) + + Describe("formatting arrays", func() { + It("should give the type and format values correctly", func() { + w := [3]string{"Jed Bartlet", "Toby Ziegler", "CJ Cregg"} + Ω(Object(w, 1)).Should(match("[3]string", `["Jed Bartlet", "Toby Ziegler", "CJ Cregg"]`)) + }) + + Context("with byte arrays", func() { + It("should give the type and format values correctly", func() { + w := [3]byte{17, 28, 19} + Ω(Object(w, 1)).Should(match("[3]uint8", `[17, 28, 19]`)) + }) + }) + }) + + Describe("formatting slices", func() { + It("should include the length and capacity in the type information", func() { + s := make([]bool, 3, 4) + Ω(Object(s, 1)).Should(match("[]bool | len:3, cap:4", "[false, false, false]")) + }) + + Context("when the slice contains long entries", func() { + It("should format the entries with newlines", func() { + w := []string{"Josiah Edward Bartlet", "Toby Ziegler", "CJ Cregg"} + expected := `[ + "Josiah Edward Bartlet", + "Toby Ziegler", + "CJ Cregg", + ]` + Ω(Object(w, 1)).Should(match("[]string | len:3, cap:3", expected)) + }) + }) + }) + + Describe("formatting maps", func() { + It("should include the length in the type information", func() { + m := make(map[int]bool, 5) + m[3] = true + m[4] = false + Ω(Object(m, 1)).Should(matchRegexp(`map\[int\]bool \| len:2`, hashMatchingRegexp("3: true", "4: false"))) + }) + + Context("when the slice contains long entries", func() { + It("should format the entries with newlines", func() { + m := map[string][]byte{} + m["Josiah Edward Bartlet"] = []byte("Martin Sheen") + m["Toby Ziegler"] = []byte("Richard Schiff") + m["CJ Cregg"] = []byte("Allison Janney") + expected := `{ + ("Josiah Edward Bartlet": "Martin Sheen"|"Toby Ziegler": "Richard Schiff"|"CJ Cregg": 
"Allison Janney"), + ("Josiah Edward Bartlet": "Martin Sheen"|"Toby Ziegler": "Richard Schiff"|"CJ Cregg": "Allison Janney"), + ("Josiah Edward Bartlet": "Martin Sheen"|"Toby Ziegler": "Richard Schiff"|"CJ Cregg": "Allison Janney"), + }` + Ω(Object(m, 1)).Should(matchRegexp(`map\[string\]\[\]uint8 \| len:3`, expected)) + }) + }) + }) + + Describe("formatting structs", func() { + It("should include the struct name and the field names", func() { + s := SimpleStruct{ + Name: "Oswald", + Enumeration: 17, + Veritas: true, + Data: []byte("datum"), + secret: 1983, + } + + Ω(Object(s, 1)).Should(match("format_test.SimpleStruct", `{Name: "Oswald", Enumeration: 17, Veritas: true, Data: "datum", secret: 1983}`)) + }) + + Context("when the struct contains long entries", func() { + It("should format the entries with new lines", func() { + s := &SimpleStruct{ + Name: "Mithrandir Gandalf Greyhame", + Enumeration: 2021, + Veritas: true, + Data: []byte("wizard"), + secret: 3, + } + + Ω(Object(s, 1)).Should(match(fmt.Sprintf("*format_test.SimpleStruct | %p", s), `{ + Name: "Mithrandir Gandalf Greyhame", + Enumeration: 2021, + Veritas: true, + Data: "wizard", + secret: 3, + }`)) + }) + }) + }) + + Describe("formatting nil values", func() { + It("should print out nil", func() { + Ω(Object(nil, 1)).Should(match("nil", "nil")) + var typedNil *AStruct + Ω(Object(typedNil, 1)).Should(match("*format_test.AStruct | 0x0", "nil")) + var c chan<- bool + Ω(Object(c, 1)).Should(match("chan<- bool | len:0, cap:0", "nil")) + var s []string + Ω(Object(s, 1)).Should(match("[]string | len:0, cap:0", "nil")) + var m map[string]bool + Ω(Object(m, 1)).Should(match("map[string]bool | len:0", "nil")) + }) + }) + + Describe("formatting aliased types", func() { + It("should print out the correct alias type", func() { + Ω(Object(StringAlias("alias"), 1)).Should(match("format_test.StringAlias", `alias`)) + Ω(Object(ByteAlias("alias"), 1)).Should(matchRegexp(`format_test\.ByteAlias \| len:5, cap:\d+`, `alias`)) + Ω(Object(IntAlias(3), 1)).Should(match("format_test.IntAlias", "3")) + }) + }) + + Describe("handling nested things", func() { + It("should produce a correctly nested representation", func() { + s := ComplexStruct{ + Strings: []string{"lots", "of", "short", "strings"}, + SimpleThings: []*SimpleStruct{ + {"short", 7, true, []byte("succinct"), 17}, + {"something longer", 427, true, []byte("designed to wrap around nicely"), 30}, + }, + DataMaps: map[int]ByteAlias{ + 17: ByteAlias("some substantially longer chunks of data"), + 1138: ByteAlias("that should make things wrap"), + }, + } + expected := `{ + Strings: \["lots", "of", "short", "strings"\], + SimpleThings: \[ + {Name: "short", Enumeration: 7, Veritas: true, Data: "succinct", secret: 17}, + { + Name: "something longer", + Enumeration: 427, + Veritas: true, + Data: "designed to wrap around nicely", + secret: 30, + }, + \], + DataMaps: { + (17: "some substantially longer chunks of data"|1138: "that should make things wrap"), + (17: "some substantially longer chunks of data"|1138: "that should make things wrap"), + }, + }` + Ω(Object(s, 1)).Should(matchRegexp(`format_test\.ComplexStruct`, expected)) + }) + }) + }) + + Describe("Handling unexported fields in structs", func() { + It("should handle all the various types correctly", func() { + a := int(5) + s := SecretiveStruct{ + boolValue: true, + intValue: 3, + uintValue: 4, + uintptrValue: 5, + floatValue: 6.0, + complexValue: complex(5.0, 3.0), + chanValue: make(chan bool, 2), + funcValue: func() {}, + pointerValue: &a, + 
sliceValue: []string{"string", "slice"}, + byteSliceValue: []byte("bytes"), + stringValue: "a string", + arrValue: [3]int{11, 12, 13}, + byteArrValue: [3]byte{17, 20, 32}, + mapValue: map[string]int{"a key": 20, "b key": 30}, + structValue: AStruct{"exported"}, + interfaceValue: map[string]int{"a key": 17}, + } + + expected := fmt.Sprintf(`{ + boolValue: true, + intValue: 3, + uintValue: 4, + uintptrValue: 0x5, + floatValue: 6, + complexValue: \(5\+3i\), + chanValue: %p, + funcValue: %p, + pointerValue: 5, + sliceValue: \["string", "slice"\], + byteSliceValue: "bytes", + stringValue: "a string", + arrValue: \[11, 12, 13\], + byteArrValue: \[17, 20, 32\], + mapValue: %s, + structValue: {Exported: "exported"}, + interfaceValue: {"a key": 17}, + }`, s.chanValue, s.funcValue, hashMatchingRegexp(`"a key": 20`, `"b key": 30`)) + + Ω(Object(s, 1)).Should(matchRegexp(`format_test\.SecretiveStruct`, expected)) + }) + }) + + Describe("Handling interfaces", func() { + It("should unpack the interface", func() { + outerHash := map[string]interface{}{} + innerHash := map[string]int{} + + innerHash["inner"] = 3 + outerHash["integer"] = 2 + outerHash["map"] = innerHash + + expected := hashMatchingRegexp(`"integer": 2`, `"map": {"inner": 3}`) + Ω(Object(outerHash, 1)).Should(matchRegexp(`map\[string\]interface {} \| len:2`, expected)) + }) + }) + + Describe("Handling recursive things", func() { + It("should not go crazy...", func() { + m := map[string]interface{}{} + m["integer"] = 2 + m["map"] = m + Ω(Object(m, 1)).Should(ContainSubstring("...")) + }) + }) + + Describe("When instructed to use the Stringer representation", func() { + BeforeEach(func() { + UseStringerRepresentation = true + }) + + AfterEach(func() { + UseStringerRepresentation = false + }) + + Context("when passed a GoStringer", func() { + It("should use what GoString() returns", func() { + Ω(Object(GoStringer{}, 1)).Should(ContainSubstring(": go-string")) + }) + }) + + Context("when passed a stringer", func() { + It("should use what String() returns", func() { + Ω(Object(Stringer{}, 1)).Should(ContainSubstring(": string")) + }) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/gbytes/buffer.go b/Godeps/_workspace/src/github.com/onsi/gomega/gbytes/buffer.go new file mode 100644 index 0000000000000..8775b8611a0de --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/gbytes/buffer.go @@ -0,0 +1,229 @@ +/* +Package gbytes provides a buffer that supports incrementally detecting input. + +You use gbytes.Buffer with the gbytes.Say matcher. When Say finds a match, it fastforwards the buffer's read cursor to the end of that match. + +Subsequent matches against the buffer will only operate against data that appears *after* the read cursor. + +The read cursor is an opaque implementation detail that you cannot access. You should use the Say matcher to sift through the buffer. You can always +access the entire buffer's contents with Contents(). + +*/ +package gbytes + +import ( + "errors" + "fmt" + "io" + "regexp" + "sync" + "time" +) + +/* +gbytes.Buffer implements an io.Writer and can be used with the gbytes.Say matcher. + +You should only use a gbytes.Buffer in test code. It stores all writes in an in-memory buffer - behavior that is inappropriate for production code! 
+*/ +type Buffer struct { + contents []byte + readCursor uint64 + lock *sync.Mutex + detectCloser chan interface{} + closed bool +} + +/* +NewBuffer returns a new gbytes.Buffer +*/ +func NewBuffer() *Buffer { + return &Buffer{ + lock: &sync.Mutex{}, + } +} + +/* +BufferWithBytes returns a new gbytes.Buffer seeded with the passed in bytes +*/ +func BufferWithBytes(bytes []byte) *Buffer { + return &Buffer{ + lock: &sync.Mutex{}, + contents: bytes, + } +} + +/* +Write implements the io.Writer interface +*/ +func (b *Buffer) Write(p []byte) (n int, err error) { + b.lock.Lock() + defer b.lock.Unlock() + + if b.closed { + return 0, errors.New("attempt to write to closed buffer") + } + + b.contents = append(b.contents, p...) + return len(p), nil +} + +/* +Read implements the io.Reader interface. It advances the +cursor as it reads. + +Returns an error if called after Close. +*/ +func (b *Buffer) Read(d []byte) (int, error) { + b.lock.Lock() + defer b.lock.Unlock() + + if b.closed { + return 0, errors.New("attempt to read from closed buffer") + } + + if uint64(len(b.contents)) <= b.readCursor { + return 0, io.EOF + } + + n := copy(d, b.contents[b.readCursor:]) + b.readCursor += uint64(n) + + return n, nil +} + +/* +Close signifies that the buffer will no longer be written to +*/ +func (b *Buffer) Close() error { + b.lock.Lock() + defer b.lock.Unlock() + + b.closed = true + + return nil +} + +/* +Closed returns true if the buffer has been closed +*/ +func (b *Buffer) Closed() bool { + b.lock.Lock() + defer b.lock.Unlock() + + return b.closed +} + +/* +Contents returns all data ever written to the buffer. +*/ +func (b *Buffer) Contents() []byte { + b.lock.Lock() + defer b.lock.Unlock() + + contents := make([]byte, len(b.contents)) + copy(contents, b.contents) + return contents +} + +/* +Detect takes a regular expression and returns a channel. + +The channel will receive true the first time data matching the regular expression is written to the buffer. +The channel is subsequently closed and the buffer's read-cursor is fast-forwarded to just after the matching region. + +You typically don't need to use Detect and should use the ghttp.Say matcher instead. Detect is useful, however, in cases where your code must +be branch and handle different outputs written to the buffer. + +For example, consider a buffer hooked up to the stdout of a client library. You may (or may not, depending on state outside of your control) need to authenticate the client library. + +You could do something like: + +select { +case <-buffer.Detect("You are not logged in"): + //log in +case <-buffer.Detect("Success"): + //carry on +case <-time.After(time.Second): + //welp +} +buffer.CancelDetects() + +You should always call CancelDetects after using Detect. This will close any channels that have not detected and clean up the goroutines that were spawned to support them. + +Finally, you can pass detect a format string followed by variadic arguments. This will construct the regexp using fmt.Sprintf. +*/ +func (b *Buffer) Detect(desired string, args ...interface{}) chan bool { + formattedRegexp := desired + if len(args) > 0 { + formattedRegexp = fmt.Sprintf(desired, args...) 
+ } + re := regexp.MustCompile(formattedRegexp) + + b.lock.Lock() + defer b.lock.Unlock() + + if b.detectCloser == nil { + b.detectCloser = make(chan interface{}) + } + + closer := b.detectCloser + response := make(chan bool) + go func() { + ticker := time.NewTicker(10 * time.Millisecond) + defer ticker.Stop() + defer close(response) + for { + select { + case <-ticker.C: + b.lock.Lock() + data, cursor := b.contents[b.readCursor:], b.readCursor + loc := re.FindIndex(data) + b.lock.Unlock() + + if loc != nil { + response <- true + b.lock.Lock() + newCursorPosition := cursor + uint64(loc[1]) + if newCursorPosition >= b.readCursor { + b.readCursor = newCursorPosition + } + b.lock.Unlock() + return + } + case <-closer: + return + } + } + }() + + return response +} + +/* +CancelDetects cancels any pending detects and cleans up their goroutines. You should always call this when you're done with a set of Detect channels. +*/ +func (b *Buffer) CancelDetects() { + b.lock.Lock() + defer b.lock.Unlock() + + close(b.detectCloser) + b.detectCloser = nil +} + +func (b *Buffer) didSay(re *regexp.Regexp) (bool, []byte) { + b.lock.Lock() + defer b.lock.Unlock() + + unreadBytes := b.contents[b.readCursor:] + copyOfUnreadBytes := make([]byte, len(unreadBytes)) + copy(copyOfUnreadBytes, unreadBytes) + + loc := re.FindIndex(unreadBytes) + + if loc != nil { + b.readCursor += uint64(loc[1]) + return true, copyOfUnreadBytes + } else { + return false, copyOfUnreadBytes + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/gbytes/buffer_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/gbytes/buffer_test.go new file mode 100644 index 0000000000000..b1111389e1a38 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/gbytes/buffer_test.go @@ -0,0 +1,158 @@ +package gbytes_test + +import ( + "io" + "time" + + . "github.com/onsi/gomega/gbytes" + + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("Buffer", func() { + var buffer *Buffer + + BeforeEach(func() { + buffer = NewBuffer() + }) + + Describe("dumping the entire contents of the buffer", func() { + It("should return everything that's been written", func() { + buffer.Write([]byte("abc")) + buffer.Write([]byte("def")) + Ω(buffer.Contents()).Should(Equal([]byte("abcdef"))) + + Ω(buffer).Should(Say("bcd")) + Ω(buffer.Contents()).Should(Equal([]byte("abcdef"))) + }) + }) + + Describe("creating a buffer with bytes", func() { + It("should create the buffer with the cursor set to the beginning", func() { + buffer := BufferWithBytes([]byte("abcdef")) + Ω(buffer.Contents()).Should(Equal([]byte("abcdef"))) + Ω(buffer).Should(Say("abc")) + Ω(buffer).ShouldNot(Say("abc")) + Ω(buffer).Should(Say("def")) + }) + }) + + Describe("reading from a buffer", func() { + It("should read the current contents of the buffer", func() { + buffer := BufferWithBytes([]byte("abcde")) + + dest := make([]byte, 3) + n, err := buffer.Read(dest) + Ω(err).ShouldNot(HaveOccurred()) + Ω(n).Should(Equal(3)) + Ω(string(dest)).Should(Equal("abc")) + + dest = make([]byte, 3) + n, err = buffer.Read(dest) + Ω(err).ShouldNot(HaveOccurred()) + Ω(n).Should(Equal(2)) + Ω(string(dest[:n])).Should(Equal("de")) + + n, err = buffer.Read(dest) + Ω(err).Should(Equal(io.EOF)) + Ω(n).Should(Equal(0)) + }) + + Context("after the buffer has been closed", func() { + It("returns an error", func() { + buffer := BufferWithBytes([]byte("abcde")) + + buffer.Close() + + dest := make([]byte, 3) + n, err := buffer.Read(dest) + Ω(err).Should(HaveOccurred()) + Ω(n).Should(Equal(0)) + }) + }) + }) + + Describe("detecting regular expressions", func() { + It("should fire the appropriate channel when the passed in pattern matches, then close it", func(done Done) { + go func() { + time.Sleep(10 * time.Millisecond) + buffer.Write([]byte("abcde")) + }() + + A := buffer.Detect("%s", "a.c") + B := buffer.Detect("def") + + var gotIt bool + select { + case gotIt = <-A: + case <-B: + Fail("should not have gotten here") + } + + Ω(gotIt).Should(BeTrue()) + Eventually(A).Should(BeClosed()) + + buffer.Write([]byte("f")) + Eventually(B).Should(Receive()) + Eventually(B).Should(BeClosed()) + + close(done) + }) + + It("should fast-forward the buffer upon detection", func(done Done) { + buffer.Write([]byte("abcde")) + <-buffer.Detect("abc") + Ω(buffer).ShouldNot(Say("abc")) + Ω(buffer).Should(Say("de")) + close(done) + }) + + It("should only fast-forward the buffer when the channel is read, and only if doing so would not rewind it", func(done Done) { + buffer.Write([]byte("abcde")) + A := buffer.Detect("abc") + time.Sleep(20 * time.Millisecond) //give the goroutine a chance to detect and write to the channel + Ω(buffer).Should(Say("abcd")) + <-A + Ω(buffer).ShouldNot(Say("d")) + Ω(buffer).Should(Say("e")) + Eventually(A).Should(BeClosed()) + close(done) + }) + + It("should be possible to cancel a detection", func(done Done) { + A := buffer.Detect("abc") + B := buffer.Detect("def") + buffer.CancelDetects() + buffer.Write([]byte("abcdef")) + Eventually(A).Should(BeClosed()) + Eventually(B).Should(BeClosed()) + + Ω(buffer).Should(Say("bcde")) + <-buffer.Detect("f") + close(done) + }) + }) + + Describe("closing the buffer", func() { + It("should error when further write attempts are made", func() { + _, err := buffer.Write([]byte("abc")) + Ω(err).ShouldNot(HaveOccurred()) + + buffer.Close() + + _, err = buffer.Write([]byte("def")) + Ω(err).Should(HaveOccurred()) + + 
Ω(buffer.Contents()).Should(Equal([]byte("abc"))) + }) + + It("should be closed", func() { + Ω(buffer.Closed()).Should(BeFalse()) + + buffer.Close() + + Ω(buffer.Closed()).Should(BeTrue()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/gbytes/gbuffer_suite_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/gbytes/gbuffer_suite_test.go new file mode 100644 index 0000000000000..3a7dc06123e86 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/gbytes/gbuffer_suite_test.go @@ -0,0 +1,13 @@ +package gbytes_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + "testing" +) + +func TestGbytes(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Gbytes Suite") +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/gbytes/say_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/gbytes/say_matcher.go new file mode 100644 index 0000000000000..ce5ebcbfa59f9 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/gbytes/say_matcher.go @@ -0,0 +1,105 @@ +package gbytes + +import ( + "fmt" + "regexp" + + "github.com/onsi/gomega/format" +) + +//Objects satisfying the BufferProvider can be used with the Say matcher. +type BufferProvider interface { + Buffer() *Buffer +} + +/* +Say is a Gomega matcher that operates on gbytes.Buffers: + + Ω(buffer).Should(Say("something")) + +will succeed if the unread portion of the buffer matches the regular expression "something". + +When Say succeeds, it fast forwards the gbytes.Buffer's read cursor to just after the succesful match. +Thus, subsequent calls to Say will only match against the unread portion of the buffer + +Say pairs very well with Eventually. To asser that a buffer eventually receives data matching "[123]-star" within 3 seconds you can: + + Eventually(buffer, 3).Should(Say("[123]-star")) + +Ditto with consistently. To assert that a buffer does not receive data matching "never-see-this" for 1 second you can: + + Consistently(buffer, 1).ShouldNot(Say("never-see-this")) + +In addition to bytes.Buffers, Say can operate on objects that implement the gbytes.BufferProvider interface. +In such cases, Say simply operates on the *gbytes.Buffer returned by Buffer() + +If the buffer is closed, the Say matcher will tell Eventually to abort. +*/ +func Say(expected string, args ...interface{}) *sayMatcher { + formattedRegexp := expected + if len(args) > 0 { + formattedRegexp = fmt.Sprintf(expected, args...) + } + return &sayMatcher{ + re: regexp.MustCompile(formattedRegexp), + } +} + +type sayMatcher struct { + re *regexp.Regexp + receivedSayings []byte +} + +func (m *sayMatcher) buffer(actual interface{}) (*Buffer, bool) { + var buffer *Buffer + + switch x := actual.(type) { + case *Buffer: + buffer = x + case BufferProvider: + buffer = x.Buffer() + default: + return nil, false + } + + return buffer, true +} + +func (m *sayMatcher) Match(actual interface{}) (success bool, err error) { + buffer, ok := m.buffer(actual) + if !ok { + return false, fmt.Errorf("Say must be passed a *gbytes.Buffer or BufferProvider. 
Got:\n%s", format.Object(actual, 1)) + } + + didSay, sayings := buffer.didSay(m.re) + m.receivedSayings = sayings + + return didSay, nil +} + +func (m *sayMatcher) FailureMessage(actual interface{}) (message string) { + return fmt.Sprintf( + "Got stuck at:\n%s\nWaiting for:\n%s", + format.IndentString(string(m.receivedSayings), 1), + format.IndentString(m.re.String(), 1), + ) +} + +func (m *sayMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return fmt.Sprintf( + "Saw:\n%s\nWhich matches the unexpected:\n%s", + format.IndentString(string(m.receivedSayings), 1), + format.IndentString(m.re.String(), 1), + ) +} + +func (m *sayMatcher) MatchMayChangeInTheFuture(actual interface{}) bool { + switch x := actual.(type) { + case *Buffer: + return !x.Closed() + case BufferProvider: + return !x.Buffer().Closed() + default: + return true + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/gbytes/say_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/gbytes/say_matcher_test.go new file mode 100644 index 0000000000000..d0ddf1f74ce78 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/gbytes/say_matcher_test.go @@ -0,0 +1,163 @@ +package gbytes_test + +import ( + "time" + . "github.com/onsi/gomega/gbytes" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" +) + +type speaker struct { + buffer *Buffer +} + +func (s *speaker) Buffer() *Buffer { + return s.buffer +} + +var _ = Describe("SayMatcher", func() { + var buffer *Buffer + + BeforeEach(func() { + buffer = NewBuffer() + buffer.Write([]byte("abc")) + }) + + Context("when actual is not a gexec Buffer, or a BufferProvider", func() { + It("should error", func() { + failures := InterceptGomegaFailures(func() { + Ω("foo").Should(Say("foo")) + }) + Ω(failures[0]).Should(ContainSubstring("*gbytes.Buffer")) + }) + }) + + Context("when a match is found", func() { + It("should succeed", func() { + Ω(buffer).Should(Say("abc")) + }) + + It("should support printf-like formatting", func() { + Ω(buffer).Should(Say("a%sc", "b")) + }) + + It("should use a regular expression", func() { + Ω(buffer).Should(Say("a.c")) + }) + + It("should fastforward the buffer", func() { + buffer.Write([]byte("def")) + Ω(buffer).Should(Say("abcd")) + Ω(buffer).Should(Say("ef")) + Ω(buffer).ShouldNot(Say("[a-z]")) + }) + }) + + Context("when no match is found", func() { + It("should not error", func() { + Ω(buffer).ShouldNot(Say("def")) + }) + + Context("when the buffer is closed", func() { + BeforeEach(func() { + buffer.Close() + }) + + It("should abort an eventually", func() { + t := time.Now() + failures := InterceptGomegaFailures(func() { + Eventually(buffer).Should(Say("def")) + }) + Eventually(buffer).ShouldNot(Say("def")) + Ω(time.Since(t)).Should(BeNumerically("<", 500*time.Millisecond)) + Ω(failures).Should(HaveLen(1)) + + t = time.Now() + Eventually(buffer).Should(Say("abc")) + Ω(time.Since(t)).Should(BeNumerically("<", 500*time.Millisecond)) + }) + + It("should abort a consistently", func() { + t := time.Now() + Consistently(buffer, 2.0).ShouldNot(Say("def")) + Ω(time.Since(t)).Should(BeNumerically("<", 500*time.Millisecond)) + }) + + It("should not error with a synchronous matcher", func() { + Ω(buffer).ShouldNot(Say("def")) + Ω(buffer).Should(Say("abc")) + }) + }) + }) + + Context("when a positive match fails", func() { + It("should report where it got stuck", func() { + Ω(buffer).Should(Say("abc")) + buffer.Write([]byte("def")) + failures := InterceptGomegaFailures(func() { + Ω(buffer).Should(Say("abc")) + 
}) + Ω(failures[0]).Should(ContainSubstring("Got stuck at:")) + Ω(failures[0]).Should(ContainSubstring("def")) + }) + }) + + Context("when a negative match fails", func() { + It("should report where it got stuck", func() { + failures := InterceptGomegaFailures(func() { + Ω(buffer).ShouldNot(Say("abc")) + }) + Ω(failures[0]).Should(ContainSubstring("Saw:")) + Ω(failures[0]).Should(ContainSubstring("Which matches the unexpected:")) + Ω(failures[0]).Should(ContainSubstring("abc")) + }) + }) + + Context("when a match is not found", func() { + It("should not fastforward the buffer", func() { + Ω(buffer).ShouldNot(Say("def")) + Ω(buffer).Should(Say("abc")) + }) + }) + + Context("a nice real-life example", func() { + It("should behave well", func() { + Ω(buffer).Should(Say("abc")) + go func() { + time.Sleep(10 * time.Millisecond) + buffer.Write([]byte("def")) + }() + Ω(buffer).ShouldNot(Say("def")) + Eventually(buffer).Should(Say("def")) + }) + }) + + Context("when actual is a BufferProvider", func() { + It("should use actual's buffer", func() { + s := &speaker{ + buffer: NewBuffer(), + } + + Ω(s).ShouldNot(Say("abc")) + + s.Buffer().Write([]byte("abc")) + Ω(s).Should(Say("abc")) + }) + + It("should abort an eventually", func() { + s := &speaker{ + buffer: NewBuffer(), + } + + s.buffer.Close() + + t := time.Now() + failures := InterceptGomegaFailures(func() { + Eventually(s).Should(Say("def")) + }) + Ω(failures).Should(HaveLen(1)) + Ω(time.Since(t)).Should(BeNumerically("<", 500*time.Millisecond)) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/gexec/build.go b/Godeps/_workspace/src/github.com/onsi/gomega/gexec/build.go new file mode 100644 index 0000000000000..3e9bf9f9478fe --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/gexec/build.go @@ -0,0 +1,78 @@ +package gexec + +import ( + "errors" + "fmt" + "io/ioutil" + "os" + "os/exec" + "path" + "path/filepath" + "runtime" +) + +var tmpDir string + +/* +Build uses go build to compile the package at packagePath. The resulting binary is saved off in a temporary directory. +A path pointing to this binary is returned. + +Build uses the $GOPATH set in your environment. It passes the variadic args on to `go build`. +*/ +func Build(packagePath string, args ...string) (compiledPath string, err error) { + return BuildIn(os.Getenv("GOPATH"), packagePath, args...) +} + +/* +BuildIn is identical to Build but allows you to specify a custom $GOPATH (the first argument). +*/ +func BuildIn(gopath string, packagePath string, args ...string) (compiledPath string, err error) { + tmpDir, err := temporaryDirectory() + if err != nil { + return "", err + } + + if len(gopath) == 0 { + return "", errors.New("$GOPATH not provided when building " + packagePath) + } + + executable := filepath.Join(tmpDir, path.Base(packagePath)) + if runtime.GOOS == "windows" { + executable = executable + ".exe" + } + + cmdArgs := append([]string{"build"}, args...) + cmdArgs = append(cmdArgs, "-o", executable, packagePath) + + build := exec.Command("go", cmdArgs...) + build.Env = append([]string{"GOPATH=" + gopath}, os.Environ()...) + + output, err := build.CombinedOutput() + if err != nil { + return "", fmt.Errorf("Failed to build %s:\n\nError:\n%s\n\nOutput:\n%s", packagePath, err, string(output)) + } + + return executable, nil +} + +/* +You should call CleanupBuildArtifacts before your test ends to clean up any temporary artifacts generated by +gexec. In Ginkgo this is typically done in an AfterSuite callback. 
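As an illustrative aside (editorial, not part of the vendored file): a minimal Ginkgo suite typically pairs `Build` and `CleanupBuildArtifacts` roughly like this; the package path `./fixtures/mybinary` and the suite name are placeholders.

```golang
package mypackage_test

import (
	"testing"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
	"github.com/onsi/gomega/gexec"
)

var binPath string

func TestMyPackage(t *testing.T) {
	RegisterFailHandler(Fail)

	BeforeSuite(func() {
		var err error
		// Compile the fixture binary once for the whole suite; the path is a placeholder.
		binPath, err = gexec.Build("./fixtures/mybinary")
		Ω(err).ShouldNot(HaveOccurred())
	})

	AfterSuite(func() {
		// Delete the temporary binaries produced by gexec.Build.
		gexec.CleanupBuildArtifacts()
	})

	RunSpecs(t, "MyPackage Suite")
}
```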
+*/ +func CleanupBuildArtifacts() { + if tmpDir != "" { + os.RemoveAll(tmpDir) + } +} + +func temporaryDirectory() (string, error) { + var err error + if tmpDir == "" { + tmpDir, err = ioutil.TempDir("", "gexec_artifacts") + if err != nil { + return "", err + } + } + + return ioutil.TempDir(tmpDir, "g") +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/gexec/exit_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/gexec/exit_matcher.go new file mode 100644 index 0000000000000..e6f4329427005 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/gexec/exit_matcher.go @@ -0,0 +1,88 @@ +package gexec + +import ( + "fmt" + + "github.com/onsi/gomega/format" +) + +/* +The Exit matcher operates on a session: + + Ω(session).Should(Exit()) + +Exit passes if the session has already exited. + +If no status code is provided, then Exit will succeed if the session has exited regardless of exit code. +Otherwise, Exit will only succeed if the process has exited with the provided status code. + +Note that the process must have already exited. To wait for a process to exit, use Eventually: + + Eventually(session, 3).Should(Exit(0)) +*/ +func Exit(optionalExitCode ...int) *exitMatcher { + exitCode := -1 + if len(optionalExitCode) > 0 { + exitCode = optionalExitCode[0] + } + + return &exitMatcher{ + exitCode: exitCode, + } +} + +type exitMatcher struct { + exitCode int + didExit bool + actualExitCode int +} + +type Exiter interface { + ExitCode() int +} + +func (m *exitMatcher) Match(actual interface{}) (success bool, err error) { + exiter, ok := actual.(Exiter) + if !ok { + return false, fmt.Errorf("Exit must be passed a gexec.Exiter (Missing method ExitCode() int) Got:\n%s", format.Object(actual, 1)) + } + + m.actualExitCode = exiter.ExitCode() + + if m.actualExitCode == -1 { + return false, nil + } + + if m.exitCode == -1 { + return true, nil + } + return m.exitCode == m.actualExitCode, nil +} + +func (m *exitMatcher) FailureMessage(actual interface{}) (message string) { + if m.actualExitCode == -1 { + return "Expected process to exit. It did not." + } else { + return format.Message(m.actualExitCode, "to match exit code:", m.exitCode) + } +} + +func (m *exitMatcher) NegatedFailureMessage(actual interface{}) (message string) { + if m.actualExitCode == -1 { + return "you really shouldn't be able to see this!" + } else { + if m.exitCode == -1 { + return "Expected process not to exit. It did." + } else { + return format.Message(m.actualExitCode, "not to match exit code:", m.exitCode) + } + } +} + +func (m *exitMatcher) MatchMayChangeInTheFuture(actual interface{}) bool { + session, ok := actual.(*Session) + if ok { + return session.ExitCode() == -1 + } + return true +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/gexec/exit_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/gexec/exit_matcher_test.go new file mode 100644 index 0000000000000..9f18e2d6e0a91 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/gexec/exit_matcher_test.go @@ -0,0 +1,113 @@ +package gexec_test + +import ( + "os/exec" + "time" + . "github.com/onsi/gomega/gexec" + + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" +) + +type NeverExits struct{} + +func (e NeverExits) ExitCode() int { + return -1 +} + +var _ = Describe("ExitMatcher", func() { + var command *exec.Cmd + var session *Session + + BeforeEach(func() { + var err error + command = exec.Command(fireflyPath, "0") + session, err = Start(command, nil, nil) + Ω(err).ShouldNot(HaveOccurred()) + }) + + Describe("when passed something that is an Exiter", func() { + It("should act normally", func() { + failures := InterceptGomegaFailures(func() { + Ω(NeverExits{}).Should(Exit()) + }) + + Ω(failures[0]).Should(ContainSubstring("Expected process to exit. It did not.")) + }) + }) + + Describe("when passed something that is not an Exiter", func() { + It("should error", func() { + failures := InterceptGomegaFailures(func() { + Ω("aardvark").Should(Exit()) + }) + + Ω(failures[0]).Should(ContainSubstring("Exit must be passed a gexec.Exiter")) + }) + }) + + Context("with no exit code", func() { + It("should say the right things when it fails", func() { + Ω(session).ShouldNot(Exit()) + + failures := InterceptGomegaFailures(func() { + Ω(session).Should(Exit()) + }) + + Ω(failures[0]).Should(ContainSubstring("Expected process to exit. It did not.")) + + Eventually(session).Should(Exit()) + + Ω(session).Should(Exit()) + + failures = InterceptGomegaFailures(func() { + Ω(session).ShouldNot(Exit()) + }) + + Ω(failures[0]).Should(ContainSubstring("Expected process not to exit. It did.")) + }) + }) + + Context("with an exit code", func() { + It("should say the right things when it fails", func() { + Ω(session).ShouldNot(Exit(0)) + Ω(session).ShouldNot(Exit(1)) + + failures := InterceptGomegaFailures(func() { + Ω(session).Should(Exit(0)) + }) + + Ω(failures[0]).Should(ContainSubstring("Expected process to exit. It did not.")) + + Eventually(session).Should(Exit(0)) + + Ω(session).Should(Exit(0)) + + failures = InterceptGomegaFailures(func() { + Ω(session).Should(Exit(1)) + }) + + Ω(failures[0]).Should(ContainSubstring("to match exit code:")) + + Ω(session).ShouldNot(Exit(1)) + + failures = InterceptGomegaFailures(func() { + Ω(session).ShouldNot(Exit(0)) + }) + + Ω(failures[0]).Should(ContainSubstring("not to match exit code:")) + }) + }) + + Describe("bailing out early", func() { + It("should bail out early once the process exits", func() { + t := time.Now() + + failures := InterceptGomegaFailures(func() { + Eventually(session).Should(Exit(1)) + }) + Ω(time.Since(t)).Should(BeNumerically("<=", 500*time.Millisecond)) + Ω(failures).Should(HaveLen(1)) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/gexec/gexec_suite_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/gexec/gexec_suite_test.go new file mode 100644 index 0000000000000..87672aafa394e --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/gexec/gexec_suite_test.go @@ -0,0 +1,26 @@ +package gexec_test + +import ( + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" + "github.com/onsi/gomega/gexec" + + "testing" +) + +var fireflyPath string + +func TestGexec(t *testing.T) { + BeforeSuite(func() { + var err error + fireflyPath, err = gexec.Build("./_fixture/firefly") + Ω(err).ShouldNot(HaveOccurred()) + }) + + AfterSuite(func() { + gexec.CleanupBuildArtifacts() + }) + + RegisterFailHandler(Fail) + RunSpecs(t, "Gexec Suite") +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/gexec/prefixed_writer.go b/Godeps/_workspace/src/github.com/onsi/gomega/gexec/prefixed_writer.go new file mode 100644 index 0000000000000..05e695abc8df0 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/gexec/prefixed_writer.go @@ -0,0 +1,53 @@ +package gexec + +import ( + "io" + "sync" +) + +/* +PrefixedWriter wraps an io.Writer, emiting the passed in prefix at the beginning of each new line. +This can be useful when running multiple gexec.Sessions concurrently - you can prefix the log output of each +session by passing in a PrefixedWriter: + +gexec.Start(cmd, NewPrefixedWriter("[my-cmd] ", GinkgoWriter), NewPrefixedWriter("[my-cmd] ", GinkgoWriter)) +*/ +type PrefixedWriter struct { + prefix []byte + writer io.Writer + lock *sync.Mutex + atStartOfLine bool +} + +func NewPrefixedWriter(prefix string, writer io.Writer) *PrefixedWriter { + return &PrefixedWriter{ + prefix: []byte(prefix), + writer: writer, + lock: &sync.Mutex{}, + atStartOfLine: true, + } +} + +func (w *PrefixedWriter) Write(b []byte) (int, error) { + w.lock.Lock() + defer w.lock.Unlock() + + toWrite := []byte{} + + for _, c := range b { + if w.atStartOfLine { + toWrite = append(toWrite, w.prefix...) + } + + toWrite = append(toWrite, c) + + w.atStartOfLine = c == '\n' + } + + _, err := w.writer.Write(toWrite) + if err != nil { + return 0, err + } + + return len(b), nil +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/gexec/prefixed_writer_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/gexec/prefixed_writer_test.go new file mode 100644 index 0000000000000..8657d0c9de0e2 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/gexec/prefixed_writer_test.go @@ -0,0 +1,43 @@ +package gexec_test + +import ( + "bytes" + + . "github.com/onsi/gomega/gexec" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" +) + +var _ = Describe("PrefixedWriter", func() { + var buffer *bytes.Buffer + var writer *PrefixedWriter + BeforeEach(func() { + buffer = &bytes.Buffer{} + writer = NewPrefixedWriter("[p]", buffer) + }) + + It("should emit the prefix on newlines", func() { + writer.Write([]byte("abc")) + writer.Write([]byte("def\n")) + writer.Write([]byte("hij\n")) + writer.Write([]byte("\n\n")) + writer.Write([]byte("klm\n\nnop")) + writer.Write([]byte("")) + writer.Write([]byte("qrs")) + writer.Write([]byte("\ntuv\nwx")) + writer.Write([]byte("yz\n\n")) + + Ω(buffer.String()).Should(Equal(`[p]abcdef +[p]hij +[p] +[p] +[p]klm +[p] +[p]nopqrs +[p]tuv +[p]wxyz +[p] +`)) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/gexec/session.go b/Godeps/_workspace/src/github.com/onsi/gomega/gexec/session.go new file mode 100644 index 0000000000000..460cfe58846a7 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/gexec/session.go @@ -0,0 +1,214 @@ +/* +Package gexec provides support for testing external processes. +*/ +package gexec + +import ( + "io" + "os" + "os/exec" + "reflect" + "sync" + "syscall" + + . 
"github.com/onsi/gomega" + "github.com/onsi/gomega/gbytes" +) + +const INVALID_EXIT_CODE = 254 + +type Session struct { + //The wrapped command + Command *exec.Cmd + + //A *gbytes.Buffer connected to the command's stdout + Out *gbytes.Buffer + + //A *gbytes.Buffer connected to the command's stderr + Err *gbytes.Buffer + + //A channel that will close when the command exits + Exited <-chan struct{} + + lock *sync.Mutex + exitCode int +} + +/* +Start starts the passed-in *exec.Cmd command. It wraps the command in a *gexec.Session. + +The session pipes the command's stdout and stderr to two *gbytes.Buffers available as properties on the session: session.Out and session.Err. +These buffers can be used with the gbytes.Say matcher to match against unread output: + + Ω(session.Out).Should(gbytes.Say("foo-out")) + Ω(session.Err).Should(gbytes.Say("foo-err")) + +In addition, Session satisfies the gbytes.BufferProvider interface and provides the stdout *gbytes.Buffer. This allows you to replace the first line, above, with: + + Ω(session).Should(gbytes.Say("foo-out")) + +When outWriter and/or errWriter are non-nil, the session will pipe stdout and/or stderr output both into the session *gybtes.Buffers and to the passed-in outWriter/errWriter. +This is useful for capturing the process's output or logging it to screen. In particular, when using Ginkgo it can be convenient to direct output to the GinkgoWriter: + + session, err := Start(command, GinkgoWriter, GinkgoWriter) + +This will log output when running tests in verbose mode, but - otherwise - will only log output when a test fails. + +The session wrapper is responsible for waiting on the *exec.Cmd command. You *should not* call command.Wait() yourself. +Instead, to assert that the command has exited you can use the gexec.Exit matcher: + + Ω(session).Should(gexec.Exit()) + +When the session exits it closes the stdout and stderr gbytes buffers. This will short circuit any +Eventuallys waiting fo the buffers to Say something. +*/ +func Start(command *exec.Cmd, outWriter io.Writer, errWriter io.Writer) (*Session, error) { + exited := make(chan struct{}) + + session := &Session{ + Command: command, + Out: gbytes.NewBuffer(), + Err: gbytes.NewBuffer(), + Exited: exited, + lock: &sync.Mutex{}, + exitCode: -1, + } + + var commandOut, commandErr io.Writer + + commandOut, commandErr = session.Out, session.Err + + if outWriter != nil && !reflect.ValueOf(outWriter).IsNil() { + commandOut = io.MultiWriter(commandOut, outWriter) + } + + if errWriter != nil && !reflect.ValueOf(errWriter).IsNil() { + commandErr = io.MultiWriter(commandErr, errWriter) + } + + command.Stdout = commandOut + command.Stderr = commandErr + + err := command.Start() + if err == nil { + go session.monitorForExit(exited) + } + + return session, err +} + +/* +Buffer implements the gbytes.BufferProvider interface and returns s.Out +This allows you to make gbytes.Say matcher assertions against stdout without having to reference .Out: + + Eventually(session).Should(gbytes.Say("foo")) +*/ +func (s *Session) Buffer() *gbytes.Buffer { + return s.Out +} + +/* +ExitCode returns the wrapped command's exit code. If the command hasn't exited yet, ExitCode returns -1. 
+ +To assert that the command has exited it is more convenient to use the Exit matcher: + + Eventually(s).Should(gexec.Exit()) + +When the process exits because it has received a particular signal, the exit code will be 128+signal-value +(See http://www.tldp.org/LDP/abs/html/exitcodes.html and http://man7.org/linux/man-pages/man7/signal.7.html) + +*/ +func (s *Session) ExitCode() int { + s.lock.Lock() + defer s.lock.Unlock() + return s.exitCode +} + +/* +Wait waits until the wrapped command exits. It can be passed an optional timeout. +If the command does not exit within the timeout, Wait will trigger a test failure. + +Wait returns the session, making it possible to chain: + + session.Wait().Out.Contents() + +will wait for the command to exit then return the entirety of Out's contents. + +Wait uses eventually under the hood and accepts the same timeout/polling intervals that eventually does. +*/ +func (s *Session) Wait(timeout ...interface{}) *Session { + Eventually(s, timeout...).Should(Exit()) + return s +} + +/* +Kill sends the running command a SIGKILL signal. It does not wait for the process to exit. + +If the command has already exited, Kill returns silently. + +The session is returned to enable chaining. +*/ +func (s *Session) Kill() *Session { + if s.ExitCode() != -1 { + return s + } + s.Command.Process.Kill() + return s +} + +/* +Interrupt sends the running command a SIGINT signal. It does not wait for the process to exit. + +If the command has already exited, Interrupt returns silently. + +The session is returned to enable chaining. +*/ +func (s *Session) Interrupt() *Session { + return s.Signal(syscall.SIGINT) +} + +/* +Terminate sends the running command a SIGTERM signal. It does not wait for the process to exit. + +If the command has already exited, Terminate returns silently. + +The session is returned to enable chaining. +*/ +func (s *Session) Terminate() *Session { + return s.Signal(syscall.SIGTERM) +} + +/* +Terminate sends the running command the passed in signal. It does not wait for the process to exit. + +If the command has already exited, Signal returns silently. + +The session is returned to enable chaining. +*/ +func (s *Session) Signal(signal os.Signal) *Session { + if s.ExitCode() != -1 { + return s + } + s.Command.Process.Signal(signal) + return s +} + +func (s *Session) monitorForExit(exited chan<- struct{}) { + err := s.Command.Wait() + s.lock.Lock() + s.Out.Close() + s.Err.Close() + status := s.Command.ProcessState.Sys().(syscall.WaitStatus) + if status.Signaled() { + s.exitCode = 128 + int(status.Signal()) + } else { + exitStatus := status.ExitStatus() + if exitStatus == -1 && err != nil { + s.exitCode = INVALID_EXIT_CODE + } + s.exitCode = exitStatus + } + s.lock.Unlock() + + close(exited) +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/gexec/session_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/gexec/session_test.go new file mode 100644 index 0000000000000..cd48e6f4dbeae --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/gexec/session_test.go @@ -0,0 +1,177 @@ +package gexec_test + +import ( + "os/exec" + "syscall" + "time" + . "github.com/onsi/gomega/gbytes" + . "github.com/onsi/gomega/gexec" + + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("Session", func() { + var command *exec.Cmd + var session *Session + + var outWriter, errWriter *Buffer + + BeforeEach(func() { + outWriter = nil + errWriter = nil + }) + + JustBeforeEach(func() { + command = exec.Command(fireflyPath) + var err error + session, err = Start(command, outWriter, errWriter) + Ω(err).ShouldNot(HaveOccurred()) + }) + + Context("running a command", func() { + It("should start the process", func() { + Ω(command.Process).ShouldNot(BeNil()) + }) + + It("should wrap the process's stdout and stderr with gbytes buffers", func(done Done) { + Eventually(session.Out).Should(Say("We've done the impossible, and that makes us mighty")) + Eventually(session.Err).Should(Say("Ah, curse your sudden but inevitable betrayal!")) + defer session.Out.CancelDetects() + + select { + case <-session.Out.Detect("Can we maybe vote on the whole murdering people issue"): + Eventually(session).Should(Exit(0)) + case <-session.Out.Detect("I swear by my pretty floral bonnet, I will end you."): + Eventually(session).Should(Exit(1)) + case <-session.Out.Detect("My work's illegal, but at least it's honest."): + Eventually(session).Should(Exit(2)) + } + + close(done) + }) + + It("should satisfy the gbytes.BufferProvider interface, passing Stdout", func() { + Eventually(session).Should(Say("We've done the impossible, and that makes us mighty")) + Eventually(session).Should(Exit()) + }) + }) + + Describe("providing the exit code", func() { + It("should provide the app's exit code", func() { + Ω(session.ExitCode()).Should(Equal(-1)) + + Eventually(session).Should(Exit()) + Ω(session.ExitCode()).Should(BeNumerically(">=", 0)) + Ω(session.ExitCode()).Should(BeNumerically("<", 3)) + }) + }) + + Describe("wait", func() { + It("should wait till the command exits", func() { + Ω(session.ExitCode()).Should(Equal(-1)) + Ω(session.Wait().ExitCode()).Should(BeNumerically(">=", 0)) + Ω(session.Wait().ExitCode()).Should(BeNumerically("<", 3)) + }) + }) + + Describe("exited", func() { + It("should close when the command exits", func() { + Eventually(session.Exited).Should(BeClosed()) + Ω(session.ExitCode()).ShouldNot(Equal(-1)) + }) + }) + + Describe("kill", func() { + It("should kill the command and wait for it to exit", func() { + session, err := Start(exec.Command("sleep", "10000000"), GinkgoWriter, GinkgoWriter) + Ω(err).ShouldNot(HaveOccurred()) + + session.Kill() + Ω(session).ShouldNot(Exit(), "Should not exit immediately...") + Eventually(session).Should(Exit(128 + 9)) + }) + }) + + Describe("interrupt", func() { + It("should interrupt the command", func() { + session, err := Start(exec.Command("sleep", "10000000"), GinkgoWriter, GinkgoWriter) + Ω(err).ShouldNot(HaveOccurred()) + + session.Interrupt() + Ω(session).ShouldNot(Exit(), "Should not exit immediately...") + Eventually(session).Should(Exit(128 + 2)) + }) + }) + + Describe("terminate", func() { + It("should terminate the command", func() { + session, err := Start(exec.Command("sleep", "10000000"), GinkgoWriter, GinkgoWriter) + Ω(err).ShouldNot(HaveOccurred()) + + session.Terminate() + Ω(session).ShouldNot(Exit(), "Should not exit immediately...") + Eventually(session).Should(Exit(128 + 15)) + }) + }) + + Describe("signal", func() { + It("should send the signal to the command", func() { + session, err := Start(exec.Command("sleep", "10000000"), GinkgoWriter, GinkgoWriter) + Ω(err).ShouldNot(HaveOccurred()) + + session.Signal(syscall.SIGABRT) + Ω(session).ShouldNot(Exit(), "Should not exit 
immediately...") + Eventually(session).Should(Exit(128 + 6)) + }) + }) + + Context("when the command exits", func() { + It("should close the buffers", func() { + Eventually(session).Should(Exit()) + + Ω(session.Out.Closed()).Should(BeTrue()) + Ω(session.Err.Closed()).Should(BeTrue()) + + Ω(session.Out).Should(Say("We've done the impossible, and that makes us mighty")) + }) + + var So = It + + So("this means that eventually should short circuit", func() { + t := time.Now() + failures := InterceptGomegaFailures(func() { + Eventually(session).Should(Say("blah blah blah blah blah")) + }) + Ω(time.Since(t)).Should(BeNumerically("<=", 500*time.Millisecond)) + Ω(failures).Should(HaveLen(1)) + }) + }) + + Context("when wrapping out and err", func() { + BeforeEach(func() { + outWriter = NewBuffer() + errWriter = NewBuffer() + }) + + It("should route to both the provided writers and the gbytes buffers", func() { + Eventually(session.Out).Should(Say("We've done the impossible, and that makes us mighty")) + Eventually(session.Err).Should(Say("Ah, curse your sudden but inevitable betrayal!")) + + Ω(outWriter.Contents()).Should(ContainSubstring("We've done the impossible, and that makes us mighty")) + Ω(errWriter.Contents()).Should(ContainSubstring("Ah, curse your sudden but inevitable betrayal!")) + + Eventually(session).Should(Exit()) + + Ω(outWriter.Contents()).Should(Equal(session.Out.Contents())) + Ω(errWriter.Contents()).Should(Equal(session.Err.Contents())) + }) + }) + + Describe("when the command fails to start", func() { + It("should return an error", func() { + _, err := Start(exec.Command("agklsjdfas"), nil, nil) + Ω(err).Should(HaveOccurred()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/ghttp/handlers.go b/Godeps/_workspace/src/github.com/onsi/gomega/ghttp/handlers.go new file mode 100644 index 0000000000000..8c1668a3626b0 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/ghttp/handlers.go @@ -0,0 +1,205 @@ +package ghttp + +import ( + "encoding/base64" + "encoding/json" + "fmt" + "io/ioutil" + "net/http" + + . "github.com/onsi/gomega" + "github.com/onsi/gomega/types" +) + +//CombineHandler takes variadic list of handlers and produces one handler +//that calls each handler in order. 
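A brief illustrative sketch (editorial; the endpoint and JSON payloads are placeholders) of how a composed handler is registered with a ghttp server, assuming the package is imported as `ghttp`:

```golang
server := ghttp.NewServer()
defer server.Close()

// Each verification handler runs in order, then the response is written.
server.AppendHandlers(ghttp.CombineHandlers(
	ghttp.VerifyRequest("POST", "/sprockets"),
	ghttp.VerifyJSON(`{"name": "Alfalfa"}`),
	ghttp.RespondWith(http.StatusCreated, `{"id": 1}`),
))
```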
+func CombineHandlers(handlers ...http.HandlerFunc) http.HandlerFunc { + return func(w http.ResponseWriter, req *http.Request) { + for _, handler := range handlers { + handler(w, req) + } + } +} + +//VerifyRequest returns a handler that verifies that a request uses the specified method to connect to the specified path +//You may also pass in an optional rawQuery string which is tested against the request's `req.URL.RawQuery` +// +//For path, you may pass in a string, in which case strict equality will be applied +//Alternatively you can pass in a matcher (ContainSubstring("/foo") and MatchRegexp("/foo/[a-f0-9]+") for example) +func VerifyRequest(method string, path interface{}, rawQuery ...string) http.HandlerFunc { + return func(w http.ResponseWriter, req *http.Request) { + Ω(req.Method).Should(Equal(method), "Method mismatch") + switch p := path.(type) { + case types.GomegaMatcher: + Ω(req.URL.Path).Should(p, "Path mismatch") + default: + Ω(req.URL.Path).Should(Equal(path), "Path mismatch") + } + if len(rawQuery) > 0 { + Ω(req.URL.RawQuery).Should(Equal(rawQuery[0]), "RawQuery mismatch") + } + } +} + +//VerifyContentType returns a handler that verifies that a request has a Content-Type header set to the +//specified value +func VerifyContentType(contentType string) http.HandlerFunc { + return func(w http.ResponseWriter, req *http.Request) { + Ω(req.Header.Get("Content-Type")).Should(Equal(contentType)) + } +} + +//VerifyBasicAuth returns a handler that verifies the request contains a BasicAuth Authorization header +//matching the passed in username and password +func VerifyBasicAuth(username string, password string) http.HandlerFunc { + return func(w http.ResponseWriter, req *http.Request) { + auth := req.Header.Get("Authorization") + Ω(auth).ShouldNot(Equal(""), "Authorization header must be specified") + + decoded, err := base64.StdEncoding.DecodeString(auth[6:]) + Ω(err).ShouldNot(HaveOccurred()) + + Ω(string(decoded)).Should(Equal(fmt.Sprintf("%s:%s", username, password)), "Authorization mismatch") + } +} + +//VerifyHeader returns a handler that verifies the request contains the passed in headers. +//The passed in header keys are first canonicalized via http.CanonicalHeaderKey. +// +//The request must contain *all* the passed in headers, but it is allowed to have additional headers +//beyond the passed in set. +func VerifyHeader(header http.Header) http.HandlerFunc { + return func(w http.ResponseWriter, req *http.Request) { + for key, values := range header { + key = http.CanonicalHeaderKey(key) + Ω(req.Header[key]).Should(Equal(values), "Header mismatch for key: %s", key) + } + } +} + +//VerifyHeaderKV returns a handler that verifies the request contains a header matching the passed in key and values +//(recall that a `http.Header` is a mapping from string (key) to []string (values)) +//It is a convenience wrapper around `VerifyHeader` that allows you to avoid having to create an `http.Header` object. +func VerifyHeaderKV(key string, values ...string) http.HandlerFunc { + return VerifyHeader(http.Header{key: values}) +} + +//VerifyJSON returns a handler that verifies that the body of the request is a valid JSON representation +//matching the passed in JSON string. 
It does this using Gomega's MatchJSON method +// +//VerifyJSON also verifies that the request's content type is application/json +func VerifyJSON(expectedJSON string) http.HandlerFunc { + return CombineHandlers( + VerifyContentType("application/json"), + func(w http.ResponseWriter, req *http.Request) { + body, err := ioutil.ReadAll(req.Body) + req.Body.Close() + Ω(err).ShouldNot(HaveOccurred()) + Ω(body).Should(MatchJSON(expectedJSON), "JSON Mismatch") + }, + ) +} + +//VerifyJSONRepresenting is similar to VerifyJSON. Instead of taking a JSON string, however, it +//takes an arbitrary JSON-encodable object and verifies that the requests's body is a JSON representation +//that matches the object +func VerifyJSONRepresenting(object interface{}) http.HandlerFunc { + data, err := json.Marshal(object) + Ω(err).ShouldNot(HaveOccurred()) + return CombineHandlers( + VerifyContentType("application/json"), + VerifyJSON(string(data)), + ) +} + +func copyHeader(src http.Header, dst http.Header) { + for key, value := range src { + dst[key] = value + } +} + +/* +RespondWith returns a handler that responds to a request with the specified status code and body + +Body may be a string or []byte + +Also, RespondWith can be given an optional http.Header. The headers defined therein will be added to the response headers. +*/ +func RespondWith(statusCode int, body interface{}, optionalHeader ...http.Header) http.HandlerFunc { + return func(w http.ResponseWriter, req *http.Request) { + if len(optionalHeader) == 1 { + copyHeader(optionalHeader[0], w.Header()) + } + w.WriteHeader(statusCode) + switch x := body.(type) { + case string: + w.Write([]byte(x)) + case []byte: + w.Write(x) + default: + Ω(body).Should(BeNil(), "Invalid type for body. Should be string or []byte.") + } + } +} + +/* +RespondWithPtr returns a handler that responds to a request with the specified status code and body + +Unlike RespondWith, you pass RepondWithPtr a pointer to the status code and body allowing different tests +to share the same setup but specify different status codes and bodies. + +Also, RespondWithPtr can be given an optional http.Header. The headers defined therein will be added to the response headers. +Since the http.Header can be mutated after the fact you don't need to pass in a pointer. +*/ +func RespondWithPtr(statusCode *int, body interface{}, optionalHeader ...http.Header) http.HandlerFunc { + return func(w http.ResponseWriter, req *http.Request) { + if len(optionalHeader) == 1 { + copyHeader(optionalHeader[0], w.Header()) + } + w.WriteHeader(*statusCode) + if body != nil { + switch x := (body).(type) { + case *string: + w.Write([]byte(*x)) + case *[]byte: + w.Write(*x) + default: + Ω(body).Should(BeNil(), "Invalid type for body. Should be string or []byte.") + } + } + } +} + +/* +RespondWithJSONEncoded returns a handler that responds to a request with the specified status code and a body +containing the JSON-encoding of the passed in object + +Also, RespondWithJSONEncoded can be given an optional http.Header. The headers defined therein will be added to the response headers. +*/ +func RespondWithJSONEncoded(statusCode int, object interface{}, optionalHeader ...http.Header) http.HandlerFunc { + data, err := json.Marshal(object) + Ω(err).ShouldNot(HaveOccurred()) + return RespondWith(statusCode, string(data), optionalHeader...) +} + +/* +RespondWithJSONEncodedPtr behaves like RespondWithJSONEncoded but takes a pointer +to a status code and object. 
+ +This allows different tests to share the same setup but specify different status codes and JSON-encoded +objects. + +Also, RespondWithJSONEncodedPtr can be given an optional http.Header. The headers defined therein will be added to the response headers. +Since the http.Header can be mutated after the fact you don't need to pass in a pointer. +*/ +func RespondWithJSONEncodedPtr(statusCode *int, object *interface{}, optionalHeader ...http.Header) http.HandlerFunc { + return func(w http.ResponseWriter, req *http.Request) { + data, err := json.Marshal(*object) + Ω(err).ShouldNot(HaveOccurred()) + if len(optionalHeader) == 1 { + copyHeader(optionalHeader[0], w.Header()) + } + w.WriteHeader(*statusCode) + w.Write(data) + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/ghttp/test_server.go b/Godeps/_workspace/src/github.com/onsi/gomega/ghttp/test_server.go new file mode 100644 index 0000000000000..4bbb2e41defab --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/ghttp/test_server.go @@ -0,0 +1,334 @@ +/* +Package ghttp supports testing HTTP clients by providing a test server (simply a thin wrapper around httptest's server) that supports +registering multiple handlers. Incoming requests are not routed between the different handlers +- rather it is merely the order of the handlers that matters. The first request is handled by the first +registered handler, the second request by the second handler, etc. + +The intent here is to have each handler *verify* that the incoming request is valid. To accomplish, ghttp +also provides a collection of bite-size handlers that each perform one aspect of request verification. These can +be composed together and registered with a ghttp server. The result is an expressive language for describing +the requests generated by the client under test. + +Here's a simple example, note that the server handler is only defined in one BeforeEach and then modified, as required, by the nested BeforeEaches. 
+A more comprehensive example is available at https://onsi.github.io/gomega/#_testing_http_clients + + var _ = Describe("A Sprockets Client", func() { + var server *ghttp.Server + var client *SprocketClient + BeforeEach(func() { + server = ghttp.NewServer() + client = NewSprocketClient(server.URL(), "skywalker", "tk427") + }) + + AfterEach(func() { + server.Close() + }) + + Describe("fetching sprockets", func() { + var statusCode int + var sprockets []Sprocket + BeforeEach(func() { + statusCode = http.StatusOK + sprockets = []Sprocket{} + server.AppendHandlers(ghttp.CombineHandlers( + ghttp.VerifyRequest("GET", "/sprockets"), + ghttp.VerifyBasicAuth("skywalker", "tk427"), + ghttp.RespondWithJSONEncodedPtr(&statusCode, &sprockets), + )) + }) + + Context("when requesting all sprockets", func() { + Context("when the response is succesful", func() { + BeforeEach(func() { + sprockets = []Sprocket{ + NewSprocket("Alfalfa"), + NewSprocket("Banana"), + } + }) + + It("should return the returned sprockets", func() { + Ω(client.Sprockets()).Should(Equal(sprockets)) + }) + }) + + Context("when the response is missing", func() { + BeforeEach(func() { + statusCode = http.StatusNotFound + }) + + It("should return an empty list of sprockets", func() { + Ω(client.Sprockets()).Should(BeEmpty()) + }) + }) + + Context("when the response fails to authenticate", func() { + BeforeEach(func() { + statusCode = http.StatusUnauthorized + }) + + It("should return an AuthenticationError error", func() { + sprockets, err := client.Sprockets() + Ω(sprockets).Should(BeEmpty()) + Ω(err).Should(MatchError(AuthenticationError)) + }) + }) + + Context("when the response is a server failure", func() { + BeforeEach(func() { + statusCode = http.StatusInternalServerError + }) + + It("should return an InternalError error", func() { + sprockets, err := client.Sprockets() + Ω(sprockets).Should(BeEmpty()) + Ω(err).Should(MatchError(InternalError)) + }) + }) + }) + + Context("when requesting some sprockets", func() { + BeforeEach(func() { + sprockets = []Sprocket{ + NewSprocket("Alfalfa"), + NewSprocket("Banana"), + } + + server.WrapHandler(0, ghttp.VerifyRequest("GET", "/sprockets", "filter=FOOD")) + }) + + It("should make the request with a filter", func() { + Ω(client.Sprockets("food")).Should(Equal(sprockets)) + }) + }) + }) + }) +*/ +package ghttp + +import ( + "io/ioutil" + "net/http" + "net/http/httptest" + "reflect" + "regexp" + "sync" + + . "github.com/onsi/gomega" +) + +func new() *Server { + return &Server{ + AllowUnhandledRequests: false, + UnhandledRequestStatusCode: http.StatusInternalServerError, + writeLock: &sync.Mutex{}, + } +} + +type routedHandler struct { + method string + pathRegexp *regexp.Regexp + path string + handler http.HandlerFunc +} + +// NewServer returns a new `*ghttp.Server` that wraps an `httptest` server. The server is started automatically. +func NewServer() *Server { + s := new() + s.HTTPTestServer = httptest.NewServer(s) + return s +} + +// NewUnstartedServer return a new, unstarted, `*ghttp.Server`. Useful for specifying a custom listener on `server.HTTPTestServer`. +func NewUnstartedServer() *Server { + s := new() + s.HTTPTestServer = httptest.NewUnstartedServer(s) + return s +} + +// NewTLSServer returns a new `*ghttp.Server` that wraps an `httptest` TLS server. The server is started automatically. 
+func NewTLSServer() *Server { + s := new() + s.HTTPTestServer = httptest.NewTLSServer(s) + return s +} + +type Server struct { + //The underlying httptest server + HTTPTestServer *httptest.Server + + //Defaults to false. If set to true, the Server will allow more requests than there are registered handlers. + AllowUnhandledRequests bool + + //The status code returned when receiving an unhandled request. + //Defaults to http.StatusInternalServerError. + //Only applies if AllowUnhandledRequests is true + UnhandledRequestStatusCode int + + receivedRequests []*http.Request + requestHandlers []http.HandlerFunc + routedHandlers []routedHandler + + writeLock *sync.Mutex + calls int +} + +//Start() starts an unstarted ghttp server. It is a catastrophic error to call Start more than once (thanks, httptest). +func (s *Server) Start() { + s.HTTPTestServer.Start() +} + +//URL() returns a url that will hit the server +func (s *Server) URL() string { + return s.HTTPTestServer.URL +} + +//Addr() returns the address on which the server is listening. +func (s *Server) Addr() string { + return s.HTTPTestServer.Listener.Addr().String() +} + +//Close() should be called at the end of each test. It spins down and cleans up the test server. +func (s *Server) Close() { + s.writeLock.Lock() + defer s.writeLock.Unlock() + + server := s.HTTPTestServer + s.HTTPTestServer = nil + server.Close() +} + +//ServeHTTP() makes Server an http.Handler +//When the server receives a request it handles the request in the following order: +// +//1. If the request matches a handler registered with RouteToHandler, that handler is called. +//2. Otherwise, if there are handlers registered via AppendHandlers, those handlers are called in order. +//3. If all registered handlers have been called then: +// a) If AllowUnhandledRequests is true, the request will be handled with response code of UnhandledRequestStatusCode +// b) If AllowUnhandledRequests is false, the request will not be handled and the current test will be marked as failed. +func (s *Server) ServeHTTP(w http.ResponseWriter, req *http.Request) { + s.writeLock.Lock() + defer func() { + recover() + }() + + s.receivedRequests = append(s.receivedRequests, req) + if routedHandler, ok := s.handlerForRoute(req.Method, req.URL.Path); ok { + s.writeLock.Unlock() + routedHandler(w, req) + } else if s.calls < len(s.requestHandlers) { + h := s.requestHandlers[s.calls] + s.calls++ + s.writeLock.Unlock() + h(w, req) + } else { + s.writeLock.Unlock() + if s.AllowUnhandledRequests { + ioutil.ReadAll(req.Body) + req.Body.Close() + w.WriteHeader(s.UnhandledRequestStatusCode) + } else { + Ω(req).Should(BeNil(), "Received Unhandled Request") + } + } +} + +//ReceivedRequests is an array containing all requests received by the server (both handled and unhandled requests) +func (s *Server) ReceivedRequests() []*http.Request { + s.writeLock.Lock() + defer s.writeLock.Unlock() + + return s.receivedRequests +} + +//RouteToHandler can be used to register handlers that will always handle requests that match +//the passed in method and path. +// +//The path may be either a string object or a *regexp.Regexp. 
+func (s *Server) RouteToHandler(method string, path interface{}, handler http.HandlerFunc) { + s.writeLock.Lock() + defer s.writeLock.Unlock() + + rh := routedHandler{ + method: method, + handler: handler, + } + + switch p := path.(type) { + case *regexp.Regexp: + rh.pathRegexp = p + case string: + rh.path = p + default: + panic("path must be a string or a regular expression") + } + + for i, existingRH := range s.routedHandlers { + if existingRH.method == method && + reflect.DeepEqual(existingRH.pathRegexp, rh.pathRegexp) && + existingRH.path == rh.path { + s.routedHandlers[i] = rh + return + } + } + s.routedHandlers = append(s.routedHandlers, rh) +} + +func (s *Server) handlerForRoute(method string, path string) (http.HandlerFunc, bool) { + for _, rh := range s.routedHandlers { + if rh.method == method { + if rh.pathRegexp != nil { + if rh.pathRegexp.Match([]byte(path)) { + return rh.handler, true + } + } else if rh.path == path { + return rh.handler, true + } + } + } + + return nil, false +} + +//AppendHandlers will appends http.HandlerFuncs to the server's list of registered handlers. The first incoming request is handled by the first handler, the second by the second, etc... +func (s *Server) AppendHandlers(handlers ...http.HandlerFunc) { + s.writeLock.Lock() + defer s.writeLock.Unlock() + + s.requestHandlers = append(s.requestHandlers, handlers...) +} + +//SetHandler overrides the registered handler at the passed in index with the passed in handler +//This is useful, for example, when a server has been set up in a shared context, but must be tweaked +//for a particular test. +func (s *Server) SetHandler(index int, handler http.HandlerFunc) { + s.writeLock.Lock() + defer s.writeLock.Unlock() + + s.requestHandlers[index] = handler +} + +//GetHandler returns the handler registered at the passed in index. +func (s *Server) GetHandler(index int) http.HandlerFunc { + s.writeLock.Lock() + defer s.writeLock.Unlock() + + return s.requestHandlers[index] +} + +//WrapHandler combines the passed in handler with the handler registered at the passed in index. +//This is useful, for example, when a server has been set up in a shared context but must be tweaked +//for a particular test. +// +//If the currently registered handler is A, and the new passed in handler is B then +//WrapHandler will generate a new handler that first calls A, then calls B, and assign it to index +func (s *Server) WrapHandler(index int, handler http.HandlerFunc) { + existingHandler := s.GetHandler(index) + s.SetHandler(index, CombineHandlers(existingHandler, handler)) +} + +func (s *Server) CloseClientConnections() { + s.writeLock.Lock() + defer s.writeLock.Unlock() + + s.HTTPTestServer.CloseClientConnections() +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/ghttp/test_server_suite_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/ghttp/test_server_suite_test.go new file mode 100644 index 0000000000000..7c1236082750f --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/ghttp/test_server_suite_test.go @@ -0,0 +1,13 @@ +package ghttp_test + +import ( + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" + + "testing" +) + +func TestGHTTP(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "GHTTP Suite") +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/ghttp/test_server_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/ghttp/test_server_test.go new file mode 100644 index 0000000000000..793ca4a488d4e --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/ghttp/test_server_test.go @@ -0,0 +1,591 @@ +package ghttp_test + +import ( + "bytes" + "io/ioutil" + "net/http" + "regexp" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . "github.com/onsi/gomega/ghttp" +) + +var _ = Describe("TestServer", func() { + var ( + resp *http.Response + err error + s *Server + ) + + BeforeEach(func() { + s = NewServer() + }) + + AfterEach(func() { + s.Close() + }) + + Describe("closing client connections", func() { + It("closes", func() { + s.AppendHandlers( + func(w http.ResponseWriter, req *http.Request) { + w.Write([]byte("hello")) + }, + func(w http.ResponseWriter, req *http.Request) { + s.CloseClientConnections() + }, + ) + + resp, err := http.Get(s.URL()) + Ω(err).ShouldNot(HaveOccurred()) + Ω(resp.StatusCode).Should(Equal(200)) + + body, err := ioutil.ReadAll(resp.Body) + resp.Body.Close() + Ω(err).ShouldNot(HaveOccurred()) + Ω(body).Should(Equal([]byte("hello"))) + + resp, err = http.Get(s.URL()) + Ω(err).Should(HaveOccurred()) + Ω(resp).Should(BeNil()) + }) + }) + + Describe("allowing unhandled requests", func() { + Context("when true", func() { + BeforeEach(func() { + s.AllowUnhandledRequests = true + s.UnhandledRequestStatusCode = http.StatusForbidden + resp, err = http.Get(s.URL() + "/foo") + Ω(err).ShouldNot(HaveOccurred()) + }) + + It("should allow unhandled requests and respond with the passed in status code", func() { + Ω(err).ShouldNot(HaveOccurred()) + Ω(resp.StatusCode).Should(Equal(http.StatusForbidden)) + + data, err := ioutil.ReadAll(resp.Body) + Ω(err).ShouldNot(HaveOccurred()) + Ω(data).Should(BeEmpty()) + }) + + It("should record the requests", func() { + Ω(s.ReceivedRequests()).Should(HaveLen(1)) + Ω(s.ReceivedRequests()[0].URL.Path).Should(Equal("/foo")) + }) + }) + + Context("when false", func() { + It("should fail when attempting a request", func() { + failures := InterceptGomegaFailures(func() { + http.Get(s.URL() + "/foo") + }) + + Ω(failures[0]).Should(ContainSubstring("Received Unhandled Request")) + }) + }) + }) + + Describe("Managing Handlers", func() { + var called []string + BeforeEach(func() { + called = []string{} + s.RouteToHandler("GET", "/routed", func(w http.ResponseWriter, req *http.Request) { + called = append(called, "r1") + }) + s.RouteToHandler("POST", regexp.MustCompile(`/routed\d`), func(w http.ResponseWriter, req *http.Request) { + called = append(called, "r2") + }) + s.AppendHandlers(func(w http.ResponseWriter, req *http.Request) { + called = append(called, "A") + }, func(w http.ResponseWriter, req *http.Request) { + called = append(called, "B") + }) + }) + + It("should prefer routed handlers if there is a match", func() { + http.Get(s.URL() + "/routed") + http.Post(s.URL()+"/routed7", "application/json", nil) + http.Get(s.URL() + "/foo") + http.Get(s.URL() + "/routed") + http.Post(s.URL()+"/routed9", "application/json", nil) + http.Get(s.URL() + "/bar") + + failures := InterceptGomegaFailures(func() { + http.Get(s.URL() + "/foo") + http.Get(s.URL() + "/routed/not/a/match") + http.Get(s.URL() + "/routed7") + http.Post(s.URL()+"/routed", "application/json", nil) + }) + + 
Ω(failures[0]).Should(ContainSubstring("Received Unhandled Request")) + Ω(failures).Should(HaveLen(4)) + + http.Post(s.URL()+"/routed3", "application/json", nil) + + Ω(called).Should(Equal([]string{"r1", "r2", "A", "r1", "r2", "B", "r2"})) + }) + + It("should override routed handlers when reregistered", func() { + s.RouteToHandler("GET", "/routed", func(w http.ResponseWriter, req *http.Request) { + called = append(called, "r3") + }) + s.RouteToHandler("POST", regexp.MustCompile(`/routed\d`), func(w http.ResponseWriter, req *http.Request) { + called = append(called, "r4") + }) + + http.Get(s.URL() + "/routed") + http.Post(s.URL()+"/routed7", "application/json", nil) + + Ω(called).Should(Equal([]string{"r3", "r4"})) + }) + + It("should call the appended handlers, in order, as requests come in", func() { + http.Get(s.URL() + "/foo") + Ω(called).Should(Equal([]string{"A"})) + + http.Get(s.URL() + "/foo") + Ω(called).Should(Equal([]string{"A", "B"})) + + failures := InterceptGomegaFailures(func() { + http.Get(s.URL() + "/foo") + }) + + Ω(failures[0]).Should(ContainSubstring("Received Unhandled Request")) + }) + + Describe("Overwriting an existing handler", func() { + BeforeEach(func() { + s.SetHandler(0, func(w http.ResponseWriter, req *http.Request) { + called = append(called, "C") + }) + }) + + It("should override the specified handler", func() { + http.Get(s.URL() + "/foo") + http.Get(s.URL() + "/foo") + Ω(called).Should(Equal([]string{"C", "B"})) + }) + }) + + Describe("Getting an existing handler", func() { + It("should return the handler func", func() { + s.GetHandler(1)(nil, nil) + Ω(called).Should(Equal([]string{"B"})) + }) + }) + + Describe("Wrapping an existing handler", func() { + BeforeEach(func() { + s.WrapHandler(0, func(w http.ResponseWriter, req *http.Request) { + called = append(called, "C") + }) + }) + + It("should wrap the existing handler in a new handler", func() { + http.Get(s.URL() + "/foo") + http.Get(s.URL() + "/foo") + Ω(called).Should(Equal([]string{"A", "C", "B"})) + }) + }) + }) + + Describe("Request Handlers", func() { + Describe("VerifyRequest", func() { + BeforeEach(func() { + s.AppendHandlers(VerifyRequest("GET", "/foo")) + }) + + It("should verify the method, path", func() { + resp, err = http.Get(s.URL() + "/foo?baz=bar") + Ω(err).ShouldNot(HaveOccurred()) + }) + + It("should verify the method, path", func() { + failures := InterceptGomegaFailures(func() { + http.Get(s.URL() + "/foo2") + }) + Ω(failures).Should(HaveLen(1)) + }) + + It("should verify the method, path", func() { + failures := InterceptGomegaFailures(func() { + http.Post(s.URL()+"/foo", "application/json", nil) + }) + Ω(failures).Should(HaveLen(1)) + }) + + Context("when passed a rawQuery", func() { + It("should also be possible to verify the rawQuery", func() { + s.SetHandler(0, VerifyRequest("GET", "/foo", "baz=bar")) + resp, err = http.Get(s.URL() + "/foo?baz=bar") + Ω(err).ShouldNot(HaveOccurred()) + }) + }) + + Context("when passed a matcher for path", func() { + It("should apply the matcher", func() { + s.SetHandler(0, VerifyRequest("GET", MatchRegexp(`/foo/[a-f]*/3`))) + resp, err = http.Get(s.URL() + "/foo/abcdefa/3") + Ω(err).ShouldNot(HaveOccurred()) + }) + }) + }) + + Describe("VerifyContentType", func() { + BeforeEach(func() { + s.AppendHandlers(CombineHandlers( + VerifyRequest("GET", "/foo"), + VerifyContentType("application/octet-stream"), + )) + }) + + It("should verify the content type", func() { + req, err := http.NewRequest("GET", s.URL()+"/foo", nil) + 
Ω(err).ShouldNot(HaveOccurred()) + req.Header.Set("Content-Type", "application/octet-stream") + + resp, err = http.DefaultClient.Do(req) + Ω(err).ShouldNot(HaveOccurred()) + }) + + It("should verify the content type", func() { + req, err := http.NewRequest("GET", s.URL()+"/foo", nil) + Ω(err).ShouldNot(HaveOccurred()) + req.Header.Set("Content-Type", "application/json") + + failures := InterceptGomegaFailures(func() { + http.DefaultClient.Do(req) + }) + Ω(failures).Should(HaveLen(1)) + }) + }) + + Describe("Verify BasicAuth", func() { + BeforeEach(func() { + s.AppendHandlers(CombineHandlers( + VerifyRequest("GET", "/foo"), + VerifyBasicAuth("bob", "password"), + )) + }) + + It("should verify basic auth", func() { + req, err := http.NewRequest("GET", s.URL()+"/foo", nil) + Ω(err).ShouldNot(HaveOccurred()) + req.SetBasicAuth("bob", "password") + + resp, err = http.DefaultClient.Do(req) + Ω(err).ShouldNot(HaveOccurred()) + }) + + It("should verify basic auth", func() { + req, err := http.NewRequest("GET", s.URL()+"/foo", nil) + Ω(err).ShouldNot(HaveOccurred()) + req.SetBasicAuth("bob", "bassword") + + failures := InterceptGomegaFailures(func() { + http.DefaultClient.Do(req) + }) + Ω(failures).Should(HaveLen(1)) + }) + + It("should require basic auth header", func() { + req, err := http.NewRequest("GET", s.URL()+"/foo", nil) + Ω(err).ShouldNot(HaveOccurred()) + + failures := InterceptGomegaFailures(func() { + http.DefaultClient.Do(req) + }) + Ω(failures).Should(HaveLen(1)) + }) + }) + + Describe("VerifyHeader", func() { + BeforeEach(func() { + s.AppendHandlers(CombineHandlers( + VerifyRequest("GET", "/foo"), + VerifyHeader(http.Header{ + "accept": []string{"jpeg", "png"}, + "cache-control": []string{"omicron"}, + "Return-Path": []string{"hobbiton"}, + }), + )) + }) + + It("should verify the headers", func() { + req, err := http.NewRequest("GET", s.URL()+"/foo", nil) + Ω(err).ShouldNot(HaveOccurred()) + req.Header.Add("Accept", "jpeg") + req.Header.Add("Accept", "png") + req.Header.Add("Cache-Control", "omicron") + req.Header.Add("return-path", "hobbiton") + + resp, err = http.DefaultClient.Do(req) + Ω(err).ShouldNot(HaveOccurred()) + }) + + It("should verify the headers", func() { + req, err := http.NewRequest("GET", s.URL()+"/foo", nil) + Ω(err).ShouldNot(HaveOccurred()) + req.Header.Add("Schmaccept", "jpeg") + req.Header.Add("Schmaccept", "png") + req.Header.Add("Cache-Control", "omicron") + req.Header.Add("return-path", "hobbiton") + + failures := InterceptGomegaFailures(func() { + http.DefaultClient.Do(req) + }) + Ω(failures).Should(HaveLen(1)) + }) + }) + + Describe("VerifyHeaderKV", func() { + BeforeEach(func() { + s.AppendHandlers(CombineHandlers( + VerifyRequest("GET", "/foo"), + VerifyHeaderKV("accept", "jpeg", "png"), + VerifyHeaderKV("cache-control", "omicron"), + VerifyHeaderKV("Return-Path", "hobbiton"), + )) + }) + + It("should verify the headers", func() { + req, err := http.NewRequest("GET", s.URL()+"/foo", nil) + Ω(err).ShouldNot(HaveOccurred()) + req.Header.Add("Accept", "jpeg") + req.Header.Add("Accept", "png") + req.Header.Add("Cache-Control", "omicron") + req.Header.Add("return-path", "hobbiton") + + resp, err = http.DefaultClient.Do(req) + Ω(err).ShouldNot(HaveOccurred()) + }) + + It("should verify the headers", func() { + req, err := http.NewRequest("GET", s.URL()+"/foo", nil) + Ω(err).ShouldNot(HaveOccurred()) + req.Header.Add("Accept", "jpeg") + req.Header.Add("Cache-Control", "omicron") + req.Header.Add("return-path", "hobbiton") + + failures := 
InterceptGomegaFailures(func() { + http.DefaultClient.Do(req) + }) + Ω(failures).Should(HaveLen(1)) + }) + }) + + Describe("VerifyJSON", func() { + BeforeEach(func() { + s.AppendHandlers(CombineHandlers( + VerifyRequest("POST", "/foo"), + VerifyJSON(`{"a":3, "b":2}`), + )) + }) + + It("should verify the json body and the content type", func() { + resp, err = http.Post(s.URL()+"/foo", "application/json", bytes.NewReader([]byte(`{"b":2, "a":3}`))) + Ω(err).ShouldNot(HaveOccurred()) + }) + + It("should verify the json body and the content type", func() { + failures := InterceptGomegaFailures(func() { + http.Post(s.URL()+"/foo", "application/json", bytes.NewReader([]byte(`{"b":2, "a":4}`))) + }) + Ω(failures).Should(HaveLen(1)) + }) + + It("should verify the json body and the content type", func() { + failures := InterceptGomegaFailures(func() { + http.Post(s.URL()+"/foo", "application/not-json", bytes.NewReader([]byte(`{"b":2, "a":3}`))) + }) + Ω(failures).Should(HaveLen(1)) + }) + }) + + Describe("VerifyJSONRepresenting", func() { + BeforeEach(func() { + s.AppendHandlers(CombineHandlers( + VerifyRequest("POST", "/foo"), + VerifyJSONRepresenting([]int{1, 3, 5}), + )) + }) + + It("should verify the json body and the content type", func() { + resp, err = http.Post(s.URL()+"/foo", "application/json", bytes.NewReader([]byte(`[1,3,5]`))) + Ω(err).ShouldNot(HaveOccurred()) + }) + + It("should verify the json body and the content type", func() { + failures := InterceptGomegaFailures(func() { + http.Post(s.URL()+"/foo", "application/json", bytes.NewReader([]byte(`[1,3]`))) + }) + Ω(failures).Should(HaveLen(1)) + }) + }) + + Describe("RespondWith", func() { + Context("without headers", func() { + BeforeEach(func() { + s.AppendHandlers(CombineHandlers( + VerifyRequest("POST", "/foo"), + RespondWith(http.StatusCreated, "sweet"), + ), CombineHandlers( + VerifyRequest("POST", "/foo"), + RespondWith(http.StatusOK, []byte("sour")), + )) + }) + + It("should return the response", func() { + resp, err = http.Post(s.URL()+"/foo", "application/json", nil) + Ω(err).ShouldNot(HaveOccurred()) + + Ω(resp.StatusCode).Should(Equal(http.StatusCreated)) + + body, err := ioutil.ReadAll(resp.Body) + Ω(err).ShouldNot(HaveOccurred()) + Ω(body).Should(Equal([]byte("sweet"))) + + resp, err = http.Post(s.URL()+"/foo", "application/json", nil) + Ω(err).ShouldNot(HaveOccurred()) + + Ω(resp.StatusCode).Should(Equal(http.StatusOK)) + + body, err = ioutil.ReadAll(resp.Body) + Ω(err).ShouldNot(HaveOccurred()) + Ω(body).Should(Equal([]byte("sour"))) + }) + }) + + Context("with headers", func() { + BeforeEach(func() { + s.AppendHandlers(CombineHandlers( + VerifyRequest("POST", "/foo"), + RespondWith(http.StatusCreated, "sweet", http.Header{"X-Custom-Header": []string{"my header"}}), + )) + }) + + It("should return the headers too", func() { + resp, err = http.Post(s.URL()+"/foo", "application/json", nil) + Ω(err).ShouldNot(HaveOccurred()) + + Ω(resp.StatusCode).Should(Equal(http.StatusCreated)) + Ω(ioutil.ReadAll(resp.Body)).Should(Equal([]byte("sweet"))) + Ω(resp.Header.Get("X-Custom-Header")).Should(Equal("my header")) + }) + }) + }) + + Describe("RespondWithPtr", func() { + var code int + var byteBody []byte + var stringBody string + BeforeEach(func() { + code = http.StatusOK + byteBody = []byte("sweet") + stringBody = "sour" + + s.AppendHandlers(CombineHandlers( + VerifyRequest("POST", "/foo"), + RespondWithPtr(&code, &byteBody), + ), CombineHandlers( + VerifyRequest("POST", "/foo"), + RespondWithPtr(&code, &stringBody), + )) + }) 
+ + It("should return the response", func() { + code = http.StatusCreated + byteBody = []byte("tasty") + stringBody = "treat" + + resp, err = http.Post(s.URL()+"/foo", "application/json", nil) + Ω(err).ShouldNot(HaveOccurred()) + + Ω(resp.StatusCode).Should(Equal(http.StatusCreated)) + + body, err := ioutil.ReadAll(resp.Body) + Ω(err).ShouldNot(HaveOccurred()) + Ω(body).Should(Equal([]byte("tasty"))) + + resp, err = http.Post(s.URL()+"/foo", "application/json", nil) + Ω(err).ShouldNot(HaveOccurred()) + + Ω(resp.StatusCode).Should(Equal(http.StatusCreated)) + + body, err = ioutil.ReadAll(resp.Body) + Ω(err).ShouldNot(HaveOccurred()) + Ω(body).Should(Equal([]byte("treat"))) + }) + + Context("when passed a nil body", func() { + BeforeEach(func() { + s.SetHandler(0, CombineHandlers( + VerifyRequest("POST", "/foo"), + RespondWithPtr(&code, nil), + )) + }) + + It("should return an empty body and not explode", func() { + resp, err = http.Post(s.URL()+"/foo", "application/json", nil) + + Ω(err).ShouldNot(HaveOccurred()) + Ω(resp.StatusCode).Should(Equal(http.StatusOK)) + body, err := ioutil.ReadAll(resp.Body) + Ω(err).ShouldNot(HaveOccurred()) + Ω(body).Should(BeEmpty()) + + Ω(s.ReceivedRequests()).Should(HaveLen(1)) + }) + }) + }) + + Describe("RespondWithJSON", func() { + BeforeEach(func() { + s.AppendHandlers(CombineHandlers( + VerifyRequest("POST", "/foo"), + RespondWithJSONEncoded(http.StatusCreated, []int{1, 2, 3}), + )) + }) + + It("should return the response", func() { + resp, err = http.Post(s.URL()+"/foo", "application/json", nil) + Ω(err).ShouldNot(HaveOccurred()) + + Ω(resp.StatusCode).Should(Equal(http.StatusCreated)) + + body, err := ioutil.ReadAll(resp.Body) + Ω(err).ShouldNot(HaveOccurred()) + Ω(body).Should(MatchJSON("[1,2,3]")) + }) + }) + + Describe("RespondWithJSONPtr", func() { + var code int + var object interface{} + BeforeEach(func() { + code = http.StatusOK + object = []int{1, 2, 3} + + s.AppendHandlers(CombineHandlers( + VerifyRequest("POST", "/foo"), + RespondWithJSONEncodedPtr(&code, &object), + )) + }) + + It("should return the response", func() { + code = http.StatusCreated + object = []int{4, 5, 6} + resp, err = http.Post(s.URL()+"/foo", "application/json", nil) + Ω(err).ShouldNot(HaveOccurred()) + + Ω(resp.StatusCode).Should(Equal(http.StatusCreated)) + + body, err := ioutil.ReadAll(resp.Body) + Ω(err).ShouldNot(HaveOccurred()) + Ω(body).Should(MatchJSON("[4,5,6]")) + }) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/gomega_dsl.go b/Godeps/_workspace/src/github.com/onsi/gomega/gomega_dsl.go new file mode 100644 index 0000000000000..78bd188c07299 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/gomega_dsl.go @@ -0,0 +1,335 @@ +/* +Gomega is the Ginkgo BDD-style testing framework's preferred matcher library. + +The godoc documentation describes Gomega's API. More comprehensive documentation (with examples!) is available at http://onsi.github.io/gomega/ + +Gomega on Github: http://github.com/onsi/gomega + +Learn more about Ginkgo online: http://onsi.github.io/ginkgo + +Ginkgo on Github: http://github.com/onsi/ginkgo + +Gomega is MIT-Licensed +*/ +package gomega + +import ( + "fmt" + "reflect" + "time" + + "github.com/onsi/gomega/internal/assertion" + "github.com/onsi/gomega/internal/asyncassertion" + "github.com/onsi/gomega/internal/testingtsupport" + "github.com/onsi/gomega/types" +) + +const GOMEGA_VERSION = "1.0" + +const nilFailHandlerPanic = `You are trying to make an assertion, but Gomega's fail handler is nil. 
+If you're using Ginkgo then you probably forgot to put your assertion in an It(). +Alternatively, you may have forgotten to register a fail handler with RegisterFailHandler() or RegisterTestingT(). +` + +var globalFailHandler types.GomegaFailHandler + +var defaultEventuallyTimeout = time.Second +var defaultEventuallyPollingInterval = 10 * time.Millisecond +var defaultConsistentlyDuration = 100 * time.Millisecond +var defaultConsistentlyPollingInterval = 10 * time.Millisecond + +//RegisterFailHandler connects Ginkgo to Gomega. When a matcher fails +//the fail handler passed into RegisterFailHandler is called. +func RegisterFailHandler(handler types.GomegaFailHandler) { + globalFailHandler = handler +} + +//RegisterTestingT connects Gomega to Golang's XUnit style +//Testing.T tests. You'll need to call this at the top of each XUnit style test: +// +// func TestFarmHasCow(t *testing.T) { +// RegisterTestingT(t) +// +// f := farm.New([]string{"Cow", "Horse"}) +// Expect(f.HasCow()).To(BeTrue(), "Farm should have cow") +// } +// +// Note that this *testing.T is registered *globally* by Gomega (this is why you don't have to +// pass `t` down to the matcher itself). This means that you cannot run the XUnit style tests +// in parallel as the global fail handler cannot point to more than one testing.T at a time. +// +// (As an aside: Ginkgo gets around this limitation by running parallel tests in different *processes*). +func RegisterTestingT(t types.GomegaTestingT) { + RegisterFailHandler(testingtsupport.BuildTestingTGomegaFailHandler(t)) +} + +//InterceptGomegaHandlers runs a given callback and returns an array of +//failure messages generated by any Gomega assertions within the callback. +// +//This is accomplished by temporarily replacing the *global* fail handler +//with a fail handler that simply annotates failures. The original fail handler +//is reset when InterceptGomegaFailures returns. +// +//This is most useful when testing custom matchers, but can also be used to check +//on a value using a Gomega assertion without causing a test failure. +func InterceptGomegaFailures(f func()) []string { + originalHandler := globalFailHandler + failures := []string{} + RegisterFailHandler(func(message string, callerSkip ...int) { + failures = append(failures, message) + }) + f() + RegisterFailHandler(originalHandler) + return failures +} + +//Ω wraps an actual value allowing assertions to be made on it: +// Ω("foo").Should(Equal("foo")) +// +//If Ω is passed more than one argument it will pass the *first* argument to the matcher. +//All subsequent arguments will be required to be nil/zero. +// +//This is convenient if you want to make an assertion on a method/function that returns +//a value and an error - a common patter in Go. +// +//For example, given a function with signature: +// func MyAmazingThing() (int, error) +// +//Then: +// Ω(MyAmazingThing()).Should(Equal(3)) +//Will succeed only if `MyAmazingThing()` returns `(3, nil)` +// +//Ω and Expect are identical +func Ω(actual interface{}, extra ...interface{}) GomegaAssertion { + return ExpectWithOffset(0, actual, extra...) +} + +//Expect wraps an actual value allowing assertions to be made on it: +// Expect("foo").To(Equal("foo")) +// +//If Expect is passed more than one argument it will pass the *first* argument to the matcher. +//All subsequent arguments will be required to be nil/zero. +// +//This is convenient if you want to make an assertion on a method/function that returns +//a value and an error - a common patter in Go. 
+// +//For example, given a function with signature: +// func MyAmazingThing() (int, error) +// +//Then: +// Expect(MyAmazingThing()).Should(Equal(3)) +//Will succeed only if `MyAmazingThing()` returns `(3, nil)` +// +//Expect and Ω are identical +func Expect(actual interface{}, extra ...interface{}) GomegaAssertion { + return ExpectWithOffset(0, actual, extra...) +} + +//ExpectWithOffset wraps an actual value allowing assertions to be made on it: +// ExpectWithOffset(1, "foo").To(Equal("foo")) +// +//Unlike `Expect` and `Ω`, `ExpectWithOffset` takes an additional integer argument +//this is used to modify the call-stack offset when computing line numbers. +// +//This is most useful in helper functions that make assertions. If you want Gomega's +//error message to refer to the calling line in the test (as opposed to the line in the helper function) +//set the first argument of `ExpectWithOffset` appropriately. +func ExpectWithOffset(offset int, actual interface{}, extra ...interface{}) GomegaAssertion { + if globalFailHandler == nil { + panic(nilFailHandlerPanic) + } + return assertion.New(actual, globalFailHandler, offset, extra...) +} + +//Eventually wraps an actual value allowing assertions to be made on it. +//The assertion is tried periodically until it passes or a timeout occurs. +// +//Both the timeout and polling interval are configurable as optional arguments: +//The first optional argument is the timeout +//The second optional argument is the polling interval +// +//Both intervals can either be specified as time.Duration, parsable duration strings or as floats/integers. In the +//last case they are interpreted as seconds. +// +//If Eventually is passed an actual that is a function taking no arguments and returning at least one value, +//then Eventually will call the function periodically and try the matcher against the function's first return value. +// +//Example: +// +// Eventually(func() int { +// return thingImPolling.Count() +// }).Should(BeNumerically(">=", 17)) +// +//Note that this example could be rewritten: +// +// Eventually(thingImPolling.Count).Should(BeNumerically(">=", 17)) +// +//If the function returns more than one value, then Eventually will pass the first value to the matcher and +//assert that all other values are nil/zero. +//This allows you to pass Eventually a function that returns a value and an error - a common pattern in Go. +// +//For example, consider a method that returns a value and an error: +// func FetchFromDB() (string, error) +// +//Then +// Eventually(FetchFromDB).Should(Equal("hasselhoff")) +// +//Will pass only if the the returned error is nil and the returned string passes the matcher. +// +//Eventually's default timeout is 1 second, and its default polling interval is 10ms +func Eventually(actual interface{}, intervals ...interface{}) GomegaAsyncAssertion { + return EventuallyWithOffset(0, actual, intervals...) +} + +//EventuallyWithOffset operates like Eventually but takes an additional +//initial argument to indicate an offset in the call stack. This is useful when building helper +//functions that contain matchers. To learn more, read about `ExpectWithOffset`. 
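Editorial aside (not part of the vendored Gomega sources): the `Eventually` behaviour documented above (polling a no-argument function, requiring any trailing return values such as an error to stay nil/zero, and accepting durations, parsable duration strings, or bare numbers of seconds as intervals) reads roughly like the sketch below. The `counterStore` type, its `FetchCount` method, and the chosen intervals are hypothetical.

```golang
package example_test

import (
	"testing"
	"time"

	. "github.com/onsi/gomega"
)

// counterStore stands in for some component whose state changes asynchronously.
type counterStore struct{ n int }

// FetchCount has the (value, error) shape Eventually understands: the first
// return value is handed to the matcher, the error is required to be nil.
func (c *counterStore) FetchCount() (int, error) {
	c.n++
	return c.n, nil
}

func TestEventuallyPollsAFunction(t *testing.T) {
	RegisterTestingT(t)
	store := &counterStore{}

	// Timeout of 200ms, polling every 10ms; intervals may be time.Durations,
	// parsable duration strings, or plain numbers interpreted as seconds.
	Eventually(store.FetchCount, 200*time.Millisecond, "10ms").Should(BeNumerically(">=", 5))
}
```

Passing the method value (`store.FetchCount`) rather than its result is what lets Gomega invoke it repeatedly until the matcher is satisfied or the timeout elapses.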
+func EventuallyWithOffset(offset int, actual interface{}, intervals ...interface{}) GomegaAsyncAssertion {
+	if globalFailHandler == nil {
+		panic(nilFailHandlerPanic)
+	}
+	timeoutInterval := defaultEventuallyTimeout
+	pollingInterval := defaultEventuallyPollingInterval
+	if len(intervals) > 0 {
+		timeoutInterval = toDuration(intervals[0])
+	}
+	if len(intervals) > 1 {
+		pollingInterval = toDuration(intervals[1])
+	}
+	return asyncassertion.New(asyncassertion.AsyncAssertionTypeEventually, actual, globalFailHandler, timeoutInterval, pollingInterval, offset)
+}
+
+//Consistently wraps an actual value allowing assertions to be made on it.
+//The assertion is tried periodically and is required to pass for a period of time.
+//
+//Both the total time and polling interval are configurable as optional arguments:
+//The first optional argument is the duration that Consistently will run for
+//The second optional argument is the polling interval
+//
+//Both intervals can either be specified as time.Duration, parsable duration strings or as floats/integers. In the
+//last case they are interpreted as seconds.
+//
+//If Consistently is passed an actual that is a function taking no arguments and returning at least one value,
+//then Consistently will call the function periodically and try the matcher against the function's first return value.
+//
+//If the function returns more than one value, then Consistently will pass the first value to the matcher and
+//assert that all other values are nil/zero.
+//This allows you to pass Consistently a function that returns a value and an error - a common pattern in Go.
+//
+//Consistently is useful in cases where you want to assert that something *does not happen* over a period of time.
+//For example, you want to assert that a goroutine does *not* send data down a channel. In this case, you could:
+//
+//	Consistently(channel).ShouldNot(Receive())
+//
+//Consistently's default duration is 100ms, and its default polling interval is 10ms
+func Consistently(actual interface{}, intervals ...interface{}) GomegaAsyncAssertion {
+	return ConsistentlyWithOffset(0, actual, intervals...)
+}
+
+//ConsistentlyWithOffset operates like Consistently but takes an additional
+//initial argument to indicate an offset in the call stack. This is useful when building helper
+//functions that contain matchers. To learn more, read about `ExpectWithOffset`.
+func ConsistentlyWithOffset(offset int, actual interface{}, intervals ...interface{}) GomegaAsyncAssertion {
+	if globalFailHandler == nil {
+		panic(nilFailHandlerPanic)
+	}
+	timeoutInterval := defaultConsistentlyDuration
+	pollingInterval := defaultConsistentlyPollingInterval
+	if len(intervals) > 0 {
+		timeoutInterval = toDuration(intervals[0])
+	}
+	if len(intervals) > 1 {
+		pollingInterval = toDuration(intervals[1])
+	}
+	return asyncassertion.New(asyncassertion.AsyncAssertionTypeConsistently, actual, globalFailHandler, timeoutInterval, pollingInterval, offset)
+}
+
+//Set the default timeout duration for Eventually. Eventually will repeatedly poll your condition until it succeeds, or until this timeout elapses.
+func SetDefaultEventuallyTimeout(t time.Duration) {
+	defaultEventuallyTimeout = t
+}
+
+//Set the default polling interval for Eventually.
+func SetDefaultEventuallyPollingInterval(t time.Duration) {
+	defaultEventuallyPollingInterval = t
+}
+
+//Set the default duration for Consistently. Consistently will verify that your condition is satisfied for this long.
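Editorial aside, again outside the vendored diff: `Consistently` is the mirror image, guarding against something that should *not* happen within the window. A small hypothetical sketch of the channel/`Receive()` pattern mentioned in the comment above:

```golang
package example_test

import (
	"testing"
	"time"

	. "github.com/onsi/gomega"
)

func TestNothingArrivesOnTheChannel(t *testing.T) {
	RegisterTestingT(t)

	events := make(chan string, 1)
	go func() {
		time.Sleep(500 * time.Millisecond) // deliberately outside the window below
		events <- "late"
	}()

	// For 200ms, polling every 20ms, assert that the goroutine stays quiet.
	Consistently(events, "200ms", 20*time.Millisecond).ShouldNot(Receive())
}
```

When the same intervals recur throughout a suite, the `SetDefault*` helpers shown in this file are the tidier option: they change the package-wide defaults instead of repeating the arguments at every call site.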
+func SetDefaultConsistentlyDuration(t time.Duration) { + defaultConsistentlyDuration = t +} + +//Set the default polling interval for Consistently. +func SetDefaultConsistentlyPollingInterval(t time.Duration) { + defaultConsistentlyPollingInterval = t +} + +//GomegaAsyncAssertion is returned by Eventually and Consistently and polls the actual value passed into Eventually against +//the matcher passed to the Should and ShouldNot methods. +// +//Both Should and ShouldNot take a variadic optionalDescription argument. This is passed on to +//fmt.Sprintf() and is used to annotate failure messages. This allows you to make your failure messages more +//descriptive +// +//Both Should and ShouldNot return a boolean that is true if the assertion passed and false if it failed. +// +//Example: +// +// Eventually(myChannel).Should(Receive(), "Something should have come down the pipe.") +// Consistently(myChannel).ShouldNot(Receive(), "Nothing should have come down the pipe.") +type GomegaAsyncAssertion interface { + Should(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool + ShouldNot(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool +} + +//GomegaAssertion is returned by Ω and Expect and compares the actual value to the matcher +//passed to the Should/ShouldNot and To/ToNot/NotTo methods. +// +//Typically Should/ShouldNot are used with Ω and To/ToNot/NotTo are used with Expect +//though this is not enforced. +// +//All methods take a variadic optionalDescription argument. This is passed on to fmt.Sprintf() +//and is used to annotate failure messages. +// +//All methods return a bool that is true if hte assertion passed and false if it failed. +// +//Example: +// +// Ω(farm.HasCow()).Should(BeTrue(), "Farm %v should have a cow", farm) +type GomegaAssertion interface { + Should(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool + ShouldNot(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool + + To(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool + ToNot(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool + NotTo(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool +} + +//OmegaMatcher is deprecated in favor of the better-named and better-organized types.GomegaMatcher but sticks around to support existing code that uses it +type OmegaMatcher types.GomegaMatcher + +func toDuration(input interface{}) time.Duration { + duration, ok := input.(time.Duration) + if ok { + return duration + } + + value := reflect.ValueOf(input) + kind := reflect.TypeOf(input).Kind() + + if reflect.Int <= kind && kind <= reflect.Int64 { + return time.Duration(value.Int()) * time.Second + } else if reflect.Uint <= kind && kind <= reflect.Uint64 { + return time.Duration(value.Uint()) * time.Second + } else if reflect.Float32 <= kind && kind <= reflect.Float64 { + return time.Duration(value.Float() * float64(time.Second)) + } else if reflect.String == kind { + duration, err := time.ParseDuration(value.String()) + if err != nil { + panic(fmt.Sprintf("%#v is not a valid parsable duration string.", input)) + } + return duration + } + + panic(fmt.Sprintf("%v is not a valid interval. 
Must be time.Duration, parsable duration string or a number.", input)) +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/internal/assertion/assertion.go b/Godeps/_workspace/src/github.com/onsi/gomega/internal/assertion/assertion.go new file mode 100644 index 0000000000000..b73673f21ed91 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/internal/assertion/assertion.go @@ -0,0 +1,98 @@ +package assertion + +import ( + "fmt" + "reflect" + + "github.com/onsi/gomega/types" +) + +type Assertion struct { + actualInput interface{} + fail types.GomegaFailHandler + offset int + extra []interface{} +} + +func New(actualInput interface{}, fail types.GomegaFailHandler, offset int, extra ...interface{}) *Assertion { + return &Assertion{ + actualInput: actualInput, + fail: fail, + offset: offset, + extra: extra, + } +} + +func (assertion *Assertion) Should(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool { + return assertion.vetExtras(optionalDescription...) && assertion.match(matcher, true, optionalDescription...) +} + +func (assertion *Assertion) ShouldNot(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool { + return assertion.vetExtras(optionalDescription...) && assertion.match(matcher, false, optionalDescription...) +} + +func (assertion *Assertion) To(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool { + return assertion.vetExtras(optionalDescription...) && assertion.match(matcher, true, optionalDescription...) +} + +func (assertion *Assertion) ToNot(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool { + return assertion.vetExtras(optionalDescription...) && assertion.match(matcher, false, optionalDescription...) +} + +func (assertion *Assertion) NotTo(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool { + return assertion.vetExtras(optionalDescription...) && assertion.match(matcher, false, optionalDescription...) +} + +func (assertion *Assertion) buildDescription(optionalDescription ...interface{}) string { + switch len(optionalDescription) { + case 0: + return "" + default: + return fmt.Sprintf(optionalDescription[0].(string), optionalDescription[1:]...) + "\n" + } +} + +func (assertion *Assertion) match(matcher types.GomegaMatcher, desiredMatch bool, optionalDescription ...interface{}) bool { + matches, err := matcher.Match(assertion.actualInput) + description := assertion.buildDescription(optionalDescription...) + if err != nil { + assertion.fail(description+err.Error(), 2+assertion.offset) + return false + } + if matches != desiredMatch { + var message string + if desiredMatch { + message = matcher.FailureMessage(assertion.actualInput) + } else { + message = matcher.NegatedFailureMessage(assertion.actualInput) + } + assertion.fail(description+message, 2+assertion.offset) + return false + } + + return true +} + +func (assertion *Assertion) vetExtras(optionalDescription ...interface{}) bool { + success, message := vetExtras(assertion.extra) + if success { + return true + } + + description := assertion.buildDescription(optionalDescription...) 
+ assertion.fail(description+message, 2+assertion.offset) + return false +} + +func vetExtras(extras []interface{}) (bool, string) { + for i, extra := range extras { + if extra != nil { + zeroValue := reflect.Zero(reflect.TypeOf(extra)).Interface() + if !reflect.DeepEqual(zeroValue, extra) { + message := fmt.Sprintf("Unexpected non-nil/non-zero extra argument at index %d:\n\t<%T>: %#v", i+1, extra, extra) + return false, message + } + } + } + return true, "" +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/internal/assertion/assertion_suite_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/internal/assertion/assertion_suite_test.go new file mode 100644 index 0000000000000..dae47a48bf9d0 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/internal/assertion/assertion_suite_test.go @@ -0,0 +1,13 @@ +package assertion_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + "testing" +) + +func TestAssertion(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Assertion Suite") +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/internal/assertion/assertion_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/internal/assertion/assertion_test.go new file mode 100644 index 0000000000000..de9eff2d28813 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/internal/assertion/assertion_test.go @@ -0,0 +1,252 @@ +package assertion_test + +import ( + "errors" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . "github.com/onsi/gomega/internal/assertion" + "github.com/onsi/gomega/internal/fakematcher" +) + +var _ = Describe("Assertion", func() { + var ( + a *Assertion + failureMessage string + failureCallerSkip int + matcher *fakematcher.FakeMatcher + ) + + input := "The thing I'm testing" + + var fakeFailHandler = func(message string, callerSkip ...int) { + failureMessage = message + if len(callerSkip) == 1 { + failureCallerSkip = callerSkip[0] + } + } + + BeforeEach(func() { + matcher = &fakematcher.FakeMatcher{} + failureMessage = "" + failureCallerSkip = 0 + a = New(input, fakeFailHandler, 1) + }) + + Context("when called", func() { + It("should pass the provided input value to the matcher", func() { + a.Should(matcher) + + Ω(matcher.ReceivedActual).Should(Equal(input)) + matcher.ReceivedActual = "" + + a.ShouldNot(matcher) + + Ω(matcher.ReceivedActual).Should(Equal(input)) + matcher.ReceivedActual = "" + + a.To(matcher) + + Ω(matcher.ReceivedActual).Should(Equal(input)) + matcher.ReceivedActual = "" + + a.ToNot(matcher) + + Ω(matcher.ReceivedActual).Should(Equal(input)) + matcher.ReceivedActual = "" + + a.NotTo(matcher) + + Ω(matcher.ReceivedActual).Should(Equal(input)) + }) + }) + + Context("when the matcher succeeds", func() { + BeforeEach(func() { + matcher.MatchesToReturn = true + matcher.ErrToReturn = nil + }) + + Context("and a positive assertion is being made", func() { + It("should not call the failure callback", func() { + a.Should(matcher) + Ω(failureMessage).Should(Equal("")) + }) + + It("should be true", func() { + Ω(a.Should(matcher)).Should(BeTrue()) + }) + }) + + Context("and a negative assertion is being made", func() { + It("should call the failure callback", func() { + a.ShouldNot(matcher) + Ω(failureMessage).Should(Equal("negative: The thing I'm testing")) + Ω(failureCallerSkip).Should(Equal(3)) + }) + + It("should be false", func() { + Ω(a.ShouldNot(matcher)).Should(BeFalse()) + }) + }) + }) + + Context("when the matcher fails", func() { + BeforeEach(func() { + matcher.MatchesToReturn 
= false + matcher.ErrToReturn = nil + }) + + Context("and a positive assertion is being made", func() { + It("should call the failure callback", func() { + a.Should(matcher) + Ω(failureMessage).Should(Equal("positive: The thing I'm testing")) + Ω(failureCallerSkip).Should(Equal(3)) + }) + + It("should be false", func() { + Ω(a.Should(matcher)).Should(BeFalse()) + }) + }) + + Context("and a negative assertion is being made", func() { + It("should not call the failure callback", func() { + a.ShouldNot(matcher) + Ω(failureMessage).Should(Equal("")) + }) + + It("should be true", func() { + Ω(a.ShouldNot(matcher)).Should(BeTrue()) + }) + }) + }) + + Context("When reporting a failure", func() { + BeforeEach(func() { + matcher.MatchesToReturn = false + matcher.ErrToReturn = nil + }) + + Context("and there is an optional description", func() { + It("should append the description to the failure message", func() { + a.Should(matcher, "A description") + Ω(failureMessage).Should(Equal("A description\npositive: The thing I'm testing")) + Ω(failureCallerSkip).Should(Equal(3)) + }) + }) + + Context("and there are multiple arguments to the optional description", func() { + It("should append the formatted description to the failure message", func() { + a.Should(matcher, "A description of [%d]", 3) + Ω(failureMessage).Should(Equal("A description of [3]\npositive: The thing I'm testing")) + Ω(failureCallerSkip).Should(Equal(3)) + }) + }) + }) + + Context("When the matcher returns an error", func() { + BeforeEach(func() { + matcher.ErrToReturn = errors.New("Kaboom!") + }) + + Context("and a positive assertion is being made", func() { + It("should call the failure callback", func() { + matcher.MatchesToReturn = true + a.Should(matcher) + Ω(failureMessage).Should(Equal("Kaboom!")) + Ω(failureCallerSkip).Should(Equal(3)) + }) + }) + + Context("and a negative assertion is being made", func() { + It("should call the failure callback", func() { + matcher.MatchesToReturn = false + a.ShouldNot(matcher) + Ω(failureMessage).Should(Equal("Kaboom!")) + Ω(failureCallerSkip).Should(Equal(3)) + }) + }) + + It("should always be false", func() { + Ω(a.Should(matcher)).Should(BeFalse()) + Ω(a.ShouldNot(matcher)).Should(BeFalse()) + }) + }) + + Context("when there are extra parameters", func() { + It("(a simple example)", func() { + Ω(func() (string, int, error) { + return "foo", 0, nil + }()).Should(Equal("foo")) + }) + + Context("when the parameters are all nil or zero", func() { + It("should invoke the matcher", func() { + matcher.MatchesToReturn = true + matcher.ErrToReturn = nil + + var typedNil []string + a = New(input, fakeFailHandler, 1, 0, nil, typedNil) + + result := a.Should(matcher) + Ω(result).Should(BeTrue()) + Ω(matcher.ReceivedActual).Should(Equal(input)) + + Ω(failureMessage).Should(BeZero()) + }) + }) + + Context("when any of the parameters are not nil or zero", func() { + It("should call the failure callback", func() { + matcher.MatchesToReturn = false + matcher.ErrToReturn = nil + + a = New(input, fakeFailHandler, 1, errors.New("foo")) + result := a.Should(matcher) + Ω(result).Should(BeFalse()) + Ω(matcher.ReceivedActual).Should(BeZero(), "The matcher doesn't even get called") + Ω(failureMessage).Should(ContainSubstring("foo")) + failureMessage = "" + + a = New(input, fakeFailHandler, 1, nil, 1) + result = a.ShouldNot(matcher) + Ω(result).Should(BeFalse()) + Ω(failureMessage).Should(ContainSubstring("1")) + failureMessage = "" + + a = New(input, fakeFailHandler, 1, nil, 0, []string{"foo"}) + result = 
a.To(matcher) + Ω(result).Should(BeFalse()) + Ω(failureMessage).Should(ContainSubstring("foo")) + failureMessage = "" + + a = New(input, fakeFailHandler, 1, nil, 0, []string{"foo"}) + result = a.ToNot(matcher) + Ω(result).Should(BeFalse()) + Ω(failureMessage).Should(ContainSubstring("foo")) + failureMessage = "" + + a = New(input, fakeFailHandler, 1, nil, 0, []string{"foo"}) + result = a.NotTo(matcher) + Ω(result).Should(BeFalse()) + Ω(failureMessage).Should(ContainSubstring("foo")) + Ω(failureCallerSkip).Should(Equal(3)) + }) + }) + }) + + Context("Making an assertion without a registered fail handler", func() { + It("should panic", func() { + defer func() { + e := recover() + RegisterFailHandler(Fail) + if e == nil { + Fail("expected a panic to have occured") + } + }() + + RegisterFailHandler(nil) + Ω(true).Should(BeTrue()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/internal/asyncassertion/async_assertion.go b/Godeps/_workspace/src/github.com/onsi/gomega/internal/asyncassertion/async_assertion.go new file mode 100644 index 0000000000000..7bbec43b506ba --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/internal/asyncassertion/async_assertion.go @@ -0,0 +1,197 @@ +package asyncassertion + +import ( + "errors" + "fmt" + "reflect" + "time" + + "github.com/onsi/gomega/types" +) + +type AsyncAssertionType uint + +const ( + AsyncAssertionTypeEventually AsyncAssertionType = iota + AsyncAssertionTypeConsistently +) + +type AsyncAssertion struct { + asyncType AsyncAssertionType + actualInput interface{} + timeoutInterval time.Duration + pollingInterval time.Duration + fail types.GomegaFailHandler + offset int +} + +func New(asyncType AsyncAssertionType, actualInput interface{}, fail types.GomegaFailHandler, timeoutInterval time.Duration, pollingInterval time.Duration, offset int) *AsyncAssertion { + actualType := reflect.TypeOf(actualInput) + if actualType.Kind() == reflect.Func { + if actualType.NumIn() != 0 || actualType.NumOut() == 0 { + panic("Expected a function with no arguments and one or more return values.") + } + } + + return &AsyncAssertion{ + asyncType: asyncType, + actualInput: actualInput, + fail: fail, + timeoutInterval: timeoutInterval, + pollingInterval: pollingInterval, + offset: offset, + } +} + +func (assertion *AsyncAssertion) Should(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool { + return assertion.match(matcher, true, optionalDescription...) +} + +func (assertion *AsyncAssertion) ShouldNot(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool { + return assertion.match(matcher, false, optionalDescription...) +} + +func (assertion *AsyncAssertion) buildDescription(optionalDescription ...interface{}) string { + switch len(optionalDescription) { + case 0: + return "" + default: + return fmt.Sprintf(optionalDescription[0].(string), optionalDescription[1:]...) 
+ "\n" + } +} + +func (assertion *AsyncAssertion) actualInputIsAFunction() bool { + actualType := reflect.TypeOf(assertion.actualInput) + return actualType.Kind() == reflect.Func && actualType.NumIn() == 0 && actualType.NumOut() > 0 +} + +func (assertion *AsyncAssertion) pollActual() (interface{}, error) { + if assertion.actualInputIsAFunction() { + values := reflect.ValueOf(assertion.actualInput).Call([]reflect.Value{}) + + extras := []interface{}{} + for _, value := range values[1:] { + extras = append(extras, value.Interface()) + } + + success, message := vetExtras(extras) + + if !success { + return nil, errors.New(message) + } + + return values[0].Interface(), nil + } + + return assertion.actualInput, nil +} + +type oracleMatcher interface { + MatchMayChangeInTheFuture(actual interface{}) bool +} + +func (assertion *AsyncAssertion) matcherMayChange(matcher types.GomegaMatcher, value interface{}) bool { + if assertion.actualInputIsAFunction() { + return true + } + + oracleMatcher, ok := matcher.(oracleMatcher) + if !ok { + return true + } + + return oracleMatcher.MatchMayChangeInTheFuture(value) +} + +func (assertion *AsyncAssertion) match(matcher types.GomegaMatcher, desiredMatch bool, optionalDescription ...interface{}) bool { + timer := time.Now() + timeout := time.After(assertion.timeoutInterval) + + description := assertion.buildDescription(optionalDescription...) + + var matches bool + var err error + mayChange := true + value, err := assertion.pollActual() + if err == nil { + mayChange = assertion.matcherMayChange(matcher, value) + matches, err = matcher.Match(value) + } + + fail := func(preamble string) { + errMsg := "" + message := "" + if err != nil { + errMsg = "Error: " + err.Error() + } else { + if desiredMatch { + message = matcher.FailureMessage(value) + } else { + message = matcher.NegatedFailureMessage(value) + } + } + assertion.fail(fmt.Sprintf("%s after %.3fs.\n%s%s%s", preamble, time.Since(timer).Seconds(), description, message, errMsg), 3+assertion.offset) + } + + if assertion.asyncType == AsyncAssertionTypeEventually { + for { + if err == nil && matches == desiredMatch { + return true + } + + if !mayChange { + fail("No future change is possible. 
Bailing out early") + return false + } + + select { + case <-time.After(assertion.pollingInterval): + value, err = assertion.pollActual() + if err == nil { + mayChange = assertion.matcherMayChange(matcher, value) + matches, err = matcher.Match(value) + } + case <-timeout: + fail("Timed out") + return false + } + } + } else if assertion.asyncType == AsyncAssertionTypeConsistently { + for { + if !(err == nil && matches == desiredMatch) { + fail("Failed") + return false + } + + if !mayChange { + return true + } + + select { + case <-time.After(assertion.pollingInterval): + value, err = assertion.pollActual() + if err == nil { + mayChange = assertion.matcherMayChange(matcher, value) + matches, err = matcher.Match(value) + } + case <-timeout: + return true + } + } + } + + return false +} + +func vetExtras(extras []interface{}) (bool, string) { + for i, extra := range extras { + if extra != nil { + zeroValue := reflect.Zero(reflect.TypeOf(extra)).Interface() + if !reflect.DeepEqual(zeroValue, extra) { + message := fmt.Sprintf("Unexpected non-nil/non-zero extra argument at index %d:\n\t<%T>: %#v", i+1, extra, extra) + return false, message + } + } + } + return true, "" +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/internal/asyncassertion/async_assertion_suite_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/internal/asyncassertion/async_assertion_suite_test.go new file mode 100644 index 0000000000000..bdb0c3d2202b7 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/internal/asyncassertion/async_assertion_suite_test.go @@ -0,0 +1,13 @@ +package asyncassertion_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + + "testing" +) + +func TestAsyncAssertion(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "AsyncAssertion Suite") +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/internal/asyncassertion/async_assertion_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/internal/asyncassertion/async_assertion_test.go new file mode 100644 index 0000000000000..4288d4da919c2 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/internal/asyncassertion/async_assertion_test.go @@ -0,0 +1,345 @@ +package asyncassertion_test + +import ( + "errors" + "time" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . 
"github.com/onsi/gomega/internal/asyncassertion" +) + +var _ = Describe("Async Assertion", func() { + var ( + failureMessage string + callerSkip int + ) + + var fakeFailHandler = func(message string, skip ...int) { + failureMessage = message + callerSkip = skip[0] + } + + BeforeEach(func() { + failureMessage = "" + callerSkip = 0 + }) + + Describe("Eventually", func() { + Context("the positive case", func() { + It("should poll the function and matcher", func() { + counter := 0 + a := New(AsyncAssertionTypeEventually, func() int { + counter++ + return counter + }, fakeFailHandler, time.Duration(0.2*float64(time.Second)), time.Duration(0.02*float64(time.Second)), 1) + + a.Should(BeNumerically("==", 5)) + Ω(failureMessage).Should(BeZero()) + }) + + It("should continue when the matcher errors", func() { + counter := 0 + a := New(AsyncAssertionTypeEventually, func() interface{} { + counter++ + if counter == 5 { + return "not-a-number" //this should cause the matcher to error + } + return counter + }, fakeFailHandler, time.Duration(0.2*float64(time.Second)), time.Duration(0.02*float64(time.Second)), 1) + + a.Should(BeNumerically("==", 5), "My description %d", 2) + + Ω(failureMessage).Should(ContainSubstring("Timed out after")) + Ω(failureMessage).Should(ContainSubstring("My description 2")) + Ω(callerSkip).Should(Equal(4)) + }) + + It("should be able to timeout", func() { + counter := 0 + a := New(AsyncAssertionTypeEventually, func() int { + counter++ + return counter + }, fakeFailHandler, time.Duration(0.2*float64(time.Second)), time.Duration(0.02*float64(time.Second)), 1) + + a.Should(BeNumerically(">", 100), "My description %d", 2) + + Ω(counter).Should(BeNumerically(">", 8)) + Ω(counter).Should(BeNumerically("<=", 10)) + Ω(failureMessage).Should(ContainSubstring("Timed out after")) + Ω(failureMessage).Should(MatchRegexp(`\: \d`), "Should pass the correct value to the matcher message formatter.") + Ω(failureMessage).Should(ContainSubstring("My description 2")) + Ω(callerSkip).Should(Equal(4)) + }) + }) + + Context("the negative case", func() { + It("should poll the function and matcher", func() { + counter := 0 + a := New(AsyncAssertionTypeEventually, func() int { + counter += 1 + return counter + }, fakeFailHandler, time.Duration(0.2*float64(time.Second)), time.Duration(0.02*float64(time.Second)), 1) + + a.ShouldNot(BeNumerically("<", 3)) + + Ω(counter).Should(Equal(3)) + Ω(failureMessage).Should(BeZero()) + }) + + It("should timeout when the matcher errors", func() { + a := New(AsyncAssertionTypeEventually, func() interface{} { + return 0 //this should cause the matcher to error + }, fakeFailHandler, time.Duration(0.2*float64(time.Second)), time.Duration(0.02*float64(time.Second)), 1) + + a.ShouldNot(HaveLen(0), "My description %d", 2) + + Ω(failureMessage).Should(ContainSubstring("Timed out after")) + Ω(failureMessage).Should(ContainSubstring("Error:")) + Ω(failureMessage).Should(ContainSubstring("My description 2")) + Ω(callerSkip).Should(Equal(4)) + }) + + It("should be able to timeout", func() { + a := New(AsyncAssertionTypeEventually, func() int { + return 0 + }, fakeFailHandler, time.Duration(0.1*float64(time.Second)), time.Duration(0.02*float64(time.Second)), 1) + + a.ShouldNot(Equal(0), "My description %d", 2) + + Ω(failureMessage).Should(ContainSubstring("Timed out after")) + Ω(failureMessage).Should(ContainSubstring(": 0"), "Should pass the correct value to the matcher message formatter.") + Ω(failureMessage).Should(ContainSubstring("My description 2")) + 
Ω(callerSkip).Should(Equal(4)) + }) + }) + + Context("with a function that returns multiple values", func() { + It("should eventually succeed if the additional arguments are nil", func() { + i := 0 + Eventually(func() (int, error) { + i++ + return i, nil + }).Should(Equal(10)) + }) + + It("should eventually timeout if the additional arguments are not nil", func() { + i := 0 + a := New(AsyncAssertionTypeEventually, func() (int, error) { + i++ + return i, errors.New("bam") + }, fakeFailHandler, time.Duration(0.2*float64(time.Second)), time.Duration(0.02*float64(time.Second)), 1) + a.Should(Equal(2)) + + Ω(failureMessage).Should(ContainSubstring("Timed out after")) + Ω(failureMessage).Should(ContainSubstring("Error:")) + Ω(failureMessage).Should(ContainSubstring("bam")) + Ω(callerSkip).Should(Equal(4)) + }) + }) + + Context("Making an assertion without a registered fail handler", func() { + It("should panic", func() { + defer func() { + e := recover() + RegisterFailHandler(Fail) + if e == nil { + Fail("expected a panic to have occured") + } + }() + + RegisterFailHandler(nil) + c := make(chan bool, 1) + c <- true + Eventually(c).Should(Receive()) + }) + }) + }) + + Describe("Consistently", func() { + Describe("The positive case", func() { + Context("when the matcher consistently passes for the duration", func() { + It("should pass", func() { + calls := 0 + a := New(AsyncAssertionTypeConsistently, func() string { + calls++ + return "foo" + }, fakeFailHandler, time.Duration(0.2*float64(time.Second)), time.Duration(0.02*float64(time.Second)), 1) + + a.Should(Equal("foo")) + Ω(calls).Should(BeNumerically(">", 8)) + Ω(calls).Should(BeNumerically("<=", 10)) + Ω(failureMessage).Should(BeZero()) + }) + }) + + Context("when the matcher fails at some point", func() { + It("should fail", func() { + calls := 0 + a := New(AsyncAssertionTypeConsistently, func() interface{} { + calls++ + if calls > 5 { + return "bar" + } + return "foo" + }, fakeFailHandler, time.Duration(0.2*float64(time.Second)), time.Duration(0.02*float64(time.Second)), 1) + + a.Should(Equal("foo")) + Ω(failureMessage).Should(ContainSubstring("to equal")) + Ω(callerSkip).Should(Equal(4)) + }) + }) + + Context("when the matcher errors at some point", func() { + It("should fail", func() { + calls := 0 + a := New(AsyncAssertionTypeConsistently, func() interface{} { + calls++ + if calls > 5 { + return 3 + } + return []int{1, 2, 3} + }, fakeFailHandler, time.Duration(0.2*float64(time.Second)), time.Duration(0.02*float64(time.Second)), 1) + + a.Should(HaveLen(3)) + Ω(failureMessage).Should(ContainSubstring("HaveLen matcher expects")) + Ω(callerSkip).Should(Equal(4)) + }) + }) + }) + + Describe("The negative case", func() { + Context("when the matcher consistently passes for the duration", func() { + It("should pass", func() { + c := make(chan bool) + a := New(AsyncAssertionTypeConsistently, c, fakeFailHandler, time.Duration(0.2*float64(time.Second)), time.Duration(0.02*float64(time.Second)), 1) + + a.ShouldNot(Receive()) + Ω(failureMessage).Should(BeZero()) + }) + }) + + Context("when the matcher fails at some point", func() { + It("should fail", func() { + c := make(chan bool) + go func() { + time.Sleep(time.Duration(100 * time.Millisecond)) + c <- true + }() + + a := New(AsyncAssertionTypeConsistently, c, fakeFailHandler, time.Duration(0.2*float64(time.Second)), time.Duration(0.02*float64(time.Second)), 1) + + a.ShouldNot(Receive()) + Ω(failureMessage).Should(ContainSubstring("not to receive anything")) + }) + }) + + Context("when the matcher 
errors at some point", func() { + It("should fail", func() { + calls := 0 + a := New(AsyncAssertionTypeConsistently, func() interface{} { + calls++ + return calls + }, fakeFailHandler, time.Duration(0.2*float64(time.Second)), time.Duration(0.02*float64(time.Second)), 1) + + a.ShouldNot(BeNumerically(">", 5)) + Ω(failureMessage).Should(ContainSubstring("not to be >")) + Ω(callerSkip).Should(Equal(4)) + }) + }) + }) + + Context("with a function that returns multiple values", func() { + It("should consistently succeed if the additional arguments are nil", func() { + i := 2 + Consistently(func() (int, error) { + i++ + return i, nil + }).Should(BeNumerically(">=", 2)) + }) + + It("should eventually timeout if the additional arguments are not nil", func() { + i := 2 + a := New(AsyncAssertionTypeEventually, func() (int, error) { + i++ + return i, errors.New("bam") + }, fakeFailHandler, time.Duration(0.2*float64(time.Second)), time.Duration(0.02*float64(time.Second)), 1) + a.Should(BeNumerically(">=", 2)) + + Ω(failureMessage).Should(ContainSubstring("Error:")) + Ω(failureMessage).Should(ContainSubstring("bam")) + Ω(callerSkip).Should(Equal(4)) + }) + }) + + Context("Making an assertion without a registered fail handler", func() { + It("should panic", func() { + defer func() { + e := recover() + RegisterFailHandler(Fail) + if e == nil { + Fail("expected a panic to have occured") + } + }() + + RegisterFailHandler(nil) + c := make(chan bool) + Consistently(c).ShouldNot(Receive()) + }) + }) + }) + + Context("when passed a function with the wrong # or arguments & returns", func() { + It("should panic", func() { + Ω(func() { + New(AsyncAssertionTypeEventually, func() {}, fakeFailHandler, 0, 0, 1) + }).Should(Panic()) + + Ω(func() { + New(AsyncAssertionTypeEventually, func(a string) int { return 0 }, fakeFailHandler, 0, 0, 1) + }).Should(Panic()) + + Ω(func() { + New(AsyncAssertionTypeEventually, func() int { return 0 }, fakeFailHandler, 0, 0, 1) + }).ShouldNot(Panic()) + + Ω(func() { + New(AsyncAssertionTypeEventually, func() (int, error) { return 0, nil }, fakeFailHandler, 0, 0, 1) + }).ShouldNot(Panic()) + }) + }) + + Describe("bailing early", func() { + Context("when actual is a value", func() { + It("Eventually should bail out and fail early if the matcher says to", func() { + c := make(chan bool) + close(c) + + t := time.Now() + failures := InterceptGomegaFailures(func() { + Eventually(c, 0.1).Should(Receive()) + }) + Ω(time.Since(t)).Should(BeNumerically("<", 90*time.Millisecond)) + + Ω(failures).Should(HaveLen(1)) + }) + }) + + Context("when actual is a function", func() { + It("should never bail early", func() { + c := make(chan bool) + close(c) + + t := time.Now() + failures := InterceptGomegaFailures(func() { + Eventually(func() chan bool { + return c + }, 0.1).Should(Receive()) + }) + Ω(time.Since(t)).Should(BeNumerically(">=", 90*time.Millisecond)) + + Ω(failures).Should(HaveLen(1)) + }) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/internal/fakematcher/fake_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/internal/fakematcher/fake_matcher.go new file mode 100644 index 0000000000000..6e351a7de5799 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/internal/fakematcher/fake_matcher.go @@ -0,0 +1,23 @@ +package fakematcher + +import "fmt" + +type FakeMatcher struct { + ReceivedActual interface{} + MatchesToReturn bool + ErrToReturn error +} + +func (matcher *FakeMatcher) Match(actual interface{}) (bool, error) { + matcher.ReceivedActual = 
actual + + return matcher.MatchesToReturn, matcher.ErrToReturn +} + +func (matcher *FakeMatcher) FailureMessage(actual interface{}) string { + return fmt.Sprintf("positive: %v", actual) +} + +func (matcher *FakeMatcher) NegatedFailureMessage(actual interface{}) string { + return fmt.Sprintf("negative: %v", actual) +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/internal/testingtsupport/testing_t_support.go b/Godeps/_workspace/src/github.com/onsi/gomega/internal/testingtsupport/testing_t_support.go new file mode 100644 index 0000000000000..7871fd43953e6 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/internal/testingtsupport/testing_t_support.go @@ -0,0 +1,40 @@ +package testingtsupport + +import ( + "regexp" + "runtime/debug" + "strings" + + "github.com/onsi/gomega/types" +) + +type gomegaTestingT interface { + Errorf(format string, args ...interface{}) +} + +func BuildTestingTGomegaFailHandler(t gomegaTestingT) types.GomegaFailHandler { + return func(message string, callerSkip ...int) { + skip := 1 + if len(callerSkip) > 0 { + skip = callerSkip[0] + } + stackTrace := pruneStack(string(debug.Stack()), skip) + t.Errorf("\n%s\n%s", stackTrace, message) + } +} + +func pruneStack(fullStackTrace string, skip int) string { + stack := strings.Split(fullStackTrace, "\n") + if len(stack) > 2*(skip+1) { + stack = stack[2*(skip+1):] + } + prunedStack := []string{} + re := regexp.MustCompile(`\/ginkgo\/|\/pkg\/testing\/|\/pkg\/runtime\/`) + for i := 0; i < len(stack)/2; i++ { + if !re.Match([]byte(stack[i*2])) { + prunedStack = append(prunedStack, stack[i*2]) + prunedStack = append(prunedStack, stack[i*2+1]) + } + } + return strings.Join(prunedStack, "\n") +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/internal/testingtsupport/testing_t_support_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/internal/testingtsupport/testing_t_support_test.go new file mode 100644 index 0000000000000..b9fbd6c640466 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/internal/testingtsupport/testing_t_support_test.go @@ -0,0 +1,12 @@ +package testingtsupport_test + +import ( + . "github.com/onsi/gomega" + + "testing" +) + +func TestTestingT(t *testing.T) { + RegisterTestingT(t) + Ω(true).Should(BeTrue()) +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers.go new file mode 100644 index 0000000000000..307f88a4fed00 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers.go @@ -0,0 +1,328 @@ +package gomega + +import ( + "time" + + "github.com/onsi/gomega/matchers" + "github.com/onsi/gomega/types" +) + +//Equal uses reflect.DeepEqual to compare actual with expected. Equal is strict about +//types when performing comparisons. +//It is an error for both actual and expected to be nil. Use BeNil() instead. +func Equal(expected interface{}) types.GomegaMatcher { + return &matchers.EqualMatcher{ + Expected: expected, + } +} + +//BeEquivalentTo is more lax than Equal, allowing equality between different types. +//This is done by converting actual to have the type of expected before +//attempting equality with reflect.DeepEqual. +//It is an error for actual and expected to be nil. Use BeNil() instead. 
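Editorial aside (not vendored code): the practical difference between `Equal` and `BeEquivalentTo` described above is easiest to see with mismatched numeric types. A minimal sketch, with arbitrary values:

```golang
package example_test

import (
	"testing"

	. "github.com/onsi/gomega"
)

func TestEqualVersusBeEquivalentTo(t *testing.T) {
	RegisterTestingT(t)

	var n int32 = 5

	// Equal relies on reflect.DeepEqual, which treats int32(5) and int(5) as unequal...
	Ω(n).ShouldNot(Equal(5))

	// ...whereas BeEquivalentTo first converts actual to the expected value's type.
	Ω(n).Should(BeEquivalentTo(5))
}
```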
+func BeEquivalentTo(expected interface{}) types.GomegaMatcher { + return &matchers.BeEquivalentToMatcher{ + Expected: expected, + } +} + +//BeNil succeeds if actual is nil +func BeNil() types.GomegaMatcher { + return &matchers.BeNilMatcher{} +} + +//BeTrue succeeds if actual is true +func BeTrue() types.GomegaMatcher { + return &matchers.BeTrueMatcher{} +} + +//BeFalse succeeds if actual is false +func BeFalse() types.GomegaMatcher { + return &matchers.BeFalseMatcher{} +} + +//HaveOccurred succeeds if actual is a non-nil error +//The typical Go error checking pattern looks like: +// err := SomethingThatMightFail() +// Ω(err).ShouldNot(HaveOccurred()) +func HaveOccurred() types.GomegaMatcher { + return &matchers.HaveOccurredMatcher{} +} + +//Succeed passes if actual is a nil error +//Succeed is intended to be used with functions that return a single error value. Instead of +// err := SomethingThatMightFail() +// Ω(err).ShouldNot(HaveOccurred()) +// +//You can write: +// Ω(SomethingThatMightFail()).Should(Succeed()) +// +//It is a mistake to use Succeed with a function that has multiple return values. Gomega's Ω and Expect +//functions automatically trigger failure if any return values after the first return value are non-zero/non-nil. +//This means that Ω(MultiReturnFunc()).ShouldNot(Succeed()) can never pass. +func Succeed() types.GomegaMatcher { + return &matchers.SucceedMatcher{} +} + +//MatchError succeeds if actual is a non-nil error that matches the passed in string/error. +// +//These are valid use-cases: +// Ω(err).Should(MatchError("an error")) //asserts that err.Error() == "an error" +// Ω(err).Should(MatchError(SomeError)) //asserts that err == SomeError (via reflect.DeepEqual) +// +//It is an error for err to be nil or an object that does not implement the Error interface +func MatchError(expected interface{}) types.GomegaMatcher { + return &matchers.MatchErrorMatcher{ + Expected: expected, + } +} + +//BeClosed succeeds if actual is a closed channel. +//It is an error to pass a non-channel to BeClosed, it is also an error to pass nil +// +//In order to check whether or not the channel is closed, Gomega must try to read from the channel +//(even in the `ShouldNot(BeClosed())` case). You should keep this in mind if you wish to make subsequent assertions about +//values coming down the channel. +// +//Also, if you are testing that a *buffered* channel is closed you must first read all values out of the channel before +//asserting that it is closed (it is not possible to detect that a buffered-channel has been closed until all its buffered values are read). +// +//Finally, as a corollary: it is an error to check whether or not a send-only channel is closed. +func BeClosed() types.GomegaMatcher { + return &matchers.BeClosedMatcher{} +} + +//Receive succeeds if there is a value to be received on actual. +//Actual must be a channel (and cannot be a send-only channel) -- anything else is an error. +// +//Receive returns immediately and never blocks: +// +//- If there is nothing on the channel `c` then Ω(c).Should(Receive()) will fail and Ω(c).ShouldNot(Receive()) will pass. +// +//- If the channel `c` is closed then Ω(c).Should(Receive()) will fail and Ω(c).ShouldNot(Receive()) will pass. +// +//- If there is something on the channel `c` ready to be read, then Ω(c).Should(Receive()) will pass and Ω(c).ShouldNot(Receive()) will fail. 
+//
+//If you have a go-routine running in the background that will write to channel `c` you can:
+//	Eventually(c).Should(Receive())
+//
+//This will time out if nothing gets sent to `c` (you can modify the timeout interval as you normally do with `Eventually`)
+//
+//A similar use-case is to assert that no go-routine writes to a channel (for a period of time). You can do this with `Consistently`:
+//	Consistently(c).ShouldNot(Receive())
+//
+//You can pass `Receive` a matcher. If you do so, it will match the received object against the matcher. For example:
+//	Ω(c).Should(Receive(Equal("foo")))
+//
+//When given a matcher, `Receive` will always fail if there is nothing to be received on the channel.
+//
+//Passing Receive a matcher is especially useful when paired with Eventually:
+//
+//	Eventually(c).Should(Receive(ContainSubstring("bar")))
+//
+//will repeatedly attempt to pull values out of `c` until a value matching "bar" is received.
+//
+//Finally, if you want to have a reference to the value *sent* to the channel you can pass the `Receive` matcher a pointer to a variable of the appropriate type:
+//	var myThing thing
+//	Eventually(thingChan).Should(Receive(&myThing))
+//	Ω(myThing.Sprocket).Should(Equal("foo"))
+//	Ω(myThing.IsValid()).Should(BeTrue())
+func Receive(args ...interface{}) types.GomegaMatcher {
+	var arg interface{}
+	if len(args) > 0 {
+		arg = args[0]
+	}
+
+	return &matchers.ReceiveMatcher{
+		Arg: arg,
+	}
+}
+
+//BeSent succeeds if a value can be sent to actual.
+//Actual must be a channel (and cannot be a receive-only channel) that can send the type of the value passed into BeSent -- anything else is an error.
+//In addition, actual must not be closed.
+//
+//BeSent never blocks:
+//
+//- If the channel `c` is not ready to receive then Ω(c).Should(BeSent("foo")) will fail immediately
+//- If the channel `c` is eventually ready to receive then Eventually(c).Should(BeSent("foo")) will succeed, presuming the channel becomes ready to receive before Eventually's timeout
+//- If the channel `c` is closed then Ω(c).Should(BeSent("foo")) and Ω(c).ShouldNot(BeSent("foo")) will both fail immediately
+//
+//Of course, the value is actually sent to the channel. The point of `BeSent` is less to make an assertion about the availability of the channel (which is typically an implementation detail that your test should not be concerned with).
+//Rather, the point of `BeSent` is to make it possible to easily and expressively write tests that can time out on blocked channel sends.
+func BeSent(arg interface{}) types.GomegaMatcher {
+	return &matchers.BeSentMatcher{
+		Arg: arg,
+	}
+}
+
+//MatchRegexp succeeds if actual is a string or stringer that matches the
+//passed-in regexp. Optional arguments can be provided to construct a regexp
+//via fmt.Sprintf().
+func MatchRegexp(regexp string, args ...interface{}) types.GomegaMatcher {
+	return &matchers.MatchRegexpMatcher{
+		Regexp: regexp,
+		Args:   args,
+	}
+}
+
+//ContainSubstring succeeds if actual is a string or stringer that contains the
+//passed-in substring. Optional arguments can be provided to construct the substring
+//via fmt.Sprintf().
+func ContainSubstring(substr string, args ...interface{}) types.GomegaMatcher {
+	return &matchers.ContainSubstringMatcher{
+		Substr: substr,
+		Args:   args,
+	}
+}
+
+//HavePrefix succeeds if actual is a string or stringer that contains the
+//passed-in string as a prefix. Optional arguments can be provided to construct
+//the prefix via fmt.Sprintf().
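Editorial aside, not part of the vendored files: `BeSent` and `Receive` (documented above) pair naturally when a test wants to push a value through a channel and then assert on what comes out. A short hypothetical sketch:

```golang
package example_test

import (
	"testing"

	. "github.com/onsi/gomega"
)

func TestSendAndReceiveMatchers(t *testing.T) {
	RegisterTestingT(t)

	c := make(chan string, 1)

	// The buffered channel has room, so BeSent succeeds immediately
	// (and it really does place "foo" on the channel).
	Ω(c).Should(BeSent("foo"))

	// Receive can take a nested matcher to assert on the value it pulls out.
	Ω(c).Should(Receive(ContainSubstring("oo")))

	// The channel is now drained, so a bare Receive finds nothing.
	Ω(c).ShouldNot(Receive())
}
```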
+func HavePrefix(prefix string, args ...interface{}) types.GomegaMatcher {
+	return &matchers.HavePrefixMatcher{
+		Prefix: prefix,
+		Args:   args,
+	}
+}
+
+//HaveSuffix succeeds if actual is a string or stringer that contains the
+//passed-in string as a suffix. Optional arguments can be provided to construct
+//the suffix via fmt.Sprintf().
+func HaveSuffix(suffix string, args ...interface{}) types.GomegaMatcher {
+	return &matchers.HaveSuffixMatcher{
+		Suffix: suffix,
+		Args:   args,
+	}
+}
+
+//MatchJSON succeeds if actual is a string or stringer of JSON that matches
+//the expected JSON. The JSONs are decoded and the resulting objects are compared via
+//reflect.DeepEqual so things like key-ordering and whitespace shouldn't matter.
+func MatchJSON(json interface{}) types.GomegaMatcher {
+	return &matchers.MatchJSONMatcher{
+		JSONToMatch: json,
+	}
+}
+
+//BeEmpty succeeds if actual is empty. Actual must be of type string, array, map, chan, or slice.
+func BeEmpty() types.GomegaMatcher {
+	return &matchers.BeEmptyMatcher{}
+}
+
+//HaveLen succeeds if actual has the passed-in length. Actual must be of type string, array, map, chan, or slice.
+func HaveLen(count int) types.GomegaMatcher {
+	return &matchers.HaveLenMatcher{
+		Count: count,
+	}
+}
+
+//BeZero succeeds if actual is the zero value for its type or if actual is nil.
+func BeZero() types.GomegaMatcher {
+	return &matchers.BeZeroMatcher{}
+}
+
+//ContainElement succeeds if actual contains the passed in element.
+//By default ContainElement() uses Equal() to perform the match, however a
+//matcher can be passed in instead:
+//	Ω([]string{"Foo", "FooBar"}).Should(ContainElement(ContainSubstring("Bar")))
+//
+//Actual must be an array, slice or map.
+//For maps, ContainElement searches through the map's values.
+func ContainElement(element interface{}) types.GomegaMatcher {
+	return &matchers.ContainElementMatcher{
+		Element: element,
+	}
+}
+
+//ConsistOf succeeds if actual contains precisely the elements passed into the matcher. The ordering of the elements does not matter.
+//By default ConsistOf() uses Equal() to match the elements, however custom matchers can be passed in instead. Here are some examples:
+//
+//	Ω([]string{"Foo", "FooBar"}).Should(ConsistOf("FooBar", "Foo"))
+//	Ω([]string{"Foo", "FooBar"}).Should(ConsistOf(ContainSubstring("Bar"), "Foo"))
+//	Ω([]string{"Foo", "FooBar"}).Should(ConsistOf(ContainSubstring("Foo"), ContainSubstring("Foo")))
+//
+//Actual must be an array, slice or map. For maps, ConsistOf matches against the map's values.
+//
+//You typically pass variadic arguments to ConsistOf (as in the examples above). However, if you need to pass in a slice you can, provided that it
+//is the only element passed in to ConsistOf:
+//
+//	Ω([]string{"Foo", "FooBar"}).Should(ConsistOf([]string{"FooBar", "Foo"}))
+//
+//Note that Go's type system does not allow you to write this as ConsistOf([]string{"FooBar", "Foo"}...) as []string and []interface{} are different types - hence the need for this special rule.
+
+func ConsistOf(elements ...interface{}) types.GomegaMatcher {
+	return &matchers.ConsistOfMatcher{
+		Elements: elements,
+	}
+}
+
+//HaveKey succeeds if actual is a map with the passed in key.
+//By default HaveKey uses Equal() to perform the match, however a +//matcher can be passed in instead: +// Ω(map[string]string{"Foo": "Bar", "BazFoo": "Duck"}).Should(HaveKey(MatchRegexp(`.+Foo$`))) +func HaveKey(key interface{}) types.GomegaMatcher { + return &matchers.HaveKeyMatcher{ + Key: key, + } +} + +//HaveKeyWithValue succeeds if actual is a map with the passed in key and value. +//By default HaveKeyWithValue uses Equal() to perform the match, however a +//matcher can be passed in instead: +// Ω(map[string]string{"Foo": "Bar", "BazFoo": "Duck"}).Should(HaveKeyWithValue("Foo", "Bar")) +// Ω(map[string]string{"Foo": "Bar", "BazFoo": "Duck"}).Should(HaveKeyWithValue(MatchRegexp(`.+Foo$`), "Bar")) +func HaveKeyWithValue(key interface{}, value interface{}) types.GomegaMatcher { + return &matchers.HaveKeyWithValueMatcher{ + Key: key, + Value: value, + } +} + +//BeNumerically performs numerical assertions in a type-agnostic way. +//Actual and expected should be numbers, though the specific type of +//number is irrelevant (floa32, float64, uint8, etc...). +// +//There are six, self-explanatory, supported comparators: +// Ω(1.0).Should(BeNumerically("==", 1)) +// Ω(1.0).Should(BeNumerically("~", 0.999, 0.01)) +// Ω(1.0).Should(BeNumerically(">", 0.9)) +// Ω(1.0).Should(BeNumerically(">=", 1.0)) +// Ω(1.0).Should(BeNumerically("<", 3)) +// Ω(1.0).Should(BeNumerically("<=", 1.0)) +func BeNumerically(comparator string, compareTo ...interface{}) types.GomegaMatcher { + return &matchers.BeNumericallyMatcher{ + Comparator: comparator, + CompareTo: compareTo, + } +} + +//BeTemporally compares time.Time's like BeNumerically +//Actual and expected must be time.Time. The comparators are the same as for BeNumerically +// Ω(time.Now()).Should(BeTemporally(">", time.Time{})) +// Ω(time.Now()).Should(BeTemporally("~", time.Now(), time.Second)) +func BeTemporally(comparator string, compareTo time.Time, threshold ...time.Duration) types.GomegaMatcher { + return &matchers.BeTemporallyMatcher{ + Comparator: comparator, + CompareTo: compareTo, + Threshold: threshold, + } +} + +//BeAssignableToTypeOf succeeds if actual is assignable to the type of expected. +//It will return an error when one of the values is nil. +// Ω(0).Should(BeAssignableToTypeOf(0)) // Same values +// Ω(5).Should(BeAssignableToTypeOf(-1)) // different values same type +// Ω("foo").Should(BeAssignableToTypeOf("bar")) // different values same type +// Ω(struct{ Foo string }{}).Should(BeAssignableToTypeOf(struct{ Foo string }{})) +func BeAssignableToTypeOf(expected interface{}) types.GomegaMatcher { + return &matchers.AssignableToTypeOfMatcher{ + Expected: expected, + } +} + +//Panic succeeds if actual is a function that, when invoked, panics. +//Actual must be a function that takes no arguments and returns no results. 
+func Panic() types.GomegaMatcher { + return &matchers.PanicMatcher{} +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/assignable_to_type_of_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/assignable_to_type_of_matcher.go new file mode 100644 index 0000000000000..7f8897b3c0d8c --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/assignable_to_type_of_matcher.go @@ -0,0 +1,30 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" + "reflect" +) + +type AssignableToTypeOfMatcher struct { + Expected interface{} +} + +func (matcher *AssignableToTypeOfMatcher) Match(actual interface{}) (success bool, err error) { + if actual == nil || matcher.Expected == nil { + return false, fmt.Errorf("Refusing to compare to .") + } + + actualType := reflect.TypeOf(actual) + expectedType := reflect.TypeOf(matcher.Expected) + + return actualType.AssignableTo(expectedType), nil +} + +func (matcher *AssignableToTypeOfMatcher) FailureMessage(actual interface{}) string { + return format.Message(actual, fmt.Sprintf("to be assignable to the type: %T", matcher.Expected)) +} + +func (matcher *AssignableToTypeOfMatcher) NegatedFailureMessage(actual interface{}) string { + return format.Message(actual, fmt.Sprintf("not to be assignable to the type: %T", matcher.Expected)) +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/assignable_to_type_of_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/assignable_to_type_of_matcher_test.go new file mode 100644 index 0000000000000..d2280e0506dcb --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/assignable_to_type_of_matcher_test.go @@ -0,0 +1,30 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . "github.com/onsi/gomega/matchers" +) + +var _ = Describe("AssignableToTypeOf", func() { + Context("When asserting assignability between types", func() { + It("should do the right thing", func() { + Ω(0).Should(BeAssignableToTypeOf(0)) + Ω(5).Should(BeAssignableToTypeOf(-1)) + Ω("foo").Should(BeAssignableToTypeOf("bar")) + Ω(struct{ Foo string }{}).Should(BeAssignableToTypeOf(struct{ Foo string }{})) + + Ω(0).ShouldNot(BeAssignableToTypeOf("bar")) + Ω(5).ShouldNot(BeAssignableToTypeOf(struct{ Foo string }{})) + Ω("foo").ShouldNot(BeAssignableToTypeOf(42)) + }) + }) + + Context("When asserting nil values", func() { + It("should error", func() { + success, err := (&AssignableToTypeOfMatcher{Expected: nil}).Match(nil) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_closed_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_closed_matcher.go new file mode 100644 index 0000000000000..c1b499597d34f --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_closed_matcher.go @@ -0,0 +1,45 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" + "reflect" +) + +type BeClosedMatcher struct { +} + +func (matcher *BeClosedMatcher) Match(actual interface{}) (success bool, err error) { + if !isChan(actual) { + return false, fmt.Errorf("BeClosed matcher expects a channel. Got:\n%s", format.Object(actual, 1)) + } + + channelType := reflect.TypeOf(actual) + channelValue := reflect.ValueOf(actual) + + if channelType.ChanDir() == reflect.SendDir { + return false, fmt.Errorf("BeClosed matcher cannot determine if a send-only channel is closed or open. 
Got:\n%s", format.Object(actual, 1)) + } + + winnerIndex, _, open := reflect.Select([]reflect.SelectCase{ + reflect.SelectCase{Dir: reflect.SelectRecv, Chan: channelValue}, + reflect.SelectCase{Dir: reflect.SelectDefault}, + }) + + var closed bool + if winnerIndex == 0 { + closed = !open + } else if winnerIndex == 1 { + closed = false + } + + return closed, nil +} + +func (matcher *BeClosedMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, "to be closed") +} + +func (matcher *BeClosedMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, "to be open") +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_closed_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_closed_matcher_test.go new file mode 100644 index 0000000000000..b2c40c9103996 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_closed_matcher_test.go @@ -0,0 +1,70 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . "github.com/onsi/gomega/matchers" +) + +var _ = Describe("BeClosedMatcher", func() { + Context("when passed a channel", func() { + It("should do the right thing", func() { + openChannel := make(chan bool) + Ω(openChannel).ShouldNot(BeClosed()) + + var openReaderChannel <-chan bool + openReaderChannel = openChannel + Ω(openReaderChannel).ShouldNot(BeClosed()) + + closedChannel := make(chan bool) + close(closedChannel) + + Ω(closedChannel).Should(BeClosed()) + + var closedReaderChannel <-chan bool + closedReaderChannel = closedChannel + Ω(closedReaderChannel).Should(BeClosed()) + }) + }) + + Context("when passed a send-only channel", func() { + It("should error", func() { + openChannel := make(chan bool) + var openWriterChannel chan<- bool + openWriterChannel = openChannel + + success, err := (&BeClosedMatcher{}).Match(openWriterChannel) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + closedChannel := make(chan bool) + close(closedChannel) + + var closedWriterChannel chan<- bool + closedWriterChannel = closedChannel + + success, err = (&BeClosedMatcher{}).Match(closedWriterChannel) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + }) + }) + + Context("when passed something else", func() { + It("should error", func() { + var nilChannel chan bool + + success, err := (&BeClosedMatcher{}).Match(nilChannel) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&BeClosedMatcher{}).Match(nil) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&BeClosedMatcher{}).Match(7) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_empty_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_empty_matcher.go new file mode 100644 index 0000000000000..55bdd7d15dfe1 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_empty_matcher.go @@ -0,0 +1,26 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" +) + +type BeEmptyMatcher struct { +} + +func (matcher *BeEmptyMatcher) Match(actual interface{}) (success bool, err error) { + length, ok := lengthOf(actual) + if !ok { + return false, fmt.Errorf("BeEmpty matcher expects a string/array/map/channel/slice. 
Got:\n%s", format.Object(actual, 1)) + } + + return length == 0, nil +} + +func (matcher *BeEmptyMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, "to be empty") +} + +func (matcher *BeEmptyMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, "not to be empty") +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_empty_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_empty_matcher_test.go new file mode 100644 index 0000000000000..541c1b951ec47 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_empty_matcher_test.go @@ -0,0 +1,52 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . "github.com/onsi/gomega/matchers" +) + +var _ = Describe("BeEmpty", func() { + Context("when passed a supported type", func() { + It("should do the right thing", func() { + Ω("").Should(BeEmpty()) + Ω(" ").ShouldNot(BeEmpty()) + + Ω([0]int{}).Should(BeEmpty()) + Ω([1]int{1}).ShouldNot(BeEmpty()) + + Ω([]int{}).Should(BeEmpty()) + Ω([]int{1}).ShouldNot(BeEmpty()) + + Ω(map[string]int{}).Should(BeEmpty()) + Ω(map[string]int{"a": 1}).ShouldNot(BeEmpty()) + + c := make(chan bool, 1) + Ω(c).Should(BeEmpty()) + c <- true + Ω(c).ShouldNot(BeEmpty()) + }) + }) + + Context("when passed a correctly typed nil", func() { + It("should be true", func() { + var nilSlice []int + Ω(nilSlice).Should(BeEmpty()) + + var nilMap map[int]string + Ω(nilMap).Should(BeEmpty()) + }) + }) + + Context("when passed an unsupported type", func() { + It("should error", func() { + success, err := (&BeEmptyMatcher{}).Match(0) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&BeEmptyMatcher{}).Match(nil) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_equivalent_to_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_equivalent_to_matcher.go new file mode 100644 index 0000000000000..32a0c3108a48f --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_equivalent_to_matcher.go @@ -0,0 +1,33 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" + "reflect" +) + +type BeEquivalentToMatcher struct { + Expected interface{} +} + +func (matcher *BeEquivalentToMatcher) Match(actual interface{}) (success bool, err error) { + if actual == nil && matcher.Expected == nil { + return false, fmt.Errorf("Both actual and expected must not be nil.") + } + + convertedActual := actual + + if actual != nil && matcher.Expected != nil && reflect.TypeOf(actual).ConvertibleTo(reflect.TypeOf(matcher.Expected)) { + convertedActual = reflect.ValueOf(actual).Convert(reflect.TypeOf(matcher.Expected)).Interface() + } + + return reflect.DeepEqual(convertedActual, matcher.Expected), nil +} + +func (matcher *BeEquivalentToMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, "to be equivalent to", matcher.Expected) +} + +func (matcher *BeEquivalentToMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, "not to be equivalent to", matcher.Expected) +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_equivalent_to_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_equivalent_to_matcher_test.go new file mode 100644 index 0000000000000..def5104fa757a --- 
/dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_equivalent_to_matcher_test.go @@ -0,0 +1,50 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . "github.com/onsi/gomega/matchers" +) + +var _ = Describe("BeEquivalentTo", func() { + Context("when asserting that nil is equivalent to nil", func() { + It("should error", func() { + success, err := (&BeEquivalentToMatcher{Expected: nil}).Match(nil) + + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) + + Context("When asserting on nil", func() { + It("should do the right thing", func() { + Ω("foo").ShouldNot(BeEquivalentTo(nil)) + Ω(nil).ShouldNot(BeEquivalentTo(3)) + Ω([]int{1, 2}).ShouldNot(BeEquivalentTo(nil)) + }) + }) + + Context("When asserting on type aliases", func() { + It("should the right thing", func() { + Ω(StringAlias("foo")).Should(BeEquivalentTo("foo")) + Ω("foo").Should(BeEquivalentTo(StringAlias("foo"))) + Ω(StringAlias("foo")).ShouldNot(BeEquivalentTo("bar")) + Ω("foo").ShouldNot(BeEquivalentTo(StringAlias("bar"))) + }) + }) + + Context("When asserting on numbers", func() { + It("should convert actual to expected and do the right thing", func() { + Ω(5).Should(BeEquivalentTo(5)) + Ω(5.0).Should(BeEquivalentTo(5.0)) + Ω(5).Should(BeEquivalentTo(5.0)) + + Ω(5).ShouldNot(BeEquivalentTo("5")) + Ω(5).ShouldNot(BeEquivalentTo(3)) + + //Here be dragons! + Ω(5.1).Should(BeEquivalentTo(5)) + Ω(5).ShouldNot(BeEquivalentTo(5.1)) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_false_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_false_matcher.go new file mode 100644 index 0000000000000..0b224cbbc6451 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_false_matcher.go @@ -0,0 +1,25 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" +) + +type BeFalseMatcher struct { +} + +func (matcher *BeFalseMatcher) Match(actual interface{}) (success bool, err error) { + if !isBool(actual) { + return false, fmt.Errorf("Expected a boolean. Got:\n%s", format.Object(actual, 1)) + } + + return actual == false, nil +} + +func (matcher *BeFalseMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, "to be false") +} + +func (matcher *BeFalseMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, "not to be false") +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_false_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_false_matcher_test.go new file mode 100644 index 0000000000000..3965a2c539216 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_false_matcher_test.go @@ -0,0 +1,20 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . 
"github.com/onsi/gomega/matchers" +) + +var _ = Describe("BeFalse", func() { + It("should handle true and false correctly", func() { + Ω(true).ShouldNot(BeFalse()) + Ω(false).Should(BeFalse()) + }) + + It("should only support booleans", func() { + success, err := (&BeFalseMatcher{}).Match("foo") + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_nil_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_nil_matcher.go new file mode 100644 index 0000000000000..7ee84fe1bcf1f --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_nil_matcher.go @@ -0,0 +1,18 @@ +package matchers + +import "github.com/onsi/gomega/format" + +type BeNilMatcher struct { +} + +func (matcher *BeNilMatcher) Match(actual interface{}) (success bool, err error) { + return isNil(actual), nil +} + +func (matcher *BeNilMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, "to be nil") +} + +func (matcher *BeNilMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, "not to be nil") +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_nil_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_nil_matcher_test.go new file mode 100644 index 0000000000000..753325363293b --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_nil_matcher_test.go @@ -0,0 +1,28 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" +) + +var _ = Describe("BeNil", func() { + It("should succeed when passed nil", func() { + Ω(nil).Should(BeNil()) + }) + + It("should succeed when passed a typed nil", func() { + var a []int + Ω(a).Should(BeNil()) + }) + + It("should succeed when passing nil pointer", func() { + var f *struct{} + Ω(f).Should(BeNil()) + }) + + It("should not succeed when not passed nil", func() { + Ω(0).ShouldNot(BeNil()) + Ω(false).ShouldNot(BeNil()) + Ω("").ShouldNot(BeNil()) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_numerically_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_numerically_matcher.go new file mode 100644 index 0000000000000..52f83fe3f3910 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_numerically_matcher.go @@ -0,0 +1,119 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" + "math" +) + +type BeNumericallyMatcher struct { + Comparator string + CompareTo []interface{} +} + +func (matcher *BeNumericallyMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, fmt.Sprintf("to be %s", matcher.Comparator), matcher.CompareTo[0]) +} + +func (matcher *BeNumericallyMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, fmt.Sprintf("not to be %s", matcher.Comparator), matcher.CompareTo[0]) +} + +func (matcher *BeNumericallyMatcher) Match(actual interface{}) (success bool, err error) { + if len(matcher.CompareTo) == 0 || len(matcher.CompareTo) > 2 { + return false, fmt.Errorf("BeNumerically requires 1 or 2 CompareTo arguments. Got:\n%s", format.Object(matcher.CompareTo, 1)) + } + if !isNumber(actual) { + return false, fmt.Errorf("Expected a number. Got:\n%s", format.Object(actual, 1)) + } + if !isNumber(matcher.CompareTo[0]) { + return false, fmt.Errorf("Expected a number. 
Got:\n%s", format.Object(matcher.CompareTo[0], 1)) + } + if len(matcher.CompareTo) == 2 && !isNumber(matcher.CompareTo[1]) { + return false, fmt.Errorf("Expected a number. Got:\n%s", format.Object(matcher.CompareTo[0], 1)) + } + + switch matcher.Comparator { + case "==", "~", ">", ">=", "<", "<=": + default: + return false, fmt.Errorf("Unknown comparator: %s", matcher.Comparator) + } + + if isFloat(actual) || isFloat(matcher.CompareTo[0]) { + var secondOperand float64 = 1e-8 + if len(matcher.CompareTo) == 2 { + secondOperand = toFloat(matcher.CompareTo[1]) + } + success = matcher.matchFloats(toFloat(actual), toFloat(matcher.CompareTo[0]), secondOperand) + } else if isInteger(actual) { + var secondOperand int64 = 0 + if len(matcher.CompareTo) == 2 { + secondOperand = toInteger(matcher.CompareTo[1]) + } + success = matcher.matchIntegers(toInteger(actual), toInteger(matcher.CompareTo[0]), secondOperand) + } else if isUnsignedInteger(actual) { + var secondOperand uint64 = 0 + if len(matcher.CompareTo) == 2 { + secondOperand = toUnsignedInteger(matcher.CompareTo[1]) + } + success = matcher.matchUnsignedIntegers(toUnsignedInteger(actual), toUnsignedInteger(matcher.CompareTo[0]), secondOperand) + } else { + return false, fmt.Errorf("Failed to compare:\n%s\n%s:\n%s", format.Object(actual, 1), matcher.Comparator, format.Object(matcher.CompareTo[0], 1)) + } + + return success, nil +} + +func (matcher *BeNumericallyMatcher) matchIntegers(actual, compareTo, threshold int64) (success bool) { + switch matcher.Comparator { + case "==", "~": + diff := actual - compareTo + return -threshold <= diff && diff <= threshold + case ">": + return (actual > compareTo) + case ">=": + return (actual >= compareTo) + case "<": + return (actual < compareTo) + case "<=": + return (actual <= compareTo) + } + return false +} + +func (matcher *BeNumericallyMatcher) matchUnsignedIntegers(actual, compareTo, threshold uint64) (success bool) { + switch matcher.Comparator { + case "==", "~": + if actual < compareTo { + actual, compareTo = compareTo, actual + } + return actual-compareTo <= threshold + case ">": + return (actual > compareTo) + case ">=": + return (actual >= compareTo) + case "<": + return (actual < compareTo) + case "<=": + return (actual <= compareTo) + } + return false +} + +func (matcher *BeNumericallyMatcher) matchFloats(actual, compareTo, threshold float64) (success bool) { + switch matcher.Comparator { + case "~": + return math.Abs(actual-compareTo) <= threshold + case "==": + return (actual == compareTo) + case ">": + return (actual > compareTo) + case ">=": + return (actual >= compareTo) + case "<": + return (actual < compareTo) + case "<=": + return (actual <= compareTo) + } + return false +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_numerically_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_numerically_matcher_test.go new file mode 100644 index 0000000000000..43fdb1fe0b6a2 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_numerically_matcher_test.go @@ -0,0 +1,148 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . 
"github.com/onsi/gomega/matchers" +) + +var _ = Describe("BeNumerically", func() { + Context("when passed a number", func() { + It("should support ==", func() { + Ω(uint32(5)).Should(BeNumerically("==", 5)) + Ω(float64(5.0)).Should(BeNumerically("==", 5)) + Ω(int8(5)).Should(BeNumerically("==", 5)) + }) + + It("should not have false positives", func() { + Ω(5.1).ShouldNot(BeNumerically("==", 5)) + Ω(5).ShouldNot(BeNumerically("==", 5.1)) + }) + + It("should support >", func() { + Ω(uint32(5)).Should(BeNumerically(">", 4)) + Ω(float64(5.0)).Should(BeNumerically(">", 4.9)) + Ω(int8(5)).Should(BeNumerically(">", 4)) + + Ω(uint32(5)).ShouldNot(BeNumerically(">", 5)) + Ω(float64(5.0)).ShouldNot(BeNumerically(">", 5.0)) + Ω(int8(5)).ShouldNot(BeNumerically(">", 5)) + }) + + It("should support <", func() { + Ω(uint32(5)).Should(BeNumerically("<", 6)) + Ω(float64(5.0)).Should(BeNumerically("<", 5.1)) + Ω(int8(5)).Should(BeNumerically("<", 6)) + + Ω(uint32(5)).ShouldNot(BeNumerically("<", 5)) + Ω(float64(5.0)).ShouldNot(BeNumerically("<", 5.0)) + Ω(int8(5)).ShouldNot(BeNumerically("<", 5)) + }) + + It("should support >=", func() { + Ω(uint32(5)).Should(BeNumerically(">=", 4)) + Ω(float64(5.0)).Should(BeNumerically(">=", 4.9)) + Ω(int8(5)).Should(BeNumerically(">=", 4)) + + Ω(uint32(5)).Should(BeNumerically(">=", 5)) + Ω(float64(5.0)).Should(BeNumerically(">=", 5.0)) + Ω(int8(5)).Should(BeNumerically(">=", 5)) + + Ω(uint32(5)).ShouldNot(BeNumerically(">=", 6)) + Ω(float64(5.0)).ShouldNot(BeNumerically(">=", 5.1)) + Ω(int8(5)).ShouldNot(BeNumerically(">=", 6)) + }) + + It("should support <=", func() { + Ω(uint32(5)).Should(BeNumerically("<=", 6)) + Ω(float64(5.0)).Should(BeNumerically("<=", 5.1)) + Ω(int8(5)).Should(BeNumerically("<=", 6)) + + Ω(uint32(5)).Should(BeNumerically("<=", 5)) + Ω(float64(5.0)).Should(BeNumerically("<=", 5.0)) + Ω(int8(5)).Should(BeNumerically("<=", 5)) + + Ω(uint32(5)).ShouldNot(BeNumerically("<=", 4)) + Ω(float64(5.0)).ShouldNot(BeNumerically("<=", 4.9)) + Ω(int8(5)).Should(BeNumerically("<=", 5)) + }) + + Context("when passed ~", func() { + Context("when passed a float", func() { + Context("and there is no precision parameter", func() { + It("should default to 1e-8", func() { + Ω(5.00000001).Should(BeNumerically("~", 5.00000002)) + Ω(5.00000001).ShouldNot(BeNumerically("~", 5.0000001)) + }) + }) + + Context("and there is a precision parameter", func() { + It("should use the precision parameter", func() { + Ω(5.1).Should(BeNumerically("~", 5.19, 0.1)) + Ω(5.1).Should(BeNumerically("~", 5.01, 0.1)) + Ω(5.1).ShouldNot(BeNumerically("~", 5.22, 0.1)) + Ω(5.1).ShouldNot(BeNumerically("~", 4.98, 0.1)) + }) + }) + }) + + Context("when passed an int/uint", func() { + Context("and there is no precision parameter", func() { + It("should just do strict equality", func() { + Ω(5).Should(BeNumerically("~", 5)) + Ω(5).ShouldNot(BeNumerically("~", 6)) + Ω(uint(5)).ShouldNot(BeNumerically("~", 6)) + }) + }) + + Context("and there is a precision parameter", func() { + It("should use precision paramter", func() { + Ω(5).Should(BeNumerically("~", 6, 2)) + Ω(5).ShouldNot(BeNumerically("~", 8, 2)) + Ω(uint(5)).Should(BeNumerically("~", 6, 1)) + }) + }) + }) + }) + }) + + Context("when passed a non-number", func() { + It("should error", func() { + success, err := (&BeNumericallyMatcher{Comparator: "==", CompareTo: []interface{}{5}}).Match("foo") + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&BeNumericallyMatcher{Comparator: "=="}).Match(5) + 
Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&BeNumericallyMatcher{Comparator: "~", CompareTo: []interface{}{3.0, "foo"}}).Match(5.0) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&BeNumericallyMatcher{Comparator: "==", CompareTo: []interface{}{"bar"}}).Match(5) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&BeNumericallyMatcher{Comparator: "==", CompareTo: []interface{}{"bar"}}).Match("foo") + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&BeNumericallyMatcher{Comparator: "==", CompareTo: []interface{}{nil}}).Match(0) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&BeNumericallyMatcher{Comparator: "==", CompareTo: []interface{}{0}}).Match(nil) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) + + Context("when passed an unsupported comparator", func() { + It("should error", func() { + success, err := (&BeNumericallyMatcher{Comparator: "!=", CompareTo: []interface{}{5}}).Match(4) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_sent_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_sent_matcher.go new file mode 100644 index 0000000000000..d7c32233ec429 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_sent_matcher.go @@ -0,0 +1,71 @@ +package matchers + +import ( + "fmt" + "reflect" + + "github.com/onsi/gomega/format" +) + +type BeSentMatcher struct { + Arg interface{} + channelClosed bool +} + +func (matcher *BeSentMatcher) Match(actual interface{}) (success bool, err error) { + if !isChan(actual) { + return false, fmt.Errorf("BeSent expects a channel. Got:\n%s", format.Object(actual, 1)) + } + + channelType := reflect.TypeOf(actual) + channelValue := reflect.ValueOf(actual) + + if channelType.ChanDir() == reflect.RecvDir { + return false, fmt.Errorf("BeSent matcher cannot be passed a receive-only channel. 
Got:\n%s", format.Object(actual, 1)) + } + + argType := reflect.TypeOf(matcher.Arg) + assignable := argType.AssignableTo(channelType.Elem()) + + if !assignable { + return false, fmt.Errorf("Cannot pass:\n%s to the channel:\n%s\nThe types don't match.", format.Object(matcher.Arg, 1), format.Object(actual, 1)) + } + + argValue := reflect.ValueOf(matcher.Arg) + + defer func() { + if e := recover(); e != nil { + success = false + err = fmt.Errorf("Cannot send to a closed channel") + matcher.channelClosed = true + } + }() + + winnerIndex, _, _ := reflect.Select([]reflect.SelectCase{ + reflect.SelectCase{Dir: reflect.SelectSend, Chan: channelValue, Send: argValue}, + reflect.SelectCase{Dir: reflect.SelectDefault}, + }) + + var didSend bool + if winnerIndex == 0 { + didSend = true + } + + return didSend, nil +} + +func (matcher *BeSentMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, "to send:", matcher.Arg) +} + +func (matcher *BeSentMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, "not to send:", matcher.Arg) +} + +func (matcher *BeSentMatcher) MatchMayChangeInTheFuture(actual interface{}) bool { + if !isChan(actual) { + return false + } + + return !matcher.channelClosed +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_sent_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_sent_matcher_test.go new file mode 100644 index 0000000000000..381c2b4028954 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_sent_matcher_test.go @@ -0,0 +1,106 @@ +package matchers_test + +import ( + "time" + . "github.com/onsi/gomega/matchers" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" +) + +var _ = Describe("BeSent", func() { + Context("when passed a channel and a matching type", func() { + Context("when the channel is ready to receive", func() { + It("should succeed and send the value down the channel", func() { + c := make(chan string) + d := make(chan string) + go func() { + val := <-c + d <- val + }() + + time.Sleep(10 * time.Millisecond) + + Ω(c).Should(BeSent("foo")) + Eventually(d).Should(Receive(Equal("foo"))) + }) + + It("should succeed (with a buffered channel)", func() { + c := make(chan string, 1) + Ω(c).Should(BeSent("foo")) + Ω(<-c).Should(Equal("foo")) + }) + }) + + Context("when the channel is not ready to receive", func() { + It("should fail and not send down the channel", func() { + c := make(chan string) + Ω(c).ShouldNot(BeSent("foo")) + Consistently(c).ShouldNot(Receive()) + }) + }) + + Context("when the channel is eventually ready to receive", func() { + It("should succeed", func() { + c := make(chan string) + d := make(chan string) + go func() { + time.Sleep(30 * time.Millisecond) + val := <-c + d <- val + }() + + Eventually(c).Should(BeSent("foo")) + Eventually(d).Should(Receive(Equal("foo"))) + }) + }) + + Context("when the channel is closed", func() { + It("should error", func() { + c := make(chan string) + close(c) + success, err := (&BeSentMatcher{Arg: "foo"}).Match(c) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + + It("should short-circuit Eventually", func() { + c := make(chan string) + close(c) + + t := time.Now() + failures := InterceptGomegaFailures(func() { + Eventually(c, 10.0).Should(BeSent("foo")) + }) + Ω(failures).Should(HaveLen(1)) + Ω(time.Since(t)).Should(BeNumerically("<", time.Second)) + }) + }) + }) + + Context("when passed a channel and a non-matching type", func() 
{ + It("should error", func() { + success, err := (&BeSentMatcher{Arg: "foo"}).Match(make(chan int, 1)) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) + + Context("when passed a receive-only channel", func() { + It("should error", func() { + var c <-chan string + c = make(chan string, 1) + success, err := (&BeSentMatcher{Arg: "foo"}).Match(c) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) + + Context("when passed a nonchannel", func() { + It("should error", func() { + success, err := (&BeSentMatcher{Arg: "foo"}).Match("bar") + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_temporally_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_temporally_matcher.go new file mode 100644 index 0000000000000..abda4eb1e7b69 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_temporally_matcher.go @@ -0,0 +1,65 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" + "time" +) + +type BeTemporallyMatcher struct { + Comparator string + CompareTo time.Time + Threshold []time.Duration +} + +func (matcher *BeTemporallyMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, fmt.Sprintf("to be %s", matcher.Comparator), matcher.CompareTo) +} + +func (matcher *BeTemporallyMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, fmt.Sprintf("not to be %s", matcher.Comparator), matcher.CompareTo) +} + +func (matcher *BeTemporallyMatcher) Match(actual interface{}) (bool, error) { + // predicate to test for time.Time type + isTime := func(t interface{}) bool { + _, ok := t.(time.Time) + return ok + } + + if !isTime(actual) { + return false, fmt.Errorf("Expected a time.Time. Got:\n%s", format.Object(actual, 1)) + } + + switch matcher.Comparator { + case "==", "~", ">", ">=", "<", "<=": + default: + return false, fmt.Errorf("Unknown comparator: %s", matcher.Comparator) + } + + var threshold = time.Millisecond + if len(matcher.Threshold) == 1 { + threshold = matcher.Threshold[0] + } + + return matcher.matchTimes(actual.(time.Time), matcher.CompareTo, threshold), nil +} + +func (matcher *BeTemporallyMatcher) matchTimes(actual, compareTo time.Time, threshold time.Duration) (success bool) { + switch matcher.Comparator { + case "==": + return actual.Equal(compareTo) + case "~": + diff := actual.Sub(compareTo) + return -threshold <= diff && diff <= threshold + case ">": + return actual.After(compareTo) + case ">=": + return !actual.Before(compareTo) + case "<": + return actual.Before(compareTo) + case "<=": + return !actual.After(compareTo) + } + return false +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_temporally_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_temporally_matcher_test.go new file mode 100644 index 0000000000000..feb33e5dc136f --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_temporally_matcher_test.go @@ -0,0 +1,98 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . 
"github.com/onsi/gomega/matchers" + "time" +) + +var _ = Describe("BeTemporally", func() { + + var t0, t1, t2 time.Time + BeforeEach(func() { + t0 = time.Now() + t1 = t0.Add(time.Second) + t2 = t0.Add(-time.Second) + }) + + Context("When comparing times", func() { + + It("should support ==", func() { + Ω(t0).Should(BeTemporally("==", t0)) + Ω(t1).ShouldNot(BeTemporally("==", t0)) + Ω(t0).ShouldNot(BeTemporally("==", t1)) + Ω(t0).ShouldNot(BeTemporally("==", time.Time{})) + }) + + It("should support >", func() { + Ω(t0).Should(BeTemporally(">", t2)) + Ω(t0).ShouldNot(BeTemporally(">", t0)) + Ω(t2).ShouldNot(BeTemporally(">", t0)) + }) + + It("should support <", func() { + Ω(t0).Should(BeTemporally("<", t1)) + Ω(t0).ShouldNot(BeTemporally("<", t0)) + Ω(t1).ShouldNot(BeTemporally("<", t0)) + }) + + It("should support >=", func() { + Ω(t0).Should(BeTemporally(">=", t2)) + Ω(t0).Should(BeTemporally(">=", t0)) + Ω(t0).ShouldNot(BeTemporally(">=", t1)) + }) + + It("should support <=", func() { + Ω(t0).Should(BeTemporally("<=", t1)) + Ω(t0).Should(BeTemporally("<=", t0)) + Ω(t0).ShouldNot(BeTemporally("<=", t2)) + }) + + Context("when passed ~", func() { + Context("and there is no precision parameter", func() { + BeforeEach(func() { + t1 = t0.Add(time.Millisecond / 2) + t2 = t0.Add(-2 * time.Millisecond) + }) + It("should approximate", func() { + Ω(t0).Should(BeTemporally("~", t0)) + Ω(t0).Should(BeTemporally("~", t1)) + Ω(t0).ShouldNot(BeTemporally("~", t2)) + }) + }) + + Context("and there is a precision parameter", func() { + BeforeEach(func() { + t2 = t0.Add(3 * time.Second) + }) + It("should use precision paramter", func() { + d := 2 * time.Second + Ω(t0).Should(BeTemporally("~", t0, d)) + Ω(t0).Should(BeTemporally("~", t1, d)) + Ω(t0).ShouldNot(BeTemporally("~", t2, d)) + }) + }) + }) + }) + + Context("when passed a non-time", func() { + It("should error", func() { + success, err := (&BeTemporallyMatcher{Comparator: "==", CompareTo: t0}).Match("foo") + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&BeTemporallyMatcher{Comparator: "=="}).Match(nil) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) + + Context("when passed an unsupported comparator", func() { + It("should error", func() { + success, err := (&BeTemporallyMatcher{Comparator: "!=", CompareTo: t0}).Match(t2) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_true_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_true_matcher.go new file mode 100644 index 0000000000000..1275e5fc9d804 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_true_matcher.go @@ -0,0 +1,25 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" +) + +type BeTrueMatcher struct { +} + +func (matcher *BeTrueMatcher) Match(actual interface{}) (success bool, err error) { + if !isBool(actual) { + return false, fmt.Errorf("Expected a boolean. 
Got:\n%s", format.Object(actual, 1)) + } + + return actual.(bool), nil +} + +func (matcher *BeTrueMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, "to be true") +} + +func (matcher *BeTrueMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, "not to be true") +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_true_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_true_matcher_test.go new file mode 100644 index 0000000000000..ca32e56beaff2 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_true_matcher_test.go @@ -0,0 +1,20 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . "github.com/onsi/gomega/matchers" +) + +var _ = Describe("BeTrue", func() { + It("should handle true and false correctly", func() { + Ω(true).Should(BeTrue()) + Ω(false).ShouldNot(BeTrue()) + }) + + It("should only support booleans", func() { + success, err := (&BeTrueMatcher{}).Match("foo") + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_zero_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_zero_matcher.go new file mode 100644 index 0000000000000..b39c9144be7d7 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_zero_matcher.go @@ -0,0 +1,27 @@ +package matchers + +import ( + "github.com/onsi/gomega/format" + "reflect" +) + +type BeZeroMatcher struct { +} + +func (matcher *BeZeroMatcher) Match(actual interface{}) (success bool, err error) { + if actual == nil { + return true, nil + } + zeroValue := reflect.Zero(reflect.TypeOf(actual)).Interface() + + return reflect.DeepEqual(zeroValue, actual), nil + +} + +func (matcher *BeZeroMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, "to be zero-valued") +} + +func (matcher *BeZeroMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, "not to be zero-valued") +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_zero_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_zero_matcher_test.go new file mode 100644 index 0000000000000..8ec3643c28a64 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/be_zero_matcher_test.go @@ -0,0 +1,30 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("BeZero", func() { + It("should succeed if the passed in object is the zero value for its type", func() { + Ω(nil).Should(BeZero()) + + Ω("").Should(BeZero()) + Ω(" ").ShouldNot(BeZero()) + + Ω(0).Should(BeZero()) + Ω(1).ShouldNot(BeZero()) + + Ω(0.0).Should(BeZero()) + Ω(0.1).ShouldNot(BeZero()) + + // Ω([]int{}).Should(BeZero()) + Ω([]int{1}).ShouldNot(BeZero()) + + // Ω(map[string]int{}).Should(BeZero()) + Ω(map[string]int{"a": 1}).ShouldNot(BeZero()) + + Ω(myCustomType{}).Should(BeZero()) + Ω(myCustomType{s: "a"}).ShouldNot(BeZero()) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/consist_of.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/consist_of.go new file mode 100644 index 0000000000000..7b0e0886842a1 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/consist_of.go @@ -0,0 +1,80 @@ +package matchers + +import ( + "fmt" + "reflect" + + "github.com/onsi/gomega/format" + "github.com/onsi/gomega/matchers/support/goraph/bipartitegraph" +) + +type ConsistOfMatcher struct { + Elements []interface{} +} + +func (matcher *ConsistOfMatcher) Match(actual interface{}) (success bool, err error) { + if !isArrayOrSlice(actual) && !isMap(actual) { + return false, fmt.Errorf("ConsistOf matcher expects an array/slice/map. Got:\n%s", format.Object(actual, 1)) + } + + elements := matcher.Elements + if len(matcher.Elements) == 1 && isArrayOrSlice(matcher.Elements[0]) { + elements = []interface{}{} + value := reflect.ValueOf(matcher.Elements[0]) + for i := 0; i < value.Len(); i++ { + elements = append(elements, value.Index(i).Interface()) + } + } + + matchers := []interface{}{} + for _, element := range elements { + matcher, isMatcher := element.(omegaMatcher) + if !isMatcher { + matcher = &EqualMatcher{Expected: element} + } + matchers = append(matchers, matcher) + } + + values := matcher.valuesOf(actual) + + if len(values) != len(matchers) { + return false, nil + } + + neighbours := func(v, m interface{}) (bool, error) { + match, err := m.(omegaMatcher).Match(v) + return match && err == nil, nil + } + + bipartiteGraph, err := bipartitegraph.NewBipartiteGraph(values, matchers, neighbours) + if err != nil { + return false, err + } + + return len(bipartiteGraph.LargestMatching()) == len(values), nil +} + +func (matcher *ConsistOfMatcher) valuesOf(actual interface{}) []interface{} { + value := reflect.ValueOf(actual) + values := []interface{}{} + if isMap(actual) { + keys := value.MapKeys() + for i := 0; i < value.Len(); i++ { + values = append(values, value.MapIndex(keys[i]).Interface()) + } + } else { + for i := 0; i < value.Len(); i++ { + values = append(values, value.Index(i).Interface()) + } + } + + return values +} + +func (matcher *ConsistOfMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, "to consist of", matcher.Elements) +} + +func (matcher *ConsistOfMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, "not to consist of", matcher.Elements) +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/consist_of_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/consist_of_test.go new file mode 100644 index 0000000000000..0b230e390b2e3 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/consist_of_test.go @@ -0,0 +1,75 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" +) + +var _ = Describe("ConsistOf", func() { + Context("with a slice", func() { + It("should do the right thing", func() { + Ω([]string{"foo", "bar", "baz"}).Should(ConsistOf("foo", "bar", "baz")) + Ω([]string{"foo", "bar", "baz"}).Should(ConsistOf("foo", "bar", "baz")) + Ω([]string{"foo", "bar", "baz"}).Should(ConsistOf("baz", "bar", "foo")) + Ω([]string{"foo", "bar", "baz"}).ShouldNot(ConsistOf("baz", "bar", "foo", "foo")) + Ω([]string{"foo", "bar", "baz"}).ShouldNot(ConsistOf("baz", "foo")) + }) + }) + + Context("with an array", func() { + It("should do the right thing", func() { + Ω([3]string{"foo", "bar", "baz"}).Should(ConsistOf("foo", "bar", "baz")) + Ω([3]string{"foo", "bar", "baz"}).Should(ConsistOf("baz", "bar", "foo")) + Ω([3]string{"foo", "bar", "baz"}).ShouldNot(ConsistOf("baz", "bar", "foo", "foo")) + Ω([3]string{"foo", "bar", "baz"}).ShouldNot(ConsistOf("baz", "foo")) + }) + }) + + Context("with a map", func() { + It("should apply to the values", func() { + Ω(map[int]string{1: "foo", 2: "bar", 3: "baz"}).Should(ConsistOf("foo", "bar", "baz")) + Ω(map[int]string{1: "foo", 2: "bar", 3: "baz"}).Should(ConsistOf("baz", "bar", "foo")) + Ω(map[int]string{1: "foo", 2: "bar", 3: "baz"}).ShouldNot(ConsistOf("baz", "bar", "foo", "foo")) + Ω(map[int]string{1: "foo", 2: "bar", 3: "baz"}).ShouldNot(ConsistOf("baz", "foo")) + }) + + }) + + Context("with anything else", func() { + It("should error", func() { + failures := InterceptGomegaFailures(func() { + Ω("foo").Should(ConsistOf("f", "o", "o")) + }) + + Ω(failures).Should(HaveLen(1)) + }) + }) + + Context("when passed matchers", func() { + It("should pass if the matchers pass", func() { + Ω([]string{"foo", "bar", "baz"}).Should(ConsistOf("foo", MatchRegexp("^ba"), "baz")) + Ω([]string{"foo", "bar", "baz"}).ShouldNot(ConsistOf("foo", MatchRegexp("^ba"))) + Ω([]string{"foo", "bar", "baz"}).ShouldNot(ConsistOf("foo", MatchRegexp("^ba"), MatchRegexp("foo"))) + Ω([]string{"foo", "bar", "baz"}).Should(ConsistOf("foo", MatchRegexp("^ba"), MatchRegexp("^ba"))) + Ω([]string{"foo", "bar", "baz"}).ShouldNot(ConsistOf("foo", MatchRegexp("^ba"), MatchRegexp("turducken"))) + }) + + It("should not depend on the order of the matchers", func() { + Ω([][]int{[]int{1, 2}, []int{2}}).Should(ConsistOf(ContainElement(1), ContainElement(2))) + Ω([][]int{[]int{1, 2}, []int{2}}).Should(ConsistOf(ContainElement(2), ContainElement(1))) + }) + + Context("when a matcher errors", func() { + It("should soldier on", func() { + Ω([]string{"foo", "bar", "baz"}).ShouldNot(ConsistOf(BeFalse(), "foo", "bar")) + Ω([]interface{}{"foo", "bar", false}).Should(ConsistOf(BeFalse(), ContainSubstring("foo"), "bar")) + }) + }) + }) + + Context("when passed exactly one argument, and that argument is a slice", func() { + It("should match against the elements of that argument", func() { + Ω([]string{"foo", "bar", "baz"}).Should(ConsistOf([]string{"foo", "bar", "baz"})) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/contain_element_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/contain_element_matcher.go new file mode 100644 index 0000000000000..014a20ffb62ba --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/contain_element_matcher.go @@ -0,0 +1,53 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" + "reflect" +) + +type ContainElementMatcher struct { + Element interface{} +} + +func (matcher *ContainElementMatcher) Match(actual interface{}) (success bool, err 
error) { + if !isArrayOrSlice(actual) && !isMap(actual) { + return false, fmt.Errorf("ContainElement matcher expects an array/slice/map. Got:\n%s", format.Object(actual, 1)) + } + + elemMatcher, elementIsMatcher := matcher.Element.(omegaMatcher) + if !elementIsMatcher { + elemMatcher = &EqualMatcher{Expected: matcher.Element} + } + + value := reflect.ValueOf(actual) + var keys []reflect.Value + if isMap(actual) { + keys = value.MapKeys() + } + for i := 0; i < value.Len(); i++ { + var success bool + var err error + if isMap(actual) { + success, err = elemMatcher.Match(value.MapIndex(keys[i]).Interface()) + } else { + success, err = elemMatcher.Match(value.Index(i).Interface()) + } + if err != nil { + return false, fmt.Errorf("ContainElement's element matcher failed with:\n\t%s", err.Error()) + } + if success { + return true, nil + } + } + + return false, nil +} + +func (matcher *ContainElementMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, "to contain element matching", matcher.Element) +} + +func (matcher *ContainElementMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, "not to contain element matching", matcher.Element) +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/contain_element_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/contain_element_matcher_test.go new file mode 100644 index 0000000000000..4d29eeb5bf2a1 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/contain_element_matcher_test.go @@ -0,0 +1,72 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . "github.com/onsi/gomega/matchers" +) + +var _ = Describe("ContainElement", func() { + Context("when passed a supported type", func() { + Context("and expecting a non-matcher", func() { + It("should do the right thing", func() { + Ω([2]int{1, 2}).Should(ContainElement(2)) + Ω([2]int{1, 2}).ShouldNot(ContainElement(3)) + + Ω([]int{1, 2}).Should(ContainElement(2)) + Ω([]int{1, 2}).ShouldNot(ContainElement(3)) + + Ω(map[string]int{"foo": 1, "bar": 2}).Should(ContainElement(2)) + Ω(map[int]int{3: 1, 4: 2}).ShouldNot(ContainElement(3)) + + arr := make([]myCustomType, 2) + arr[0] = myCustomType{s: "foo", n: 3, f: 2.0, arr: []string{"a", "b"}} + arr[1] = myCustomType{s: "foo", n: 3, f: 2.0, arr: []string{"a", "c"}} + Ω(arr).Should(ContainElement(myCustomType{s: "foo", n: 3, f: 2.0, arr: []string{"a", "b"}})) + Ω(arr).ShouldNot(ContainElement(myCustomType{s: "foo", n: 3, f: 2.0, arr: []string{"b", "c"}})) + }) + }) + + Context("and expecting a matcher", func() { + It("should pass each element through the matcher", func() { + Ω([]int{1, 2, 3}).Should(ContainElement(BeNumerically(">=", 3))) + Ω([]int{1, 2, 3}).ShouldNot(ContainElement(BeNumerically(">", 3))) + Ω(map[string]int{"foo": 1, "bar": 2}).Should(ContainElement(BeNumerically(">=", 2))) + Ω(map[string]int{"foo": 1, "bar": 2}).ShouldNot(ContainElement(BeNumerically(">", 2))) + }) + + It("should fail if the matcher ever fails", func() { + actual := []interface{}{1, 2, "3", 4} + success, err := (&ContainElementMatcher{Element: BeNumerically(">=", 3)}).Match(actual) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) + }) + + Context("when passed a correctly typed nil", func() { + It("should operate succesfully on the passed in value", func() { + var nilSlice []int + Ω(nilSlice).ShouldNot(ContainElement(1)) + + var nilMap map[int]string + 
Ω(nilMap).ShouldNot(ContainElement("foo")) + }) + }) + + Context("when passed an unsupported type", func() { + It("should error", func() { + success, err := (&ContainElementMatcher{Element: 0}).Match(0) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&ContainElementMatcher{Element: 0}).Match("abc") + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&ContainElementMatcher{Element: 0}).Match(nil) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/contain_substring_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/contain_substring_matcher.go new file mode 100644 index 0000000000000..2e7608921ac4f --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/contain_substring_matcher.go @@ -0,0 +1,37 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" + "strings" +) + +type ContainSubstringMatcher struct { + Substr string + Args []interface{} +} + +func (matcher *ContainSubstringMatcher) Match(actual interface{}) (success bool, err error) { + actualString, ok := toString(actual) + if !ok { + return false, fmt.Errorf("ContainSubstring matcher requires a string or stringer. Got:\n%s", format.Object(actual, 1)) + } + + return strings.Contains(actualString, matcher.stringToMatch()), nil +} + +func (matcher *ContainSubstringMatcher) stringToMatch() string { + stringToMatch := matcher.Substr + if len(matcher.Args) > 0 { + stringToMatch = fmt.Sprintf(matcher.Substr, matcher.Args...) + } + return stringToMatch +} + +func (matcher *ContainSubstringMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, "to contain substring", matcher.stringToMatch()) +} + +func (matcher *ContainSubstringMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, "not to contain substring", matcher.stringToMatch()) +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/contain_substring_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/contain_substring_matcher_test.go new file mode 100644 index 0000000000000..6935168e5c035 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/contain_substring_matcher_test.go @@ -0,0 +1,36 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . 
"github.com/onsi/gomega/matchers" +) + +var _ = Describe("ContainSubstringMatcher", func() { + Context("when actual is a string", func() { + It("should match against the string", func() { + Ω("Marvelous").Should(ContainSubstring("rve")) + Ω("Marvelous").ShouldNot(ContainSubstring("boo")) + }) + }) + + Context("when the matcher is called with multiple arguments", func() { + It("should pass the string and arguments to sprintf", func() { + Ω("Marvelous3").Should(ContainSubstring("velous%d", 3)) + }) + }) + + Context("when actual is a stringer", func() { + It("should call the stringer and match agains the returned string", func() { + Ω(&myStringer{a: "Abc3"}).Should(ContainSubstring("bc3")) + }) + }) + + Context("when actual is neither a string nor a stringer", func() { + It("should error", func() { + success, err := (&ContainSubstringMatcher{Substr: "2"}).Match(2) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/equal_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/equal_matcher.go new file mode 100644 index 0000000000000..9f8f80a897326 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/equal_matcher.go @@ -0,0 +1,26 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" + "reflect" +) + +type EqualMatcher struct { + Expected interface{} +} + +func (matcher *EqualMatcher) Match(actual interface{}) (success bool, err error) { + if actual == nil && matcher.Expected == nil { + return false, fmt.Errorf("Refusing to compare to .") + } + return reflect.DeepEqual(actual, matcher.Expected), nil +} + +func (matcher *EqualMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, "to equal", matcher.Expected) +} + +func (matcher *EqualMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, "not to equal", matcher.Expected) +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/equal_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/equal_matcher_test.go new file mode 100644 index 0000000000000..ef0d137dda46e --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/equal_matcher_test.go @@ -0,0 +1,44 @@ +package matchers_test + +import ( + "errors" + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . 
"github.com/onsi/gomega/matchers" +) + +var _ = Describe("Equal", func() { + Context("when asserting that nil equals nil", func() { + It("should error", func() { + success, err := (&EqualMatcher{Expected: nil}).Match(nil) + + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) + + Context("When asserting equality between objects", func() { + It("should do the right thing", func() { + Ω(5).Should(Equal(5)) + Ω(5.0).Should(Equal(5.0)) + + Ω(5).ShouldNot(Equal("5")) + Ω(5).ShouldNot(Equal(5.0)) + Ω(5).ShouldNot(Equal(3)) + + Ω("5").Should(Equal("5")) + Ω([]int{1, 2}).Should(Equal([]int{1, 2})) + Ω([]int{1, 2}).ShouldNot(Equal([]int{2, 1})) + Ω(map[string]string{"a": "b", "c": "d"}).Should(Equal(map[string]string{"a": "b", "c": "d"})) + Ω(map[string]string{"a": "b", "c": "d"}).ShouldNot(Equal(map[string]string{"a": "b", "c": "e"})) + Ω(errors.New("foo")).Should(Equal(errors.New("foo"))) + Ω(errors.New("foo")).ShouldNot(Equal(errors.New("bar"))) + + Ω(myCustomType{s: "foo", n: 3, f: 2.0, arr: []string{"a", "b"}}).Should(Equal(myCustomType{s: "foo", n: 3, f: 2.0, arr: []string{"a", "b"}})) + Ω(myCustomType{s: "foo", n: 3, f: 2.0, arr: []string{"a", "b"}}).ShouldNot(Equal(myCustomType{s: "bar", n: 3, f: 2.0, arr: []string{"a", "b"}})) + Ω(myCustomType{s: "foo", n: 3, f: 2.0, arr: []string{"a", "b"}}).ShouldNot(Equal(myCustomType{s: "foo", n: 2, f: 2.0, arr: []string{"a", "b"}})) + Ω(myCustomType{s: "foo", n: 3, f: 2.0, arr: []string{"a", "b"}}).ShouldNot(Equal(myCustomType{s: "foo", n: 3, f: 3.0, arr: []string{"a", "b"}})) + Ω(myCustomType{s: "foo", n: 3, f: 2.0, arr: []string{"a", "b"}}).ShouldNot(Equal(myCustomType{s: "foo", n: 3, f: 2.0, arr: []string{"a", "b", "c"}})) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_key_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_key_matcher.go new file mode 100644 index 0000000000000..5701ba6e24cfa --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_key_matcher.go @@ -0,0 +1,53 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" + "reflect" +) + +type HaveKeyMatcher struct { + Key interface{} +} + +func (matcher *HaveKeyMatcher) Match(actual interface{}) (success bool, err error) { + if !isMap(actual) { + return false, fmt.Errorf("HaveKey matcher expects a map. 
Got:%s", format.Object(actual, 1)) + } + + keyMatcher, keyIsMatcher := matcher.Key.(omegaMatcher) + if !keyIsMatcher { + keyMatcher = &EqualMatcher{Expected: matcher.Key} + } + + keys := reflect.ValueOf(actual).MapKeys() + for i := 0; i < len(keys); i++ { + success, err := keyMatcher.Match(keys[i].Interface()) + if err != nil { + return false, fmt.Errorf("HaveKey's key matcher failed with:\n%s%s", format.Indent, err.Error()) + } + if success { + return true, nil + } + } + + return false, nil +} + +func (matcher *HaveKeyMatcher) FailureMessage(actual interface{}) (message string) { + switch matcher.Key.(type) { + case omegaMatcher: + return format.Message(actual, "to have key matching", matcher.Key) + default: + return format.Message(actual, "to have key", matcher.Key) + } +} + +func (matcher *HaveKeyMatcher) NegatedFailureMessage(actual interface{}) (message string) { + switch matcher.Key.(type) { + case omegaMatcher: + return format.Message(actual, "not to have key matching", matcher.Key) + default: + return format.Message(actual, "not to have key", matcher.Key) + } +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_key_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_key_matcher_test.go new file mode 100644 index 0000000000000..c663e302bac1a --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_key_matcher_test.go @@ -0,0 +1,73 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . "github.com/onsi/gomega/matchers" +) + +var _ = Describe("HaveKey", func() { + var ( + stringKeys map[string]int + intKeys map[int]string + objKeys map[*myCustomType]string + + customA *myCustomType + customB *myCustomType + ) + BeforeEach(func() { + stringKeys = map[string]int{"foo": 2, "bar": 3} + intKeys = map[int]string{2: "foo", 3: "bar"} + + customA = &myCustomType{s: "a", n: 2, f: 2.3, arr: []string{"ice", "cream"}} + customB = &myCustomType{s: "b", n: 4, f: 3.1, arr: []string{"cake"}} + objKeys = map[*myCustomType]string{customA: "aardvark", customB: "kangaroo"} + }) + + Context("when passed a map", func() { + It("should do the right thing", func() { + Ω(stringKeys).Should(HaveKey("foo")) + Ω(stringKeys).ShouldNot(HaveKey("baz")) + + Ω(intKeys).Should(HaveKey(2)) + Ω(intKeys).ShouldNot(HaveKey(4)) + + Ω(objKeys).Should(HaveKey(customA)) + Ω(objKeys).Should(HaveKey(&myCustomType{s: "b", n: 4, f: 3.1, arr: []string{"cake"}})) + Ω(objKeys).ShouldNot(HaveKey(&myCustomType{s: "b", n: 4, f: 3.1, arr: []string{"apple", "pie"}})) + }) + }) + + Context("when passed a correctly typed nil", func() { + It("should operate succesfully on the passed in value", func() { + var nilMap map[int]string + Ω(nilMap).ShouldNot(HaveKey("foo")) + }) + }) + + Context("when the passed in key is actually a matcher", func() { + It("should pass each element through the matcher", func() { + Ω(stringKeys).Should(HaveKey(ContainSubstring("oo"))) + Ω(stringKeys).ShouldNot(HaveKey(ContainSubstring("foobar"))) + }) + + It("should fail if the matcher ever fails", func() { + actual := map[int]string{1: "a", 3: "b", 2: "c"} + success, err := (&HaveKeyMatcher{Key: ContainSubstring("ar")}).Match(actual) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) + + Context("when passed something that is not a map", func() { + It("should error", func() { + success, err := (&HaveKeyMatcher{Key: "foo"}).Match([]string{"foo"}) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = 
(&HaveKeyMatcher{Key: "foo"}).Match(nil) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_key_with_value_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_key_with_value_matcher.go new file mode 100644 index 0000000000000..464ac187e9084 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_key_with_value_matcher.go @@ -0,0 +1,73 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" + "reflect" +) + +type HaveKeyWithValueMatcher struct { + Key interface{} + Value interface{} +} + +func (matcher *HaveKeyWithValueMatcher) Match(actual interface{}) (success bool, err error) { + if !isMap(actual) { + return false, fmt.Errorf("HaveKeyWithValue matcher expects a map. Got:%s", format.Object(actual, 1)) + } + + keyMatcher, keyIsMatcher := matcher.Key.(omegaMatcher) + if !keyIsMatcher { + keyMatcher = &EqualMatcher{Expected: matcher.Key} + } + + valueMatcher, valueIsMatcher := matcher.Value.(omegaMatcher) + if !valueIsMatcher { + valueMatcher = &EqualMatcher{Expected: matcher.Value} + } + + keys := reflect.ValueOf(actual).MapKeys() + for i := 0; i < len(keys); i++ { + success, err := keyMatcher.Match(keys[i].Interface()) + if err != nil { + return false, fmt.Errorf("HaveKeyWithValue's key matcher failed with:\n%s%s", format.Indent, err.Error()) + } + if success { + actualValue := reflect.ValueOf(actual).MapIndex(keys[i]) + success, err := valueMatcher.Match(actualValue.Interface()) + if err != nil { + return false, fmt.Errorf("HaveKeyWithValue's value matcher failed with:\n%s%s", format.Indent, err.Error()) + } + return success, nil + } + } + + return false, nil +} + +func (matcher *HaveKeyWithValueMatcher) FailureMessage(actual interface{}) (message string) { + str := "to have {key: value}" + if _, ok := matcher.Key.(omegaMatcher); ok { + str += " matching" + } else if _, ok := matcher.Value.(omegaMatcher); ok { + str += " matching" + } + + expect := make(map[interface{}]interface{}, 1) + expect[matcher.Key] = matcher.Value + return format.Message(actual, str, expect) +} + +func (matcher *HaveKeyWithValueMatcher) NegatedFailureMessage(actual interface{}) (message string) { + kStr := "not to have key" + if _, ok := matcher.Key.(omegaMatcher); ok { + kStr = "not to have key matching" + } + + vStr := "or that key's value not be" + if _, ok := matcher.Value.(omegaMatcher); ok { + vStr = "or to have that key's value not matching" + } + + return format.Message(actual, kStr, matcher.Key, vStr, matcher.Value) +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_key_with_value_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_key_with_value_matcher_test.go new file mode 100644 index 0000000000000..06a2242aeca54 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_key_with_value_matcher_test.go @@ -0,0 +1,82 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . 
"github.com/onsi/gomega/matchers" +) + +var _ = Describe("HaveKeyWithValue", func() { + var ( + stringKeys map[string]int + intKeys map[int]string + objKeys map[*myCustomType]*myCustomType + + customA *myCustomType + customB *myCustomType + ) + BeforeEach(func() { + stringKeys = map[string]int{"foo": 2, "bar": 3} + intKeys = map[int]string{2: "foo", 3: "bar"} + + customA = &myCustomType{s: "a", n: 2, f: 2.3, arr: []string{"ice", "cream"}} + customB = &myCustomType{s: "b", n: 4, f: 3.1, arr: []string{"cake"}} + objKeys = map[*myCustomType]*myCustomType{customA: customA, customB: customA} + }) + + Context("when passed a map", func() { + It("should do the right thing", func() { + Ω(stringKeys).Should(HaveKeyWithValue("foo", 2)) + Ω(stringKeys).ShouldNot(HaveKeyWithValue("foo", 1)) + Ω(stringKeys).ShouldNot(HaveKeyWithValue("baz", 2)) + Ω(stringKeys).ShouldNot(HaveKeyWithValue("baz", 1)) + + Ω(intKeys).Should(HaveKeyWithValue(2, "foo")) + Ω(intKeys).ShouldNot(HaveKeyWithValue(4, "foo")) + Ω(intKeys).ShouldNot(HaveKeyWithValue(2, "baz")) + + Ω(objKeys).Should(HaveKeyWithValue(customA, customA)) + Ω(objKeys).Should(HaveKeyWithValue(&myCustomType{s: "b", n: 4, f: 3.1, arr: []string{"cake"}}, &myCustomType{s: "a", n: 2, f: 2.3, arr: []string{"ice", "cream"}})) + Ω(objKeys).ShouldNot(HaveKeyWithValue(&myCustomType{s: "b", n: 4, f: 3.1, arr: []string{"apple", "pie"}}, customA)) + }) + }) + + Context("when passed a correctly typed nil", func() { + It("should operate succesfully on the passed in value", func() { + var nilMap map[int]string + Ω(nilMap).ShouldNot(HaveKeyWithValue("foo", "bar")) + }) + }) + + Context("when the passed in key or value is actually a matcher", func() { + It("should pass each element through the matcher", func() { + Ω(stringKeys).Should(HaveKeyWithValue(ContainSubstring("oo"), 2)) + Ω(intKeys).Should(HaveKeyWithValue(2, ContainSubstring("oo"))) + Ω(stringKeys).ShouldNot(HaveKeyWithValue(ContainSubstring("foobar"), 2)) + }) + + It("should fail if the matcher ever fails", func() { + actual := map[int]string{1: "a", 3: "b", 2: "c"} + success, err := (&HaveKeyWithValueMatcher{Key: ContainSubstring("ar"), Value: 2}).Match(actual) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + otherActual := map[string]int{"a": 1, "b": 2, "c": 3} + success, err = (&HaveKeyWithValueMatcher{Key: "a", Value: ContainSubstring("1")}).Match(otherActual) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) + + Context("when passed something that is not a map", func() { + It("should error", func() { + success, err := (&HaveKeyWithValueMatcher{Key: "foo", Value: "bar"}).Match([]string{"foo"}) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&HaveKeyWithValueMatcher{Key: "foo", Value: "bar"}).Match(nil) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_len_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_len_matcher.go new file mode 100644 index 0000000000000..a1837755701ba --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_len_matcher.go @@ -0,0 +1,27 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" +) + +type HaveLenMatcher struct { + Count int +} + +func (matcher *HaveLenMatcher) Match(actual interface{}) (success bool, err error) { + length, ok := lengthOf(actual) + if !ok { + return false, fmt.Errorf("HaveLen matcher expects a string/array/map/channel/slice. 
Got:\n%s", format.Object(actual, 1)) + } + + return length == matcher.Count, nil +} + +func (matcher *HaveLenMatcher) FailureMessage(actual interface{}) (message string) { + return fmt.Sprintf("Expected\n%s\nto have length %d", format.Object(actual, 1), matcher.Count) +} + +func (matcher *HaveLenMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return fmt.Sprintf("Expected\n%s\nnot to have length %d", format.Object(actual, 1), matcher.Count) +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_len_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_len_matcher_test.go new file mode 100644 index 0000000000000..1e6aa69d9d174 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_len_matcher_test.go @@ -0,0 +1,53 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . "github.com/onsi/gomega/matchers" +) + +var _ = Describe("HaveLen", func() { + Context("when passed a supported type", func() { + It("should do the right thing", func() { + Ω("").Should(HaveLen(0)) + Ω("AA").Should(HaveLen(2)) + + Ω([0]int{}).Should(HaveLen(0)) + Ω([2]int{1, 2}).Should(HaveLen(2)) + + Ω([]int{}).Should(HaveLen(0)) + Ω([]int{1, 2, 3}).Should(HaveLen(3)) + + Ω(map[string]int{}).Should(HaveLen(0)) + Ω(map[string]int{"a": 1, "b": 2, "c": 3, "d": 4}).Should(HaveLen(4)) + + c := make(chan bool, 3) + Ω(c).Should(HaveLen(0)) + c <- true + c <- true + Ω(c).Should(HaveLen(2)) + }) + }) + + Context("when passed a correctly typed nil", func() { + It("should operate succesfully on the passed in value", func() { + var nilSlice []int + Ω(nilSlice).Should(HaveLen(0)) + + var nilMap map[int]string + Ω(nilMap).Should(HaveLen(0)) + }) + }) + + Context("when passed an unsupported type", func() { + It("should error", func() { + success, err := (&HaveLenMatcher{Count: 0}).Match(0) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&HaveLenMatcher{Count: 0}).Match(nil) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_occurred_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_occurred_matcher.go new file mode 100644 index 0000000000000..b5095f1147be4 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_occurred_matcher.go @@ -0,0 +1,29 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" +) + +type HaveOccurredMatcher struct { +} + +func (matcher *HaveOccurredMatcher) Match(actual interface{}) (success bool, err error) { + if actual == nil { + return false, nil + } + + if isError(actual) { + return true, nil + } + + return false, fmt.Errorf("Expected an error. Got:\n%s", format.Object(actual, 1)) +} + +func (matcher *HaveOccurredMatcher) FailureMessage(actual interface{}) (message string) { + return fmt.Sprintf("Expected an error to have occured. 
Got:\n%s", format.Object(actual, 1)) +} + +func (matcher *HaveOccurredMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return fmt.Sprintf("Expected error:\n%s\n%s\n%s", format.Object(actual, 1), format.IndentString(actual.(error).Error(), 1), "not to have occurred") +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_occurred_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_occurred_matcher_test.go new file mode 100644 index 0000000000000..ef971aa6fd332 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_occurred_matcher_test.go @@ -0,0 +1,28 @@ +package matchers_test + +import ( + "errors" + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . "github.com/onsi/gomega/matchers" +) + +var _ = Describe("HaveOccurred", func() { + It("should succeed if matching an error", func() { + Ω(errors.New("Foo")).Should(HaveOccurred()) + }) + + It("should not succeed with nil", func() { + Ω(nil).ShouldNot(HaveOccurred()) + }) + + It("should only support errors and nil", func() { + success, err := (&HaveOccurredMatcher{}).Match("foo") + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&HaveOccurredMatcher{}).Match("") + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_prefix_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_prefix_matcher.go new file mode 100644 index 0000000000000..8b63a89997b36 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_prefix_matcher.go @@ -0,0 +1,35 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" +) + +type HavePrefixMatcher struct { + Prefix string + Args []interface{} +} + +func (matcher *HavePrefixMatcher) Match(actual interface{}) (success bool, err error) { + actualString, ok := toString(actual) + if !ok { + return false, fmt.Errorf("HavePrefix matcher requires a string or stringer. Got:\n%s", format.Object(actual, 1)) + } + prefix := matcher.prefix() + return len(actualString) >= len(prefix) && actualString[0:len(prefix)] == prefix, nil +} + +func (matcher *HavePrefixMatcher) prefix() string { + if len(matcher.Args) > 0 { + return fmt.Sprintf(matcher.Prefix, matcher.Args...) + } + return matcher.Prefix +} + +func (matcher *HavePrefixMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, "to have prefix", matcher.prefix()) +} + +func (matcher *HavePrefixMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, "not to have prefix", matcher.prefix()) +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_prefix_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_prefix_matcher_test.go new file mode 100644 index 0000000000000..bec3f975827a2 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_prefix_matcher_test.go @@ -0,0 +1,36 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . 
"github.com/onsi/gomega/matchers" +) + +var _ = Describe("HavePrefixMatcher", func() { + Context("when actual is a string", func() { + It("should match a string prefix", func() { + Ω("Ab").Should(HavePrefix("A")) + Ω("A").ShouldNot(HavePrefix("Ab")) + }) + }) + + Context("when the matcher is called with multiple arguments", func() { + It("should pass the string and arguments to sprintf", func() { + Ω("C3PO").Should(HavePrefix("C%dP", 3)) + }) + }) + + Context("when actual is a stringer", func() { + It("should call the stringer and match against the returned string", func() { + Ω(&myStringer{a: "Ab"}).Should(HavePrefix("A")) + }) + }) + + Context("when actual is neither a string nor a stringer", func() { + It("should error", func() { + success, err := (&HavePrefixMatcher{Prefix: "2"}).Match(2) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_suffix_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_suffix_matcher.go new file mode 100644 index 0000000000000..eb1b284da13f5 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_suffix_matcher.go @@ -0,0 +1,35 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" +) + +type HaveSuffixMatcher struct { + Suffix string + Args []interface{} +} + +func (matcher *HaveSuffixMatcher) Match(actual interface{}) (success bool, err error) { + actualString, ok := toString(actual) + if !ok { + return false, fmt.Errorf("HaveSuffix matcher requires a string or stringer. Got:\n%s", format.Object(actual, 1)) + } + suffix := matcher.suffix() + return len(actualString) >= len(suffix) && actualString[len(actualString) - len(suffix):] == suffix, nil +} + +func (matcher *HaveSuffixMatcher) suffix() string { + if len(matcher.Args) > 0 { + return fmt.Sprintf(matcher.Suffix, matcher.Args...) + } + return matcher.Suffix +} + +func (matcher *HaveSuffixMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, "to have suffix", matcher.suffix()) +} + +func (matcher *HaveSuffixMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, "not to have suffix", matcher.suffix()) +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_suffix_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_suffix_matcher_test.go new file mode 100644 index 0000000000000..72e8975bae091 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/have_suffix_matcher_test.go @@ -0,0 +1,36 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . 
"github.com/onsi/gomega/matchers" +) + +var _ = Describe("HaveSuffixMatcher", func() { + Context("when actual is a string", func() { + It("should match a string suffix", func() { + Ω("Ab").Should(HaveSuffix("b")) + Ω("A").ShouldNot(HaveSuffix("Ab")) + }) + }) + + Context("when the matcher is called with multiple arguments", func() { + It("should pass the string and arguments to sprintf", func() { + Ω("C3PO").Should(HaveSuffix("%dPO", 3)) + }) + }) + + Context("when actual is a stringer", func() { + It("should call the stringer and match against the returned string", func() { + Ω(&myStringer{a: "Ab"}).Should(HaveSuffix("b")) + }) + }) + + Context("when actual is neither a string nor a stringer", func() { + It("should error", func() { + success, err := (&HaveSuffixMatcher{Suffix: "2"}).Match(2) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/match_error_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/match_error_matcher.go new file mode 100644 index 0000000000000..03cdf04588860 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/match_error_matcher.go @@ -0,0 +1,50 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" + "reflect" +) + +type MatchErrorMatcher struct { + Expected interface{} +} + +func (matcher *MatchErrorMatcher) Match(actual interface{}) (success bool, err error) { + if isNil(actual) { + return false, fmt.Errorf("Expected an error, got nil") + } + + if !isError(actual) { + return false, fmt.Errorf("Expected an error. Got:\n%s", format.Object(actual, 1)) + } + + actualErr := actual.(error) + + if isString(matcher.Expected) { + return reflect.DeepEqual(actualErr.Error(), matcher.Expected), nil + } + + if isError(matcher.Expected) { + return reflect.DeepEqual(actualErr, matcher.Expected), nil + } + + var subMatcher omegaMatcher + var hasSubMatcher bool + if matcher.Expected != nil { + subMatcher, hasSubMatcher = (matcher.Expected).(omegaMatcher) + if hasSubMatcher { + return subMatcher.Match(actualErr.Error()) + } + } + + return false, fmt.Errorf("MatchError must be passed an error, string, or Matcher that can match on strings. Got:\n%s", format.Object(matcher.Expected, 1)) +} + +func (matcher *MatchErrorMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, "to match error", matcher.Expected) +} + +func (matcher *MatchErrorMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, "not to match error", matcher.Expected) +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/match_error_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/match_error_matcher_test.go new file mode 100644 index 0000000000000..338b512954368 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/match_error_matcher_test.go @@ -0,0 +1,93 @@ +package matchers_test + +import ( + "errors" + "fmt" + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . 
"github.com/onsi/gomega/matchers" +) + +type CustomError struct { +} + +func (c CustomError) Error() string { + return "an error" +} + +var _ = Describe("MatchErrorMatcher", func() { + Context("When asserting against an error", func() { + It("should succeed when matching with an error", func() { + err := errors.New("an error") + fmtErr := fmt.Errorf("an error") + customErr := CustomError{} + + Ω(err).Should(MatchError(errors.New("an error"))) + Ω(err).ShouldNot(MatchError(errors.New("another error"))) + + Ω(fmtErr).Should(MatchError(errors.New("an error"))) + Ω(customErr).Should(MatchError(CustomError{})) + }) + + It("should succeed when matching with a string", func() { + err := errors.New("an error") + fmtErr := fmt.Errorf("an error") + customErr := CustomError{} + + Ω(err).Should(MatchError("an error")) + Ω(err).ShouldNot(MatchError("another error")) + + Ω(fmtErr).Should(MatchError("an error")) + Ω(customErr).Should(MatchError("an error")) + }) + + Context("when passed a matcher", func() { + It("should pass if the matcher passes against the error string", func() { + err := errors.New("error 123 abc") + + Ω(err).Should(MatchError(MatchRegexp(`\d{3}`))) + }) + + It("should fail if the matcher fails against the error string", func() { + err := errors.New("no digits") + Ω(err).ShouldNot(MatchError(MatchRegexp(`\d`))) + }) + }) + + It("should fail when passed anything else", func() { + actualErr := errors.New("an error") + _, err := (&MatchErrorMatcher{ + Expected: []byte("an error"), + }).Match(actualErr) + Ω(err).Should(HaveOccurred()) + + _, err = (&MatchErrorMatcher{ + Expected: 3, + }).Match(actualErr) + Ω(err).Should(HaveOccurred()) + }) + }) + + Context("when passed nil", func() { + It("should fail", func() { + _, err := (&MatchErrorMatcher{ + Expected: "an error", + }).Match(nil) + Ω(err).Should(HaveOccurred()) + }) + }) + + Context("when passed a non-error", func() { + It("should fail", func() { + _, err := (&MatchErrorMatcher{ + Expected: "an error", + }).Match("an error") + Ω(err).Should(HaveOccurred()) + + _, err = (&MatchErrorMatcher{ + Expected: "an error", + }).Match(3) + Ω(err).Should(HaveOccurred()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/match_json_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/match_json_matcher.go new file mode 100644 index 0000000000000..bedf851026849 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/match_json_matcher.go @@ -0,0 +1,61 @@ +package matchers + +import ( + "bytes" + "encoding/json" + "fmt" + "github.com/onsi/gomega/format" + "reflect" +) + +type MatchJSONMatcher struct { + JSONToMatch interface{} +} + +func (matcher *MatchJSONMatcher) Match(actual interface{}) (success bool, err error) { + actualString, expectedString, err := matcher.prettyPrint(actual) + if err != nil { + return false, err + } + + var aval interface{} + var eval interface{} + + // this is guarded by prettyPrint + json.Unmarshal([]byte(actualString), &aval) + json.Unmarshal([]byte(expectedString), &eval) + + return reflect.DeepEqual(aval, eval), nil +} + +func (matcher *MatchJSONMatcher) FailureMessage(actual interface{}) (message string) { + actualString, expectedString, _ := matcher.prettyPrint(actual) + return format.Message(actualString, "to match JSON of", expectedString) +} + +func (matcher *MatchJSONMatcher) NegatedFailureMessage(actual interface{}) (message string) { + actualString, expectedString, _ := matcher.prettyPrint(actual) + return format.Message(actualString, "not to match 
JSON of", expectedString) +} + +func (matcher *MatchJSONMatcher) prettyPrint(actual interface{}) (actualFormatted, expectedFormatted string, err error) { + actualString, aok := toString(actual) + expectedString, eok := toString(matcher.JSONToMatch) + + if !(aok && eok) { + return "", "", fmt.Errorf("MatchJSONMatcher matcher requires a string or stringer. Got:\n%s", format.Object(actual, 1)) + } + + abuf := new(bytes.Buffer) + ebuf := new(bytes.Buffer) + + if err := json.Indent(abuf, []byte(actualString), "", " "); err != nil { + return "", "", err + } + + if err := json.Indent(ebuf, []byte(expectedString), "", " "); err != nil { + return "", "", err + } + + return actualString, expectedString, nil +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/match_json_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/match_json_matcher_test.go new file mode 100644 index 0000000000000..c1924baac2dec --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/match_json_matcher_test.go @@ -0,0 +1,59 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . "github.com/onsi/gomega/matchers" +) + +var _ = Describe("MatchJSONMatcher", func() { + Context("When passed stringifiables", func() { + It("should succeed if the JSON matches", func() { + Ω("{}").Should(MatchJSON("{}")) + Ω(`{"a":1}`).Should(MatchJSON(`{"a":1}`)) + Ω(`{ + "a":1 + }`).Should(MatchJSON(`{"a":1}`)) + Ω(`{"a":1, "b":2}`).Should(MatchJSON(`{"b":2, "a":1}`)) + Ω(`{"a":1}`).ShouldNot(MatchJSON(`{"b":2, "a":1}`)) + }) + + It("should work with byte arrays", func() { + Ω([]byte("{}")).Should(MatchJSON([]byte("{}"))) + Ω("{}").Should(MatchJSON([]byte("{}"))) + Ω([]byte("{}")).Should(MatchJSON("{}")) + }) + }) + + Context("when either side is not valid JSON", func() { + It("should error", func() { + success, err := (&MatchJSONMatcher{JSONToMatch: `oops`}).Match(`{}`) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&MatchJSONMatcher{JSONToMatch: `{}`}).Match(`oops`) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) + + Context("when either side is neither a string nor a stringer", func() { + It("should error", func() { + success, err := (&MatchJSONMatcher{JSONToMatch: "{}"}).Match(2) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&MatchJSONMatcher{JSONToMatch: 2}).Match("{}") + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&MatchJSONMatcher{JSONToMatch: nil}).Match("{}") + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&MatchJSONMatcher{JSONToMatch: 2}).Match(nil) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/match_regexp_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/match_regexp_matcher.go new file mode 100644 index 0000000000000..7ca79a15be247 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/match_regexp_matcher.go @@ -0,0 +1,42 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" + "regexp" +) + +type MatchRegexpMatcher struct { + Regexp string + Args []interface{} +} + +func (matcher *MatchRegexpMatcher) Match(actual interface{}) (success bool, err error) { + actualString, ok := toString(actual) + if !ok { + return false, fmt.Errorf("RegExp matcher requires a string or stringer.\nGot:%s", format.Object(actual, 1)) + } + + 
match, err := regexp.Match(matcher.regexp(), []byte(actualString)) + if err != nil { + return false, fmt.Errorf("RegExp match failed to compile with error:\n\t%s", err.Error()) + } + + return match, nil +} + +func (matcher *MatchRegexpMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, "to match regular expression", matcher.regexp()) +} + +func (matcher *MatchRegexpMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, "not to match regular expression", matcher.regexp()) +} + +func (matcher *MatchRegexpMatcher) regexp() string { + re := matcher.Regexp + if len(matcher.Args) > 0 { + re = fmt.Sprintf(matcher.Regexp, matcher.Args...) + } + return re +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/match_regexp_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/match_regexp_matcher_test.go new file mode 100644 index 0000000000000..bb521cce34771 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/match_regexp_matcher_test.go @@ -0,0 +1,44 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . "github.com/onsi/gomega/matchers" +) + +var _ = Describe("MatchRegexp", func() { + Context("when actual is a string", func() { + It("should match against the string", func() { + Ω(" a2!bla").Should(MatchRegexp(`\d!`)) + Ω(" a2!bla").ShouldNot(MatchRegexp(`[A-Z]`)) + }) + }) + + Context("when actual is a stringer", func() { + It("should call the stringer and match agains the returned string", func() { + Ω(&myStringer{a: "Abc3"}).Should(MatchRegexp(`[A-Z][a-z]+\d`)) + }) + }) + + Context("when the matcher is called with multiple arguments", func() { + It("should pass the string and arguments to sprintf", func() { + Ω(" a23!bla").Should(MatchRegexp(`\d%d!`, 3)) + }) + }) + + Context("when actual is neither a string nor a stringer", func() { + It("should error", func() { + success, err := (&MatchRegexpMatcher{Regexp: `\d`}).Match(2) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) + + Context("when the passed in regexp fails to compile", func() { + It("should error", func() { + success, err := (&MatchRegexpMatcher{Regexp: "("}).Match("Foo") + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/matcher_tests_suite_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/matcher_tests_suite_test.go new file mode 100644 index 0000000000000..4bc6d9d0c279a --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/matcher_tests_suite_test.go @@ -0,0 +1,29 @@ +package matchers_test + +import ( + "testing" + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" +) + +type myStringer struct { + a string +} + +func (s *myStringer) String() string { + return s.a +} + +type StringAlias string + +type myCustomType struct { + s string + n int + f float32 + arr []string +} + +func Test(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Gomega") +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/panic_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/panic_matcher.go new file mode 100644 index 0000000000000..75ab251bce94b --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/panic_matcher.go @@ -0,0 +1,42 @@ +package matchers + +import ( + "fmt" + "github.com/onsi/gomega/format" + "reflect" +) + +type PanicMatcher struct{} + +func (matcher *PanicMatcher) Match(actual interface{}) (success bool, err error) { + if actual == nil { + return false, fmt.Errorf("PanicMatcher expects a non-nil actual.") + } + + actualType := reflect.TypeOf(actual) + if actualType.Kind() != reflect.Func { + return false, fmt.Errorf("PanicMatcher expects a function. Got:\n%s", format.Object(actual, 1)) + } + if !(actualType.NumIn() == 0 && actualType.NumOut() == 0) { + return false, fmt.Errorf("PanicMatcher expects a function with no arguments and no return value. Got:\n%s", format.Object(actual, 1)) + } + + success = false + defer func() { + if e := recover(); e != nil { + success = true + } + }() + + reflect.ValueOf(actual).Call([]reflect.Value{}) + + return +} + +func (matcher *PanicMatcher) FailureMessage(actual interface{}) (message string) { + return format.Message(actual, "to panic") +} + +func (matcher *PanicMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return format.Message(actual, "not to panic") +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/panic_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/panic_matcher_test.go new file mode 100644 index 0000000000000..17f3935e64b18 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/panic_matcher_test.go @@ -0,0 +1,36 @@ +package matchers_test + +import ( + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . 
"github.com/onsi/gomega/matchers" +) + +var _ = Describe("Panic", func() { + Context("when passed something that's not a function that takes zero arguments and returns nothing", func() { + It("should error", func() { + success, err := (&PanicMatcher{}).Match("foo") + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&PanicMatcher{}).Match(nil) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&PanicMatcher{}).Match(func(foo string) {}) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&PanicMatcher{}).Match(func() string { return "bar" }) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) + + Context("when passed a function of the correct type", func() { + It("should call the function and pass if the function panics", func() { + Ω(func() { panic("ack!") }).Should(Panic()) + Ω(func() {}).ShouldNot(Panic()) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/receive_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/receive_matcher.go new file mode 100644 index 0000000000000..7a8c2cda51921 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/receive_matcher.go @@ -0,0 +1,126 @@ +package matchers + +import ( + "fmt" + "reflect" + + "github.com/onsi/gomega/format" +) + +type ReceiveMatcher struct { + Arg interface{} + receivedValue reflect.Value + channelClosed bool +} + +func (matcher *ReceiveMatcher) Match(actual interface{}) (success bool, err error) { + if !isChan(actual) { + return false, fmt.Errorf("ReceiveMatcher expects a channel. Got:\n%s", format.Object(actual, 1)) + } + + channelType := reflect.TypeOf(actual) + channelValue := reflect.ValueOf(actual) + + if channelType.ChanDir() == reflect.SendDir { + return false, fmt.Errorf("ReceiveMatcher matcher cannot be passed a send-only channel. 
Got:\n%s", format.Object(actual, 1)) + } + + var subMatcher omegaMatcher + var hasSubMatcher bool + + if matcher.Arg != nil { + subMatcher, hasSubMatcher = (matcher.Arg).(omegaMatcher) + if !hasSubMatcher { + argType := reflect.TypeOf(matcher.Arg) + if argType.Kind() != reflect.Ptr { + return false, fmt.Errorf("Cannot assign a value from the channel:\n%s\nTo:\n%s\nYou need to pass a pointer!", format.Object(actual, 1), format.Object(matcher.Arg, 1)) + } + + assignable := channelType.Elem().AssignableTo(argType.Elem()) + if !assignable { + return false, fmt.Errorf("Cannot assign a value from the channel:\n%s\nTo:\n%s", format.Object(actual, 1), format.Object(matcher.Arg, 1)) + } + } + } + + winnerIndex, value, open := reflect.Select([]reflect.SelectCase{ + reflect.SelectCase{Dir: reflect.SelectRecv, Chan: channelValue}, + reflect.SelectCase{Dir: reflect.SelectDefault}, + }) + + var closed bool + var didReceive bool + if winnerIndex == 0 { + closed = !open + didReceive = open + } + matcher.channelClosed = closed + + if closed { + return false, nil + } + + if hasSubMatcher { + if didReceive { + matcher.receivedValue = value + return subMatcher.Match(matcher.receivedValue.Interface()) + } else { + return false, nil + } + } + + if didReceive { + if matcher.Arg != nil { + outValue := reflect.ValueOf(matcher.Arg) + reflect.Indirect(outValue).Set(value) + } + + return true, nil + } else { + return false, nil + } +} + +func (matcher *ReceiveMatcher) FailureMessage(actual interface{}) (message string) { + subMatcher, hasSubMatcher := (matcher.Arg).(omegaMatcher) + + closedAddendum := "" + if matcher.channelClosed { + closedAddendum = " The channel is closed." + } + + if hasSubMatcher { + if matcher.receivedValue.IsValid() { + return subMatcher.FailureMessage(matcher.receivedValue.Interface()) + } + return "When passed a matcher, ReceiveMatcher's channel *must* receive something." + } else { + return format.Message(actual, "to receive something."+closedAddendum) + } +} + +func (matcher *ReceiveMatcher) NegatedFailureMessage(actual interface{}) (message string) { + subMatcher, hasSubMatcher := (matcher.Arg).(omegaMatcher) + + closedAddendum := "" + if matcher.channelClosed { + closedAddendum = " The channel is closed." + } + + if hasSubMatcher { + if matcher.receivedValue.IsValid() { + return subMatcher.NegatedFailureMessage(matcher.receivedValue.Interface()) + } + return "When passed a matcher, ReceiveMatcher's channel *must* receive something." + } else { + return format.Message(actual, "not to receive anything."+closedAddendum) + } +} + +func (matcher *ReceiveMatcher) MatchMayChangeInTheFuture(actual interface{}) bool { + if !isChan(actual) { + return false + } + + return !matcher.channelClosed +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/receive_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/receive_matcher_test.go new file mode 100644 index 0000000000000..938c078e6f7a4 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/receive_matcher_test.go @@ -0,0 +1,280 @@ +package matchers_test + +import ( + "time" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . 
"github.com/onsi/gomega/matchers" +) + +type kungFuActor interface { + DrunkenMaster() bool +} + +type jackie struct { + name string +} + +func (j *jackie) DrunkenMaster() bool { + return true +} + +var _ = Describe("ReceiveMatcher", func() { + Context("with no argument", func() { + Context("for a buffered channel", func() { + It("should succeed", func() { + channel := make(chan bool, 1) + + Ω(channel).ShouldNot(Receive()) + + channel <- true + + Ω(channel).Should(Receive()) + }) + }) + + Context("for an unbuffered channel", func() { + It("should succeed (eventually)", func() { + channel := make(chan bool) + + Ω(channel).ShouldNot(Receive()) + + go func() { + time.Sleep(10 * time.Millisecond) + channel <- true + }() + + Eventually(channel).Should(Receive()) + }) + }) + }) + + Context("with a pointer argument", func() { + Context("of the correct type", func() { + It("should write the value received on the channel to the pointer", func() { + channel := make(chan int, 1) + + var value int + + Ω(channel).ShouldNot(Receive(&value)) + Ω(value).Should(BeZero()) + + channel <- 17 + + Ω(channel).Should(Receive(&value)) + Ω(value).Should(Equal(17)) + }) + }) + + Context("to various types of objects", func() { + It("should work", func() { + //channels of strings + stringChan := make(chan string, 1) + stringChan <- "foo" + + var s string + Ω(stringChan).Should(Receive(&s)) + Ω(s).Should(Equal("foo")) + + //channels of slices + sliceChan := make(chan []bool, 1) + sliceChan <- []bool{true, true, false} + + var sl []bool + Ω(sliceChan).Should(Receive(&sl)) + Ω(sl).Should(Equal([]bool{true, true, false})) + + //channels of channels + chanChan := make(chan chan bool, 1) + c := make(chan bool) + chanChan <- c + + var receivedC chan bool + Ω(chanChan).Should(Receive(&receivedC)) + Ω(receivedC).Should(Equal(c)) + + //channels of interfaces + jackieChan := make(chan kungFuActor, 1) + aJackie := &jackie{name: "Jackie Chan"} + jackieChan <- aJackie + + var theJackie kungFuActor + Ω(jackieChan).Should(Receive(&theJackie)) + Ω(theJackie).Should(Equal(aJackie)) + }) + }) + + Context("of the wrong type", func() { + It("should error", func() { + channel := make(chan int) + var incorrectType bool + + success, err := (&ReceiveMatcher{Arg: &incorrectType}).Match(channel) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + var notAPointer int + success, err = (&ReceiveMatcher{Arg: notAPointer}).Match(channel) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) + }) + + Context("with a matcher", func() { + It("should defer to the underlying matcher", func() { + intChannel := make(chan int, 1) + intChannel <- 3 + Ω(intChannel).Should(Receive(Equal(3))) + + intChannel <- 2 + Ω(intChannel).ShouldNot(Receive(Equal(3))) + + stringChannel := make(chan []string, 1) + stringChannel <- []string{"foo", "bar", "baz"} + Ω(stringChannel).Should(Receive(ContainElement(ContainSubstring("fo")))) + + stringChannel <- []string{"foo", "bar", "baz"} + Ω(stringChannel).ShouldNot(Receive(ContainElement(ContainSubstring("archipelago")))) + }) + + It("should defer to the underlying matcher for the message", func() { + matcher := Receive(Equal(3)) + channel := make(chan int, 1) + channel <- 2 + matcher.Match(channel) + Ω(matcher.FailureMessage(channel)).Should(MatchRegexp(`Expected\s+: 2\s+to equal\s+: 3`)) + + channel <- 3 + matcher.Match(channel) + Ω(matcher.NegatedFailureMessage(channel)).Should(MatchRegexp(`Expected\s+: 3\s+not to equal\s+: 3`)) + }) + + It("should work just fine with Eventually", func() { 
+ stringChannel := make(chan string) + + go func() { + time.Sleep(5 * time.Millisecond) + stringChannel <- "A" + time.Sleep(5 * time.Millisecond) + stringChannel <- "B" + }() + + Eventually(stringChannel).Should(Receive(Equal("B"))) + }) + + Context("if the matcher errors", func() { + It("should error", func() { + channel := make(chan int, 1) + channel <- 3 + success, err := (&ReceiveMatcher{Arg: ContainSubstring("three")}).Match(channel) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) + + Context("if nothing is received", func() { + It("should fail", func() { + channel := make(chan int, 1) + success, err := (&ReceiveMatcher{Arg: Equal(1)}).Match(channel) + Ω(success).Should(BeFalse()) + Ω(err).ShouldNot(HaveOccurred()) + }) + }) + }) + + Context("When actual is a *closed* channel", func() { + Context("for a buffered channel", func() { + It("should work until it hits the end of the buffer", func() { + channel := make(chan bool, 1) + channel <- true + + close(channel) + + Ω(channel).Should(Receive()) + Ω(channel).ShouldNot(Receive()) + }) + }) + + Context("for an unbuffered channel", func() { + It("should always fail", func() { + channel := make(chan bool) + close(channel) + + Ω(channel).ShouldNot(Receive()) + }) + }) + }) + + Context("When actual is a send-only channel", func() { + It("should error", func() { + channel := make(chan bool) + + var writerChannel chan<- bool + writerChannel = channel + + success, err := (&ReceiveMatcher{}).Match(writerChannel) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) + + Context("when acutal is a non-channel", func() { + It("should error", func() { + var nilChannel chan bool + + success, err := (&ReceiveMatcher{}).Match(nilChannel) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&ReceiveMatcher{}).Match(nil) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + + success, err = (&ReceiveMatcher{}).Match(3) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) + }) + + Describe("when used with eventually and a custom matcher", func() { + It("should return the matcher's error when a failing value is received on the channel, instead of the must receive something failure", func() { + failures := InterceptGomegaFailures(func() { + c := make(chan string, 0) + Eventually(c, 0.01).Should(Receive(Equal("hello"))) + }) + Ω(failures[0]).Should(ContainSubstring("When passed a matcher, ReceiveMatcher's channel *must* receive something.")) + + failures = InterceptGomegaFailures(func() { + c := make(chan string, 1) + c <- "hi" + Eventually(c, 0.01).Should(Receive(Equal("hello"))) + }) + Ω(failures[0]).Should(ContainSubstring(": hello")) + }) + }) + + Describe("Bailing early", func() { + It("should bail early when passed a closed channel", func() { + c := make(chan bool) + close(c) + + t := time.Now() + failures := InterceptGomegaFailures(func() { + Eventually(c).Should(Receive()) + }) + Ω(time.Since(t)).Should(BeNumerically("<", 500*time.Millisecond)) + Ω(failures).Should(HaveLen(1)) + }) + + It("should bail early when passed a non-channel", func() { + t := time.Now() + failures := InterceptGomegaFailures(func() { + Eventually(3).Should(Receive()) + }) + Ω(time.Since(t)).Should(BeNumerically("<", 500*time.Millisecond)) + Ω(failures).Should(HaveLen(1)) + }) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/succeed_matcher.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/succeed_matcher.go new file mode 100644 index 
0000000000000..c162b67b7bc15 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/succeed_matcher.go @@ -0,0 +1,30 @@ +package matchers + +import ( + "fmt" + + "github.com/onsi/gomega/format" +) + +type SucceedMatcher struct { +} + +func (matcher *SucceedMatcher) Match(actual interface{}) (success bool, err error) { + if actual == nil { + return true, nil + } + + if isError(actual) { + return false, nil + } + + return false, fmt.Errorf("Expected an error-type. Got:\n%s", format.Object(actual, 1)) +} + +func (matcher *SucceedMatcher) FailureMessage(actual interface{}) (message string) { + return fmt.Sprintf("Expected success, but got an error:\n%s", format.Object(actual, 1)) +} + +func (matcher *SucceedMatcher) NegatedFailureMessage(actual interface{}) (message string) { + return "Expected failure, but got no error." +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/succeed_matcher_test.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/succeed_matcher_test.go new file mode 100644 index 0000000000000..3562e7049de20 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/succeed_matcher_test.go @@ -0,0 +1,39 @@ +package matchers_test + +import ( + "errors" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" + . "github.com/onsi/gomega/matchers" +) + +func Erroring() error { + return errors.New("bam") +} + +func NotErroring() error { + return nil +} + +type AnyType struct{} + +func Invalid() *AnyType { + return nil +} + +var _ = Describe("Succeed", func() { + It("should succeed if the function succeeds", func() { + Ω(NotErroring()).Should(Succeed()) + }) + + It("should succeed (in the negated) if the function errored", func() { + Ω(Erroring()).ShouldNot(Succeed()) + }) + + It("should not if passed a non-error", func() { + success, err := (&SucceedMatcher{}).Match(Invalid()) + Ω(success).Should(BeFalse()) + Ω(err).Should(HaveOccurred()) + }) +}) diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/support/goraph/MIT.LICENSE b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/support/goraph/MIT.LICENSE new file mode 100644 index 0000000000000..8edd8175abe72 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/support/goraph/MIT.LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2014 Amit Kumar Gupta + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
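As a quick illustration of how the Succeed matcher vendored above is typically used, here is a minimal sketch (the openConfig helper and the suite it sits in are hypothetical and not part of this change):

```golang
package example_test

import (
	"errors"

	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

// openConfig is a hypothetical helper that returns only an error.
func openConfig(path string) error {
	if path == "" {
		return errors.New("no path given")
	}
	return nil
}

var _ = Describe("Succeed (usage sketch)", func() {
	It("asserts directly on an error return value", func() {
		Ω(openConfig("app.yaml")).Should(Succeed())  // nil error: Succeed passes
		Ω(openConfig("")).ShouldNot(Succeed())       // non-nil error: the negated assertion passes
	})
})
```

Succeed reads as the inverse of HaveOccurred when the value under test is a bare error return.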
diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/support/goraph/bipartitegraph/bipartitegraph.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/support/goraph/bipartitegraph/bipartitegraph.go new file mode 100644 index 0000000000000..119d21ef3178b --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/support/goraph/bipartitegraph/bipartitegraph.go @@ -0,0 +1,41 @@ +package bipartitegraph + +import "errors" +import "fmt" + +import . "github.com/onsi/gomega/matchers/support/goraph/node" +import . "github.com/onsi/gomega/matchers/support/goraph/edge" + +type BipartiteGraph struct { + Left NodeOrderedSet + Right NodeOrderedSet + Edges EdgeSet +} + +func NewBipartiteGraph(leftValues, rightValues []interface{}, neighbours func(interface{}, interface{}) (bool, error)) (*BipartiteGraph, error) { + left := NodeOrderedSet{} + for i, _ := range leftValues { + left = append(left, Node{i}) + } + + right := NodeOrderedSet{} + for j, _ := range rightValues { + right = append(right, Node{j + len(left)}) + } + + edges := EdgeSet{} + for i, leftValue := range leftValues { + for j, rightValue := range rightValues { + neighbours, err := neighbours(leftValue, rightValue) + if err != nil { + return nil, errors.New(fmt.Sprintf("error determining adjacency for %v and %v: %s", leftValue, rightValue, err.Error())) + } + + if neighbours { + edges = append(edges, Edge{left[i], right[j]}) + } + } + } + + return &BipartiteGraph{left, right, edges}, nil +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/support/goraph/bipartitegraph/bipartitegraphmatching.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/support/goraph/bipartitegraph/bipartitegraphmatching.go new file mode 100644 index 0000000000000..32529c5113148 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/support/goraph/bipartitegraph/bipartitegraphmatching.go @@ -0,0 +1,161 @@ +package bipartitegraph + +import . "github.com/onsi/gomega/matchers/support/goraph/node" +import . 
"github.com/onsi/gomega/matchers/support/goraph/edge" +import "github.com/onsi/gomega/matchers/support/goraph/util" + +func (bg *BipartiteGraph) LargestMatching() (matching EdgeSet) { + paths := bg.maximalDisjointSLAPCollection(matching) + + for len(paths) > 0 { + for _, path := range paths { + matching = matching.SymmetricDifference(path) + } + paths = bg.maximalDisjointSLAPCollection(matching) + } + + return +} + +func (bg *BipartiteGraph) maximalDisjointSLAPCollection(matching EdgeSet) (result []EdgeSet) { + guideLayers := bg.createSLAPGuideLayers(matching) + if len(guideLayers) == 0 { + return + } + + used := make(map[Node]bool) + + for _, u := range guideLayers[len(guideLayers)-1] { + slap, found := bg.findDisjointSLAP(u, matching, guideLayers, used) + if found { + for _, edge := range slap { + used[edge.Node1] = true + used[edge.Node2] = true + } + result = append(result, slap) + } + } + + return +} + +func (bg *BipartiteGraph) findDisjointSLAP( + start Node, + matching EdgeSet, + guideLayers []NodeOrderedSet, + used map[Node]bool, +) ([]Edge, bool) { + return bg.findDisjointSLAPHelper(start, EdgeSet{}, len(guideLayers)-1, matching, guideLayers, used) +} + +func (bg *BipartiteGraph) findDisjointSLAPHelper( + currentNode Node, + currentSLAP EdgeSet, + currentLevel int, + matching EdgeSet, + guideLayers []NodeOrderedSet, + used map[Node]bool, +) (EdgeSet, bool) { + used[currentNode] = true + + if currentLevel == 0 { + return currentSLAP, true + } + + for _, nextNode := range guideLayers[currentLevel-1] { + if used[nextNode] { + continue + } + + edge, found := bg.Edges.FindByNodes(currentNode, nextNode) + if !found { + continue + } + + if matching.Contains(edge) == util.Odd(currentLevel) { + continue + } + + currentSLAP = append(currentSLAP, edge) + slap, found := bg.findDisjointSLAPHelper(nextNode, currentSLAP, currentLevel-1, matching, guideLayers, used) + if found { + return slap, true + } + currentSLAP = currentSLAP[:len(currentSLAP)-1] + } + + used[currentNode] = false + return nil, false +} + +func (bg *BipartiteGraph) createSLAPGuideLayers(matching EdgeSet) (guideLayers []NodeOrderedSet) { + used := make(map[Node]bool) + currentLayer := NodeOrderedSet{} + + for _, node := range bg.Left { + if matching.Free(node) { + used[node] = true + currentLayer = append(currentLayer, node) + } + } + + if len(currentLayer) == 0 { + return []NodeOrderedSet{} + } else { + guideLayers = append(guideLayers, currentLayer) + } + + done := false + + for !done { + lastLayer := currentLayer + currentLayer = NodeOrderedSet{} + + if util.Odd(len(guideLayers)) { + for _, leftNode := range lastLayer { + for _, rightNode := range bg.Right { + if used[rightNode] { + continue + } + + edge, found := bg.Edges.FindByNodes(leftNode, rightNode) + if !found || matching.Contains(edge) { + continue + } + + currentLayer = append(currentLayer, rightNode) + used[rightNode] = true + + if matching.Free(rightNode) { + done = true + } + } + } + } else { + for _, rightNode := range lastLayer { + for _, leftNode := range bg.Left { + if used[leftNode] { + continue + } + + edge, found := bg.Edges.FindByNodes(leftNode, rightNode) + if !found || !matching.Contains(edge) { + continue + } + + currentLayer = append(currentLayer, leftNode) + used[leftNode] = true + } + } + + } + + if len(currentLayer) == 0 { + return []NodeOrderedSet{} + } else { + guideLayers = append(guideLayers, currentLayer) + } + } + + return +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/support/goraph/edge/edge.go 
b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/support/goraph/edge/edge.go new file mode 100644 index 0000000000000..4fd15cc06944b --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/support/goraph/edge/edge.go @@ -0,0 +1,61 @@ +package edge + +import . "github.com/onsi/gomega/matchers/support/goraph/node" + +type Edge struct { + Node1 Node + Node2 Node +} + +type EdgeSet []Edge + +func (ec EdgeSet) Free(node Node) bool { + for _, e := range ec { + if e.Node1 == node || e.Node2 == node { + return false + } + } + + return true +} + +func (ec EdgeSet) Contains(edge Edge) bool { + for _, e := range ec { + if e == edge { + return true + } + } + + return false +} + +func (ec EdgeSet) FindByNodes(node1, node2 Node) (Edge, bool) { + for _, e := range ec { + if (e.Node1 == node1 && e.Node2 == node2) || (e.Node1 == node2 && e.Node2 == node1) { + return e, true + } + } + + return Edge{}, false +} + +func (ec EdgeSet) SymmetricDifference(ec2 EdgeSet) EdgeSet { + edgesToInclude := make(map[Edge]bool) + + for _, e := range ec { + edgesToInclude[e] = true + } + + for _, e := range ec2 { + edgesToInclude[e] = !edgesToInclude[e] + } + + result := EdgeSet{} + for e, include := range edgesToInclude { + if include { + result = append(result, e) + } + } + + return result +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/support/goraph/node/node.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/support/goraph/node/node.go new file mode 100644 index 0000000000000..800c2ea8caf30 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/support/goraph/node/node.go @@ -0,0 +1,7 @@ +package node + +type Node struct { + Id int +} + +type NodeOrderedSet []Node diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/support/goraph/util/util.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/support/goraph/util/util.go new file mode 100644 index 0000000000000..a24cd27505588 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/support/goraph/util/util.go @@ -0,0 +1,7 @@ +package util + +import "math" + +func Odd(n int) bool { + return math.Mod(float64(n), 2.0) == 1.0 +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/matchers/type_support.go b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/type_support.go new file mode 100644 index 0000000000000..ef9b44835ea38 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/matchers/type_support.go @@ -0,0 +1,165 @@ +/* +Gomega matchers + +This package implements the Gomega matchers and does not typically need to be imported. 
+See the docs for Gomega for documentation on the matchers + +http://onsi.github.io/gomega/ +*/ +package matchers + +import ( + "fmt" + "reflect" +) + +type omegaMatcher interface { + Match(actual interface{}) (success bool, err error) + FailureMessage(actual interface{}) (message string) + NegatedFailureMessage(actual interface{}) (message string) +} + +func isBool(a interface{}) bool { + return reflect.TypeOf(a).Kind() == reflect.Bool +} + +func isNumber(a interface{}) bool { + if a == nil { + return false + } + kind := reflect.TypeOf(a).Kind() + return reflect.Int <= kind && kind <= reflect.Float64 +} + +func isInteger(a interface{}) bool { + kind := reflect.TypeOf(a).Kind() + return reflect.Int <= kind && kind <= reflect.Int64 +} + +func isUnsignedInteger(a interface{}) bool { + kind := reflect.TypeOf(a).Kind() + return reflect.Uint <= kind && kind <= reflect.Uint64 +} + +func isFloat(a interface{}) bool { + kind := reflect.TypeOf(a).Kind() + return reflect.Float32 <= kind && kind <= reflect.Float64 +} + +func toInteger(a interface{}) int64 { + if isInteger(a) { + return reflect.ValueOf(a).Int() + } else if isUnsignedInteger(a) { + return int64(reflect.ValueOf(a).Uint()) + } else if isFloat(a) { + return int64(reflect.ValueOf(a).Float()) + } else { + panic(fmt.Sprintf("Expected a number! Got <%T> %#v", a, a)) + } +} + +func toUnsignedInteger(a interface{}) uint64 { + if isInteger(a) { + return uint64(reflect.ValueOf(a).Int()) + } else if isUnsignedInteger(a) { + return reflect.ValueOf(a).Uint() + } else if isFloat(a) { + return uint64(reflect.ValueOf(a).Float()) + } else { + panic(fmt.Sprintf("Expected a number! Got <%T> %#v", a, a)) + } +} + +func toFloat(a interface{}) float64 { + if isInteger(a) { + return float64(reflect.ValueOf(a).Int()) + } else if isUnsignedInteger(a) { + return float64(reflect.ValueOf(a).Uint()) + } else if isFloat(a) { + return reflect.ValueOf(a).Float() + } else { + panic(fmt.Sprintf("Expected a number! 
Got <%T> %#v", a, a)) + } +} + +func isError(a interface{}) bool { + _, ok := a.(error) + return ok +} + +func isChan(a interface{}) bool { + if isNil(a) { + return false + } + return reflect.TypeOf(a).Kind() == reflect.Chan +} + +func isMap(a interface{}) bool { + if a == nil { + return false + } + return reflect.TypeOf(a).Kind() == reflect.Map +} + +func isArrayOrSlice(a interface{}) bool { + if a == nil { + return false + } + switch reflect.TypeOf(a).Kind() { + case reflect.Array, reflect.Slice: + return true + default: + return false + } +} + +func isString(a interface{}) bool { + if a == nil { + return false + } + return reflect.TypeOf(a).Kind() == reflect.String +} + +func toString(a interface{}) (string, bool) { + aString, isString := a.(string) + if isString { + return aString, true + } + + aBytes, isBytes := a.([]byte) + if isBytes { + return string(aBytes), true + } + + aStringer, isStringer := a.(fmt.Stringer) + if isStringer { + return aStringer.String(), true + } + + return "", false +} + +func lengthOf(a interface{}) (int, bool) { + if a == nil { + return 0, false + } + switch reflect.TypeOf(a).Kind() { + case reflect.Map, reflect.Array, reflect.String, reflect.Chan, reflect.Slice: + return reflect.ValueOf(a).Len(), true + default: + return 0, false + } +} + +func isNil(a interface{}) bool { + if a == nil { + return true + } + + switch reflect.TypeOf(a).Kind() { + case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: + return reflect.ValueOf(a).IsNil() + } + + return false +} diff --git a/Godeps/_workspace/src/github.com/onsi/gomega/types/types.go b/Godeps/_workspace/src/github.com/onsi/gomega/types/types.go new file mode 100644 index 0000000000000..1c632ade29135 --- /dev/null +++ b/Godeps/_workspace/src/github.com/onsi/gomega/types/types.go @@ -0,0 +1,17 @@ +package types + +type GomegaFailHandler func(message string, callerSkip ...int) + +//A simple *testing.T interface wrapper +type GomegaTestingT interface { + Errorf(format string, args ...interface{}) +} + +//All Gomega matchers must implement the GomegaMatcher interface +// +//For details on writing custom matchers, check out: http://onsi.github.io/gomega/#adding_your_own_matchers +type GomegaMatcher interface { + Match(actual interface{}) (success bool, err error) + FailureMessage(actual interface{}) (message string) + NegatedFailureMessage(actual interface{}) (message string) +}