diff --git a/vendor/github.com/aymerick/raymond/BENCHMARKS.md b/vendor/github.com/aymerick/raymond/BENCHMARKS.md new file mode 100644 index 0000000..c3af56c --- /dev/null +++ b/vendor/github.com/aymerick/raymond/BENCHMARKS.md @@ -0,0 +1,46 @@ +# Benchmarks + +Hardware: MacBookPro11,1 - Intel Core i5 - 2,6 GHz - 8 Go RAM + +With: + + - handlebars.js #8cba84df119c317fcebc49fb285518542ca9c2d0 + - raymond #7bbaaf50ed03c96b56687d7fa6c6e04e02375a98 + + +## handlebars.js (ops/ms) + + arguments 198 ±4 (5) + array-each 568 ±23 (5) + array-mustache 522 ±18 (4) + complex 71 ±7 (3) + data 67 ±2 (3) + depth-1 47 ±2 (3) + depth-2 14 ±1 (2) + object-mustache 1099 ±47 (5) + object 907 ±58 (4) + partial-recursion 46 ±3 (4) + partial 68 ±3 (3) + paths 1650 ±50 (3) + string 2552 ±157 (3) + subexpression 141 ±2 (4) + variables 2671 ±83 (4) + + +## raymond + + BenchmarkArguments 200000 6642 ns/op 151 ops/ms + BenchmarkArrayEach 100000 19584 ns/op 51 ops/ms + BenchmarkArrayMustache 100000 17305 ns/op 58 ops/ms + BenchmarkComplex 30000 50270 ns/op 20 ops/ms + BenchmarkData 50000 25551 ns/op 39 ops/ms + BenchmarkDepth1 100000 20162 ns/op 50 ops/ms + BenchmarkDepth2 30000 47782 ns/op 21 ops/ms + BenchmarkObjectMustache 200000 7668 ns/op 130 ops/ms + BenchmarkObject 200000 8843 ns/op 113 ops/ms + BenchmarkPartialRecursion 50000 23139 ns/op 43 ops/ms + BenchmarkPartial 50000 31015 ns/op 32 ops/ms + BenchmarkPath 200000 8997 ns/op 111 ops/ms + BenchmarkString 1000000 1879 ns/op 532 ops/ms + BenchmarkSubExpression 300000 4935 ns/op 203 ops/ms + BenchmarkVariables 200000 6478 ns/op 154 ops/ms diff --git a/vendor/github.com/aymerick/raymond/CHANGELOG.md b/vendor/github.com/aymerick/raymond/CHANGELOG.md new file mode 100644 index 0000000..363e274 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/CHANGELOG.md @@ -0,0 +1,13 @@ +# Raymond Changelog + +### Raymond 1.1.0 _(June 15, 2015)_ + +- Permits templates references with lowercase versions of struct fields. +- Adds `ParseFile()` function. +- Adds `RegisterPartialFile()`, `RegisterPartialFiles()` and `Clone()` methods on `Template`. +- Helpers can now be struct methods. +- Ensures safe concurrent access to helpers and partials. + +### Raymond 1.0.0 _(June 09, 2015)_ + +- This is the first release. Raymond supports almost all handlebars features. See https://github.com/aymerick/raymond#limitations for a list of differences with the javascript implementation. diff --git a/vendor/github.com/aymerick/raymond/LICENSE b/vendor/github.com/aymerick/raymond/LICENSE new file mode 100644 index 0000000..6ce87cd --- /dev/null +++ b/vendor/github.com/aymerick/raymond/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Aymerick JEHANNE + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/vendor/github.com/aymerick/raymond/README.md b/vendor/github.com/aymerick/raymond/README.md new file mode 100644 index 0000000..8290a20 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/README.md @@ -0,0 +1,1378 @@ +# raymond [![Build Status](https://secure.travis-ci.org/aymerick/raymond.svg?branch=master)](http://travis-ci.org/aymerick/raymond) [![GoDoc](https://godoc.org/github.com/aymerick/raymond?status.svg)](http://godoc.org/github.com/aymerick/raymond) + +Handlebars for [golang](https://golang.org) with the same features as [handlebars.js](http://handlebarsjs.com) `3.0`. + +The full API documentation is available here: . + +![Raymond Logo](https://github.com/aymerick/raymond/blob/master/raymond.png?raw=true "Raymond") + + +# Table of Contents + +- [Quick Start](#quick-start) +- [Correct Usage](#correct-usage) +- [Context](#context) +- [HTML Escaping](#html-escaping) +- [Helpers](#helpers) + - [Template Helpers](#template-helpers) + - [Built-In Helpers](#built-in-helpers) + - [The `if` block helper](#the-if-block-helper) + - [The `unless` block helper](#the-unless-block-helper) + - [The `each` block helper](#the-each-block-helper) + - [The `with` block helper](#the-with-block-helper) + - [The `lookup` helper](#the-lookup-helper) + - [The `log` helper](#the-log-helper) + - [Block Helpers](#block-helpers) + - [Block Evaluation](#block-evaluation) + - [Conditional](#conditional) + - [Else Block Evaluation](#else-block-evaluation) + - [Block Parameters](#block-parameters) + - [Helper Parameters](#helper-parameters) + - [Automatic conversion](#automatic-conversion) + - [Options Argument](#options-argument) + - [Context Values](#context-values) + - [Helper Hash Arguments](#helper-hash-arguments) + - [Private Data](#private-data) + - [Utilites](#utilites) + - [`Str()`](#str) + - [`IsTrue()`](#istrue) +- [Context Functions](#context-functions) +- [Partials](#partials) + - [Template Partials](#template-partials) + - [Global Partials](#global-partials) + - [Dynamic Partials](#dynamic-partials) + - [Partial Contexts](#partial-contexts) + - [Partial Parameters](#partial-parameters) +- [Utility Functions](#utility-functions) +- [Mustache](#mustache) +- [Limitations](#limitations) +- [Handlebars Lexer](#handlebars-lexer) +- [Handlebars Parser](#handlebars-parser) +- [Test](#test) +- [References](#references) +- [Others Implementations](#others-implementations) + + +## Quick Start + + $ go get github.com/aymerick/raymond + +The quick and dirty way of rendering a handlebars template: + +```go +package main + +import ( + "fmt" + + "github.com/aymerick/raymond" +) + +func main() { + tpl := `
+<div class="entry">
+  <h1>{{title}}</h1>
+  <div class="body">
+    {{body}}
+  </div>
+</div>
+` + + ctx := map[string]string{ + "title": "My New Post", + "body": "This is my first post!", + } + + result, err := raymond.Render(tpl, ctx) + if err != nil { + panic("Please fill a bug :)") + } + + fmt.Print(result) +} +``` + +Displays: + +```html +
+<div class="entry">
+  <h1>My New Post</h1>
+  <div class="body">
+    This is my first post!
+  </div>
+</div>
+``` + +Please note that the template will be parsed everytime you call `Render()` function. So you probably want to read the next section. + + +## Correct Usage + +To avoid parsing a template several times, use the `Parse()` and `Exec()` functions: + +```go +package main + +import ( + "fmt" + + "github.com/aymerick/raymond" +) + +func main() { + source := `
+<div class="entry">
+  <h1>{{title}}</h1>
+  <div class="body">
+    {{body}}
+  </div>
+</div>
+` + + ctxList := []map[string]string{ + { + "title": "My New Post", + "body": "This is my first post!", + }, + { + "title": "Here is another post", + "body": "This is my second post!", + }, + } + + // parse template + tpl, err := raymond.Parse(source) + if err != nil { + panic(err) + } + + for _, ctx := range ctxList { + // render template + result, err := tpl.Exec(ctx) + if err != nil { + panic(err) + } + + fmt.Print(result) + } +} + +``` + +Displays: + +```html +
+<div class="entry">
+  <h1>My New Post</h1>
+  <div class="body">
+    This is my first post!
+  </div>
+</div>
+<div class="entry">
+  <h1>Here is another post</h1>
+  <div class="body">
+    This is my second post!
+  </div>
+</div>
+``` + +You can use `MustParse()` and `MustExec()` functions if you don't want to deal with errors: + +```go +// parse template +tpl := raymond.MustParse(source) + +// render template +result := tpl.MustExec(ctx) +``` + + +## Context + +The rendering context can contain any type of values, including `array`, `slice`, `map`, `struct` and `func`. + +When using structs, be warned that only exported fields are accessible. However you can access exported fields in template with their lowercase names. + +For example, both `{{author.firstName}}` and `{{Author.FirstName}}` references give the same result, as long as `Author` and `FirstName` are exported struct fields. + +```go +package main + +import ( + "fmt" + + "github.com/aymerick/raymond" +) + +func main() { + source := `
+<div class="post">
+  <h1>By {{author.firstName}} {{author.lastName}}</h1>
+  <div class="body">{{body}}</div>
+
+  <h1>Comments</h1>
+
+  {{#each comments}}
+  <h2>By {{author.firstName}} {{author.lastName}}</h2>
+  <div class="body">{{body}}</div>
+  {{/each}}
+</div>
` + + type Post struct { + Author Person + Body string + Comments []Comment + } + + type Person struct { + FirstName string + LastName string + } + + type Comment struct { + Author Person + Body string + } + + ctx := Post{ + Person{"Jean", "Valjean"}, + "Life is difficult", + []Comment{ + Comment{ + Person{"Marcel", "Beliveau"}, + "LOL!", + }, + }, + } + + output := raymond.MustRender(source, ctx) + + fmt.Print(output) +} +``` + +Output: + +```html +
+<div class="post">
+  <h1>By Jean Valjean</h1>
+  <div class="body">Life is difficult</div>
+
+  <h1>Comments</h1>
+
+  <h2>By Marcel Beliveau</h2>
+  <div class="body">LOL!</div>
+</div>
+``` + + +## HTML Escaping + +By default, the result of a mustache expression is HTML escaped. Use the triple mustache `{{{` to output unescaped values. + +```go +source := `
+<div class="entry">
+  <h1>{{title}}</h1>
+  <div class="body">
+    {{{body}}}
+  </div>
+</div>
+`
+
+ctx := map[string]string{
+  "title": "All about <p> Tags",
+  "body":  "<p>This is a post about &lt;p&gt; tags</p>
", +} + +tpl := raymond.MustParse(source) +result := tpl.MustExec(ctx) + +fmt.Print(result) +``` + +Output: + +```html +
+<div class="entry">
+  <h1>All about &lt;p&gt; Tags</h1>
+  <div class="body">
+    <p>This is a post about &lt;p&gt; tags</p>
+  </div>
+</div>
+``` + +When returning HTML from a helper, you should return a `SafeString` if you don't want it to be escaped by default. When using `SafeString` all unknown or unsafe data should be manually escaped with the `Escape` method. + +```go +raymond.RegisterHelper("link", func(url, text string) raymond.SafeString { + return raymond.SafeString("" + raymond.Escape(text) + "") +}) + +tpl := raymond.MustParse("{{link url text}}") + +ctx := map[string]string{ + "url": "http://www.aymerick.com/", + "text": "This is a cool website", +} + +result := tpl.MustExec(ctx) +fmt.Print(result) +``` + +Output: + +```html +This is a <em>cool</em> website +``` + + +## Helpers + +Helpers can be accessed from any context in a template. You can register a helper with the `RegisterHelper` function. + +For example: + +```html +
+<div class="post">
+  <h1>By {{fullName author}}</h1>
+  <div class="body">{{body}}</div>
+
+  <h1>Comments</h1>
+
+  {{#each comments}}
+  <h2>By {{fullName author}}</h2>
+  <div class="body">{{body}}</div>
+  {{/each}}
+</div>
+``` + +With this context and helper: + +```go +ctx := map[string]interface{}{ + "author": map[string]string{"firstName": "Jean", "lastName": "Valjean"}, + "body": "Life is difficult", + "comments": []map[string]interface{}{{ + "author": map[string]string{"firstName": "Marcel", "lastName": "Beliveau"}, + "body": "LOL!", + }}, +} + +raymond.RegisterHelper("fullName", func(person map[string]string) string { + return person["firstName"] + " " + person["lastName"] +}) +``` + +Outputs: + +```html +
+<div class="post">
+  <h1>By Jean Valjean</h1>
+  <div class="body">Life is difficult</div>
+
+  <h1>Comments</h1>
+
+  <h2>By Marcel Beliveau</h2>
+  <div class="body">LOL!</div>
+</div>
+``` + +Helper arguments can be any type. + +The following example uses structs instead of maps and produces the same output as the previous one: + +```html +
+<div class="post">
+  <h1>By {{fullName author}}</h1>
+  <div class="body">{{body}}</div>
+
+  <h1>Comments</h1>
+
+  {{#each comments}}
+  <h2>By {{fullName author}}</h2>
+  <div class="body">{{body}}</div>
+  {{/each}}
+</div>
+``` + +With this context and helper: + +```go +type Post struct { + Author Person + Body string + Comments []Comment +} + +type Person struct { + FirstName string + LastName string +} + +type Comment struct { + Author Person + Body string +} + +ctx := Post{ + Person{"Jean", "Valjean"}, + "Life is difficult", + []Comment{ + Comment{ + Person{"Marcel", "Beliveau"}, + "LOL!", + }, + }, +} + +RegisterHelper("fullName", func(person Person) string { + return person.FirstName + " " + person.LastName +}) +``` + + +### Template Helpers + +You can register a helper on a specific template, and in that case that helper will be available to that template only: + +```go +tpl := raymond.MustParse("User: {{fullName user.firstName user.lastName}}") + +tpl.RegisterHelper("fullName", func(firstName, lastName string) string { + return firstName + " " + lastName +}) +``` + + +### Built-In Helpers + +Those built-in helpers are available to all templates. + + +#### The `if` block helper + +You can use the `if` helper to conditionally render a block. If its argument returns `false`, `nil`, `0`, `""`, an empty array, an empty slice or an empty map, then raymond will not render the block. + +```html +
+<div class="entry">
+  {{#if author}}
+  <h1>{{firstName}} {{lastName}}</h1>
+  {{/if}}
+</div>
+``` + +When using a block expression, you can specify a template section to run if the expression returns a falsy value. That section, marked by `{{else}}` is called an "else section". + +```html +
+<div class="entry">
+  {{#if author}}
+    <h1>{{firstName}} {{lastName}}</h1>
+  {{else}}
+    <h1>Unknown Author</h1>
+  {{/if}}
+</div>
+``` + +You can chain several blocks. For example that template: + +```html +{{#if isActive}} + Active +{{else if isInactive}} + Inactive +{{else}} + Unknown +{{/if}} +``` + +With that context: + +```go +ctx := map[string]interface{}{ + "isActive": false, + "isInactive": false, +} +``` + +Outputs: + +```html + Unknown +``` + + +#### The `unless` block helper + +You can use the `unless` helper as the inverse of the `if` helper. Its block will be rendered if the expression returns a falsy value. + +```html +
+<div class="entry">
+  {{#unless license}}
+  <h3 class="warning">WARNING: This entry does not have a license!</h3>
+  {{/unless}}
+</div>
+``` + + +#### The `each` block helper + +You can iterate over an array, a slice, a map or a struct instance using this built-in `each` helper. Inside the block, you can use `this` to reference the element being iterated over. + +For example: + +```html + +``` + +With this context: + +```go +map[string]interface{}{ + "people": []string{ + "Marcel", "Jean-Claude", "Yvette", + }, +} +``` + +Outputs: + +```html + +``` + +You can optionally provide an `{{else}}` section which will display only when the passed argument is an empty array, an empty slice or an empty map (a `struct` instance is never considered empty). + +```html +{{#each paragraphs}} +

{{this}}

+{{else}} +

No content

+{{/each}} +``` + +When looping through items in `each`, you can optionally reference the current loop index via `{{@index}}`. + +```html +{{#each array}} + {{@index}}: {{this}} +{{/each}} +``` + +Additionally for map and struct instance iteration, `{{@key}}` references the current map key or struct field name: + +```html +{{#each map}} + {{@key}}: {{this}} +{{/each}} +``` + +The first and last steps of iteration are noted via the `@first` and `@last` variables. + + +#### The `with` block helper + +You can shift the context for a section of a template by using the built-in `with` block helper. + +```html +
+<div class="entry">
+  <h1>{{title}}</h1>
+
+  {{#with author}}
+  <h2>By {{firstName}} {{lastName}}</h2>
+  {{/with}}
+</div>
+``` + +With this context: + +```go +map[string]interface{}{ + "title": "My first post!", + "author": map[string]string{ + "firstName": "Jean", + "lastName": "Valjean", + }, +} +``` + +Outputs: + +```html +
+<div class="entry">
+  <h1>My first post!</h1>
+
+  <h2>By Jean Valjean</h2>
+</div>
+``` + +You can optionally provide an `{{else}}` section which will display only when the passed argument is falsy. + +```html +{{#with author}} +

+  <p>{{name}}</p>
+{{else}}
+  <p class="empty">No content</p>
+{{/with}} +``` + + +#### The `lookup` helper + +The `lookup` helper allows for dynamic parameter resolution using handlebars variables. + +```html +{{#each bar}} + {{lookup ../foo @index}} +{{/each}} +``` + + +#### The `log` helper + +The `log` helper allows for logging while rendering a template. + +```html +{{log "Look at me!"}} +``` + +Note that the handlebars.js `@level` variable is not supported. + + +### Block Helpers + +Block helpers make it possible to define custom iterators and other functionality that can invoke the passed block with a new context. + + +#### Block Evaluation + +As an example, let's define a block helper that adds some markup to the wrapped text. + +```html +
+<div class="entry">
+  <h1>{{title}}</h1>
+  <div class="body">
+    {{#bold}}{{body}}{{/bold}}
+  </div>
+</div>
+``` + +The `bold` helper will add markup to make its text bold. + +```go +raymond.RegisterHelper("bold", func(options *raymond.Options) raymond.SafeString { + return raymond.SafeString(`
<div class="bold">` + options.Fn() + "</div>
") +}) +``` + +A helper evaluates the block content with current context by calling `options.Fn()`. + +If you want to evaluate the block with another context, then use `options.FnWith(ctx)`, like this french version of built-in `with` block helper: + +```go +raymond.RegisterHelper("avec", func(context interface{}, options *raymond.Options) string { + return options.FnWith(context) +}) +``` + +With that template: + +```html +{{#avec obj.text}}{{this}}{{/avec}} +``` + + +#### Conditional + +Let's write a french version of `if` block helper: + +```go +source := `{{#si yep}}YEP !{{/si}}` + +ctx := map[string]interface{}{"yep": true} + +raymond.RegisterHelper("si", func(conditional bool, options *raymond.Options) string { + if conditional { + return options.Fn() + } + return "" +}) +``` + +Note that as the first parameter of the helper is typed as `bool` an automatic conversion is made if corresponding context value is not a boolean. So this helper works with that context too: + +```go +ctx := map[string]interface{}{"yep": "message"} +``` + +Here, `"message"` is converted to `true` because it is an non-empty string. See `IsTrue()` function for more informations on boolean conversion. + + +#### Else Block Evaluation + +We can enhance the `si` block helper to evaluate the `else block` by calling `options.Inverse()` if conditional is false: + +```go +source := `{{#si yep}}YEP !{{else}}NOP !{{/si}}` + +ctx := map[string]interface{}{"yep": false} + +raymond.RegisterHelper("si", func(conditional bool, options *raymond.Options) string { + if conditional { + return options.Fn() + } + return options.Inverse() +}) +``` + +Outputs: +``` +NOP ! +``` + + +#### Block Parameters + +It's possible to receive named parameters from supporting helpers. + +```html +{{#each users as |user userId|}} + Id: {{userId}} Name: {{user.name}} +{{/each}} +``` + +In this particular example, `user` will have the same value as the current context and `userId` will have the index/key value for the iteration. + +This allows for nested helpers to avoid name conflicts. + +For example: + +```html +{{#each users as |user userId|}} + {{#each user.books as |book bookId|}} + User: {{userId}} Book: {{bookId}} + {{/each}} +{{/each}} +``` + +With this context: + +```go +ctx := map[string]interface{}{ + "users": map[string]interface{}{ + "marcel": map[string]interface{}{ + "books": map[string]interface{}{ + "book1": "My first book", + "book2": "My second book", + }, + }, + "didier": map[string]interface{}{ + "books": map[string]interface{}{ + "bookA": "Good book", + "bookB": "Bad book", + }, + }, + }, +} +``` + +Outputs: + +```html + User: marcel Book: book1 + User: marcel Book: book2 + User: didier Book: bookA + User: didier Book: bookB +``` + +As you can see, the second block parameter is the map key. When using structs, it is the struct field name. 
+ +When using arrays and slices, the second parameter is element index: + +```go +ctx := map[string]interface{}{ + "users": []map[string]interface{}{ + { + "id": "marcel", + "books": []map[string]interface{}{ + {"id": "book1", "title": "My first book"}, + {"id": "book2", "title": "My second book"}, + }, + }, + { + "id": "didier", + "books": []map[string]interface{}{ + {"id": "bookA", "title": "Good book"}, + {"id": "bookB", "title": "Bad book"}, + }, + }, + }, +} +``` + +Outputs: + +```html + User: 0 Book: 0 + User: 0 Book: 1 + User: 1 Book: 0 + User: 1 Book: 1 +``` + + +### Helper Parameters + +When calling a helper in a template, raymond expects the same number of arguments as the number of helper function parameters. + +So this template: + +```html +{{add a}} +``` + +With this helper: + +```go +raymond.RegisterHelper("add", func(val1, val2 int) string { + return strconv.Itoa(val1 + val2) +}) +``` + +Will simply panics, because we call the helper with one argument whereas it expects two. + + +#### Automatic conversion + +Let's create a `concat` helper that expects two strings and concat them: + +```go +source := `{{concat a b}}` + +ctx := map[string]interface{}{ + "a": "Jean", + "b": "Valjean", +} + +raymond.RegisterHelper("concat", func(val1, val2 string) string { + return val1 + " " + val2 +}) +``` + +Everything goes well, two strings are passed as arguments to the helper that outputs: + +```html +Jean VALJEAN +``` + +But what happens if there is another type than `string` in the context ? For example: + +```go +ctx := map[string]interface{}{ + "a": 10, + "b": "Valjean", +} +``` + +Actually, raymond perfoms automatic string conversion. So because the first parameter of the helper is typed as `string`, the first argument will be converted from the `10` integer to `"10"`, and the helper outputs: + +```html +10 VALJEAN +``` + +Note that this kind of automatic conversion is done with `bool` type too, thanks to the `IsTrue()` function. + + +### Options Argument + +If a helper needs the `Options` argument, just add it at the end of helper parameters: + +```go +raymond.RegisterHelper("add", func(val1, val2 int, options *raymond.Options) string { + return strconv.Itoa(val1 + val2) + " " + options.ValueStr("bananas") +}) +``` + +Thanks to the `options` argument, helpers have access to the current evaluation context, to the `Hash` arguments, and they can manipulate the private data variables. + +The `Options` argument is even necessary for Block Helpers to evaluate block and "else block". + + +#### Context Values + +Helpers fetch current context values with `options.Value()` and `options.ValuesStr()`. + +`Value()` returns an `interface{}` and lets the helper do the type assertions whereas `ValueStr()` automatically converts the value to a `string`. + +For example: + +```go +source := `{{concat a b}}` + +ctx := map[string]interface{}{ + "a": "Marcel", + "b": "Beliveau", + "suffix": "FOREVER !", +} + +raymond.RegisterHelper("concat", func(val1, val2 string, options *raymond.Options) string { + return val1 + " " + val2 + " " + options.ValueStr("suffix") +}) +``` + +Outputs: + +```html +Marcel Beliveau FOREVER ! +``` + +Helpers can get the entire current context with `options.Ctx()` that returns an `interface{}`. + + +#### Helper Hash Arguments + +Helpers access hash arguments with `options.HashProp()` and `options.HashStr()`. + +`HashProp()` returns an `interface{}` and lets the helper do the type assertions whereas `HashStr()` automatically converts the value to a `string`. 
+ +For example: + +```go +source := `{{concat suffix first=a second=b}}` + +ctx := map[string]interface{}{ + "a": "Marcel", + "b": "Beliveau", + "suffix": "FOREVER !", +} + +raymond.RegisterHelper("concat", func(suffix string, options *raymond.Options) string { + return options.HashStr("first") + " " + options.HashStr("second") + " " + suffix +}) +``` + +Outputs: + +```html +Marcel Beliveau FOREVER ! +``` + +Helpers can get the full hash with `options.Hash()` that returns a `map[string]interface{}`. + + +#### Private Data + +Helpers access private data variables with `options.Data()` and `options.DataStr()`. + +`Data()` returns an `interface{}` and lets the helper do the type assertions whereas `DataStr()` automatically converts the value to a `string`. + +Helpers can get the entire current data frame with `options.DataFrame()` that returns a `*DataFrame`. + +For helpers that need to inject their own private data frame, use `options.NewDataFrame()` to create the frame and `options.FnData()` to evaluate the block with that frame. + +For example: + +```go +source := `{{#voodoo kind=a}}Voodoo is {{@magix}}{{/voodoo}}` + +ctx := map[string]interface{}{ + "a": "awesome", +} + +raymond.RegisterHelper("voodoo", func(options *raymond.Options) string { + // create data frame with @magix data + frame := options.NewDataFrame() + frame.Set("magix", options.HashProp("kind")) + + // evaluates block with new data frame + return options.FnData(frame) +}) +``` + +Helpers that need to evaluate the block with a private data frame and a new context can call `options.FnCtxData()`. + + +### Utilites + +In addition to `Escape()`, raymond provides utility functions that can be usefull for helpers. + + +#### `Str()` + +`Str()` converts its parameter to a `string`. + +Booleans: + +```go +raymond.Str(3) + " foos and " + raymond.Str(-1.25) + " bars" +// Outputs: "3 foos and -1.25 bars" +``` + +Numbers: + +``` go +"everything is " + raymond.Str(true) + " and nothing is " + raymond.Str(false) +// Outputs: "everything is true and nothing is false" +``` + +Maps: + +```go +raymond.Str(map[string]string{"foo": "bar"}) +// Outputs: "map[foo:bar]" +``` + +Arrays and Slices: + +```go +raymond.Str([]interface{}{true, 10, "foo", 5, "bar"}) +// Outputs: "true10foo5bar" +``` + + +#### `IsTrue()` + +`IsTrue()` returns the truthy version of its parameter. + +It returns `false` when parameter is either: + + - an empty array + - an empty slice + - an empty map + - `""` + - `nil` + - `0` + - `false` + +For all others values, `IsTrue()` returns `true`. + + +## Context Functions + +In addition to helpers, lambdas found in context are evaluated. + +For example, that template and context: + +```go +source := "I {{feeling}} you" + +ctx := map[string]interface{}{ + "feeling": func() string { + rand.Seed(time.Now().UTC().UnixNano()) + + feelings := []string{"hate", "love"} + return feelings[rand.Intn(len(feelings))] + }, +} +``` + +Randomly renders `I hate you` or `I love you`. + +Those context functions behave like helper functions: they can be called with parameters and they can have an `Options` argument. 
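+
+For example, here is a minimal sketch (the `exclaim`, `greeting` and `name` keys are only illustrative) of a context function that takes a parameter and an `Options` argument:
+
+```go
+source := `{{exclaim greeting}}`
+
+ctx := map[string]interface{}{
+	"greeting": "Hello",
+	"name":     "Jean",
+	"exclaim": func(text string, options *raymond.Options) string {
+		// the parameter comes from the template call, the rest from the context
+		return text + ", " + options.ValueStr("name") + "!"
+	},
+}
+
+result := raymond.MustRender(source, ctx)
+fmt.Print(result)
+// Hello, Jean!
+```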
+ + +## Partials + +### Template Partials + +You can register template partials before execution: + +```go +tpl := raymond.MustParse("{{> foo}} baz") +tpl.RegisterPartial("foo", "bar") + +result := tpl.MustExec(nil) +fmt.Print(result) +``` + +Output: + +```html +bar baz +``` + +You can register several partials at once: + +```go +tpl := raymond.MustParse("{{> foo}} and {{> baz}}") +tpl.RegisterPartials(map[string]string{ + "foo": "bar", + "baz": "bat", +}) + +result := tpl.MustExec(nil) +fmt.Print(result) +``` + +Output: + +```html +bar and bat +``` + + +### Global Partials + +You can registers global partials that will be accessible by all templates: + +```go +raymond.RegisterPartial("foo", "bar") + +tpl := raymond.MustParse("{{> foo}} baz") +result := tpl.MustExec(nil) +fmt.Print(result) +``` + +Or: + +```go +raymond.RegisterPartials(map[string]string{ + "foo": "bar", + "baz": "bat", +}) + +tpl := raymond.MustParse("{{> foo}} and {{> baz}}") +result := tpl.MustExec(nil) +fmt.Print(result) +``` + + +### Dynamic Partials + +It's possible to dynamically select the partial to be executed by using sub expression syntax. + +For example, that template randomly evaluates the `foo` or `baz` partial: + +```go +tpl := raymond.MustParse("{{> (whichPartial) }}") +tpl.RegisterPartials(map[string]string{ + "foo": "bar", + "baz": "bat", +}) + +ctx := map[string]interface{}{ + "whichPartial": func() string { + rand.Seed(time.Now().UTC().UnixNano()) + + names := []string{"foo", "baz"} + return names[rand.Intn(len(names))] + }, +} + +result := tpl.MustExec(ctx) +fmt.Print(result) +``` + + +### Partial Contexts + +It's possible to execute partials on a custom context by passing in the context to the partial call. + +For example: + +```go +tpl := raymond.MustParse("User: {{> userDetails user }}") +tpl.RegisterPartial("userDetails", "{{firstname}} {{lastname}}") + +ctx := map[string]interface{}{ + "user": map[string]string{ + "firstname": "Jean", + "lastname": "Valjean", + }, +} + +result := tpl.MustExec(ctx) +fmt.Print(result) +``` + +Displays: + +```html +User: Jean Valjean +``` + + +### Partial Parameters + +Custom data can be passed to partials through hash parameters. 
+ +For example: + +```go +tpl := raymond.MustParse("{{> myPartial name=hero }}") +tpl.RegisterPartial("myPartial", "My hero is {{name}}") + +ctx := map[string]interface{}{ + "hero": "Goldorak", +} + +result := tpl.MustExec(ctx) +fmt.Print(result) +``` + +Displays: + +```html +My hero is Goldorak +``` + + +## Utility Functions + +You can use following utility fuctions to parse and register partials from files: + +- `ParseFile()` - reads a file and return parsed template +- `Template.RegisterPartialFile()` - reads a file and registers its content as a partial with given name +- `Template.RegisterPartialFiles()` - reads several files and registers them as partials, the filename base is used as the partial name + + +## Mustache + +Handlebars is a superset of [mustache](https://mustache.github.io) but it differs on those points: + +- Alternative delimiters are not supported +- There is no recursive lookup + + +## Limitations + +These handlebars options are currently NOT implemented: + +- `compat` - enables recursive field lookup +- `knownHelpers` - list of helpers that are known to exist (truthy) at template execution time +- `knownHelpersOnly` - allows further optimizations based on the known helpers list +- `trackIds` - include the id names used to resolve parameters for helpers +- `noEscape` - disables HTML escaping globally +- `strict` - templates will throw rather than silently ignore missing fields +- `assumeObjects` - removes object existence checks when traversing paths +- `preventIndent` - disables the auto-indententation of nested partials +- `stringParams` - resolves a parameter to it's name if the value isn't present in the context stack + +These handlebars features are currently NOT implemented: + +- raw block content is not passed as a parameter to helper +- `blockHelperMissing` - helper called when a helper can not be directly resolved +- `helperMissing` - helper called when a potential helper expression was not found +- `@contextPath` - value set in `trackIds` mode that records the lookup path for the current context +- `@level` - log level + + +## Handlebars Lexer + +You should not use the lexer directly, but for your information here is an example: + +```go +package main + +import ( + "fmt" + + "github.com/aymerick/raymond/lexer" +) + +func main() { + source := "You know {{nothing}} John Snow" + + output := "" + + lex := lexer.Scan(source) + for { + // consume next token + token := lex.NextToken() + + output += fmt.Sprintf(" %s", token) + + // stops when all tokens have been consumed, or on error + if token.Kind == lexer.TokenEOF || token.Kind == lexer.TokenError { + break + } + } + + fmt.Print(output) +} +``` + +Outputs: + +``` +Content{"You know "} Open{"{{"} ID{"nothing"} Close{"}}"} Content{" John Snow"} EOF +``` + + +## Handlebars Parser + +You should not use the parser directly, but for your information here is an example: + +```go +package main + +import ( + "fmt" + + "github.com/aymerick/raymond/ast" + "github.com/aymerick/raymond/parser" +) + +fu nc main() { + source := "You know {{nothing}} John Snow" + + // parse template + program, err := parser.Parse(source) + if err != nil { + panic(err) + } + + // print AST + output := ast.Print(program) + + fmt.Print(output) +} +``` + +Outputs: + +``` +CONTENT[ 'You know ' ] +{{ PATH:nothing [] }} +CONTENT[ ' John Snow' ] +``` + + +## Test + +First, fetch mustache tests: + + $ git submodule update --init + +To run all tests: + + $ go test ./... 
+ +To filter tests: + + $ go test -run="Partials" + +To run all test and all benchmarks: + + $ go test -bench . ./... + + +## References + + - + - + - + - + + +## Others Implementations + +- [handlebars.js](http://handlebarsjs.com) - javascript +- [handlebars.java](https://github.com/jknack/handlebars.java) - java +- [handlebars.rb](https://github.com/cowboyd/handlebars.rb) - ruby +- [handlebars.php](https://github.com/XaminProject/handlebars.php) - php +- [handlebars-objc](https://github.com/Bertrand/handlebars-objc) - Objective C +- [rumblebars](https://github.com/nicolas-cherel/rumblebars) - rust diff --git a/vendor/github.com/aymerick/raymond/VERSION b/vendor/github.com/aymerick/raymond/VERSION new file mode 100644 index 0000000..1cc5f65 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/VERSION @@ -0,0 +1 @@ +1.1.0 \ No newline at end of file diff --git a/vendor/github.com/aymerick/raymond/ast/node.go b/vendor/github.com/aymerick/raymond/ast/node.go new file mode 100644 index 0000000..e080096 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/ast/node.go @@ -0,0 +1,767 @@ +// Package ast provides structures to represent a handlebars Abstract Syntax Tree, and a Visitor interface to visit that tree. +package ast + +import ( + "fmt" + "strconv" +) + +// References: +// - https://github.com/wycats/handlebars.js/blob/master/lib/handlebars/compiler/ast.js +// - https://github.com/wycats/handlebars.js/blob/master/docs/compiler-api.md +// - https://github.com/golang/go/blob/master/src/text/template/parse/node.go + +// Node is an element in the AST. +type Node interface { + // node type + Type() NodeType + + // location of node in original input string + Location() Loc + + // string representation, used for debugging + String() string + + // accepts visitor + Accept(Visitor) interface{} +} + +// Visitor is the interface to visit an AST. +type Visitor interface { + VisitProgram(*Program) interface{} + + // statements + VisitMustache(*MustacheStatement) interface{} + VisitBlock(*BlockStatement) interface{} + VisitPartial(*PartialStatement) interface{} + VisitContent(*ContentStatement) interface{} + VisitComment(*CommentStatement) interface{} + + // expressions + VisitExpression(*Expression) interface{} + VisitSubExpression(*SubExpression) interface{} + VisitPath(*PathExpression) interface{} + + // literals + VisitString(*StringLiteral) interface{} + VisitBoolean(*BooleanLiteral) interface{} + VisitNumber(*NumberLiteral) interface{} + + // miscellaneous + VisitHash(*Hash) interface{} + VisitHashPair(*HashPair) interface{} +} + +// NodeType represents an AST Node type. +type NodeType int + +// Type returns itself, and permits struct includers to satisfy that part of Node interface. +func (t NodeType) Type() NodeType { + return t +} + +const ( + // program + NodeProgram NodeType = iota + + // statements + NodeMustache + NodeBlock + NodePartial + NodeContent + NodeComment + + // expressions + NodeExpression + NodeSubExpression + NodePath + + // literals + NodeBoolean + NodeNumber + NodeString + + // miscellaneous + NodeHash + NodeHashPair +) + +// Loc represents the position of a parsed node in source file. +type Loc struct { + Pos int // Byte position + Line int // Line number +} + +// Location returns itself, and permits struct includers to satisfy that part of Node interface. +func (l Loc) Location() Loc { + return l +} + +// Strip describes node whitespace management. 
+type Strip struct { + Open bool + Close bool + + OpenStandalone bool + CloseStandalone bool + InlineStandalone bool +} + +// NewStrip instanciates a Strip for given open and close mustaches. +func NewStrip(openStr, closeStr string) *Strip { + return &Strip{ + Open: (len(openStr) > 2) && openStr[2] == '~', + Close: (len(closeStr) > 2) && closeStr[len(closeStr)-3] == '~', + } +} + +// NewStripForStr instanciates a Strip for given tag. +func NewStripForStr(str string) *Strip { + return &Strip{ + Open: (len(str) > 2) && str[2] == '~', + Close: (len(str) > 2) && str[len(str)-3] == '~', + } +} + +// String returns a string representation of receiver that can be used for debugging. +func (s *Strip) String() string { + return fmt.Sprintf("Open: %t, Close: %t, OpenStandalone: %t, CloseStandalone: %t, InlineStandalone: %t", s.Open, s.Close, s.OpenStandalone, s.CloseStandalone, s.InlineStandalone) +} + +// +// Program +// + +// Program represents a program node. +type Program struct { + NodeType + Loc + + Body []Node // [ Statement ... ] + BlockParams []string + Chained bool + + // whitespace management + Strip *Strip +} + +// NewProgram instanciates a new program node. +func NewProgram(pos int, line int) *Program { + return &Program{ + NodeType: NodeProgram, + Loc: Loc{pos, line}, + } +} + +// String returns a string representation of receiver that can be used for debugging. +func (node *Program) String() string { + return fmt.Sprintf("Program{Pos: %d}", node.Loc.Pos) +} + +// Accept is the receiver entry point for visitors. +func (node *Program) Accept(visitor Visitor) interface{} { + return visitor.VisitProgram(node) +} + +// AddStatement adds given statement to program. +func (node *Program) AddStatement(statement Node) { + node.Body = append(node.Body, statement) +} + +// +// Mustache Statement +// + +// MustacheStatement represents a mustache node. +type MustacheStatement struct { + NodeType + Loc + + Unescaped bool + Expression *Expression + + // whitespace management + Strip *Strip +} + +// NewMustacheStatement instanciates a new mustache node. +func NewMustacheStatement(pos int, line int, unescaped bool) *MustacheStatement { + return &MustacheStatement{ + NodeType: NodeMustache, + Loc: Loc{pos, line}, + Unescaped: unescaped, + } +} + +// String returns a string representation of receiver that can be used for debugging. +func (node *MustacheStatement) String() string { + return fmt.Sprintf("Mustache{Pos: %d}", node.Loc.Pos) +} + +// Accept is the receiver entry point for visitors. +func (node *MustacheStatement) Accept(visitor Visitor) interface{} { + return visitor.VisitMustache(node) +} + +// +// Block Statement +// + +// BlockStatement represents a block node. +type BlockStatement struct { + NodeType + Loc + + Expression *Expression + + Program *Program + Inverse *Program + + // whitespace management + OpenStrip *Strip + InverseStrip *Strip + CloseStrip *Strip +} + +// NewBlockStatement instanciates a new block node. +func NewBlockStatement(pos int, line int) *BlockStatement { + return &BlockStatement{ + NodeType: NodeBlock, + Loc: Loc{pos, line}, + } +} + +// String returns a string representation of receiver that can be used for debugging. +func (node *BlockStatement) String() string { + return fmt.Sprintf("Block{Pos: %d}", node.Loc.Pos) +} + +// Accept is the receiver entry point for visitors. +func (node *BlockStatement) Accept(visitor Visitor) interface{} { + return visitor.VisitBlock(node) +} + +// +// Partial Statement +// + +// PartialStatement represents a partial node. 
+type PartialStatement struct { + NodeType + Loc + + Name Node // PathExpression | SubExpression + Params []Node // [ Expression ... ] + Hash *Hash + + // whitespace management + Strip *Strip + Indent string +} + +// NewPartialStatement instanciates a new partial node. +func NewPartialStatement(pos int, line int) *PartialStatement { + return &PartialStatement{ + NodeType: NodePartial, + Loc: Loc{pos, line}, + } +} + +// String returns a string representation of receiver that can be used for debugging. +func (node *PartialStatement) String() string { + return fmt.Sprintf("Partial{Name:%s, Pos:%d}", node.Name, node.Loc.Pos) +} + +// Accept is the receiver entry point for visitors. +func (node *PartialStatement) Accept(visitor Visitor) interface{} { + return visitor.VisitPartial(node) +} + +// +// Content Statement +// + +// ContentStatement represents a content node. +type ContentStatement struct { + NodeType + Loc + + Value string + Original string + + // whitespace management + RightStripped bool + LeftStripped bool +} + +// NewContentStatement instanciates a new content node. +func NewContentStatement(pos int, line int, val string) *ContentStatement { + return &ContentStatement{ + NodeType: NodeContent, + Loc: Loc{pos, line}, + + Value: val, + Original: val, + } +} + +// String returns a string representation of receiver that can be used for debugging. +func (node *ContentStatement) String() string { + return fmt.Sprintf("Content{Value:'%s', Pos:%d}", node.Value, node.Loc.Pos) +} + +// Accept is the receiver entry point for visitors. +func (node *ContentStatement) Accept(visitor Visitor) interface{} { + return visitor.VisitContent(node) +} + +// +// Comment Statement +// + +// CommentStatement represents a comment node. +type CommentStatement struct { + NodeType + Loc + + Value string + + // whitespace management + Strip *Strip +} + +// NewCommentStatement instanciates a new comment node. +func NewCommentStatement(pos int, line int, val string) *CommentStatement { + return &CommentStatement{ + NodeType: NodeComment, + Loc: Loc{pos, line}, + + Value: val, + } +} + +// String returns a string representation of receiver that can be used for debugging. +func (node *CommentStatement) String() string { + return fmt.Sprintf("Comment{Value:'%s', Pos:%d}", node.Value, node.Loc.Pos) +} + +// Accept is the receiver entry point for visitors. +func (node *CommentStatement) Accept(visitor Visitor) interface{} { + return visitor.VisitComment(node) +} + +// +// Expression +// + +// Expression represents an expression node. +type Expression struct { + NodeType + Loc + + Path Node // PathExpression | StringLiteral | BooleanLiteral | NumberLiteral + Params []Node // [ Expression ... ] + Hash *Hash +} + +// NewExpression instanciates a new expression node. +func NewExpression(pos int, line int) *Expression { + return &Expression{ + NodeType: NodeExpression, + Loc: Loc{pos, line}, + } +} + +// String returns a string representation of receiver that can be used for debugging. +func (node *Expression) String() string { + return fmt.Sprintf("Expr{Path:%s, Pos:%d}", node.Path, node.Loc.Pos) +} + +// Accept is the receiver entry point for visitors. +func (node *Expression) Accept(visitor Visitor) interface{} { + return visitor.VisitExpression(node) +} + +// HelperName returns helper name, or an empty string if this expression can't be a helper. 
+func (node *Expression) HelperName() string { + path, ok := node.Path.(*PathExpression) + if !ok { + return "" + } + + if path.Data || (len(path.Parts) != 1) || (path.Depth > 0) || path.Scoped { + return "" + } + + return path.Parts[0] +} + +// FieldPath returns path expression representing a field path, or nil if this is not a field path. +func (node *Expression) FieldPath() *PathExpression { + path, ok := node.Path.(*PathExpression) + if !ok { + return nil + } + + return path +} + +// LiteralStr returns the string representation of literal value, with a boolean set to false if this is not a literal. +func (node *Expression) LiteralStr() (string, bool) { + return LiteralStr(node.Path) +} + +// Canonical returns the canonical form of expression node as a string. +func (node *Expression) Canonical() string { + if str, ok := HelperNameStr(node.Path); ok { + return str + } + + return "" +} + +// HelperNameStr returns the string representation of a helper name, with a boolean set to false if this is not a valid helper name. +// +// helperName : path | dataName | STRING | NUMBER | BOOLEAN | UNDEFINED | NULL +func HelperNameStr(node Node) (string, bool) { + // PathExpression + if str, ok := PathExpressionStr(node); ok { + return str, ok + } + + // Literal + if str, ok := LiteralStr(node); ok { + return str, ok + } + + return "", false +} + +// PathExpressionStr returns the string representation of path expression value, with a boolean set to false if this is not a path expression. +func PathExpressionStr(node Node) (string, bool) { + if path, ok := node.(*PathExpression); ok { + result := path.Original + + // "[foo bar]"" => "foo bar" + if (len(result) >= 2) && (result[0] == '[') && (result[len(result)-1] == ']') { + result = result[1 : len(result)-1] + } + + return result, true + } + + return "", false +} + +// LiteralStr returns the string representation of literal value, with a boolean set to false if this is not a literal. +func LiteralStr(node Node) (string, bool) { + if lit, ok := node.(*StringLiteral); ok { + return lit.Value, true + } + + if lit, ok := node.(*BooleanLiteral); ok { + return lit.Canonical(), true + } + + if lit, ok := node.(*NumberLiteral); ok { + return lit.Canonical(), true + } + + return "", false +} + +// +// SubExpression +// + +// SubExpression represents a subexpression node. +type SubExpression struct { + NodeType + Loc + + Expression *Expression +} + +// NewSubExpression instanciates a new subexpression node. +func NewSubExpression(pos int, line int) *SubExpression { + return &SubExpression{ + NodeType: NodeSubExpression, + Loc: Loc{pos, line}, + } +} + +// String returns a string representation of receiver that can be used for debugging. +func (node *SubExpression) String() string { + return fmt.Sprintf("Sexp{Path:%s, Pos:%d}", node.Expression.Path, node.Loc.Pos) +} + +// Accept is the receiver entry point for visitors. +func (node *SubExpression) Accept(visitor Visitor) interface{} { + return visitor.VisitSubExpression(node) +} + +// +// Path Expression +// + +// PathExpression represents a path expression node. +type PathExpression struct { + NodeType + Loc + + Original string + Depth int + Parts []string + Data bool + Scoped bool +} + +// NewPathExpression instanciates a new path expression node. 
+func NewPathExpression(pos int, line int, data bool) *PathExpression { + result := &PathExpression{ + NodeType: NodePath, + Loc: Loc{pos, line}, + + Data: data, + } + + if data { + result.Original = "@" + } + + return result +} + +// String returns a string representation of receiver that can be used for debugging. +func (node *PathExpression) String() string { + return fmt.Sprintf("Path{Original:'%s', Pos:%d}", node.Original, node.Loc.Pos) +} + +// Accept is the receiver entry point for visitors. +func (node *PathExpression) Accept(visitor Visitor) interface{} { + return visitor.VisitPath(node) +} + +// Part adds path part. +func (node *PathExpression) Part(part string) { + node.Original += part + + switch part { + case "..": + node.Depth += 1 + node.Scoped = true + case ".", "this": + node.Scoped = true + default: + node.Parts = append(node.Parts, part) + } +} + +// Sep adds path separator. +func (node *PathExpression) Sep(separator string) { + node.Original += separator +} + +// IsDataRoot returns true if path expression is @root. +func (node *PathExpression) IsDataRoot() bool { + return node.Data && (node.Parts[0] == "root") +} + +// +// String Literal +// + +// StringLiteral represents a string node. +type StringLiteral struct { + NodeType + Loc + + Value string +} + +// NewStringLiteral instanciates a new string node. +func NewStringLiteral(pos int, line int, val string) *StringLiteral { + return &StringLiteral{ + NodeType: NodeString, + Loc: Loc{pos, line}, + + Value: val, + } +} + +// String returns a string representation of receiver that can be used for debugging. +func (node *StringLiteral) String() string { + return fmt.Sprintf("String{Value:'%s', Pos:%d}", node.Value, node.Loc.Pos) +} + +// Accept is the receiver entry point for visitors. +func (node *StringLiteral) Accept(visitor Visitor) interface{} { + return visitor.VisitString(node) +} + +// +// Boolean Literal +// + +// BooleanLiteral represents a boolean node. +type BooleanLiteral struct { + NodeType + Loc + + Value bool + Original string +} + +// NewBooleanLiteral instanciates a new boolean node. +func NewBooleanLiteral(pos int, line int, val bool, original string) *BooleanLiteral { + return &BooleanLiteral{ + NodeType: NodeBoolean, + Loc: Loc{pos, line}, + + Value: val, + Original: original, + } +} + +// String returns a string representation of receiver that can be used for debugging. +func (node *BooleanLiteral) String() string { + return fmt.Sprintf("Boolean{Value:%s, Pos:%d}", node.Canonical(), node.Loc.Pos) +} + +// Accept is the receiver entry point for visitors. +func (node *BooleanLiteral) Accept(visitor Visitor) interface{} { + return visitor.VisitBoolean(node) +} + +// Canonical returns the canonical form of boolean node as a string (ie. "true" | "false"). +func (node *BooleanLiteral) Canonical() string { + if node.Value { + return "true" + } else { + return "false" + } +} + +// +// Number Literal +// + +// NumberLiteral represents a number node. +type NumberLiteral struct { + NodeType + Loc + + Value float64 + IsInt bool + Original string +} + +// NewNumberLiteral instanciates a new number node. +func NewNumberLiteral(pos int, line int, val float64, isInt bool, original string) *NumberLiteral { + return &NumberLiteral{ + NodeType: NodeNumber, + Loc: Loc{pos, line}, + + Value: val, + IsInt: isInt, + Original: original, + } +} + +// String returns a string representation of receiver that can be used for debugging. 
+func (node *NumberLiteral) String() string { + return fmt.Sprintf("Number{Value:%s, Pos:%d}", node.Canonical(), node.Loc.Pos) +} + +// Accept is the receiver entry point for visitors. +func (node *NumberLiteral) Accept(visitor Visitor) interface{} { + return visitor.VisitNumber(node) +} + +// Canonical returns the canonical form of number node as a string (eg: "12", "-1.51"). +func (node *NumberLiteral) Canonical() string { + prec := -1 + if node.IsInt { + prec = 0 + } + return strconv.FormatFloat(node.Value, 'f', prec, 64) +} + +// Number returns an integer or a float. +func (node *NumberLiteral) Number() interface{} { + if node.IsInt { + return int(node.Value) + } else { + return node.Value + } +} + +// +// Hash +// + +// Hash represents a hash node. +type Hash struct { + NodeType + Loc + + Pairs []*HashPair +} + +// NewNumberLiteral instanciates a new hash node. +func NewHash(pos int, line int) *Hash { + return &Hash{ + NodeType: NodeHash, + Loc: Loc{pos, line}, + } +} + +// String returns a string representation of receiver that can be used for debugging. +func (node *Hash) String() string { + result := fmt.Sprintf("Hash{[", node.Loc.Pos) + + for i, p := range node.Pairs { + if i > 0 { + result += ", " + } + result += p.String() + } + + return result + fmt.Sprintf("], Pos:%d}", node.Loc.Pos) +} + +// Accept is the receiver entry point for visitors. +func (node *Hash) Accept(visitor Visitor) interface{} { + return visitor.VisitHash(node) +} + +// +// HashPair +// + +// HashPair represents a hash pair node. +type HashPair struct { + NodeType + Loc + + Key string + Val Node // Expression +} + +// NewHashPair instanciates a new hash pair node. +func NewHashPair(pos int, line int) *HashPair { + return &HashPair{ + NodeType: NodeHashPair, + Loc: Loc{pos, line}, + } +} + +// String returns a string representation of receiver that can be used for debugging. +func (node *HashPair) String() string { + return node.Key + "=" + node.Val.String() +} + +// Accept is the receiver entry point for visitors. +func (node *HashPair) Accept(visitor Visitor) interface{} { + return visitor.VisitHashPair(node) +} diff --git a/vendor/github.com/aymerick/raymond/ast/print.go b/vendor/github.com/aymerick/raymond/ast/print.go new file mode 100644 index 0000000..133ae6e --- /dev/null +++ b/vendor/github.com/aymerick/raymond/ast/print.go @@ -0,0 +1,279 @@ +package ast + +import ( + "fmt" + "strings" +) + +// printVisitor implements the Visitor interface to print a AST. +type printVisitor struct { + buf string + depth int + + original bool + inBlock bool +} + +func newPrintVisitor() *printVisitor { + return &printVisitor{} +} + +// Print returns a string representation of given AST, that can be used for debugging purpose. 
+func Print(node Node) string { + visitor := newPrintVisitor() + node.Accept(visitor) + return visitor.output() +} + +func (v *printVisitor) output() string { + return v.buf +} + +func (v *printVisitor) indent() { + for i := 0; i < v.depth; { + v.buf += " " + i++ + } +} + +func (v *printVisitor) str(val string) { + v.buf += val +} + +func (v *printVisitor) nl() { + v.str("\n") +} + +func (v *printVisitor) line(val string) { + v.indent() + v.str(val) + v.nl() +} + +// +// Visitor interface +// + +// Statements + +// VisitProgram implements corresponding Visitor interface method +func (v *printVisitor) VisitProgram(node *Program) interface{} { + if len(node.BlockParams) > 0 { + v.line("BLOCK PARAMS: [ " + strings.Join(node.BlockParams, " ") + " ]") + } + + for _, n := range node.Body { + n.Accept(v) + } + + return nil +} + +// VisitMustache implements corresponding Visitor interface method +func (v *printVisitor) VisitMustache(node *MustacheStatement) interface{} { + v.indent() + v.str("{{ ") + + node.Expression.Accept(v) + + v.str(" }}") + v.nl() + + return nil +} + +// VisitBlock implements corresponding Visitor interface method +func (v *printVisitor) VisitBlock(node *BlockStatement) interface{} { + v.inBlock = true + + v.line("BLOCK:") + v.depth++ + + node.Expression.Accept(v) + + if node.Program != nil { + v.line("PROGRAM:") + v.depth++ + node.Program.Accept(v) + v.depth-- + } + + if node.Inverse != nil { + // if node.Program != nil { + // v.depth++ + // } + + v.line("{{^}}") + v.depth++ + node.Inverse.Accept(v) + v.depth-- + + // if node.Program != nil { + // v.depth-- + // } + } + + v.inBlock = false + + return nil +} + +// VisitPartial implements corresponding Visitor interface method +func (v *printVisitor) VisitPartial(node *PartialStatement) interface{} { + v.indent() + v.str("{{> PARTIAL:") + + v.original = true + node.Name.Accept(v) + v.original = false + + if len(node.Params) > 0 { + v.str(" ") + node.Params[0].Accept(v) + } + + // hash + if node.Hash != nil { + v.str(" ") + node.Hash.Accept(v) + } + + v.str(" }}") + v.nl() + + return nil +} + +// VisitContent implements corresponding Visitor interface method +func (v *printVisitor) VisitContent(node *ContentStatement) interface{} { + v.line("CONTENT[ '" + node.Value + "' ]") + + return nil +} + +// VisitComment implements corresponding Visitor interface method +func (v *printVisitor) VisitComment(node *CommentStatement) interface{} { + v.line("{{! 
'" + node.Value + "' }}") + + return nil +} + +// Expressions + +// VisitExpression implements corresponding Visitor interface method +func (v *printVisitor) VisitExpression(node *Expression) interface{} { + if v.inBlock { + v.indent() + } + + // path + node.Path.Accept(v) + + // params + v.str(" [") + for i, n := range node.Params { + if i > 0 { + v.str(", ") + } + n.Accept(v) + } + v.str("]") + + // hash + if node.Hash != nil { + v.str(" ") + node.Hash.Accept(v) + } + + if v.inBlock { + v.nl() + } + + return nil +} + +// VisitSubExpression implements corresponding Visitor interface method +func (v *printVisitor) VisitSubExpression(node *SubExpression) interface{} { + node.Expression.Accept(v) + + return nil +} + +// VisitPath implements corresponding Visitor interface method +func (v *printVisitor) VisitPath(node *PathExpression) interface{} { + if v.original { + v.str(node.Original) + } else { + path := strings.Join(node.Parts, "/") + + result := "" + if node.Data { + result += "@" + } + + v.str(result + "PATH:" + path) + } + + return nil +} + +// Literals + +// VisitString implements corresponding Visitor interface method +func (v *printVisitor) VisitString(node *StringLiteral) interface{} { + if v.original { + v.str(node.Value) + } else { + v.str("\"" + node.Value + "\"") + } + + return nil +} + +// VisitBoolean implements corresponding Visitor interface method +func (v *printVisitor) VisitBoolean(node *BooleanLiteral) interface{} { + if v.original { + v.str(node.Original) + } else { + v.str(fmt.Sprintf("BOOLEAN{%s}", node.Canonical())) + } + + return nil +} + +// VisitNumber implements corresponding Visitor interface method +func (v *printVisitor) VisitNumber(node *NumberLiteral) interface{} { + if v.original { + v.str(node.Original) + } else { + v.str(fmt.Sprintf("NUMBER{%s}", node.Canonical())) + } + + return nil +} + +// Miscellaneous + +// VisitHash implements corresponding Visitor interface method +func (v *printVisitor) VisitHash(node *Hash) interface{} { + v.str("HASH{") + + for i, p := range node.Pairs { + if i > 0 { + v.str(", ") + } + p.Accept(v) + } + + v.str("}") + + return nil +} + +// VisitHashPair implements corresponding Visitor interface method +func (v *printVisitor) VisitHashPair(node *HashPair) interface{} { + v.str(node.Key + "=") + node.Val.Accept(v) + + return nil +} diff --git a/vendor/github.com/aymerick/raymond/base_test.go b/vendor/github.com/aymerick/raymond/base_test.go new file mode 100644 index 0000000..b769331 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/base_test.go @@ -0,0 +1,167 @@ +package raymond + +import ( + "fmt" + "regexp" + "testing" +) + +type Test struct { + name string + input string + data interface{} + privData map[string]interface{} + helpers map[string]interface{} + partials map[string]string + output interface{} +} + +func launchTests(t *testing.T, tests []Test) { + // NOTE: TestMustache() makes Parallel testing fail + // t.Parallel() + + for _, test := range tests { + var err error + var tpl *Template + + // parse template + tpl, err = Parse(test.input) + if err != nil { + t.Errorf("Test '%s' failed - Failed to parse template\ninput:\n\t'%s'\nerror:\n\t%s", test.name, test.input, err) + } else { + if len(test.helpers) > 0 { + // register helpers + tpl.RegisterHelpers(test.helpers) + } + + if len(test.partials) > 0 { + // register partials + tpl.RegisterPartials(test.partials) + } + + // setup private data frame + var privData *DataFrame + if test.privData != nil { + privData = NewDataFrame() + for k, v := range 
test.privData { + privData.Set(k, v) + } + } + + // render template + output, err := tpl.ExecWith(test.data, privData) + if err != nil { + t.Errorf("Test '%s' failed\ninput:\n\t'%s'\ndata:\n\t%s\nerror:\n\t%s\nAST:\n\t%s", test.name, test.input, Str(test.data), err, tpl.PrintAST()) + } else { + // check output + var expectedArr []string + expectedArr, ok := test.output.([]string) + if ok { + match := false + for _, expectedStr := range expectedArr { + if expectedStr == output { + match = true + break + } + } + + if !match { + t.Errorf("Test '%s' failed\ninput:\n\t'%s'\ndata:\n\t%s\npartials:\n\t%s\nexpected\n\t%q\ngot\n\t%q\nAST:\n%s", test.name, test.input, Str(test.data), Str(test.partials), expectedArr, output, tpl.PrintAST()) + } + } else { + expectedStr, ok := test.output.(string) + if !ok { + panic(fmt.Errorf("Erroneous test output description: %q", test.output)) + } + + if expectedStr != output { + t.Errorf("Test '%s' failed\ninput:\n\t'%s'\ndata:\n\t%s\npartials:\n\t%s\nexpected\n\t%q\ngot\n\t%q\nAST:\n%s", test.name, test.input, Str(test.data), Str(test.partials), expectedStr, output, tpl.PrintAST()) + } + } + } + } + } +} + +func launchErrorTests(t *testing.T, tests []Test) { + t.Parallel() + + for _, test := range tests { + var err error + var tpl *Template + + // parse template + tpl, err = Parse(test.input) + if err != nil { + t.Errorf("Test '%s' failed - Failed to parse template\ninput:\n\t'%s'\nerror:\n\t%s", test.name, test.input, err) + } else { + if len(test.helpers) > 0 { + // register helpers + tpl.RegisterHelpers(test.helpers) + } + + if len(test.partials) > 0 { + // register partials + tpl.RegisterPartials(test.partials) + } + + // setup private data frame + var privData *DataFrame + if test.privData != nil { + privData := NewDataFrame() + for k, v := range test.privData { + privData.Set(k, v) + } + } + + // render template + output, err := tpl.ExecWith(test.data, privData) + if err == nil { + t.Errorf("Test '%s' failed - Error expected\ninput:\n\t'%s'\ngot\n\t%q\nAST:\n%q", test.name, test.input, output, tpl.PrintAST()) + } else { + var errMatch error + match := false + + // check output + var expectedArr []string + expectedArr, ok := test.output.([]string) + if ok { + if len(expectedArr) > 0 { + for _, expectedStr := range expectedArr { + match, errMatch = regexp.MatchString(regexp.QuoteMeta(expectedStr), fmt.Sprint(err)) + if errMatch != nil { + panic("Failed to match regexp") + } + + if match { + break + } + } + } else { + // nothing to test + match = true + } + } else { + expectedStr, ok := test.output.(string) + if !ok { + panic(fmt.Errorf("Erroneous test output description: %q", test.output)) + } + + if expectedStr != "" { + match, errMatch = regexp.MatchString(regexp.QuoteMeta(expectedStr), fmt.Sprint(err)) + if errMatch != nil { + panic("Failed to match regexp") + } + } else { + // nothing to test + match = true + } + } + + if !match { + t.Errorf("Test '%s' failed - Incorrect error returned\ninput:\n\t'%s'\ndata:\n\t%s\nexpected\n\t%q\ngot\n\t%q", test.name, test.input, Str(test.data), test.output, err) + } + } + } + } +} diff --git a/vendor/github.com/aymerick/raymond/benchmark_test.go b/vendor/github.com/aymerick/raymond/benchmark_test.go new file mode 100644 index 0000000..f9ea74c --- /dev/null +++ b/vendor/github.com/aymerick/raymond/benchmark_test.go @@ -0,0 +1,316 @@ +package raymond + +import "testing" + +// +// Those tests come from: +// https://github.com/wycats/handlebars.js/blob/master/bench/ +// +// Note that handlebars.js does NOT benchmark 
template compilation, it only benchmarks evaluation. +// + +func BenchmarkArguments(b *testing.B) { + source := `{{foo person "person" 1 true foo=bar foo="person" foo=1 foo=true}}` + + ctx := map[string]bool{ + "bar": true, + } + + tpl := MustParse(source) + tpl.RegisterHelper("foo", func(a, b, c, d interface{}) string { return "" }) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + tpl.MustExec(ctx) + } +} + +func BenchmarkArrayEach(b *testing.B) { + source := `{{#each names}}{{name}}{{/each}}` + + ctx := map[string][]map[string]string{ + "names": { + {"name": "Moe"}, + {"name": "Larry"}, + {"name": "Curly"}, + {"name": "Shemp"}, + }, + } + + tpl := MustParse(source) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + tpl.MustExec(ctx) + } +} + +func BenchmarkArrayMustache(b *testing.B) { + source := `{{#names}}{{name}}{{/names}}` + + ctx := map[string][]map[string]string{ + "names": { + {"name": "Moe"}, + {"name": "Larry"}, + {"name": "Curly"}, + {"name": "Shemp"}, + }, + } + + tpl := MustParse(source) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + tpl.MustExec(ctx) + } +} + +func BenchmarkComplex(b *testing.B) { + source := `

<h1>{{header}}</h1>
+{{#if items}}
+  <ul>
+    {{#each items}}
+      {{#if current}}
+        <li><strong>{{name}}</strong></li>
+      {{^}}
+        <li><a href="{{url}}">{{name}}</a></li>
+      {{/if}}
+    {{/each}}
+  </ul>
+{{^}}
+  <p>The list is empty.</p>
+{{/if}} +` + + ctx := map[string]interface{}{ + "header": func() string { return "Colors" }, + "hasItems": true, + "items": []map[string]interface{}{ + {"name": "red", "current": true, "url": "#Red"}, + {"name": "green", "current": false, "url": "#Green"}, + {"name": "blue", "current": false, "url": "#Blue"}, + }, + } + + tpl := MustParse(source) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + tpl.MustExec(ctx) + } +} + +func BenchmarkData(b *testing.B) { + source := `{{#each names}}{{@index}}{{name}}{{/each}}` + + ctx := map[string][]map[string]string{ + "names": { + {"name": "Moe"}, + {"name": "Larry"}, + {"name": "Curly"}, + {"name": "Shemp"}, + }, + } + + tpl := MustParse(source) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + tpl.MustExec(ctx) + } +} + +func BenchmarkDepth1(b *testing.B) { + source := `{{#each names}}{{../foo}}{{/each}}` + + ctx := map[string]interface{}{ + "names": []map[string]string{ + {"name": "Moe"}, + {"name": "Larry"}, + {"name": "Curly"}, + {"name": "Shemp"}, + }, + "foo": "bar", + } + + tpl := MustParse(source) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + tpl.MustExec(ctx) + } +} + +func BenchmarkDepth2(b *testing.B) { + source := `{{#each names}}{{#each name}}{{../bat}}{{../../foo}}{{/each}}{{/each}}` + + ctx := map[string]interface{}{ + "names": []map[string]interface{}{ + {"bat": "foo", "name": []string{"Moe"}}, + {"bat": "foo", "name": []string{"Larry"}}, + {"bat": "foo", "name": []string{"Curly"}}, + {"bat": "foo", "name": []string{"Shemp"}}, + }, + "foo": "bar", + } + + tpl := MustParse(source) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + tpl.MustExec(ctx) + } +} + +func BenchmarkObjectMustache(b *testing.B) { + source := `{{#person}}{{name}}{{age}}{{/person}}` + + ctx := map[string]interface{}{ + "person": map[string]interface{}{ + "name": "Larry", + "age": 45, + }, + } + + tpl := MustParse(source) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + tpl.MustExec(ctx) + } +} + +func BenchmarkObject(b *testing.B) { + source := `{{#with person}}{{name}}{{age}}{{/with}}` + + ctx := map[string]interface{}{ + "person": map[string]interface{}{ + "name": "Larry", + "age": 45, + }, + } + + tpl := MustParse(source) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + tpl.MustExec(ctx) + } +} + +func BenchmarkPartialRecursion(b *testing.B) { + source := `{{name}}{{#each kids}}{{>recursion}}{{/each}}` + + ctx := map[string]interface{}{ + "name": 1, + "kids": []map[string]interface{}{ + { + "name": "1.1", + "kids": []map[string]interface{}{ + { + "name": "1.1.1", + "kids": []map[string]interface{}{}, + }, + }, + }, + }, + } + + tpl := MustParse(source) + + partial := MustParse(`{{name}}{{#each kids}}{{>recursion}}{{/each}}`) + tpl.RegisterPartialTemplate("recursion", partial) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + tpl.MustExec(ctx) + } +} + +func BenchmarkPartial(b *testing.B) { + source := `{{#each peeps}}{{>variables}}{{/each}}` + + ctx := map[string]interface{}{ + "peeps": []map[string]interface{}{ + {"name": "Moe", "count": 15}, + {"name": "Moe", "count": 5}, + {"name": "Curly", "count": 1}, + }, + } + + tpl := MustParse(source) + + partial := MustParse(`Hello {{name}}! 
You have {{count}} new messages.`) + tpl.RegisterPartialTemplate("variables", partial) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + tpl.MustExec(ctx) + } +} + +func BenchmarkPath(b *testing.B) { + source := `{{person.name.bar.baz}}{{person.age}}{{person.foo}}{{animal.age}}` + + ctx := map[string]interface{}{ + "person": map[string]interface{}{ + "name": map[string]interface{}{ + "bar": map[string]string{ + "baz": "Larry", + }, + }, + "age": 45, + }, + } + + tpl := MustParse(source) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + tpl.MustExec(ctx) + } +} + +func BenchmarkString(b *testing.B) { + source := `Hello world` + + tpl := MustParse(source) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + tpl.MustExec(nil) + } +} + +func BenchmarkSubExpression(b *testing.B) { + source := `{{echo (header)}}` + + ctx := map[string]interface{}{} + + tpl := MustParse(source) + tpl.RegisterHelpers(map[string]interface{}{ + "echo": func(v string) string { return "foo " + v }, + "header": func() string { return "Colors" }, + }) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + tpl.MustExec(ctx) + } +} + +func BenchmarkVariables(b *testing.B) { + source := `Hello {{name}}! You have {{count}} new messages.` + + ctx := map[string]interface{}{ + "name": "Mick", + "count": 30, + } + + tpl := MustParse(source) + + b.ResetTimer() + for i := 0; i < b.N; i++ { + tpl.MustExec(ctx) + } +} diff --git a/vendor/github.com/aymerick/raymond/data_frame.go b/vendor/github.com/aymerick/raymond/data_frame.go new file mode 100644 index 0000000..ce63218 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/data_frame.go @@ -0,0 +1,95 @@ +package raymond + +import "reflect" + +// DataFrame represents a private data frame. +// +// Cf. private variables documentation at: http://handlebarsjs.com/block_helpers.html +type DataFrame struct { + parent *DataFrame + data map[string]interface{} +} + +// NewDataFrame instanciates a new private data frame. +func NewDataFrame() *DataFrame { + return &DataFrame{ + data: make(map[string]interface{}), + } +} + +// Copy instanciates a new private data frame with receiver as parent. +func (p *DataFrame) Copy() *DataFrame { + result := NewDataFrame() + + for k, v := range p.data { + result.data[k] = v + } + + result.parent = p + + return result +} + +// newIterDataFrame instanciates a new private data frame with receiver as parent and with iteration data set (@index, @key, @first, @last) +func (p *DataFrame) newIterDataFrame(length int, i int, key interface{}) *DataFrame { + result := p.Copy() + + result.Set("index", i) + result.Set("key", key) + result.Set("first", i == 0) + result.Set("last", i == length-1) + + return result +} + +// Set sets a data value. +func (p *DataFrame) Set(key string, val interface{}) { + p.data[key] = val +} + +// Get gets a data value. +func (p *DataFrame) Get(key string) interface{} { + return p.find([]string{key}) +} + +// find gets a deep data value +// +// @todo This is NOT consistent with the way we resolve data in template (cf. `evalDataPathExpression()`) ! FIX THAT ! 
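+//
+// A small sketch of the lookup it performs (hypothetical values): with a frame
+// holding {"author": map[string]interface{}{"name": "Jean"}}, find([]string{"author", "name"})
+// descends one map level per part and returns "Jean"; if an intermediate value is
+// missing, or is not itself a map, it returns nil.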
+func (p *DataFrame) find(parts []string) interface{} { + data := p.data + + for i, part := range parts { + val := data[part] + if val == nil { + return nil + } + + if i == len(parts)-1 { + // found + return val + } + + valValue := reflect.ValueOf(val) + if valValue.Kind() != reflect.Map { + // not found + return nil + } + + // continue + data = mapStringInterface(valValue) + } + + // not found + return nil +} + +// mapStringInterface converts any `map` to `map[string]interface{}` +func mapStringInterface(value reflect.Value) map[string]interface{} { + result := make(map[string]interface{}) + + for _, key := range value.MapKeys() { + result[strValue(key)] = value.MapIndex(key).Interface() + } + + return result +} diff --git a/vendor/github.com/aymerick/raymond/escape.go b/vendor/github.com/aymerick/raymond/escape.go new file mode 100644 index 0000000..6a0363c --- /dev/null +++ b/vendor/github.com/aymerick/raymond/escape.go @@ -0,0 +1,65 @@ +package raymond + +import ( + "bytes" + "strings" +) + +// +// That whole file is borrowed from https://github.com/golang/go/tree/master/src/html/escape.go +// +// With changes: +// ' => ' +// " => " +// +// To stay in sync with JS implementation, and make mustache tests pass. +// + +type writer interface { + WriteString(string) (int, error) +} + +const escapedChars = `&'<>"` + +func escape(w writer, s string) error { + i := strings.IndexAny(s, escapedChars) + for i != -1 { + if _, err := w.WriteString(s[:i]); err != nil { + return err + } + var esc string + switch s[i] { + case '&': + esc = "&" + case '\'': + esc = "'" + case '<': + esc = "<" + case '>': + esc = ">" + case '"': + esc = """ + default: + panic("unrecognized escape character") + } + s = s[i+1:] + if _, err := w.WriteString(esc); err != nil { + return err + } + i = strings.IndexAny(s, escapedChars) + } + _, err := w.WriteString(s) + return err +} + +// Escape escapes special HTML characters. +// +// It can be used by helpers that return a SafeString and that need to escape some content by themselves. 
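+//
+// Behavior sketch: each occurrence of one of the five characters in escapedChars
+// (&, ', <, >, ") is replaced with its HTML entity; a string containing none of
+// them is returned unchanged, without allocating a buffer.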
+func Escape(s string) string { + if strings.IndexAny(s, escapedChars) == -1 { + return s + } + var buf bytes.Buffer + escape(&buf, s) + return buf.String() +} diff --git a/vendor/github.com/aymerick/raymond/escape_test.go b/vendor/github.com/aymerick/raymond/escape_test.go new file mode 100644 index 0000000..b77bb09 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/escape_test.go @@ -0,0 +1,20 @@ +package raymond + +import "fmt" + +func ExampleEscape() { + tpl := MustParse("{{link url text}}") + + tpl.RegisterHelper("link", func(url string, text string) SafeString { + return SafeString("" + Escape(text) + "") + }) + + ctx := map[string]string{ + "url": "http://www.aymerick.com/", + "text": "This is a cool website", + } + + result := tpl.MustExec(ctx) + fmt.Print(result) + // Output: This is a <em>cool</em> website +} diff --git a/vendor/github.com/aymerick/raymond/eval.go b/vendor/github.com/aymerick/raymond/eval.go new file mode 100644 index 0000000..d49b708 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/eval.go @@ -0,0 +1,984 @@ +package raymond + +import ( + "bytes" + "fmt" + "reflect" + "strconv" + "strings" + + "github.com/aymerick/raymond/ast" +) + +var ( + // @note borrowed from https://github.com/golang/go/tree/master/src/text/template/exec.go + errorType = reflect.TypeOf((*error)(nil)).Elem() + fmtStringerType = reflect.TypeOf((*fmt.Stringer)(nil)).Elem() + + zero reflect.Value +) + +// evalVisitor evaluates a handlebars template with context +type evalVisitor struct { + tpl *Template + + // contexts stack + ctx []reflect.Value + + // current data frame (chained with parent) + dataFrame *DataFrame + + // block parameters stack + blockParams []map[string]interface{} + + // block statements stack + blocks []*ast.BlockStatement + + // expressions stack + exprs []*ast.Expression + + // memoize expressions that were function calls + exprFunc map[*ast.Expression]bool + + // used for info on panic + curNode ast.Node +} + +// NewEvalVisitor instanciate a new evaluation visitor with given context and initial private data frame +// +// If privData is nil, then a default data frame is created +func newEvalVisitor(tpl *Template, ctx interface{}, privData *DataFrame) *evalVisitor { + frame := privData + if frame == nil { + frame = NewDataFrame() + } + + return &evalVisitor{ + tpl: tpl, + ctx: []reflect.Value{reflect.ValueOf(ctx)}, + dataFrame: frame, + exprFunc: make(map[*ast.Expression]bool), + } +} + +// at sets current node +func (v *evalVisitor) at(node ast.Node) { + v.curNode = node +} + +// +// Contexts stack +// + +// pushCtx pushes new context to the stack +func (v *evalVisitor) pushCtx(ctx reflect.Value) { + v.ctx = append(v.ctx, ctx) +} + +// popCtx pops last context from stack +func (v *evalVisitor) popCtx() reflect.Value { + if len(v.ctx) == 0 { + return zero + } + + var result reflect.Value + result, v.ctx = v.ctx[len(v.ctx)-1], v.ctx[:len(v.ctx)-1] + + return result +} + +// rootCtx returns root context +func (v *evalVisitor) rootCtx() reflect.Value { + return v.ctx[0] +} + +// curCtx returns current context +func (v *evalVisitor) curCtx() reflect.Value { + return v.ancestorCtx(0) +} + +// ancestorCtx returns ancestor context +func (v *evalVisitor) ancestorCtx(depth int) reflect.Value { + index := len(v.ctx) - 1 - depth + if index < 0 { + return zero + } + + return v.ctx[index] +} + +// +// Private data frame +// + +// setDataFrame sets new data frame +func (v *evalVisitor) setDataFrame(frame *DataFrame) { + v.dataFrame = frame +} + +// popDataFrame sets back parent 
data frame +func (v *evalVisitor) popDataFrame() { + v.dataFrame = v.dataFrame.parent +} + +// +// Block Parameters stack +// + +// pushBlockParams pushes new block params to the stack +func (v *evalVisitor) pushBlockParams(params map[string]interface{}) { + v.blockParams = append(v.blockParams, params) +} + +// popBlockParams pops last block params from stack +func (v *evalVisitor) popBlockParams() map[string]interface{} { + var result map[string]interface{} + + if len(v.blockParams) == 0 { + return result + } + + result, v.blockParams = v.blockParams[len(v.blockParams)-1], v.blockParams[:len(v.blockParams)-1] + return result +} + +// blockParam iterates on stack to find given block parameter, and returns its value or nil if not founc +func (v *evalVisitor) blockParam(name string) interface{} { + for i := len(v.blockParams) - 1; i >= 0; i-- { + for k, v := range v.blockParams[i] { + if name == k { + return v + } + } + } + + return nil +} + +// +// Blocks stack +// + +// pushBlock pushes new block statement to stack +func (v *evalVisitor) pushBlock(block *ast.BlockStatement) { + v.blocks = append(v.blocks, block) +} + +// popBlock pops last block statement from stack +func (v *evalVisitor) popBlock() *ast.BlockStatement { + if len(v.blocks) == 0 { + return nil + } + + var result *ast.BlockStatement + result, v.blocks = v.blocks[len(v.blocks)-1], v.blocks[:len(v.blocks)-1] + + return result +} + +// curBlock returns current block statement +func (v *evalVisitor) curBlock() *ast.BlockStatement { + if len(v.blocks) == 0 { + return nil + } + + return v.blocks[len(v.blocks)-1] +} + +// +// Expressions stack +// + +// pushExpr pushes new expression to stack +func (v *evalVisitor) pushExpr(expression *ast.Expression) { + v.exprs = append(v.exprs, expression) +} + +// popExpr pops last expression from stack +func (v *evalVisitor) popExpr() *ast.Expression { + if len(v.exprs) == 0 { + return nil + } + + var result *ast.Expression + result, v.exprs = v.exprs[len(v.exprs)-1], v.exprs[:len(v.exprs)-1] + + return result +} + +// curExpr returns current expression +func (v *evalVisitor) curExpr() *ast.Expression { + if len(v.exprs) == 0 { + return nil + } + + return v.exprs[len(v.exprs)-1] +} + +// +// Error functions +// + +// errPanic panics +func (v *evalVisitor) errPanic(err error) { + panic(fmt.Errorf("Evaluation error: %s\nCurrent node:\n\t%s", err, v.curNode)) +} + +// errorf panics with a custom message +func (v *evalVisitor) errorf(format string, args ...interface{}) { + v.errPanic(fmt.Errorf(format, args...)) +} + +// +// Evaluation +// + +// evalProgram eEvaluates program with given context and returns string result +func (v *evalVisitor) evalProgram(program *ast.Program, ctx interface{}, data *DataFrame, key interface{}) string { + blockParams := make(map[string]interface{}) + + // compute block params + if len(program.BlockParams) > 0 { + blockParams[program.BlockParams[0]] = ctx + } + + if (len(program.BlockParams) > 1) && (key != nil) { + blockParams[program.BlockParams[1]] = key + } + + // push contexts + if len(blockParams) > 0 { + v.pushBlockParams(blockParams) + } + + ctxVal := reflect.ValueOf(ctx) + if ctxVal.IsValid() { + v.pushCtx(ctxVal) + } + + if data != nil { + v.setDataFrame(data) + } + + // evaluate program + result, _ := program.Accept(v).(string) + + // pop contexts + if data != nil { + v.popDataFrame() + } + + if ctxVal.IsValid() { + v.popCtx() + } + + if len(blockParams) > 0 { + v.popBlockParams() + } + + return result +} + +// evalPath evaluates all path parts with given 
context +func (v *evalVisitor) evalPath(ctx reflect.Value, parts []string, exprRoot bool) (reflect.Value, bool) { + partResolved := false + + for i := 0; i < len(parts); i++ { + part := parts[i] + + // "[foo bar]"" => "foo bar" + if (len(part) >= 2) && (part[0] == '[') && (part[len(part)-1] == ']') { + part = part[1 : len(part)-1] + } + + ctx = v.evalField(ctx, part, exprRoot) + if !ctx.IsValid() { + break + } + + // we resolved at least one part of path + partResolved = true + } + + return ctx, partResolved +} + +// evalField evaluates field with given context +func (v *evalVisitor) evalField(ctx reflect.Value, fieldName string, exprRoot bool) reflect.Value { + result := zero + + ctx, _ = indirect(ctx) + if !ctx.IsValid() { + return result + } + + // check if this is a method call + result, isMeth := v.evalMethod(ctx, fieldName, exprRoot) + if !isMeth { + switch ctx.Kind() { + case reflect.Struct: + // example: firstName => FirstName + expFieldName := strings.Title(fieldName) + + // check if struct have this field and that it is exported + if tField, ok := ctx.Type().FieldByName(expFieldName); ok && (tField.PkgPath == "") { + // struct field + result = ctx.FieldByIndex(tField.Index) + } + case reflect.Map: + nameVal := reflect.ValueOf(fieldName) + if nameVal.Type().AssignableTo(ctx.Type().Key()) { + // map key + result = ctx.MapIndex(nameVal) + } + case reflect.Array, reflect.Slice: + if i, err := strconv.Atoi(fieldName); (err == nil) && (i < ctx.Len()) { + result = ctx.Index(i) + } + } + } + + // check if result is a function + result, _ = indirect(result) + if result.Kind() == reflect.Func { + result = v.evalFieldFunc(fieldName, result, exprRoot) + } + + return result +} + +// evalFieldFunc tries to evaluate given method name, and a boolean to indicate if this was a method call +func (v *evalVisitor) evalMethod(ctx reflect.Value, name string, exprRoot bool) (reflect.Value, bool) { + if ctx.Kind() != reflect.Interface && ctx.CanAddr() { + ctx = ctx.Addr() + } + + method := ctx.MethodByName(name) + if !method.IsValid() { + // example: subject() => Subject() + method = ctx.MethodByName(strings.Title(name)) + } + + if !method.IsValid() { + return zero, false + } + + return v.evalFieldFunc(name, method, exprRoot), true +} + +// evalFieldFunc evaluates given function +func (v *evalVisitor) evalFieldFunc(name string, funcVal reflect.Value, exprRoot bool) reflect.Value { + ensureValidHelper(name, funcVal) + + var options *Options + if exprRoot { + // create function arg with all params/hash + expr := v.curExpr() + options = v.helperOptions(expr) + + // ok, that expression was a function call + v.exprFunc[expr] = true + } else { + // we are not at root of expression, so we are a parameter... and we don't like + // infinite loops caused by trying to parse ourself forever + options = newEmptyOptions(v) + } + + return v.callFunc(name, funcVal, options) +} + +// findBlockParam returns node's block parameter +func (v *evalVisitor) findBlockParam(node *ast.PathExpression) (string, interface{}) { + if len(node.Parts) > 0 { + name := node.Parts[0] + if value := v.blockParam(name); value != nil { + return name, value + } + } + + return "", nil +} + +// evalPathExpression evaluates a path expression +func (v *evalVisitor) evalPathExpression(node *ast.PathExpression, exprRoot bool) interface{} { + var result interface{} + + if name, value := v.findBlockParam(node); value != nil { + // block parameter value + + // We push a new context so we can evaluate the path expression (note: this may be a bad idea). 
+ // + // Example: + // {{#foo as |bar|}} + // {{bar.baz}} + // {{/foo}} + // + // With data: + // {"foo": {"baz": "bat"}} + newCtx := map[string]interface{}{name: value} + + v.pushCtx(reflect.ValueOf(newCtx)) + result = v.evalCtxPathExpression(node, exprRoot) + v.popCtx() + } else { + ctxTried := false + + if node.IsDataRoot() { + // context path + result = v.evalCtxPathExpression(node, exprRoot) + + ctxTried = true + } + + if (result == nil) && node.Data { + // if it is @root, then we tried to evaluate with root context but nothing was found + // so let's try with private data + + // private data + result = v.evalDataPathExpression(node, exprRoot) + } + + if (result == nil) && !ctxTried { + // context path + result = v.evalCtxPathExpression(node, exprRoot) + } + } + + return result +} + +// evalDataPathExpression evaluates a private data path expression +func (v *evalVisitor) evalDataPathExpression(node *ast.PathExpression, exprRoot bool) interface{} { + // find data frame + frame := v.dataFrame + for i := node.Depth; i > 0; i-- { + if frame.parent == nil { + return nil + } + frame = frame.parent + } + + // resolve data + // @note Can be changed to v.evalCtx() as context can't be an array + result, _ := v.evalCtxPath(reflect.ValueOf(frame.data), node.Parts, exprRoot) + return result +} + +// evalCtxPathExpression evaluates a context path expression +func (v *evalVisitor) evalCtxPathExpression(node *ast.PathExpression, exprRoot bool) interface{} { + v.at(node) + + if node.IsDataRoot() { + // `@root` - remove the first part + parts := node.Parts[1:len(node.Parts)] + + result, _ := v.evalCtxPath(v.rootCtx(), parts, exprRoot) + return result + } + + return v.evalDepthPath(node.Depth, node.Parts, exprRoot) +} + +// evalDepthPath iterates on contexts, starting at given depth, until there is one that resolve given path parts +func (v *evalVisitor) evalDepthPath(depth int, parts []string, exprRoot bool) interface{} { + var result interface{} + partResolved := false + + ctx := v.ancestorCtx(depth) + + for (result == nil) && ctx.IsValid() && (depth <= len(v.ctx) && !partResolved) { + // try with context + result, partResolved = v.evalCtxPath(ctx, parts, exprRoot) + + // As soon as we find the first part of a path, we must not try to resolve with parent context if result is finally `nil` + // Reference: "Dotted Names - Context Precedence" mustache test + if !partResolved && (result == nil) { + // try with previous context + depth++ + ctx = v.ancestorCtx(depth) + } + } + + return result +} + +// evalCtxPath evaluates path with given context +func (v *evalVisitor) evalCtxPath(ctx reflect.Value, parts []string, exprRoot bool) (interface{}, bool) { + var result interface{} + partResolved := false + + switch ctx.Kind() { + case reflect.Array, reflect.Slice: + // Array context + var results []interface{} + + for i := 0; i < ctx.Len(); i++ { + value, _ := v.evalPath(ctx.Index(i), parts, exprRoot) + if value.IsValid() { + results = append(results, value.Interface()) + } + } + + result = results + default: + // NOT array context + var value reflect.Value + + value, partResolved = v.evalPath(ctx, parts, exprRoot) + if value.IsValid() { + result = value.Interface() + } + } + + return result, partResolved +} + +// +// Helpers +// + +// isHelperCall returns true if given expression is a helper call +func (v *evalVisitor) isHelperCall(node *ast.Expression) bool { + if helperName := node.HelperName(); helperName != "" { + return v.findHelper(helperName) != zero + } + return false +} + +// findHelper finds given 
helper +func (v *evalVisitor) findHelper(name string) reflect.Value { + // check template helpers + if h := v.tpl.findHelper(name); h != zero { + return h + } + + // check global helpers + return findHelper(name) +} + +// callFunc calls function with given options +func (v *evalVisitor) callFunc(name string, funcVal reflect.Value, options *Options) reflect.Value { + params := options.Params() + + funcType := funcVal.Type() + + // @todo Is there a better way to do that ? + strType := reflect.TypeOf("") + boolType := reflect.TypeOf(true) + + // check parameters number + addOptions := false + numIn := funcType.NumIn() + + if numIn == len(params)+1 { + lastArgType := funcType.In(numIn - 1) + if reflect.TypeOf(options).AssignableTo(lastArgType) { + addOptions = true + } + } + + if !addOptions && (len(params) != numIn) { + v.errorf("Helper '%s' called with wrong number of arguments, needed %d but got %d", name, numIn, len(params)) + } + + // check and collect arguments + args := make([]reflect.Value, numIn) + for i, param := range params { + arg := reflect.ValueOf(param) + argType := funcType.In(i) + + if !arg.IsValid() { + if canBeNil(argType) { + arg = reflect.Zero(argType) + } else if argType.Kind() == reflect.String { + arg = reflect.ValueOf("") + } else { + // @todo Maybe we can panic on that + return reflect.Zero(strType) + } + } + + if !arg.Type().AssignableTo(argType) { + if strType.AssignableTo(argType) { + // convert parameter to string + arg = reflect.ValueOf(strValue(arg)) + } else if boolType.AssignableTo(argType) { + // convert parameter to bool + val, _ := isTrueValue(arg) + arg = reflect.ValueOf(val) + } else { + v.errorf("Helper %s called with argument %d with type %s but it should be %s", name, i, arg.Type(), argType) + } + } + + args[i] = arg + } + + if addOptions { + args[numIn-1] = reflect.ValueOf(options) + } + + result := funcVal.Call(args) + + return result[0] +} + +// callHelper invoqs helper function for given expression node +func (v *evalVisitor) callHelper(name string, helper reflect.Value, node *ast.Expression) interface{} { + result := v.callFunc(name, helper, v.helperOptions(node)) + if !result.IsValid() { + return nil + } + + // @todo We maybe want to ensure here that helper returned a string or a SafeString + return result.Interface() +} + +// helperOptions computes helper options argument from an expression +func (v *evalVisitor) helperOptions(node *ast.Expression) *Options { + var params []interface{} + var hash map[string]interface{} + + for _, paramNode := range node.Params { + param := paramNode.Accept(v) + params = append(params, param) + } + + if node.Hash != nil { + hash, _ = node.Hash.Accept(v).(map[string]interface{}) + } + + return newOptions(v, params, hash) +} + +// +// Partials +// + +// findPartial finds given partial +func (v *evalVisitor) findPartial(name string) *partial { + // check template partials + if p := v.tpl.findPartial(name); p != nil { + return p + } + + // check global partials + return findPartial(name) +} + +// partialContext computes partial context +func (v *evalVisitor) partialContext(node *ast.PartialStatement) reflect.Value { + if nb := len(node.Params); nb > 1 { + v.errorf("Unsupported number of partial arguments: %d", nb) + } + + if (len(node.Params) > 0) && (node.Hash != nil) { + v.errorf("Passing both context and named parameters to a partial is not allowed") + } + + if len(node.Params) == 1 { + return reflect.ValueOf(node.Params[0].Accept(v)) + } + + if node.Hash != nil { + hash, _ := 
node.Hash.Accept(v).(map[string]interface{}) + return reflect.ValueOf(hash) + } + + return zero +} + +// evalPartial evaluates a partial +func (v *evalVisitor) evalPartial(p *partial, node *ast.PartialStatement) string { + // get partial template + partialTpl, err := p.template() + if err != nil { + v.errPanic(err) + } + + // push partial context + ctx := v.partialContext(node) + if ctx.IsValid() { + v.pushCtx(ctx) + } + + // evaluate partial template + result, _ := partialTpl.program.Accept(v).(string) + + // ident partial + result = indentLines(result, node.Indent) + + if ctx.IsValid() { + v.popCtx() + } + + return result +} + +// indentLines indents all lines of given string +func indentLines(str string, indent string) string { + if indent == "" { + return str + } + + var indented []string + + lines := strings.Split(str, "\n") + for i, line := range lines { + if (i == (len(lines) - 1)) && (line == "") { + // input string ends with a new line + indented = append(indented, line) + } else { + indented = append(indented, indent+line) + } + } + + return strings.Join(indented, "\n") +} + +// +// Functions +// + +// wasFuncCall returns true if given expression was a function call +func (v *evalVisitor) wasFuncCall(node *ast.Expression) bool { + // check if expression was tagged as a function call + return v.exprFunc[node] +} + +// +// Visitor interface +// + +// Statements + +// VisitProgram implements corresponding Visitor interface method +func (v *evalVisitor) VisitProgram(node *ast.Program) interface{} { + v.at(node) + + buf := new(bytes.Buffer) + + for _, n := range node.Body { + if str := Str(n.Accept(v)); str != "" { + if _, err := buf.Write([]byte(str)); err != nil { + v.errPanic(err) + } + } + } + + return buf.String() +} + +// VisitMustache implements corresponding Visitor interface method +func (v *evalVisitor) VisitMustache(node *ast.MustacheStatement) interface{} { + v.at(node) + + // evaluate expression + expr := node.Expression.Accept(v) + + // check if this is a safe string + isSafe := isSafeString(expr) + + // get string value + str := Str(expr) + if !isSafe && !node.Unescaped { + // escape html + str = Escape(str) + } + + return str +} + +// VisitBlock implements corresponding Visitor interface method +func (v *evalVisitor) VisitBlock(node *ast.BlockStatement) interface{} { + v.at(node) + + v.pushBlock(node) + + var result interface{} + + // evaluate expression + expr := node.Expression.Accept(v) + + if v.isHelperCall(node.Expression) || v.wasFuncCall(node.Expression) { + // it is the responsability of the helper/function to evaluate block + result = expr + } else { + val := reflect.ValueOf(expr) + + truth, _ := isTrueValue(val) + if truth { + if node.Program != nil { + switch val.Kind() { + case reflect.Array, reflect.Slice: + concat := "" + + // Array context + for i := 0; i < val.Len(); i++ { + // Computes new private data frame + frame := v.dataFrame.newIterDataFrame(val.Len(), i, nil) + + // Evaluate program + concat += v.evalProgram(node.Program, val.Index(i).Interface(), frame, i) + } + + result = concat + default: + // NOT array + result = v.evalProgram(node.Program, expr, nil, nil) + } + } + } else if node.Inverse != nil { + result, _ = node.Inverse.Accept(v).(string) + } + } + + v.popBlock() + + return result +} + +// VisitPartial implements corresponding Visitor interface method +func (v *evalVisitor) VisitPartial(node *ast.PartialStatement) interface{} { + v.at(node) + + // partialName: helperName | sexpr + name, ok := ast.HelperNameStr(node.Name) + if !ok { + if 
subExpr, ok := node.Name.(*ast.SubExpression); ok { + name, _ = subExpr.Accept(v).(string) + } + } + + if name == "" { + v.errorf("Unexpected partial name: %q", node.Name) + } + + partial := v.findPartial(name) + if partial == nil { + v.errorf("Partial not found: %s", name) + } + + return v.evalPartial(partial, node) +} + +// VisitContent implements corresponding Visitor interface method +func (v *evalVisitor) VisitContent(node *ast.ContentStatement) interface{} { + v.at(node) + + // write content as is + return node.Value +} + +// VisitComment implements corresponding Visitor interface method +func (v *evalVisitor) VisitComment(node *ast.CommentStatement) interface{} { + v.at(node) + + // ignore comments + return "" +} + +// Expressions + +// VisitExpression implements corresponding Visitor interface method +func (v *evalVisitor) VisitExpression(node *ast.Expression) interface{} { + v.at(node) + + var result interface{} + done := false + + v.pushExpr(node) + + // helper call + if helperName := node.HelperName(); helperName != "" { + if helper := v.findHelper(helperName); helper != zero { + result = v.callHelper(helperName, helper, node) + done = true + } + } + + if !done { + // literal + if literal, ok := node.LiteralStr(); ok { + if val := v.evalField(v.curCtx(), literal, true); val.IsValid() { + result = val.Interface() + done = true + } + } + } + + if !done { + // field path + if path := node.FieldPath(); path != nil { + // @todo Find a cleaner way ! Don't break the pattern ! + // this is an exception to visitor pattern, because we need to pass the info + // that this path is at root of current expression + if val := v.evalPathExpression(path, true); val != nil { + result = val + } + } + } + + v.popExpr() + + return result +} + +// VisitSubExpression implements corresponding Visitor interface method +func (v *evalVisitor) VisitSubExpression(node *ast.SubExpression) interface{} { + v.at(node) + + return node.Expression.Accept(v) +} + +// VisitPath implements corresponding Visitor interface method +func (v *evalVisitor) VisitPath(node *ast.PathExpression) interface{} { + return v.evalPathExpression(node, false) +} + +// Literals + +// VisitString implements corresponding Visitor interface method +func (v *evalVisitor) VisitString(node *ast.StringLiteral) interface{} { + v.at(node) + + return node.Value +} + +// VisitBoolean implements corresponding Visitor interface method +func (v *evalVisitor) VisitBoolean(node *ast.BooleanLiteral) interface{} { + v.at(node) + + return node.Value +} + +// VisitNumber implements corresponding Visitor interface method +func (v *evalVisitor) VisitNumber(node *ast.NumberLiteral) interface{} { + v.at(node) + + return node.Number() +} + +// Miscellaneous + +// VisitHash implements corresponding Visitor interface method +func (v *evalVisitor) VisitHash(node *ast.Hash) interface{} { + v.at(node) + + result := make(map[string]interface{}) + + for _, pair := range node.Pairs { + if value := pair.Accept(v); value != nil { + result[pair.Key] = value + } + } + + return result +} + +// VisitHashPair implements corresponding Visitor interface method +func (v *evalVisitor) VisitHashPair(node *ast.HashPair) interface{} { + v.at(node) + + return node.Val.Accept(v) +} diff --git a/vendor/github.com/aymerick/raymond/eval_test.go b/vendor/github.com/aymerick/raymond/eval_test.go new file mode 100644 index 0000000..b7bd82e --- /dev/null +++ b/vendor/github.com/aymerick/raymond/eval_test.go @@ -0,0 +1,215 @@ +package raymond + +import "testing" + +var evalTests = []Test{ + { 
+ "only content", + "this is content", + nil, nil, nil, nil, + "this is content", + }, + { + "checks path in parent contexts", + "{{#a}}{{one}}{{#b}}{{one}}{{two}}{{one}}{{/b}}{{/a}}", + map[string]interface{}{"a": map[string]int{"one": 1}, "b": map[string]int{"two": 2}}, + nil, nil, nil, + "1121", + }, + { + "block params", + "{{#foo as |bar|}}{{bar}}{{/foo}}{{bar}}", + map[string]string{"foo": "baz", "bar": "bat"}, + nil, nil, nil, + "bazbat", + }, + { + "block params on array", + "{{#foo as |bar i|}}{{i}}.{{bar}} {{/foo}}", + map[string][]string{"foo": {"baz", "bar", "bat"}}, + nil, nil, nil, + "0.baz 1.bar 2.bat ", + }, + { + "nested block params", + "{{#foos as |foo iFoo|}}{{#wats as |wat iWat|}}{{iFoo}}.{{iWat}}.{{foo}}-{{wat}} {{/wats}}{{/foos}}", + map[string][]string{"foos": {"baz", "bar"}, "wats": {"the", "phoque"}}, + nil, nil, nil, + "0.0.baz-the 0.1.baz-phoque 1.0.bar-the 1.1.bar-phoque ", + }, + { + "block params with path reference", + "{{#foo as |bar|}}{{bar.baz}}{{/foo}}", + map[string]map[string]string{"foo": {"baz": "bat"}}, + nil, nil, nil, + "bat", + }, + { + "falsy block evaluation", + "{{#foo}}bar{{/foo}} baz", + map[string]interface{}{"foo": false}, + nil, nil, nil, + " baz", + }, + { + "block helper returns a SafeString", + "{{title}} - {{#bold}}{{body}}{{/bold}}", + map[string]string{ + "title": "My new blog post", + "body": "I have so many things to say!", + }, + nil, + map[string]interface{}{"bold": func(options *Options) SafeString { + return SafeString(`
<div class="mybold">` + options.Fn() + "</div>
") + }}, + nil, + `My new blog post -
<div class="mybold">I have so many things to say!</div>
`, + }, + { + "chained blocks", + "{{#if a}}A{{else if b}}B{{else}}C{{/if}}", + map[string]interface{}{"b": false}, + nil, nil, nil, + "C", + }, + + // @todo Test with a "../../path" (depth 2 path) while context is only depth 1 +} + +func TestEval(t *testing.T) { + t.Parallel() + + launchTests(t, evalTests) +} + +var evalErrors = []Test{ + { + "functions with wrong number of arguments", + `{{foo "bar"}}`, + map[string]interface{}{"foo": func(a string, b string) string { return "foo" }}, + nil, nil, nil, + "Helper 'foo' called with wrong number of arguments, needed 2 but got 1", + }, + { + "functions with wrong number of returned values (1)", + "{{foo}}", + map[string]interface{}{"foo": func() {}}, + nil, nil, nil, + "Helper function must return a string or a SafeString", + }, + { + "functions with wrong number of returned values (2)", + "{{foo}}", + map[string]interface{}{"foo": func() (string, bool, string) { return "foo", true, "bar" }}, + nil, nil, nil, + "Helper function must return a string or a SafeString", + }, +} + +func TestEvalErrors(t *testing.T) { + launchErrorTests(t, evalErrors) +} + +func TestEvalStruct(t *testing.T) { + t.Parallel() + + source := `
<div class="post">
+  <h1>By {{author.FirstName}} {{Author.lastName}}</h1>
+  <div class="body">{{Body}}</div>
+
+  <h1>Comments</h1>
+
+  {{#each comments}}
+  <h2>By {{Author.FirstName}} {{author.LastName}}</h2>
+  <div class="body">{{body}}</div>
+  {{/each}}
+</div>`
+
+	expected := `<div class="post">
+  <h1>By Jean Valjean</h1>
+  <div class="body">Life is difficult</div>
+
+  <h1>Comments</h1>
+
+  <h2>By Marcel Beliveau</h2>
+  <div class="body">LOL!</div>
+</div>
` + + type Person struct { + FirstName string + LastName string + } + + type Comment struct { + Author Person + Body string + } + + type Post struct { + Author Person + Body string + Comments []Comment + } + + ctx := Post{ + Person{"Jean", "Valjean"}, + "Life is difficult", + []Comment{ + Comment{ + Person{"Marcel", "Beliveau"}, + "LOL!", + }, + }, + } + + output := MustRender(source, ctx) + if output != expected { + t.Errorf("Failed to evaluate with struct context") + } +} + +type TestFoo struct { +} + +func (t *TestFoo) Subject() string { + return "foo" +} + +func TestEvalMethod(t *testing.T) { + t.Parallel() + + source := `Subject is {{subject}}! YES I SAID {{Subject}}!` + expected := `Subject is foo! YES I SAID foo!` + + ctx := &TestFoo{} + + output := MustRender(source, ctx) + if output != expected { + t.Errorf("Failed to evaluate struct method: %s", output) + } +} + +type TestBar struct { +} + +func (t *TestBar) Subject() interface{} { + return testBar +} + +func testBar() string { + return "bar" +} + +func TestEvalMethodReturningFunc(t *testing.T) { + t.Parallel() + + source := `Subject is {{subject}}! YES I SAID {{Subject}}!` + expected := `Subject is bar! YES I SAID bar!` + + ctx := &TestBar{} + + output := MustRender(source, ctx) + if output != expected { + t.Errorf("Failed to evaluate struct method: %s", output) + } +} diff --git a/vendor/github.com/aymerick/raymond/handlebars/base_test.go b/vendor/github.com/aymerick/raymond/handlebars/base_test.go new file mode 100644 index 0000000..777e5b6 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/handlebars/base_test.go @@ -0,0 +1,100 @@ +package handlebars + +import ( + "fmt" + "io/ioutil" + "path" + "strconv" + "testing" + + "github.com/aymerick/raymond" +) + +// cf. https://github.com/aymerick/go-fuzz-tests/raymond +const DUMP_TPL = false + +var dump_tpl_nb = 0 + +type Test struct { + name string + input string + data interface{} + privData map[string]interface{} + helpers map[string]interface{} + partials map[string]string + output interface{} +} + +func launchTests(t *testing.T, tests []Test) { + t.Parallel() + + for _, test := range tests { + var err error + var tpl *raymond.Template + + if DUMP_TPL { + filename := strconv.Itoa(dump_tpl_nb) + if err := ioutil.WriteFile(path.Join(".", "dump_tpl", filename), []byte(test.input), 0644); err != nil { + panic(err) + } + dump_tpl_nb += 1 + } + + // parse template + tpl, err = raymond.Parse(test.input) + if err != nil { + t.Errorf("Test '%s' failed - Failed to parse template\ninput:\n\t'%s'\nerror:\n\t%s", test.name, test.input, err) + } else { + if len(test.helpers) > 0 { + // register helpers + tpl.RegisterHelpers(test.helpers) + } + + if len(test.partials) > 0 { + // register partials + tpl.RegisterPartials(test.partials) + } + + // setup private data frame + var privData *raymond.DataFrame + if test.privData != nil { + privData = raymond.NewDataFrame() + for k, v := range test.privData { + privData.Set(k, v) + } + } + + // render template + output, err := tpl.ExecWith(test.data, privData) + if err != nil { + t.Errorf("Test '%s' failed\ninput:\n\t'%s'\ndata:\n\t%s\nerror:\n\t%s\nAST:\n\t%s", test.name, test.input, raymond.Str(test.data), err, tpl.PrintAST()) + } else { + // check output + var expectedArr []string + expectedArr, ok := test.output.([]string) + if ok { + match := false + for _, expectedStr := range expectedArr { + if expectedStr == output { + match = true + break + } + } + + if !match { + t.Errorf("Test '%s' 
failed\ninput:\n\t'%s'\ndata:\n\t%s\npartials:\n\t%s\nexpected\n\t%q\ngot\n\t%q\nAST:\n%s", test.name, test.input, raymond.Str(test.data), raymond.Str(test.partials), expectedArr, output, tpl.PrintAST()) + } + } else { + expectedStr, ok := test.output.(string) + if !ok { + panic(fmt.Errorf("Erroneous test output description: %q", test.output)) + } + + if expectedStr != output { + t.Errorf("Test '%s' failed\ninput:\n\t'%s'\ndata:\n\t%s\npartials:\n\t%s\nexpected\n\t%q\ngot\n\t%q\nAST:\n%s", test.name, test.input, raymond.Str(test.data), raymond.Str(test.partials), expectedStr, output, tpl.PrintAST()) + } + } + } + } + } +} diff --git a/vendor/github.com/aymerick/raymond/handlebars/basic_test.go b/vendor/github.com/aymerick/raymond/handlebars/basic_test.go new file mode 100644 index 0000000..084b06f --- /dev/null +++ b/vendor/github.com/aymerick/raymond/handlebars/basic_test.go @@ -0,0 +1,651 @@ +package handlebars + +import ( + "fmt" + "regexp" + "testing" + + "github.com/aymerick/raymond" +) + +// +// Those tests come from: +// https://github.com/wycats/handlebars.js/blob/master/spec/basic.js +// +var basicTests = []Test{ + { + "most basic", + "{{foo}}", + map[string]string{"foo": "foo"}, + nil, nil, nil, + "foo", + }, + { + "escaping (1)", + "\\{{foo}}", + map[string]string{"foo": "food"}, + nil, nil, nil, + "{{foo}}", + }, + { + "escaping (2)", + "content \\{{foo}}", + map[string]string{}, + nil, nil, nil, + "content {{foo}}", + }, + { + "escaping (3)", + "\\\\{{foo}}", + map[string]string{"foo": "food"}, + nil, nil, nil, + "\\food", + }, + { + "escaping (4)", + "content \\\\{{foo}}", + map[string]string{"foo": "food"}, + nil, nil, nil, + "content \\food", + }, + { + "escaping (5)", + "\\\\ {{foo}}", + map[string]string{"foo": "food"}, + nil, nil, nil, + "\\\\ food", + }, + { + "compiling with a basic context", + "Goodbye\n{{cruel}}\n{{world}}!", + map[string]string{"cruel": "cruel", "world": "world"}, + nil, nil, nil, + "Goodbye\ncruel\nworld!", + }, + { + "compiling with an undefined context (1)", + "Goodbye\n{{cruel}}\n{{world.bar}}!", + nil, nil, nil, nil, + "Goodbye\n\n!", + }, + { + "compiling with an undefined context (2)", + "{{#unless foo}}Goodbye{{../test}}{{test2}}{{/unless}}", + nil, nil, nil, nil, + "Goodbye", + }, + { + "comments (1)", + "{{! Goodbye}}Goodbye\n{{cruel}}\n{{world}}!", + map[string]string{"cruel": "cruel", "world": "world"}, + nil, nil, nil, + "Goodbye\ncruel\nworld!", + }, + { + "comments (2)", + " {{~! comment ~}} blah", + nil, nil, nil, nil, + "blah", + }, + { + "comments (3)", + " {{~!-- long-comment --~}} blah", + nil, nil, nil, nil, + "blah", + }, + { + "comments (4)", + " {{! comment ~}} blah", + nil, nil, nil, nil, + " blah", + }, + { + "comments (5)", + " {{!-- long-comment --~}} blah", + nil, nil, nil, nil, + " blah", + }, + { + "comments (6)", + " {{~! 
comment}} blah", + nil, nil, nil, nil, + " blah", + }, + { + "comments (7)", + " {{~!-- long-comment --}} blah", + nil, nil, nil, nil, + " blah", + }, + { + "boolean (1)", + "{{#goodbye}}GOODBYE {{/goodbye}}cruel {{world}}!", + map[string]interface{}{"goodbye": true, "world": "world"}, + nil, nil, nil, + "GOODBYE cruel world!", + }, + { + "boolean (2)", + "{{#goodbye}}GOODBYE {{/goodbye}}cruel {{world}}!", + map[string]interface{}{"goodbye": false, "world": "world"}, + nil, nil, nil, + "cruel world!", + }, + { + "zeros (1)", + "num1: {{num1}}, num2: {{num2}}", + map[string]interface{}{"num1": 42, "num2": 0}, + nil, nil, nil, + "num1: 42, num2: 0", + }, + { + "zeros (2)", + "num: {{.}}", + 0, + nil, nil, nil, + "num: 0", + }, + { + "zeros (3)", + "num: {{num1/num2}}", + map[string]map[string]interface{}{"num1": {"num2": 0}}, + nil, nil, nil, + "num: 0", + }, + { + "false (1)", + "val1: {{val1}}, val2: {{val2}}", + map[string]interface{}{"val1": false, "val2": false}, + nil, nil, nil, + "val1: false, val2: false", + }, + { + "false (2)", + "val: {{.}}", + false, + nil, nil, nil, + "val: false", + }, + { + "false (3)", + "val: {{val1/val2}}", + map[string]map[string]interface{}{"val1": {"val2": false}}, + nil, nil, nil, + "val: false", + }, + { + "false (4)", + "val1: {{{val1}}}, val2: {{{val2}}}", + map[string]interface{}{"val1": false, "val2": false}, + nil, nil, nil, + "val1: false, val2: false", + }, + { + "false (5)", + "val: {{{val1/val2}}}", + map[string]map[string]interface{}{"val1": {"val2": false}}, + nil, nil, nil, + "val: false", + }, + { + "newlines (1)", + "Alan's\nTest", + nil, nil, nil, nil, + "Alan's\nTest", + }, + { + "newlines (2)", + "Alan's\rTest", + nil, nil, nil, nil, + "Alan's\rTest", + }, + { + "escaping text (1)", + "Awesome's", + map[string]string{}, + nil, nil, nil, + "Awesome's", + }, + { + "escaping text (2)", + "Awesome\\", + map[string]string{}, + nil, nil, nil, + "Awesome\\", + }, + { + "escaping text (3)", + "Awesome\\\\ foo", + map[string]string{}, + nil, nil, nil, + "Awesome\\\\ foo", + }, + { + "escaping text (4)", + "Awesome {{foo}}", + map[string]string{"foo": "\\"}, + nil, nil, nil, + "Awesome \\", + }, + { + "escaping text (5)", + " ' ' ", + map[string]string{}, + nil, nil, nil, + " ' ' ", + }, + { + "escaping expressions (6)", + "{{{awesome}}}", + map[string]string{"awesome": "&'\\<>"}, + nil, nil, nil, + "&'\\<>", + }, + { + "escaping expressions (7)", + "{{&awesome}}", + map[string]string{"awesome": "&'\\<>"}, + nil, nil, nil, + "&'\\<>", + }, + { + "escaping expressions (8)", + "{{awesome}}", + map[string]string{"awesome": "&\"'`\\<>"}, + nil, nil, nil, + "&"'`\\<>", + }, + { + "escaping expressions (9)", + "{{awesome}}", + map[string]string{"awesome": "Escaped, looks like: <b>"}, + nil, nil, nil, + "Escaped, <b> looks like: &lt;b&gt;", + }, + { + "functions returning safestrings shouldn't be escaped", + "{{awesome}}", + map[string]interface{}{"awesome": func() raymond.SafeString { return raymond.SafeString("&'\\<>") }}, + nil, nil, nil, + "&'\\<>", + }, + { + "functions (1)", + "{{awesome}}", + map[string]interface{}{"awesome": func() string { return "Awesome" }}, + nil, nil, nil, + "Awesome", + }, + { + "functions (2)", + "{{awesome}}", + map[string]interface{}{"awesome": func(options *raymond.Options) string { + return options.ValueStr("more") + }, "more": "More awesome"}, + nil, nil, nil, + "More awesome", + }, + { + "functions with context argument", + "{{awesome frank}}", + map[string]interface{}{"awesome": func(context string) string { + 
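+		// the helper just echoes its argument: {{awesome frank}} resolves "frank" from the context and renders "Frank"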
return context + }, "frank": "Frank"}, + nil, nil, nil, + "Frank", + }, + { + "pathed functions with context argument", + "{{bar.awesome frank}}", + map[string]interface{}{"bar": map[string]interface{}{"awesome": func(context string) string { + return context + }}, "frank": "Frank"}, + nil, nil, nil, + "Frank", + }, + { + "depthed functions with context argument", + "{{#with frank}}{{../awesome .}}{{/with}}", + map[string]interface{}{"awesome": func(context string) string { + return context + }, "frank": "Frank"}, + nil, nil, nil, + "Frank", + }, + { + "block functions with context argument", + "{{#awesome 1}}inner {{.}}{{/awesome}}", + map[string]interface{}{"awesome": func(context interface{}, options *raymond.Options) string { + return options.FnWith(context) + }}, + nil, nil, nil, + "inner 1", + }, + { + "depthed block functions with context argument", + "{{#with value}}{{#../awesome 1}}inner {{.}}{{/../awesome}}{{/with}}", + map[string]interface{}{ + "awesome": func(context interface{}, options *raymond.Options) string { + return options.FnWith(context) + }, + "value": true, + }, + nil, nil, nil, + "inner 1", + }, + { + "block functions without context argument", + "{{#awesome}}inner{{/awesome}}", + map[string]interface{}{ + "awesome": func(options *raymond.Options) string { + return options.Fn() + }, + }, + nil, nil, nil, + "inner", + }, + // // @note I don't even understand why this test passes with the JS implementation... it should be + // // the responsability of the function to evaluate the block + // { + // "pathed block functions without context argument", + // "{{#foo.awesome}}inner{{/foo.awesome}}", + // map[string]map[string]interface{}{ + // "foo": { + // "awesome": func(options *raymond.Options) interface{} { + // return options.Ctx() + // }, + // }, + // }, + // nil, nil, nil, + // "inner", + // }, + // // @note I don't even understand why this test passes with the JS implementation... 
it should be + // // the responsability of the function to evaluate the block + // { + // "depthed block functions without context argument", + // "{{#with value}}{{#../awesome}}inner{{/../awesome}}{{/with}}", + // map[string]interface{}{ + // "value": true, + // "awesome": func(options *raymond.Options) interface{} { + // return options.Ctx() + // }, + // }, + // nil, nil, nil, + // "inner", + // }, + { + "paths with hyphens (1)", + "{{foo-bar}}", + map[string]string{"foo-bar": "baz"}, + nil, nil, nil, + "baz", + }, + { + "paths with hyphens (2)", + "{{foo.foo-bar}}", + map[string]map[string]string{"foo": {"foo-bar": "baz"}}, + nil, nil, nil, + "baz", + }, + { + "paths with hyphens (3)", + "{{foo/foo-bar}}", + map[string]map[string]string{"foo": {"foo-bar": "baz"}}, + nil, nil, nil, + "baz", + }, + { + "nested paths", + "Goodbye {{alan/expression}} world!", + map[string]map[string]string{"alan": {"expression": "beautiful"}}, + nil, nil, nil, + "Goodbye beautiful world!", + }, + { + "nested paths with empty string value", + "Goodbye {{alan/expression}} world!", + map[string]map[string]string{"alan": {"expression": ""}}, + nil, nil, nil, + "Goodbye world!", + }, + { + "literal paths (1)", + "Goodbye {{[@alan]/expression}} world!", + map[string]map[string]string{"@alan": {"expression": "beautiful"}}, + nil, nil, nil, + "Goodbye beautiful world!", + }, + { + "literal paths (2)", + "Goodbye {{[foo bar]/expression}} world!", + map[string]map[string]string{"foo bar": {"expression": "beautiful"}}, + nil, nil, nil, + "Goodbye beautiful world!", + }, + { + "literal references", + "Goodbye {{[foo bar]}} world!", + map[string]string{"foo bar": "beautiful"}, + nil, nil, nil, + "Goodbye beautiful world!", + }, + // @note MMm ok, well... no... I don't see the purpose of that test + { + "that current context path ({{.}}) doesn't hit helpers", + "test: {{.}}", + nil, nil, + map[string]interface{}{"helper": func() string { + panic("fail") + return "" + }}, + nil, + "test: ", + }, + { + "complex but empty paths (1)", + "{{person/name}}", + map[string]map[string]interface{}{"person": {"name": nil}}, + nil, nil, nil, + "", + }, + { + "complex but empty paths (2)", + "{{person/name}}", + map[string]map[string]string{"person": {}}, + nil, nil, nil, + "", + }, + { + "this keyword in paths (1)", + "{{#goodbyes}}{{this}}{{/goodbyes}}", + map[string]interface{}{"goodbyes": []string{"goodbye", "Goodbye", "GOODBYE"}}, + nil, nil, nil, + "goodbyeGoodbyeGOODBYE", + }, + { + "this keyword in paths (2)", + "{{#hellos}}{{this/text}}{{/hellos}}", + map[string]interface{}{"hellos": []interface{}{ + map[string]string{"text": "hello"}, + map[string]string{"text": "Hello"}, + map[string]string{"text": "HELLO"}, + }}, + nil, nil, nil, + "helloHelloHELLO", + }, + { + "this keyword nested inside path' (1)", + "{{[this]}}", + map[string]string{"this": "bar"}, + nil, nil, nil, + "bar", + }, + { + "this keyword nested inside path' (2)", + "{{text/[this]}}", + map[string]map[string]string{"text": {"this": "bar"}}, + nil, nil, nil, + "bar", + }, + { + "this keyword in helpers (1)", + "{{#goodbyes}}{{foo this}}{{/goodbyes}}", + map[string]interface{}{"goodbyes": []string{"goodbye", "Goodbye", "GOODBYE"}}, + nil, + map[string]interface{}{"foo": barSuffixHelper}, + nil, + "bar goodbyebar Goodbyebar GOODBYE", + }, + { + "this keyword in helpers (2)", + "{{#hellos}}{{foo this/text}}{{/hellos}}", + map[string]interface{}{"hellos": []map[string]string{{"text": "hello"}, {"text": "Hello"}, {"text": "HELLO"}}}, + nil, + 
map[string]interface{}{"foo": barSuffixHelper}, + nil, + "bar hellobar Hellobar HELLO", + }, + { + "this keyword nested inside helpers param (1)", + "{{foo [this]}}", + map[string]interface{}{"this": "bar"}, + nil, + map[string]interface{}{"foo": echoHelper}, + nil, + "bar", + }, + { + "this keyword nested inside helpers param (2)", + "{{foo text/[this]}}", + map[string]map[string]string{"text": {"this": "bar"}}, + nil, + map[string]interface{}{"foo": echoHelper}, + nil, + "bar", + }, + { + "pass string literals (1)", + `{{"foo"}}`, + map[string]string{}, + nil, nil, nil, + "", + }, + { + "pass string literals (2)", + `{{"foo"}}`, + map[string]string{"foo": "bar"}, + nil, nil, nil, + "bar", + }, + { + "pass string literals (3)", + `{{#"foo"}}{{.}}{{/"foo"}}`, + map[string]interface{}{"foo": []string{"bar", "baz"}}, + nil, nil, nil, + "barbaz", + }, + { + "pass number literals (1)", + "{{12}}", + map[string]string{}, + nil, nil, nil, + "", + }, + { + "pass number literals (2)", + "{{12}}", + map[string]string{"12": "bar"}, + nil, nil, nil, + "bar", + }, + { + "pass number literals (3)", + "{{12.34}}", + map[string]string{}, + nil, nil, nil, + "", + }, + { + "pass number literals (4)", + "{{12.34}}", + map[string]string{"12.34": "bar"}, + nil, nil, nil, + "bar", + }, + { + "pass number literals (5)", + "{{12.34 1}}", + map[string]interface{}{"12.34": func(context string) string { + return "bar" + context + }}, + nil, nil, nil, + "bar1", + }, + { + "pass boolean literals (1)", + "{{true}}", + map[string]string{}, + nil, nil, nil, + "", + }, + { + "pass boolean literals (2)", + "{{true}}", + map[string]string{"": "foo"}, + nil, nil, nil, + "", + }, + { + "pass boolean literals (3)", + "{{false}}", + map[string]string{"false": "foo"}, + nil, nil, nil, + "foo", + }, + { + "should handle literals in subexpression", + "{{foo (false)}}", + map[string]interface{}{"false": func() string { return "bar" }}, + nil, + map[string]interface{}{"foo": func(context string) string { + return context + }}, + nil, + "bar", + }, +} + +func TestBasic(t *testing.T) { + launchTests(t, basicTests) +} + +func TestBasicErrors(t *testing.T) { + t.Parallel() + + var err error + + inputs := []string{ + // this keyword nested inside path + "{{#hellos}}{{text/this/foo}}{{/hellos}}", + // this keyword nested inside helpers param + "{{#hellos}}{{foo text/this/foo}}{{/hellos}}", + } + + expectedError := regexp.QuoteMeta("Invalid path: text/this") + + for _, input := range inputs { + _, err = raymond.Parse(input) + if err == nil { + t.Errorf("Test failed - Error expected") + } + + match, errMatch := regexp.MatchString(expectedError, fmt.Sprint(err)) + if errMatch != nil { + panic("Failed to match regexp") + } + + if !match { + t.Errorf("Test failed - Expected error:\n\t%s\n\nGot:\n\t%s", expectedError, err) + } + } +} diff --git a/vendor/github.com/aymerick/raymond/handlebars/blocks_test.go b/vendor/github.com/aymerick/raymond/handlebars/blocks_test.go new file mode 100644 index 0000000..de435ba --- /dev/null +++ b/vendor/github.com/aymerick/raymond/handlebars/blocks_test.go @@ -0,0 +1,208 @@ +package handlebars + +import "testing" + +// +// Those tests come from: +// https://github.com/wycats/handlebars.js/blob/master/spec/blocks.js +// +var blocksTests = []Test{ + { + "array (1) - Arrays iterate over the contents when not empty", + "{{#goodbyes}}{{text}}! 
{{/goodbyes}}cruel {{world}}!", + map[string]interface{}{"goodbyes": []map[string]string{{"text": "goodbye"}, {"text": "Goodbye"}, {"text": "GOODBYE"}}, "world": "world"}, + nil, nil, nil, + "goodbye! Goodbye! GOODBYE! cruel world!", + }, + { + "array (2) - Arrays ignore the contents when empty", + "{{#goodbyes}}{{text}}! {{/goodbyes}}cruel {{world}}!", + map[string]interface{}{"goodbyes": []map[string]string{}, "world": "world"}, + nil, nil, nil, + "cruel world!", + }, + { + "array without data", + "{{#goodbyes}}{{text}}{{/goodbyes}} {{#goodbyes}}{{text}}{{/goodbyes}}", + map[string]interface{}{"goodbyes": []map[string]string{{"text": "goodbye"}, {"text": "Goodbye"}, {"text": "GOODBYE"}}, "world": "world"}, + nil, nil, nil, + "goodbyeGoodbyeGOODBYE goodbyeGoodbyeGOODBYE", + }, + { + "array with @index - The @index variable is used", + "{{#goodbyes}}{{@index}}. {{text}}! {{/goodbyes}}cruel {{world}}!", + map[string]interface{}{"goodbyes": []map[string]string{{"text": "goodbye"}, {"text": "Goodbye"}, {"text": "GOODBYE"}}, "world": "world"}, + nil, nil, nil, + "0. goodbye! 1. Goodbye! 2. GOODBYE! cruel world!", + }, + { + "empty block (1) - Arrays iterate over the contents when not empty", + "{{#goodbyes}}{{/goodbyes}}cruel {{world}}!", + map[string]interface{}{"goodbyes": []map[string]string{{"text": "goodbye"}, {"text": "Goodbye"}, {"text": "GOODBYE"}}, "world": "world"}, + nil, nil, nil, + "cruel world!", + }, + { + "empty block (1) - Arrays ignore the contents when empty", + "{{#goodbyes}}{{/goodbyes}}cruel {{world}}!", + map[string]interface{}{"goodbyes": []map[string]string{}, "world": "world"}, + nil, nil, nil, + "cruel world!", + }, + { + "block with complex lookup - Templates can access variables in contexts up the stack with relative path syntax", + "{{#goodbyes}}{{text}} cruel {{../name}}! {{/goodbyes}}", + map[string]interface{}{"goodbyes": []map[string]string{{"text": "goodbye"}, {"text": "Goodbye"}, {"text": "GOODBYE"}}, "name": "Alan"}, + nil, nil, nil, + "goodbye cruel Alan! Goodbye cruel Alan! GOODBYE cruel Alan! ", + }, + { + "multiple blocks with complex lookup", + "{{#goodbyes}}{{../name}}{{../name}}{{/goodbyes}}", + map[string]interface{}{"goodbyes": []map[string]string{{"text": "goodbye"}, {"text": "Goodbye"}, {"text": "GOODBYE"}}, "name": "Alan"}, + nil, nil, nil, + "AlanAlanAlanAlanAlanAlan", + }, + + // @todo "{{#goodbyes}}{{text}} cruel {{foo/../name}}! 
{{/goodbyes}}" should throw error + + { + "block with deep nested complex lookup", + "{{#outer}}Goodbye {{#inner}}cruel {{../sibling}} {{../../omg}}{{/inner}}{{/outer}}", + map[string]interface{}{"omg": "OMG!", "outer": []map[string]interface{}{{"sibling": "sad", "inner": []map[string]string{{"text": "goodbye"}}}}}, + nil, nil, nil, + "Goodbye cruel sad OMG!", + }, + { + "inverted sections with unset value - Inverted section rendered when value isn't set.", + "{{#goodbyes}}{{this}}{{/goodbyes}}{{^goodbyes}}Right On!{{/goodbyes}}", + map[string]interface{}{}, + nil, nil, nil, + "Right On!", + }, + { + "inverted sections with false value - Inverted section rendered when value is false.", + "{{#goodbyes}}{{this}}{{/goodbyes}}{{^goodbyes}}Right On!{{/goodbyes}}", + map[string]interface{}{"goodbyes": false}, + nil, nil, nil, + "Right On!", + }, + { + "inverted section with empty set - Inverted section rendered when value is empty set.", + "{{#goodbyes}}{{this}}{{/goodbyes}}{{^goodbyes}}Right On!{{/goodbyes}}", + map[string]interface{}{"goodbyes": []interface{}{}}, + nil, nil, nil, + "Right On!", + }, + { + "block inverted sections", + "{{#people}}{{name}}{{^}}{{none}}{{/people}}", + map[string]interface{}{"none": "No people"}, + nil, nil, nil, + "No people", + }, + { + "chained inverted sections (1)", + "{{#people}}{{name}}{{else if none}}{{none}}{{/people}}", + map[string]interface{}{"none": "No people"}, + nil, nil, nil, + "No people", + }, + { + "chained inverted sections (2)", + "{{#people}}{{name}}{{else if nothere}}fail{{else unless nothere}}{{none}}{{/people}}", + map[string]interface{}{"none": "No people"}, + nil, nil, nil, + "No people", + }, + { + "chained inverted sections (3)", + "{{#people}}{{name}}{{else if none}}{{none}}{{else}}fail{{/people}}", + map[string]interface{}{"none": "No people"}, + nil, nil, nil, + "No people", + }, + + // @todo "{{#people}}{{name}}{{else if none}}{{none}}{{/if}}" should throw error + + { + "block inverted sections with empty arrays", + "{{#people}}{{name}}{{^}}{{none}}{{/people}}", + map[string]interface{}{"none": "No people", "people": map[string]interface{}{}}, + nil, nil, nil, + "No people", + }, + { + "block standalone else sections (1)", + "{{#people}}\n{{name}}\n{{^}}\n{{none}}\n{{/people}}\n", + map[string]interface{}{"none": "No people"}, + nil, nil, nil, + "No people\n", + }, + { + "block standalone else sections (2)", + "{{#none}}\n{{.}}\n{{^}}\n{{none}}\n{{/none}}\n", + map[string]interface{}{"none": "No people"}, + nil, nil, nil, + "No people\n", + }, + { + "block standalone else sections (3)", + "{{#people}}\n{{name}}\n{{^}}\n{{none}}\n{{/people}}\n", + map[string]interface{}{"none": "No people"}, + nil, nil, nil, + "No people\n", + }, + { + "block standalone chained else sections (1)", + "{{#people}}\n{{name}}\n{{else if none}}\n{{none}}\n{{/people}}\n", + map[string]interface{}{"none": "No people"}, + nil, nil, nil, + "No people\n", + }, + { + "block standalone chained else sections (2)", + "{{#people}}\n{{name}}\n{{else if none}}\n{{none}}\n{{^}}\n{{/people}}\n", + map[string]interface{}{"none": "No people"}, + nil, nil, nil, + "No people\n", + }, + { + "should handle nesting", + "{{#data}}\n{{#if true}}\n{{.}}\n{{/if}}\n{{/data}}\nOK.", + map[string]interface{}{"data": []int{1, 3, 5}}, + nil, nil, nil, + "1\n3\n5\nOK.", + }, + // // @todo compat mode + // { + // "block with deep recursive lookup lookup", + // "{{#outer}}Goodbye {{#inner}}cruel {{omg}}{{/inner}}{{/outer}}", + // map[string]interface{}{"omg": "OMG!", "outer": 
[]map[string]interface{}{{"inner": []map[string]string{{"text": "goodbye"}}}}}, + // nil, + // nil, + // nil, + // "Goodbye cruel OMG!", + // }, + // // @todo compat mode + // { + // "block with deep recursive pathed lookup", + // "{{#outer}}Goodbye {{#inner}}cruel {{omg.yes}}{{/inner}}{{/outer}}", + // map[string]interface{}{"omg": map[string]string{"yes": "OMG!"}, "outer": []map[string]interface{}{{"inner": []map[string]string{{"yes": "no", "text": "goodbye"}}}}}, + // nil, + // nil, + // nil, + // "Goodbye cruel OMG!", + // }, + { + "block with missed recursive lookup", + "{{#outer}}Goodbye {{#inner}}cruel {{omg.yes}}{{/inner}}{{/outer}}", + map[string]interface{}{"omg": map[string]string{"no": "OMG!"}, "outer": []map[string]interface{}{{"inner": []map[string]string{{"yes": "no", "text": "goodbye"}}}}}, + nil, nil, nil, + "Goodbye cruel ", + }, +} + +func TestBlocks(t *testing.T) { + launchTests(t, blocksTests) +} diff --git a/vendor/github.com/aymerick/raymond/handlebars/builtins_test.go b/vendor/github.com/aymerick/raymond/handlebars/builtins_test.go new file mode 100644 index 0000000..1f986e4 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/handlebars/builtins_test.go @@ -0,0 +1,341 @@ +package handlebars + +import "testing" + +// +// Those tests come from: +// https://github.com/wycats/handlebars.js/blob/master/spec/builtin.js +// +var builtinsTests = []Test{ + { + "#if - if with boolean argument shows the contents when true", + "{{#if goodbye}}GOODBYE {{/if}}cruel {{world}}!", + map[string]interface{}{"goodbye": true, "world": "world"}, + nil, nil, nil, + "GOODBYE cruel world!", + }, + { + "#if - if with string argument shows the contents", + "{{#if goodbye}}GOODBYE {{/if}}cruel {{world}}!", + map[string]interface{}{"goodbye": "dummy", "world": "world"}, + nil, nil, nil, + "GOODBYE cruel world!", + }, + { + "#if - if with boolean argument does not show the contents when false", + "{{#if goodbye}}GOODBYE {{/if}}cruel {{world}}!", + map[string]interface{}{"goodbye": false, "world": "world"}, + nil, nil, nil, + "cruel world!", + }, + { + "#if - if with undefined does not show the contents", + "{{#if goodbye}}GOODBYE {{/if}}cruel {{world}}!", + map[string]interface{}{"world": "world"}, + nil, nil, nil, + "cruel world!", + }, + { + "#if - if with non-empty array shows the contents", + "{{#if goodbye}}GOODBYE {{/if}}cruel {{world}}!", + map[string]interface{}{"goodbye": []string{"foo"}, "world": "world"}, + nil, nil, nil, + "GOODBYE cruel world!", + }, + { + "#if - if with empty array does not show the contents", + "{{#if goodbye}}GOODBYE {{/if}}cruel {{world}}!", + map[string]interface{}{"goodbye": []string{}, "world": "world"}, + nil, nil, nil, + "cruel world!", + }, + { + "#if - if with zero does not show the contents", + "{{#if goodbye}}GOODBYE {{/if}}cruel {{world}}!", + map[string]interface{}{"goodbye": 0, "world": "world"}, + nil, nil, nil, + "cruel world!", + }, + { + "#if - if with zero and includeZero option shows the contents", + "{{#if goodbye includeZero=true}}GOODBYE {{/if}}cruel {{world}}!", + map[string]interface{}{"goodbye": 0, "world": "world"}, + nil, nil, nil, + "GOODBYE cruel world!", + }, + { + "#if - if with function shows the contents when function returns true", + "{{#if goodbye}}GOODBYE {{/if}}cruel {{world}}!", + map[string]interface{}{ + "goodbye": func() bool { return true }, + "world": "world", + }, + nil, nil, nil, + "GOODBYE cruel world!", + }, + { + "#if - if with function shows the contents when function returns string", + "{{#if goodbye}}GOODBYE 
{{/if}}cruel {{world}}!", + map[string]interface{}{ + "goodbye": func() string { return "world" }, + "world": "world", + }, + nil, nil, nil, + "GOODBYE cruel world!", + }, + { + "#if - if with function does not show the contents when returns false", + "{{#if goodbye}}GOODBYE {{/if}}cruel {{world}}!", + map[string]interface{}{ + "goodbye": func() bool { return false }, + "world": "world", + }, + nil, nil, nil, + "cruel world!", + }, + { + "#if - if with function does not show the contents when returns undefined", + "{{#if goodbye}}GOODBYE {{/if}}cruel {{world}}!", + map[string]interface{}{ + "goodbye": func() interface{} { return nil }, + "world": "world", + }, + nil, nil, nil, + "cruel world!", + }, + { + "#with", + "{{#with person}}{{first}} {{last}}{{/with}}", + map[string]interface{}{"person": map[string]string{"first": "Alan", "last": "Johnson"}}, + nil, nil, nil, + "Alan Johnson", + }, + { + "#with - with with function argument", + "{{#with person}}{{first}} {{last}}{{/with}}", + map[string]interface{}{ + "person": func() map[string]string { return map[string]string{"first": "Alan", "last": "Johnson"} }, + }, nil, nil, nil, + "Alan Johnson", + }, + { + "#with - with with else", + "{{#with person}}Person is present{{else}}Person is not present{{/with}}", + map[string]interface{}{}, + nil, nil, nil, + "Person is not present", + }, + + { + "#each - each with array argument iterates over the contents when not empty", + "{{#each goodbyes}}{{text}}! {{/each}}cruel {{world}}!", + map[string]interface{}{"goodbyes": []map[string]string{{"text": "goodbye"}, {"text": "Goodbye"}, {"text": "GOODBYE"}}, "world": "world"}, + nil, nil, nil, + "goodbye! Goodbye! GOODBYE! cruel world!", + }, + { + "#each - each with array argument ignores the contents when empty", + "{{#each goodbyes}}{{text}}! {{/each}}cruel {{world}}!", + map[string]interface{}{"goodbyes": []map[string]string{}, "world": "world"}, + nil, nil, nil, + "cruel world!", + }, + { + "#each - each without data (1)", + "{{#each goodbyes}}{{text}}! {{/each}}cruel {{world}}!", + map[string]interface{}{"goodbyes": []map[string]string{{"text": "goodbye"}, {"text": "Goodbye"}, {"text": "GOODBYE"}}, "world": "world"}, + nil, nil, nil, + "goodbye! Goodbye! GOODBYE! cruel world!", + }, + { + "#each - each without data (2)", + "{{#each .}}{{.}}{{/each}}", + map[string]interface{}{"goodbyes": "cruel", "world": "world"}, + nil, nil, nil, + // note: a go hash is not ordered, so result may vary, this behaviour differs from the JS implementation + []string{"cruelworld", "worldcruel"}, + }, + { + "#each - each without context", + "{{#each goodbyes}}{{text}}! {{/each}}cruel {{world}}!", + nil, nil, nil, nil, + "cruel !", + }, + + // NOTE: we test with a map instead of an object + { + "#each - each with an object and @key (map)", + "{{#each goodbyes}}{{@key}}. {{text}}! {{/each}}cruel {{world}}!", + map[string]interface{}{"goodbyes": map[interface{}]map[string]string{"#1": {"text": "goodbye"}, 2: {"text": "GOODBYE"}}, "world": "world"}, + nil, nil, nil, + []string{"<b>#1</b>. goodbye! 2. GOODBYE! cruel world!", "2. GOODBYE! <b>#1</b>. goodbye! cruel world!"}, + }, + // NOTE: An additional test with a struct, but without an html stuff for the key, because it is impossible + { + "#each - each with an object and @key (struct)", + "{{#each goodbyes}}{{@key}}. {{text}}! 
{{/each}}cruel {{world}}!", + map[string]interface{}{ + "goodbyes": struct { + Foo map[string]string + Bar map[string]int + }{map[string]string{"text": "baz"}, map[string]int{"text": 10}}, + "world": "world", + }, + nil, nil, nil, + []string{"Foo. baz! Bar. 10! cruel world!", "Bar. 10! Foo. baz! cruel world!"}, + }, + { + "#each - each with @index", + "{{#each goodbyes}}{{@index}}. {{text}}! {{/each}}cruel {{world}}!", + map[string]interface{}{"goodbyes": []map[string]string{{"text": "goodbye"}, {"text": "Goodbye"}, {"text": "GOODBYE"}}, "world": "world"}, + nil, nil, nil, + "0. goodbye! 1. Goodbye! 2. GOODBYE! cruel world!", + }, + { + "#each - each with nested @index", + "{{#each goodbyes}}{{@index}}. {{text}}! {{#each ../goodbyes}}{{@index}} {{/each}}After {{@index}} {{/each}}{{@index}}cruel {{world}}!", + map[string]interface{}{"goodbyes": []map[string]string{{"text": "goodbye"}, {"text": "Goodbye"}, {"text": "GOODBYE"}}, "world": "world"}, + nil, nil, nil, + "0. goodbye! 0 1 2 After 0 1. Goodbye! 0 1 2 After 1 2. GOODBYE! 0 1 2 After 2 cruel world!", + }, + { + "#each - each with block params", + "{{#each goodbyes as |value index|}}{{index}}. {{value.text}}! {{#each ../goodbyes as |childValue childIndex|}} {{index}} {{childIndex}}{{/each}} After {{index}} {{/each}}{{index}}cruel {{world}}!", + map[string]interface{}{"goodbyes": []map[string]string{{"text": "goodbye"}, {"text": "Goodbye"}}, "world": "world"}, + nil, nil, nil, + "0. goodbye! 0 0 0 1 After 0 1. Goodbye! 1 0 1 1 After 1 cruel world!", + }, + // @note: That test differs from JS impl because maps and structs are not ordered in go + { + "#each - each object with @index", + "{{#each goodbyes}}{{@index}}. {{text}}! {{/each}}cruel {{world}}!", + map[string]interface{}{"goodbyes": map[string]map[string]string{"a": {"text": "goodbye"}, "b": {"text": "Goodbye"}}, "world": "world"}, + nil, nil, nil, + []string{"0. goodbye! 1. Goodbye! cruel world!", "0. Goodbye! 1. goodbye! cruel world!"}, + }, + { + "#each - each with nested @first", + "{{#each goodbyes}}({{#if @first}}{{text}}! {{/if}}{{#each ../goodbyes}}{{#if @first}}{{text}}!{{/if}}{{/each}}{{#if @first}} {{text}}!{{/if}}) {{/each}}cruel {{world}}!", + map[string]interface{}{"goodbyes": []map[string]string{{"text": "goodbye"}, {"text": "Goodbye"}, {"text": "GOODBYE"}}, "world": "world"}, + nil, nil, nil, + "(goodbye! goodbye! goodbye!) (goodbye!) (goodbye!) cruel world!", + }, + // @note: That test differs from JS impl because maps and structs are not ordered in go + { + "#each - each object with @first", + "{{#each goodbyes}}{{#if @first}}{{text}}! {{/if}}{{/each}}cruel {{world}}!", + map[string]interface{}{"goodbyes": map[string]map[string]string{"foo": {"text": "goodbye"}, "bar": {"text": "Goodbye"}}, "world": "world"}, + nil, nil, nil, + []string{"goodbye! cruel world!", "Goodbye! cruel world!"}, + }, + { + "#each - each with @last", + "{{#each goodbyes}}{{#if @last}}{{text}}! {{/if}}{{/each}}cruel {{world}}!", + map[string]interface{}{"goodbyes": []map[string]string{{"text": "goodbye"}, {"text": "Goodbye"}, {"text": "GOODBYE"}}, "world": "world"}, + nil, nil, nil, + "GOODBYE! cruel world!", + }, + // @note: That test differs from JS impl because maps and structs are not ordered in go + { + "#each - each object with @last", + "{{#each goodbyes}}{{#if @last}}{{text}}! 
{{/if}}{{/each}}cruel {{world}}!", + map[string]interface{}{"goodbyes": map[string]map[string]string{"foo": {"text": "goodbye"}, "bar": {"text": "Goodbye"}}, "world": "world"}, + nil, nil, nil, + []string{"goodbye! cruel world!", "Goodbye! cruel world!"}, + }, + { + "#each - each with nested @last", + "{{#each goodbyes}}({{#if @last}}{{text}}! {{/if}}{{#each ../goodbyes}}{{#if @last}}{{text}}!{{/if}}{{/each}}{{#if @last}} {{text}}!{{/if}}) {{/each}}cruel {{world}}!", + map[string]interface{}{"goodbyes": []map[string]string{{"text": "goodbye"}, {"text": "Goodbye"}, {"text": "GOODBYE"}}, "world": "world"}, + nil, nil, nil, + "(GOODBYE!) (GOODBYE!) (GOODBYE! GOODBYE! GOODBYE!) cruel world!", + }, + + { + "#each - each with function argument (1)", + "{{#each goodbyes}}{{text}}! {{/each}}cruel {{world}}!", + map[string]interface{}{"goodbyes": func() []map[string]string { + return []map[string]string{{"text": "goodbye"}, {"text": "Goodbye"}, {"text": "GOODBYE"}} + }, "world": "world"}, + nil, nil, nil, + "goodbye! Goodbye! GOODBYE! cruel world!", + }, + { + "#each - each with function argument (2)", + "{{#each goodbyes}}{{text}}! {{/each}}cruel {{world}}!", + map[string]interface{}{"goodbyes": []map[string]string{}, "world": "world"}, + nil, nil, nil, + "cruel world!", + }, + { + "#each - data passed to helpers", + "{{#each letters}}{{this}}{{detectDataInsideEach}}{{/each}}", + map[string][]string{"letters": {"a", "b", "c"}}, + map[string]interface{}{"exclaim": "!"}, + map[string]interface{}{"detectDataInsideEach": detectDataHelper}, + nil, + "a!b!c!", + }, + + // @todo "each on implicit context" should throw error + + // SKIP: #log - "should call logger at default level" + // SKIP: #log - "should call logger at data level" + // SKIP: #log - "should output to info" + // SKIP: #log - "should log at data level" + // SKIP: #log - "should handle missing logger" + + // @note Test added + // @todo Check log output + { + "#log", + "{{log blah}}", + map[string]string{"blah": "whee"}, + nil, nil, nil, + "", + }, + + // @note Test added + { + "#lookup - should lookup array element", + "{{#each goodbyes}}{{lookup ../data @index}}{{/each}}", + map[string]interface{}{"goodbyes": []int{0, 1}, "data": []string{"foo", "bar"}}, + nil, nil, nil, + "foobar", + }, + { + "#lookup - should lookup map element", + "{{#each goodbyes}}{{lookup ../data .}}{{/each}}", + map[string]interface{}{"goodbyes": []string{"foo", "bar"}, "data": map[string]string{"foo": "baz", "bar": "bat"}}, + nil, nil, nil, + "bazbat", + }, + { + "#lookup - should lookup struct field", + "{{#each goodbyes}}{{lookup ../data .}}{{/each}}", + map[string]interface{}{"goodbyes": []string{"Foo", "Bar"}, "data": struct { + Foo string + Bar string + }{"baz", "bat"}}, + nil, nil, nil, + "bazbat", + }, + { + "#lookup - should lookup arbitrary content", + "{{#each goodbyes}}{{lookup ../data .}}{{/each}}", + map[string]interface{}{"goodbyes": []int{0, 1}, "data": []string{"foo", "bar"}}, + nil, nil, nil, + "foobar", + }, + { + "#lookup - should not fail on undefined value", + "{{#each goodbyes}}{{lookup ../bar .}}{{/each}}", + map[string]interface{}{"goodbyes": []int{0, 1}, "data": []string{"foo", "bar"}}, + nil, nil, nil, + "", + }, +} + +func TestBuiltins(t *testing.T) { + launchTests(t, builtinsTests) +} diff --git a/vendor/github.com/aymerick/raymond/handlebars/data_test.go b/vendor/github.com/aymerick/raymond/handlebars/data_test.go new file mode 100644 index 0000000..fb80020 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/handlebars/data_test.go 
@@ -0,0 +1,300 @@ +package handlebars + +import ( + "testing" + + "github.com/aymerick/raymond" +) + +// +// Those tests come from: +// https://github.com/wycats/handlebars.js/blob/master/spec/data.js +// +var dataTests = []Test{ + { + "passing in data to a compiled function that expects data - works with helpers", + "{{hello}}", + map[string]string{"noun": "cat"}, + map[string]interface{}{"adjective": "happy"}, + map[string]interface{}{"hello": func(options *raymond.Options) string { + return options.DataStr("adjective") + " " + options.ValueStr("noun") + }}, + nil, + "happy cat", + }, + { + "data can be looked up via @foo", + "{{@hello}}", + nil, + map[string]interface{}{"hello": "hello"}, + nil, nil, + "hello", + }, + { + "deep @foo triggers automatic top-level data", + `{{#let world="world"}}{{#if foo}}{{#if foo}}Hello {{@world}}{{/if}}{{/if}}{{/let}}`, + map[string]bool{"foo": true}, + map[string]interface{}{"hello": "hello"}, + map[string]interface{}{"let": func(options *raymond.Options) string { + frame := options.NewDataFrame() + + for k, v := range options.Hash() { + frame.Set(k, v) + } + + return options.FnData(frame) + }}, + nil, + "Hello world", + }, + { + "parameter data can be looked up via @foo", + `{{hello @world}}`, + nil, + map[string]interface{}{"world": "world"}, + map[string]interface{}{"hello": func(context string) string { + return "Hello " + context + }}, + nil, + "Hello world", + }, + { + "hash values can be looked up via @foo", + `{{hello noun=@world}}`, + nil, + map[string]interface{}{"world": "world"}, + map[string]interface{}{"hello": func(options *raymond.Options) string { + return "Hello " + options.HashStr("noun") + }}, + nil, + "Hello world", + }, + { + "nested parameter data can be looked up via @foo.bar", + `{{hello @world.bar}}`, + nil, + map[string]interface{}{"world": map[string]string{"bar": "world"}}, + map[string]interface{}{"hello": func(context string) string { + return "Hello " + context + }}, + nil, + "Hello world", + }, + { + "nested parameter data does not fail with @world.bar", + `{{hello @world.bar}}`, + nil, + map[string]interface{}{"foo": map[string]string{"bar": "world"}}, + map[string]interface{}{"hello": func(context string) string { + return "Hello " + context + }}, + nil, + // @todo Test differs with JS implementation: we don't output `undefined` + "Hello ", + }, + + // @todo "parameter data throws when using complex scope references", + + { + "data can be functions", + `{{@hello}}`, + nil, + map[string]interface{}{"hello": func() string { return "hello" }}, + nil, nil, + "hello", + }, + { + "data can be functions with params", + `{{@hello "hello"}}`, + nil, + map[string]interface{}{"hello": func(context string) string { return context }}, + nil, nil, + "hello", + }, + + { + "data is inherited downstream", + `{{#let foo=1 bar=2}}{{#let foo=bar.baz}}{{@bar}}{{@foo}}{{/let}}{{@foo}}{{/let}}`, + map[string]map[string]string{"bar": {"baz": "hello world"}}, + nil, + map[string]interface{}{"let": func(options *raymond.Options) string { + frame := options.NewDataFrame() + + for k, v := range options.Hash() { + frame.Set(k, v) + } + + return options.FnData(frame) + }}, + nil, + "2hello world1", + }, + { + "passing in data to a compiled function that expects data - works with helpers in partials", + `{{>myPartial}}`, + map[string]string{"noun": "cat"}, + map[string]interface{}{"adjective": "happy"}, + map[string]interface{}{"hello": func(options *raymond.Options) string { + return options.DataStr("adjective") + " " + options.ValueStr("noun") + 
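+ // DataStr("adjective") resolves against the private data frame supplied to the test, while ValueStr("noun") resolves against the rendering context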
}}, + map[string]string{ + "myPartial": "{{hello}}", + }, + "happy cat", + }, + { + "passing in data to a compiled function that expects data - works with helpers and parameters", + `{{hello world}}`, + map[string]interface{}{"exclaim": true, "world": "world"}, + map[string]interface{}{"adjective": "happy"}, + map[string]interface{}{"hello": func(context string, options *raymond.Options) string { + str := "error" + if b, ok := options.Value("exclaim").(bool); ok { + if b { + str = "!" + } else { + str = "" + } + } + + return options.DataStr("adjective") + " " + context + str + }}, + nil, + "happy world!", + }, + { + "passing in data to a compiled function that expects data - works with block helpers", + `{{#hello}}{{world}}{{/hello}}`, + map[string]bool{"exclaim": true}, + map[string]interface{}{"adjective": "happy"}, + map[string]interface{}{ + "hello": func(options *raymond.Options) string { + return options.Fn() + }, + "world": func(options *raymond.Options) string { + str := "error" + if b, ok := options.Value("exclaim").(bool); ok { + if b { + str = "!" + } else { + str = "" + } + } + + return options.DataStr("adjective") + " world" + str + }, + }, + nil, + "happy world!", + }, + { + "passing in data to a compiled function that expects data - works with block helpers that use ..", + `{{#hello}}{{world ../zomg}}{{/hello}}`, + map[string]interface{}{"exclaim": true, "zomg": "world"}, + map[string]interface{}{"adjective": "happy"}, + map[string]interface{}{ + "hello": func(options *raymond.Options) string { + return options.FnWith(map[string]string{"exclaim": "?"}) + }, + "world": func(context string, options *raymond.Options) string { + return options.DataStr("adjective") + " " + context + options.ValueStr("exclaim") + }, + }, + nil, + "happy world?", + }, + { + "passing in data to a compiled function that expects data - data is passed to with block helpers where children use ..", + `{{#hello}}{{world ../zomg}}{{/hello}}`, + map[string]interface{}{"exclaim": true, "zomg": "world"}, + map[string]interface{}{"adjective": "happy", "accessData": "#win"}, + map[string]interface{}{ + "hello": func(options *raymond.Options) string { + return options.DataStr("accessData") + " " + options.FnWith(map[string]string{"exclaim": "?"}) + }, + "world": func(context string, options *raymond.Options) string { + return options.DataStr("adjective") + " " + context + options.ValueStr("exclaim") + }, + }, + nil, + "#win happy world?", + }, + { + "you can override inherited data when invoking a helper", + `{{#hello}}{{world zomg}}{{/hello}}`, + map[string]interface{}{"exclaim": true, "zomg": "planet"}, + map[string]interface{}{"adjective": "happy"}, + map[string]interface{}{ + "hello": func(options *raymond.Options) string { + ctx := map[string]string{"exclaim": "?", "zomg": "world"} + data := options.NewDataFrame() + data.Set("adjective", "sad") + + return options.FnCtxData(ctx, data) + }, + "world": func(context string, options *raymond.Options) string { + return options.DataStr("adjective") + " " + context + options.ValueStr("exclaim") + }, + }, + nil, + "sad world?", + }, + { + "you can override inherited data when invoking a helper with depth", + `{{#hello}}{{world ../zomg}}{{/hello}}`, + map[string]interface{}{"exclaim": true, "zomg": "world"}, + map[string]interface{}{"adjective": "happy"}, + map[string]interface{}{ + "hello": func(options *raymond.Options) string { + ctx := map[string]string{"exclaim": "?"} + data := options.NewDataFrame() + data.Set("adjective", "sad") + + return 
options.FnCtxData(ctx, data) + }, + "world": func(context string, options *raymond.Options) string { + return options.DataStr("adjective") + " " + context + options.ValueStr("exclaim") + }, + }, + nil, + "sad world?", + }, + { + "@root - the root context can be looked up via @root", + `{{@root.foo}}`, + map[string]interface{}{"foo": "hello"}, + nil, nil, nil, + "hello", + }, + { + "@root - passed root values take priority", + `{{@root.foo}}`, + nil, + map[string]interface{}{"root": map[string]string{"foo": "hello"}}, + nil, nil, + "hello", + }, + { + "nesting - the root context can be looked up via @root", + `{{#helper}}{{#helper}}{{@./depth}} {{@../depth}} {{@../../depth}}{{/helper}}{{/helper}}`, + map[string]interface{}{"foo": "hello"}, + map[string]interface{}{"depth": 0}, + map[string]interface{}{ + "helper": func(options *raymond.Options) string { + data := options.NewDataFrame() + + if depth, ok := options.Data("depth").(int); ok { + data.Set("depth", depth+1) + } + + return options.FnData(data) + }, + }, + nil, + "2 1 0", + }, +} + +func TestData(t *testing.T) { + launchTests(t, dataTests) +} diff --git a/vendor/github.com/aymerick/raymond/handlebars/doc.go b/vendor/github.com/aymerick/raymond/handlebars/doc.go new file mode 100644 index 0000000..6a4c50c --- /dev/null +++ b/vendor/github.com/aymerick/raymond/handlebars/doc.go @@ -0,0 +1,2 @@ +// Package handlebars contains all the tests that come from handlebars.js project. +package handlebars diff --git a/vendor/github.com/aymerick/raymond/handlebars/helpers_test.go b/vendor/github.com/aymerick/raymond/handlebars/helpers_test.go new file mode 100644 index 0000000..d04441b --- /dev/null +++ b/vendor/github.com/aymerick/raymond/handlebars/helpers_test.go @@ -0,0 +1,670 @@ +package handlebars + +import ( + "fmt" + "reflect" + "strings" + "testing" + + "github.com/aymerick/raymond" +) + +// +// Helpers +// + +func barSuffixHelper(context string) string { + return "bar " + context +} + +func echoHelper(str string) string { + return str +} + +func echoNbHelper(str string, nb int) string { + result := "" + for i := 0; i < nb; i++ { + result += str + } + + return result +} + +func linkHelper(prefix string, options *raymond.Options) string { + return fmt.Sprintf(`%s`, prefix, options.ValueStr("url"), options.ValueStr("text")) +} + +func rawHelper(options *raymond.Options) string { + return options.Fn() +} + +func rawThreeHelper(a, b, c string, options *raymond.Options) string { + return options.Fn() + a + b + c +} + +func formHelper(options *raymond.Options) string { + return "
<form>" + options.Fn() + "</form>
" +} + +func formCtxHelper(context interface{}, options *raymond.Options) string { + return "
<form>" + options.FnWith(context) + "</form>
" +} + +func listHelper(context interface{}, options *raymond.Options) string { + val := reflect.ValueOf(context) + switch val.Kind() { + case reflect.Array, reflect.Slice: + if val.Len() > 0 { + result := "
<ul>" + for i := 0; i < val.Len(); i++ { + result += "
<li>" + result += options.FnWith(val.Index(i).Interface()) + result += "
</li>" + }
 + result += "
</ul>" + + return result + } + } + + return "
<p>" + options.Inverse() + "</p>

" +} + +func blogHelper(val string) string { + return "val is " + val +} + +func equalHelper(a, b string) string { + return raymond.Str(a == b) +} + +func dashHelper(a, b string) string { + return a + "-" + b +} + +func concatHelper(a, b string) string { + return a + b +} + +func detectDataHelper(options *raymond.Options) string { + if val, ok := options.DataFrame().Get("exclaim").(string); ok { + return val + } + + return "" +} + +// +// Those tests come from: +// https://github.com/wycats/handlebars.js/blob/master/spec/helper.js +// +var helpersTests = []Test{ + { + "helper with complex lookup", + "{{#goodbyes}}{{{link ../prefix}}}{{/goodbyes}}", + map[string]interface{}{"prefix": "/root", "goodbyes": []map[string]string{{"text": "Goodbye", "url": "goodbye"}}}, + nil, + map[string]interface{}{"link": linkHelper}, + nil, + `Goodbye`, + }, + { + "helper for raw block gets raw content", + "{{{{raw}}}} {{test}} {{{{/raw}}}}", + map[string]interface{}{"test": "hello"}, + nil, + map[string]interface{}{"raw": rawHelper}, + nil, + " {{test}} ", + }, + { + "helper for raw block gets parameters", + "{{{{raw 1 2 3}}}} {{test}} {{{{/raw}}}}", + map[string]interface{}{"test": "hello"}, + nil, + map[string]interface{}{"raw": rawThreeHelper}, + nil, + " {{test}} 123", + }, + { + "helper block with complex lookup expression", + "{{#goodbyes}}{{../name}}{{/goodbyes}}", + map[string]interface{}{"name": "Alan"}, + nil, + map[string]interface{}{"goodbyes": func(options *raymond.Options) string { + out := "" + for _, str := range []string{"Goodbye", "goodbye", "GOODBYE"} { + out += str + " " + options.FnWith(str) + "! " + } + return out + }}, + nil, + "Goodbye Alan! goodbye Alan! GOODBYE Alan! ", + }, + { + "helper with complex lookup and nested template", + "{{#goodbyes}}{{#link ../prefix}}{{text}}{{/link}}{{/goodbyes}}", + map[string]interface{}{"prefix": "/root", "goodbyes": []map[string]string{{"text": "Goodbye", "url": "goodbye"}}}, + nil, + map[string]interface{}{"link": linkHelper}, + nil, + `Goodbye`, + }, + { + // note: The JS implementation returns undefined, we return empty string + "helper returning undefined value (1)", + " {{nothere}}", + map[string]interface{}{}, + nil, + map[string]interface{}{"nothere": func() string { + return "" + }}, + nil, + " ", + }, + { + // note: The JS implementation returns undefined, we return empty string + "helper returning undefined value (2)", + " {{#nothere}}{{/nothere}}", + map[string]interface{}{}, + nil, + map[string]interface{}{"nothere": func() string { + return "" + }}, + nil, + " ", + }, + { + "block helper", + "{{#goodbyes}}{{text}}! {{/goodbyes}}cruel {{world}}!", + map[string]interface{}{"world": "world"}, + nil, + map[string]interface{}{"goodbyes": func(options *raymond.Options) string { + return options.FnWith(map[string]string{"text": "GOODBYE"}) + }}, + nil, + "GOODBYE! cruel world!", + }, + { + "block helper staying in the same context", + "{{#form}}

<p>{{name}}</p>

{{/form}}", + map[string]interface{}{"name": "Yehuda"}, + nil, + map[string]interface{}{"form": formHelper}, + nil, + "

<form><p>Yehuda</p></form>

", + }, + { + "block helper should have context in this", + "
<ul>{{#people}}<li>{{#link}}{{name}}{{/link}}</li>{{/people}}</ul>
", + map[string]interface{}{"people": []map[string]interface{}{{"name": "Alan", "id": 1}, {"name": "Yehuda", "id": 2}}}, + nil, + map[string]interface{}{"link": func(options *raymond.Options) string { + return fmt.Sprintf("%s", options.ValueStr("id"), options.Fn()) + }}, + nil, + ``, + }, + { + "block helper for undefined value", + "{{#empty}}shouldn't render{{/empty}}", + nil, nil, nil, nil, + "", + }, + { + "block helper passing a new context", + "{{#form yehuda}}

<p>{{name}}</p>

{{/form}}", + map[string]map[string]string{"yehuda": {"name": "Yehuda"}}, + nil, + map[string]interface{}{"form": formCtxHelper}, + nil, + "

<form><p>Yehuda</p></form>

", + }, + { + "block helper passing a complex path context", + "{{#form yehuda/cat}}

<p>{{name}}</p>

{{/form}}", + map[string]map[string]interface{}{"yehuda": {"name": "Yehuda", "cat": map[string]string{"name": "Harold"}}}, + nil, + map[string]interface{}{"form": formCtxHelper}, + nil, + "

<form><p>Harold</p></form>

", + }, + { + "nested block helpers", + "{{#form yehuda}}

<p>{{name}}</p>

{{#link}}Hello{{/link}}{{/form}}", + map[string]map[string]string{"yehuda": {"name": "Yehuda"}}, + nil, + map[string]interface{}{"link": func(options *raymond.Options) string { + return fmt.Sprintf("%s", options.ValueStr("name"), options.Fn()) + }, "form": formCtxHelper}, + nil, + `

<form><p>Yehuda</p><a href="Yehuda">Hello</a></form>
`, + }, + { + "block helper inverted sections (1) - an inverse wrapper is passed in as a new context", + "{{#list people}}{{name}}{{^}}Nobody's here{{/list}}", + map[string][]map[string]string{"people": {{"name": "Alan"}, {"name": "Yehuda"}}}, + nil, + map[string]interface{}{"list": listHelper}, + nil, + `
<ul><li>Alan</li><li>Yehuda</li></ul>
`, + }, + { + "block helper inverted sections (2) - an inverse wrapper can be optionally called", + "{{#list people}}{{name}}{{^}}Nobody's here{{/list}}", + map[string][]map[string]string{"people": {}}, + nil, + map[string]interface{}{"list": listHelper}, + nil, + `

<p>Nobody's here</p>

`, + }, + { + "block helper inverted sections (3) - the context of an inverse is the parent of the block", + "{{#list people}}Hello{{^}}{{message}}{{/list}}", + map[string]interface{}{"people": []interface{}{}, "message": "Nobody's here"}, + nil, + map[string]interface{}{"list": listHelper}, + nil, + `

<p>Nobody's here</p>

`, + }, + + { + "pathed lambdas with parameters (1)", + "{{./helper 1}}", + map[string]interface{}{ + "helper": func(param int) string { return "winning" }, + "hash": map[string]interface{}{ + "helper": func(param int) string { return "winning" }, + }}, + nil, + map[string]interface{}{"./helper": func(param int) string { return "fail" }}, + nil, + "winning", + }, + { + "pathed lambdas with parameters (2)", + "{{hash/helper 1}}", + map[string]interface{}{ + "helper": func(param int) string { return "winning" }, + "hash": map[string]interface{}{ + "helper": func(param int) string { return "winning" }, + }}, + nil, + map[string]interface{}{"./helper": func(param int) string { return "fail" }}, + nil, + "winning", + }, + + { + "helpers hash - providing a helpers hash (1)", + "Goodbye {{cruel}} {{world}}!", + map[string]interface{}{"cruel": "cruel"}, + nil, + map[string]interface{}{"world": func() string { return "world" }}, + nil, + "Goodbye cruel world!", + }, + { + "helpers hash - providing a helpers hash (2)", + "Goodbye {{#iter}}{{cruel}} {{world}}{{/iter}}!", + map[string]interface{}{"iter": []map[string]string{{"cruel": "cruel"}}}, + nil, + map[string]interface{}{"world": func() string { return "world" }}, + nil, + "Goodbye cruel world!", + }, + { + "helpers hash - in cases of conflict, helpers win (1)", + "{{{lookup}}}", + map[string]interface{}{"lookup": "Explicit"}, + nil, + map[string]interface{}{"lookup": func() string { return "helpers" }}, + nil, + "helpers", + }, + { + "helpers hash - in cases of conflict, helpers win (2)", + "{{lookup}}", + map[string]interface{}{"lookup": "Explicit"}, + nil, + map[string]interface{}{"lookup": func() string { return "helpers" }}, + nil, + "helpers", + }, + { + "helpers hash - the helpers hash is available is nested contexts", + "{{#outer}}{{#inner}}{{helper}}{{/inner}}{{/outer}}", + map[string]interface{}{"outer": map[string]interface{}{"inner": map[string]interface{}{"unused": []string{}}}}, + nil, + map[string]interface{}{"helper": func() string { return "helper" }}, + nil, + "helper", + }, + + // @todo "helpers hash - the helper hash should augment the global hash" + + // @todo "registration" + + { + "decimal number literals work", + "Message: {{hello -1.2 1.2}}", + nil, nil, + map[string]interface{}{"hello": func(times, times2 interface{}) string { + ts, t2s := "NaN", "NaN" + + if v, ok := times.(float64); ok { + ts = raymond.Str(v) + } + + if v, ok := times2.(float64); ok { + t2s = raymond.Str(v) + } + + return "Hello " + ts + " " + t2s + " times" + }}, + nil, + "Message: Hello -1.2 1.2 times", + }, + { + "negative number literals work", + "Message: {{hello -12}}", + nil, nil, + map[string]interface{}{"hello": func(times interface{}) string { + ts := "NaN" + + if v, ok := times.(int); ok { + ts = raymond.Str(v) + } + + return "Hello " + ts + " times" + }}, + nil, + "Message: Hello -12 times", + }, + + { + "String literal parameters - simple literals work", + `Message: {{hello "world" 12 true false}}`, + nil, nil, + map[string]interface{}{"hello": func(p, t, b, b2 interface{}) string { + times, bool1, bool2 := "NaN", "NaB", "NaB" + + param, ok := p.(string) + if !ok { + param = "NaN" + } + + if v, ok := t.(int); ok { + times = raymond.Str(v) + } + + if v, ok := b.(bool); ok { + bool1 = raymond.Str(v) + } + + if v, ok := b2.(bool); ok { + bool2 = raymond.Str(v) + } + + return "Hello " + param + " " + times + " times: " + bool1 + " " + bool2 + }}, + nil, + "Message: Hello world 12 times: true false", + }, + + // @todo "using a quote in the 
middle of a parameter raises an error" + + { + "String literal parameters - escaping a String is possible", + "Message: {{{hello \"\\\"world\\\"\"}}}", + nil, nil, + map[string]interface{}{"hello": func(param string) string { + return "Hello " + param + }}, + nil, + `Message: Hello "world"`, + }, + { + "String literal parameters - it works with ' marks", + "Message: {{{hello \"Alan's world\"}}}", + nil, nil, + map[string]interface{}{"hello": func(param string) string { + return "Hello " + param + }}, + nil, + `Message: Hello Alan's world`, + }, + + { + "multiple parameters - simple multi-params work", + "Message: {{goodbye cruel world}}", + map[string]string{"cruel": "cruel", "world": "world"}, + nil, + map[string]interface{}{"goodbye": func(cruel, world string) string { + return "Goodbye " + cruel + " " + world + }}, + nil, + "Message: Goodbye cruel world", + }, + { + "multiple parameters - block multi-params work", + "Message: {{#goodbye cruel world}}{{greeting}} {{adj}} {{noun}}{{/goodbye}}", + map[string]string{"cruel": "cruel", "world": "world"}, + nil, + map[string]interface{}{"goodbye": func(cruel, world string, options *raymond.Options) string { + return options.FnWith(map[string]interface{}{"greeting": "Goodbye", "adj": cruel, "noun": world}) + }}, + nil, + "Message: Goodbye cruel world", + }, + + { + "hash - helpers can take an optional hash", + `{{goodbye cruel="CRUEL" world="WORLD" times=12}}`, + nil, nil, + map[string]interface{}{"goodbye": func(options *raymond.Options) string { + return "GOODBYE " + options.HashStr("cruel") + " " + options.HashStr("world") + " " + options.HashStr("times") + " TIMES" + }}, + nil, + "GOODBYE CRUEL WORLD 12 TIMES", + }, + { + "hash - helpers can take an optional hash with booleans (1)", + `{{goodbye cruel="CRUEL" world="WORLD" print=true}}`, + nil, nil, + map[string]interface{}{"goodbye": func(options *raymond.Options) string { + p, ok := options.HashProp("print").(bool) + if ok { + if p { + return "GOODBYE " + options.HashStr("cruel") + " " + options.HashStr("world") + } else { + return "NOT PRINTING" + } + } + + return "THIS SHOULD NOT HAPPEN" + }}, + nil, + "GOODBYE CRUEL WORLD", + }, + { + "hash - helpers can take an optional hash with booleans (2)", + `{{goodbye cruel="CRUEL" world="WORLD" print=false}}`, + nil, nil, + map[string]interface{}{"goodbye": func(options *raymond.Options) string { + p, ok := options.HashProp("print").(bool) + if ok { + if p { + return "GOODBYE " + options.HashStr("cruel") + " " + options.HashStr("world") + } else { + return "NOT PRINTING" + } + } + + return "THIS SHOULD NOT HAPPEN" + }}, + nil, + "NOT PRINTING", + }, + { + "block helpers can take an optional hash", + `{{#goodbye cruel="CRUEL" times=12}}world{{/goodbye}}`, + nil, nil, + map[string]interface{}{"goodbye": func(options *raymond.Options) string { + return "GOODBYE " + options.HashStr("cruel") + " " + options.Fn() + " " + options.HashStr("times") + " TIMES" + }}, + nil, + "GOODBYE CRUEL world 12 TIMES", + }, + { + "block helpers can take an optional hash with single quoted stings", + `{{#goodbye cruel='CRUEL' times=12}}world{{/goodbye}}`, + nil, nil, + map[string]interface{}{"goodbye": func(options *raymond.Options) string { + return "GOODBYE " + options.HashStr("cruel") + " " + options.Fn() + " " + options.HashStr("times") + " TIMES" + }}, + nil, + "GOODBYE CRUEL world 12 TIMES", + }, + { + "block helpers can take an optional hash with booleans (1)", + `{{#goodbye cruel="CRUEL" print=true}}world{{/goodbye}}`, + nil, nil, + 
map[string]interface{}{"goodbye": func(options *raymond.Options) string { + p, ok := options.HashProp("print").(bool) + if ok { + if p { + return "GOODBYE " + options.HashStr("cruel") + " " + options.Fn() + } else { + return "NOT PRINTING" + } + } + + return "THIS SHOULD NOT HAPPEN" + }}, + nil, + "GOODBYE CRUEL world", + }, + { + "block helpers can take an optional hash with booleans (1)", + `{{#goodbye cruel="CRUEL" print=false}}world{{/goodbye}}`, + nil, nil, + map[string]interface{}{"goodbye": func(options *raymond.Options) string { + p, ok := options.HashProp("print").(bool) + if ok { + if p { + return "GOODBYE " + options.HashStr("cruel") + " " + options.Fn() + } else { + return "NOT PRINTING" + } + } + + return "THIS SHOULD NOT HAPPEN" + }}, + nil, + "NOT PRINTING", + }, + + // @todo "helperMissing - if a context is not found, helperMissing is used" throw error + + // @todo "helperMissing - if a context is not found, custom helperMissing is used" + + // @todo "helperMissing - if a value is not found, custom helperMissing is used" + + { + "block helpers can take an optional hash with booleans (1)", + `{{#goodbye cruel="CRUEL" print=false}}world{{/goodbye}}`, + nil, nil, + map[string]interface{}{"goodbye": func(options *raymond.Options) string { + p, ok := options.HashProp("print").(bool) + if ok { + if p { + return "GOODBYE " + options.HashStr("cruel") + " " + options.Fn() + } else { + return "NOT PRINTING" + } + } + + return "THIS SHOULD NOT HAPPEN" + }}, + nil, + "NOT PRINTING", + }, + + // @todo "knownHelpers/knownHelpersOnly" tests + + // @todo "blockHelperMissing" tests + + // @todo "name field" tests + + { + "name conflicts - helpers take precedence over same-named context properties", + `{{goodbye}} {{cruel world}}`, + map[string]string{"goodbye": "goodbye", "world": "world"}, + nil, + map[string]interface{}{ + "goodbye": func(options *raymond.Options) string { + return strings.ToUpper(options.ValueStr("goodbye")) + }, + "cruel": func(world string) string { + return "cruel " + strings.ToUpper(world) + }, + }, + nil, + "GOODBYE cruel WORLD", + }, + { + "name conflicts - helpers take precedence over same-named context properties", + `{{#goodbye}} {{cruel world}}{{/goodbye}}`, + map[string]string{"goodbye": "goodbye", "world": "world"}, + nil, + map[string]interface{}{ + "goodbye": func(options *raymond.Options) string { + return strings.ToUpper(options.ValueStr("goodbye")) + options.Fn() + }, + "cruel": func(world string) string { + return "cruel " + strings.ToUpper(world) + }, + }, + nil, + "GOODBYE cruel WORLD", + }, + { + "name conflicts - Scoped names take precedence over helpers", + `{{this.goodbye}} {{cruel world}} {{cruel this.goodbye}}`, + map[string]string{"goodbye": "goodbye", "world": "world"}, + nil, + map[string]interface{}{ + "goodbye": func(options *raymond.Options) string { + return strings.ToUpper(options.ValueStr("goodbye")) + }, + "cruel": func(world string) string { + return "cruel " + strings.ToUpper(world) + }, + }, + nil, + "goodbye cruel WORLD cruel GOODBYE", + }, + { + "name conflicts - Scoped names take precedence over block helpers", + `{{#goodbye}} {{cruel world}}{{/goodbye}} {{this.goodbye}}`, + map[string]string{"goodbye": "goodbye", "world": "world"}, + nil, + map[string]interface{}{ + "goodbye": func(options *raymond.Options) string { + return strings.ToUpper(options.ValueStr("goodbye")) + options.Fn() + }, + "cruel": func(world string) string { + return "cruel " + strings.ToUpper(world) + }, + }, + nil, + "GOODBYE cruel WORLD goodbye", + }, + + 
// @todo "block params" tests +} + +func TestHelpers(t *testing.T) { + launchTests(t, helpersTests) +} diff --git a/vendor/github.com/aymerick/raymond/handlebars/partials_test.go b/vendor/github.com/aymerick/raymond/handlebars/partials_test.go new file mode 100644 index 0000000..0c26662 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/handlebars/partials_test.go @@ -0,0 +1,182 @@ +package handlebars + +import "testing" + +// +// Those tests come from: +// https://github.com/wycats/handlebars.js/blob/master/spec/partials.js +// +var partialsTests = []Test{ + { + "basic partials", + "Dudes: {{#dudes}}{{> dude}}{{/dudes}}", + map[string]interface{}{"dudes": []map[string]string{{"name": "Yehuda", "url": "http://yehuda"}, {"name": "Alan", "url": "http://alan"}}}, + nil, nil, + map[string]string{"dude": "{{name}} ({{url}}) "}, + "Dudes: Yehuda (http://yehuda) Alan (http://alan) ", + }, + { + "dynamic partials", + "Dudes: {{#dudes}}{{> (partial)}}{{/dudes}}", + map[string]interface{}{"dudes": []map[string]string{{"name": "Yehuda", "url": "http://yehuda"}, {"name": "Alan", "url": "http://alan"}}}, + nil, + map[string]interface{}{"partial": func() string { + return "dude" + }}, + map[string]string{"dude": "{{name}} ({{url}}) "}, + "Dudes: Yehuda (http://yehuda) Alan (http://alan) ", + }, + + // @todo "failing dynamic partials" + + { + "partials with context", + "Dudes: {{>dude dudes}}", + map[string]interface{}{"dudes": []map[string]string{{"name": "Yehuda", "url": "http://yehuda"}, {"name": "Alan", "url": "http://alan"}}}, + nil, nil, + map[string]string{"dude": "{{#this}}{{name}} ({{url}}) {{/this}}"}, + "Dudes: Yehuda (http://yehuda) Alan (http://alan) ", + }, + { + "partials with undefined context", + "Dudes: {{>dude dudes}}", + map[string]interface{}{}, + nil, nil, + map[string]string{"dude": "{{foo}} Empty"}, + "Dudes: Empty", + }, + + // @todo "partials with duplicate parameters" + + { + "partials with parameters", + "Dudes: {{#dudes}}{{> dude others=..}}{{/dudes}}", + map[string]interface{}{"foo": "bar", "dudes": []map[string]string{{"name": "Yehuda", "url": "http://yehuda"}, {"name": "Alan", "url": "http://alan"}}}, + nil, nil, + map[string]string{"dude": "{{others.foo}}{{name}} ({{url}}) "}, + "Dudes: barYehuda (http://yehuda) barAlan (http://alan) ", + }, + { + "partial in a partial", + "Dudes: {{#dudes}}{{>dude}}{{/dudes}}", + map[string]interface{}{"dudes": []map[string]string{{"name": "Yehuda", "url": "http://yehuda"}, {"name": "Alan", "url": "http://alan"}}}, + nil, nil, + map[string]string{"dude": "{{name}} {{> url}} ", "url": `{{url}}`}, + `Dudes: Yehuda http://yehuda Alan http://alan `, + }, + + // @todo "rendering undefined partial throws an exception" + + // @todo "registering undefined partial throws an exception" + + // SKIP: "rendering template partial in vm mode throws an exception" + // SKIP: "rendering function partial in vm mode" + + { + "GH-14: a partial preceding a selector", + "Dudes: {{>dude}} {{anotherDude}}", + map[string]string{"name": "Jeepers", "anotherDude": "Creepers"}, + nil, nil, + map[string]string{"dude": "{{name}}"}, + "Dudes: Jeepers Creepers", + }, + { + "Partials with slash paths", + "Dudes: {{> shared/dude}}", + map[string]string{"name": "Jeepers", "anotherDude": "Creepers"}, + nil, nil, + map[string]string{"shared/dude": "{{name}}"}, + "Dudes: Jeepers", + }, + { + "Partials with slash and point paths", + "Dudes: {{> shared/dude.thing}}", + map[string]string{"name": "Jeepers", "anotherDude": "Creepers"}, + nil, nil, + 
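+ // partial names may contain "/" and "." and are looked up verbatim, as the two tests above exercise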
map[string]string{"shared/dude.thing": "{{name}}"}, + "Dudes: Jeepers", + }, + + // @todo "Global Partials" + + // @todo "Multiple partial registration" + + { + "Partials with integer path", + "Dudes: {{> 404}}", + map[string]string{"name": "Jeepers", "anotherDude": "Creepers"}, + nil, nil, + map[string]string{"404": "{{name}}"}, // @note Difference with JS test: partial name is a string + "Dudes: Jeepers", + }, + // @note This is not supported by our implementation. But really... who cares ? + // { + // "Partials with complex path", + // "Dudes: {{> 404/asdf?.bar}}", + // map[string]string{"name": "Jeepers", "anotherDude": "Creepers"}, + // nil, nil, + // map[string]string{"404/asdf?.bar": "{{name}}"}, + // "Dudes: Jeepers", + // }, + { + "Partials with escaped", + "Dudes: {{> [+404/asdf?.bar]}}", + map[string]string{"name": "Jeepers", "anotherDude": "Creepers"}, + nil, nil, + map[string]string{"+404/asdf?.bar": "{{name}}"}, + "Dudes: Jeepers", + }, + { + "Partials with string", + "Dudes: {{> '+404/asdf?.bar'}}", + map[string]string{"name": "Jeepers", "anotherDude": "Creepers"}, + nil, nil, + map[string]string{"+404/asdf?.bar": "{{name}}"}, + "Dudes: Jeepers", + }, + { + "should handle empty partial", + "Dudes: {{#dudes}}{{> dude}}{{/dudes}}", + map[string]interface{}{"dudes": []map[string]string{{"name": "Yehuda", "url": "http://yehuda"}, {"name": "Alan", "url": "http://alan"}}}, + nil, nil, + map[string]string{"dude": ""}, + "Dudes: ", + }, + + // @todo "throw on missing partial" + + // SKIP: "should pass compiler flags" + + { + "standalone partials (1) - indented partials", + "Dudes:\n{{#dudes}}\n {{>dude}}\n{{/dudes}}", + map[string]interface{}{"dudes": []map[string]string{{"name": "Yehuda", "url": "http://yehuda"}, {"name": "Alan", "url": "http://alan"}}}, + nil, nil, + map[string]string{"dude": "{{name}}\n"}, + "Dudes:\n Yehuda\n Alan\n", + }, + { + "standalone partials (2) - nested indented partials", + "Dudes:\n{{#dudes}}\n {{>dude}}\n{{/dudes}}", + map[string]interface{}{"dudes": []map[string]string{{"name": "Yehuda", "url": "http://yehuda"}, {"name": "Alan", "url": "http://alan"}}}, + nil, nil, + map[string]string{"dude": "{{name}}\n {{> url}}", "url": "{{url}}!\n"}, + "Dudes:\n Yehuda\n http://yehuda!\n Alan\n http://alan!\n", + }, + + // // @todo preventIndent option + // { + // "standalone partials (3) - prevent nested indented partials", + // "Dudes:\n{{#dudes}}\n {{>dude}}\n{{/dudes}}", + // map[string]interface{}{"dudes": []map[string]string{{"name": "Yehuda", "url": "http://yehuda"}, {"name": "Alan", "url": "http://alan"}}}, + // nil, nil, + // map[string]string{"dude": "{{name}}\n {{> url}}", "url": "{{url}}!\n"}, + // "Dudes:\n Yehuda\n http://yehuda!\n Alan\n http://alan!\n", + // }, + + // @todo "compat mode" +} + +func TestPartials(t *testing.T) { + launchTests(t, partialsTests) +} diff --git a/vendor/github.com/aymerick/raymond/handlebars/subexpressions_test.go b/vendor/github.com/aymerick/raymond/handlebars/subexpressions_test.go new file mode 100644 index 0000000..31a12a7 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/handlebars/subexpressions_test.go @@ -0,0 +1,209 @@ +package handlebars + +import ( + "testing" + + "github.com/aymerick/raymond" +) + +// +// Those tests come from: +// https://github.com/wycats/handlebars.js/blob/master/spec/subexpression.js +// +var subexpressionsTests = []Test{ + { + "arg-less helper", + "{{foo (bar)}}!", + map[string]interface{}{}, + nil, + map[string]interface{}{ + "foo": func(val string) string { + return val + val + 
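+ // in "{{foo (bar)}}" the subexpression (bar) is evaluated first; its result ("LOL") becomes foo's argument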
}, + "bar": func() string { + return "LOL" + }, + }, + nil, + "LOLLOL!", + }, + { + "helper w args", + "{{blog (equal a b)}}", + map[string]interface{}{"bar": "LOL"}, + nil, + map[string]interface{}{ + "blog": blogHelper, + "equal": equalHelper, + }, + nil, + "val is true", + }, + { + "mixed paths and helpers", + "{{blog baz.bat (equal a b) baz.bar}}", + map[string]interface{}{"bar": "LOL", "baz": map[string]string{"bat": "foo!", "bar": "bar!"}}, + nil, + map[string]interface{}{ + "blog": func(p, p2, p3 string) string { + return "val is " + p + ", " + p2 + " and " + p3 + }, + "equal": equalHelper, + }, + nil, + "val is foo!, true and bar!", + }, + { + "supports much nesting", + "{{blog (equal (equal true true) true)}}", + map[string]interface{}{"bar": "LOL"}, + nil, + map[string]interface{}{ + "blog": blogHelper, + "equal": equalHelper, + }, + nil, + "val is true", + }, + + { + "GH-800 : Complex subexpressions (1)", + "{{dash 'abc' (concat a b)}}", + map[string]interface{}{"a": "a", "b": "b", "c": map[string]string{"c": "c"}, "d": "d", "e": map[string]string{"e": "e"}}, + nil, + map[string]interface{}{"dash": dashHelper, "concat": concatHelper}, + nil, + "abc-ab", + }, + { + "GH-800 : Complex subexpressions (2)", + "{{dash d (concat a b)}}", + map[string]interface{}{"a": "a", "b": "b", "c": map[string]string{"c": "c"}, "d": "d", "e": map[string]string{"e": "e"}}, + nil, + map[string]interface{}{"dash": dashHelper, "concat": concatHelper}, + nil, + "d-ab", + }, + { + "GH-800 : Complex subexpressions (3)", + "{{dash c.c (concat a b)}}", + map[string]interface{}{"a": "a", "b": "b", "c": map[string]string{"c": "c"}, "d": "d", "e": map[string]string{"e": "e"}}, + nil, + map[string]interface{}{"dash": dashHelper, "concat": concatHelper}, + nil, + "c-ab", + }, + { + "GH-800 : Complex subexpressions (4)", + "{{dash (concat a b) c.c}}", + map[string]interface{}{"a": "a", "b": "b", "c": map[string]string{"c": "c"}, "d": "d", "e": map[string]string{"e": "e"}}, + nil, + map[string]interface{}{"dash": dashHelper, "concat": concatHelper}, + nil, + "ab-c", + }, + { + "GH-800 : Complex subexpressions (5)", + "{{dash (concat a e.e) c.c}}", + map[string]interface{}{"a": "a", "b": "b", "c": map[string]string{"c": "c"}, "d": "d", "e": map[string]string{"e": "e"}}, + nil, + map[string]interface{}{"dash": dashHelper, "concat": concatHelper}, + nil, + "ae-c", + }, + + { + // note: test not relevant + "provides each nested helper invocation its own options hash", + "{{equal (equal true true) true}}", + map[string]interface{}{}, + nil, + map[string]interface{}{ + "equal": equalHelper, + }, + nil, + "true", + }, + { + "with hashes", + "{{blog (equal (equal true true) true fun='yes')}}", + map[string]interface{}{"bar": "LOL"}, + nil, + map[string]interface{}{ + "blog": blogHelper, + "equal": equalHelper, + }, + nil, + "val is true", + }, + { + "as hashes", + "{{blog fun=(equal (blog fun=1) 'val is 1')}}", + map[string]interface{}{}, + nil, + map[string]interface{}{ + "blog": func(options *raymond.Options) string { + return "val is " + options.HashStr("fun") + }, + "equal": equalHelper, + }, + nil, + "val is true", + }, + { + "multiple subexpressions in a hash", + `{{input aria-label=(t "Name") placeholder=(t "Example User")}}`, + map[string]interface{}{}, + nil, + map[string]interface{}{ + "input": func(options *raymond.Options) raymond.SafeString { + return raymond.SafeString(``) + }, + "t": func(param string) raymond.SafeString { + return raymond.SafeString(param) + }, + }, + nil, + ``, + }, + { + "multiple 
subexpressions in a hash with context", + `{{input aria-label=(t item.field) placeholder=(t item.placeholder)}}`, + map[string]map[string]string{"item": {"field": "Name", "placeholder": "Example User"}}, + nil, + map[string]interface{}{ + "input": func(options *raymond.Options) raymond.SafeString { + return raymond.SafeString(``) + }, + "t": func(param string) raymond.SafeString { + return raymond.SafeString(param) + }, + }, + nil, + ``, + }, + + // @todo "in string params mode" + + // @todo "as hashes in string params mode" + + { + "subexpression functions on the context", + "{{foo (bar)}}!", + map[string]interface{}{"bar": func() string { return "LOL" }}, + nil, + map[string]interface{}{ + "foo": func(val string) string { + return val + val + }, + }, + nil, + "LOLLOL!", + }, + + // @todo "subexpressions can't just be property lookups" should raise error +} + +func TestSubexpressions(t *testing.T) { + launchTests(t, subexpressionsTests) +} diff --git a/vendor/github.com/aymerick/raymond/handlebars/whitespace_test.go b/vendor/github.com/aymerick/raymond/handlebars/whitespace_test.go new file mode 100644 index 0000000..f11e16e --- /dev/null +++ b/vendor/github.com/aymerick/raymond/handlebars/whitespace_test.go @@ -0,0 +1,259 @@ +package handlebars + +import "testing" + +// +// Those tests come from: +// https://github.com/wycats/handlebars.js/blob/master/spec/whitespace-control.js +// +var whitespaceControlTests = []Test{ + { + "should strip whitespace around mustache calls (1)", + " {{~foo~}} ", + map[string]string{"foo": "bar<"}, + nil, nil, nil, + "bar<", + }, + { + "should strip whitespace around mustache calls (2)", + " {{~foo}} ", + map[string]string{"foo": "bar<"}, + nil, nil, nil, + "bar< ", + }, + { + "should strip whitespace around mustache calls (3)", + " {{foo~}} ", + map[string]string{"foo": "bar<"}, + nil, nil, nil, + " bar<", + }, + { + "should strip whitespace around mustache calls (4)", + " {{~&foo~}} ", + map[string]string{"foo": "bar<"}, + nil, nil, nil, + "bar<", + }, + { + "should strip whitespace around mustache calls (5)", + " {{~{foo}~}} ", + map[string]string{"foo": "bar<"}, + nil, nil, nil, + "bar<", + }, + { + "should strip whitespace around mustache calls (6)", + "1\n{{foo~}} \n\n 23\n{{bar}}4", + nil, nil, nil, nil, + "1\n23\n4", + }, + + { + "blocks - should strip whitespace around simple block calls (1)", + " {{~#if foo~}} bar {{~/if~}} ", + map[string]string{"foo": "bar<"}, + nil, nil, nil, + "bar", + }, + { + "blocks - should strip whitespace around simple block calls (2)", + " {{#if foo~}} bar {{/if~}} ", + map[string]string{"foo": "bar<"}, + nil, nil, nil, + " bar ", + }, + { + "blocks - should strip whitespace around simple block calls (3)", + " {{~#if foo}} bar {{~/if}} ", + map[string]string{"foo": "bar<"}, + nil, nil, nil, + " bar ", + }, + { + "blocks - should strip whitespace around simple block calls (4)", + " {{#if foo}} bar {{/if}} ", + map[string]string{"foo": "bar<"}, + nil, nil, nil, + " bar ", + }, + { + "blocks - should strip whitespace around simple block calls (5)", + " \n\n{{~#if foo~}} \n\nbar \n\n{{~/if~}}\n\n ", + map[string]string{"foo": "bar<"}, + nil, nil, nil, + "bar", + }, + { + "blocks - should strip whitespace around simple block calls (6)", + " a\n\n{{~#if foo~}} \n\nbar \n\n{{~/if~}}\n\na ", + map[string]string{"foo": "bar<"}, + nil, nil, nil, + " abara ", + }, + + { + "should strip whitespace around inverse block calls (1)", + " {{~^if foo~}} bar {{~/if~}} ", + nil, nil, nil, nil, + "bar", + }, + { + "should strip whitespace 
around inverse block calls (2)", + " {{^if foo~}} bar {{/if~}} ", + nil, nil, nil, nil, + " bar ", + }, + { + "should strip whitespace around inverse block calls (3)", + " {{~^if foo}} bar {{~/if}} ", + nil, nil, nil, nil, + " bar ", + }, + { + "should strip whitespace around inverse block calls (4)", + " {{^if foo}} bar {{/if}} ", + nil, nil, nil, nil, + " bar ", + }, + { + "should strip whitespace around inverse block calls (5)", + " \n\n{{~^if foo~}} \n\nbar \n\n{{~/if~}}\n\n ", + nil, nil, nil, nil, + "bar", + }, + + { + "should strip whitespace around complex block calls (1)", + "{{#if foo~}} bar {{~^~}} baz {{~/if}}", + map[string]string{"foo": "bar<"}, + nil, nil, nil, + "bar", + }, + { + "should strip whitespace around complex block calls (2)", + "{{#if foo~}} bar {{^~}} baz {{/if}}", + map[string]string{"foo": "bar<"}, + nil, nil, nil, + "bar ", + }, + { + "should strip whitespace around complex block calls (3)", + "{{#if foo}} bar {{~^~}} baz {{~/if}}", + map[string]string{"foo": "bar<"}, + nil, nil, nil, + " bar", + }, + { + "should strip whitespace around complex block calls (4)", + "{{#if foo}} bar {{^~}} baz {{/if}}", + map[string]string{"foo": "bar<"}, + nil, nil, nil, + " bar ", + }, + { + "should strip whitespace around complex block calls (5)", + "{{#if foo~}} bar {{~else~}} baz {{~/if}}", + map[string]string{"foo": "bar<"}, + nil, nil, nil, + "bar", + }, + { + "should strip whitespace around complex block calls (6)", + "\n\n{{~#if foo~}} \n\nbar \n\n{{~^~}} \n\nbaz \n\n{{~/if~}}\n\n", + map[string]string{"foo": "bar<"}, + nil, nil, nil, + "bar", + }, + { + "should strip whitespace around complex block calls (7)", + "\n\n{{~#if foo~}} \n\n{{{foo}}} \n\n{{~^~}} \n\nbaz \n\n{{~/if~}}\n\n", + map[string]string{"foo": "bar<"}, + nil, nil, nil, + "bar<", + }, + { + "should strip whitespace around complex block calls (8)", + "{{#if foo~}} bar {{~^~}} baz {{~/if}}", + nil, nil, nil, nil, + "baz", + }, + { + "should strip whitespace around complex block calls (9)", + "{{#if foo}} bar {{~^~}} baz {{/if}}", + nil, nil, nil, nil, + "baz ", + }, + { + "should strip whitespace around complex block calls (10)", + "{{#if foo~}} bar {{~^}} baz {{~/if}}", + nil, nil, nil, nil, + " baz", + }, + { + "should strip whitespace around complex block calls (11)", + "{{#if foo~}} bar {{~^}} baz {{/if}}", + nil, nil, nil, nil, + " baz ", + }, + { + "should strip whitespace around complex block calls (12)", + "{{#if foo~}} bar {{~else~}} baz {{~/if}}", + nil, nil, nil, nil, + "baz", + }, + { + "should strip whitespace around complex block calls (13)", + "\n\n{{~#if foo~}} \n\nbar \n\n{{~^~}} \n\nbaz \n\n{{~/if~}}\n\n", + nil, nil, nil, nil, + "baz", + }, + + { + "should strip whitespace around partials (1)", + "foo {{~> dude~}} ", + nil, nil, nil, + map[string]string{"dude": "bar"}, + "foobar", + }, + { + "should strip whitespace around partials (2)", + "foo {{> dude~}} ", + nil, nil, nil, + map[string]string{"dude": "bar"}, + "foo bar", + }, + { + "should strip whitespace around partials (3)", + "foo {{> dude}} ", + nil, nil, nil, + map[string]string{"dude": "bar"}, + "foo bar ", + }, + { + "should strip whitespace around partials (4)", + "foo\n {{~> dude}} ", + nil, nil, nil, + map[string]string{"dude": "bar"}, + "foobar", + }, + { + "should strip whitespace around partials (5)", + "foo\n {{> dude}} ", + nil, nil, nil, + map[string]string{"dude": "bar"}, + "foo\n bar", + }, + + { + "should only strip whitespace once", + " {{~foo~}} {{foo}} {{foo}} ", + map[string]string{"foo": "bar"}, + nil, nil, 
nil, + "barbar bar ", + }, +} + +func TestWhitespaceControl(t *testing.T) { + launchTests(t, whitespaceControlTests) +} diff --git a/vendor/github.com/aymerick/raymond/helper.go b/vendor/github.com/aymerick/raymond/helper.go new file mode 100644 index 0000000..eb766c3 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/helper.go @@ -0,0 +1,371 @@ +package raymond + +import ( + "fmt" + "log" + "reflect" + "sync" +) + +// Options represents the options argument provided to helpers and context functions. +type Options struct { + // evaluation visitor + eval *evalVisitor + + // params + params []interface{} + hash map[string]interface{} +} + +// helpers stores all globally registered helpers +var helpers = make(map[string]reflect.Value) + +// protects global helpers +var helpersMutex sync.RWMutex + +func init() { + // register builtin helpers + RegisterHelper("if", ifHelper) + RegisterHelper("unless", unlessHelper) + RegisterHelper("with", withHelper) + RegisterHelper("each", eachHelper) + RegisterHelper("log", logHelper) + RegisterHelper("lookup", lookupHelper) +} + +// RegisterHelper registers a global helper. That helper will be available to all templates. +func RegisterHelper(name string, helper interface{}) { + helpersMutex.Lock() + defer helpersMutex.Unlock() + + if helpers[name] != zero { + panic(fmt.Errorf("Helper already registered: %s", name)) + } + + val := reflect.ValueOf(helper) + ensureValidHelper(name, val) + + helpers[name] = val +} + +// RegisterHelpers registers several global helpers. Those helpers will be available to all templates. +func RegisterHelpers(helpers map[string]interface{}) { + for name, helper := range helpers { + RegisterHelper(name, helper) + } +} + +// ensureValidHelper panics if given helper is not valid +func ensureValidHelper(name string, funcValue reflect.Value) { + if funcValue.Kind() != reflect.Func { + panic(fmt.Errorf("Helper must be a function: %s", name)) + } + + funcType := funcValue.Type() + + if funcType.NumOut() != 1 { + panic(fmt.Errorf("Helper function must return a string or a SafeString: %s", name)) + } + + // @todo Check if first returned value is a string, SafeString or interface{} ? +} + +// findHelper finds a globally registered helper +func findHelper(name string) reflect.Value { + helpersMutex.RLock() + defer helpersMutex.RUnlock() + + return helpers[name] +} + +// newOptions instanciates a new Options +func newOptions(eval *evalVisitor, params []interface{}, hash map[string]interface{}) *Options { + return &Options{ + eval: eval, + params: params, + hash: hash, + } +} + +// newEmptyOptions instanciates a new empty Options +func newEmptyOptions(eval *evalVisitor) *Options { + return &Options{ + eval: eval, + hash: make(map[string]interface{}), + } +} + +// +// Context Values +// + +// Value returns field value from current context. +func (options *Options) Value(name string) interface{} { + value := options.eval.evalField(options.eval.curCtx(), name, false) + if !value.IsValid() { + return nil + } + + return value.Interface() +} + +// ValueStr returns string representation of field value from current context. +func (options *Options) ValueStr(name string) string { + return Str(options.Value(name)) +} + +// Ctx returns current evaluation context. +func (options *Options) Ctx() interface{} { + return options.eval.curCtx() +} + +// +// Hash Arguments +// + +// HashProp returns hash property. 
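+//
+// For illustration, a hypothetical "link" helper (name and markup are made up
+// for this sketch), invoked as {{link "Home" class="nav"}}, could read its
+// hash argument like this:
+//
+//	func linkHelper(text string, options *Options) string {
+//		// raw hash value; comma-ok keeps class empty if the key is absent or not a string
+//		class, _ := options.HashProp("class").(string)
+//		return `<a class="` + class + `">` + text + `</a>`
+//	}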
+func (options *Options) HashProp(name string) interface{} { + return options.hash[name] +} + +// HashStr returns string representation of hash property. +func (options *Options) HashStr(name string) string { + return Str(options.hash[name]) +} + +// Hash returns entire hash. +func (options *Options) Hash() map[string]interface{} { + return options.hash +} + +// +// Parameters +// + +// Param returns parameter at given position. +func (options *Options) Param(pos int) interface{} { + if len(options.params) > pos { + return options.params[pos] + } else { + return nil + } +} + +// ParamStr returns string representation of parameter at given position. +func (options *Options) ParamStr(pos int) string { + return Str(options.Param(pos)) +} + +// Params returns all parameters. +func (options *Options) Params() []interface{} { + return options.params +} + +// +// Private data +// + +// Data returns private data value. +func (options *Options) Data(name string) interface{} { + return options.eval.dataFrame.Get(name) +} + +// DataStr returns string representation of private data value. +func (options *Options) DataStr(name string) string { + return Str(options.eval.dataFrame.Get(name)) +} + +// DataFrame returns current private data frame. +func (options *Options) DataFrame() *DataFrame { + return options.eval.dataFrame +} + +// NewDataFrame instanciates a new data frame that is a copy of current evaluation data frame. +// +// Parent of returned data frame is set to current evaluation data frame. +func (options *Options) NewDataFrame() *DataFrame { + return options.eval.dataFrame.Copy() +} + +// newIterDataFrame instanciates a new data frame and set iteration specific vars +func (options *Options) newIterDataFrame(length int, i int, key interface{}) *DataFrame { + return options.eval.dataFrame.newIterDataFrame(length, i, key) +} + +// +// Evaluation +// + +// evalBlock evaluates block with given context, private data and iteration key +func (options *Options) evalBlock(ctx interface{}, data *DataFrame, key interface{}) string { + result := "" + + if block := options.eval.curBlock(); (block != nil) && (block.Program != nil) { + result = options.eval.evalProgram(block.Program, ctx, data, key) + } + + return result +} + +// Fn evaluates block with current evaluation context. +func (options *Options) Fn() string { + return options.evalBlock(nil, nil, nil) +} + +// FnCtxData evaluates block with given context and private data frame. +func (options *Options) FnCtxData(ctx interface{}, data *DataFrame) string { + return options.evalBlock(ctx, data, nil) +} + +// FnWith evaluates block with given context. +func (options *Options) FnWith(ctx interface{}) string { + return options.evalBlock(ctx, nil, nil) +} + +// FnData evaluates block with given private data frame. +func (options *Options) FnData(data *DataFrame) string { + return options.evalBlock(nil, data, nil) +} + +// Inverse evaluates "else block". +func (options *Options) Inverse() string { + result := "" + if block := options.eval.curBlock(); (block != nil) && (block.Inverse != nil) { + result, _ = block.Inverse.Accept(options.eval).(string) + } + + return result +} + +// Eval evaluates field for given context. 
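+//
+// It returns nil when ctx is nil, when field is empty, or when the field
+// cannot be resolved on ctx. The built-in lookup helper defined below, for
+// instance, is little more than:
+//
+//	func lookupHelper(obj interface{}, field string, options *Options) interface{} {
+//		return Str(options.Eval(obj, field))
+//	}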
+func (options *Options) Eval(ctx interface{}, field string) interface{} { + if ctx == nil { + return nil + } + + if field == "" { + return nil + } + + val := options.eval.evalField(reflect.ValueOf(ctx), field, false) + if !val.IsValid() { + return nil + } + + return val.Interface() +} + +// +// Misc +// + +// isIncludableZero returns true if 'includeZero' option is set and first param is the number 0 +func (options *Options) isIncludableZero() bool { + b, ok := options.HashProp("includeZero").(bool) + if ok && b { + nb, ok := options.Param(0).(int) + if ok && nb == 0 { + return true + } + } + + return false +} + +// +// Builtin helpers +// + +// #if block helper +func ifHelper(conditional interface{}, options *Options) interface{} { + if options.isIncludableZero() || IsTrue(conditional) { + return options.Fn() + } + + return options.Inverse() +} + +// #unless block helper +func unlessHelper(conditional interface{}, options *Options) interface{} { + if options.isIncludableZero() || IsTrue(conditional) { + return options.Inverse() + } + + return options.Fn() +} + +// #with block helper +func withHelper(context interface{}, options *Options) interface{} { + if IsTrue(context) { + return options.FnWith(context) + } + + return options.Inverse() +} + +// #each block helper +func eachHelper(context interface{}, options *Options) interface{} { + if !IsTrue(context) { + return options.Inverse() + } + + result := "" + + val := reflect.ValueOf(context) + switch val.Kind() { + case reflect.Array, reflect.Slice: + for i := 0; i < val.Len(); i++ { + // computes private data + data := options.newIterDataFrame(val.Len(), i, nil) + + // evaluates block + result += options.evalBlock(val.Index(i).Interface(), data, i) + } + case reflect.Map: + // note: a go hash is not ordered, so result may vary, this behaviour differs from the JS implementation + keys := val.MapKeys() + for i := 0; i < len(keys); i++ { + key := keys[i].Interface() + ctx := val.MapIndex(keys[i]).Interface() + + // computes private data + data := options.newIterDataFrame(len(keys), i, key) + + // evaluates block + result += options.evalBlock(ctx, data, key) + } + case reflect.Struct: + var exportedFields []int + + // collect exported fields only + for i := 0; i < val.NumField(); i++ { + if tField := val.Type().Field(i); tField.PkgPath == "" { + exportedFields = append(exportedFields, i) + } + } + + for i, fieldIndex := range exportedFields { + key := val.Type().Field(fieldIndex).Name + ctx := val.Field(fieldIndex).Interface() + + // computes private data + data := options.newIterDataFrame(len(exportedFields), i, key) + + // evaluates block + result += options.evalBlock(ctx, data, key) + } + } + + return result +} + +// #log helper +func logHelper(message string) interface{} { + log.Print(message) + return "" +} + +// #lookup helper +func lookupHelper(obj interface{}, field string, options *Options) interface{} { + return Str(options.Eval(obj, field)) +} diff --git a/vendor/github.com/aymerick/raymond/helper_test.go b/vendor/github.com/aymerick/raymond/helper_test.go new file mode 100644 index 0000000..ccfab9b --- /dev/null +++ b/vendor/github.com/aymerick/raymond/helper_test.go @@ -0,0 +1,165 @@ +package raymond + +import "testing" + +const ( + VERBOSE = false +) + +// +// Helpers +// + +func barHelper(options *Options) string { return "bar" } + +func echoHelper(str string, nb int) string { + result := "" + for i := 0; i < nb; i++ { + result += str + } + + return result +} + +func boolHelper(b bool) string { + if b { + return "yes it is" + 
} + + return "absolutely not" +} + +func gnakHelper(nb int) string { + result := "" + for i := 0; i < nb; i++ { + result += "GnAK!" + } + + return result +} + +// +// Tests +// + +var helperTests = []Test{ + { + "simple helper", + `{{foo}}`, + nil, nil, + map[string]interface{}{"foo": barHelper}, + nil, + `bar`, + }, + { + "helper with literal string param", + `{{echo "foo" 1}}`, + nil, nil, + map[string]interface{}{"echo": echoHelper}, + nil, + `foo`, + }, + { + "helper with identifier param", + `{{echo foo 1}}`, + map[string]interface{}{"foo": "bar"}, + nil, + map[string]interface{}{"echo": echoHelper}, + nil, + `bar`, + }, + { + "helper with literal boolean param", + `{{bool true}}`, + nil, nil, + map[string]interface{}{"bool": boolHelper}, + nil, + `yes it is`, + }, + { + "helper with literal boolean param", + `{{bool false}}`, + nil, nil, + map[string]interface{}{"bool": boolHelper}, + nil, + `absolutely not`, + }, + { + "helper with literal boolean param", + `{{gnak 5}}`, + nil, nil, + map[string]interface{}{"gnak": gnakHelper}, + nil, + `GnAK!GnAK!GnAK!GnAK!GnAK!`, + }, + { + "helper with several parameters", + `{{echo "GnAK!" 3}}`, + nil, nil, + map[string]interface{}{"echo": echoHelper}, + nil, + `GnAK!GnAK!GnAK!`, + }, + { + "#if helper with true literal", + `{{#if true}}YES MAN{{/if}}`, + nil, nil, nil, nil, + `YES MAN`, + }, + { + "#if helper with false literal", + `{{#if false}}YES MAN{{/if}}`, + nil, nil, nil, nil, + ``, + }, + { + "#if helper with truthy identifier", + `{{#if ok}}YES MAN{{/if}}`, + map[string]interface{}{"ok": true}, + nil, nil, nil, + `YES MAN`, + }, + { + "#if helper with falsy identifier", + `{{#if ok}}YES MAN{{/if}}`, + map[string]interface{}{"ok": false}, + nil, nil, nil, + ``, + }, + { + "#unless helper with true literal", + `{{#unless true}}YES MAN{{/unless}}`, + nil, nil, nil, nil, + ``, + }, + { + "#unless helper with false literal", + `{{#unless false}}YES MAN{{/unless}}`, + nil, nil, nil, nil, + `YES MAN`, + }, + { + "#unless helper with truthy identifier", + `{{#unless ok}}YES MAN{{/unless}}`, + map[string]interface{}{"ok": true}, + nil, nil, nil, + ``, + }, + { + "#unless helper with falsy identifier", + `{{#unless ok}}YES MAN{{/unless}}`, + map[string]interface{}{"ok": false}, + nil, nil, nil, + `YES MAN`, + }, +} + +// +// Let's go +// + +func TestHelper(t *testing.T) { + t.Parallel() + + launchTests(t, helperTests) +} diff --git a/vendor/github.com/aymerick/raymond/lexer/lexer.go b/vendor/github.com/aymerick/raymond/lexer/lexer.go new file mode 100644 index 0000000..3c43959 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/lexer/lexer.go @@ -0,0 +1,650 @@ +// Package lexer provides a handlebars tokenizer. +package lexer + +import ( + "fmt" + "regexp" + "strings" + "unicode" + "unicode/utf8" +) + +// References: +// - https://github.com/wycats/handlebars.js/blob/master/src/handlebars.l +// - https://github.com/golang/go/blob/master/src/text/template/parse/lex.go + +const ( + // Mustaches detection + ESCAPED_ESCAPED_OPEN_MUSTACHE = "\\\\{{" + ESCAPED_OPEN_MUSTACHE = "\\{{" + OPEN_MUSTACHE = "{{" + CLOSE_MUSTACHE = "}}" + CLOSE_STRIP_MUSTACHE = "~}}" + CLOSE_UNESCAPED_STRIP_MUSTACHE = "}~}}" +) + +const eof = -1 + +// lexFunc represents a function that returns the next lexer function. +type lexFunc func(*Lexer) lexFunc + +// Lexer is a lexical analyzer. 
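+//
+// Typical usage, sketched after the Collect function below: scan an input and
+// consume tokens until TokenEOF or TokenError is reached:
+//
+//	lex := Scan("{{foo}}")
+//	for {
+//		tok := lex.NextToken()
+//		if tok.Kind == TokenEOF || tok.Kind == TokenError {
+//			break
+//		}
+//		// handle tok
+//	}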
+type Lexer struct { + input string // input to scan + name string // lexer name, used for testing purpose + tokens chan Token // channel of scanned tokens + nextFunc lexFunc // the next function to execute + + pos int // current byte position in input string + line int // current line position in input string + width int // size of last rune scanned from input string + start int // start position of the token we are scanning + + // the shameful contextual properties needed because `nextFunc` is not enough + closeComment *regexp.Regexp // regexp to scan close of current comment + rawBlock bool // are we parsing a raw block content ? +} + +var ( + lookheadChars = `[\s` + regexp.QuoteMeta("=~}/)|") + `]` + literalLookheadChars = `[\s` + regexp.QuoteMeta("~})") + `]` + + // characters not allowed in an identifier + unallowedIDChars = " \n\t!\"#%&'()*+,./;<=>@[\\]^`{|}~" + + // regular expressions + rID = regexp.MustCompile(`^[^` + regexp.QuoteMeta(unallowedIDChars) + `]+`) + rDotID = regexp.MustCompile(`^\.` + lookheadChars) + rTrue = regexp.MustCompile(`^true` + literalLookheadChars) + rFalse = regexp.MustCompile(`^false` + literalLookheadChars) + rOpenRaw = regexp.MustCompile(`^\{\{\{\{`) + rCloseRaw = regexp.MustCompile(`^\}\}\}\}`) + rOpenEndRaw = regexp.MustCompile(`^\{\{\{\{/`) + rOpenEndRawLookAhead = regexp.MustCompile(`\{\{\{\{/`) + rOpenUnescaped = regexp.MustCompile(`^\{\{~?\{`) + rCloseUnescaped = regexp.MustCompile(`^\}~?\}\}`) + rOpenBlock = regexp.MustCompile(`^\{\{~?#`) + rOpenEndBlock = regexp.MustCompile(`^\{\{~?/`) + rOpenPartial = regexp.MustCompile(`^\{\{~?>`) + // {{^}} or {{else}} + rInverse = regexp.MustCompile(`^(\{\{~?\^\s*~?\}\}|\{\{~?\s*else\s*~?\}\})`) + rOpenInverse = regexp.MustCompile(`^\{\{~?\^`) + rOpenInverseChain = regexp.MustCompile(`^\{\{~?\s*else`) + // {{ or {{& + rOpen = regexp.MustCompile(`^\{\{~?&?`) + rClose = regexp.MustCompile(`^~?\}\}`) + rOpenBlockParams = regexp.MustCompile(`^as\s+\|`) + // {{!-- ... --}} + rOpenCommentDash = regexp.MustCompile(`^\{\{~?!--\s*`) + rCloseCommentDash = regexp.MustCompile(`^\s*--~?\}\}`) + // {{! ... }} + rOpenComment = regexp.MustCompile(`^\{\{~?!\s*`) + rCloseComment = regexp.MustCompile(`^\s*~?\}\}`) +) + +// Scan scans given input. +// +// Tokens can then be fetched sequentially thanks to NextToken() function on returned lexer. +func Scan(input string) *Lexer { + return scanWithName(input, "") +} + +// scanWithName scans given input, with a name used for testing +// +// Tokens can then be fetched sequentially thanks to NextToken() function on returned lexer. +func scanWithName(input string, name string) *Lexer { + result := &Lexer{ + input: input, + name: name, + tokens: make(chan Token), + line: 1, + } + + go result.run() + + return result +} + +// Collect scans and collect all tokens. +// +// This should be used for debugging purpose only. You should use Scan() and lexer.NextToken() functions instead. +func Collect(input string) []Token { + var result []Token + + l := Scan(input) + for { + token := l.NextToken() + result = append(result, token) + + if token.Kind == TokenEOF || token.Kind == TokenError { + break + } + } + + return result +} + +// NextToken returns the next scanned token. +func (l *Lexer) NextToken() Token { + result := <-l.tokens + + return result +} + +// Pos returns the current byte position. +func (l *Lexer) Pos() int { + return l.pos +} + +// Line returns the current line number. 
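+//
+// Pos and Line describe where the lexer currently is in the input; the
+// position of each individual token is carried on the emitted Token itself
+// (its Pos and Line fields).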
+func (l *Lexer) Line() int { + return l.line +} + +// run starts lexical analysis +func (l *Lexer) run() { + for l.nextFunc = lexContent; l.nextFunc != nil; { + l.nextFunc = l.nextFunc(l) + } +} + +// next returns next character from input, or eof of there is nothing left to scan +func (l *Lexer) next() rune { + if l.pos >= len(l.input) { + l.width = 0 + return eof + } + + r, w := utf8.DecodeRuneInString(l.input[l.pos:]) + l.width = w + l.pos += l.width + + return r +} + +func (l *Lexer) produce(kind TokenKind, val string) { + l.tokens <- Token{kind, val, l.start, l.line} + + // scanning a new token + l.start = l.pos + + // update line number + l.line += strings.Count(val, "\n") +} + +// emit emits a new scanned token +func (l *Lexer) emit(kind TokenKind) { + l.produce(kind, l.input[l.start:l.pos]) +} + +// emitContent emits scanned content +func (l *Lexer) emitContent() { + if l.pos > l.start { + l.emit(TokenContent) + } +} + +// emitString emits a scanned string +func (l *Lexer) emitString(delimiter rune) { + str := l.input[l.start:l.pos] + + // replace escaped delimiters + str = strings.Replace(str, "\\"+string(delimiter), string(delimiter), -1) + + l.produce(TokenString, str) +} + +// peek returns but does not consume the next character in the input +func (l *Lexer) peek() rune { + r := l.next() + l.backup() + return r +} + +// backup steps back one character +// +// WARNING: Can only be called once per call of next +func (l *Lexer) backup() { + l.pos -= l.width +} + +// ignoreskips all characters that have been scanned up to current position +func (l *Lexer) ignore() { + l.start = l.pos +} + +// accept scans the next character if it is included in given string +func (l *Lexer) accept(valid string) bool { + if strings.IndexRune(valid, l.next()) >= 0 { + return true + } + + l.backup() + + return false +} + +// acceptRun scans all following characters that are part of given string +func (l *Lexer) acceptRun(valid string) { + for strings.IndexRune(valid, l.next()) >= 0 { + } + + l.backup() +} + +// errorf emits an error token +func (l *Lexer) errorf(format string, args ...interface{}) lexFunc { + l.tokens <- Token{TokenError, fmt.Sprintf(format, args...), l.start, l.line} + return nil +} + +// isString returns true if content at current scanning position starts with given string +func (l *Lexer) isString(str string) bool { + return strings.HasPrefix(l.input[l.pos:], str) +} + +// findRegexp returns the first string from current scanning position that matches given regular expression +func (l *Lexer) findRegexp(r *regexp.Regexp) string { + return r.FindString(l.input[l.pos:]) +} + +// indexRegexp returns the index of the first string from current scanning position that matches given regular expression +// +// It returns -1 if not found +func (l *Lexer) indexRegexp(r *regexp.Regexp) int { + loc := r.FindStringIndex(l.input[l.pos:]) + if loc == nil { + return -1 + } else { + return loc[0] + } +} + +// lexContent scans content (ie: not between mustaches) +func lexContent(l *Lexer) lexFunc { + var next lexFunc + + if l.rawBlock { + if i := l.indexRegexp(rOpenEndRawLookAhead); i != -1 { + // {{{{/ + l.rawBlock = false + l.pos += i + + next = lexOpenMustache + } else { + return l.errorf("Unclosed raw block") + } + } else if l.isString(ESCAPED_ESCAPED_OPEN_MUSTACHE) { + // \\{{ + + // emit content with only one escaped escape + l.next() + l.emitContent() + + // ignore second escaped escape + l.next() + l.ignore() + + next = lexContent + } else if l.isString(ESCAPED_OPEN_MUSTACHE) { + // \{{ + 
next = lexEscapedOpenMustache + } else if str := l.findRegexp(rOpenCommentDash); str != "" { + // {{!-- + l.closeComment = rCloseCommentDash + + next = lexComment + } else if str := l.findRegexp(rOpenComment); str != "" { + // {{! + l.closeComment = rCloseComment + + next = lexComment + } else if l.isString(OPEN_MUSTACHE) { + // {{ + next = lexOpenMustache + } + + if next != nil { + // emit scanned content + l.emitContent() + + // scan next token + return next + } + + // scan next rune + if l.next() == eof { + // emit scanned content + l.emitContent() + + // this is over + l.emit(TokenEOF) + return nil + } + + // continue content scanning + return lexContent +} + +// lexEscapedOpenMustache scans \{{ +func lexEscapedOpenMustache(l *Lexer) lexFunc { + // ignore escape character + l.next() + l.ignore() + + // scan mustaches + for l.peek() == '{' { + l.next() + } + + return lexContent +} + +// lexOpenMustache scans {{ +func lexOpenMustache(l *Lexer) lexFunc { + var str string + var tok TokenKind + + nextFunc := lexExpression + + if str = l.findRegexp(rOpenEndRaw); str != "" { + tok = TokenOpenEndRawBlock + } else if str = l.findRegexp(rOpenRaw); str != "" { + tok = TokenOpenRawBlock + l.rawBlock = true + } else if str = l.findRegexp(rOpenUnescaped); str != "" { + tok = TokenOpenUnescaped + } else if str = l.findRegexp(rOpenBlock); str != "" { + tok = TokenOpenBlock + } else if str = l.findRegexp(rOpenEndBlock); str != "" { + tok = TokenOpenEndBlock + } else if str = l.findRegexp(rOpenPartial); str != "" { + tok = TokenOpenPartial + } else if str = l.findRegexp(rInverse); str != "" { + tok = TokenInverse + nextFunc = lexContent + } else if str = l.findRegexp(rOpenInverse); str != "" { + tok = TokenOpenInverse + } else if str = l.findRegexp(rOpenInverseChain); str != "" { + tok = TokenOpenInverseChain + } else if str = l.findRegexp(rOpen); str != "" { + tok = TokenOpen + } else { + // this is rotten + panic("Current pos MUST be an opening mustache") + } + + l.pos += len(str) + l.emit(tok) + + return nextFunc +} + +// lexCloseMustache scans }} or ~}} +func lexCloseMustache(l *Lexer) lexFunc { + var str string + var tok TokenKind + + if str = l.findRegexp(rCloseRaw); str != "" { + // }}}} + tok = TokenCloseRawBlock + } else if str = l.findRegexp(rCloseUnescaped); str != "" { + // }}} + tok = TokenCloseUnescaped + } else if str = l.findRegexp(rClose); str != "" { + // }} + tok = TokenClose + } else { + // this is rotten + panic("Current pos MUST be a closing mustache") + } + + l.pos += len(str) + l.emit(tok) + + return lexContent +} + +// lexExpression scans inside mustaches +func lexExpression(l *Lexer) lexFunc { + // search close mustache delimiter + if l.isString(CLOSE_MUSTACHE) || l.isString(CLOSE_STRIP_MUSTACHE) || l.isString(CLOSE_UNESCAPED_STRIP_MUSTACHE) { + return lexCloseMustache + } + + // search some patterns before advancing scanning position + + // "as |" + if str := l.findRegexp(rOpenBlockParams); str != "" { + l.pos += len(str) + l.emit(TokenOpenBlockParams) + return lexExpression + } + + // .. + if l.isString("..") { + l.pos += len("..") + l.emit(TokenID) + return lexExpression + } + + // . 
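+	// a standalone "." (as in "{{.}}") is emitted as a single ID token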
+ if str := l.findRegexp(rDotID); str != "" { + l.pos += len(".") + l.emit(TokenID) + return lexExpression + } + + // true + if str := l.findRegexp(rTrue); str != "" { + l.pos += len("true") + l.emit(TokenBoolean) + return lexExpression + } + + // false + if str := l.findRegexp(rFalse); str != "" { + l.pos += len("false") + l.emit(TokenBoolean) + return lexExpression + } + + // let's scan next character + switch r := l.next(); { + case r == eof: + return l.errorf("Unclosed expression") + case isIgnorable(r): + return lexIgnorable + case r == '(': + l.emit(TokenOpenSexpr) + case r == ')': + l.emit(TokenCloseSexpr) + case r == '=': + l.emit(TokenEquals) + case r == '@': + l.emit(TokenData) + case r == '"' || r == '\'': + l.backup() + return lexString + case r == '/' || r == '.': + l.emit(TokenSep) + case r == '|': + l.emit(TokenCloseBlockParams) + case r == '+' || r == '-' || (r >= '0' && r <= '9'): + l.backup() + return lexNumber + case r == '[': + return lexPathLiteral + case strings.IndexRune(unallowedIDChars, r) < 0: + l.backup() + return lexIdentifier + default: + return l.errorf("Unexpected character in expression: '%c'", r) + } + + return lexExpression +} + +// lexComment scans {{!-- or {{! +func lexComment(l *Lexer) lexFunc { + if str := l.findRegexp(l.closeComment); str != "" { + l.pos += len(str) + l.emit(TokenComment) + + return lexContent + } + + if r := l.next(); r == eof { + return l.errorf("Unclosed comment") + } + + return lexComment +} + +// lexIgnorable scans all following ignorable characters +func lexIgnorable(l *Lexer) lexFunc { + for isIgnorable(l.peek()) { + l.next() + } + l.ignore() + + return lexExpression +} + +// lexString scans a string +func lexString(l *Lexer) lexFunc { + // get string delimiter + delim := l.next() + var prev rune = 0 + + // ignore delimiter + l.ignore() + + for { + r := l.next() + if r == eof || r == '\n' { + return l.errorf("Unterminated string") + } + + if (r == delim) && (prev != '\\') { + break + } + + prev = r + } + + // remove end delimiter + l.backup() + + // emit string + l.emitString(delim) + + // skip end delimiter + l.next() + l.ignore() + + return lexExpression +} + +// lexNumber scans a number: decimal, octal, hex, float, or imaginary. This +// isn't a perfect number scanner - for instance it accepts "." and "0x0.2" +// and "089" - but when it's wrong the input is invalid and the parser (via +// strconv) will notice. +// +// NOTE: borrowed from https://github.com/golang/go/tree/master/src/text/template/parse/lex.go +func lexNumber(l *Lexer) lexFunc { + if !l.scanNumber() { + return l.errorf("bad number syntax: %q", l.input[l.start:l.pos]) + } + if sign := l.peek(); sign == '+' || sign == '-' { + // Complex: 1+2i. No spaces, must end in 'i'. + if !l.scanNumber() || l.input[l.pos-1] != 'i' { + return l.errorf("bad number syntax: %q", l.input[l.start:l.pos]) + } + l.emit(TokenNumber) + } else { + l.emit(TokenNumber) + } + return lexExpression +} + +// scanNumber scans a number +// +// NOTE: borrowed from https://github.com/golang/go/tree/master/src/text/template/parse/lex.go +func (l *Lexer) scanNumber() bool { + // Optional leading sign. + l.accept("+-") + + // Is it hex? + digits := "0123456789" + + if l.accept("0") && l.accept("xX") { + digits = "0123456789abcdefABCDEF" + } + + l.acceptRun(digits) + + if l.accept(".") { + l.acceptRun(digits) + } + + if l.accept("eE") { + l.accept("+-") + l.acceptRun("0123456789") + } + + // Is it imaginary? + l.accept("i") + + // Next thing mustn't be alphanumeric. 
+ if isAlphaNumeric(l.peek()) { + l.next() + return false + } + + return true +} + +// lexIdentifier scans an ID +func lexIdentifier(l *Lexer) lexFunc { + str := l.findRegexp(rID) + if len(str) == 0 { + // this is rotten + panic("Identifier expected") + } + + l.pos += len(str) + l.emit(TokenID) + + return lexExpression +} + +// lexPathLiteral scans an [ID] +func lexPathLiteral(l *Lexer) lexFunc { + for { + r := l.next() + if r == eof || r == '\n' { + return l.errorf("Unterminated path literal") + } + + if r == ']' { + break + } + } + + l.emit(TokenID) + + return lexExpression +} + +// isIgnorable returns true if given character is ignorable (ie. whitespace of line feed) +func isIgnorable(r rune) bool { + return r == ' ' || r == '\t' || r == '\n' +} + +// isAlphaNumeric reports whether r is an alphabetic, digit, or underscore. +// +// NOTE borrowed from https://github.com/golang/go/tree/master/src/text/template/parse/lex.go +func isAlphaNumeric(r rune) bool { + return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r) +} diff --git a/vendor/github.com/aymerick/raymond/lexer/lexer_test.go b/vendor/github.com/aymerick/raymond/lexer/lexer_test.go new file mode 100644 index 0000000..71e8bf0 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/lexer/lexer_test.go @@ -0,0 +1,541 @@ +package lexer + +import ( + "fmt" + "testing" +) + +type lexTest struct { + name string + input string + tokens []Token +} + +// helpers +func tokContent(val string) Token { return Token{TokenContent, val, 0, 1} } +func tokID(val string) Token { return Token{TokenID, val, 0, 1} } +func tokSep(val string) Token { return Token{TokenSep, val, 0, 1} } +func tokString(val string) Token { return Token{TokenString, val, 0, 1} } +func tokNumber(val string) Token { return Token{TokenNumber, val, 0, 1} } +func tokInverse(val string) Token { return Token{TokenInverse, val, 0, 1} } +func tokBool(val string) Token { return Token{TokenBoolean, val, 0, 1} } +func tokError(val string) Token { return Token{TokenError, val, 0, 1} } +func tokComment(val string) Token { return Token{TokenComment, val, 0, 1} } + +var tokEOF = Token{TokenEOF, "", 0, 1} +var tokEquals = Token{TokenEquals, "=", 0, 1} +var tokData = Token{TokenData, "@", 0, 1} +var tokOpen = Token{TokenOpen, "{{", 0, 1} +var tokOpenAmp = Token{TokenOpen, "{{&", 0, 1} +var tokOpenPartial = Token{TokenOpenPartial, "{{>", 0, 1} +var tokClose = Token{TokenClose, "}}", 0, 1} +var tokOpenStrip = Token{TokenOpen, "{{~", 0, 1} +var tokCloseStrip = Token{TokenClose, "~}}", 0, 1} +var tokOpenUnescaped = Token{TokenOpenUnescaped, "{{{", 0, 1} +var tokCloseUnescaped = Token{TokenCloseUnescaped, "}}}", 0, 1} +var tokOpenUnescapedStrip = Token{TokenOpenUnescaped, "{{~{", 0, 1} +var tokCloseUnescapedStrip = Token{TokenCloseUnescaped, "}~}}", 0, 1} +var tokOpenBlock = Token{TokenOpenBlock, "{{#", 0, 1} +var tokOpenEndBlock = Token{TokenOpenEndBlock, "{{/", 0, 1} +var tokOpenInverse = Token{TokenOpenInverse, "{{^", 0, 1} +var tokOpenInverseChain = Token{TokenOpenInverseChain, "{{else", 0, 1} +var tokOpenSexpr = Token{TokenOpenSexpr, "(", 0, 1} +var tokCloseSexpr = Token{TokenCloseSexpr, ")", 0, 1} +var tokOpenBlockParams = Token{TokenOpenBlockParams, "as |", 0, 1} +var tokCloseBlockParams = Token{TokenCloseBlockParams, "|", 0, 1} +var tokOpenRawBlock = Token{TokenOpenRawBlock, "{{{{", 0, 1} +var tokCloseRawBlock = Token{TokenCloseRawBlock, "}}}}", 0, 1} +var tokOpenEndRawBlock = Token{TokenOpenEndRawBlock, "{{{{/", 0, 1} + +var lexTests = []lexTest{ + {"empty", "", []Token{tokEOF}}, + 
{"spaces", " \t\n", []Token{tokContent(" \t\n"), tokEOF}}, + {"content", `now is the time`, []Token{tokContent(`now is the time`), tokEOF}}, + + { + `does not tokenizes identifier starting with true as boolean`, + `{{ foo truebar }}`, + []Token{tokOpen, tokID("foo"), tokID("truebar"), tokClose, tokEOF}, + }, + { + `does not tokenizes identifier starting with false as boolean`, + `{{ foo falsebar }}`, + []Token{tokOpen, tokID("foo"), tokID("falsebar"), tokClose, tokEOF}, + }, + { + `tokenizes raw block`, + `{{{{foo}}}} {{{{/foo}}}}`, + []Token{tokOpenRawBlock, tokID("foo"), tokCloseRawBlock, tokContent(" "), tokOpenEndRawBlock, tokID("foo"), tokCloseRawBlock, tokEOF}, + }, + { + `tokenizes raw block with mustaches in content`, + `{{{{foo}}}}{{bar}}{{{{/foo}}}}`, + []Token{tokOpenRawBlock, tokID("foo"), tokCloseRawBlock, tokContent("{{bar}}"), tokOpenEndRawBlock, tokID("foo"), tokCloseRawBlock, tokEOF}, + }, + { + `tokenizes @../foo`, + `{{@../foo}}`, + []Token{tokOpen, tokData, tokID(".."), tokSep("/"), tokID("foo"), tokClose, tokEOF}, + }, + { + `tokenizes escaped mustaches`, + "\\{{bar}}", + []Token{tokContent("{{bar}}"), tokEOF}, + }, + { + `tokenizes strip mustaches`, + `{{~ foo ~}}`, + []Token{tokOpenStrip, tokID("foo"), tokCloseStrip, tokEOF}, + }, + { + `tokenizes unescaped strip mustaches`, + `{{~{ foo }~}}`, + []Token{tokOpenUnescapedStrip, tokID("foo"), tokCloseUnescapedStrip, tokEOF}, + }, + + // + // Next tests come from: + // https://github.com/wycats/handlebars.js/blob/master/spec/tokenizer.js + // + { + `tokenizes a simple mustache as "OPEN ID CLOSE"`, + `{{foo}}`, + []Token{tokOpen, tokID("foo"), tokClose, tokEOF}, + }, + { + `supports unescaping with &`, + `{{&bar}}`, + []Token{tokOpenAmp, tokID("bar"), tokClose, tokEOF}, + }, + { + `supports unescaping with {{{`, + `{{{bar}}}`, + []Token{tokOpenUnescaped, tokID("bar"), tokCloseUnescaped, tokEOF}, + }, + { + `supports escaping delimiters`, + "{{foo}} \\{{bar}} {{baz}}", + []Token{tokOpen, tokID("foo"), tokClose, tokContent(" "), tokContent("{{bar}} "), tokOpen, tokID("baz"), tokClose, tokEOF}, + }, + { + `supports escaping multiple delimiters`, + "{{foo}} \\{{bar}} \\{{baz}}", + []Token{tokOpen, tokID("foo"), tokClose, tokContent(" "), tokContent("{{bar}} "), tokContent("{{baz}}"), tokEOF}, + }, + { + `supports escaping a triple stash`, + "{{foo}} \\{{{bar}}} {{baz}}", + []Token{tokOpen, tokID("foo"), tokClose, tokContent(" "), tokContent("{{{bar}}} "), tokOpen, tokID("baz"), tokClose, tokEOF}, + }, + { + `supports escaping escape character`, + "{{foo}} \\\\{{bar}} {{baz}}", + []Token{tokOpen, tokID("foo"), tokClose, tokContent(" \\"), tokOpen, tokID("bar"), tokClose, tokContent(" "), tokOpen, tokID("baz"), tokClose, tokEOF}, + }, + { + `supports escaping multiple escape characters`, + "{{foo}} \\\\{{bar}} \\\\{{baz}}", + []Token{tokOpen, tokID("foo"), tokClose, tokContent(" \\"), tokOpen, tokID("bar"), tokClose, tokContent(" \\"), tokOpen, tokID("baz"), tokClose, tokEOF}, + }, + { + `supports escaped mustaches after escaped escape characters`, + "{{foo}} \\\\{{bar}} \\{{baz}}", + // NOTE: JS implementation returns: + // ['OPEN', 'ID', 'CLOSE', 'CONTENT', 'OPEN', 'ID', 'CLOSE', 'CONTENT', 'CONTENT', 'CONTENT'], + // WTF is the last CONTENT ? 
+ []Token{tokOpen, tokID("foo"), tokClose, tokContent(" \\"), tokOpen, tokID("bar"), tokClose, tokContent(" "), tokContent("{{baz}}"), tokEOF}, + }, + { + `supports escaped escape characters after escaped mustaches`, + "{{foo}} \\{{bar}} \\\\{{baz}}", + // NOTE: JS implementation returns: + // []Token{tokOpen, tokID("foo"), tokClose, tokContent(" "), tokContent("{{bar}} "), tokContent("\\"), tokOpen, tokID("baz"), tokClose, tokEOF}, + []Token{tokOpen, tokID("foo"), tokClose, tokContent(" "), tokContent("{{bar}} \\"), tokOpen, tokID("baz"), tokClose, tokEOF}, + }, + { + `supports escaped escape character on a triple stash`, + "{{foo}} \\\\{{{bar}}} {{baz}}", + []Token{tokOpen, tokID("foo"), tokClose, tokContent(" \\"), tokOpenUnescaped, tokID("bar"), tokCloseUnescaped, tokContent(" "), tokOpen, tokID("baz"), tokClose, tokEOF}, + }, + { + `tokenizes a simple path`, + `{{foo/bar}}`, + []Token{tokOpen, tokID("foo"), tokSep("/"), tokID("bar"), tokClose, tokEOF}, + }, + { + `allows dot notation (1)`, + `{{foo.bar}}`, + []Token{tokOpen, tokID("foo"), tokSep("."), tokID("bar"), tokClose, tokEOF}, + }, + { + `allows dot notation (2)`, + `{{foo.bar.baz}}`, + []Token{tokOpen, tokID("foo"), tokSep("."), tokID("bar"), tokSep("."), tokID("baz"), tokClose, tokEOF}, + }, + { + `allows path literals with []`, + `{{foo.[bar]}}`, + []Token{tokOpen, tokID("foo"), tokSep("."), tokID("[bar]"), tokClose, tokEOF}, + }, + { + `allows multiple path literals on a line with []`, + `{{foo.[bar]}}{{foo.[baz]}}`, + []Token{tokOpen, tokID("foo"), tokSep("."), tokID("[bar]"), tokClose, tokOpen, tokID("foo"), tokSep("."), tokID("[baz]"), tokClose, tokEOF}, + }, + { + `tokenizes {{.}} as OPEN ID CLOSE`, + `{{.}}`, + []Token{tokOpen, tokID("."), tokClose, tokEOF}, + }, + { + `tokenizes a path as "OPEN (ID SEP)* ID CLOSE"`, + `{{../foo/bar}}`, + []Token{tokOpen, tokID(".."), tokSep("/"), tokID("foo"), tokSep("/"), tokID("bar"), tokClose, tokEOF}, + }, + { + `tokenizes a path with .. 
as a parent path`, + `{{../foo.bar}}`, + []Token{tokOpen, tokID(".."), tokSep("/"), tokID("foo"), tokSep("."), tokID("bar"), tokClose, tokEOF}, + }, + { + `tokenizes a path with this/foo as OPEN ID SEP ID CLOSE`, + `{{this/foo}}`, + []Token{tokOpen, tokID("this"), tokSep("/"), tokID("foo"), tokClose, tokEOF}, + }, + { + `tokenizes a simple mustache with spaces as "OPEN ID CLOSE"`, + `{{ foo }}`, + []Token{tokOpen, tokID("foo"), tokClose, tokEOF}, + }, + { + `tokenizes a simple mustache with line breaks as "OPEN ID ID CLOSE"`, + "{{ foo \n bar }}", + []Token{tokOpen, tokID("foo"), tokID("bar"), tokClose, tokEOF}, + }, + { + `tokenizes raw content as "CONTENT"`, + `foo {{ bar }} baz`, + []Token{tokContent("foo "), tokOpen, tokID("bar"), tokClose, tokContent(" baz"), tokEOF}, + }, + { + `tokenizes a partial as "OPEN_PARTIAL ID CLOSE"`, + `{{> foo}}`, + []Token{tokOpenPartial, tokID("foo"), tokClose, tokEOF}, + }, + { + `tokenizes a partial with context as "OPEN_PARTIAL ID ID CLOSE"`, + `{{> foo bar }}`, + []Token{tokOpenPartial, tokID("foo"), tokID("bar"), tokClose, tokEOF}, + }, + { + `tokenizes a partial without spaces as "OPEN_PARTIAL ID CLOSE"`, + `{{>foo}}`, + []Token{tokOpenPartial, tokID("foo"), tokClose, tokEOF}, + }, + { + `tokenizes a partial space at the }); as "OPEN_PARTIAL ID CLOSE"`, + `{{>foo }}`, + []Token{tokOpenPartial, tokID("foo"), tokClose, tokEOF}, + }, + { + `tokenizes a partial space at the }); as "OPEN_PARTIAL ID CLOSE"`, + `{{>foo/bar.baz }}`, + []Token{tokOpenPartial, tokID("foo"), tokSep("/"), tokID("bar"), tokSep("."), tokID("baz"), tokClose, tokEOF}, + }, + { + `tokenizes a comment as "COMMENT"`, + `foo {{! this is a comment }} bar {{ baz }}`, + []Token{tokContent("foo "), tokComment("{{! this is a comment }}"), tokContent(" bar "), tokOpen, tokID("baz"), tokClose, tokEOF}, + }, + { + `tokenizes a block comment as "COMMENT"`, + `foo {{!-- this is a {{comment}} --}} bar {{ baz }}`, + []Token{tokContent("foo "), tokComment("{{!-- this is a {{comment}} --}}"), tokContent(" bar "), tokOpen, tokID("baz"), tokClose, tokEOF}, + }, + { + `tokenizes a block comment with whitespace as "COMMENT"`, + "foo {{!-- this is a\n{{comment}}\n--}} bar {{ baz }}", + []Token{tokContent("foo "), tokComment("{{!-- this is a\n{{comment}}\n--}}"), tokContent(" bar "), tokOpen, tokID("baz"), tokClose, tokEOF}, + }, + { + `tokenizes open and closing blocks as OPEN_BLOCK, ID, CLOSE ..., OPEN_ENDBLOCK ID CLOSE`, + `{{#foo}}content{{/foo}}`, + []Token{tokOpenBlock, tokID("foo"), tokClose, tokContent("content"), tokOpenEndBlock, tokID("foo"), tokClose, tokEOF}, + }, + { + `tokenizes inverse sections as "INVERSE"`, + `{{^}}`, + []Token{tokInverse("{{^}}"), tokEOF}, + }, + { + `tokenizes inverse sections as "INVERSE" with alternate format`, + `{{else}}`, + []Token{tokInverse("{{else}}"), tokEOF}, + }, + { + `tokenizes inverse sections as "INVERSE" with spaces`, + `{{ else }}`, + []Token{tokInverse("{{ else }}"), tokEOF}, + }, + { + `tokenizes inverse sections with ID as "OPEN_INVERSE ID CLOSE"`, + `{{^foo}}`, + []Token{tokOpenInverse, tokID("foo"), tokClose, tokEOF}, + }, + { + `tokenizes inverse sections with ID and spaces as "OPEN_INVERSE ID CLOSE"`, + `{{^ foo }}`, + []Token{tokOpenInverse, tokID("foo"), tokClose, tokEOF}, + }, + { + `tokenizes mustaches with params as "OPEN ID ID ID CLOSE"`, + `{{ foo bar baz }}`, + []Token{tokOpen, tokID("foo"), tokID("bar"), tokID("baz"), tokClose, tokEOF}, + }, + { + `tokenizes mustaches with String params as "OPEN ID ID STRING CLOSE"`, + `{{ foo bar "baz" 
}}`, + []Token{tokOpen, tokID("foo"), tokID("bar"), tokString("baz"), tokClose, tokEOF}, + }, + { + `tokenizes mustaches with String params using single quotes as "OPEN ID ID STRING CLOSE"`, + `{{ foo bar 'baz' }}`, + []Token{tokOpen, tokID("foo"), tokID("bar"), tokString("baz"), tokClose, tokEOF}, + }, + { + `tokenizes String params with spaces inside as "STRING"`, + `{{ foo bar "baz bat" }}`, + []Token{tokOpen, tokID("foo"), tokID("bar"), tokString("baz bat"), tokClose, tokEOF}, + }, + { + `tokenizes String params with escapes quotes as STRING`, + `{{ foo "bar\"baz" }}`, + []Token{tokOpen, tokID("foo"), tokString(`bar"baz`), tokClose, tokEOF}, + }, + { + `tokenizes String params using single quotes with escapes quotes as STRING`, + `{{ foo 'bar\'baz' }}`, + []Token{tokOpen, tokID("foo"), tokString(`bar'baz`), tokClose, tokEOF}, + }, + { + `tokenizes numbers`, + `{{ foo 1 }}`, + []Token{tokOpen, tokID("foo"), tokNumber("1"), tokClose, tokEOF}, + }, + { + `tokenizes floats`, + `{{ foo 1.1 }}`, + []Token{tokOpen, tokID("foo"), tokNumber("1.1"), tokClose, tokEOF}, + }, + { + `tokenizes negative numbers`, + `{{ foo -1 }}`, + []Token{tokOpen, tokID("foo"), tokNumber("-1"), tokClose, tokEOF}, + }, + { + `tokenizes negative floats`, + `{{ foo -1.1 }}`, + []Token{tokOpen, tokID("foo"), tokNumber("-1.1"), tokClose, tokEOF}, + }, + { + `tokenizes boolean true`, + `{{ foo true }}`, + []Token{tokOpen, tokID("foo"), tokBool("true"), tokClose, tokEOF}, + }, + { + `tokenizes boolean false`, + `{{ foo false }}`, + []Token{tokOpen, tokID("foo"), tokBool("false"), tokClose, tokEOF}, + }, + // SKIP: 'tokenizes undefined and null' + { + `tokenizes hash arguments (1)`, + `{{ foo bar=baz }}`, + []Token{tokOpen, tokID("foo"), tokID("bar"), tokEquals, tokID("baz"), tokClose, tokEOF}, + }, + { + `tokenizes hash arguments (2)`, + `{{ foo bar baz=bat }}`, + []Token{tokOpen, tokID("foo"), tokID("bar"), tokID("baz"), tokEquals, tokID("bat"), tokClose, tokEOF}, + }, + { + `tokenizes hash arguments (3)`, + `{{ foo bar baz=1 }}`, + []Token{tokOpen, tokID("foo"), tokID("bar"), tokID("baz"), tokEquals, tokNumber("1"), tokClose, tokEOF}, + }, + { + `tokenizes hash arguments (4)`, + `{{ foo bar baz=true }}`, + []Token{tokOpen, tokID("foo"), tokID("bar"), tokID("baz"), tokEquals, tokBool("true"), tokClose, tokEOF}, + }, + { + `tokenizes hash arguments (5)`, + `{{ foo bar baz=false }}`, + []Token{tokOpen, tokID("foo"), tokID("bar"), tokID("baz"), tokEquals, tokBool("false"), tokClose, tokEOF}, + }, + { + `tokenizes hash arguments (6)`, + "{{ foo bar\n baz=bat }}", + []Token{tokOpen, tokID("foo"), tokID("bar"), tokID("baz"), tokEquals, tokID("bat"), tokClose, tokEOF}, + }, + { + `tokenizes hash arguments (7)`, + `{{ foo bar baz="bat" }}`, + []Token{tokOpen, tokID("foo"), tokID("bar"), tokID("baz"), tokEquals, tokString("bat"), tokClose, tokEOF}, + }, + { + `tokenizes hash arguments (8)`, + `{{ foo bar baz="bat" bam=wot }}`, + []Token{tokOpen, tokID("foo"), tokID("bar"), tokID("baz"), tokEquals, tokString("bat"), tokID("bam"), tokEquals, tokID("wot"), tokClose, tokEOF}, + }, + { + `tokenizes hash arguments (9)`, + `{{foo omg bar=baz bat="bam"}}`, + []Token{tokOpen, tokID("foo"), tokID("omg"), tokID("bar"), tokEquals, tokID("baz"), tokID("bat"), tokEquals, tokString("bam"), tokClose, tokEOF}, + }, + { + `tokenizes special @ identifiers (1)`, + `{{ @foo }}`, + []Token{tokOpen, tokData, tokID("foo"), tokClose, tokEOF}, + }, + { + `tokenizes special @ identifiers (2)`, + `{{ foo @bar }}`, + []Token{tokOpen, tokID("foo"), tokData, 
tokID("bar"), tokClose, tokEOF}, + }, + { + `tokenizes special @ identifiers (3)`, + `{{ foo bar=@baz }}`, + []Token{tokOpen, tokID("foo"), tokID("bar"), tokEquals, tokData, tokID("baz"), tokClose, tokEOF}, + }, + { + `does not time out in a mustache with a single } followed by EOF`, + `{{foo}`, + []Token{tokOpen, tokID("foo"), tokError("Unexpected character in expression: '}'")}, + }, + { + `does not time out in a mustache when invalid ID characters are used`, + `{{foo & }}`, + []Token{tokOpen, tokID("foo"), tokError("Unexpected character in expression: '&'")}, + }, + { + `tokenizes subexpressions (1)`, + `{{foo (bar)}}`, + []Token{tokOpen, tokID("foo"), tokOpenSexpr, tokID("bar"), tokCloseSexpr, tokClose, tokEOF}, + }, + { + `tokenizes subexpressions (2)`, + `{{foo (a-x b-y)}}`, + []Token{tokOpen, tokID("foo"), tokOpenSexpr, tokID("a-x"), tokID("b-y"), tokCloseSexpr, tokClose, tokEOF}, + }, + { + `tokenizes nested subexpressions`, + `{{foo (bar (lol rofl)) (baz)}}`, + []Token{tokOpen, tokID("foo"), tokOpenSexpr, tokID("bar"), tokOpenSexpr, tokID("lol"), tokID("rofl"), tokCloseSexpr, tokCloseSexpr, tokOpenSexpr, tokID("baz"), tokCloseSexpr, tokClose, tokEOF}, + }, + { + `tokenizes nested subexpressions: literals`, + `{{foo (bar (lol true) false) (baz 1) (blah 'b') (blorg "c")}}`, + []Token{tokOpen, tokID("foo"), tokOpenSexpr, tokID("bar"), tokOpenSexpr, tokID("lol"), tokBool("true"), tokCloseSexpr, tokBool("false"), tokCloseSexpr, tokOpenSexpr, tokID("baz"), tokNumber("1"), tokCloseSexpr, tokOpenSexpr, tokID("blah"), tokString("b"), tokCloseSexpr, tokOpenSexpr, tokID("blorg"), tokString("c"), tokCloseSexpr, tokClose, tokEOF}, + }, + { + `tokenizes block params (1)`, + `{{#foo as |bar|}}`, + []Token{tokOpenBlock, tokID("foo"), tokOpenBlockParams, tokID("bar"), tokCloseBlockParams, tokClose, tokEOF}, + }, + { + `tokenizes block params (2)`, + `{{#foo as |bar baz|}}`, + []Token{tokOpenBlock, tokID("foo"), tokOpenBlockParams, tokID("bar"), tokID("baz"), tokCloseBlockParams, tokClose, tokEOF}, + }, + { + `tokenizes block params (3)`, + `{{#foo as | bar baz |}}`, + []Token{tokOpenBlock, tokID("foo"), tokOpenBlockParams, tokID("bar"), tokID("baz"), tokCloseBlockParams, tokClose, tokEOF}, + }, + { + `tokenizes block params (4)`, + `{{#foo as as | bar baz |}}`, + []Token{tokOpenBlock, tokID("foo"), tokID("as"), tokOpenBlockParams, tokID("bar"), tokID("baz"), tokCloseBlockParams, tokClose, tokEOF}, + }, + { + `tokenizes block params (5)`, + `{{else foo as |bar baz|}}`, + []Token{tokOpenInverseChain, tokID("foo"), tokOpenBlockParams, tokID("bar"), tokID("baz"), tokCloseBlockParams, tokClose, tokEOF}, + }, +} + +func collect(t *lexTest) []Token { + var result []Token + + l := scanWithName(t.input, t.name) + for { + token := l.NextToken() + result = append(result, token) + + if token.Kind == TokenEOF || token.Kind == TokenError { + break + } + } + + return result +} + +func equal(i1, i2 []Token, checkPos bool) bool { + if len(i1) != len(i2) { + return false + } + + for k := range i1 { + if i1[k].Kind != i2[k].Kind { + return false + } + + if checkPos && i1[k].Pos != i2[k].Pos { + return false + } + + if i1[k].Val != i2[k].Val { + return false + } + } + + return true +} + +func TestLexer(t *testing.T) { + t.Parallel() + + for _, test := range lexTests { + tokens := collect(&test) + if !equal(tokens, test.tokens, false) { + t.Errorf("Test '%s' failed\ninput:\n\t'%s'\nexpected\n\t%v\ngot\n\t%+v\n", test.name, test.input, test.tokens, tokens) + } + } +} + +// @todo Test errors: +// `{{{{raw foo` + +// 
package example +func Example() { + source := "You know {{nothing}} John Snow" + + output := "" + + lex := Scan(source) + for { + // consume next token + token := lex.NextToken() + + output += fmt.Sprintf(" %s", token) + + // stops when all tokens have been consumed, or on error + if token.Kind == TokenEOF || token.Kind == TokenError { + break + } + } + + fmt.Print(output) + // Output: Content{"You know "} Open{"{{"} ID{"nothing"} Close{"}}"} Content{" John Snow"} EOF +} diff --git a/vendor/github.com/aymerick/raymond/lexer/token.go b/vendor/github.com/aymerick/raymond/lexer/token.go new file mode 100644 index 0000000..2c9c5ed --- /dev/null +++ b/vendor/github.com/aymerick/raymond/lexer/token.go @@ -0,0 +1,122 @@ +package lexer + +import "fmt" + +const ( + TokenError TokenKind = iota + TokenEOF + + // mustache delimiters + TokenOpen // OPEN + TokenClose // CLOSE + TokenOpenRawBlock // OPEN_RAW_BLOCK + TokenCloseRawBlock // CLOSE_RAW_BLOCK + TokenOpenEndRawBlock // END_RAW_BLOCK + TokenOpenUnescaped // OPEN_UNESCAPED + TokenCloseUnescaped // CLOSE_UNESCAPED + TokenOpenBlock // OPEN_BLOCK + TokenOpenEndBlock // OPEN_ENDBLOCK + TokenInverse // INVERSE + TokenOpenInverse // OPEN_INVERSE + TokenOpenInverseChain // OPEN_INVERSE_CHAIN + TokenOpenPartial // OPEN_PARTIAL + TokenComment // COMMENT + + // inside mustaches + TokenOpenSexpr // OPEN_SEXPR + TokenCloseSexpr // CLOSE_SEXPR + TokenEquals // EQUALS + TokenData // DATA + TokenSep // SEP + TokenOpenBlockParams // OPEN_BLOCK_PARAMS + TokenCloseBlockParams // CLOSE_BLOCK_PARAMS + + // tokens with content + TokenContent // CONTENT + TokenID // ID + TokenString // STRING + TokenNumber // NUMBER + TokenBoolean // BOOLEAN +) + +const ( + // Option to generate token position in its string representation + DUMP_TOKEN_POS = false + + // Option to generate values for all token kinds for their string representations + DUMP_ALL_TOKENS_VAL = true +) + +// TokenKind represents a Token type. +type TokenKind int + +// Token represents a scanned token. +type Token struct { + Kind TokenKind // Token kind + Val string // Token value + + Pos int // Byte position in input string + Line int // Line number in input string +} + +// tokenName permits to display token name given token type +var tokenName = map[TokenKind]string{ + TokenError: "Error", + TokenEOF: "EOF", + TokenContent: "Content", + TokenComment: "Comment", + TokenOpen: "Open", + TokenClose: "Close", + TokenOpenUnescaped: "OpenUnescaped", + TokenCloseUnescaped: "CloseUnescaped", + TokenOpenBlock: "OpenBlock", + TokenOpenEndBlock: "OpenEndBlock", + TokenOpenRawBlock: "OpenRawBlock", + TokenCloseRawBlock: "CloseRawBlock", + TokenOpenEndRawBlock: "OpenEndRawBlock", + TokenOpenBlockParams: "OpenBlockParams", + TokenCloseBlockParams: "CloseBlockParams", + TokenInverse: "Inverse", + TokenOpenInverse: "OpenInverse", + TokenOpenInverseChain: "OpenInverseChain", + TokenOpenPartial: "OpenPartial", + TokenOpenSexpr: "OpenSexpr", + TokenCloseSexpr: "CloseSexpr", + TokenID: "ID", + TokenEquals: "Equals", + TokenString: "String", + TokenNumber: "Number", + TokenBoolean: "Boolean", + TokenData: "Data", + TokenSep: "Sep", +} + +// String returns the token kind string representation for debugging. +func (k TokenKind) String() string { + s := tokenName[k] + if s == "" { + return fmt.Sprintf("Token-%d", int(k)) + } + return s +} + +// String returns the token string representation for debugging. 
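+//
+// With the default DUMP_TOKEN_POS and DUMP_ALL_TOKENS_VAL values above, the
+// tokens scanned from "{{foo}}" render as:
+//
+//	Open{"{{"} ID{"foo"} Close{"}}"} EOF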
+func (t Token) String() string { + result := "" + + if DUMP_TOKEN_POS { + result += fmt.Sprintf("%d:", t.Pos) + } + + result += fmt.Sprintf("%s", t.Kind) + + if (DUMP_ALL_TOKENS_VAL || (t.Kind >= TokenContent)) && len(t.Val) > 0 { + if len(t.Val) > 100 { + result += fmt.Sprintf("{%.20q...}", t.Val) + } else { + result += fmt.Sprintf("{%q}", t.Val) + } + } + + return result +} diff --git a/vendor/github.com/aymerick/raymond/mustache_test.go b/vendor/github.com/aymerick/raymond/mustache_test.go new file mode 100644 index 0000000..cae066f --- /dev/null +++ b/vendor/github.com/aymerick/raymond/mustache_test.go @@ -0,0 +1,234 @@ +package raymond + +import ( + "io/ioutil" + "path" + "regexp" + "strings" + "testing" + + "gopkg.in/yaml.v2" +) + +// +// Note, as the JS implementation, the divergences from mustache spec: +// - we don't support alternative delimeters +// - the mustache lambda spec differs +// + +type mustacheTest struct { + Name string + Desc string + Data interface{} + Template string + Expected string + Partials map[string]string +} + +type mustacheTestFile struct { + Overview string + Tests []mustacheTest +} + +var ( + rAltDelim = regexp.MustCompile(regexp.QuoteMeta("{{=")) +) + +var ( + musTestLambdaInterMult = 0 +) + +func TestMustache(t *testing.T) { + skipFiles := map[string]bool{ + // mustache lambdas differ from handlebars lambdas + "~lambdas.yml": true, + } + + for _, fileName := range mustacheTestFiles() { + if skipFiles[fileName] { + // fmt.Printf("Skipped file: %s\n", fileName) + continue + } + + launchTests(t, testsFromMustacheFile(fileName)) + } +} + +func testsFromMustacheFile(fileName string) []Test { + result := []Test{} + + fileData, err := ioutil.ReadFile(path.Join("mustache", "specs", fileName)) + if err != nil { + panic(err) + } + + var testFile mustacheTestFile + if err := yaml.Unmarshal(fileData, &testFile); err != nil { + panic(err) + } + + for _, mustacheTest := range testFile.Tests { + if mustBeSkipped(mustacheTest, fileName) { + // fmt.Printf("Skipped test: %s\n", mustacheTest.Name) + continue + } + + test := Test{ + name: mustacheTest.Name, + input: mustacheTest.Template, + data: mustacheTest.Data, + partials: mustacheTest.Partials, + output: mustacheTest.Expected, + } + + result = append(result, test) + } + + return result +} + +// returns true if test must be skipped +func mustBeSkipped(test mustacheTest, fileName string) bool { + // handlebars does not support alternative delimiters + return haveAltDelimiter(test) || + // the JS implementation skips those tests + fileName == "partials.yml" && (test.Name == "Failed Lookup" || test.Name == "Standalone Indentation") +} + +// returns true if test have alternative delimeter in template or in partials +func haveAltDelimiter(test mustacheTest) bool { + // check template + if rAltDelim.MatchString(test.Template) { + return true + } + + // check partials + for _, partial := range test.Partials { + if rAltDelim.MatchString(partial) { + return true + } + } + + return false +} + +func mustacheTestFiles() []string { + var result []string + + files, err := ioutil.ReadDir(path.Join("mustache", "specs")) + if err != nil { + panic(err) + } + + for _, file := range files { + fileName := file.Name() + + if !file.IsDir() && strings.HasSuffix(fileName, ".yml") { + result = append(result, fileName) + } + } + + return result +} + +// +// Following tests come fron ~lambdas.yml +// + +var mustacheLambdasTests = []Test{ + { + "Interpolation", + "Hello, {{lambda}}!", + map[string]interface{}{"lambda": func() string { return 
"world" }}, + nil, nil, nil, + "Hello, world!", + }, + + // // SKIP: lambda return value is not parsed + // { + // "Interpolation - Expansion", + // "Hello, {{lambda}}!", + // map[string]interface{}{"lambda": func() string { return "{{planet}}" }}, + // nil, nil, nil, + // "Hello, world!", + // }, + + // SKIP "Interpolation - Alternate Delimiters" + + { + "Interpolation - Multiple Calls", + "{{lambda}} == {{{lambda}}} == {{lambda}}", + map[string]interface{}{"lambda": func() string { + musTestLambdaInterMult += 1 + return Str(musTestLambdaInterMult) + }}, + nil, nil, nil, + "1 == 2 == 3", + }, + + { + "Escaping", + "<{{lambda}}{{{lambda}}}", + map[string]interface{}{"lambda": func() string { return ">" }}, + nil, nil, nil, + "<>>", + }, + + // // SKIP: "Lambdas used for sections should receive the raw section string." + // { + // "Section", + // "<{{#lambda}}{{x}}{{/lambda}}>", + // map[string]interface{}{"lambda": func(param string) string { + // if param == "{{x}}" { + // return "yes" + // } + + // return "false" + // }, "x": "Error!"}, + // nil, nil, nil, + // "", + // }, + + // // SKIP: lambda return value is not parsed + // { + // "Section - Expansion", + // "<{{#lambda}}-{{/lambda}}>", + // map[string]interface{}{"lambda": func(param string) string { + // return param + "{{planet}}" + param + // }, "planet": "Earth"}, + // nil, nil, nil, + // "<-Earth->", + // }, + + // SKIP: "Section - Alternate Delimiters" + + { + "Section - Multiple Calls", + "{{#lambda}}FILE{{/lambda}} != {{#lambda}}LINE{{/lambda}}", + map[string]interface{}{"lambda": func(options *Options) string { + return "__" + options.Fn() + "__" + }}, + nil, nil, nil, + "__FILE__ != __LINE__", + }, + + // // SKIP: "Lambdas used for inverted sections should be considered truthy." + // { + // "Inverted Section", + // "<{{^lambda}}{{static}}{{/lambda}}>", + // map[string]interface{}{ + // "lambda": func() interface{} { + // return false + // }, + // "static": "static", + // }, + // nil, nil, nil, + // "<>", + // }, +} + +func TestMustacheLambdas(t *testing.T) { + t.Parallel() + + launchTests(t, mustacheLambdasTests) +} diff --git a/vendor/github.com/aymerick/raymond/parser/parser.go b/vendor/github.com/aymerick/raymond/parser/parser.go new file mode 100644 index 0000000..9f2ef3e --- /dev/null +++ b/vendor/github.com/aymerick/raymond/parser/parser.go @@ -0,0 +1,846 @@ +// Package parser provides a handlebars syntax analyser. It consumes the tokens provided by the lexer to build an AST. +package parser + +import ( + "fmt" + "regexp" + "runtime" + "strconv" + + "github.com/aymerick/raymond/ast" + "github.com/aymerick/raymond/lexer" +) + +// References: +// - https://github.com/wycats/handlebars.js/blob/master/src/handlebars.yy +// - https://github.com/golang/go/blob/master/src/text/template/parse/parse.go + +// Parser is a syntax analyzer. +type parser struct { + // Lexer + lex *lexer.Lexer + + // Root node + root ast.Node + + // Tokens parsed but not consumed yet + tokens []*lexer.Token + + // All tokens have been retreieved from lexer + lexOver bool +} + +var ( + rOpenComment = regexp.MustCompile(`^\{\{~?!-?-?`) + rCloseComment = regexp.MustCompile(`-?-?~?\}\}$`) + rOpenAmp = regexp.MustCompile(`^\{\{~?&`) +) + +// new instanciates a new parser +func new(input string) *parser { + return &parser{ + lex: lexer.Scan(input), + } +} + +// Parse analyzes given input and returns the AST root node. 
+func Parse(input string) (result *ast.Program, err error) { + // recover error + defer errRecover(&err) + + parser := new(input) + + // parse + result = parser.parseProgram() + + // check last token + token := parser.shift() + if token.Kind != lexer.TokenEOF { + // Parsing ended before EOF + errToken(token, "Syntax error") + } + + // fix whitespaces + processWhitespaces(result) + + // named returned values + return +} + +// errRecover recovers parsing panic +func errRecover(errp *error) { + e := recover() + if e != nil { + switch err := e.(type) { + case runtime.Error: + panic(e) + case error: + *errp = err + default: + panic(e) + } + } +} + +// errPanic panics +func errPanic(err error, line int) { + panic(fmt.Errorf("Parse error on line %d:\n%s", line, err)) +} + +// errNode panics with given node infos +func errNode(node ast.Node, msg string) { + errPanic(fmt.Errorf("%s\nNode: %s", msg, node), node.Location().Line) +} + +// errNode panics with given Token infos +func errToken(tok *lexer.Token, msg string) { + errPanic(fmt.Errorf("%s\nToken: %s", msg, tok), tok.Line) +} + +// errNode panics because of an unexpected Token kind +func errExpected(expect lexer.TokenKind, tok *lexer.Token) { + errPanic(fmt.Errorf("Expecting %s, got: '%s'", expect, tok), tok.Line) +} + +// program : statement* +func (p *parser) parseProgram() *ast.Program { + result := ast.NewProgram(p.lex.Pos(), p.lex.Line()) + + for p.isStatement() { + result.AddStatement(p.parseStatement()) + } + + return result +} + +// statement : mustache | block | rawBlock | partial | content | COMMENT +func (p *parser) parseStatement() ast.Node { + var result ast.Node + + tok := p.next() + + switch tok.Kind { + case lexer.TokenOpen, lexer.TokenOpenUnescaped: + // mustache + result = p.parseMustache() + case lexer.TokenOpenBlock: + // block + result = p.parseBlock() + case lexer.TokenOpenInverse: + // block + result = p.parseInverse() + case lexer.TokenOpenRawBlock: + // rawBlock + result = p.parseRawBlock() + case lexer.TokenOpenPartial: + // partial + result = p.parsePartial() + case lexer.TokenContent: + // content + result = p.parseContent() + case lexer.TokenComment: + // COMMENT + result = p.parseComment() + } + + return result +} + +// isStatement returns true if next token starts a statement +func (p *parser) isStatement() bool { + if !p.have(1) { + return false + } + + switch p.next().Kind { + case lexer.TokenOpen, lexer.TokenOpenUnescaped, lexer.TokenOpenBlock, + lexer.TokenOpenInverse, lexer.TokenOpenRawBlock, lexer.TokenOpenPartial, + lexer.TokenContent, lexer.TokenComment: + return true + } + + return false +} + +// content : CONTENT +func (p *parser) parseContent() *ast.ContentStatement { + // CONTENT + tok := p.shift() + if tok.Kind != lexer.TokenContent { + // @todo This check can be removed if content is optional in a raw block + errExpected(lexer.TokenContent, tok) + } + + return ast.NewContentStatement(tok.Pos, tok.Line, tok.Val) +} + +// COMMENT +func (p *parser) parseComment() *ast.CommentStatement { + // COMMENT + tok := p.shift() + + value := rOpenComment.ReplaceAllString(tok.Val, "") + value = rCloseComment.ReplaceAllString(value, "") + + result := ast.NewCommentStatement(tok.Pos, tok.Line, value) + result.Strip = ast.NewStripForStr(tok.Val) + + return result +} + +// param* hash? +func (p *parser) parseExpressionParamsHash() ([]ast.Node, *ast.Hash) { + var params []ast.Node + var hash *ast.Hash + + // params* + if p.isParam() { + params = p.parseParams() + } + + // hash? 
+ if p.isHashSegment() { + hash = p.parseHash() + } + + return params, hash +} + +// helperName param* hash? +func (p *parser) parseExpression(tok *lexer.Token) *ast.Expression { + result := ast.NewExpression(tok.Pos, tok.Line) + + // helperName + result.Path = p.parseHelperName() + + // param* hash? + result.Params, result.Hash = p.parseExpressionParamsHash() + + return result +} + +// rawBlock : openRawBlock content endRawBlock +// openRawBlock : OPEN_RAW_BLOCK helperName param* hash? CLOSE_RAW_BLOCK +// endRawBlock : OPEN_END_RAW_BLOCK helperName CLOSE_RAW_BLOCK +func (p *parser) parseRawBlock() *ast.BlockStatement { + // OPEN_RAW_BLOCK + tok := p.shift() + + result := ast.NewBlockStatement(tok.Pos, tok.Line) + + // helperName param* hash? + result.Expression = p.parseExpression(tok) + + openName := result.Expression.Canonical() + + // CLOSE_RAW_BLOCK + tok = p.shift() + if tok.Kind != lexer.TokenCloseRawBlock { + errExpected(lexer.TokenCloseRawBlock, tok) + } + + // content + // @todo Is content mandatory in a raw block ? + content := p.parseContent() + + program := ast.NewProgram(tok.Pos, tok.Line) + program.AddStatement(content) + + result.Program = program + + // OPEN_END_RAW_BLOCK + tok = p.shift() + if tok.Kind != lexer.TokenOpenEndRawBlock { + // should never happen as it is caught by lexer + errExpected(lexer.TokenOpenEndRawBlock, tok) + } + + // helperName + endId := p.parseHelperName() + + closeName, ok := ast.HelperNameStr(endId) + if !ok { + errNode(endId, "Erroneous closing expression") + } + + if openName != closeName { + errNode(endId, fmt.Sprintf("%s doesn't match %s", openName, closeName)) + } + + // CLOSE_RAW_BLOCK + tok = p.shift() + if tok.Kind != lexer.TokenCloseRawBlock { + errExpected(lexer.TokenCloseRawBlock, tok) + } + + return result +} + +// block : openBlock program inverseChain? closeBlock +func (p *parser) parseBlock() *ast.BlockStatement { + // openBlock + result, blockParams := p.parseOpenBlock() + + // program + program := p.parseProgram() + program.BlockParams = blockParams + result.Program = program + + // inverseChain? + if p.isInverseChain() { + result.Inverse = p.parseInverseChain() + } + + // closeBlock + p.parseCloseBlock(result) + + setBlockInverseStrip(result) + + return result +} + +// setBlockInverseStrip is called when parsing `block` (openBlock | openInverse) and `inverseChain` +// +// TODO: This was totally cargo culted ! CHECK THAT ! +// +// cf. prepareBlock() in: +// https://github.com/wycats/handlebars.js/blob/master/lib/handlebars/compiler/helper.js +func setBlockInverseStrip(block *ast.BlockStatement) { + if block.Inverse == nil { + return + } + + if block.Inverse.Chained { + b, _ := block.Inverse.Body[0].(*ast.BlockStatement) + b.CloseStrip = block.CloseStrip + } + + block.InverseStrip = block.Inverse.Strip +} + +// block : openInverse program inverseAndProgram? closeBlock +func (p *parser) parseInverse() *ast.BlockStatement { + // openInverse + result, blockParams := p.parseOpenBlock() + + // program + program := p.parseProgram() + + program.BlockParams = blockParams + result.Inverse = program + + // inverseAndProgram? + if p.isInverse() { + result.Program = p.parseInverseAndProgram() + } + + // closeBlock + p.parseCloseBlock(result) + + setBlockInverseStrip(result) + + return result +} + +// helperName param* hash? blockParams? +func (p *parser) parseOpenBlockExpression(tok *lexer.Token) (*ast.BlockStatement, []string) { + var blockParams []string + + result := ast.NewBlockStatement(tok.Pos, tok.Line) + + // helperName param* hash? 
+ result.Expression = p.parseExpression(tok) + + // blockParams? + if p.isBlockParams() { + blockParams = p.parseBlockParams() + } + + // named returned values + return result, blockParams +} + +// inverseChain : openInverseChain program inverseChain? +// | inverseAndProgram +func (p *parser) parseInverseChain() *ast.Program { + if p.isInverse() { + // inverseAndProgram + return p.parseInverseAndProgram() + } else { + result := ast.NewProgram(p.lex.Pos(), p.lex.Line()) + + // openInverseChain + block, blockParams := p.parseOpenBlock() + + // program + program := p.parseProgram() + + program.BlockParams = blockParams + block.Program = program + + // inverseChain? + if p.isInverseChain() { + block.Inverse = p.parseInverseChain() + } + + setBlockInverseStrip(block) + + result.Chained = true + result.AddStatement(block) + + return result + } +} + +// Returns true if current token starts an inverse chain +func (p *parser) isInverseChain() bool { + return p.isOpenInverseChain() || p.isInverse() +} + +// inverseAndProgram : INVERSE program +func (p *parser) parseInverseAndProgram() *ast.Program { + // INVERSE + tok := p.shift() + + // program + result := p.parseProgram() + result.Strip = ast.NewStripForStr(tok.Val) + + return result +} + +// openBlock : OPEN_BLOCK helperName param* hash? blockParams? CLOSE +// openInverse : OPEN_INVERSE helperName param* hash? blockParams? CLOSE +// openInverseChain: OPEN_INVERSE_CHAIN helperName param* hash? blockParams? CLOSE +func (p *parser) parseOpenBlock() (*ast.BlockStatement, []string) { + // OPEN_BLOCK | OPEN_INVERSE | OPEN_INVERSE_CHAIN + tok := p.shift() + + // helperName param* hash? blockParams? + result, blockParams := p.parseOpenBlockExpression(tok) + + // CLOSE + tokClose := p.shift() + if tokClose.Kind != lexer.TokenClose { + errExpected(lexer.TokenClose, tokClose) + } + + result.OpenStrip = ast.NewStrip(tok.Val, tokClose.Val) + + // named returned values + return result, blockParams +} + +// closeBlock : OPEN_ENDBLOCK helperName CLOSE +func (p *parser) parseCloseBlock(block *ast.BlockStatement) { + // OPEN_ENDBLOCK + tok := p.shift() + if tok.Kind != lexer.TokenOpenEndBlock { + errExpected(lexer.TokenOpenEndBlock, tok) + } + + // helperName + endId := p.parseHelperName() + + closeName, ok := ast.HelperNameStr(endId) + if !ok { + errNode(endId, "Erroneous closing expression") + } + + openName := block.Expression.Canonical() + if openName != closeName { + errNode(endId, fmt.Sprintf("%s doesn't match %s", openName, closeName)) + } + + // CLOSE + tokClose := p.shift() + if tokClose.Kind != lexer.TokenClose { + errExpected(lexer.TokenClose, tokClose) + } + + block.CloseStrip = ast.NewStrip(tok.Val, tokClose.Val) +} + +// mustache : OPEN helperName param* hash? CLOSE +// | OPEN_UNESCAPED helperName param* hash? CLOSE_UNESCAPED +func (p *parser) parseMustache() *ast.MustacheStatement { + // OPEN | OPEN_UNESCAPED + tok := p.shift() + + closeToken := lexer.TokenClose + if tok.Kind == lexer.TokenOpenUnescaped { + closeToken = lexer.TokenCloseUnescaped + } + + unescaped := false + if (tok.Kind == lexer.TokenOpenUnescaped) || (rOpenAmp.MatchString(tok.Val)) { + unescaped = true + } + + result := ast.NewMustacheStatement(tok.Pos, tok.Line, unescaped) + + // helperName param* hash? 
+ result.Expression = p.parseExpression(tok) + + // CLOSE | CLOSE_UNESCAPED + tokClose := p.shift() + if tokClose.Kind != closeToken { + errExpected(closeToken, tokClose) + } + + result.Strip = ast.NewStrip(tok.Val, tokClose.Val) + + return result +} + +// partial : OPEN_PARTIAL partialName param* hash? CLOSE +func (p *parser) parsePartial() *ast.PartialStatement { + // OPEN_PARTIAL + tok := p.shift() + + result := ast.NewPartialStatement(tok.Pos, tok.Line) + + // partialName + result.Name = p.parsePartialName() + + // param* hash? + result.Params, result.Hash = p.parseExpressionParamsHash() + + // CLOSE + tokClose := p.shift() + if tokClose.Kind != lexer.TokenClose { + errExpected(lexer.TokenClose, tokClose) + } + + result.Strip = ast.NewStrip(tok.Val, tokClose.Val) + + return result +} + +// helperName | sexpr +func (p *parser) parseHelperNameOrSexpr() ast.Node { + if p.isSexpr() { + // sexpr + return p.parseSexpr() + } else { + // helperName + return p.parseHelperName() + } +} + +// param : helperName | sexpr +func (p *parser) parseParam() ast.Node { + return p.parseHelperNameOrSexpr() +} + +// Returns true if next tokens represent a `param` +func (p *parser) isParam() bool { + return (p.isSexpr() || p.isHelperName()) && !p.isHashSegment() +} + +// param* +func (p *parser) parseParams() []ast.Node { + var result []ast.Node + + for p.isParam() { + result = append(result, p.parseParam()) + } + + return result +} + +// sexpr : OPEN_SEXPR helperName param* hash? CLOSE_SEXPR +func (p *parser) parseSexpr() *ast.SubExpression { + // OPEN_SEXPR + tok := p.shift() + + result := ast.NewSubExpression(tok.Pos, tok.Line) + + // helperName param* hash? + result.Expression = p.parseExpression(tok) + + // CLOSE_SEXPR + tok = p.shift() + if tok.Kind != lexer.TokenCloseSexpr { + errExpected(lexer.TokenCloseSexpr, tok) + } + + return result +} + +// hash : hashSegment+ +func (p *parser) parseHash() *ast.Hash { + var pairs []*ast.HashPair + + for p.isHashSegment() { + pairs = append(pairs, p.parseHashSegment()) + } + + firstLoc := pairs[0].Location() + + result := ast.NewHash(firstLoc.Pos, firstLoc.Line) + result.Pairs = pairs + + return result +} + +// returns true if next tokens represents a `hashSegment` +func (p *parser) isHashSegment() bool { + return p.have(2) && (p.next().Kind == lexer.TokenID) && (p.nextAt(1).Kind == lexer.TokenEquals) +} + +// hashSegment : ID EQUALS param +func (p *parser) parseHashSegment() *ast.HashPair { + // ID + tok := p.shift() + + // EQUALS + p.shift() + + // param + param := p.parseParam() + + result := ast.NewHashPair(tok.Pos, tok.Line) + result.Key = tok.Val + result.Val = param + + return result +} + +// blockParams : OPEN_BLOCK_PARAMS ID+ CLOSE_BLOCK_PARAMS +func (p *parser) parseBlockParams() []string { + var result []string + + // OPEN_BLOCK_PARAMS + tok := p.shift() + + // ID+ + for p.isID() { + result = append(result, p.shift().Val) + } + + if len(result) == 0 { + errExpected(lexer.TokenID, p.next()) + } + + // CLOSE_BLOCK_PARAMS + tok = p.shift() + if tok.Kind != lexer.TokenCloseBlockParams { + errExpected(lexer.TokenCloseBlockParams, tok) + } + + return result +} + +// helperName : path | dataName | STRING | NUMBER | BOOLEAN | UNDEFINED | NULL +func (p *parser) parseHelperName() ast.Node { + var result ast.Node + + tok := p.next() + + switch tok.Kind { + case lexer.TokenBoolean: + // BOOLEAN + p.shift() + result = ast.NewBooleanLiteral(tok.Pos, tok.Line, (tok.Val == "true"), tok.Val) + case lexer.TokenNumber: + // NUMBER + p.shift() + + val, isInt := 
parseNumber(tok) + result = ast.NewNumberLiteral(tok.Pos, tok.Line, val, isInt, tok.Val) + case lexer.TokenString: + // STRING + p.shift() + result = ast.NewStringLiteral(tok.Pos, tok.Line, tok.Val) + case lexer.TokenData: + // dataName + result = p.parseDataName() + default: + // path + result = p.parsePath(false) + } + + return result +} + +// parseNumber parses a number +func parseNumber(tok *lexer.Token) (result float64, isInt bool) { + var valInt int + var err error + + valInt, err = strconv.Atoi(tok.Val) + if err == nil { + isInt = true + + result = float64(valInt) + } else { + isInt = false + + result, err = strconv.ParseFloat(tok.Val, 64) + if err != nil { + errToken(tok, fmt.Sprintf("Failed to parse number: %s", tok.Val)) + } + } + + // named returned values + return +} + +// Returns true if next tokens represent a `helperName` +func (p *parser) isHelperName() bool { + switch p.next().Kind { + case lexer.TokenBoolean, lexer.TokenNumber, lexer.TokenString, lexer.TokenData, lexer.TokenID: + return true + } + + return false +} + +// partialName : helperName | sexpr +func (p *parser) parsePartialName() ast.Node { + return p.parseHelperNameOrSexpr() +} + +// dataName : DATA pathSegments +func (p *parser) parseDataName() *ast.PathExpression { + // DATA + p.shift() + + // pathSegments + return p.parsePath(true) +} + +// path : pathSegments +// pathSegments : pathSegments SEP ID +// | ID +func (p *parser) parsePath(data bool) *ast.PathExpression { + var tok *lexer.Token + + // ID + tok = p.shift() + if tok.Kind != lexer.TokenID { + errExpected(lexer.TokenID, tok) + } + + result := ast.NewPathExpression(tok.Pos, tok.Line, data) + result.Part(tok.Val) + + for p.isPathSep() { + // SEP + tok = p.shift() + result.Sep(tok.Val) + + // ID + tok = p.shift() + if tok.Kind != lexer.TokenID { + errExpected(lexer.TokenID, tok) + } + + result.Part(tok.Val) + + if len(result.Parts) > 0 { + switch tok.Val { + case "..", ".", "this": + errToken(tok, "Invalid path: "+result.Original) + } + } + } + + return result +} + +// Ensures there is token to parse at given index +func (p *parser) ensure(index int) { + if p.lexOver { + // nothing more to grab + return + } + + nb := index + 1 + + for len(p.tokens) < nb { + // fetch next token + tok := p.lex.NextToken() + + // queue it + p.tokens = append(p.tokens, &tok) + + if (tok.Kind == lexer.TokenEOF) || (tok.Kind == lexer.TokenError) { + p.lexOver = true + break + } + } +} + +// have returns true is there are a list given number of tokens to consume left +func (p *parser) have(nb int) bool { + p.ensure(nb - 1) + + return len(p.tokens) >= nb +} + +// nextAt returns next token at given index, without consuming it +func (p *parser) nextAt(index int) *lexer.Token { + p.ensure(index) + + return p.tokens[index] +} + +// next returns next token without consuming it +func (p *parser) next() *lexer.Token { + return p.nextAt(0) +} + +// shift returns next token and remove it from the tokens buffer +// +// Panics if next token is `TokenError` +func (p *parser) shift() *lexer.Token { + var result *lexer.Token + + p.ensure(0) + + result, p.tokens = p.tokens[0], p.tokens[1:] + + // check error token + if result.Kind == lexer.TokenError { + errToken(result, "Lexer error") + } + + return result +} + +// isToken returns true if next token is of given type +func (p *parser) isToken(kind lexer.TokenKind) bool { + return p.have(1) && p.next().Kind == kind +} + +// isSexpr returns true if next token starts a sexpr +func (p *parser) isSexpr() bool { + return 
p.isToken(lexer.TokenOpenSexpr) +} + +// isPathSep returns true if next token is a path separator +func (p *parser) isPathSep() bool { + return p.isToken(lexer.TokenSep) +} + +// isID returns true if next token is an ID +func (p *parser) isID() bool { + return p.isToken(lexer.TokenID) +} + +// isBlockParams returns true if next token starts a block params +func (p *parser) isBlockParams() bool { + return p.isToken(lexer.TokenOpenBlockParams) +} + +// isInverse returns true if next token starts an INVERSE sequence +func (p *parser) isInverse() bool { + return p.isToken(lexer.TokenInverse) +} + +// isOpenInverseChain returns true if next token is OPEN_INVERSE_CHAIN +func (p *parser) isOpenInverseChain() bool { + return p.isToken(lexer.TokenOpenInverseChain) +} diff --git a/vendor/github.com/aymerick/raymond/parser/parser_test.go b/vendor/github.com/aymerick/raymond/parser/parser_test.go new file mode 100644 index 0000000..0baafbc --- /dev/null +++ b/vendor/github.com/aymerick/raymond/parser/parser_test.go @@ -0,0 +1,200 @@ +package parser + +import ( + "fmt" + "regexp" + "testing" + + "github.com/aymerick/raymond/ast" + "github.com/aymerick/raymond/lexer" +) + +type parserTest struct { + name string + input string + output string +} + +var parserTests = []parserTest{ + // + // Next tests come from: + // https://github.com/wycats/handlebars.js/blob/master/spec/parser.js + // + {"parses simple mustaches (1)", `{{123}}`, "{{ NUMBER{123} [] }}\n"}, + {"parses simple mustaches (2)", `{{"foo"}}`, "{{ \"foo\" [] }}\n"}, + {"parses simple mustaches (3)", `{{false}}`, "{{ BOOLEAN{false} [] }}\n"}, + {"parses simple mustaches (4)", `{{true}}`, "{{ BOOLEAN{true} [] }}\n"}, + {"parses simple mustaches (5)", `{{foo}}`, "{{ PATH:foo [] }}\n"}, + {"parses simple mustaches (6)", `{{foo?}}`, "{{ PATH:foo? 
[] }}\n"}, + {"parses simple mustaches (7)", `{{foo_}}`, "{{ PATH:foo_ [] }}\n"}, + {"parses simple mustaches (8)", `{{foo-}}`, "{{ PATH:foo- [] }}\n"}, + {"parses simple mustaches (9)", `{{foo:}}`, "{{ PATH:foo: [] }}\n"}, + + {"parses simple mustaches with data", `{{@foo}}`, "{{ @PATH:foo [] }}\n"}, + {"parses simple mustaches with data paths", `{{@../foo}}`, "{{ @PATH:foo [] }}\n"}, + {"parses mustaches with paths", `{{foo/bar}}`, "{{ PATH:foo/bar [] }}\n"}, + {"parses mustaches with this/foo", `{{this/foo}}`, "{{ PATH:foo [] }}\n"}, + {"parses mustaches with - in a path", `{{foo-bar}}`, "{{ PATH:foo-bar [] }}\n"}, + {"parses mustaches with parameters", `{{foo bar}}`, "{{ PATH:foo [PATH:bar] }}\n"}, + {"parses mustaches with string parameters", `{{foo bar "baz" }}`, "{{ PATH:foo [PATH:bar, \"baz\"] }}\n"}, + {"parses mustaches with NUMBER parameters", `{{foo 1}}`, "{{ PATH:foo [NUMBER{1}] }}\n"}, + {"parses mustaches with BOOLEAN parameters (1)", `{{foo true}}`, "{{ PATH:foo [BOOLEAN{true}] }}\n"}, + {"parses mustaches with BOOLEAN parameters (2)", `{{foo false}}`, "{{ PATH:foo [BOOLEAN{false}] }}\n"}, + {"parses mustaches with DATA parameters", `{{foo @bar}}`, "{{ PATH:foo [@PATH:bar] }}\n"}, + + {"parses mustaches with hash arguments (01)", `{{foo bar=baz}}`, "{{ PATH:foo [] HASH{bar=PATH:baz} }}\n"}, + {"parses mustaches with hash arguments (02)", `{{foo bar=1}}`, "{{ PATH:foo [] HASH{bar=NUMBER{1}} }}\n"}, + {"parses mustaches with hash arguments (03)", `{{foo bar=true}}`, "{{ PATH:foo [] HASH{bar=BOOLEAN{true}} }}\n"}, + {"parses mustaches with hash arguments (04)", `{{foo bar=false}}`, "{{ PATH:foo [] HASH{bar=BOOLEAN{false}} }}\n"}, + {"parses mustaches with hash arguments (05)", `{{foo bar=@baz}}`, "{{ PATH:foo [] HASH{bar=@PATH:baz} }}\n"}, + {"parses mustaches with hash arguments (06)", `{{foo bar=baz bat=bam}}`, "{{ PATH:foo [] HASH{bar=PATH:baz, bat=PATH:bam} }}\n"}, + {"parses mustaches with hash arguments (07)", `{{foo bar=baz bat="bam"}}`, "{{ PATH:foo [] HASH{bar=PATH:baz, bat=\"bam\"} }}\n"}, + {"parses mustaches with hash arguments (08)", `{{foo bat='bam'}}`, "{{ PATH:foo [] HASH{bat=\"bam\"} }}\n"}, + {"parses mustaches with hash arguments (09)", `{{foo omg bar=baz bat="bam"}}`, "{{ PATH:foo [PATH:omg] HASH{bar=PATH:baz, bat=\"bam\"} }}\n"}, + {"parses mustaches with hash arguments (10)", `{{foo omg bar=baz bat="bam" baz=1}}`, "{{ PATH:foo [PATH:omg] HASH{bar=PATH:baz, bat=\"bam\", baz=NUMBER{1}} }}\n"}, + {"parses mustaches with hash arguments (11)", `{{foo omg bar=baz bat="bam" baz=true}}`, "{{ PATH:foo [PATH:omg] HASH{bar=PATH:baz, bat=\"bam\", baz=BOOLEAN{true}} }}\n"}, + {"parses mustaches with hash arguments (12)", `{{foo omg bar=baz bat="bam" baz=false}}`, "{{ PATH:foo [PATH:omg] HASH{bar=PATH:baz, bat=\"bam\", baz=BOOLEAN{false}} }}\n"}, + + {"parses contents followed by a mustache", `foo bar {{baz}}`, "CONTENT[ 'foo bar ' ]\n{{ PATH:baz [] }}\n"}, + + {"parses a partial (1)", `{{> foo }}`, "{{> PARTIAL:foo }}\n"}, + {"parses a partial (2)", `{{> "foo" }}`, "{{> PARTIAL:foo }}\n"}, + {"parses a partial (3)", `{{> 1 }}`, "{{> PARTIAL:1 }}\n"}, + {"parses a partial with context", `{{> foo bar}}`, "{{> PARTIAL:foo PATH:bar }}\n"}, + {"parses a partial with hash", `{{> foo bar=bat}}`, "{{> PARTIAL:foo HASH{bar=PATH:bat} }}\n"}, + {"parses a partial with context and hash", `{{> foo bar bat=baz}}`, "{{> PARTIAL:foo PATH:bar HASH{bat=PATH:baz} }}\n"}, + {"parses a partial with a complex name", `{{> shared/partial?.bar}}`, "{{> PARTIAL:shared/partial?.bar }}\n"}, + + 
{"parses a comment", `{{! this is a comment }}`, "{{! ' this is a comment ' }}\n"}, + {"parses a multi-line comment", "{{!\nthis is a multi-line comment\n}}", "{{! '\nthis is a multi-line comment\n' }}\n"}, + + {"parses an inverse section", `{{#foo}} bar {{^}} baz {{/foo}}`, "BLOCK:\n PATH:foo []\n PROGRAM:\n CONTENT[ ' bar ' ]\n {{^}}\n CONTENT[ ' baz ' ]\n"}, + {"parses an inverse (else-style) section", `{{#foo}} bar {{else}} baz {{/foo}}`, "BLOCK:\n PATH:foo []\n PROGRAM:\n CONTENT[ ' bar ' ]\n {{^}}\n CONTENT[ ' baz ' ]\n"}, + {"parses multiple inverse sections", `{{#foo}} bar {{else if bar}}{{else}} baz {{/foo}}`, "BLOCK:\n PATH:foo []\n PROGRAM:\n CONTENT[ ' bar ' ]\n {{^}}\n BLOCK:\n PATH:if [PATH:bar]\n PROGRAM:\n {{^}}\n CONTENT[ ' baz ' ]\n"}, + {"parses empty blocks", `{{#foo}}{{/foo}}`, "BLOCK:\n PATH:foo []\n PROGRAM:\n"}, + {"parses empty blocks with empty inverse section", `{{#foo}}{{^}}{{/foo}}`, "BLOCK:\n PATH:foo []\n PROGRAM:\n {{^}}\n"}, + {"parses empty blocks with empty inverse (else-style) section", `{{#foo}}{{else}}{{/foo}}`, "BLOCK:\n PATH:foo []\n PROGRAM:\n {{^}}\n"}, + {"parses non-empty blocks with empty inverse section", `{{#foo}} bar {{^}}{{/foo}}`, "BLOCK:\n PATH:foo []\n PROGRAM:\n CONTENT[ ' bar ' ]\n {{^}}\n"}, + {"parses non-empty blocks with empty inverse (else-style) section", `{{#foo}} bar {{else}}{{/foo}}`, "BLOCK:\n PATH:foo []\n PROGRAM:\n CONTENT[ ' bar ' ]\n {{^}}\n"}, + {"parses empty blocks with non-empty inverse section", `{{#foo}}{{^}} bar {{/foo}}`, "BLOCK:\n PATH:foo []\n PROGRAM:\n {{^}}\n CONTENT[ ' bar ' ]\n"}, + {"parses empty blocks with non-empty inverse (else-style) section", `{{#foo}}{{else}} bar {{/foo}}`, "BLOCK:\n PATH:foo []\n PROGRAM:\n {{^}}\n CONTENT[ ' bar ' ]\n"}, + {"parses a standalone inverse section", `{{^foo}}bar{{/foo}}`, "BLOCK:\n PATH:foo []\n {{^}}\n CONTENT[ 'bar' ]\n"}, + {"parses block with block params", `{{#foo as |bar baz|}}content{{/foo}}`, "BLOCK:\n PATH:foo []\n PROGRAM:\n BLOCK PARAMS: [ bar baz ]\n CONTENT[ 'content' ]\n"}, + {"parses inverse block with block params", `{{^foo as |bar baz|}}content{{/foo}}`, "BLOCK:\n PATH:foo []\n {{^}}\n BLOCK PARAMS: [ bar baz ]\n CONTENT[ 'content' ]\n"}, + {"parses chained inverse block with block params", `{{#foo}}{{else foo as |bar baz|}}content{{/foo}}`, "BLOCK:\n PATH:foo []\n PROGRAM:\n {{^}}\n BLOCK:\n PATH:foo []\n PROGRAM:\n BLOCK PARAMS: [ bar baz ]\n CONTENT[ 'content' ]\n"}, +} + +func TestParser(t *testing.T) { + t.Parallel() + + for _, test := range parserTests { + output := "" + + node, err := Parse(test.input) + if err == nil { + output = ast.Print(node) + } + + if (err != nil) || (test.output != output) { + t.Errorf("Test '%s' failed\ninput:\n\t'%s'\nexpected\n\t%q\ngot\n\t%q\nerror:\n\t%s", test.name, test.input, test.output, output, err) + } + } +} + +var parserErrorTests = []parserTest{ + {"lexer error", `{{! 
unclosed comment`, "Lexer error"}, + {"syntax error", `foo{{^}}`, "Syntax error"}, + + {"open raw block must be closed", `{{{{raw foo}} bar {{{{/raw}}}}`, "Expecting CloseRawBlock"}, + {"end raw block must be closed", `{{{{raw foo}}}} bar {{{{/raw}}`, "Expecting CloseRawBlock"}, + + {"raw block names must match (1)", `{{{{1}}}}{{foo}}{{{{/raw}}}}`, "1 doesn't match raw"}, + {"raw block names must match (2)", `{{{{raw}}}}{{foo}}{{{{/1}}}}`, "raw doesn't match 1"}, + {"raw block names must match (3)", `{{{{goodbyes}}}}test{{{{/hellos}}}}`, "goodbyes doesn't match hellos"}, + + {"open block must be closed", `{{#foo bar}}}{{/foo}}`, "Expecting Close"}, + {"end block must be closed", `{{#foo bar}}{{/foo}}}`, "Expecting Close"}, + {"an open block must have a end block", `{{#foo}}test`, "Expecting OpenEndBlock"}, + + {"block names must match (1)", `{{#1 bar}}{{/foo}}`, "1 doesn't match foo"}, + {"block names must match (2)", `{{#foo bar}}{{/1}}`, "foo doesn't match 1"}, + {"block names must match (3)", `{{#foo}}test{{/bar}}`, "foo doesn't match bar"}, + + {"an mustache must terminate with a close mustache", `{{foo}}}`, "Expecting Close"}, + {"an unescaped mustache must terminate with a close unescaped mustache", `{{{foo}}`, "Expecting CloseUnescaped"}, + + {"an partial must terminate with a close mustache", `{{> foo}}}`, "Expecting Close"}, + {"a subexpression must terminate with a close subexpression", `{{foo (false}}`, "Expecting CloseSexpr"}, + + {"raises on missing hash value (1)", `{{foo bar=}}`, "Parse error on line 1"}, + {"raises on missing hash value (2)", `{{foo bar=baz bim=}}`, "Parse error on line 1"}, + + {"block param must have at least one param", `{{#foo as ||}}content{{/foo}}`, "Expecting ID"}, + {"open block params must be closed", `{{#foo as |}}content{{/foo}}`, "Expecting ID"}, + + {"a path must start with an ID", `{{#/}}content{{/foo}}`, "Expecting ID"}, + {"a path must end with an ID", `{{foo/bar/}}`, "Expecting ID"}, + + // + // Next tests come from: + // https://github.com/wycats/handlebars.js/blob/master/spec/parser.js + // + {"throws on old inverse section", `{{else foo}}bar{{/foo}}`, ""}, + + {"raises if there's a parser error (1)", `foo{{^}}bar`, "Parse error on line 1"}, + {"raises if there's a parser error (2)", `{{foo}`, "Parse error on line 1"}, + {"raises if there's a parser error (3)", `{{foo &}}`, "Parse error on line 1"}, + {"raises if there's a parser error (4)", `{{#goodbyes}}{{/hellos}}`, "Parse error on line 1"}, + {"raises if there's a parser error (5)", `{{#goodbyes}}{{/hellos}}`, "goodbyes doesn't match hellos"}, + + {"should handle invalid paths (1)", `{{foo/../bar}}`, `Invalid path: foo/..`}, + {"should handle invalid paths (2)", `{{foo/./bar}}`, `Invalid path: foo/.`}, + {"should handle invalid paths (3)", `{{foo/this/bar}}`, `Invalid path: foo/this`}, + + {"knows how to report the correct line number in errors (1)", "hello\nmy\n{{foo}", "Parse error on line 3"}, + {"knows how to report the correct line number in errors (2)", "hello\n\nmy\n\n{{foo}", "Parse error on line 5"}, + + {"knows how to report the correct line number in errors when the first character is a newline", "\n\nhello\n\nmy\n\n{{foo}", "Parse error on line 7"}, +} + +func TestParserErrors(t *testing.T) { + t.Parallel() + + for _, test := range parserErrorTests { + node, err := Parse(test.input) + if err == nil { + output := ast.Print(node) + tokens := lexer.Collect(test.input) + + t.Errorf("Test '%s' failed - Error expected\ninput:\n\t'%s'\ngot\n\t%q\ntokens:\n\t%q", test.name, 
test.input, output, tokens) + } else if test.output != "" { + matched, errMatch := regexp.MatchString(regexp.QuoteMeta(test.output), fmt.Sprint(err)) + if errMatch != nil { + panic("Failed to match regexp") + } + + if !matched { + t.Errorf("Test '%s' failed - Incorrect error returned\ninput:\n\t'%s'\nexpected\n\t%q\ngot\n\t%q", test.name, test.input, test.output, err) + } + } + } +} + +// package example +func Example() { + source := "You know {{nothing}} John Snow" + + // parse template + program, err := Parse(source) + if err != nil { + panic(err) + } + + // print AST + output := ast.Print(program) + + fmt.Print(output) + // CONTENT[ 'You know ' ] + // {{ PATH:nothing [] }} + // CONTENT[ ' John Snow' ] +} diff --git a/vendor/github.com/aymerick/raymond/parser/whitespace.go b/vendor/github.com/aymerick/raymond/parser/whitespace.go new file mode 100644 index 0000000..8f8c2c4 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/parser/whitespace.go @@ -0,0 +1,360 @@ +package parser + +import ( + "regexp" + + "github.com/aymerick/raymond/ast" +) + +// whitespaceVisitor walks through the AST to perform whitespace control +// +// The logic was shamelessly borrowed from: +// https://github.com/wycats/handlebars.js/blob/master/lib/handlebars/compiler/whitespace-control.js +type whitespaceVisitor struct { + isRootSeen bool +} + +var ( + rTrimLeft = regexp.MustCompile(`^[ \t]*\r?\n?`) + rTrimLeftMultiple = regexp.MustCompile(`^\s+`) + + rTrimRight = regexp.MustCompile(`[ \t]+$`) + rTrimRightMultiple = regexp.MustCompile(`\s+$`) + + rPrevWhitespace = regexp.MustCompile(`\r?\n\s*?$`) + rPrevWhitespaceStart = regexp.MustCompile(`(^|\r?\n)\s*?$`) + + rNextWhitespace = regexp.MustCompile(`^\s*?\r?\n`) + rNextWhitespaceEnd = regexp.MustCompile(`^\s*?(\r?\n|$)`) + + rPartialIndent = regexp.MustCompile(`([ \t]+$)`) +) + +// newWhitespaceVisitor instanciates a new whitespaceVisitor +func newWhitespaceVisitor() *whitespaceVisitor { + return &whitespaceVisitor{} +} + +// processWhitespaces performs whitespace control on given AST +// +// WARNING: It must be called only once on AST. 
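//
// Illustrative note, not part of the upstream raymond source: parser.Parse is
// the single caller of this pass and runs it exactly once on the freshly built
// AST. Its visible effect mirrors handlebars.js standalone-line stripping; a
// rough sketch with a hypothetical template:
//
//	source:   "{{#if ok}}\n  yes\n{{/if}}\n"
//	rendered: "  yes\n"   // the standalone {{#if}} and {{/if}} lines are stripped
//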
+func processWhitespaces(node ast.Node) { + node.Accept(newWhitespaceVisitor()) +} + +func omitRightFirst(body []ast.Node, multiple bool) { + omitRight(body, -1, multiple) +} + +func omitRight(body []ast.Node, i int, multiple bool) { + if i+1 >= len(body) { + return + } + + current := body[i+1] + + node, ok := current.(*ast.ContentStatement) + if !ok { + return + } + + if !multiple && node.RightStripped { + return + } + + original := node.Value + + r := rTrimLeft + if multiple { + r = rTrimLeftMultiple + } + + node.Value = r.ReplaceAllString(node.Value, "") + + node.RightStripped = (original != node.Value) +} + +func omitLeftLast(body []ast.Node, multiple bool) { + omitLeft(body, len(body), multiple) +} + +func omitLeft(body []ast.Node, i int, multiple bool) bool { + if i-1 < 0 { + return false + } + + current := body[i-1] + + node, ok := current.(*ast.ContentStatement) + if !ok { + return false + } + + if !multiple && node.LeftStripped { + return false + } + + original := node.Value + + r := rTrimRight + if multiple { + r = rTrimRightMultiple + } + + node.Value = r.ReplaceAllString(node.Value, "") + + node.LeftStripped = (original != node.Value) + + return node.LeftStripped +} + +func isPrevWhitespace(body []ast.Node) bool { + return isPrevWhitespaceProgram(body, len(body), false) +} + +func isPrevWhitespaceProgram(body []ast.Node, i int, isRoot bool) bool { + if i < 1 { + return isRoot + } + + prev := body[i-1] + + if node, ok := prev.(*ast.ContentStatement); ok { + if (node.Value == "") && node.RightStripped { + // already stripped, so it may be an empty string not catched by regexp + return true + } + + r := rPrevWhitespaceStart + if (i > 1) || !isRoot { + r = rPrevWhitespace + } + + return r.MatchString(node.Value) + } + + return false +} + +func isNextWhitespace(body []ast.Node) bool { + return isNextWhitespaceProgram(body, -1, false) +} + +func isNextWhitespaceProgram(body []ast.Node, i int, isRoot bool) bool { + if i+1 >= len(body) { + return isRoot + } + + next := body[i+1] + + if node, ok := next.(*ast.ContentStatement); ok { + if (node.Value == "") && node.LeftStripped { + // already stripped, so it may be an empty string not catched by regexp + return true + } + + r := rNextWhitespaceEnd + if (i+2 > len(body)) || !isRoot { + r = rNextWhitespace + } + + return r.MatchString(node.Value) + } + + return false +} + +// +// Visitor interface +// + +func (v *whitespaceVisitor) VisitProgram(program *ast.Program) interface{} { + isRoot := !v.isRootSeen + v.isRootSeen = true + + body := program.Body + for i, current := range body { + strip, _ := current.Accept(v).(*ast.Strip) + if strip == nil { + continue + } + + _isPrevWhitespace := isPrevWhitespaceProgram(body, i, isRoot) + _isNextWhitespace := isNextWhitespaceProgram(body, i, isRoot) + + openStandalone := strip.OpenStandalone && _isPrevWhitespace + closeStandalone := strip.CloseStandalone && _isNextWhitespace + inlineStandalone := strip.InlineStandalone && _isPrevWhitespace && _isNextWhitespace + + if strip.Close { + omitRight(body, i, true) + } + + if strip.Open && (i > 0) { + omitLeft(body, i, true) + } + + if inlineStandalone { + omitRight(body, i, false) + + if omitLeft(body, i, false) { + // If we are on a standalone node, save the indent info for partials + if partial, ok := current.(*ast.PartialStatement); ok { + // Pull out the whitespace from the final line + if i > 0 { + if prevContent, ok := body[i-1].(*ast.ContentStatement); ok { + partial.Indent = rPartialIndent.FindString(prevContent.Original) + } + } + } + } + } + + if 
b, ok := current.(*ast.BlockStatement); ok { + if openStandalone { + prog := b.Program + if prog == nil { + prog = b.Inverse + } + + omitRightFirst(prog.Body, false) + + // Strip out the previous content node if it's whitespace only + omitLeft(body, i, false) + } + + if closeStandalone { + prog := b.Inverse + if prog == nil { + prog = b.Program + } + + // Always strip the next node + omitRight(body, i, false) + + omitLeftLast(prog.Body, false) + } + + } + } + + return nil +} + +func (v *whitespaceVisitor) VisitBlock(block *ast.BlockStatement) interface{} { + if block.Program != nil { + block.Program.Accept(v) + } + + if block.Inverse != nil { + block.Inverse.Accept(v) + } + + program := block.Program + inverse := block.Inverse + + if program == nil { + program = inverse + inverse = nil + } + + firstInverse := inverse + lastInverse := inverse + + if (inverse != nil) && inverse.Chained { + b, _ := inverse.Body[0].(*ast.BlockStatement) + firstInverse = b.Program + + for lastInverse.Chained { + b, _ := lastInverse.Body[len(lastInverse.Body)-1].(*ast.BlockStatement) + lastInverse = b.Program + } + } + + closeProg := firstInverse + if closeProg == nil { + closeProg = program + } + + strip := &ast.Strip{ + Open: (block.OpenStrip != nil) && block.OpenStrip.Open, + Close: (block.CloseStrip != nil) && block.CloseStrip.Close, + + OpenStandalone: isNextWhitespace(program.Body), + CloseStandalone: isPrevWhitespace(closeProg.Body), + } + + if (block.OpenStrip != nil) && block.OpenStrip.Close { + omitRightFirst(program.Body, true) + } + + if inverse != nil { + if block.InverseStrip != nil { + inverseStrip := block.InverseStrip + + if inverseStrip.Open { + omitLeftLast(program.Body, true) + } + + if inverseStrip.Close { + omitRightFirst(firstInverse.Body, true) + } + } + + if (block.CloseStrip != nil) && block.CloseStrip.Open { + omitLeftLast(lastInverse.Body, true) + } + + // Find standalone else statements + if isPrevWhitespace(program.Body) && isNextWhitespace(firstInverse.Body) { + omitLeftLast(program.Body, false) + + omitRightFirst(firstInverse.Body, false) + } + } else if (block.CloseStrip != nil) && block.CloseStrip.Open { + omitLeftLast(program.Body, true) + } + + return strip +} + +func (v *whitespaceVisitor) VisitMustache(mustache *ast.MustacheStatement) interface{} { + return mustache.Strip +} + +func _inlineStandalone(strip *ast.Strip) interface{} { + return &ast.Strip{ + Open: strip.Open, + Close: strip.Close, + InlineStandalone: true, + } +} + +func (v *whitespaceVisitor) VisitPartial(node *ast.PartialStatement) interface{} { + strip := node.Strip + if strip == nil { + strip = &ast.Strip{} + } + + return _inlineStandalone(strip) +} + +func (v *whitespaceVisitor) VisitComment(node *ast.CommentStatement) interface{} { + strip := node.Strip + if strip == nil { + strip = &ast.Strip{} + } + + return _inlineStandalone(strip) +} + +// NOOP +func (v *whitespaceVisitor) VisitContent(node *ast.ContentStatement) interface{} { return nil } +func (v *whitespaceVisitor) VisitExpression(node *ast.Expression) interface{} { return nil } +func (v *whitespaceVisitor) VisitSubExpression(node *ast.SubExpression) interface{} { return nil } +func (v *whitespaceVisitor) VisitPath(node *ast.PathExpression) interface{} { return nil } +func (v *whitespaceVisitor) VisitString(node *ast.StringLiteral) interface{} { return nil } +func (v *whitespaceVisitor) VisitBoolean(node *ast.BooleanLiteral) interface{} { return nil } +func (v *whitespaceVisitor) VisitNumber(node *ast.NumberLiteral) interface{} { return nil } +func 
(v *whitespaceVisitor) VisitHash(node *ast.Hash) interface{} { return nil } +func (v *whitespaceVisitor) VisitHashPair(node *ast.HashPair) interface{} { return nil } diff --git a/vendor/github.com/aymerick/raymond/partial.go b/vendor/github.com/aymerick/raymond/partial.go new file mode 100644 index 0000000..62d6bf1 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/partial.go @@ -0,0 +1,85 @@ +package raymond + +import ( + "fmt" + "sync" +) + +// partial represents a partial template +type partial struct { + name string + source string + tpl *Template +} + +// partials stores all global partials +var partials map[string]*partial + +// protects global partials +var partialsMutex sync.RWMutex + +func init() { + partials = make(map[string]*partial) +} + +// newPartial instanciates a new partial +func newPartial(name string, source string, tpl *Template) *partial { + return &partial{ + name: name, + source: source, + tpl: tpl, + } +} + +// RegisterPartial registers a global partial. That partial will be available to all templates. +func RegisterPartial(name string, source string) { + partialsMutex.Lock() + defer partialsMutex.Unlock() + + if partials[name] != nil { + panic(fmt.Errorf("Partial already registered: %s", name)) + } + + partials[name] = newPartial(name, source, nil) +} + +// RegisterPartials registers several global partials. Those partials will be available to all templates. +func RegisterPartials(partials map[string]string) { + for name, p := range partials { + RegisterPartial(name, p) + } +} + +// RegisterPartial registers a global partial with given parsed template. That partial will be available to all templates. +func RegisterPartialTemplate(name string, tpl *Template) { + partialsMutex.Lock() + defer partialsMutex.Unlock() + + if partials[name] != nil { + panic(fmt.Errorf("Partial already registered: %s", name)) + } + + partials[name] = newPartial(name, "", tpl) +} + +// findPartial finds a registered global partial +func findPartial(name string) *partial { + partialsMutex.RLock() + defer partialsMutex.RUnlock() + + return partials[name] +} + +// template returns parsed partial template +func (p *partial) template() (*Template, error) { + if p.tpl == nil { + var err error + + p.tpl, err = Parse(p.source) + if err != nil { + return nil, err + } + } + + return p.tpl, nil +} diff --git a/vendor/github.com/aymerick/raymond/raymond.go b/vendor/github.com/aymerick/raymond/raymond.go new file mode 100644 index 0000000..c6df6b3 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/raymond.go @@ -0,0 +1,28 @@ +// Package raymond provides handlebars evaluation +package raymond + +// Render parses a template and evaluates it with given context +// +// Note that this function call is not optimal as your template is parsed everytime you call it. You should use Parse() function instead. +func Render(source string, ctx interface{}) (string, error) { + // parse template + tpl, err := Parse(source) + if err != nil { + return "", err + } + + // renders template + str, err := tpl.Exec(ctx) + if err != nil { + return "", err + } + + return str, nil +} + +// MustRender parses a template and evaluates it with given context. It panics on error. +// +// Note that this function call is not optimal as your template is parsed everytime you call it. You should use Parse() function instead. 
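//
// Illustrative sketch, not part of the upstream raymond source: to avoid the
// re-parsing cost mentioned above, parse once and execute many times. The
// template string and the contexts slice are hypothetical:
//
//	tpl := MustParse("{{firstName}} {{lastName}}")
//	for _, ctx := range contexts {
//		fmt.Println(tpl.MustExec(ctx))
//	}
//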
+func MustRender(source string, ctx interface{}) string { + return MustParse(source).MustExec(ctx) +} diff --git a/vendor/github.com/aymerick/raymond/raymond.png b/vendor/github.com/aymerick/raymond/raymond.png new file mode 100644 index 0000000..6a7c942 Binary files /dev/null and b/vendor/github.com/aymerick/raymond/raymond.png differ diff --git a/vendor/github.com/aymerick/raymond/raymond_test.go b/vendor/github.com/aymerick/raymond/raymond_test.go new file mode 100644 index 0000000..bd34eeb --- /dev/null +++ b/vendor/github.com/aymerick/raymond/raymond_test.go @@ -0,0 +1,115 @@ +package raymond + +import "fmt" + +func Example() { + source := "

<h1>{{title}}</h1><p>{{body.content}}</p>
" + + ctx := map[string]interface{}{ + "title": "foo", + "body": map[string]string{"content": "bar"}, + } + + // parse template + tpl := MustParse(source) + + // evaluate template with context + output := tpl.MustExec(ctx) + + // alternatively, for one shots: + // output := MustRender(source, ctx) + + fmt.Print(output) + // Output:

<h1>foo</h1><p>bar</p>
+} + +func Example_struct() { + source := `
<div class="post">
  <h1>By {{fullName author}}</h1>
  <div class="body">{{body}}</div>

  <h1>Comments</h1>

  {{#each comments}}
  <h2>By {{fullName author}}</h2>
  <div class="body">{{body}}</div>
  {{/each}}
</div>
` + + type Person struct { + FirstName string + LastName string + } + + type Comment struct { + Author Person + Body string + } + + type Post struct { + Author Person + Body string + Comments []Comment + } + + ctx := Post{ + Person{"Jean", "Valjean"}, + "Life is difficult", + []Comment{ + Comment{ + Person{"Marcel", "Beliveau"}, + "LOL!", + }, + }, + } + + RegisterHelper("fullName", func(person Person) string { + return person.FirstName + " " + person.LastName + }) + + output := MustRender(source, ctx) + + fmt.Print(output) + // Output:
// <div class="post">
//   <h1>By Jean Valjean</h1>
//   <div class="body">Life is difficult</div>
//
//   <h1>Comments</h1>
//
//   <h2>By Marcel Beliveau</h2>
//   <div class="body">LOL!</div>
//
// </div>
+} + +func ExampleRender() { + tpl := "

<h1>{{title}}</h1><p>{{body.content}}</p>
" + + ctx := map[string]interface{}{ + "title": "foo", + "body": map[string]string{"content": "bar"}, + } + + // render template with context + output, err := Render(tpl, ctx) + if err != nil { + panic(err) + } + + fmt.Print(output) + // Output:

<h1>foo</h1><p>bar</p>
+} + +func ExampleMustRender() { + tpl := "

<h1>{{title}}</h1><p>{{body.content}}</p>
" + + ctx := map[string]interface{}{ + "title": "foo", + "body": map[string]string{"content": "bar"}, + } + + // render template with context + output := MustRender(tpl, ctx) + + fmt.Print(output) + // Output:

<h1>foo</h1><p>bar</p>
+} diff --git a/vendor/github.com/aymerick/raymond/string.go b/vendor/github.com/aymerick/raymond/string.go new file mode 100644 index 0000000..cd12bb2 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/string.go @@ -0,0 +1,84 @@ +package raymond + +import ( + "fmt" + "reflect" + "strconv" +) + +// SafeString represents a string that must not be escaped. +// +// A SafeString can be returned by helpers to disable escaping. +type SafeString string + +// IsSafeString returns true if argument is a SafeString +func isSafeString(value interface{}) bool { + if _, ok := value.(SafeString); ok { + return true + } + return false +} + +// Str returns string representation of any basic type value. +func Str(value interface{}) string { + return strValue(reflect.ValueOf(value)) +} + +// strValue returns string representation of a reflect.Value +func strValue(value reflect.Value) string { + result := "" + + ival, ok := printableValue(value) + if !ok { + panic(fmt.Errorf("Can't print value: %q", value)) + } + + val := reflect.ValueOf(ival) + + switch val.Kind() { + case reflect.Array, reflect.Slice: + for i := 0; i < val.Len(); i++ { + result += strValue(val.Index(i)) + } + case reflect.Bool: + result = "false" + if val.Bool() { + result = "true" + } + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + result = fmt.Sprintf("%d", ival) + case reflect.Float32, reflect.Float64: + result = strconv.FormatFloat(val.Float(), 'f', -1, 64) + case reflect.Invalid: + result = "" + default: + result = fmt.Sprintf("%s", ival) + } + + return result +} + +// printableValue returns the, possibly indirected, interface value inside v that +// is best for a call to formatted printer. +// +// NOTE: borrowed from https://github.com/golang/go/tree/master/src/text/template/exec.go +func printableValue(v reflect.Value) (interface{}, bool) { + if v.Kind() == reflect.Ptr { + v, _ = indirect(v) // fmt.Fprint handles nil. 
+ } + if !v.IsValid() { + return "", true + } + + if !v.Type().Implements(errorType) && !v.Type().Implements(fmtStringerType) { + if v.CanAddr() && (reflect.PtrTo(v.Type()).Implements(errorType) || reflect.PtrTo(v.Type()).Implements(fmtStringerType)) { + v = v.Addr() + } else { + switch v.Kind() { + case reflect.Chan, reflect.Func: + return nil, false + } + } + } + return v.Interface(), true +} diff --git a/vendor/github.com/aymerick/raymond/string_test.go b/vendor/github.com/aymerick/raymond/string_test.go new file mode 100644 index 0000000..eaec630 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/string_test.go @@ -0,0 +1,59 @@ +package raymond + +import ( + "fmt" + "testing" +) + +type strTest struct { + name string + input interface{} + output string +} + +var strTests = []strTest{ + {"String", "foo", "foo"}, + {"Boolean true", true, "true"}, + {"Boolean false", false, "false"}, + {"Integer", 25, "25"}, + {"Float", 25.75, "25.75"}, + {"Nil", nil, ""}, + {"[]string", []string{"foo", "bar"}, "foobar"}, + {"[]interface{} (strings)", []interface{}{"foo", "bar"}, "foobar"}, + {"[]Boolean", []bool{true, false}, "truefalse"}, +} + +func TestStr(t *testing.T) { + t.Parallel() + + for _, test := range strTests { + if res := Str(test.input); res != test.output { + t.Errorf("Failed to stringify: %s\nexpected:\n\t'%s'got:\n\t%q", test.name, test.output, res) + } + } +} + +func ExampleStr() { + output := Str(3) + " foos are " + Str(true) + " and " + Str(-1.25) + " bars are " + Str(false) + "\n" + output += "But you know '" + Str(nil) + "' John Snow\n" + output += "map: " + Str(map[string]string{"foo": "bar"}) + "\n" + output += "array: " + Str([]interface{}{true, 10, "foo", 5, "bar"}) + + fmt.Println(output) + // Output: 3 foos are true and -1.25 bars are false + // But you know '' John Snow + // map: map[foo:bar] + // array: true10foo5bar +} + +func ExampleSafeString() { + RegisterHelper("em", func() SafeString { + return SafeString("FOO BAR") + }) + + tpl := MustParse("{{em}}") + + result := tpl.MustExec(nil) + fmt.Print(result) + // Output: FOO BAR +} diff --git a/vendor/github.com/aymerick/raymond/template.go b/vendor/github.com/aymerick/raymond/template.go new file mode 100644 index 0000000..4d4cc8f --- /dev/null +++ b/vendor/github.com/aymerick/raymond/template.go @@ -0,0 +1,249 @@ +package raymond + +import ( + "fmt" + "io/ioutil" + "reflect" + "runtime" + "sync" + + "github.com/aymerick/raymond/ast" + "github.com/aymerick/raymond/parser" +) + +// Template represents a handlebars template. +type Template struct { + source string + program *ast.Program + helpers map[string]reflect.Value + partials map[string]*partial + mutex sync.RWMutex // protects helpers and partials + +} + +// newTemplate instanciate a new template without parsing it +func newTemplate(source string) *Template { + return &Template{ + source: source, + helpers: make(map[string]reflect.Value), + partials: make(map[string]*partial), + } +} + +// Parse instanciates a template by parsing given source. +func Parse(source string) (*Template, error) { + tpl := newTemplate(source) + + // parse template + if err := tpl.parse(); err != nil { + return nil, err + } + + return tpl, nil +} + +// MustParse instanciates a template by parsing given source. It panics on error. +func MustParse(source string) *Template { + result, err := Parse(source) + if err != nil { + panic(err) + } + return result +} + +// ParseFile reads given file and returns parsed template. 
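//
// Illustrative sketch, not part of the upstream raymond source; the file path
// and context below are hypothetical:
//
//	tpl, err := ParseFile("templates/post.hbs")
//	if err != nil {
//		log.Fatal(err)
//	}
//	fmt.Println(tpl.MustExec(map[string]string{"title": "foo"}))
//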
+func ParseFile(filePath string) (*Template, error) { + b, err := ioutil.ReadFile(filePath) + if err != nil { + return nil, err + } + + return Parse(string(b)) +} + +// parse parses the template +// +// It can be called several times, the parsing will be done only once. +func (tpl *Template) parse() error { + if tpl.program == nil { + var err error + + tpl.program, err = parser.Parse(tpl.source) + if err != nil { + return err + } + } + + return nil +} + +// Clone returns a copy of that template. +func (tpl *Template) Clone() *Template { + result := newTemplate(tpl.source) + + result.program = tpl.program + + tpl.mutex.RLock() + defer tpl.mutex.RUnlock() + + for name, helper := range tpl.helpers { + result.RegisterHelper(name, helper.Interface()) + } + + for name, partial := range tpl.partials { + result.addPartial(name, partial.source, partial.tpl) + } + + return result +} + +func (tpl *Template) findHelper(name string) reflect.Value { + tpl.mutex.RLock() + defer tpl.mutex.RUnlock() + + return tpl.helpers[name] +} + +// RegisterHelper registers a helper for that template. +func (tpl *Template) RegisterHelper(name string, helper interface{}) { + tpl.mutex.Lock() + defer tpl.mutex.Unlock() + + if tpl.helpers[name] != zero { + panic(fmt.Sprintf("Helper %s already registered", name)) + } + + val := reflect.ValueOf(helper) + ensureValidHelper(name, val) + + tpl.helpers[name] = val +} + +// RegisterHelpers registers several helpers for that template. +func (tpl *Template) RegisterHelpers(helpers map[string]interface{}) { + for name, helper := range helpers { + tpl.RegisterHelper(name, helper) + } +} + +func (tpl *Template) addPartial(name string, source string, template *Template) { + tpl.mutex.Lock() + defer tpl.mutex.Unlock() + + if tpl.partials[name] != nil { + panic(fmt.Sprintf("Partial %s already registered", name)) + } + + tpl.partials[name] = newPartial(name, source, template) +} + +func (tpl *Template) findPartial(name string) *partial { + tpl.mutex.RLock() + defer tpl.mutex.RUnlock() + + return tpl.partials[name] +} + +// RegisterPartial registers a partial for that template. +func (tpl *Template) RegisterPartial(name string, source string) { + tpl.addPartial(name, source, nil) +} + +// RegisterPartials registers several partials for that template. +func (tpl *Template) RegisterPartials(partials map[string]string) { + for name, partial := range partials { + tpl.RegisterPartial(name, partial) + } +} + +// RegisterPartialFile reads given file and registers its content as a partial with given name. +func (tpl *Template) RegisterPartialFile(filePath string, name string) error { + b, err := ioutil.ReadFile(filePath) + if err != nil { + return err + } + + tpl.RegisterPartial(name, string(b)) + + return nil +} + +// RegisterPartialFiles reads several files and registers them as partials, the filename base is used as the partial name. +func (tpl *Template) RegisterPartialFiles(filePaths ...string) error { + if len(filePaths) == 0 { + return nil + } + + for _, filePath := range filePaths { + name := fileBase(filePath) + + if err := tpl.RegisterPartialFile(filePath, name); err != nil { + return err + } + } + + return nil +} + +// RegisterPartial registers an already parsed partial for that template. +func (tpl *Template) RegisterPartialTemplate(name string, template *Template) { + tpl.addPartial(name, "", template) +} + +// Exec evaluates template with given context. 
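//
// Illustrative sketch, not part of the upstream raymond source, combining the
// registration methods above with Exec; the template, helper and context are
// hypothetical:
//
//	tpl := MustParse("{{greet name}}, you know {{nothing}}")
//	tpl.RegisterHelper("greet", func(name string) string {
//		return "Hello " + name
//	})
//	out, err := tpl.Exec(map[string]string{"name": "John", "nothing": "nothing"})
//	if err != nil {
//		log.Fatal(err)
//	}
//	fmt.Print(out) // Hello John, you know nothing
//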
+func (tpl *Template) Exec(ctx interface{}) (result string, err error) { + return tpl.ExecWith(ctx, nil) +} + +// MustExec evaluates template with given context. It panics on error. +func (tpl *Template) MustExec(ctx interface{}) string { + result, err := tpl.Exec(ctx) + if err != nil { + panic(err) + } + return result +} + +// ExecWith evaluates template with given context and private data frame. +func (tpl *Template) ExecWith(ctx interface{}, privData *DataFrame) (result string, err error) { + defer errRecover(&err) + + // parses template if necessary + err = tpl.parse() + if err != nil { + return + } + + // setup visitor + v := newEvalVisitor(tpl, ctx, privData) + + // visit AST + result, _ = tpl.program.Accept(v).(string) + + // named return values + return +} + +// errRecover recovers evaluation panic +func errRecover(errp *error) { + e := recover() + if e != nil { + switch err := e.(type) { + case runtime.Error: + panic(e) + case error: + *errp = err + default: + panic(e) + } + } +} + +// PrintAST returns string representation of parsed template. +func (tpl *Template) PrintAST() string { + if err := tpl.parse(); err != nil { + return fmt.Sprintf("PARSER ERROR: %s", err) + } + + return ast.Print(tpl.program) +} diff --git a/vendor/github.com/aymerick/raymond/template_test.go b/vendor/github.com/aymerick/raymond/template_test.go new file mode 100644 index 0000000..c194b28 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/template_test.go @@ -0,0 +1,166 @@ +package raymond + +import ( + "fmt" + "testing" +) + +var sourceBasic = `
<div class="entry">
+  <h1>{{title}}</h1>
+  <div class="body">
+    {{body}}
+  </div>
+</div>
+`
+
+var basicAST = `CONTENT[ '<div class="entry">
+  <h1>' ]
+{{ PATH:title [] }}
+CONTENT[ '</h1>
+  <div class="body">
+    ' ]
+{{ PATH:body [] }}
+CONTENT[ '
+  </div>
+</div>
' ] +` + +func TestNewTemplate(t *testing.T) { + t.Parallel() + + tpl := newTemplate(sourceBasic) + if tpl.source != sourceBasic { + t.Errorf("Failed to instantiate template") + } +} + +func TestParse(t *testing.T) { + t.Parallel() + + tpl, err := Parse(sourceBasic) + if err != nil || (tpl.source != sourceBasic) { + t.Errorf("Failed to parse template") + } + + if str := tpl.PrintAST(); str != basicAST { + t.Errorf("Template parsing incorrect: %s", str) + } +} + +func TestClone(t *testing.T) { + t.Parallel() + + sourcePartial := `I am a {{wat}} partial` + sourcePartial2 := `Partial for the {{wat}}` + + tpl := MustParse(sourceBasic) + tpl.RegisterPartial("p", sourcePartial) + + if (len(tpl.partials) != 1) || (tpl.partials["p"] == nil) { + t.Errorf("What?") + } + + cloned := tpl.Clone() + + if (len(cloned.partials) != 1) || (cloned.partials["p"] == nil) { + t.Errorf("Template partials must be cloned") + } + + cloned.RegisterPartial("p2", sourcePartial2) + + if (len(cloned.partials) != 2) || (cloned.partials["p"] == nil) || (cloned.partials["p2"] == nil) { + t.Errorf("Failed to register a partial on cloned template") + } + + if (len(tpl.partials) != 1) || (tpl.partials["p"] == nil) { + t.Errorf("Modification of a cloned template MUST NOT affect original template") + } +} + +func ExampleTemplate_Exec() { + source := "

<h1>{{title}}</h1><p>{{body.content}}</p>

" + + ctx := map[string]interface{}{ + "title": "foo", + "body": map[string]string{"content": "bar"}, + } + + // parse template + tpl := MustParse(source) + + // evaluate template with context + output, err := tpl.Exec(ctx) + if err != nil { + panic(err) + } + + fmt.Print(output) + // Output:

<h1>foo</h1><p>bar</p>

+} + +func ExampleTemplate_MustExec() { + source := "

<h1>{{title}}</h1><p>{{body.content}}</p>

" + + ctx := map[string]interface{}{ + "title": "foo", + "body": map[string]string{"content": "bar"}, + } + + // parse template + tpl := MustParse(source) + + // evaluate template with context + output := tpl.MustExec(ctx) + + fmt.Print(output) + // Output:

<h1>foo</h1><p>bar</p>

+} + +func ExampleTemplate_ExecWith() { + source := "

<h1>{{title}}</h1><p>{{#body}}{{content}} and {{@baz.bat}}{{/body}}</p>

" + + ctx := map[string]interface{}{ + "title": "foo", + "body": map[string]string{"content": "bar"}, + } + + // parse template + tpl := MustParse(source) + + // computes private data frame + frame := NewDataFrame() + frame.Set("baz", map[string]string{"bat": "unicorns"}) + + // evaluate template + output, err := tpl.ExecWith(ctx, frame) + if err != nil { + panic(err) + } + + fmt.Print(output) + // Output:

<h1>foo</h1><p>bar and unicorns</p>

+} + +func ExampleTemplate_PrintAST() { + source := "

<h1>{{title}}</h1><p>{{#body}}{{content}} and {{@baz.bat}}{{/body}}</p>

" + + // parse template + tpl := MustParse(source) + + // print AST + output := tpl.PrintAST() + + fmt.Print(output) + // Output: CONTENT[ '

<h1>' ] + // {{ PATH:title [] }} + // CONTENT[ '</h1><p>
' ] + // BLOCK: + // PATH:body [] + // PROGRAM: + // {{ PATH:content [] + // }} + // CONTENT[ ' and ' ] + // {{ @PATH:baz/bat [] + // }} + // CONTENT[ '</p>
' ] + // +} diff --git a/vendor/github.com/aymerick/raymond/utils.go b/vendor/github.com/aymerick/raymond/utils.go new file mode 100644 index 0000000..3deaaf3 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/utils.go @@ -0,0 +1,85 @@ +package raymond + +import ( + "path" + "reflect" +) + +// indirect returns the item at the end of indirection, and a bool to indicate if it's nil. +// We indirect through pointers and empty interfaces (only) because +// non-empty interfaces have methods we might need. +// +// NOTE: borrowed from https://github.com/golang/go/tree/master/src/text/template/exec.go +func indirect(v reflect.Value) (rv reflect.Value, isNil bool) { + for ; v.Kind() == reflect.Ptr || v.Kind() == reflect.Interface; v = v.Elem() { + if v.IsNil() { + return v, true + } + if v.Kind() == reflect.Interface && v.NumMethod() > 0 { + break + } + } + return v, false +} + +// IsTrue returns true if obj is a truthy value. +func IsTrue(obj interface{}) bool { + thruth, ok := isTrueValue(reflect.ValueOf(obj)) + if !ok { + return false + } + return thruth +} + +// isTrueValue reports whether the value is 'true', in the sense of not the zero of its type, +// and whether the value has a meaningful truth value +// +// NOTE: borrowed from https://github.com/golang/go/tree/master/src/text/template/exec.go +func isTrueValue(val reflect.Value) (truth, ok bool) { + if !val.IsValid() { + // Something like var x interface{}, never set. It's a form of nil. + return false, true + } + switch val.Kind() { + case reflect.Array, reflect.Map, reflect.Slice, reflect.String: + truth = val.Len() > 0 + case reflect.Bool: + truth = val.Bool() + case reflect.Complex64, reflect.Complex128: + truth = val.Complex() != 0 + case reflect.Chan, reflect.Func, reflect.Ptr, reflect.Interface: + truth = !val.IsNil() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + truth = val.Int() != 0 + case reflect.Float32, reflect.Float64: + truth = val.Float() != 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + truth = val.Uint() != 0 + case reflect.Struct: + truth = true // Struct values are always true. + default: + return + } + return truth, true +} + +// canBeNil reports whether an untyped nil can be assigned to the type. See reflect.Zero. 
+// +// NOTE: borrowed from https://github.com/golang/go/tree/master/src/text/template/exec.go +func canBeNil(typ reflect.Type) bool { + switch typ.Kind() { + case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: + return true + } + return false +} + +// fileBase returns base file name +// +// example: /foo/bar/baz.png => baz +func fileBase(filePath string) string { + fileName := path.Base(filePath) + fileExt := path.Ext(filePath) + + return fileName[:len(fileName)-len(fileExt)] +} diff --git a/vendor/github.com/aymerick/raymond/utils_test.go b/vendor/github.com/aymerick/raymond/utils_test.go new file mode 100644 index 0000000..cecac37 --- /dev/null +++ b/vendor/github.com/aymerick/raymond/utils_test.go @@ -0,0 +1,51 @@ +package raymond + +import "fmt" + +func ExampleIsTrue() { + output := "Empty array: " + Str(IsTrue([0]string{})) + "\n" + output += "Non empty array: " + Str(IsTrue([1]string{"foo"})) + "\n" + + output += "Empty slice: " + Str(IsTrue([]string{})) + "\n" + output += "Non empty slice: " + Str(IsTrue([]string{"foo"})) + "\n" + + output += "Empty map: " + Str(IsTrue(map[string]string{})) + "\n" + output += "Non empty map: " + Str(IsTrue(map[string]string{"foo": "bar"})) + "\n" + + output += "Empty string: " + Str(IsTrue("")) + "\n" + output += "Non empty string: " + Str(IsTrue("foo")) + "\n" + + output += "true bool: " + Str(IsTrue(true)) + "\n" + output += "false bool: " + Str(IsTrue(false)) + "\n" + + output += "0 integer: " + Str(IsTrue(0)) + "\n" + output += "positive integer: " + Str(IsTrue(10)) + "\n" + output += "negative integer: " + Str(IsTrue(-10)) + "\n" + + output += "0 float: " + Str(IsTrue(0.0)) + "\n" + output += "positive float: " + Str(IsTrue(10.0)) + "\n" + output += "negative integer: " + Str(IsTrue(-10.0)) + "\n" + + output += "struct: " + Str(IsTrue(struct{}{})) + "\n" + output += "nil: " + Str(IsTrue(nil)) + "\n" + + fmt.Println(output) + // Output: Empty array: false + // Non empty array: true + // Empty slice: false + // Non empty slice: true + // Empty map: false + // Non empty map: true + // Empty string: false + // Non empty string: true + // true bool: true + // false bool: false + // 0 integer: false + // positive integer: true + // negative integer: true + // 0 float: false + // positive float: true + // negative integer: true + // struct: true + // nil: false +} diff --git a/vendor/github.com/drone/drone-go/drone/client.go b/vendor/github.com/drone/drone-go/drone/client.go new file mode 100644 index 0000000..e65f254 --- /dev/null +++ b/vendor/github.com/drone/drone-go/drone/client.go @@ -0,0 +1,342 @@ +package drone + +//go:generate mockery -all +//go:generate mv mocks/Client.go mocks/client.go + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "io/ioutil" + "net/http" + "net/url" + + "golang.org/x/oauth2" +) + +const ( + pathSelf = "%s/api/user" + pathFeed = "%s/api/user/feed" + pathRepos = "%s/api/user/repos" + pathRepo = "%s/api/repos/%s/%s" + pathEncrypt = "%s/api/repos/%s/%s/encrypt" + pathBuilds = "%s/api/repos/%s/%s/builds" + pathBuild = "%s/api/repos/%s/%s/builds/%v" + pathJob = "%s/api/repos/%s/%s/builds/%d/%d" + pathLog = "%s/api/repos/%s/%s/logs/%d/%d" + pathKey = "%s/api/repos/%s/%s/key" + pathNodes = "%s/api/nodes" + pathNode = "%s/api/nodes/%d" + pathUsers = "%s/api/users" + pathUser = "%s/api/users/%s" +) + +type client struct { + client *http.Client + base string // base url +} + +// NewClient returns a client at the specified url. 
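For orientation, a hedged sketch of constructing the client defined just below and making a first call; the server URL and token are placeholders:

```go
package main

import (
	"fmt"

	"github.com/drone/drone-go/drone"
)

func main() {
	// NewClientToken wires an oauth2 transport that sends the token with
	// every request; plain NewClient would use http.DefaultClient instead.
	client := drone.NewClientToken("http://drone.example.com", "my-secret-token")

	user, err := client.Self()
	if err != nil {
		panic(err)
	}
	fmt.Printf("authenticated as %s <%s>\n", user.Login, user.Email)
}
```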
+func NewClient(uri string) Client { + return &client{http.DefaultClient, uri} +} + +// NewClientToken returns a client at the specified url that +// authenticates all outbound requests with the given token. +func NewClientToken(uri, token string) Client { + config := new(oauth2.Config) + auther := config.Client(oauth2.NoContext, &oauth2.Token{AccessToken: token}) + return &client{auther, uri} +} + +// SetClient sets the default http client. This should be +// used in conjunction with golang.org/x/oauth2 to +// authenticate requests to the Drone server. +func (c *client) SetClient(client *http.Client) { + c.client = client +} + +// Self returns the currently authenticated user. +func (c *client) Self() (*User, error) { + out := new(User) + uri := fmt.Sprintf(pathSelf, c.base) + err := c.get(uri, out) + return out, err +} + +// User returns a user by login. +func (c *client) User(login string) (*User, error) { + out := new(User) + uri := fmt.Sprintf(pathUser, c.base, login) + err := c.get(uri, out) + return out, err +} + +// UserList returns a list of all registered users. +func (c *client) UserList() ([]*User, error) { + out := make([]*User, 0) + uri := fmt.Sprintf(pathUsers, c.base) + err := c.get(uri, &out) + return out, err +} + +// UserPost creates a new user account. +func (c *client) UserPost(in *User) (*User, error) { + out := new(User) + uri := fmt.Sprintf(pathUsers, c.base) + err := c.post(uri, in, out) + return out, err +} + +// UserPatch updates a user account. +func (c *client) UserPatch(in *User) (*User, error) { + out := new(User) + uri := fmt.Sprintf(pathUser, c.base, in.Login) + err := c.patch(uri, in, out) + return out, err +} + +// UserDel deletes a user account. +func (c *client) UserDel(login string) error { + uri := fmt.Sprintf(pathUser, c.base, login) + err := c.delete(uri) + return err +} + +// UserFeed returns the user's activity feed. +func (c *client) UserFeed() ([]*Activity, error) { + out := make([]*Activity, 0) + uri := fmt.Sprintf(pathFeed, c.base) + err := c.get(uri, &out) + return out, err +} + +// Repo returns a repository by name. +func (c *client) Repo(owner string, name string) (*Repo, error) { + out := new(Repo) + uri := fmt.Sprintf(pathRepo, c.base, owner, name) + err := c.get(uri, out) + return out, err +} + +// RepoList returns a list of all repositories to which +// the user has explicit access in the host system. +func (c *client) RepoList() ([]*Repo, error) { + out := make([]*Repo, 0) + uri := fmt.Sprintf(pathRepos, c.base) + err := c.get(uri, &out) + return out, err +} + +// RepoPost activates a repository. +func (c *client) RepoPost(owner string, name string) (*Repo, error) { + out := new(Repo) + uri := fmt.Sprintf(pathRepo, c.base, owner, name) + err := c.post(uri, nil, out) + return out, err +} + +// RepoPatch updates a repository. +func (c *client) RepoPatch(in *Repo) (*Repo, error) { + out := new(Repo) + uri := fmt.Sprintf(pathRepo, c.base, in.Owner, in.Name) + err := c.patch(uri, in, out) + return out, err +} + +// RepoDel deletes a repository. +func (c *client) RepoDel(owner, name string) error { + uri := fmt.Sprintf(pathRepo, c.base, owner, name) + err := c.delete(uri) + return err +} + +// RepoKey returns a repository public key. 
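Building on the repository calls above, a hedged example of listing and activating repositories; the host, owner, and repository names are made up:

```go
package main

import (
	"fmt"

	"github.com/drone/drone-go/drone"
)

func main() {
	client := drone.NewClientToken("http://drone.example.com", "my-secret-token")

	// RepoList returns only repositories the authenticated user can access.
	repos, err := client.RepoList()
	if err != nil {
		panic(err)
	}
	for _, repo := range repos {
		fmt.Printf("%s (private: %v)\n", repo.FullName, repo.IsPrivate)
	}

	// RepoPost activates a single repository (owner and name are placeholders).
	if _, err := client.RepoPost("octocat", "hello-world"); err != nil {
		panic(err)
	}
}
```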
+func (c *client) RepoKey(owner, name string) (*Key, error) { + out := new(Key) + uri := fmt.Sprintf(pathKey, c.base, owner, name) + rc, err := c.stream(uri, "GET", nil, nil) + if err != nil { + return nil, err + } + defer rc.Close() + raw, _ := ioutil.ReadAll(rc) + out.Public = string(raw) + return out, err +} + +// Build returns a repository build by number. +func (c *client) Build(owner, name string, num int) (*Build, error) { + out := new(Build) + uri := fmt.Sprintf(pathBuild, c.base, owner, name, num) + err := c.get(uri, out) + return out, err +} + +// Build returns the latest repository build by branch. +func (c *client) BuildLast(owner, name, branch string) (*Build, error) { + out := new(Build) + uri := fmt.Sprintf(pathBuild, c.base, owner, name, "latest") + if len(branch) != 0 { + uri += "?branch=" + branch + } + err := c.get(uri, out) + return out, err +} + +// BuildList returns a list of recent builds for the +// the specified repository. +func (c *client) BuildList(owner, name string) ([]*Build, error) { + out := make([]*Build, 0) + uri := fmt.Sprintf(pathBuilds, c.base, owner, name) + err := c.get(uri, &out) + return out, err +} + +// BuildStart re-starts a stopped build. +func (c *client) BuildStart(owner, name string, num int) (*Build, error) { + out := new(Build) + uri := fmt.Sprintf(pathBuild, c.base, owner, name, num) + err := c.post(uri, nil, out) + return out, err +} + +// BuildStop cancels the running job. +func (c *client) BuildStop(owner, name string, num, job int) error { + uri := fmt.Sprintf(pathJob, c.base, owner, name, num, job) + err := c.delete(uri) + return err +} + +// BuildFork re-starts a stopped build with a new build number, +// preserving the prior history. +func (c *client) BuildFork(owner, name string, num int) (*Build, error) { + out := new(Build) + uri := fmt.Sprintf(pathBuild+"?fork=true", c.base, owner, name, num) + err := c.post(uri, nil, out) + return out, err +} + +// BuildLogs returns the build logs for the specified job. +func (c *client) BuildLogs(owner, name string, num, job int) (io.ReadCloser, error) { + uri := fmt.Sprintf(pathLog, c.base, owner, name, num, job) + return c.stream(uri, "GET", nil, nil) +} + +// Node returns a node by id. +func (c *client) Node(id int64) (*Node, error) { + out := new(Node) + uri := fmt.Sprintf(pathNode, c.base, id) + err := c.get(uri, out) + return out, err +} + +// NodeList returns a list of all registered worker nodes. +func (c *client) NodeList() ([]*Node, error) { + out := make([]*Node, 0) + uri := fmt.Sprintf(pathNodes, c.base) + err := c.get(uri, &out) + return out, err +} + +// NodePost registers a new worker node. +func (c *client) NodePost(in *Node) (*Node, error) { + out := new(Node) + uri := fmt.Sprintf(pathNodes, c.base) + err := c.post(uri, in, out) + return out, err +} + +// NodeDel deletes a worker node. +func (c *client) NodeDel(id int64) error { + uri := fmt.Sprintf(pathNode, c.base, id) + err := c.delete(uri) + return err +} + +// +// http request helper functions +// + +// helper function for making an http GET request. +func (c *client) get(rawurl string, out interface{}) error { + return c.do(rawurl, "GET", nil, out) +} + +// helper function for making an http POST request. +func (c *client) post(rawurl string, in, out interface{}) error { + return c.do(rawurl, "POST", in, out) +} + +// helper function for making an http PUT request. 
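A hedged sketch tying together the build and log calls above; the owner, repository, and the assumption that the job of interest is number 1 are all illustrative:

```go
package main

import (
	"fmt"
	"io"
	"os"

	"github.com/drone/drone-go/drone"
)

func main() {
	client := drone.NewClientToken("http://drone.example.com", "my-secret-token")

	// An empty branch selects the repository's default branch.
	build, err := client.BuildLast("octocat", "hello-world", "")
	if err != nil {
		panic(err)
	}
	fmt.Printf("build #%d finished with status %q\n", build.Number, build.Status)

	// Stream the logs of job 1 of that build to stdout.
	rc, err := client.BuildLogs("octocat", "hello-world", build.Number, 1)
	if err != nil {
		panic(err)
	}
	defer rc.Close()
	if _, err := io.Copy(os.Stdout, rc); err != nil {
		panic(err)
	}
}
```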
+func (c *client) put(rawurl string, in, out interface{}) error { + return c.do(rawurl, "PUT", in, out) +} + +// helper function for making an http PATCH request. +func (c *client) patch(rawurl string, in, out interface{}) error { + return c.do(rawurl, "PATCH", in, out) +} + +// helper function for making an http DELETE request. +func (c *client) delete(rawurl string) error { + return c.do(rawurl, "DELETE", nil, nil) +} + +// helper function to make an http request +func (c *client) do(rawurl, method string, in, out interface{}) error { + // executes the http request and returns the body as + // and io.ReadCloser + body, err := c.stream(rawurl, method, in, out) + if err != nil { + return err + } + defer body.Close() + + // if a json response is expected, parse and return + // the json response. + if out != nil { + return json.NewDecoder(body).Decode(out) + } + return nil +} + +// helper function to stream an http request +func (c *client) stream(rawurl, method string, in, out interface{}) (io.ReadCloser, error) { + uri, err := url.Parse(rawurl) + if err != nil { + return nil, err + } + + // if we are posting or putting data, we need to + // write it to the body of the request. + var buf io.ReadWriter + if in != nil { + buf = new(bytes.Buffer) + err := json.NewEncoder(buf).Encode(in) + if err != nil { + return nil, err + } + } + + // creates a new http request to bitbucket. + req, err := http.NewRequest(method, uri.String(), buf) + if err != nil { + return nil, err + } + if in != nil { + req.Header.Set("Content-Type", "application/json") + } + + resp, err := c.client.Do(req) + if err != nil { + return nil, err + } + if resp.StatusCode > http.StatusPartialContent { + defer resp.Body.Close() + out, _ := ioutil.ReadAll(resp.Body) + return nil, fmt.Errorf(string(out)) + } + return resp.Body, nil +} diff --git a/vendor/github.com/drone/drone-go/drone/const.go b/vendor/github.com/drone/drone-go/drone/const.go new file mode 100644 index 0000000..9490931 --- /dev/null +++ b/vendor/github.com/drone/drone-go/drone/const.go @@ -0,0 +1,44 @@ +package drone + +const ( + EventPush = "push" + EventPull = "pull_request" + EventTag = "tag" + EventDeploy = "deployment" +) + +const ( + StatusSkipped = "skipped" + StatusPending = "pending" + StatusRunning = "running" + StatusSuccess = "success" + StatusFailure = "failure" + StatusKilled = "killed" + StatusError = "error" +) + +const ( + Freebsd_386 uint = iota + Freebsd_amd64 + Freebsd_arm + Linux_386 + Linux_amd64 + Linux_arm + Linux_arm64 + Solaris_amd64 + Windows_386 + Windows_amd64 +) + +var Archs = map[string]uint{ + "freebsd_386": Freebsd_386, + "freebsd_amd64": Freebsd_amd64, + "freebsd_arm": Freebsd_arm, + "linux_386": Linux_386, + "linux_amd64": Linux_amd64, + "linux_arm": Linux_arm, + "linux_arm64": Linux_arm64, + "solaris_amd64": Solaris_amd64, + "windows_386": Windows_386, + "windows_amd64": Windows_amd64, +} diff --git a/vendor/github.com/drone/drone-go/drone/interface.go b/vendor/github.com/drone/drone-go/drone/interface.go new file mode 100644 index 0000000..bdaeefa --- /dev/null +++ b/vendor/github.com/drone/drone-go/drone/interface.go @@ -0,0 +1,81 @@ +package drone + +import "io" + +type Client interface { + // Self returns the currently authenticated user. + Self() (*User, error) + + // User returns a user by login. + User(string) (*User, error) + + // UserList returns a list of all registered users. + UserList() ([]*User, error) + + // UserPost creates a new user account. 
+ UserPost(*User) (*User, error) + + // UserPatch updates a user account. + UserPatch(*User) (*User, error) + + // UserDel deletes a user account. + UserDel(string) error + + // UserFeed returns the user's activity feed. + UserFeed() ([]*Activity, error) + + // Repo returns a repository by name. + Repo(string, string) (*Repo, error) + + // RepoList returns a list of all repositories to which + // the user has explicit access in the host system. + RepoList() ([]*Repo, error) + + // RepoPost activates a repository. + RepoPost(string, string) (*Repo, error) + + // RepoPatch updates a repository. + RepoPatch(*Repo) (*Repo, error) + + // RepoDel deletes a repository. + RepoDel(string, string) error + + // RepoKey returns a repository public key. + RepoKey(string, string) (*Key, error) + + // Build returns a repository build by number. + Build(string, string, int) (*Build, error) + + // BuildLast returns the latest repository build by branch. + // An empty branch will result in the default branch. + BuildLast(string, string, string) (*Build, error) + + // BuildList returns a list of recent builds for the + // the specified repository. + BuildList(string, string) ([]*Build, error) + + // BuildStart re-starts a stopped build. + BuildStart(string, string, int) (*Build, error) + + // BuildStop stops the specified running job for given build. + BuildStop(string, string, int, int) error + + // BuildFork re-starts a stopped build with a new build number, + // preserving the prior history. + BuildFork(string, string, int) (*Build, error) + + // BuildLogs returns the build logs for the specified job. + BuildLogs(string, string, int, int) (io.ReadCloser, error) + + // Node returns a node by id. + Node(int64) (*Node, error) + + // NodeList returns a list of all registered worker nodes. + NodeList() ([]*Node, error) + + // NodePost registers a new worker node. + NodePost(*Node) (*Node, error) + + // NodeDel deletes a worker node. 
+ NodeDel(int64) error +} diff --git a/vendor/github.com/drone/drone-go/drone/mocks/client.go b/vendor/github.com/drone/drone-go/drone/mocks/client.go new file mode 100644 index 0000000..7c739b5 --- /dev/null +++ b/vendor/github.com/drone/drone-go/drone/mocks/client.go @@ -0,0 +1,435 @@ +package mocks + +import "github.com/drone/drone-go/drone" +import "github.com/stretchr/testify/mock" + +import "io" + +type Client struct { + mock.Mock +} + +func (_m *Client) Self() (*drone.User, error) { + ret := _m.Called() + + var r0 *drone.User + if rf, ok := ret.Get(0).(func() *drone.User); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*drone.User) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} +func (_m *Client) User(_a0 string) (*drone.User, error) { + ret := _m.Called(_a0) + + var r0 *drone.User + if rf, ok := ret.Get(0).(func(string) *drone.User); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*drone.User) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} +func (_m *Client) UserList() ([]*drone.User, error) { + ret := _m.Called() + + var r0 []*drone.User + if rf, ok := ret.Get(0).(func() []*drone.User); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*drone.User) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} +func (_m *Client) UserPost(_a0 *drone.User) (*drone.User, error) { + ret := _m.Called(_a0) + + var r0 *drone.User + if rf, ok := ret.Get(0).(func(*drone.User) *drone.User); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*drone.User) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*drone.User) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} +func (_m *Client) UserPatch(_a0 *drone.User) (*drone.User, error) { + ret := _m.Called(_a0) + + var r0 *drone.User + if rf, ok := ret.Get(0).(func(*drone.User) *drone.User); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*drone.User) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*drone.User) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} +func (_m *Client) UserDel(_a0 string) error { + ret := _m.Called(_a0) + + var r0 error + if rf, ok := ret.Get(0).(func(string) error); ok { + r0 = rf(_a0) + } else { + r0 = ret.Error(0) + } + + return r0 +} +func (_m *Client) UserFeed() ([]*drone.Activity, error) { + ret := _m.Called() + + var r0 []*drone.Activity + if rf, ok := ret.Get(0).(func() []*drone.Activity); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*drone.Activity) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} +func (_m *Client) Repo(_a0 string, _a1 string) (*drone.Repo, error) { + ret := _m.Called(_a0, _a1) + + var r0 *drone.Repo + if rf, ok := ret.Get(0).(func(string, string) *drone.Repo); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*drone.Repo) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(string, string) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} +func (_m *Client) RepoList() ([]*drone.Repo, error) { + ret := _m.Called() + + var r0 
[]*drone.Repo + if rf, ok := ret.Get(0).(func() []*drone.Repo); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*drone.Repo) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} +func (_m *Client) RepoPost(_a0 string, _a1 string) (*drone.Repo, error) { + ret := _m.Called(_a0, _a1) + + var r0 *drone.Repo + if rf, ok := ret.Get(0).(func(string, string) *drone.Repo); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*drone.Repo) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(string, string) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} +func (_m *Client) RepoPatch(_a0 *drone.Repo) (*drone.Repo, error) { + ret := _m.Called(_a0) + + var r0 *drone.Repo + if rf, ok := ret.Get(0).(func(*drone.Repo) *drone.Repo); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*drone.Repo) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*drone.Repo) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} +func (_m *Client) RepoDel(_a0 string, _a1 string) error { + ret := _m.Called(_a0, _a1) + + var r0 error + if rf, ok := ret.Get(0).(func(string, string) error); ok { + r0 = rf(_a0, _a1) + } else { + r0 = ret.Error(0) + } + + return r0 +} +func (_m *Client) RepoKey(_a0 string, _a1 string) (*drone.Key, error) { + ret := _m.Called(_a0, _a1) + + var r0 *drone.Key + if rf, ok := ret.Get(0).(func(string, string) *drone.Key); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*drone.Key) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(string, string) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} +func (_m *Client) Build(_a0 string, _a1 string, _a2 int) (*drone.Build, error) { + ret := _m.Called(_a0, _a1, _a2) + + var r0 *drone.Build + if rf, ok := ret.Get(0).(func(string, string, int) *drone.Build); ok { + r0 = rf(_a0, _a1, _a2) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*drone.Build) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(string, string, int) error); ok { + r1 = rf(_a0, _a1, _a2) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} +func (_m *Client) BuildList(_a0 string, _a1 string) ([]*drone.Build, error) { + ret := _m.Called(_a0, _a1) + + var r0 []*drone.Build + if rf, ok := ret.Get(0).(func(string, string) []*drone.Build); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*drone.Build) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(string, string) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} +func (_m *Client) BuildStart(_a0 string, _a1 string, _a2 int) (*drone.Build, error) { + ret := _m.Called(_a0, _a1, _a2) + + var r0 *drone.Build + if rf, ok := ret.Get(0).(func(string, string, int) *drone.Build); ok { + r0 = rf(_a0, _a1, _a2) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*drone.Build) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(string, string, int) error); ok { + r1 = rf(_a0, _a1, _a2) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} +func (_m *Client) BuildStop(_a0 string, _a1 string, _a2 int, _a3 int) error { + ret := _m.Called(_a0, _a1, _a2, _a3) + + var r0 error + if rf, ok := ret.Get(0).(func(string, string, int, int) error); ok { + r0 = rf(_a0, _a1, _a2, _a3) + } else { + r0 = ret.Error(0) + } + + return r0 +} +func (_m 
*Client) BuildLogs(_a0 string, _a1 string, _a2 int, _a3 int) (io.ReadCloser, error) { + ret := _m.Called(_a0, _a1, _a2, _a3) + + var r0 io.ReadCloser + if rf, ok := ret.Get(0).(func(string, string, int, int) io.ReadCloser); ok { + r0 = rf(_a0, _a1, _a2, _a3) + } else { + r0 = ret.Get(0).(io.ReadCloser) + } + + var r1 error + if rf, ok := ret.Get(1).(func(string, string, int, int) error); ok { + r1 = rf(_a0, _a1, _a2, _a3) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} +func (_m *Client) Node(_a0 int64) (*drone.Node, error) { + ret := _m.Called(_a0) + + var r0 *drone.Node + if rf, ok := ret.Get(0).(func(int64) *drone.Node); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*drone.Node) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(int64) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} +func (_m *Client) NodeList() ([]*drone.Node, error) { + ret := _m.Called() + + var r0 []*drone.Node + if rf, ok := ret.Get(0).(func() []*drone.Node); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*drone.Node) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} +func (_m *Client) NodePost(_a0 *drone.Node) (*drone.Node, error) { + ret := _m.Called(_a0) + + var r0 *drone.Node + if rf, ok := ret.Get(0).(func(*drone.Node) *drone.Node); ok { + r0 = rf(_a0) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*drone.Node) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(*drone.Node) error); ok { + r1 = rf(_a0) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} +func (_m *Client) NodeDel(_a0 int64) error { + ret := _m.Called(_a0) + + var r0 error + if rf, ok := ret.Get(0).(func(int64) error); ok { + r0 = rf(_a0) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/vendor/github.com/drone/drone-go/drone/types.go b/vendor/github.com/drone/drone-go/drone/types.go new file mode 100644 index 0000000..16efa3c --- /dev/null +++ b/vendor/github.com/drone/drone-go/drone/types.go @@ -0,0 +1,157 @@ +package drone + +// User represents a user account. +type User struct { + ID int64 `json:"id""` + Login string `json:"login"` + Email string `json:"email"` + Avatar string `json:"avatar_url"` + Active bool `json:"active"` + Admin bool `json:"admin"` +} + +// Repo represents a version control repository. +type Repo struct { + ID int64 `json:"id"` + Owner string `json:"owner"` + Name string `json:"name"` + FullName string `json:"full_name"` + Avatar string `json:"avatar_url"` + Link string `json:"link_url"` + Clone string `json:"clone_url"` + Branch string `json:"default_branch"` + Timeout int64 `json:"timeout"` + IsPrivate bool `json:"private"` + IsTrusted bool `json:"trusted"` + AllowPull bool `json:"allow_pr"` + AllowPush bool `json:"allow_push"` + AllowDeploy bool `json:"allow_deploys"` + AllowTag bool `json:"allow_tags"` +} + +// Build represents the process of compiling and testing a changeset, +// typically triggered by the remote system (ie GitHub). 
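Ahead of the `Build` definition that follows, a hedged illustration of how these types surface as JSON through their struct tags; the field values are invented. This is the same lowercase shape that the template and plugin packages later consume:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/drone/drone-go/drone"
)

func main() {
	build := drone.Build{
		Number: 7,
		Event:  drone.EventPush,
		Status: drone.StatusSuccess,
		Branch: "master",
	}

	// The json tags expose lowercase names such as "number" and "status",
	// which is why templates reference build.number rather than Build.Number.
	out, err := json.MarshalIndent(&build, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}
```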
+type Build struct { + ID int64 `json:"id"` + Number int `json:"number"` + Event string `json:"event"` + Status string `json:"status"` + Enqueued int64 `json:"enqueued_at"` + Created int64 `json:"created_at"` + Started int64 `json:"started_at"` + Finished int64 `json:"finished_at"` + Commit string `json:"commit"` + Branch string `json:"branch"` + Ref string `json:"ref"` + Refspec string `json:"refspec"` + Remote string `json:"remote"` + Title string `json:"title"` + Message string `json:"message"` + Timestamp int64 `json:"timestamp"` + Author string `json:"author"` + Avatar string `json:"author_avatar"` + Email string `json:"author_email"` + Link string `json:"link_url"` +} + +// Job represents a single job that is being executed as part +// of a Build. +type Job struct { + ID int64 `json:"id"` + Number int `json:"number"` + Status string `json:"status"` + ExitCode int `json:"exit_code"` + Enqueued int64 `json:"enqueued_at"` + Started int64 `json:"started_at"` + Finished int64 `json:"finished_at"` + + Environment map[string]string `json:"environment"` +} + +// Activity represents a build activity. It combines the +// build details with summary Repository information. +type Activity struct { + Owner string `json:"owner"` + Name string `json:"name"` + FullName string `json:"full_name"` + Number int `json:"number"` + Event string `json:"event"` + Status string `json:"status"` + Enqueued int64 `json:"enqueued_at"` + Created int64 `json:"created_at"` + Started int64 `json:"started_at"` + Finished int64 `json:"finished_at"` + Commit string `json:"commit"` + Branch string `json:"branch"` + Ref string `json:"ref"` + Refspec string `json:"refspec"` + Remote string `json:"remote"` + Title string `json:"title"` + Message string `json:"message"` + Timestamp int64 `json:"timestamp"` + Author string `json:"author"` + Avatar string `json:"author_avatar"` + Email string `json:"author_email"` + Link string `json:"link_url"` +} + +// Repo represents a local or remote Docker daemon that is +// repsonsible for running jobs. +type Node struct { + ID int64 `json:"id"` + Addr string `json:"address"` + Arch string `json:"architecture"` + Cert string `json:"cert"` + Key string `json:"key"` + CA string `json:"ca"` +} + +// Key represents an RSA public and private key assigned to a +// repository. It may be used to clone private repositories, or as +// a deployment key. +type Key struct { + Public string `json:"public"` + Private string `json:"private"` +} + +// Netrc defines a default .netrc file that should be injected +// into the build environment. It will be used to authorize access +// to https resources, such as git+https clones. +type Netrc struct { + Machine string `json:"machine"` + Login string `json:"login"` + Password string `json:"user"` +} + +type System struct { + Version string `json:"version"` + Link string `json:"link_url"` + Plugins []string `json:"plugins"` + Globals []string `json:"globals"` +} + +// Workspace defines the build's workspace inside the +// container. This helps the plugin locate the source +// code directory. +type Workspace struct { + Root string `json:"root"` + Path string `json:"path"` + + Netrc *Netrc `json:"netrc"` + Keys *Key `json:"keys"` +} + +// Payload defines the full payload send to plugins. 
+type Payload struct { + Yaml string `json:"config"` + YamlEnc string `json:"secret"` + Repo *Repo `json:"repo"` + Build *Build `json:"build"` + BuildLast *Build `json:"build_last"` + Job *Job `json:"job"` + Netrc *Netrc `json:"netrc"` + Keys *Key `json:"keys"` + System *System `json:"system"` + Workspace *Workspace `json:"workspace"` + Vargs interface{} `json:"vargs"` +} diff --git a/vendor/github.com/drone/drone-go/drone/types_json.go b/vendor/github.com/drone/drone-go/drone/types_json.go new file mode 100644 index 0000000..8d7f663 --- /dev/null +++ b/vendor/github.com/drone/drone-go/drone/types_json.go @@ -0,0 +1,114 @@ +package drone + +import ( + "encoding/json" + "strconv" +) + +// StringSlice representes a string or an array of strings. +type StringSlice struct { + parts []string +} + +func (e *StringSlice) UnmarshalJSON(b []byte) error { + if len(b) == 0 { + return nil + } + + p := make([]string, 0, 1) + if err := json.Unmarshal(b, &p); err != nil { + var s string + if err := json.Unmarshal(b, &s); err != nil { + return err + } + p = append(p, s) + } + + e.parts = p + return nil +} + +func (e *StringSlice) Len() int { + if e == nil { + return 0 + } + return len(e.parts) +} + +func (e *StringSlice) Slice() []string { + if e == nil { + return nil + } + return e.parts +} + +// StringInt representes a string or an integer value. +type StringInt struct { + value string +} + +func (e *StringInt) UnmarshalJSON(b []byte) error { + var num int + err := json.Unmarshal(b, &num) + if err == nil { + e.value = strconv.Itoa(num) + return nil + } + return json.Unmarshal(b, &e.value) +} + +func (e StringInt) String() string { + return e.value +} + +// StringMap representes a string or a map of strings. +// StringMap representes a string or a map of strings. +type StringMap struct { + parts map[string]string +} + +func (e *StringMap) UnmarshalJSON(b []byte) error { + if len(b) == 0 { + return nil + } + + p := map[string]string{} + if err := json.Unmarshal(b, &p); err != nil { + var s string + if err := json.Unmarshal(b, &s); err != nil { + return err + } + p[""] = s + } + + e.parts = p + return nil +} + +func (e *StringMap) Len() int { + if e == nil { + return 0 + } + return len(e.parts) +} + +func (e *StringMap) String() (str string) { + if e == nil { + return + } + for _, val := range e.parts { + return val // returns the first string value + } + return +} + +func (e *StringMap) Map() map[string]string { + if e == nil { + return nil + } + return e.parts +} + +func NewStringMap(parts map[string]string) StringMap { + return StringMap{parts} +} diff --git a/vendor/github.com/drone/drone-go/plugin/plugin.go b/vendor/github.com/drone/drone-go/plugin/plugin.go new file mode 100644 index 0000000..488ad3f --- /dev/null +++ b/vendor/github.com/drone/drone-go/plugin/plugin.go @@ -0,0 +1,131 @@ +package plugin + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "os" +) + +var Stdin *ParamSet + +func init() { + // defaults to stdin + Stdin = NewParamSet(os.Stdin) + + // check for params after the double dash + // in the command string + for i, argv := range os.Args { + if argv == "--" { + arg := os.Args[i+1] + buf := bytes.NewBufferString(arg) + Stdin = NewParamSet(buf) + break + } + } +} + +// this init function is deprecated, but I'm keeping it +// around just in case it proves useful in the future. 
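Ahead of the deprecated initializer below, a hedged sketch of the usual plugin entrypoint built on the package-level `Param` and `MustParse` helpers defined later in this file. The parameter names mirror the `Payload` JSON keys above; the `vargs` shape is invented:

```go
package main

import (
	"fmt"

	"github.com/drone/drone-go/drone"
	"github.com/drone/drone-go/plugin"
)

func main() {
	var (
		repo  = drone.Repo{}
		build = drone.Build{}
		vargs = struct {
			Webhook string `json:"webhook"` // hypothetical plugin-specific setting
		}{}
	)

	// Each name must match a top-level key of the JSON payload delivered
	// on stdin (or after the "--" argument on the command line).
	plugin.Param("repo", &repo)
	plugin.Param("build", &build)
	plugin.Param("vargs", &vargs)
	plugin.MustParse()

	fmt.Printf("%s #%d on %s finished: %s -> %s\n",
		repo.FullName, build.Number, build.Branch, build.Status, vargs.Webhook)
}
```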
+func deprecated_init() { + // if piping from stdin we can just exit + // and use the default Stdin value + stat, _ := os.Stdin.Stat() + if (stat.Mode() & os.ModeCharDevice) == 0 { + return + } + + // check for params after the double dash + // in the command string + for i, argv := range os.Args { + if argv == "--" { + arg := os.Args[i+1] + buf := bytes.NewBufferString(arg) + Stdin = NewParamSet(buf) + return + } + } + + // else use the first variable in the list + if len(os.Args) > 1 { + buf := bytes.NewBufferString(os.Args[1]) + Stdin = NewParamSet(buf) + } +} + +type ParamSet struct { + reader io.Reader + params map[string]interface{} +} + +func NewParamSet(reader io.Reader) *ParamSet { + var p = new(ParamSet) + p.reader = reader + p.params = map[string]interface{}{} + return p +} + +// Param defines a parameter with the specified name. +func (p ParamSet) Param(name string, value interface{}) { + p.params[name] = value +} + +// Parse parses parameter definitions from the map. +func (p ParamSet) Parse() error { + raw := map[string]json.RawMessage{} + err := json.NewDecoder(p.reader).Decode(&raw) + if err != nil { + return err + } + + for key, val := range p.params { + data, ok := raw[key] + if !ok { + continue + } + err := json.Unmarshal(data, val) + if err != nil { + return fmt.Errorf("Unable to unarmshal %s. %s", key, err) + } + } + + return nil +} + +// Unmarshal parses the JSON payload from the command +// arguments and unmarshal into a value pointed to by v. +func (p ParamSet) Unmarshal(v interface{}) error { + return json.NewDecoder(p.reader).Decode(v) +} + +// Param defines a parameter with the specified name. +func Param(name string, value interface{}) { + Stdin.Param(name, value) +} + +// Parse parses parameter definitions from the map. +func Parse() error { + return Stdin.Parse() +} + +// Unmarshal parses the JSON payload from the command +// arguments and unmarshal into a value pointed to by v. +func Unmarshal(v interface{}) error { + return Stdin.Unmarshal(v) +} + +// Unmarshal parses the JSON payload from the command +// arguments and unmarshal into a value pointed to by v. +func MustUnmarshal(v interface{}) error { + return Stdin.Unmarshal(v) +} + +// MustParse parses parameter definitions from the map +// and panics if there is a parsing error. +func MustParse() { + err := Parse() + if err != nil { + panic(err) + } +} diff --git a/vendor/github.com/drone/drone-go/plugin/plugin_test.go b/vendor/github.com/drone/drone-go/plugin/plugin_test.go new file mode 100644 index 0000000..b0736c3 --- /dev/null +++ b/vendor/github.com/drone/drone-go/plugin/plugin_test.go @@ -0,0 +1 @@ +package plugin diff --git a/vendor/github.com/drone/drone-go/template/template.go b/vendor/github.com/drone/drone-go/template/template.go new file mode 100644 index 0000000..5f89805 --- /dev/null +++ b/vendor/github.com/drone/drone-go/template/template.go @@ -0,0 +1,124 @@ +package template + +import ( + "encoding/json" + "fmt" + "io" + "strings" + "time" + "unicode" + + "github.com/aymerick/raymond" + "github.com/drone/drone-go/drone" +) + +func init() { + raymond.RegisterHelpers(funcs) +} + +// Render parses and executes a template, returning the results +// in string format. +func Render(template string, playload *drone.Payload) (string, error) { + return raymond.Render(template, normalize(playload)) +} + +// RenderTrim parses and executes a template, returning the results +// in string format. 
The result is trimmed to remove left and right +// padding and newlines that may be added unintentially in the +// template markup. +func RenderTrim(template string, playload *drone.Payload) (string, error) { + out, err := Render(template, playload) + return strings.Trim(out, " \n"), err +} + +// Write parses and executes a template, writing the results to +// writer w. +func Write(w io.Writer, template string, playload *drone.Payload) error { + out, err := Render(template, playload) + if err != nil { + return err + } + _, err = io.WriteString(w, out) + return err +} + +var funcs = map[string]interface{}{ + "uppercase": strings.ToUpper, + "lowercase": strings.ToLower, + "uppercasefirst": uppercaseFirst, + "duration": toDuration, + "datetime": toDatetime, + "success": isSuccess, + "failure": isFailure, +} + +// uppercaseFirst is a helper function that takes a string and capitalizes +// the first letter. +func uppercaseFirst(s string) string { + a := []rune(s) + a[0] = unicode.ToUpper(a[0]) + s = string(a) + return s +} + +// toDuration is a helper function that calculates a duration for a start and +// and end time, and returns the duration in string format. +func toDuration(started, finished float64) string { + dur := time.Duration(int64(finished - started)) + return fmt.Sprintln(dur) +} + +// toDatetime is a helper function that converts a unix timestamp to a string. +func toDatetime(timestamp float64, layout, zone string) string { + if len(zone) == 0 { + return time.Unix(int64(timestamp), 0).Format(layout) + } + loc, err := time.LoadLocation(zone) + if err != nil { + fmt.Printf("Error parsing timezone, defaulting to local timezone. %s\n", err) + return time.Unix(int64(timestamp), 0).Local().Format(layout) + } + return time.Unix(int64(timestamp), 0).In(loc).Format(layout) +} + +// isSuccess is a helper function that executes a block iff the status +// is success, else it executes the else block. +func isSuccess(conditional bool, options *raymond.Options) string { + if !conditional { + return options.Inverse() + } + + switch options.ParamStr(0) { + case "success": + return options.Fn() + default: + return options.Inverse() + } +} + +// isFailure is a helper function that executes a block iff the status +// is a form of failure, else it executes the else block. +func isFailure(conditional bool, options *raymond.Options) string { + if !conditional { + return options.Inverse() + } + + switch options.ParamStr(0) { + case "failure", "error", "killed": + return options.Fn() + default: + return options.Inverse() + } +} + +// normalize takes a Go representation of the variable, marshals +// to json and then unmarshals to a map[string]interfacce{}. 
This +// is important because it let's us use the JSON variable names +// in our template +func normalize(in interface{}) map[string]interface{} { + data, _ := json.Marshal(in) // we own the types, so this should never fail + + out := map[string]interface{}{} + json.Unmarshal(data, &out) + return out +} diff --git a/vendor/github.com/drone/drone-go/template/template_test.go b/vendor/github.com/drone/drone-go/template/template_test.go new file mode 100644 index 0000000..000c56b --- /dev/null +++ b/vendor/github.com/drone/drone-go/template/template_test.go @@ -0,0 +1,92 @@ +package template + +import ( + "testing" + + "github.com/drone/drone-go/drone" +) + +var tests = []struct { + Payload *drone.Payload + Input string + Output string +}{ + { + &drone.Payload{Build: &drone.Build{Number: 1}}, + "build #{{build.number}}", + "build #1", + }, + { + &drone.Payload{Build: &drone.Build{Status: drone.StatusSuccess}}, + "{{uppercase build.status}}", + "SUCCESS", + }, + { + &drone.Payload{Build: &drone.Build{Author: "Octocat"}}, + "{{lowercase build.author}}", + "octocat", + }, + { + &drone.Payload{Build: &drone.Build{Status: drone.StatusSuccess}}, + "{{uppercasefirst build.status}}", + "Success", + }, + { + &drone.Payload{Build: &drone.Build{ + Started: 1448127131, + Finished: 1448127505}, + }, + "{{ duration build.started_at build.finished_at }}", + "374ns", + }, + { + &drone.Payload{Build: &drone.Build{Finished: 1448127505}}, + `finished at {{ datetime build.finished_at "3:04PM" "UTC" }}`, + "finished at 5:38PM", + }, + // verify the success if / else block works + { + &drone.Payload{Build: &drone.Build{Status: drone.StatusSuccess}}, + "{{#success build.status}}SUCCESS{{/success}}", + "SUCCESS", + }, + { + &drone.Payload{Build: &drone.Build{Status: drone.StatusFailure}}, + "{{#success build.status}}SUCCESS{{/success}}", + "", + }, + { + &drone.Payload{Build: &drone.Build{Status: drone.StatusFailure}}, + "{{#success build.status}}SUCCESS{{else}}NOT SUCCESS{{/success}}", + "NOT SUCCESS", + }, + // verify the failure if / else block works + { + &drone.Payload{Build: &drone.Build{Status: drone.StatusFailure}}, + "{{#failure build.status}}FAILURE{{/failure}}", + "FAILURE", + }, + { + &drone.Payload{Build: &drone.Build{Status: drone.StatusSuccess}}, + "{{#failure build.status}}FAILURE{{/failure}}", + "", + }, + { + &drone.Payload{Build: &drone.Build{Status: drone.StatusSuccess}}, + "{{#failure build.status}}FAILURE{{else}}NOT FAILURE{{/failure}}", + "NOT FAILURE", + }, +} + +func TestTemplate(t *testing.T) { + + for _, test := range tests { + got, err := RenderTrim(test.Input, test.Payload) + if err != nil { + t.Errorf("Failed rendering template %q, got error %s.", test.Input, err) + } + if got != test.Output { + t.Errorf("Wanted rendered template %q, got %q", test.Output, got) + } + } +} diff --git a/vendor/golang.org/x/net/context/context.go b/vendor/golang.org/x/net/context/context.go new file mode 100644 index 0000000..e7ee376 --- /dev/null +++ b/vendor/golang.org/x/net/context/context.go @@ -0,0 +1,447 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package context defines the Context type, which carries deadlines, +// cancelation signals, and other request-scoped values across API boundaries +// and between processes. +// +// Incoming requests to a server should create a Context, and outgoing calls to +// servers should accept a Context. 
The chain of function calls between must +// propagate the Context, optionally replacing it with a modified copy created +// using WithDeadline, WithTimeout, WithCancel, or WithValue. +// +// Programs that use Contexts should follow these rules to keep interfaces +// consistent across packages and enable static analysis tools to check context +// propagation: +// +// Do not store Contexts inside a struct type; instead, pass a Context +// explicitly to each function that needs it. The Context should be the first +// parameter, typically named ctx: +// +// func DoSomething(ctx context.Context, arg Arg) error { +// // ... use ctx ... +// } +// +// Do not pass a nil Context, even if a function permits it. Pass context.TODO +// if you are unsure about which Context to use. +// +// Use context Values only for request-scoped data that transits processes and +// APIs, not for passing optional parameters to functions. +// +// The same Context may be passed to functions running in different goroutines; +// Contexts are safe for simultaneous use by multiple goroutines. +// +// See http://blog.golang.org/context for example code for a server that uses +// Contexts. +package context // import "golang.org/x/net/context" + +import ( + "errors" + "fmt" + "sync" + "time" +) + +// A Context carries a deadline, a cancelation signal, and other values across +// API boundaries. +// +// Context's methods may be called by multiple goroutines simultaneously. +type Context interface { + // Deadline returns the time when work done on behalf of this context + // should be canceled. Deadline returns ok==false when no deadline is + // set. Successive calls to Deadline return the same results. + Deadline() (deadline time.Time, ok bool) + + // Done returns a channel that's closed when work done on behalf of this + // context should be canceled. Done may return nil if this context can + // never be canceled. Successive calls to Done return the same value. + // + // WithCancel arranges for Done to be closed when cancel is called; + // WithDeadline arranges for Done to be closed when the deadline + // expires; WithTimeout arranges for Done to be closed when the timeout + // elapses. + // + // Done is provided for use in select statements: + // + // // Stream generates values with DoSomething and sends them to out + // // until DoSomething returns an error or ctx.Done is closed. + // func Stream(ctx context.Context, out <-chan Value) error { + // for { + // v, err := DoSomething(ctx) + // if err != nil { + // return err + // } + // select { + // case <-ctx.Done(): + // return ctx.Err() + // case out <- v: + // } + // } + // } + // + // See http://blog.golang.org/pipelines for more examples of how to use + // a Done channel for cancelation. + Done() <-chan struct{} + + // Err returns a non-nil error value after Done is closed. Err returns + // Canceled if the context was canceled or DeadlineExceeded if the + // context's deadline passed. No other values for Err are defined. + // After Done is closed, successive calls to Err return the same value. + Err() error + + // Value returns the value associated with this context for key, or nil + // if no value is associated with key. Successive calls to Value with + // the same key returns the same result. + // + // Use context values only for request-scoped data that transits + // processes and API boundaries, not for passing optional parameters to + // functions. + // + // A key identifies a specific value in a Context. 
Functions that wish + // to store values in Context typically allocate a key in a global + // variable then use that key as the argument to context.WithValue and + // Context.Value. A key can be any type that supports equality; + // packages should define keys as an unexported type to avoid + // collisions. + // + // Packages that define a Context key should provide type-safe accessors + // for the values stores using that key: + // + // // Package user defines a User type that's stored in Contexts. + // package user + // + // import "golang.org/x/net/context" + // + // // User is the type of value stored in the Contexts. + // type User struct {...} + // + // // key is an unexported type for keys defined in this package. + // // This prevents collisions with keys defined in other packages. + // type key int + // + // // userKey is the key for user.User values in Contexts. It is + // // unexported; clients use user.NewContext and user.FromContext + // // instead of using this key directly. + // var userKey key = 0 + // + // // NewContext returns a new Context that carries value u. + // func NewContext(ctx context.Context, u *User) context.Context { + // return context.WithValue(ctx, userKey, u) + // } + // + // // FromContext returns the User value stored in ctx, if any. + // func FromContext(ctx context.Context) (*User, bool) { + // u, ok := ctx.Value(userKey).(*User) + // return u, ok + // } + Value(key interface{}) interface{} +} + +// Canceled is the error returned by Context.Err when the context is canceled. +var Canceled = errors.New("context canceled") + +// DeadlineExceeded is the error returned by Context.Err when the context's +// deadline passes. +var DeadlineExceeded = errors.New("context deadline exceeded") + +// An emptyCtx is never canceled, has no values, and has no deadline. It is not +// struct{}, since vars of this type must have distinct addresses. +type emptyCtx int + +func (*emptyCtx) Deadline() (deadline time.Time, ok bool) { + return +} + +func (*emptyCtx) Done() <-chan struct{} { + return nil +} + +func (*emptyCtx) Err() error { + return nil +} + +func (*emptyCtx) Value(key interface{}) interface{} { + return nil +} + +func (e *emptyCtx) String() string { + switch e { + case background: + return "context.Background" + case todo: + return "context.TODO" + } + return "unknown empty Context" +} + +var ( + background = new(emptyCtx) + todo = new(emptyCtx) +) + +// Background returns a non-nil, empty Context. It is never canceled, has no +// values, and has no deadline. It is typically used by the main function, +// initialization, and tests, and as the top-level Context for incoming +// requests. +func Background() Context { + return background +} + +// TODO returns a non-nil, empty Context. Code should use context.TODO when +// it's unclear which Context to use or it's is not yet available (because the +// surrounding function has not yet been extended to accept a Context +// parameter). TODO is recognized by static analysis tools that determine +// whether Contexts are propagated correctly in a program. +func TODO() Context { + return todo +} + +// A CancelFunc tells an operation to abandon its work. +// A CancelFunc does not wait for the work to stop. +// After the first call, subsequent calls to a CancelFunc do nothing. +type CancelFunc func() + +// WithCancel returns a copy of parent with a new Done channel. 
The returned +// context's Done channel is closed when the returned cancel function is called +// or when the parent context's Done channel is closed, whichever happens first. +// +// Canceling this context releases resources associated with it, so code should +// call cancel as soon as the operations running in this Context complete. +func WithCancel(parent Context) (ctx Context, cancel CancelFunc) { + c := newCancelCtx(parent) + propagateCancel(parent, &c) + return &c, func() { c.cancel(true, Canceled) } +} + +// newCancelCtx returns an initialized cancelCtx. +func newCancelCtx(parent Context) cancelCtx { + return cancelCtx{ + Context: parent, + done: make(chan struct{}), + } +} + +// propagateCancel arranges for child to be canceled when parent is. +func propagateCancel(parent Context, child canceler) { + if parent.Done() == nil { + return // parent is never canceled + } + if p, ok := parentCancelCtx(parent); ok { + p.mu.Lock() + if p.err != nil { + // parent has already been canceled + child.cancel(false, p.err) + } else { + if p.children == nil { + p.children = make(map[canceler]bool) + } + p.children[child] = true + } + p.mu.Unlock() + } else { + go func() { + select { + case <-parent.Done(): + child.cancel(false, parent.Err()) + case <-child.Done(): + } + }() + } +} + +// parentCancelCtx follows a chain of parent references until it finds a +// *cancelCtx. This function understands how each of the concrete types in this +// package represents its parent. +func parentCancelCtx(parent Context) (*cancelCtx, bool) { + for { + switch c := parent.(type) { + case *cancelCtx: + return c, true + case *timerCtx: + return &c.cancelCtx, true + case *valueCtx: + parent = c.Context + default: + return nil, false + } + } +} + +// removeChild removes a context from its parent. +func removeChild(parent Context, child canceler) { + p, ok := parentCancelCtx(parent) + if !ok { + return + } + p.mu.Lock() + if p.children != nil { + delete(p.children, child) + } + p.mu.Unlock() +} + +// A canceler is a context type that can be canceled directly. The +// implementations are *cancelCtx and *timerCtx. +type canceler interface { + cancel(removeFromParent bool, err error) + Done() <-chan struct{} +} + +// A cancelCtx can be canceled. When canceled, it also cancels any children +// that implement canceler. +type cancelCtx struct { + Context + + done chan struct{} // closed by the first cancel call. + + mu sync.Mutex + children map[canceler]bool // set to nil by the first cancel call + err error // set to non-nil by the first cancel call +} + +func (c *cancelCtx) Done() <-chan struct{} { + return c.done +} + +func (c *cancelCtx) Err() error { + c.mu.Lock() + defer c.mu.Unlock() + return c.err +} + +func (c *cancelCtx) String() string { + return fmt.Sprintf("%v.WithCancel", c.Context) +} + +// cancel closes c.done, cancels each of c's children, and, if +// removeFromParent is true, removes c from its parent's children. +func (c *cancelCtx) cancel(removeFromParent bool, err error) { + if err == nil { + panic("context: internal error: missing cancel error") + } + c.mu.Lock() + if c.err != nil { + c.mu.Unlock() + return // already canceled + } + c.err = err + close(c.done) + for child := range c.children { + // NOTE: acquiring the child's lock while holding parent's lock. + child.cancel(false, err) + } + c.children = nil + c.mu.Unlock() + + if removeFromParent { + removeChild(c.Context, c) + } +} + +// WithDeadline returns a copy of the parent context with the deadline adjusted +// to be no later than d. 
If the parent's deadline is already earlier than d, +// WithDeadline(parent, d) is semantically equivalent to parent. The returned +// context's Done channel is closed when the deadline expires, when the returned +// cancel function is called, or when the parent context's Done channel is +// closed, whichever happens first. +// +// Canceling this context releases resources associated with it, so code should +// call cancel as soon as the operations running in this Context complete. +func WithDeadline(parent Context, deadline time.Time) (Context, CancelFunc) { + if cur, ok := parent.Deadline(); ok && cur.Before(deadline) { + // The current deadline is already sooner than the new one. + return WithCancel(parent) + } + c := &timerCtx{ + cancelCtx: newCancelCtx(parent), + deadline: deadline, + } + propagateCancel(parent, c) + d := deadline.Sub(time.Now()) + if d <= 0 { + c.cancel(true, DeadlineExceeded) // deadline has already passed + return c, func() { c.cancel(true, Canceled) } + } + c.mu.Lock() + defer c.mu.Unlock() + if c.err == nil { + c.timer = time.AfterFunc(d, func() { + c.cancel(true, DeadlineExceeded) + }) + } + return c, func() { c.cancel(true, Canceled) } +} + +// A timerCtx carries a timer and a deadline. It embeds a cancelCtx to +// implement Done and Err. It implements cancel by stopping its timer then +// delegating to cancelCtx.cancel. +type timerCtx struct { + cancelCtx + timer *time.Timer // Under cancelCtx.mu. + + deadline time.Time +} + +func (c *timerCtx) Deadline() (deadline time.Time, ok bool) { + return c.deadline, true +} + +func (c *timerCtx) String() string { + return fmt.Sprintf("%v.WithDeadline(%s [%s])", c.cancelCtx.Context, c.deadline, c.deadline.Sub(time.Now())) +} + +func (c *timerCtx) cancel(removeFromParent bool, err error) { + c.cancelCtx.cancel(false, err) + if removeFromParent { + // Remove this timerCtx from its parent cancelCtx's children. + removeChild(c.cancelCtx.Context, c) + } + c.mu.Lock() + if c.timer != nil { + c.timer.Stop() + c.timer = nil + } + c.mu.Unlock() +} + +// WithTimeout returns WithDeadline(parent, time.Now().Add(timeout)). +// +// Canceling this context releases resources associated with it, so code should +// call cancel as soon as the operations running in this Context complete: +// +// func slowOperationWithTimeout(ctx context.Context) (Result, error) { +// ctx, cancel := context.WithTimeout(ctx, 100*time.Millisecond) +// defer cancel() // releases resources if slowOperation completes before timeout elapses +// return slowOperation(ctx) +// } +func WithTimeout(parent Context, timeout time.Duration) (Context, CancelFunc) { + return WithDeadline(parent, time.Now().Add(timeout)) +} + +// WithValue returns a copy of parent in which the value associated with key is +// val. +// +// Use context Values only for request-scoped data that transits processes and +// APIs, not for passing optional parameters to functions. +func WithValue(parent Context, key interface{}, val interface{}) Context { + return &valueCtx{parent, key, val} +} + +// A valueCtx carries a key-value pair. It implements Value for that key and +// delegates all other calls to the embedded Context. 
+type valueCtx struct { + Context + key, val interface{} +} + +func (c *valueCtx) String() string { + return fmt.Sprintf("%v.WithValue(%#v, %#v)", c.Context, c.key, c.val) +} + +func (c *valueCtx) Value(key interface{}) interface{} { + if c.key == key { + return c.val + } + return c.Context.Value(key) +} diff --git a/vendor/golang.org/x/net/context/context_test.go b/vendor/golang.org/x/net/context/context_test.go new file mode 100644 index 0000000..05345fc --- /dev/null +++ b/vendor/golang.org/x/net/context/context_test.go @@ -0,0 +1,575 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package context + +import ( + "fmt" + "math/rand" + "runtime" + "strings" + "sync" + "testing" + "time" +) + +// otherContext is a Context that's not one of the types defined in context.go. +// This lets us test code paths that differ based on the underlying type of the +// Context. +type otherContext struct { + Context +} + +func TestBackground(t *testing.T) { + c := Background() + if c == nil { + t.Fatalf("Background returned nil") + } + select { + case x := <-c.Done(): + t.Errorf("<-c.Done() == %v want nothing (it should block)", x) + default: + } + if got, want := fmt.Sprint(c), "context.Background"; got != want { + t.Errorf("Background().String() = %q want %q", got, want) + } +} + +func TestTODO(t *testing.T) { + c := TODO() + if c == nil { + t.Fatalf("TODO returned nil") + } + select { + case x := <-c.Done(): + t.Errorf("<-c.Done() == %v want nothing (it should block)", x) + default: + } + if got, want := fmt.Sprint(c), "context.TODO"; got != want { + t.Errorf("TODO().String() = %q want %q", got, want) + } +} + +func TestWithCancel(t *testing.T) { + c1, cancel := WithCancel(Background()) + + if got, want := fmt.Sprint(c1), "context.Background.WithCancel"; got != want { + t.Errorf("c1.String() = %q want %q", got, want) + } + + o := otherContext{c1} + c2, _ := WithCancel(o) + contexts := []Context{c1, o, c2} + + for i, c := range contexts { + if d := c.Done(); d == nil { + t.Errorf("c[%d].Done() == %v want non-nil", i, d) + } + if e := c.Err(); e != nil { + t.Errorf("c[%d].Err() == %v want nil", i, e) + } + + select { + case x := <-c.Done(): + t.Errorf("<-c.Done() == %v want nothing (it should block)", x) + default: + } + } + + cancel() + time.Sleep(100 * time.Millisecond) // let cancelation propagate + + for i, c := range contexts { + select { + case <-c.Done(): + default: + t.Errorf("<-c[%d].Done() blocked, but shouldn't have", i) + } + if e := c.Err(); e != Canceled { + t.Errorf("c[%d].Err() == %v want %v", i, e, Canceled) + } + } +} + +func TestParentFinishesChild(t *testing.T) { + // Context tree: + // parent -> cancelChild + // parent -> valueChild -> timerChild + parent, cancel := WithCancel(Background()) + cancelChild, stop := WithCancel(parent) + defer stop() + valueChild := WithValue(parent, "key", "value") + timerChild, stop := WithTimeout(valueChild, 10000*time.Hour) + defer stop() + + select { + case x := <-parent.Done(): + t.Errorf("<-parent.Done() == %v want nothing (it should block)", x) + case x := <-cancelChild.Done(): + t.Errorf("<-cancelChild.Done() == %v want nothing (it should block)", x) + case x := <-timerChild.Done(): + t.Errorf("<-timerChild.Done() == %v want nothing (it should block)", x) + case x := <-valueChild.Done(): + t.Errorf("<-valueChild.Done() == %v want nothing (it should block)", x) + default: + } + + // The parent's children should contain the two 
cancelable children. + pc := parent.(*cancelCtx) + cc := cancelChild.(*cancelCtx) + tc := timerChild.(*timerCtx) + pc.mu.Lock() + if len(pc.children) != 2 || !pc.children[cc] || !pc.children[tc] { + t.Errorf("bad linkage: pc.children = %v, want %v and %v", + pc.children, cc, tc) + } + pc.mu.Unlock() + + if p, ok := parentCancelCtx(cc.Context); !ok || p != pc { + t.Errorf("bad linkage: parentCancelCtx(cancelChild.Context) = %v, %v want %v, true", p, ok, pc) + } + if p, ok := parentCancelCtx(tc.Context); !ok || p != pc { + t.Errorf("bad linkage: parentCancelCtx(timerChild.Context) = %v, %v want %v, true", p, ok, pc) + } + + cancel() + + pc.mu.Lock() + if len(pc.children) != 0 { + t.Errorf("pc.cancel didn't clear pc.children = %v", pc.children) + } + pc.mu.Unlock() + + // parent and children should all be finished. + check := func(ctx Context, name string) { + select { + case <-ctx.Done(): + default: + t.Errorf("<-%s.Done() blocked, but shouldn't have", name) + } + if e := ctx.Err(); e != Canceled { + t.Errorf("%s.Err() == %v want %v", name, e, Canceled) + } + } + check(parent, "parent") + check(cancelChild, "cancelChild") + check(valueChild, "valueChild") + check(timerChild, "timerChild") + + // WithCancel should return a canceled context on a canceled parent. + precanceledChild := WithValue(parent, "key", "value") + select { + case <-precanceledChild.Done(): + default: + t.Errorf("<-precanceledChild.Done() blocked, but shouldn't have") + } + if e := precanceledChild.Err(); e != Canceled { + t.Errorf("precanceledChild.Err() == %v want %v", e, Canceled) + } +} + +func TestChildFinishesFirst(t *testing.T) { + cancelable, stop := WithCancel(Background()) + defer stop() + for _, parent := range []Context{Background(), cancelable} { + child, cancel := WithCancel(parent) + + select { + case x := <-parent.Done(): + t.Errorf("<-parent.Done() == %v want nothing (it should block)", x) + case x := <-child.Done(): + t.Errorf("<-child.Done() == %v want nothing (it should block)", x) + default: + } + + cc := child.(*cancelCtx) + pc, pcok := parent.(*cancelCtx) // pcok == false when parent == Background() + if p, ok := parentCancelCtx(cc.Context); ok != pcok || (ok && pc != p) { + t.Errorf("bad linkage: parentCancelCtx(cc.Context) = %v, %v want %v, %v", p, ok, pc, pcok) + } + + if pcok { + pc.mu.Lock() + if len(pc.children) != 1 || !pc.children[cc] { + t.Errorf("bad linkage: pc.children = %v, cc = %v", pc.children, cc) + } + pc.mu.Unlock() + } + + cancel() + + if pcok { + pc.mu.Lock() + if len(pc.children) != 0 { + t.Errorf("child's cancel didn't remove self from pc.children = %v", pc.children) + } + pc.mu.Unlock() + } + + // child should be finished. + select { + case <-child.Done(): + default: + t.Errorf("<-child.Done() blocked, but shouldn't have") + } + if e := child.Err(); e != Canceled { + t.Errorf("child.Err() == %v want %v", e, Canceled) + } + + // parent should not be finished. 
+ select { + case x := <-parent.Done(): + t.Errorf("<-parent.Done() == %v want nothing (it should block)", x) + default: + } + if e := parent.Err(); e != nil { + t.Errorf("parent.Err() == %v want nil", e) + } + } +} + +func testDeadline(c Context, wait time.Duration, t *testing.T) { + select { + case <-time.After(wait): + t.Fatalf("context should have timed out") + case <-c.Done(): + } + if e := c.Err(); e != DeadlineExceeded { + t.Errorf("c.Err() == %v want %v", e, DeadlineExceeded) + } +} + +func TestDeadline(t *testing.T) { + c, _ := WithDeadline(Background(), time.Now().Add(100*time.Millisecond)) + if got, prefix := fmt.Sprint(c), "context.Background.WithDeadline("; !strings.HasPrefix(got, prefix) { + t.Errorf("c.String() = %q want prefix %q", got, prefix) + } + testDeadline(c, 200*time.Millisecond, t) + + c, _ = WithDeadline(Background(), time.Now().Add(100*time.Millisecond)) + o := otherContext{c} + testDeadline(o, 200*time.Millisecond, t) + + c, _ = WithDeadline(Background(), time.Now().Add(100*time.Millisecond)) + o = otherContext{c} + c, _ = WithDeadline(o, time.Now().Add(300*time.Millisecond)) + testDeadline(c, 200*time.Millisecond, t) +} + +func TestTimeout(t *testing.T) { + c, _ := WithTimeout(Background(), 100*time.Millisecond) + if got, prefix := fmt.Sprint(c), "context.Background.WithDeadline("; !strings.HasPrefix(got, prefix) { + t.Errorf("c.String() = %q want prefix %q", got, prefix) + } + testDeadline(c, 200*time.Millisecond, t) + + c, _ = WithTimeout(Background(), 100*time.Millisecond) + o := otherContext{c} + testDeadline(o, 200*time.Millisecond, t) + + c, _ = WithTimeout(Background(), 100*time.Millisecond) + o = otherContext{c} + c, _ = WithTimeout(o, 300*time.Millisecond) + testDeadline(c, 200*time.Millisecond, t) +} + +func TestCanceledTimeout(t *testing.T) { + c, _ := WithTimeout(Background(), 200*time.Millisecond) + o := otherContext{c} + c, cancel := WithTimeout(o, 400*time.Millisecond) + cancel() + time.Sleep(100 * time.Millisecond) // let cancelation propagate + select { + case <-c.Done(): + default: + t.Errorf("<-c.Done() blocked, but shouldn't have") + } + if e := c.Err(); e != Canceled { + t.Errorf("c.Err() == %v want %v", e, Canceled) + } +} + +type key1 int +type key2 int + +var k1 = key1(1) +var k2 = key2(1) // same int as k1, different type +var k3 = key2(3) // same type as k2, different int + +func TestValues(t *testing.T) { + check := func(c Context, nm, v1, v2, v3 string) { + if v, ok := c.Value(k1).(string); ok == (len(v1) == 0) || v != v1 { + t.Errorf(`%s.Value(k1).(string) = %q, %t want %q, %t`, nm, v, ok, v1, len(v1) != 0) + } + if v, ok := c.Value(k2).(string); ok == (len(v2) == 0) || v != v2 { + t.Errorf(`%s.Value(k2).(string) = %q, %t want %q, %t`, nm, v, ok, v2, len(v2) != 0) + } + if v, ok := c.Value(k3).(string); ok == (len(v3) == 0) || v != v3 { + t.Errorf(`%s.Value(k3).(string) = %q, %t want %q, %t`, nm, v, ok, v3, len(v3) != 0) + } + } + + c0 := Background() + check(c0, "c0", "", "", "") + + c1 := WithValue(Background(), k1, "c1k1") + check(c1, "c1", "c1k1", "", "") + + if got, want := fmt.Sprint(c1), `context.Background.WithValue(1, "c1k1")`; got != want { + t.Errorf("c.String() = %q want %q", got, want) + } + + c2 := WithValue(c1, k2, "c2k2") + check(c2, "c2", "c1k1", "c2k2", "") + + c3 := WithValue(c2, k3, "c3k3") + check(c3, "c2", "c1k1", "c2k2", "c3k3") + + c4 := WithValue(c3, k1, nil) + check(c4, "c4", "", "c2k2", "c3k3") + + o0 := otherContext{Background()} + check(o0, "o0", "", "", "") + + o1 := otherContext{WithValue(Background(), 
k1, "c1k1")} + check(o1, "o1", "c1k1", "", "") + + o2 := WithValue(o1, k2, "o2k2") + check(o2, "o2", "c1k1", "o2k2", "") + + o3 := otherContext{c4} + check(o3, "o3", "", "c2k2", "c3k3") + + o4 := WithValue(o3, k3, nil) + check(o4, "o4", "", "c2k2", "") +} + +func TestAllocs(t *testing.T) { + bg := Background() + for _, test := range []struct { + desc string + f func() + limit float64 + gccgoLimit float64 + }{ + { + desc: "Background()", + f: func() { Background() }, + limit: 0, + gccgoLimit: 0, + }, + { + desc: fmt.Sprintf("WithValue(bg, %v, nil)", k1), + f: func() { + c := WithValue(bg, k1, nil) + c.Value(k1) + }, + limit: 3, + gccgoLimit: 3, + }, + { + desc: "WithTimeout(bg, 15*time.Millisecond)", + f: func() { + c, _ := WithTimeout(bg, 15*time.Millisecond) + <-c.Done() + }, + limit: 8, + gccgoLimit: 15, + }, + { + desc: "WithCancel(bg)", + f: func() { + c, cancel := WithCancel(bg) + cancel() + <-c.Done() + }, + limit: 5, + gccgoLimit: 8, + }, + { + desc: "WithTimeout(bg, 100*time.Millisecond)", + f: func() { + c, cancel := WithTimeout(bg, 100*time.Millisecond) + cancel() + <-c.Done() + }, + limit: 8, + gccgoLimit: 25, + }, + } { + limit := test.limit + if runtime.Compiler == "gccgo" { + // gccgo does not yet do escape analysis. + // TOOD(iant): Remove this when gccgo does do escape analysis. + limit = test.gccgoLimit + } + if n := testing.AllocsPerRun(100, test.f); n > limit { + t.Errorf("%s allocs = %f want %d", test.desc, n, int(limit)) + } + } +} + +func TestSimultaneousCancels(t *testing.T) { + root, cancel := WithCancel(Background()) + m := map[Context]CancelFunc{root: cancel} + q := []Context{root} + // Create a tree of contexts. + for len(q) != 0 && len(m) < 100 { + parent := q[0] + q = q[1:] + for i := 0; i < 4; i++ { + ctx, cancel := WithCancel(parent) + m[ctx] = cancel + q = append(q, ctx) + } + } + // Start all the cancels in a random order. + var wg sync.WaitGroup + wg.Add(len(m)) + for _, cancel := range m { + go func(cancel CancelFunc) { + cancel() + wg.Done() + }(cancel) + } + // Wait on all the contexts in a random order. + for ctx := range m { + select { + case <-ctx.Done(): + case <-time.After(1 * time.Second): + buf := make([]byte, 10<<10) + n := runtime.Stack(buf, true) + t.Fatalf("timed out waiting for <-ctx.Done(); stacks:\n%s", buf[:n]) + } + } + // Wait for all the cancel functions to return. + done := make(chan struct{}) + go func() { + wg.Wait() + close(done) + }() + select { + case <-done: + case <-time.After(1 * time.Second): + buf := make([]byte, 10<<10) + n := runtime.Stack(buf, true) + t.Fatalf("timed out waiting for cancel functions; stacks:\n%s", buf[:n]) + } +} + +func TestInterlockedCancels(t *testing.T) { + parent, cancelParent := WithCancel(Background()) + child, cancelChild := WithCancel(parent) + go func() { + parent.Done() + cancelChild() + }() + cancelParent() + select { + case <-child.Done(): + case <-time.After(1 * time.Second): + buf := make([]byte, 10<<10) + n := runtime.Stack(buf, true) + t.Fatalf("timed out waiting for child.Done(); stacks:\n%s", buf[:n]) + } +} + +func TestLayersCancel(t *testing.T) { + testLayers(t, time.Now().UnixNano(), false) +} + +func TestLayersTimeout(t *testing.T) { + testLayers(t, time.Now().UnixNano(), true) +} + +func testLayers(t *testing.T, seed int64, testTimeout bool) { + rand.Seed(seed) + errorf := func(format string, a ...interface{}) { + t.Errorf(fmt.Sprintf("seed=%d: %s", seed, format), a...) 
+ } + const ( + timeout = 200 * time.Millisecond + minLayers = 30 + ) + type value int + var ( + vals []*value + cancels []CancelFunc + numTimers int + ctx = Background() + ) + for i := 0; i < minLayers || numTimers == 0 || len(cancels) == 0 || len(vals) == 0; i++ { + switch rand.Intn(3) { + case 0: + v := new(value) + ctx = WithValue(ctx, v, v) + vals = append(vals, v) + case 1: + var cancel CancelFunc + ctx, cancel = WithCancel(ctx) + cancels = append(cancels, cancel) + case 2: + var cancel CancelFunc + ctx, cancel = WithTimeout(ctx, timeout) + cancels = append(cancels, cancel) + numTimers++ + } + } + checkValues := func(when string) { + for _, key := range vals { + if val := ctx.Value(key).(*value); key != val { + errorf("%s: ctx.Value(%p) = %p want %p", when, key, val, key) + } + } + } + select { + case <-ctx.Done(): + errorf("ctx should not be canceled yet") + default: + } + if s, prefix := fmt.Sprint(ctx), "context.Background."; !strings.HasPrefix(s, prefix) { + t.Errorf("ctx.String() = %q want prefix %q", s, prefix) + } + t.Log(ctx) + checkValues("before cancel") + if testTimeout { + select { + case <-ctx.Done(): + case <-time.After(timeout + 100*time.Millisecond): + errorf("ctx should have timed out") + } + checkValues("after timeout") + } else { + cancel := cancels[rand.Intn(len(cancels))] + cancel() + select { + case <-ctx.Done(): + default: + errorf("ctx should be canceled") + } + checkValues("after cancel") + } +} + +func TestCancelRemoves(t *testing.T) { + checkChildren := func(when string, ctx Context, want int) { + if got := len(ctx.(*cancelCtx).children); got != want { + t.Errorf("%s: context has %d children, want %d", when, got, want) + } + } + + ctx, _ := WithCancel(Background()) + checkChildren("after creation", ctx, 0) + _, cancel := WithCancel(ctx) + checkChildren("with WithCancel child ", ctx, 1) + cancel() + checkChildren("after cancelling WithCancel child", ctx, 0) + + ctx, _ = WithCancel(Background()) + checkChildren("after creation", ctx, 0) + _, cancel = WithTimeout(ctx, 60*time.Minute) + checkChildren("with WithTimeout child ", ctx, 1) + cancel() + checkChildren("after cancelling WithTimeout child", ctx, 0) +} diff --git a/vendor/golang.org/x/net/context/ctxhttp/cancelreq.go b/vendor/golang.org/x/net/context/ctxhttp/cancelreq.go new file mode 100644 index 0000000..48610e3 --- /dev/null +++ b/vendor/golang.org/x/net/context/ctxhttp/cancelreq.go @@ -0,0 +1,18 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.5 + +package ctxhttp + +import "net/http" + +func canceler(client *http.Client, req *http.Request) func() { + ch := make(chan struct{}) + req.Cancel = ch + + return func() { + close(ch) + } +} diff --git a/vendor/golang.org/x/net/context/ctxhttp/cancelreq_go14.go b/vendor/golang.org/x/net/context/ctxhttp/cancelreq_go14.go new file mode 100644 index 0000000..56bcbad --- /dev/null +++ b/vendor/golang.org/x/net/context/ctxhttp/cancelreq_go14.go @@ -0,0 +1,23 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !go1.5 + +package ctxhttp + +import "net/http" + +type requestCanceler interface { + CancelRequest(*http.Request) +} + +func canceler(client *http.Client, req *http.Request) func() { + rc, ok := client.Transport.(requestCanceler) + if !ok { + return func() {} + } + return func() { + rc.CancelRequest(req) + } +} diff --git a/vendor/golang.org/x/net/context/ctxhttp/ctxhttp.go b/vendor/golang.org/x/net/context/ctxhttp/ctxhttp.go new file mode 100644 index 0000000..504dd63 --- /dev/null +++ b/vendor/golang.org/x/net/context/ctxhttp/ctxhttp.go @@ -0,0 +1,79 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package ctxhttp provides helper functions for performing context-aware HTTP requests. +package ctxhttp // import "golang.org/x/net/context/ctxhttp" + +import ( + "io" + "net/http" + "net/url" + "strings" + + "golang.org/x/net/context" +) + +// Do sends an HTTP request with the provided http.Client and returns an HTTP response. +// If the client is nil, http.DefaultClient is used. +// If the context is canceled or times out, ctx.Err() will be returned. +func Do(ctx context.Context, client *http.Client, req *http.Request) (*http.Response, error) { + if client == nil { + client = http.DefaultClient + } + + // Request cancelation changed in Go 1.5, see cancelreq.go and cancelreq_go14.go. + cancel := canceler(client, req) + + type responseAndError struct { + resp *http.Response + err error + } + result := make(chan responseAndError, 1) + + go func() { + resp, err := client.Do(req) + result <- responseAndError{resp, err} + }() + + select { + case <-ctx.Done(): + cancel() + return nil, ctx.Err() + case r := <-result: + return r.resp, r.err + } +} + +// Get issues a GET request via the Do function. +func Get(ctx context.Context, client *http.Client, url string) (*http.Response, error) { + req, err := http.NewRequest("GET", url, nil) + if err != nil { + return nil, err + } + return Do(ctx, client, req) +} + +// Head issues a HEAD request via the Do function. +func Head(ctx context.Context, client *http.Client, url string) (*http.Response, error) { + req, err := http.NewRequest("HEAD", url, nil) + if err != nil { + return nil, err + } + return Do(ctx, client, req) +} + +// Post issues a POST request via the Do function. +func Post(ctx context.Context, client *http.Client, url string, bodyType string, body io.Reader) (*http.Response, error) { + req, err := http.NewRequest("POST", url, body) + if err != nil { + return nil, err + } + req.Header.Set("Content-Type", bodyType) + return Do(ctx, client, req) +} + +// PostForm issues a POST request via the Do function. +func PostForm(ctx context.Context, client *http.Client, url string, data url.Values) (*http.Response, error) { + return Post(ctx, client, url, "application/x-www-form-urlencoded", strings.NewReader(data.Encode())) +} diff --git a/vendor/golang.org/x/net/context/ctxhttp/ctxhttp_test.go b/vendor/golang.org/x/net/context/ctxhttp/ctxhttp_test.go new file mode 100644 index 0000000..47b53d7 --- /dev/null +++ b/vendor/golang.org/x/net/context/ctxhttp/ctxhttp_test.go @@ -0,0 +1,72 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package ctxhttp + +import ( + "io/ioutil" + "net/http" + "net/http/httptest" + "testing" + "time" + + "golang.org/x/net/context" +) + +const ( + requestDuration = 100 * time.Millisecond + requestBody = "ok" +) + +func TestNoTimeout(t *testing.T) { + ctx := context.Background() + resp, err := doRequest(ctx) + + if resp == nil || err != nil { + t.Fatalf("error received from client: %v %v", err, resp) + } +} +func TestCancel(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) + go func() { + time.Sleep(requestDuration / 2) + cancel() + }() + + resp, err := doRequest(ctx) + + if resp != nil || err == nil { + t.Fatalf("expected error, didn't get one. resp: %v", resp) + } + if err != ctx.Err() { + t.Fatalf("expected error from context but got: %v", err) + } +} + +func TestCancelAfterRequest(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) + + resp, err := doRequest(ctx) + + // Cancel before reading the body. + // Request.Body should still be readable after the context is canceled. + cancel() + + b, err := ioutil.ReadAll(resp.Body) + if err != nil || string(b) != requestBody { + t.Fatalf("could not read body: %q %v", b, err) + } +} + +func doRequest(ctx context.Context) (*http.Response, error) { + var okHandler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + time.Sleep(requestDuration) + w.Write([]byte(requestBody)) + }) + + serv := httptest.NewServer(okHandler) + defer serv.Close() + + return Get(ctx, nil, serv.URL) +} diff --git a/vendor/golang.org/x/net/context/withtimeout_test.go b/vendor/golang.org/x/net/context/withtimeout_test.go new file mode 100644 index 0000000..a6754dc --- /dev/null +++ b/vendor/golang.org/x/net/context/withtimeout_test.go @@ -0,0 +1,26 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package context_test + +import ( + "fmt" + "time" + + "golang.org/x/net/context" +) + +func ExampleWithTimeout() { + // Pass a context with a timeout to tell a blocking function that it + // should abandon its work after the timeout elapses. + ctx, _ := context.WithTimeout(context.Background(), 100*time.Millisecond) + select { + case <-time.After(200 * time.Millisecond): + fmt.Println("overslept") + case <-ctx.Done(): + fmt.Println(ctx.Err()) // prints "context deadline exceeded" + } + // Output: + // context deadline exceeded +} diff --git a/vendor/golang.org/x/oauth2/AUTHORS b/vendor/golang.org/x/oauth2/AUTHORS new file mode 100644 index 0000000..15167cd --- /dev/null +++ b/vendor/golang.org/x/oauth2/AUTHORS @@ -0,0 +1,3 @@ +# This source code refers to The Go Authors for copyright purposes. +# The master list of authors is in the main Go distribution, +# visible at http://tip.golang.org/AUTHORS. diff --git a/vendor/golang.org/x/oauth2/CONTRIBUTING.md b/vendor/golang.org/x/oauth2/CONTRIBUTING.md new file mode 100644 index 0000000..46aa2b1 --- /dev/null +++ b/vendor/golang.org/x/oauth2/CONTRIBUTING.md @@ -0,0 +1,31 @@ +# Contributing to Go + +Go is an open source project. + +It is the work of hundreds of contributors. We appreciate your help! + + +## Filing issues + +When [filing an issue](https://github.com/golang/oauth2/issues), make sure to answer these five questions: + +1. What version of Go are you using (`go version`)? +2. What operating system and processor architecture are you using? +3. What did you do? +4. What did you expect to see? +5. What did you see instead? 
+ +General questions should go to the [golang-nuts mailing list](https://groups.google.com/group/golang-nuts) instead of the issue tracker. +The gophers there will answer or ask you to file an issue if you've tripped over a bug. + +## Contributing code + +Please read the [Contribution Guidelines](https://golang.org/doc/contribute.html) +before sending patches. + +**We do not accept GitHub pull requests** +(we use [Gerrit](https://code.google.com/p/gerrit/) instead for code review). + +Unless otherwise noted, the Go source files are distributed under +the BSD-style license found in the LICENSE file. + diff --git a/vendor/golang.org/x/oauth2/CONTRIBUTORS b/vendor/golang.org/x/oauth2/CONTRIBUTORS new file mode 100644 index 0000000..1c4577e --- /dev/null +++ b/vendor/golang.org/x/oauth2/CONTRIBUTORS @@ -0,0 +1,3 @@ +# This source code was written by the Go contributors. +# The master list of contributors is in the main Go distribution, +# visible at http://tip.golang.org/CONTRIBUTORS. diff --git a/vendor/golang.org/x/oauth2/LICENSE b/vendor/golang.org/x/oauth2/LICENSE new file mode 100644 index 0000000..d02f24f --- /dev/null +++ b/vendor/golang.org/x/oauth2/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The oauth2 Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/oauth2/README.md b/vendor/golang.org/x/oauth2/README.md new file mode 100644 index 0000000..0d51417 --- /dev/null +++ b/vendor/golang.org/x/oauth2/README.md @@ -0,0 +1,64 @@ +# OAuth2 for Go + +[![Build Status](https://travis-ci.org/golang/oauth2.svg?branch=master)](https://travis-ci.org/golang/oauth2) + +oauth2 package contains a client implementation for OAuth 2.0 spec. + +## Installation + +~~~~ +go get golang.org/x/oauth2 +~~~~ + +See godoc for further documentation and examples. 
+ +* [godoc.org/golang.org/x/oauth2](http://godoc.org/golang.org/x/oauth2) +* [godoc.org/golang.org/x/oauth2/google](http://godoc.org/golang.org/x/oauth2/google) + + +## App Engine + +In change 96e89be (March 2015) we removed the `oauth2.Context2` type in favor +of the [`context.Context`](https://golang.org/x/net/context#Context) type from +the `golang.org/x/net/context` package + +This means its no longer possible to use the "Classic App Engine" +`appengine.Context` type with the `oauth2` package. (You're using +Classic App Engine if you import the package `"appengine"`.) + +To work around this, you may use the new `"google.golang.org/appengine"` +package. This package has almost the same API as the `"appengine"` package, +but it can be fetched with `go get` and used on "Managed VMs" and well as +Classic App Engine. + +See the [new `appengine` package's readme](https://github.com/golang/appengine#updating-a-go-app-engine-app) +for information on updating your app. + +If you don't want to update your entire app to use the new App Engine packages, +you may use both sets of packages in parallel, using only the new packages +with the `oauth2` package. + + import ( + "golang.org/x/net/context" + "golang.org/x/oauth2" + "golang.org/x/oauth2/google" + newappengine "google.golang.org/appengine" + newurlfetch "google.golang.org/appengine/urlfetch" + + "appengine" + ) + + func handler(w http.ResponseWriter, r *http.Request) { + var c appengine.Context = appengine.NewContext(r) + c.Infof("Logging a message with the old package") + + var ctx context.Context = newappengine.NewContext(r) + client := &http.Client{ + Transport: &oauth2.Transport{ + Source: google.AppEngineTokenSource(ctx, "scope"), + Base: &newurlfetch.Transport{Context: ctx}, + }, + } + client.Get("...") + } + diff --git a/vendor/golang.org/x/oauth2/bitbucket/bitbucket.go b/vendor/golang.org/x/oauth2/bitbucket/bitbucket.go new file mode 100644 index 0000000..44af1f1 --- /dev/null +++ b/vendor/golang.org/x/oauth2/bitbucket/bitbucket.go @@ -0,0 +1,16 @@ +// Copyright 2015 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package bitbucket provides constants for using OAuth2 to access Bitbucket. +package bitbucket + +import ( + "golang.org/x/oauth2" +) + +// Endpoint is Bitbucket's OAuth 2.0 endpoint. +var Endpoint = oauth2.Endpoint{ + AuthURL: "https://bitbucket.org/site/oauth2/authorize", + TokenURL: "https://bitbucket.org/site/oauth2/access_token", +} diff --git a/vendor/golang.org/x/oauth2/client_appengine.go b/vendor/golang.org/x/oauth2/client_appengine.go new file mode 100644 index 0000000..52f8d1d --- /dev/null +++ b/vendor/golang.org/x/oauth2/client_appengine.go @@ -0,0 +1,25 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build appengine + +// App Engine hooks. 
+ +package oauth2 + +import ( + "net/http" + + "golang.org/x/net/context" + "golang.org/x/oauth2/internal" + "google.golang.org/appengine/urlfetch" +) + +func init() { + internal.RegisterContextClientFunc(contextClientAppEngine) +} + +func contextClientAppEngine(ctx context.Context) (*http.Client, error) { + return urlfetch.Client(ctx), nil +} diff --git a/vendor/golang.org/x/oauth2/clientcredentials/clientcredentials.go b/vendor/golang.org/x/oauth2/clientcredentials/clientcredentials.go new file mode 100644 index 0000000..baebced --- /dev/null +++ b/vendor/golang.org/x/oauth2/clientcredentials/clientcredentials.go @@ -0,0 +1,112 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package clientcredentials implements the OAuth2.0 "client credentials" token flow, +// also known as the "two-legged OAuth 2.0". +// +// This should be used when the client is acting on its own behalf or when the client +// is the resource owner. It may also be used when requesting access to protected +// resources based on an authorization previously arranged with the authorization +// server. +// +// See http://tools.ietf.org/html/draft-ietf-oauth-v2-31#section-4.4 +package clientcredentials // import "golang.org/x/oauth2/clientcredentials" + +import ( + "net/http" + "net/url" + "strings" + + "golang.org/x/net/context" + "golang.org/x/oauth2" + "golang.org/x/oauth2/internal" +) + +// tokenFromInternal maps an *internal.Token struct into +// an *oauth2.Token struct. +func tokenFromInternal(t *internal.Token) *oauth2.Token { + if t == nil { + return nil + } + tk := &oauth2.Token{ + AccessToken: t.AccessToken, + TokenType: t.TokenType, + RefreshToken: t.RefreshToken, + Expiry: t.Expiry, + } + return tk.WithExtra(t.Raw) +} + +// retrieveToken takes a *Config and uses that to retrieve an *internal.Token. +// This token is then mapped from *internal.Token into an *oauth2.Token which is +// returned along with an error. +func retrieveToken(ctx context.Context, c *Config, v url.Values) (*oauth2.Token, error) { + tk, err := internal.RetrieveToken(ctx, c.ClientID, c.ClientSecret, c.TokenURL, v) + if err != nil { + return nil, err + } + return tokenFromInternal(tk), nil +} + +// Client Credentials Config describes a 2-legged OAuth2 flow, with both the +// client application information and the server's endpoint URLs. +type Config struct { + // ClientID is the application's ID. + ClientID string + + // ClientSecret is the application's secret. + ClientSecret string + + // TokenURL is the resource server's token endpoint + // URL. This is a constant specific to each server. + TokenURL string + + // Scope specifies optional requested permissions. + Scopes []string +} + +// Token uses client credentials to retreive a token. +// The HTTP client to use is derived from the context. +// If nil, http.DefaultClient is used. +func (c *Config) Token(ctx context.Context) (*oauth2.Token, error) { + return retrieveToken(ctx, c, url.Values{ + "grant_type": {"client_credentials"}, + "scope": internal.CondVal(strings.Join(c.Scopes, " ")), + }) +} + +// Client returns an HTTP client using the provided token. +// The token will auto-refresh as necessary. The underlying +// HTTP transport will be obtained using the provided context. +// The returned client and its Transport should not be modified. 
+func (c *Config) Client(ctx context.Context) *http.Client { + return oauth2.NewClient(ctx, c.TokenSource(ctx)) +} + +// TokenSource returns a TokenSource that returns t until t expires, +// automatically refreshing it as necessary using the provided context and the +// client ID and client secret. +// +// Most users will use Config.Client instead. +func (c *Config) TokenSource(ctx context.Context) oauth2.TokenSource { + source := &tokenSource{ + ctx: ctx, + conf: c, + } + return oauth2.ReuseTokenSource(nil, source) +} + +type tokenSource struct { + ctx context.Context + conf *Config +} + +// Token refreshes the token by using a new client credentials request. +// tokens received this way do not include a refresh token +func (c *tokenSource) Token() (*oauth2.Token, error) { + return retrieveToken(c.ctx, c.conf, url.Values{ + "grant_type": {"client_credentials"}, + "scope": internal.CondVal(strings.Join(c.conf.Scopes, " ")), + }) +} diff --git a/vendor/golang.org/x/oauth2/clientcredentials/clientcredentials_test.go b/vendor/golang.org/x/oauth2/clientcredentials/clientcredentials_test.go new file mode 100644 index 0000000..ab319e0 --- /dev/null +++ b/vendor/golang.org/x/oauth2/clientcredentials/clientcredentials_test.go @@ -0,0 +1,96 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package clientcredentials + +import ( + "io/ioutil" + "net/http" + "net/http/httptest" + "testing" + + "golang.org/x/oauth2" +) + +func newConf(url string) *Config { + return &Config{ + ClientID: "CLIENT_ID", + ClientSecret: "CLIENT_SECRET", + Scopes: []string{"scope1", "scope2"}, + TokenURL: url + "/token", + } +} + +type mockTransport struct { + rt func(req *http.Request) (resp *http.Response, err error) +} + +func (t *mockTransport) RoundTrip(req *http.Request) (resp *http.Response, err error) { + return t.rt(req) +} + +func TestTokenRequest(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.String() != "/token" { + t.Errorf("authenticate client request URL = %q; want %q", r.URL, "/token") + } + headerAuth := r.Header.Get("Authorization") + if headerAuth != "Basic Q0xJRU5UX0lEOkNMSUVOVF9TRUNSRVQ=" { + t.Errorf("Unexpected authorization header, %v is found.", headerAuth) + } + if got, want := r.Header.Get("Content-Type"), "application/x-www-form-urlencoded"; got != want { + t.Errorf("Content-Type header = %q; want %q", got, want) + } + body, err := ioutil.ReadAll(r.Body) + if err != nil { + r.Body.Close() + } + if err != nil { + t.Errorf("failed reading request body: %s.", err) + } + if string(body) != "client_id=CLIENT_ID&grant_type=client_credentials&scope=scope1+scope2" { + t.Errorf("payload = %q; want %q", string(body), "client_id=CLIENT_ID&grant_type=client_credentials&scope=scope1+scope2") + } + w.Header().Set("Content-Type", "application/x-www-form-urlencoded") + w.Write([]byte("access_token=90d64460d14870c08c81352a05dedd3465940a7c&token_type=bearer")) + })) + defer ts.Close() + conf := newConf(ts.URL) + tok, err := conf.Token(oauth2.NoContext) + if err != nil { + t.Error(err) + } + if !tok.Valid() { + t.Fatalf("token invalid. 
got: %#v", tok) + } + if tok.AccessToken != "90d64460d14870c08c81352a05dedd3465940a7c" { + t.Errorf("Access token = %q; want %q", tok.AccessToken, "90d64460d14870c08c81352a05dedd3465940a7c") + } + if tok.TokenType != "bearer" { + t.Errorf("token type = %q; want %q", tok.TokenType, "bearer") + } +} + +func TestTokenRefreshRequest(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.String() == "/somethingelse" { + return + } + if r.URL.String() != "/token" { + t.Errorf("Unexpected token refresh request URL, %v is found.", r.URL) + } + headerContentType := r.Header.Get("Content-Type") + if headerContentType != "application/x-www-form-urlencoded" { + t.Errorf("Unexpected Content-Type header, %v is found.", headerContentType) + } + body, _ := ioutil.ReadAll(r.Body) + if string(body) != "client_id=CLIENT_ID&grant_type=client_credentials&scope=scope1+scope2" { + t.Errorf("Unexpected refresh token payload, %v is found.", string(body)) + } + })) + defer ts.Close() + conf := newConf(ts.URL) + c := conf.Client(oauth2.NoContext) + c.Get(ts.URL + "/somethingelse") +} diff --git a/vendor/golang.org/x/oauth2/example_test.go b/vendor/golang.org/x/oauth2/example_test.go new file mode 100644 index 0000000..8be2788 --- /dev/null +++ b/vendor/golang.org/x/oauth2/example_test.go @@ -0,0 +1,45 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package oauth2_test + +import ( + "fmt" + "log" + + "golang.org/x/oauth2" +) + +func ExampleConfig() { + conf := &oauth2.Config{ + ClientID: "YOUR_CLIENT_ID", + ClientSecret: "YOUR_CLIENT_SECRET", + Scopes: []string{"SCOPE1", "SCOPE2"}, + Endpoint: oauth2.Endpoint{ + AuthURL: "https://provider.com/o/oauth2/auth", + TokenURL: "https://provider.com/o/oauth2/token", + }, + } + + // Redirect user to consent page to ask for permission + // for the scopes specified above. + url := conf.AuthCodeURL("state", oauth2.AccessTypeOffline) + fmt.Printf("Visit the URL for the auth dialog: %v", url) + + // Use the authorization code that is pushed to the redirect URL. + // NewTransportWithCode will do the handshake to retrieve + // an access token and initiate a Transport that is + // authorized and authenticated by the retrieved token. + var code string + if _, err := fmt.Scan(&code); err != nil { + log.Fatal(err) + } + tok, err := conf.Exchange(oauth2.NoContext, code) + if err != nil { + log.Fatal(err) + } + + client := conf.Client(oauth2.NoContext, tok) + client.Get("...") +} diff --git a/vendor/golang.org/x/oauth2/facebook/facebook.go b/vendor/golang.org/x/oauth2/facebook/facebook.go new file mode 100644 index 0000000..962e86b --- /dev/null +++ b/vendor/golang.org/x/oauth2/facebook/facebook.go @@ -0,0 +1,16 @@ +// Copyright 2015 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package facebook provides constants for using OAuth2 to access Facebook. +package facebook // import "golang.org/x/oauth2/facebook" + +import ( + "golang.org/x/oauth2" +) + +// Endpoint is Facebook's OAuth 2.0 endpoint. 
+var Endpoint = oauth2.Endpoint{ + AuthURL: "https://www.facebook.com/dialog/oauth", + TokenURL: "https://graph.facebook.com/oauth/access_token", +} diff --git a/vendor/golang.org/x/oauth2/github/github.go b/vendor/golang.org/x/oauth2/github/github.go new file mode 100644 index 0000000..1648cb5 --- /dev/null +++ b/vendor/golang.org/x/oauth2/github/github.go @@ -0,0 +1,16 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package github provides constants for using OAuth2 to access Github. +package github // import "golang.org/x/oauth2/github" + +import ( + "golang.org/x/oauth2" +) + +// Endpoint is Github's OAuth 2.0 endpoint. +var Endpoint = oauth2.Endpoint{ + AuthURL: "https://github.com/login/oauth/authorize", + TokenURL: "https://github.com/login/oauth/access_token", +} diff --git a/vendor/golang.org/x/oauth2/google/appengine.go b/vendor/golang.org/x/oauth2/google/appengine.go new file mode 100644 index 0000000..8554221 --- /dev/null +++ b/vendor/golang.org/x/oauth2/google/appengine.go @@ -0,0 +1,86 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package google + +import ( + "sort" + "strings" + "sync" + "time" + + "golang.org/x/net/context" + "golang.org/x/oauth2" +) + +// Set at init time by appenginevm_hook.go. If true, we are on App Engine Managed VMs. +var appengineVM bool + +// Set at init time by appengine_hook.go. If nil, we're not on App Engine. +var appengineTokenFunc func(c context.Context, scopes ...string) (token string, expiry time.Time, err error) + +// AppEngineTokenSource returns a token source that fetches tokens +// issued to the current App Engine application's service account. +// If you are implementing a 3-legged OAuth 2.0 flow on App Engine +// that involves user accounts, see oauth2.Config instead. +// +// The provided context must have come from appengine.NewContext. +func AppEngineTokenSource(ctx context.Context, scope ...string) oauth2.TokenSource { + if appengineTokenFunc == nil { + panic("google: AppEngineTokenSource can only be used on App Engine.") + } + scopes := append([]string{}, scope...) + sort.Strings(scopes) + return &appEngineTokenSource{ + ctx: ctx, + scopes: scopes, + key: strings.Join(scopes, " "), + } +} + +// aeTokens helps the fetched tokens to be reused until their expiration. +var ( + aeTokensMu sync.Mutex + aeTokens = make(map[string]*tokenLock) // key is space-separated scopes +) + +type tokenLock struct { + mu sync.Mutex // guards t; held while fetching or updating t + t *oauth2.Token +} + +type appEngineTokenSource struct { + ctx context.Context + scopes []string + key string // to aeTokens map; space-separated scopes +} + +func (ts *appEngineTokenSource) Token() (*oauth2.Token, error) { + if appengineTokenFunc == nil { + panic("google: AppEngineTokenSource can only be used on App Engine.") + } + + aeTokensMu.Lock() + tok, ok := aeTokens[ts.key] + if !ok { + tok = &tokenLock{} + aeTokens[ts.key] = tok + } + aeTokensMu.Unlock() + + tok.mu.Lock() + defer tok.mu.Unlock() + if tok.t.Valid() { + return tok.t, nil + } + access, exp, err := appengineTokenFunc(ts.ctx, ts.scopes...) 
+ if err != nil { + return nil, err + } + tok.t = &oauth2.Token{ + AccessToken: access, + Expiry: exp, + } + return tok.t, nil +} diff --git a/vendor/golang.org/x/oauth2/google/appengine_hook.go b/vendor/golang.org/x/oauth2/google/appengine_hook.go new file mode 100644 index 0000000..362766f --- /dev/null +++ b/vendor/golang.org/x/oauth2/google/appengine_hook.go @@ -0,0 +1,13 @@ +// Copyright 2015 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build appengine + +package google + +import "google.golang.org/appengine" + +func init() { + appengineTokenFunc = appengine.AccessToken +} diff --git a/vendor/golang.org/x/oauth2/google/appenginevm_hook.go b/vendor/golang.org/x/oauth2/google/appenginevm_hook.go new file mode 100644 index 0000000..633611c --- /dev/null +++ b/vendor/golang.org/x/oauth2/google/appenginevm_hook.go @@ -0,0 +1,14 @@ +// Copyright 2015 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build appenginevm + +package google + +import "google.golang.org/appengine" + +func init() { + appengineVM = true + appengineTokenFunc = appengine.AccessToken +} diff --git a/vendor/golang.org/x/oauth2/google/default.go b/vendor/golang.org/x/oauth2/google/default.go new file mode 100644 index 0000000..66daeef --- /dev/null +++ b/vendor/golang.org/x/oauth2/google/default.go @@ -0,0 +1,155 @@ +// Copyright 2015 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package google + +import ( + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "net/http" + "os" + "path/filepath" + "runtime" + + "golang.org/x/net/context" + "golang.org/x/oauth2" + "golang.org/x/oauth2/jwt" + "google.golang.org/cloud/compute/metadata" +) + +// DefaultClient returns an HTTP Client that uses the +// DefaultTokenSource to obtain authentication credentials. +// +// This client should be used when developing services +// that run on Google App Engine or Google Compute Engine +// and use "Application Default Credentials." +// +// For more details, see: +// https://developers.google.com/accounts/docs/application-default-credentials +// +func DefaultClient(ctx context.Context, scope ...string) (*http.Client, error) { + ts, err := DefaultTokenSource(ctx, scope...) + if err != nil { + return nil, err + } + return oauth2.NewClient(ctx, ts), nil +} + +// DefaultTokenSource is a token source that uses +// "Application Default Credentials". +// +// It looks for credentials in the following places, +// preferring the first location found: +// +// 1. A JSON file whose path is specified by the +// GOOGLE_APPLICATION_CREDENTIALS environment variable. +// 2. A JSON file in a location known to the gcloud command-line tool. +// On Windows, this is %APPDATA%/gcloud/application_default_credentials.json. +// On other systems, $HOME/.config/gcloud/application_default_credentials.json. +// 3. On Google App Engine it uses the appengine.AccessToken function. +// 4. On Google Compute Engine and Google App Engine Managed VMs, it fetches +// credentials from the metadata server. +// (In this final case any provided scopes are ignored.) 
+// +// For more details, see: +// https://developers.google.com/accounts/docs/application-default-credentials +// +func DefaultTokenSource(ctx context.Context, scope ...string) (oauth2.TokenSource, error) { + // First, try the environment variable. + const envVar = "GOOGLE_APPLICATION_CREDENTIALS" + if filename := os.Getenv(envVar); filename != "" { + ts, err := tokenSourceFromFile(ctx, filename, scope) + if err != nil { + return nil, fmt.Errorf("google: error getting credentials using %v environment variable: %v", envVar, err) + } + return ts, nil + } + + // Second, try a well-known file. + filename := wellKnownFile() + _, err := os.Stat(filename) + if err == nil { + ts, err2 := tokenSourceFromFile(ctx, filename, scope) + if err2 == nil { + return ts, nil + } + err = err2 + } else if os.IsNotExist(err) { + err = nil // ignore this error + } + if err != nil { + return nil, fmt.Errorf("google: error getting credentials using well-known file (%v): %v", filename, err) + } + + // Third, if we're on Google App Engine use those credentials. + if appengineTokenFunc != nil && !appengineVM { + return AppEngineTokenSource(ctx, scope...), nil + } + + // Fourth, if we're on Google Compute Engine use the metadata server. + if metadata.OnGCE() { + return ComputeTokenSource(""), nil + } + + // None are found; return helpful error. + const url = "https://developers.google.com/accounts/docs/application-default-credentials" + return nil, fmt.Errorf("google: could not find default credentials. See %v for more information.", url) +} + +func wellKnownFile() string { + const f = "application_default_credentials.json" + if runtime.GOOS == "windows" { + return filepath.Join(os.Getenv("APPDATA"), "gcloud", f) + } + return filepath.Join(guessUnixHomeDir(), ".config", "gcloud", f) +} + +func tokenSourceFromFile(ctx context.Context, filename string, scopes []string) (oauth2.TokenSource, error) { + b, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + var d struct { + // Common fields + Type string + ClientID string `json:"client_id"` + + // User Credential fields + ClientSecret string `json:"client_secret"` + RefreshToken string `json:"refresh_token"` + + // Service Account fields + ClientEmail string `json:"client_email"` + PrivateKeyID string `json:"private_key_id"` + PrivateKey string `json:"private_key"` + } + if err := json.Unmarshal(b, &d); err != nil { + return nil, err + } + switch d.Type { + case "authorized_user": + cfg := &oauth2.Config{ + ClientID: d.ClientID, + ClientSecret: d.ClientSecret, + Scopes: append([]string{}, scopes...), // copy + Endpoint: Endpoint, + } + tok := &oauth2.Token{RefreshToken: d.RefreshToken} + return cfg.TokenSource(ctx, tok), nil + case "service_account": + cfg := &jwt.Config{ + Email: d.ClientEmail, + PrivateKey: []byte(d.PrivateKey), + Scopes: append([]string{}, scopes...), // copy + TokenURL: JWTTokenURL, + } + return cfg.TokenSource(ctx), nil + case "": + return nil, errors.New("missing 'type' field in credentials") + default: + return nil, fmt.Errorf("unknown credential type: %q", d.Type) + } +} diff --git a/vendor/golang.org/x/oauth2/google/example_test.go b/vendor/golang.org/x/oauth2/google/example_test.go new file mode 100644 index 0000000..1726280 --- /dev/null +++ b/vendor/golang.org/x/oauth2/google/example_test.go @@ -0,0 +1,150 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build appenginevm !appengine + +package google_test + +import ( + "fmt" + "io/ioutil" + "log" + "net/http" + + "golang.org/x/oauth2" + "golang.org/x/oauth2/google" + "golang.org/x/oauth2/jwt" + "google.golang.org/appengine" + "google.golang.org/appengine/urlfetch" +) + +func ExampleDefaultClient() { + client, err := google.DefaultClient(oauth2.NoContext, + "https://www.googleapis.com/auth/devstorage.full_control") + if err != nil { + log.Fatal(err) + } + client.Get("...") +} + +func Example_webServer() { + // Your credentials should be obtained from the Google + // Developer Console (https://console.developers.google.com). + conf := &oauth2.Config{ + ClientID: "YOUR_CLIENT_ID", + ClientSecret: "YOUR_CLIENT_SECRET", + RedirectURL: "YOUR_REDIRECT_URL", + Scopes: []string{ + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/blogger", + }, + Endpoint: google.Endpoint, + } + // Redirect user to Google's consent page to ask for permission + // for the scopes specified above. + url := conf.AuthCodeURL("state") + fmt.Printf("Visit the URL for the auth dialog: %v", url) + + // Handle the exchange code to initiate a transport. + tok, err := conf.Exchange(oauth2.NoContext, "authorization-code") + if err != nil { + log.Fatal(err) + } + client := conf.Client(oauth2.NoContext, tok) + client.Get("...") +} + +func ExampleJWTConfigFromJSON() { + // Your credentials should be obtained from the Google + // Developer Console (https://console.developers.google.com). + // Navigate to your project, then see the "Credentials" page + // under "APIs & Auth". + // To create a service account client, click "Create new Client ID", + // select "Service Account", and click "Create Client ID". A JSON + // key file will then be downloaded to your computer. + data, err := ioutil.ReadFile("/path/to/your-project-key.json") + if err != nil { + log.Fatal(err) + } + conf, err := google.JWTConfigFromJSON(data, "https://www.googleapis.com/auth/bigquery") + if err != nil { + log.Fatal(err) + } + // Initiate an http.Client. The following GET request will be + // authorized and authenticated on the behalf of + // your service account. + client := conf.Client(oauth2.NoContext) + client.Get("...") +} + +func ExampleSDKConfig() { + // The credentials will be obtained from the first account that + // has been authorized with `gcloud auth login`. + conf, err := google.NewSDKConfig("") + if err != nil { + log.Fatal(err) + } + // Initiate an http.Client. The following GET request will be + // authorized and authenticated on the behalf of the SDK user. + client := conf.Client(oauth2.NoContext) + client.Get("...") +} + +func Example_serviceAccount() { + // Your credentials should be obtained from the Google + // Developer Console (https://console.developers.google.com). + conf := &jwt.Config{ + Email: "xxx@developer.gserviceaccount.com", + // The contents of your RSA private key or your PEM file + // that contains a private key. + // If you have a p12 file instead, you + // can use `openssl` to export the private key into a pem file. + // + // $ openssl pkcs12 -in key.p12 -passin pass:notasecret -out key.pem -nodes + // + // The field only supports PEM containers with no passphrase. + // The openssl command will convert p12 keys to passphrase-less PEM containers. 
+ PrivateKey: []byte("-----BEGIN RSA PRIVATE KEY-----..."), + Scopes: []string{ + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/blogger", + }, + TokenURL: google.JWTTokenURL, + // If you would like to impersonate a user, you can + // create a transport with a subject. The following GET + // request will be made on the behalf of user@example.com. + // Optional. + Subject: "user@example.com", + } + // Initiate an http.Client, the following GET request will be + // authorized and authenticated on the behalf of user@example.com. + client := conf.Client(oauth2.NoContext) + client.Get("...") +} + +func ExampleAppEngineTokenSource() { + var req *http.Request // from the ServeHTTP handler + ctx := appengine.NewContext(req) + client := &http.Client{ + Transport: &oauth2.Transport{ + Source: google.AppEngineTokenSource(ctx, "https://www.googleapis.com/auth/bigquery"), + Base: &urlfetch.Transport{ + Context: ctx, + }, + }, + } + client.Get("...") +} + +func ExampleComputeTokenSource() { + client := &http.Client{ + Transport: &oauth2.Transport{ + // Fetch from Google Compute Engine's metadata server to retrieve + // an access token for the provided account. + // If no account is specified, "default" is used. + Source: google.ComputeTokenSource(""), + }, + } + client.Get("...") +} diff --git a/vendor/golang.org/x/oauth2/google/google.go b/vendor/golang.org/x/oauth2/google/google.go new file mode 100644 index 0000000..74aa7d9 --- /dev/null +++ b/vendor/golang.org/x/oauth2/google/google.go @@ -0,0 +1,145 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package google provides support for making OAuth2 authorized and +// authenticated HTTP requests to Google APIs. +// It supports the Web server flow, client-side credentials, service accounts, +// Google Compute Engine service accounts, and Google App Engine service +// accounts. +// +// For more information, please read +// https://developers.google.com/accounts/docs/OAuth2 +// and +// https://developers.google.com/accounts/docs/application-default-credentials. +package google // import "golang.org/x/oauth2/google" + +import ( + "encoding/json" + "errors" + "fmt" + "strings" + "time" + + "golang.org/x/oauth2" + "golang.org/x/oauth2/jwt" + "google.golang.org/cloud/compute/metadata" +) + +// Endpoint is Google's OAuth 2.0 endpoint. +var Endpoint = oauth2.Endpoint{ + AuthURL: "https://accounts.google.com/o/oauth2/auth", + TokenURL: "https://accounts.google.com/o/oauth2/token", +} + +// JWTTokenURL is Google's OAuth 2.0 token URL to use with the JWT flow. +const JWTTokenURL = "https://accounts.google.com/o/oauth2/token" + +// ConfigFromJSON uses a Google Developers Console client_credentials.json +// file to construct a config. +// client_credentials.json can be downloadable from https://console.developers.google.com, +// under "APIs & Auth" > "Credentials". Download the Web application credentials in the +// JSON format and provide the contents of the file as jsonKey. 
+func ConfigFromJSON(jsonKey []byte, scope ...string) (*oauth2.Config, error) { + type cred struct { + ClientID string `json:"client_id"` + ClientSecret string `json:"client_secret"` + RedirectURIs []string `json:"redirect_uris"` + AuthURI string `json:"auth_uri"` + TokenURI string `json:"token_uri"` + } + var j struct { + Web *cred `json:"web"` + Installed *cred `json:"installed"` + } + if err := json.Unmarshal(jsonKey, &j); err != nil { + return nil, err + } + var c *cred + switch { + case j.Web != nil: + c = j.Web + case j.Installed != nil: + c = j.Installed + default: + return nil, fmt.Errorf("oauth2/google: no credentials found") + } + if len(c.RedirectURIs) < 1 { + return nil, errors.New("oauth2/google: missing redirect URL in the client_credentials.json") + } + return &oauth2.Config{ + ClientID: c.ClientID, + ClientSecret: c.ClientSecret, + RedirectURL: c.RedirectURIs[0], + Scopes: scope, + Endpoint: oauth2.Endpoint{ + AuthURL: c.AuthURI, + TokenURL: c.TokenURI, + }, + }, nil +} + +// JWTConfigFromJSON uses a Google Developers service account JSON key file to read +// the credentials that authorize and authenticate the requests. +// Create a service account on "Credentials" page under "APIs & Auth" for your +// project at https://console.developers.google.com to download a JSON key file. +func JWTConfigFromJSON(jsonKey []byte, scope ...string) (*jwt.Config, error) { + var key struct { + Email string `json:"client_email"` + PrivateKey string `json:"private_key"` + } + if err := json.Unmarshal(jsonKey, &key); err != nil { + return nil, err + } + return &jwt.Config{ + Email: key.Email, + PrivateKey: []byte(key.PrivateKey), + Scopes: scope, + TokenURL: JWTTokenURL, + }, nil +} + +// ComputeTokenSource returns a token source that fetches access tokens +// from Google Compute Engine (GCE)'s metadata server. It's only valid to use +// this token source if your program is running on a GCE instance. +// If no account is specified, "default" is used. +// Further information about retrieving access tokens from the GCE metadata +// server can be found at https://cloud.google.com/compute/docs/authentication. 
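+//
+// Minimal sketch (only meaningful when running on a GCE instance):
+//
+//	ts := google.ComputeTokenSource("") // "" selects the "default" service account
+//	client := oauth2.NewClient(oauth2.NoContext, ts)
+//	client.Get("...")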
+func ComputeTokenSource(account string) oauth2.TokenSource { + return oauth2.ReuseTokenSource(nil, computeSource{account: account}) +} + +type computeSource struct { + account string +} + +func (cs computeSource) Token() (*oauth2.Token, error) { + if !metadata.OnGCE() { + return nil, errors.New("oauth2/google: can't get a token from the metadata service; not running on GCE") + } + acct := cs.account + if acct == "" { + acct = "default" + } + tokenJSON, err := metadata.Get("instance/service-accounts/" + acct + "/token") + if err != nil { + return nil, err + } + var res struct { + AccessToken string `json:"access_token"` + ExpiresInSec int `json:"expires_in"` + TokenType string `json:"token_type"` + } + err = json.NewDecoder(strings.NewReader(tokenJSON)).Decode(&res) + if err != nil { + return nil, fmt.Errorf("oauth2/google: invalid token JSON from metadata: %v", err) + } + if res.ExpiresInSec == 0 || res.AccessToken == "" { + return nil, fmt.Errorf("oauth2/google: incomplete token received from metadata") + } + return &oauth2.Token{ + AccessToken: res.AccessToken, + TokenType: res.TokenType, + Expiry: time.Now().Add(time.Duration(res.ExpiresInSec) * time.Second), + }, nil +} diff --git a/vendor/golang.org/x/oauth2/google/google_test.go b/vendor/golang.org/x/oauth2/google/google_test.go new file mode 100644 index 0000000..4cc0188 --- /dev/null +++ b/vendor/golang.org/x/oauth2/google/google_test.go @@ -0,0 +1,67 @@ +// Copyright 2015 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package google + +import ( + "strings" + "testing" +) + +var webJSONKey = []byte(` +{ + "web": { + "auth_uri": "https://google.com/o/oauth2/auth", + "client_secret": "3Oknc4jS_wA2r9i", + "token_uri": "https://google.com/o/oauth2/token", + "client_email": "222-nprqovg5k43uum874cs9osjt2koe97g8@developer.gserviceaccount.com", + "redirect_uris": ["https://www.example.com/oauth2callback"], + "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/222-nprqovg5k43uum874cs9osjt2koe97g8@developer.gserviceaccount.com", + "client_id": "222-nprqovg5k43uum874cs9osjt2koe97g8.apps.googleusercontent.com", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "javascript_origins": ["https://www.example.com"] + } +}`) + +var installedJSONKey = []byte(`{ + "installed": { + "client_id": "222-installed.apps.googleusercontent.com", + "redirect_uris": ["https://www.example.com/oauth2callback"] + } +}`) + +func TestConfigFromJSON(t *testing.T) { + conf, err := ConfigFromJSON(webJSONKey, "scope1", "scope2") + if err != nil { + t.Error(err) + } + if got, want := conf.ClientID, "222-nprqovg5k43uum874cs9osjt2koe97g8.apps.googleusercontent.com"; got != want { + t.Errorf("ClientID = %q; want %q", got, want) + } + if got, want := conf.ClientSecret, "3Oknc4jS_wA2r9i"; got != want { + t.Errorf("ClientSecret = %q; want %q", got, want) + } + if got, want := conf.RedirectURL, "https://www.example.com/oauth2callback"; got != want { + t.Errorf("RedictURL = %q; want %q", got, want) + } + if got, want := strings.Join(conf.Scopes, ","), "scope1,scope2"; got != want { + t.Errorf("Scopes = %q; want %q", got, want) + } + if got, want := conf.Endpoint.AuthURL, "https://google.com/o/oauth2/auth"; got != want { + t.Errorf("AuthURL = %q; want %q", got, want) + } + if got, want := conf.Endpoint.TokenURL, "https://google.com/o/oauth2/token"; got != want { + t.Errorf("TokenURL = %q; want %q", got, want) + } +} + +func 
TestConfigFromJSON_Installed(t *testing.T) { + conf, err := ConfigFromJSON(installedJSONKey) + if err != nil { + t.Error(err) + } + if got, want := conf.ClientID, "222-installed.apps.googleusercontent.com"; got != want { + t.Errorf("ClientID = %q; want %q", got, want) + } +} diff --git a/vendor/golang.org/x/oauth2/google/jwt.go b/vendor/golang.org/x/oauth2/google/jwt.go new file mode 100644 index 0000000..6f7ec3a --- /dev/null +++ b/vendor/golang.org/x/oauth2/google/jwt.go @@ -0,0 +1,71 @@ +// Copyright 2015 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package google + +import ( + "crypto/rsa" + "fmt" + "time" + + "golang.org/x/oauth2" + "golang.org/x/oauth2/internal" + "golang.org/x/oauth2/jws" +) + +// JWTAccessTokenSourceFromJSON uses a Google Developers service account JSON +// key file to read the credentials that authorize and authenticate the +// requests, and returns a TokenSource that does not use any OAuth2 flow but +// instead creates a JWT and sends that as the access token. +// The audience is typically a URL that specifies the scope of the credentials. +// +// Note that this is not a standard OAuth flow, but rather an +// optimization supported by a few Google services. +// Unless you know otherwise, you should use JWTConfigFromJSON instead. +func JWTAccessTokenSourceFromJSON(jsonKey []byte, audience string) (oauth2.TokenSource, error) { + cfg, err := JWTConfigFromJSON(jsonKey) + if err != nil { + return nil, fmt.Errorf("google: could not parse JSON key: %v", err) + } + pk, err := internal.ParseKey(cfg.PrivateKey) + if err != nil { + return nil, fmt.Errorf("google: could not parse key: %v", err) + } + ts := &jwtAccessTokenSource{ + email: cfg.Email, + audience: audience, + pk: pk, + } + tok, err := ts.Token() + if err != nil { + return nil, err + } + return oauth2.ReuseTokenSource(tok, ts), nil +} + +type jwtAccessTokenSource struct { + email, audience string + pk *rsa.PrivateKey +} + +func (ts *jwtAccessTokenSource) Token() (*oauth2.Token, error) { + iat := time.Now() + exp := iat.Add(time.Hour) + cs := &jws.ClaimSet{ + Iss: ts.email, + Sub: ts.email, + Aud: ts.audience, + Iat: iat.Unix(), + Exp: exp.Unix(), + } + hdr := &jws.Header{ + Algorithm: "RS256", + Typ: "JWT", + } + msg, err := jws.Encode(hdr, cs, ts.pk) + if err != nil { + return nil, fmt.Errorf("google: could not encode JWT: %v", err) + } + return &oauth2.Token{AccessToken: msg, TokenType: "Bearer", Expiry: exp}, nil +} diff --git a/vendor/golang.org/x/oauth2/google/sdk.go b/vendor/golang.org/x/oauth2/google/sdk.go new file mode 100644 index 0000000..01ba0ec --- /dev/null +++ b/vendor/golang.org/x/oauth2/google/sdk.go @@ -0,0 +1,168 @@ +// Copyright 2015 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package google + +import ( + "encoding/json" + "errors" + "fmt" + "net/http" + "os" + "os/user" + "path/filepath" + "runtime" + "strings" + "time" + + "golang.org/x/net/context" + "golang.org/x/oauth2" + "golang.org/x/oauth2/internal" +) + +type sdkCredentials struct { + Data []struct { + Credential struct { + ClientID string `json:"client_id"` + ClientSecret string `json:"client_secret"` + AccessToken string `json:"access_token"` + RefreshToken string `json:"refresh_token"` + TokenExpiry *time.Time `json:"token_expiry"` + } `json:"credential"` + Key struct { + Account string `json:"account"` + Scope string `json:"scope"` + } `json:"key"` + } +} + +// An SDKConfig provides access to tokens from an account already +// authorized via the Google Cloud SDK. +type SDKConfig struct { + conf oauth2.Config + initialToken *oauth2.Token +} + +// NewSDKConfig creates an SDKConfig for the given Google Cloud SDK +// account. If account is empty, the account currently active in +// Google Cloud SDK properties is used. +// Google Cloud SDK credentials must be created by running `gcloud auth` +// before using this function. +// The Google Cloud SDK is available at https://cloud.google.com/sdk/. +func NewSDKConfig(account string) (*SDKConfig, error) { + configPath, err := sdkConfigPath() + if err != nil { + return nil, fmt.Errorf("oauth2/google: error getting SDK config path: %v", err) + } + credentialsPath := filepath.Join(configPath, "credentials") + f, err := os.Open(credentialsPath) + if err != nil { + return nil, fmt.Errorf("oauth2/google: failed to load SDK credentials: %v", err) + } + defer f.Close() + + var c sdkCredentials + if err := json.NewDecoder(f).Decode(&c); err != nil { + return nil, fmt.Errorf("oauth2/google: failed to decode SDK credentials from %q: %v", credentialsPath, err) + } + if len(c.Data) == 0 { + return nil, fmt.Errorf("oauth2/google: no credentials found in %q, run `gcloud auth login` to create one", credentialsPath) + } + if account == "" { + propertiesPath := filepath.Join(configPath, "properties") + f, err := os.Open(propertiesPath) + if err != nil { + return nil, fmt.Errorf("oauth2/google: failed to load SDK properties: %v", err) + } + defer f.Close() + ini, err := internal.ParseINI(f) + if err != nil { + return nil, fmt.Errorf("oauth2/google: failed to parse SDK properties %q: %v", propertiesPath, err) + } + core, ok := ini["core"] + if !ok { + return nil, fmt.Errorf("oauth2/google: failed to find [core] section in %v", ini) + } + active, ok := core["account"] + if !ok { + return nil, fmt.Errorf("oauth2/google: failed to find %q attribute in %v", "account", core) + } + account = active + } + + for _, d := range c.Data { + if account == "" || d.Key.Account == account { + if d.Credential.AccessToken == "" && d.Credential.RefreshToken == "" { + return nil, fmt.Errorf("oauth2/google: no token available for account %q", account) + } + var expiry time.Time + if d.Credential.TokenExpiry != nil { + expiry = *d.Credential.TokenExpiry + } + return &SDKConfig{ + conf: oauth2.Config{ + ClientID: d.Credential.ClientID, + ClientSecret: d.Credential.ClientSecret, + Scopes: strings.Split(d.Key.Scope, " "), + Endpoint: Endpoint, + RedirectURL: "oob", + }, + initialToken: &oauth2.Token{ + AccessToken: d.Credential.AccessToken, + RefreshToken: d.Credential.RefreshToken, + Expiry: expiry, + }, + }, nil + } + } + return nil, fmt.Errorf("oauth2/google: no such credentials for account %q", account) +} + +// Client returns an HTTP client using Google Cloud SDK credentials to +// authorize 
requests. The token will auto-refresh as necessary. The +// underlying http.RoundTripper will be obtained using the provided +// context. The returned client and its Transport should not be +// modified. +func (c *SDKConfig) Client(ctx context.Context) *http.Client { + return &http.Client{ + Transport: &oauth2.Transport{ + Source: c.TokenSource(ctx), + }, + } +} + +// TokenSource returns an oauth2.TokenSource that retrieve tokens from +// Google Cloud SDK credentials using the provided context. +// It will returns the current access token stored in the credentials, +// and refresh it when it expires, but it won't update the credentials +// with the new access token. +func (c *SDKConfig) TokenSource(ctx context.Context) oauth2.TokenSource { + return c.conf.TokenSource(ctx, c.initialToken) +} + +// Scopes are the OAuth 2.0 scopes the current account is authorized for. +func (c *SDKConfig) Scopes() []string { + return c.conf.Scopes +} + +// sdkConfigPath tries to guess where the gcloud config is located. +// It can be overridden during tests. +var sdkConfigPath = func() (string, error) { + if runtime.GOOS == "windows" { + return filepath.Join(os.Getenv("APPDATA"), "gcloud"), nil + } + homeDir := guessUnixHomeDir() + if homeDir == "" { + return "", errors.New("unable to get current user home directory: os/user lookup failed; $HOME is empty") + } + return filepath.Join(homeDir, ".config", "gcloud"), nil +} + +func guessUnixHomeDir() string { + usr, err := user.Current() + if err == nil { + return usr.HomeDir + } + return os.Getenv("HOME") +} diff --git a/vendor/golang.org/x/oauth2/google/sdk_test.go b/vendor/golang.org/x/oauth2/google/sdk_test.go new file mode 100644 index 0000000..79df889 --- /dev/null +++ b/vendor/golang.org/x/oauth2/google/sdk_test.go @@ -0,0 +1,46 @@ +// Copyright 2015 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package google + +import "testing" + +func TestSDKConfig(t *testing.T) { + sdkConfigPath = func() (string, error) { + return "testdata/gcloud", nil + } + + tests := []struct { + account string + accessToken string + err bool + }{ + {"", "bar_access_token", false}, + {"foo@example.com", "foo_access_token", false}, + {"bar@example.com", "bar_access_token", false}, + {"baz@serviceaccount.example.com", "", true}, + } + for _, tt := range tests { + c, err := NewSDKConfig(tt.account) + if got, want := err != nil, tt.err; got != want { + if !tt.err { + t.Errorf("expected no error, got error: %v", tt.err, err) + } else { + t.Errorf("expected error, got none") + } + continue + } + if err != nil { + continue + } + tok := c.initialToken + if tok == nil { + t.Errorf("expected token %q, got: nil", tt.accessToken) + continue + } + if tok.AccessToken != tt.accessToken { + t.Errorf("expected token %q, got: %q", tt.accessToken, tok.AccessToken) + } + } +} diff --git a/vendor/golang.org/x/oauth2/internal/oauth2.go b/vendor/golang.org/x/oauth2/internal/oauth2.go new file mode 100644 index 0000000..dc8ebfc --- /dev/null +++ b/vendor/golang.org/x/oauth2/internal/oauth2.go @@ -0,0 +1,76 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package internal contains support packages for oauth2 package. 
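+//
+// As a rough illustration, ParseKey in this file accepts either a PEM block or
+// raw DER bytes (the key material shown is a placeholder):
+//
+//	pk, err := internal.ParseKey([]byte("-----BEGIN RSA PRIVATE KEY-----..."))
+//	if err != nil {
+//		// not a valid PKCS1/PKCS8 RSA private key
+//	}
+//	_ = pk // *rsa.PrivateKey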
+package internal + +import ( + "bufio" + "crypto/rsa" + "crypto/x509" + "encoding/pem" + "errors" + "fmt" + "io" + "strings" +) + +// ParseKey converts the binary contents of a private key file +// to an *rsa.PrivateKey. It detects whether the private key is in a +// PEM container or not. If so, it extracts the the private key +// from PEM container before conversion. It only supports PEM +// containers with no passphrase. +func ParseKey(key []byte) (*rsa.PrivateKey, error) { + block, _ := pem.Decode(key) + if block != nil { + key = block.Bytes + } + parsedKey, err := x509.ParsePKCS8PrivateKey(key) + if err != nil { + parsedKey, err = x509.ParsePKCS1PrivateKey(key) + if err != nil { + return nil, fmt.Errorf("private key should be a PEM or plain PKSC1 or PKCS8; parse error: %v", err) + } + } + parsed, ok := parsedKey.(*rsa.PrivateKey) + if !ok { + return nil, errors.New("private key is invalid") + } + return parsed, nil +} + +func ParseINI(ini io.Reader) (map[string]map[string]string, error) { + result := map[string]map[string]string{ + "": map[string]string{}, // root section + } + scanner := bufio.NewScanner(ini) + currentSection := "" + for scanner.Scan() { + line := strings.TrimSpace(scanner.Text()) + if strings.HasPrefix(line, ";") { + // comment. + continue + } + if strings.HasPrefix(line, "[") && strings.HasSuffix(line, "]") { + currentSection = strings.TrimSpace(line[1 : len(line)-1]) + result[currentSection] = map[string]string{} + continue + } + parts := strings.SplitN(line, "=", 2) + if len(parts) == 2 && parts[0] != "" { + result[currentSection][strings.TrimSpace(parts[0])] = strings.TrimSpace(parts[1]) + } + } + if err := scanner.Err(); err != nil { + return nil, fmt.Errorf("error scanning ini: %v", err) + } + return result, nil +} + +func CondVal(v string) []string { + if v == "" { + return nil + } + return []string{v} +} diff --git a/vendor/golang.org/x/oauth2/internal/oauth2_test.go b/vendor/golang.org/x/oauth2/internal/oauth2_test.go new file mode 100644 index 0000000..014a351 --- /dev/null +++ b/vendor/golang.org/x/oauth2/internal/oauth2_test.go @@ -0,0 +1,62 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package internal contains support packages for oauth2 package. 
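+//
+// ParseINI, exercised by the test below, turns gcloud-style INI text into a
+// nested section/key map. A rough sketch with made-up input:
+//
+//	ini, _ := ParseINI(strings.NewReader("[core]\naccount = user@example.com\n"))
+//	account := ini["core"]["account"] // "user@example.com"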
+package internal + +import ( + "reflect" + "strings" + "testing" +) + +func TestParseINI(t *testing.T) { + tests := []struct { + ini string + want map[string]map[string]string + }{ + { + `root = toor +[foo] +bar = hop +ini = nin +`, + map[string]map[string]string{ + "": map[string]string{"root": "toor"}, + "foo": map[string]string{"bar": "hop", "ini": "nin"}, + }, + }, + { + `[empty] +[section] +empty= +`, + map[string]map[string]string{ + "": map[string]string{}, + "empty": map[string]string{}, + "section": map[string]string{"empty": ""}, + }, + }, + { + `ignore +[invalid +=stuff +;comment=true +`, + map[string]map[string]string{ + "": map[string]string{}, + }, + }, + } + for _, tt := range tests { + result, err := ParseINI(strings.NewReader(tt.ini)) + if err != nil { + t.Errorf("ParseINI(%q) error %v, want: no error", tt.ini, err) + continue + } + if !reflect.DeepEqual(result, tt.want) { + t.Errorf("ParseINI(%q) = %#v, want: %#v", tt.ini, result, tt.want) + } + } +} diff --git a/vendor/golang.org/x/oauth2/internal/token.go b/vendor/golang.org/x/oauth2/internal/token.go new file mode 100644 index 0000000..db209d8 --- /dev/null +++ b/vendor/golang.org/x/oauth2/internal/token.go @@ -0,0 +1,214 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package internal contains support packages for oauth2 package. +package internal + +import ( + "encoding/json" + "fmt" + "io" + "io/ioutil" + "mime" + "net/http" + "net/url" + "strconv" + "strings" + "time" + + "golang.org/x/net/context" +) + +// Token represents the crendentials used to authorize +// the requests to access protected resources on the OAuth 2.0 +// provider's backend. +// +// This type is a mirror of oauth2.Token and exists to break +// an otherwise-circular dependency. Other internal packages +// should convert this Token into an oauth2.Token before use. +type Token struct { + // AccessToken is the token that authorizes and authenticates + // the requests. + AccessToken string + + // TokenType is the type of token. + // The Type method returns either this or "Bearer", the default. + TokenType string + + // RefreshToken is a token that's used by the application + // (as opposed to the user) to refresh the access token + // if it expires. + RefreshToken string + + // Expiry is the optional expiration time of the access token. + // + // If zero, TokenSource implementations will reuse the same + // token forever and RefreshToken or equivalent + // mechanisms for that TokenSource will not be used. + Expiry time.Time + + // Raw optionally contains extra metadata from the server + // when updating a token. + Raw interface{} +} + +// tokenJSON is the struct representing the HTTP response from OAuth2 +// providers returning a token in JSON form. 
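+// For example, a response body such as
+//
+//	{"access_token":"abc","token_type":"bearer","expires_in":3600}
+//
+// decodes into a tokenJSON whose expiry() is roughly one hour from now.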
+type tokenJSON struct { + AccessToken string `json:"access_token"` + TokenType string `json:"token_type"` + RefreshToken string `json:"refresh_token"` + ExpiresIn expirationTime `json:"expires_in"` // at least PayPal returns string, while most return number + Expires expirationTime `json:"expires"` // broken Facebook spelling of expires_in +} + +func (e *tokenJSON) expiry() (t time.Time) { + if v := e.ExpiresIn; v != 0 { + return time.Now().Add(time.Duration(v) * time.Second) + } + if v := e.Expires; v != 0 { + return time.Now().Add(time.Duration(v) * time.Second) + } + return +} + +type expirationTime int32 + +func (e *expirationTime) UnmarshalJSON(b []byte) error { + var n json.Number + err := json.Unmarshal(b, &n) + if err != nil { + return err + } + i, err := n.Int64() + if err != nil { + return err + } + *e = expirationTime(i) + return nil +} + +var brokenAuthHeaderProviders = []string{ + "https://accounts.google.com/", + "https://www.googleapis.com/", + "https://api.instagram.com/", + "https://www.douban.com/", + "https://api.dropbox.com/", + "https://api.soundcloud.com/", + "https://www.linkedin.com/", + "https://api.twitch.tv/", + "https://oauth.vk.com/", + "https://api.odnoklassniki.ru/", + "https://connect.stripe.com/", + "https://api.pushbullet.com/", + "https://oauth.sandbox.trainingpeaks.com/", + "https://oauth.trainingpeaks.com/", + "https://www.strava.com/oauth/", + "https://app.box.com/", + "https://test-sandbox.auth.corp.google.com", + "https://user.gini.net/", + "https://api.netatmo.net/", + "https://slack.com/", +} + +// providerAuthHeaderWorks reports whether the OAuth2 server identified by the tokenURL +// implements the OAuth2 spec correctly +// See https://code.google.com/p/goauth2/issues/detail?id=31 for background. +// In summary: +// - Reddit only accepts client secret in the Authorization header +// - Dropbox accepts either it in URL param or Auth header, but not both. +// - Google only accepts URL param (not spec compliant?), not Auth header +// - Stripe only accepts client secret in Auth header with Bearer method, not Basic +func providerAuthHeaderWorks(tokenURL string) bool { + for _, s := range brokenAuthHeaderProviders { + if strings.HasPrefix(tokenURL, s) { + // Some sites fail to implement the OAuth2 spec fully. + return false + } + } + + // Assume the provider implements the spec properly + // otherwise. We can add more exceptions as they're + // discovered. We will _not_ be adding configurable hooks + // to this package to let users select server bugs. 
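+	// For example, "https://accounts.google.com/o/oauth2/token" matches a
+	// prefix in the list above and is reported as broken (false), while an
+	// unlisted URL such as "https://provider.example.com/token" returns true.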
+ return true +} + +func RetrieveToken(ctx context.Context, ClientID, ClientSecret, TokenURL string, v url.Values) (*Token, error) { + hc, err := ContextClient(ctx) + if err != nil { + return nil, err + } + v.Set("client_id", ClientID) + bustedAuth := !providerAuthHeaderWorks(TokenURL) + if bustedAuth && ClientSecret != "" { + v.Set("client_secret", ClientSecret) + } + req, err := http.NewRequest("POST", TokenURL, strings.NewReader(v.Encode())) + if err != nil { + return nil, err + } + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + if !bustedAuth { + req.SetBasicAuth(ClientID, ClientSecret) + } + r, err := hc.Do(req) + if err != nil { + return nil, err + } + defer r.Body.Close() + body, err := ioutil.ReadAll(io.LimitReader(r.Body, 1<<20)) + if err != nil { + return nil, fmt.Errorf("oauth2: cannot fetch token: %v", err) + } + if code := r.StatusCode; code < 200 || code > 299 { + return nil, fmt.Errorf("oauth2: cannot fetch token: %v\nResponse: %s", r.Status, body) + } + + var token *Token + content, _, _ := mime.ParseMediaType(r.Header.Get("Content-Type")) + switch content { + case "application/x-www-form-urlencoded", "text/plain": + vals, err := url.ParseQuery(string(body)) + if err != nil { + return nil, err + } + token = &Token{ + AccessToken: vals.Get("access_token"), + TokenType: vals.Get("token_type"), + RefreshToken: vals.Get("refresh_token"), + Raw: vals, + } + e := vals.Get("expires_in") + if e == "" { + // TODO(jbd): Facebook's OAuth2 implementation is broken and + // returns expires_in field in expires. Remove the fallback to expires, + // when Facebook fixes their implementation. + e = vals.Get("expires") + } + expires, _ := strconv.Atoi(e) + if expires != 0 { + token.Expiry = time.Now().Add(time.Duration(expires) * time.Second) + } + default: + var tj tokenJSON + if err = json.Unmarshal(body, &tj); err != nil { + return nil, err + } + token = &Token{ + AccessToken: tj.AccessToken, + TokenType: tj.TokenType, + RefreshToken: tj.RefreshToken, + Expiry: tj.expiry(), + Raw: make(map[string]interface{}), + } + json.Unmarshal(body, &token.Raw) // no error checks for optional fields + } + // Don't overwrite `RefreshToken` with an empty value + // if this was a token refreshing request. + if token.RefreshToken == "" { + token.RefreshToken = v.Get("refresh_token") + } + return token, nil +} diff --git a/vendor/golang.org/x/oauth2/internal/token_test.go b/vendor/golang.org/x/oauth2/internal/token_test.go new file mode 100644 index 0000000..864f6fa --- /dev/null +++ b/vendor/golang.org/x/oauth2/internal/token_test.go @@ -0,0 +1,28 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package internal contains support packages for oauth2 package. 
+package internal + +import ( + "fmt" + "testing" +) + +func Test_providerAuthHeaderWorks(t *testing.T) { + for _, p := range brokenAuthHeaderProviders { + if providerAuthHeaderWorks(p) { + t.Errorf("URL: %s not found in list", p) + } + p := fmt.Sprintf("%ssomesuffix", p) + if providerAuthHeaderWorks(p) { + t.Errorf("URL: %s not found in list", p) + } + } + p := "https://api.not-in-the-list-example.com/" + if !providerAuthHeaderWorks(p) { + t.Errorf("URL: %s found in list", p) + } + +} diff --git a/vendor/golang.org/x/oauth2/internal/transport.go b/vendor/golang.org/x/oauth2/internal/transport.go new file mode 100644 index 0000000..521e7b4 --- /dev/null +++ b/vendor/golang.org/x/oauth2/internal/transport.go @@ -0,0 +1,67 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package internal contains support packages for oauth2 package. +package internal + +import ( + "net/http" + + "golang.org/x/net/context" +) + +// HTTPClient is the context key to use with golang.org/x/net/context's +// WithValue function to associate an *http.Client value with a context. +var HTTPClient ContextKey + +// ContextKey is just an empty struct. It exists so HTTPClient can be +// an immutable public variable with a unique type. It's immutable +// because nobody else can create a ContextKey, being unexported. +type ContextKey struct{} + +// ContextClientFunc is a func which tries to return an *http.Client +// given a Context value. If it returns an error, the search stops +// with that error. If it returns (nil, nil), the search continues +// down the list of registered funcs. +type ContextClientFunc func(context.Context) (*http.Client, error) + +var contextClientFuncs []ContextClientFunc + +func RegisterContextClientFunc(fn ContextClientFunc) { + contextClientFuncs = append(contextClientFuncs, fn) +} + +func ContextClient(ctx context.Context) (*http.Client, error) { + for _, fn := range contextClientFuncs { + c, err := fn(ctx) + if err != nil { + return nil, err + } + if c != nil { + return c, nil + } + } + if hc, ok := ctx.Value(HTTPClient).(*http.Client); ok { + return hc, nil + } + return http.DefaultClient, nil +} + +func ContextTransport(ctx context.Context) http.RoundTripper { + hc, err := ContextClient(ctx) + // This is a rare error case (somebody using nil on App Engine). + if err != nil { + return ErrorTransport{err} + } + return hc.Transport +} + +// ErrorTransport returns the specified error on RoundTrip. +// This RoundTripper should be used in rare error cases where +// error handling can be postponed to response handling time. +type ErrorTransport struct{ Err error } + +func (t ErrorTransport) RoundTrip(*http.Request) (*http.Response, error) { + return nil, t.Err +} diff --git a/vendor/golang.org/x/oauth2/jws/jws.go b/vendor/golang.org/x/oauth2/jws/jws.go new file mode 100644 index 0000000..bc7df09 --- /dev/null +++ b/vendor/golang.org/x/oauth2/jws/jws.go @@ -0,0 +1,159 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package jws provides encoding and decoding utilities for +// signed JWS messages. 
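+//
+// A rough usage sketch (claims and key are placeholders):
+//
+//	claims := &jws.ClaimSet{Iss: "me@example.com", Aud: "https://provider.example.com/token"}
+//	header := &jws.Header{Algorithm: "RS256", Typ: "JWT"}
+//	token, err := jws.Encode(header, claims, rsaKey) // rsaKey is a *rsa.PrivateKey
+//	if err == nil {
+//		decoded, _ := jws.Decode(token) // recovers the claim set
+//		_ = decoded
+//	}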
+package jws // import "golang.org/x/oauth2/jws" + +import ( + "bytes" + "crypto" + "crypto/rand" + "crypto/rsa" + "crypto/sha256" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "strings" + "time" +) + +// ClaimSet contains information about the JWT signature including the +// permissions being requested (scopes), the target of the token, the issuer, +// the time the token was issued, and the lifetime of the token. +type ClaimSet struct { + Iss string `json:"iss"` // email address of the client_id of the application making the access token request + Scope string `json:"scope,omitempty"` // space-delimited list of the permissions the application requests + Aud string `json:"aud"` // descriptor of the intended target of the assertion (Optional). + Exp int64 `json:"exp"` // the expiration time of the assertion (seconds since Unix epoch) + Iat int64 `json:"iat"` // the time the assertion was issued (seconds since Unix epoch) + Typ string `json:"typ,omitempty"` // token type (Optional). + + // Email for which the application is requesting delegated access (Optional). + Sub string `json:"sub,omitempty"` + + // The old name of Sub. Client keeps setting Prn to be + // complaint with legacy OAuth 2.0 providers. (Optional) + Prn string `json:"prn,omitempty"` + + // See http://tools.ietf.org/html/draft-jones-json-web-token-10#section-4.3 + // This array is marshalled using custom code (see (c *ClaimSet) encode()). + PrivateClaims map[string]interface{} `json:"-"` +} + +func (c *ClaimSet) encode() (string, error) { + // Reverting time back for machines whose time is not perfectly in sync. + // If client machine's time is in the future according + // to Google servers, an access token will not be issued. + now := time.Now().Add(-10 * time.Second) + if c.Iat == 0 { + c.Iat = now.Unix() + } + if c.Exp == 0 { + c.Exp = now.Add(time.Hour).Unix() + } + if c.Exp < c.Iat { + return "", fmt.Errorf("jws: invalid Exp = %v; must be later than Iat = %v", c.Exp, c.Iat) + } + + b, err := json.Marshal(c) + if err != nil { + return "", err + } + + if len(c.PrivateClaims) == 0 { + return base64Encode(b), nil + } + + // Marshal private claim set and then append it to b. + prv, err := json.Marshal(c.PrivateClaims) + if err != nil { + return "", fmt.Errorf("jws: invalid map of private claims %v", c.PrivateClaims) + } + + // Concatenate public and private claim JSON objects. + if !bytes.HasSuffix(b, []byte{'}'}) { + return "", fmt.Errorf("jws: invalid JSON %s", b) + } + if !bytes.HasPrefix(prv, []byte{'{'}) { + return "", fmt.Errorf("jws: invalid JSON %s", prv) + } + b[len(b)-1] = ',' // Replace closing curly brace with a comma. + b = append(b, prv[1:]...) // Append private claims. + return base64Encode(b), nil +} + +// Header represents the header for the signed JWS payloads. +type Header struct { + // The algorithm used for signature. + Algorithm string `json:"alg"` + + // Represents the token type. + Typ string `json:"typ"` +} + +func (h *Header) encode() (string, error) { + b, err := json.Marshal(h) + if err != nil { + return "", err + } + return base64Encode(b), nil +} + +// Decode decodes a claim set from a JWS payload. +func Decode(payload string) (*ClaimSet, error) { + // decode returned id token to get expiry + s := strings.Split(payload, ".") + if len(s) < 2 { + // TODO(jbd): Provide more context about the error. 
+ return nil, errors.New("jws: invalid token received") + } + decoded, err := base64Decode(s[1]) + if err != nil { + return nil, err + } + c := &ClaimSet{} + err = json.NewDecoder(bytes.NewBuffer(decoded)).Decode(c) + return c, err +} + +// Encode encodes a signed JWS with provided header and claim set. +func Encode(header *Header, c *ClaimSet, signature *rsa.PrivateKey) (string, error) { + head, err := header.encode() + if err != nil { + return "", err + } + cs, err := c.encode() + if err != nil { + return "", err + } + ss := fmt.Sprintf("%s.%s", head, cs) + h := sha256.New() + h.Write([]byte(ss)) + b, err := rsa.SignPKCS1v15(rand.Reader, signature, crypto.SHA256, h.Sum(nil)) + if err != nil { + return "", err + } + sig := base64Encode(b) + return fmt.Sprintf("%s.%s", ss, sig), nil +} + +// base64Encode returns and Base64url encoded version of the input string with any +// trailing "=" stripped. +func base64Encode(b []byte) string { + return strings.TrimRight(base64.URLEncoding.EncodeToString(b), "=") +} + +// base64Decode decodes the Base64url encoded string +func base64Decode(s string) ([]byte, error) { + // add back missing padding + switch len(s) % 4 { + case 2: + s += "==" + case 3: + s += "=" + } + return base64.URLEncoding.DecodeString(s) +} diff --git a/vendor/golang.org/x/oauth2/jwt/example_test.go b/vendor/golang.org/x/oauth2/jwt/example_test.go new file mode 100644 index 0000000..6d61883 --- /dev/null +++ b/vendor/golang.org/x/oauth2/jwt/example_test.go @@ -0,0 +1,31 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package jwt_test + +import ( + "golang.org/x/oauth2" + "golang.org/x/oauth2/jwt" +) + +func ExampleJWTConfig() { + conf := &jwt.Config{ + Email: "xxx@developer.com", + // The contents of your RSA private key or your PEM file + // that contains a private key. + // If you have a p12 file instead, you + // can use `openssl` to export the private key into a pem file. + // + // $ openssl pkcs12 -in key.p12 -out key.pem -nodes + // + // It only supports PEM containers with no passphrase. + PrivateKey: []byte("-----BEGIN RSA PRIVATE KEY-----..."), + Subject: "user@example.com", + TokenURL: "https://provider.com/o/oauth2/token", + } + // Initiate an http.Client, the following GET request will be + // authorized and authenticated on the behalf of user@example.com. + client := conf.Client(oauth2.NoContext) + client.Get("...") +} diff --git a/vendor/golang.org/x/oauth2/jwt/jwt.go b/vendor/golang.org/x/oauth2/jwt/jwt.go new file mode 100644 index 0000000..11a2687 --- /dev/null +++ b/vendor/golang.org/x/oauth2/jwt/jwt.go @@ -0,0 +1,153 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package jwt implements the OAuth 2.0 JSON Web Token flow, commonly +// known as "two-legged OAuth 2.0". +// +// See: https://tools.ietf.org/html/draft-ietf-oauth-jwt-bearer-12 +package jwt + +import ( + "encoding/json" + "fmt" + "io" + "io/ioutil" + "net/http" + "net/url" + "strings" + "time" + + "golang.org/x/net/context" + "golang.org/x/oauth2" + "golang.org/x/oauth2/internal" + "golang.org/x/oauth2/jws" +) + +var ( + defaultGrantType = "urn:ietf:params:oauth:grant-type:jwt-bearer" + defaultHeader = &jws.Header{Algorithm: "RS256", Typ: "JWT"} +) + +// Config is the configuration for using JWT to fetch tokens, +// commonly known as "two-legged OAuth 2.0". 
+type Config struct { + // Email is the OAuth client identifier used when communicating with + // the configured OAuth provider. + Email string + + // PrivateKey contains the contents of an RSA private key or the + // contents of a PEM file that contains a private key. The provided + // private key is used to sign JWT payloads. + // PEM containers with a passphrase are not supported. + // Use the following command to convert a PKCS 12 file into a PEM. + // + // $ openssl pkcs12 -in key.p12 -out key.pem -nodes + // + PrivateKey []byte + + // Subject is the optional user to impersonate. + Subject string + + // Scopes optionally specifies a list of requested permission scopes. + Scopes []string + + // TokenURL is the endpoint required to complete the 2-legged JWT flow. + TokenURL string + + // Expires optionally specifies how long the token is valid for. + Expires time.Duration +} + +// TokenSource returns a JWT TokenSource using the configuration +// in c and the HTTP client from the provided context. +func (c *Config) TokenSource(ctx context.Context) oauth2.TokenSource { + return oauth2.ReuseTokenSource(nil, jwtSource{ctx, c}) +} + +// Client returns an HTTP client wrapping the context's +// HTTP transport and adding Authorization headers with tokens +// obtained from c. +// +// The returned client and its Transport should not be modified. +func (c *Config) Client(ctx context.Context) *http.Client { + return oauth2.NewClient(ctx, c.TokenSource(ctx)) +} + +// jwtSource is a source that always does a signed JWT request for a token. +// It should typically be wrapped with a reuseTokenSource. +type jwtSource struct { + ctx context.Context + conf *Config +} + +func (js jwtSource) Token() (*oauth2.Token, error) { + pk, err := internal.ParseKey(js.conf.PrivateKey) + if err != nil { + return nil, err + } + hc := oauth2.NewClient(js.ctx, nil) + claimSet := &jws.ClaimSet{ + Iss: js.conf.Email, + Scope: strings.Join(js.conf.Scopes, " "), + Aud: js.conf.TokenURL, + } + if subject := js.conf.Subject; subject != "" { + claimSet.Sub = subject + // prn is the old name of sub. Keep setting it + // to be compatible with legacy OAuth 2.0 providers. + claimSet.Prn = subject + } + if t := js.conf.Expires; t > 0 { + claimSet.Exp = time.Now().Add(t).Unix() + } + payload, err := jws.Encode(defaultHeader, claimSet, pk) + if err != nil { + return nil, err + } + v := url.Values{} + v.Set("grant_type", defaultGrantType) + v.Set("assertion", payload) + resp, err := hc.PostForm(js.conf.TokenURL, v) + if err != nil { + return nil, fmt.Errorf("oauth2: cannot fetch token: %v", err) + } + defer resp.Body.Close() + body, err := ioutil.ReadAll(io.LimitReader(resp.Body, 1<<20)) + if err != nil { + return nil, fmt.Errorf("oauth2: cannot fetch token: %v", err) + } + if c := resp.StatusCode; c < 200 || c > 299 { + return nil, fmt.Errorf("oauth2: cannot fetch token: %v\nResponse: %s", resp.Status, body) + } + // tokenRes is the JSON response body. 
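+	// For example: {"access_token":"abc","token_type":"Bearer","expires_in":3600},
+	// optionally with an "id_token" for providers that issue one.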
+ var tokenRes struct { + AccessToken string `json:"access_token"` + TokenType string `json:"token_type"` + IDToken string `json:"id_token"` + ExpiresIn int64 `json:"expires_in"` // relative seconds from now + } + if err := json.Unmarshal(body, &tokenRes); err != nil { + return nil, fmt.Errorf("oauth2: cannot fetch token: %v", err) + } + token := &oauth2.Token{ + AccessToken: tokenRes.AccessToken, + TokenType: tokenRes.TokenType, + } + raw := make(map[string]interface{}) + json.Unmarshal(body, &raw) // no error checks for optional fields + token = token.WithExtra(raw) + + if secs := tokenRes.ExpiresIn; secs > 0 { + token.Expiry = time.Now().Add(time.Duration(secs) * time.Second) + } + if v := tokenRes.IDToken; v != "" { + // decode returned id token to get expiry + claimSet, err := jws.Decode(v) + if err != nil { + return nil, fmt.Errorf("oauth2: error decoding JWT token: %v", err) + } + token.Expiry = time.Unix(claimSet.Exp, 0) + } + return token, nil +} diff --git a/vendor/golang.org/x/oauth2/jwt/jwt_test.go b/vendor/golang.org/x/oauth2/jwt/jwt_test.go new file mode 100644 index 0000000..da922c3 --- /dev/null +++ b/vendor/golang.org/x/oauth2/jwt/jwt_test.go @@ -0,0 +1,134 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package jwt + +import ( + "net/http" + "net/http/httptest" + "testing" + + "golang.org/x/oauth2" +) + +var dummyPrivateKey = []byte(`-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEAx4fm7dngEmOULNmAs1IGZ9Apfzh+BkaQ1dzkmbUgpcoghucE +DZRnAGd2aPyB6skGMXUytWQvNYav0WTR00wFtX1ohWTfv68HGXJ8QXCpyoSKSSFY +fuP9X36wBSkSX9J5DVgiuzD5VBdzUISSmapjKm+DcbRALjz6OUIPEWi1Tjl6p5RK +1w41qdbmt7E5/kGhKLDuT7+M83g4VWhgIvaAXtnhklDAggilPPa8ZJ1IFe31lNlr +k4DRk38nc6sEutdf3RL7QoH7FBusI7uXV03DC6dwN1kP4GE7bjJhcRb/7jYt7CQ9 +/E9Exz3c0yAp0yrTg0Fwh+qxfH9dKwN52S7SBwIDAQABAoIBAQCaCs26K07WY5Jt +3a2Cw3y2gPrIgTCqX6hJs7O5ByEhXZ8nBwsWANBUe4vrGaajQHdLj5OKfsIDrOvn +2NI1MqflqeAbu/kR32q3tq8/Rl+PPiwUsW3E6Pcf1orGMSNCXxeducF2iySySzh3 +nSIhCG5uwJDWI7a4+9KiieFgK1pt/Iv30q1SQS8IEntTfXYwANQrfKUVMmVF9aIK +6/WZE2yd5+q3wVVIJ6jsmTzoDCX6QQkkJICIYwCkglmVy5AeTckOVwcXL0jqw5Kf +5/soZJQwLEyBoQq7Kbpa26QHq+CJONetPP8Ssy8MJJXBT+u/bSseMb3Zsr5cr43e +DJOhwsThAoGBAPY6rPKl2NT/K7XfRCGm1sbWjUQyDShscwuWJ5+kD0yudnT/ZEJ1 +M3+KS/iOOAoHDdEDi9crRvMl0UfNa8MAcDKHflzxg2jg/QI+fTBjPP5GOX0lkZ9g +z6VePoVoQw2gpPFVNPPTxKfk27tEzbaffvOLGBEih0Kb7HTINkW8rIlzAoGBAM9y +1yr+jvfS1cGFtNU+Gotoihw2eMKtIqR03Yn3n0PK1nVCDKqwdUqCypz4+ml6cxRK +J8+Pfdh7D+ZJd4LEG6Y4QRDLuv5OA700tUoSHxMSNn3q9As4+T3MUyYxWKvTeu3U +f2NWP9ePU0lV8ttk7YlpVRaPQmc1qwooBA/z/8AdAoGAW9x0HWqmRICWTBnpjyxx +QGlW9rQ9mHEtUotIaRSJ6K/F3cxSGUEkX1a3FRnp6kPLcckC6NlqdNgNBd6rb2rA +cPl/uSkZP42Als+9YMoFPU/xrrDPbUhu72EDrj3Bllnyb168jKLa4VBOccUvggxr +Dm08I1hgYgdN5huzs7y6GeUCgYEAj+AZJSOJ6o1aXS6rfV3mMRve9bQ9yt8jcKXw +5HhOCEmMtaSKfnOF1Ziih34Sxsb7O2428DiX0mV/YHtBnPsAJidL0SdLWIapBzeg +KHArByIRkwE6IvJvwpGMdaex1PIGhx5i/3VZL9qiq/ElT05PhIb+UXgoWMabCp84 +OgxDK20CgYAeaFo8BdQ7FmVX2+EEejF+8xSge6WVLtkaon8bqcn6P0O8lLypoOhd +mJAYH8WU+UAy9pecUnDZj14LAGNVmYcse8HFX71MoshnvCTFEPVo4rZxIAGwMpeJ +5jgQ3slYLpqrGlcbLgUXBUgzEO684Wk/UV9DFPlHALVqCfXQ9dpJPg== +-----END RSA PRIVATE KEY-----`) + +func TestJWTFetch_JSONResponse(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.Write([]byte(`{ + "access_token": "90d64460d14870c08c81352a05dedd3465940a7c", + "scope": "user", + "token_type": "bearer", + "expires_in": 3600 + }`)) 
+ })) + defer ts.Close() + + conf := &Config{ + Email: "aaa@xxx.com", + PrivateKey: dummyPrivateKey, + TokenURL: ts.URL, + } + tok, err := conf.TokenSource(oauth2.NoContext).Token() + if err != nil { + t.Fatal(err) + } + if !tok.Valid() { + t.Errorf("Token invalid") + } + if tok.AccessToken != "90d64460d14870c08c81352a05dedd3465940a7c" { + t.Errorf("Unexpected access token, %#v", tok.AccessToken) + } + if tok.TokenType != "bearer" { + t.Errorf("Unexpected token type, %#v", tok.TokenType) + } + if tok.Expiry.IsZero() { + t.Errorf("Unexpected token expiry, %#v", tok.Expiry) + } + scope := tok.Extra("scope") + if scope != "user" { + t.Errorf("Unexpected value for scope: %v", scope) + } +} + +func TestJWTFetch_BadResponse(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.Write([]byte(`{"scope": "user", "token_type": "bearer"}`)) + })) + defer ts.Close() + + conf := &Config{ + Email: "aaa@xxx.com", + PrivateKey: dummyPrivateKey, + TokenURL: ts.URL, + } + tok, err := conf.TokenSource(oauth2.NoContext).Token() + if err != nil { + t.Fatal(err) + } + if tok == nil { + t.Fatalf("token is nil") + } + if tok.Valid() { + t.Errorf("token is valid. want invalid.") + } + if tok.AccessToken != "" { + t.Errorf("Unexpected non-empty access token %q.", tok.AccessToken) + } + if want := "bearer"; tok.TokenType != want { + t.Errorf("TokenType = %q; want %q", tok.TokenType, want) + } + scope := tok.Extra("scope") + if want := "user"; scope != want { + t.Errorf("token scope = %q; want %q", scope, want) + } +} + +func TestJWTFetch_BadResponseType(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.Write([]byte(`{"access_token":123, "scope": "user", "token_type": "bearer"}`)) + })) + defer ts.Close() + conf := &Config{ + Email: "aaa@xxx.com", + PrivateKey: dummyPrivateKey, + TokenURL: ts.URL, + } + tok, err := conf.TokenSource(oauth2.NoContext).Token() + if err == nil { + t.Error("got a token; expected error") + if tok.AccessToken != "" { + t.Errorf("Unexpected access token, %#v.", tok.AccessToken) + } + } +} diff --git a/vendor/golang.org/x/oauth2/linkedin/linkedin.go b/vendor/golang.org/x/oauth2/linkedin/linkedin.go new file mode 100644 index 0000000..de91d5b --- /dev/null +++ b/vendor/golang.org/x/oauth2/linkedin/linkedin.go @@ -0,0 +1,16 @@ +// Copyright 2015 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package linkedin provides constants for using OAuth2 to access LinkedIn. +package linkedin // import "golang.org/x/oauth2/linkedin" + +import ( + "golang.org/x/oauth2" +) + +// Endpoint is LinkedIn's OAuth 2.0 endpoint. +var Endpoint = oauth2.Endpoint{ + AuthURL: "https://www.linkedin.com/uas/oauth2/authorization", + TokenURL: "https://www.linkedin.com/uas/oauth2/accessToken", +} diff --git a/vendor/golang.org/x/oauth2/oauth2.go b/vendor/golang.org/x/oauth2/oauth2.go new file mode 100644 index 0000000..cca8b18 --- /dev/null +++ b/vendor/golang.org/x/oauth2/oauth2.go @@ -0,0 +1,325 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package oauth2 provides support for making +// OAuth2 authorized and authenticated HTTP requests. 
+// It can additionally grant authorization with Bearer JWT. +package oauth2 // import "golang.org/x/oauth2" + +import ( + "bytes" + "errors" + "net/http" + "net/url" + "strings" + "sync" + + "golang.org/x/net/context" + "golang.org/x/oauth2/internal" +) + +// NoContext is the default context you should supply if not using +// your own context.Context (see https://golang.org/x/net/context). +var NoContext = context.TODO() + +// Config describes a typical 3-legged OAuth2 flow, with both the +// client application information and the server's endpoint URLs. +type Config struct { + // ClientID is the application's ID. + ClientID string + + // ClientSecret is the application's secret. + ClientSecret string + + // Endpoint contains the resource server's token endpoint + // URLs. These are constants specific to each server and are + // often available via site-specific packages, such as + // google.Endpoint or github.Endpoint. + Endpoint Endpoint + + // RedirectURL is the URL to redirect users going through + // the OAuth flow, after the resource owner's URLs. + RedirectURL string + + // Scope specifies optional requested permissions. + Scopes []string +} + +// A TokenSource is anything that can return a token. +type TokenSource interface { + // Token returns a token or an error. + // Token must be safe for concurrent use by multiple goroutines. + // The returned Token must not be modified. + Token() (*Token, error) +} + +// Endpoint contains the OAuth 2.0 provider's authorization and token +// endpoint URLs. +type Endpoint struct { + AuthURL string + TokenURL string +} + +var ( + // AccessTypeOnline and AccessTypeOffline are options passed + // to the Options.AuthCodeURL method. They modify the + // "access_type" field that gets sent in the URL returned by + // AuthCodeURL. + // + // Online is the default if neither is specified. If your + // application needs to refresh access tokens when the user + // is not present at the browser, then use offline. This will + // result in your application obtaining a refresh token the + // first time your application exchanges an authorization + // code for a user. + AccessTypeOnline AuthCodeOption = SetAuthURLParam("access_type", "online") + AccessTypeOffline AuthCodeOption = SetAuthURLParam("access_type", "offline") + + // ApprovalForce forces the users to view the consent dialog + // and confirm the permissions request at the URL returned + // from AuthCodeURL, even if they've already done so. + ApprovalForce AuthCodeOption = SetAuthURLParam("approval_prompt", "force") +) + +// An AuthCodeOption is passed to Config.AuthCodeURL. +type AuthCodeOption interface { + setValue(url.Values) +} + +type setParam struct{ k, v string } + +func (p setParam) setValue(m url.Values) { m.Set(p.k, p.v) } + +// SetAuthURLParam builds an AuthCodeOption which passes key/value parameters +// to a provider's authorization endpoint. +func SetAuthURLParam(key, value string) AuthCodeOption { + return setParam{key, value} +} + +// AuthCodeURL returns a URL to OAuth 2.0 provider's consent page +// that asks for permissions for the required scopes explicitly. +// +// State is a token to protect the user from CSRF attacks. You must +// always provide a non-zero string and validate that it matches the +// the state query parameter on your redirect callback. +// See http://tools.ietf.org/html/rfc6749#section-10.12 for more info. +// +// Opts may include AccessTypeOnline or AccessTypeOffline, as well +// as ApprovalForce. 
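+//
+// For example (state and options are caller-chosen):
+//
+//	url := conf.AuthCodeURL("random-state-token", oauth2.AccessTypeOffline)
+//	// redirect the user to url, then check "state" on the callback before Exchange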
+func (c *Config) AuthCodeURL(state string, opts ...AuthCodeOption) string { + var buf bytes.Buffer + buf.WriteString(c.Endpoint.AuthURL) + v := url.Values{ + "response_type": {"code"}, + "client_id": {c.ClientID}, + "redirect_uri": internal.CondVal(c.RedirectURL), + "scope": internal.CondVal(strings.Join(c.Scopes, " ")), + "state": internal.CondVal(state), + } + for _, opt := range opts { + opt.setValue(v) + } + if strings.Contains(c.Endpoint.AuthURL, "?") { + buf.WriteByte('&') + } else { + buf.WriteByte('?') + } + buf.WriteString(v.Encode()) + return buf.String() +} + +// PasswordCredentialsToken converts a resource owner username and password +// pair into a token. +// +// Per the RFC, this grant type should only be used "when there is a high +// degree of trust between the resource owner and the client (e.g., the client +// is part of the device operating system or a highly privileged application), +// and when other authorization grant types are not available." +// See https://tools.ietf.org/html/rfc6749#section-4.3 for more info. +// +// The HTTP client to use is derived from the context. +// If nil, http.DefaultClient is used. +func (c *Config) PasswordCredentialsToken(ctx context.Context, username, password string) (*Token, error) { + return retrieveToken(ctx, c, url.Values{ + "grant_type": {"password"}, + "username": {username}, + "password": {password}, + "scope": internal.CondVal(strings.Join(c.Scopes, " ")), + }) +} + +// Exchange converts an authorization code into a token. +// +// It is used after a resource provider redirects the user back +// to the Redirect URI (the URL obtained from AuthCodeURL). +// +// The HTTP client to use is derived from the context. +// If a client is not provided via the context, http.DefaultClient is used. +// +// The code will be in the *http.Request.FormValue("code"). Before +// calling Exchange, be sure to validate FormValue("state"). +func (c *Config) Exchange(ctx context.Context, code string) (*Token, error) { + return retrieveToken(ctx, c, url.Values{ + "grant_type": {"authorization_code"}, + "code": {code}, + "redirect_uri": internal.CondVal(c.RedirectURL), + "scope": internal.CondVal(strings.Join(c.Scopes, " ")), + }) +} + +// Client returns an HTTP client using the provided token. +// The token will auto-refresh as necessary. The underlying +// HTTP transport will be obtained using the provided context. +// The returned client and its Transport should not be modified. +func (c *Config) Client(ctx context.Context, t *Token) *http.Client { + return NewClient(ctx, c.TokenSource(ctx, t)) +} + +// TokenSource returns a TokenSource that returns t until t expires, +// automatically refreshing it as necessary using the provided context. +// +// Most users will use Config.Client instead. +func (c *Config) TokenSource(ctx context.Context, t *Token) TokenSource { + tkr := &tokenRefresher{ + ctx: ctx, + conf: c, + } + if t != nil { + tkr.refreshToken = t.RefreshToken + } + return &reuseTokenSource{ + t: t, + new: tkr, + } +} + +// tokenRefresher is a TokenSource that makes "grant_type"=="refresh_token" +// HTTP requests to renew a token using a RefreshToken. +type tokenRefresher struct { + ctx context.Context // used to get HTTP requests + conf *Config + refreshToken string +} + +// WARNING: Token is not safe for concurrent access, as it +// updates the tokenRefresher's refreshToken field. +// Within this package, it is used by reuseTokenSource which +// synchronizes calls to this method with its own mutex. 
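+//
+// Callers normally obtain a refreshing source through Config.TokenSource, e.g.:
+//
+//	ts := conf.TokenSource(oauth2.NoContext, tok) // tok from an earlier Exchange
+//	fresh, err := ts.Token()                      // transparently refreshed if expired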
+func (tf *tokenRefresher) Token() (*Token, error) { + if tf.refreshToken == "" { + return nil, errors.New("oauth2: token expired and refresh token is not set") + } + + tk, err := retrieveToken(tf.ctx, tf.conf, url.Values{ + "grant_type": {"refresh_token"}, + "refresh_token": {tf.refreshToken}, + }) + + if err != nil { + return nil, err + } + if tf.refreshToken != tk.RefreshToken { + tf.refreshToken = tk.RefreshToken + } + return tk, err +} + +// reuseTokenSource is a TokenSource that holds a single token in memory +// and validates its expiry before each call to retrieve it with +// Token. If it's expired, it will be auto-refreshed using the +// new TokenSource. +type reuseTokenSource struct { + new TokenSource // called when t is expired. + + mu sync.Mutex // guards t + t *Token +} + +// Token returns the current token if it's still valid, else will +// refresh the current token (using r.Context for HTTP client +// information) and return the new one. +func (s *reuseTokenSource) Token() (*Token, error) { + s.mu.Lock() + defer s.mu.Unlock() + if s.t.Valid() { + return s.t, nil + } + t, err := s.new.Token() + if err != nil { + return nil, err + } + s.t = t + return t, nil +} + +// StaticTokenSource returns a TokenSource that always returns the same token. +// Because the provided token t is never refreshed, StaticTokenSource is only +// useful for tokens that never expire. +func StaticTokenSource(t *Token) TokenSource { + return staticTokenSource{t} +} + +// staticTokenSource is a TokenSource that always returns the same Token. +type staticTokenSource struct { + t *Token +} + +func (s staticTokenSource) Token() (*Token, error) { + return s.t, nil +} + +// HTTPClient is the context key to use with golang.org/x/net/context's +// WithValue function to associate an *http.Client value with a context. +var HTTPClient internal.ContextKey + +// NewClient creates an *http.Client from a Context and TokenSource. +// The returned client is not valid beyond the lifetime of the context. +// +// As a special case, if src is nil, a non-OAuth2 client is returned +// using the provided context. This exists to support related OAuth2 +// packages. +func NewClient(ctx context.Context, src TokenSource) *http.Client { + if src == nil { + c, err := internal.ContextClient(ctx) + if err != nil { + return &http.Client{Transport: internal.ErrorTransport{err}} + } + return c + } + return &http.Client{ + Transport: &Transport{ + Base: internal.ContextTransport(ctx), + Source: ReuseTokenSource(nil, src), + }, + } +} + +// ReuseTokenSource returns a TokenSource which repeatedly returns the +// same token as long as it's valid, starting with t. +// When its cached token is invalid, a new token is obtained from src. +// +// ReuseTokenSource is typically used to reuse tokens from a cache +// (such as a file on disk) between runs of a program, rather than +// obtaining new tokens unnecessarily. +// +// The initial token t may be nil, in which case the TokenSource is +// wrapped in a caching version if it isn't one already. This also +// means it's always safe to wrap ReuseTokenSource around any other +// TokenSource without adverse effects. +func ReuseTokenSource(t *Token, src TokenSource) TokenSource { + // Don't wrap a reuseTokenSource in itself. That would work, + // but cause an unnecessary number of mutex operations. + // Just build the equivalent one. + if rt, ok := src.(*reuseTokenSource); ok { + if t == nil { + // Just use it directly. 
+ return rt + } + src = rt.new + } + return &reuseTokenSource{ + t: t, + new: src, + } +} diff --git a/vendor/golang.org/x/oauth2/oauth2_test.go b/vendor/golang.org/x/oauth2/oauth2_test.go new file mode 100644 index 0000000..a99d7a3 --- /dev/null +++ b/vendor/golang.org/x/oauth2/oauth2_test.go @@ -0,0 +1,471 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package oauth2 + +import ( + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "net/http" + "net/http/httptest" + "net/url" + "reflect" + "strconv" + "testing" + "time" + + "golang.org/x/net/context" +) + +type mockTransport struct { + rt func(req *http.Request) (resp *http.Response, err error) +} + +func (t *mockTransport) RoundTrip(req *http.Request) (resp *http.Response, err error) { + return t.rt(req) +} + +type mockCache struct { + token *Token + readErr error +} + +func (c *mockCache) ReadToken() (*Token, error) { + return c.token, c.readErr +} + +func (c *mockCache) WriteToken(*Token) { + // do nothing +} + +func newConf(url string) *Config { + return &Config{ + ClientID: "CLIENT_ID", + ClientSecret: "CLIENT_SECRET", + RedirectURL: "REDIRECT_URL", + Scopes: []string{"scope1", "scope2"}, + Endpoint: Endpoint{ + AuthURL: url + "/auth", + TokenURL: url + "/token", + }, + } +} + +func TestAuthCodeURL(t *testing.T) { + conf := newConf("server") + url := conf.AuthCodeURL("foo", AccessTypeOffline, ApprovalForce) + if url != "server/auth?access_type=offline&approval_prompt=force&client_id=CLIENT_ID&redirect_uri=REDIRECT_URL&response_type=code&scope=scope1+scope2&state=foo" { + t.Errorf("Auth code URL doesn't match the expected, found: %v", url) + } +} + +func TestAuthCodeURL_CustomParam(t *testing.T) { + conf := newConf("server") + param := SetAuthURLParam("foo", "bar") + url := conf.AuthCodeURL("baz", param) + if url != "server/auth?client_id=CLIENT_ID&foo=bar&redirect_uri=REDIRECT_URL&response_type=code&scope=scope1+scope2&state=baz" { + t.Errorf("Auth code URL doesn't match the expected, found: %v", url) + } +} + +func TestAuthCodeURL_Optional(t *testing.T) { + conf := &Config{ + ClientID: "CLIENT_ID", + Endpoint: Endpoint{ + AuthURL: "/auth-url", + TokenURL: "/token-url", + }, + } + url := conf.AuthCodeURL("") + if url != "/auth-url?client_id=CLIENT_ID&response_type=code" { + t.Fatalf("Auth code URL doesn't match the expected, found: %v", url) + } +} + +func TestExchangeRequest(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.String() != "/token" { + t.Errorf("Unexpected exchange request URL, %v is found.", r.URL) + } + headerAuth := r.Header.Get("Authorization") + if headerAuth != "Basic Q0xJRU5UX0lEOkNMSUVOVF9TRUNSRVQ=" { + t.Errorf("Unexpected authorization header, %v is found.", headerAuth) + } + headerContentType := r.Header.Get("Content-Type") + if headerContentType != "application/x-www-form-urlencoded" { + t.Errorf("Unexpected Content-Type header, %v is found.", headerContentType) + } + body, err := ioutil.ReadAll(r.Body) + if err != nil { + t.Errorf("Failed reading request body: %s.", err) + } + if string(body) != "client_id=CLIENT_ID&code=exchange-code&grant_type=authorization_code&redirect_uri=REDIRECT_URL&scope=scope1+scope2" { + t.Errorf("Unexpected exchange payload, %v is found.", string(body)) + } + w.Header().Set("Content-Type", "application/x-www-form-urlencoded") + 
w.Write([]byte("access_token=90d64460d14870c08c81352a05dedd3465940a7c&scope=user&token_type=bearer")) + })) + defer ts.Close() + conf := newConf(ts.URL) + tok, err := conf.Exchange(NoContext, "exchange-code") + if err != nil { + t.Error(err) + } + if !tok.Valid() { + t.Fatalf("Token invalid. Got: %#v", tok) + } + if tok.AccessToken != "90d64460d14870c08c81352a05dedd3465940a7c" { + t.Errorf("Unexpected access token, %#v.", tok.AccessToken) + } + if tok.TokenType != "bearer" { + t.Errorf("Unexpected token type, %#v.", tok.TokenType) + } + scope := tok.Extra("scope") + if scope != "user" { + t.Errorf("Unexpected value for scope: %v", scope) + } +} + +func TestExchangeRequest_JSONResponse(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.String() != "/token" { + t.Errorf("Unexpected exchange request URL, %v is found.", r.URL) + } + headerAuth := r.Header.Get("Authorization") + if headerAuth != "Basic Q0xJRU5UX0lEOkNMSUVOVF9TRUNSRVQ=" { + t.Errorf("Unexpected authorization header, %v is found.", headerAuth) + } + headerContentType := r.Header.Get("Content-Type") + if headerContentType != "application/x-www-form-urlencoded" { + t.Errorf("Unexpected Content-Type header, %v is found.", headerContentType) + } + body, err := ioutil.ReadAll(r.Body) + if err != nil { + t.Errorf("Failed reading request body: %s.", err) + } + if string(body) != "client_id=CLIENT_ID&code=exchange-code&grant_type=authorization_code&redirect_uri=REDIRECT_URL&scope=scope1+scope2" { + t.Errorf("Unexpected exchange payload, %v is found.", string(body)) + } + w.Header().Set("Content-Type", "application/json") + w.Write([]byte(`{"access_token": "90d64460d14870c08c81352a05dedd3465940a7c", "scope": "user", "token_type": "bearer", "expires_in": 86400}`)) + })) + defer ts.Close() + conf := newConf(ts.URL) + tok, err := conf.Exchange(NoContext, "exchange-code") + if err != nil { + t.Error(err) + } + if !tok.Valid() { + t.Fatalf("Token invalid. 
Got: %#v", tok) + } + if tok.AccessToken != "90d64460d14870c08c81352a05dedd3465940a7c" { + t.Errorf("Unexpected access token, %#v.", tok.AccessToken) + } + if tok.TokenType != "bearer" { + t.Errorf("Unexpected token type, %#v.", tok.TokenType) + } + scope := tok.Extra("scope") + if scope != "user" { + t.Errorf("Unexpected value for scope: %v", scope) + } + expiresIn := tok.Extra("expires_in") + if expiresIn != float64(86400) { + t.Errorf("Unexpected non-numeric value for expires_in: %v", expiresIn) + } +} + +func TestExtraValueRetrieval(t *testing.T) { + values := url.Values{} + + kvmap := map[string]string{ + "scope": "user", "token_type": "bearer", "expires_in": "86400.92", + "server_time": "1443571905.5606415", "referer_ip": "10.0.0.1", + "etag": "\"afZYj912P4alikMz_P11982\"", "request_id": "86400", + "untrimmed": " untrimmed ", + } + + for key, value := range kvmap { + values.Set(key, value) + } + + tok := Token{ + raw: values, + } + + scope := tok.Extra("scope") + if scope != "user" { + t.Errorf("Unexpected scope %v wanted \"user\"", scope) + } + serverTime := tok.Extra("server_time") + if serverTime != 1443571905.5606415 { + t.Errorf("Unexpected non-float64 value for server_time: %v", serverTime) + } + refererIp := tok.Extra("referer_ip") + if refererIp != "10.0.0.1" { + t.Errorf("Unexpected non-string value for referer_ip: %v", refererIp) + } + expires_in := tok.Extra("expires_in") + if expires_in != 86400.92 { + t.Errorf("Unexpected value for expires_in, wanted 86400 got %v", expires_in) + } + requestId := tok.Extra("request_id") + if requestId != int64(86400) { + t.Errorf("Unexpected non-int64 value for request_id: %v", requestId) + } + untrimmed := tok.Extra("untrimmed") + if untrimmed != " untrimmed " { + t.Errorf("Unexpected value for untrimmed, got %q expected \" untrimmed \"", untrimmed) + } +} + +const day = 24 * time.Hour + +func TestExchangeRequest_JSONResponse_Expiry(t *testing.T) { + seconds := int32(day.Seconds()) + jsonNumberType := reflect.TypeOf(json.Number("0")) + for _, c := range []struct { + expires string + expect error + }{ + {fmt.Sprintf(`"expires_in": %d`, seconds), nil}, + {fmt.Sprintf(`"expires_in": "%d"`, seconds), nil}, // PayPal case + {fmt.Sprintf(`"expires": %d`, seconds), nil}, // Facebook case + {`"expires": false`, &json.UnmarshalTypeError{Value: "bool", Type: jsonNumberType}}, // wrong type + {`"expires": {}`, &json.UnmarshalTypeError{Value: "object", Type: jsonNumberType}}, // wrong type + {`"expires": "zzz"`, &strconv.NumError{Func: "ParseInt", Num: "zzz", Err: strconv.ErrSyntax}}, // wrong value + } { + testExchangeRequest_JSONResponse_expiry(t, c.expires, c.expect) + } +} + +func testExchangeRequest_JSONResponse_expiry(t *testing.T, exp string, expect error) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.Write([]byte(fmt.Sprintf(`{"access_token": "90d", "scope": "user", "token_type": "bearer", %s}`, exp))) + })) + defer ts.Close() + conf := newConf(ts.URL) + t1 := time.Now().Add(day) + tok, err := conf.Exchange(NoContext, "exchange-code") + t2 := time.Now().Add(day) + // Do a fmt.Sprint comparison so either side can be + // nil. fmt.Sprint just stringifies them to "", and no + // non-nil expected error ever stringifies as "", so this + // isn't terribly disgusting. We do this because Go 1.4 and + // Go 1.5 return a different deep value for + // json.UnmarshalTypeError. 
In Go 1.5, the
+	// json.UnmarshalTypeError contains a new field with a new
+	// non-zero value. Rather than ignore it here with reflect or
+	// add new files and +build tags, just look at the strings.
+	if fmt.Sprint(err) != fmt.Sprint(expect) {
+		t.Errorf("Error = %v; want %v", err, expect)
+	}
+	if err != nil {
+		return
+	}
+	if !tok.Valid() {
+		t.Fatalf("Token invalid. Got: %#v", tok)
+	}
+	expiry := tok.Expiry
+	if expiry.Before(t1) || expiry.After(t2) {
+		t.Errorf("Unexpected value for Expiry: %v (should be between %v and %v)", expiry, t1, t2)
+	}
+}
+
+func TestExchangeRequest_BadResponse(t *testing.T) {
+	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		w.Header().Set("Content-Type", "application/json")
+		w.Write([]byte(`{"scope": "user", "token_type": "bearer"}`))
+	}))
+	defer ts.Close()
+	conf := newConf(ts.URL)
+	tok, err := conf.Exchange(NoContext, "code")
+	if err != nil {
+		t.Fatal(err)
+	}
+	if tok.AccessToken != "" {
+		t.Errorf("Unexpected access token, %#v.", tok.AccessToken)
+	}
+}
+
+func TestExchangeRequest_BadResponseType(t *testing.T) {
+	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		w.Header().Set("Content-Type", "application/json")
+		w.Write([]byte(`{"access_token":123, "scope": "user", "token_type": "bearer"}`))
+	}))
+	defer ts.Close()
+	conf := newConf(ts.URL)
+	_, err := conf.Exchange(NoContext, "exchange-code")
+	if err == nil {
+		t.Error("expected error from invalid access_token type")
+	}
+}
+
+func TestExchangeRequest_NonBasicAuth(t *testing.T) {
+	tr := &mockTransport{
+		rt: func(r *http.Request) (w *http.Response, err error) {
+			headerAuth := r.Header.Get("Authorization")
+			if headerAuth != "" {
+				t.Errorf("Unexpected authorization header, %v is found.", headerAuth)
+			}
+			return nil, errors.New("no response")
+		},
+	}
+	c := &http.Client{Transport: tr}
+	conf := &Config{
+		ClientID: "CLIENT_ID",
+		Endpoint: Endpoint{
+			AuthURL:  "https://accounts.google.com/auth",
+			TokenURL: "https://accounts.google.com/token",
+		},
+	}
+
+	ctx := context.WithValue(context.Background(), HTTPClient, c)
+	conf.Exchange(ctx, "code")
+}
+
+func TestPasswordCredentialsTokenRequest(t *testing.T) {
+	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		defer r.Body.Close()
+		expected := "/token"
+		if r.URL.String() != expected {
+			t.Errorf("URL = %q; want %q", r.URL, expected)
+		}
+		headerAuth := r.Header.Get("Authorization")
+		expected = "Basic Q0xJRU5UX0lEOkNMSUVOVF9TRUNSRVQ="
+		if headerAuth != expected {
+			t.Errorf("Authorization header = %q; want %q", headerAuth, expected)
+		}
+		headerContentType := r.Header.Get("Content-Type")
+		expected = "application/x-www-form-urlencoded"
+		if headerContentType != expected {
+			t.Errorf("Content-Type header = %q; want %q", headerContentType, expected)
+		}
+		body, err := ioutil.ReadAll(r.Body)
+		if err != nil {
+			t.Errorf("Failed reading request body: %s.", err)
+		}
+		expected = "client_id=CLIENT_ID&grant_type=password&password=password1&scope=scope1+scope2&username=user1"
+		if string(body) != expected {
+			t.Errorf("res.Body = %q; want %q", string(body), expected)
+		}
+		w.Header().Set("Content-Type", "application/x-www-form-urlencoded")
+		w.Write([]byte("access_token=90d64460d14870c08c81352a05dedd3465940a7c&scope=user&token_type=bearer"))
+	}))
+	defer ts.Close()
+	conf := newConf(ts.URL)
+	tok, err := conf.PasswordCredentialsToken(NoContext, "user1", "password1")
+	if err != nil {
+		t.Error(err)
+	}
+	if 
!tok.Valid() { + t.Fatalf("Token invalid. Got: %#v", tok) + } + expected := "90d64460d14870c08c81352a05dedd3465940a7c" + if tok.AccessToken != expected { + t.Errorf("AccessToken = %q; want %q", tok.AccessToken, expected) + } + expected = "bearer" + if tok.TokenType != expected { + t.Errorf("TokenType = %q; want %q", tok.TokenType, expected) + } +} + +func TestTokenRefreshRequest(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.String() == "/somethingelse" { + return + } + if r.URL.String() != "/token" { + t.Errorf("Unexpected token refresh request URL, %v is found.", r.URL) + } + headerContentType := r.Header.Get("Content-Type") + if headerContentType != "application/x-www-form-urlencoded" { + t.Errorf("Unexpected Content-Type header, %v is found.", headerContentType) + } + body, _ := ioutil.ReadAll(r.Body) + if string(body) != "client_id=CLIENT_ID&grant_type=refresh_token&refresh_token=REFRESH_TOKEN" { + t.Errorf("Unexpected refresh token payload, %v is found.", string(body)) + } + })) + defer ts.Close() + conf := newConf(ts.URL) + c := conf.Client(NoContext, &Token{RefreshToken: "REFRESH_TOKEN"}) + c.Get(ts.URL + "/somethingelse") +} + +func TestFetchWithNoRefreshToken(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.String() == "/somethingelse" { + return + } + if r.URL.String() != "/token" { + t.Errorf("Unexpected token refresh request URL, %v is found.", r.URL) + } + headerContentType := r.Header.Get("Content-Type") + if headerContentType != "application/x-www-form-urlencoded" { + t.Errorf("Unexpected Content-Type header, %v is found.", headerContentType) + } + body, _ := ioutil.ReadAll(r.Body) + if string(body) != "client_id=CLIENT_ID&grant_type=refresh_token&refresh_token=REFRESH_TOKEN" { + t.Errorf("Unexpected refresh token payload, %v is found.", string(body)) + } + })) + defer ts.Close() + conf := newConf(ts.URL) + c := conf.Client(NoContext, nil) + _, err := c.Get(ts.URL + "/somethingelse") + if err == nil { + t.Errorf("Fetch should return an error if no refresh token is set") + } +} + +func TestRefreshToken_RefreshTokenReplacement(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.Write([]byte(`{"access_token":"ACCESS TOKEN", "scope": "user", "token_type": "bearer", "refresh_token": "NEW REFRESH TOKEN"}`)) + return + })) + defer ts.Close() + conf := newConf(ts.URL) + tkr := tokenRefresher{ + conf: conf, + ctx: NoContext, + refreshToken: "OLD REFRESH TOKEN", + } + tk, err := tkr.Token() + if err != nil { + t.Errorf("Unexpected refreshToken error returned: %v", err) + return + } + if tk.RefreshToken != tkr.refreshToken { + t.Errorf("tokenRefresher.refresh_token = %s; want %s", tkr.refreshToken, tk.RefreshToken) + } +} + +func TestConfigClientWithToken(t *testing.T) { + tok := &Token{ + AccessToken: "abc123", + } + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if got, want := r.Header.Get("Authorization"), fmt.Sprintf("Bearer %s", tok.AccessToken); got != want { + t.Errorf("Authorization header = %q; want %q", got, want) + } + return + })) + defer ts.Close() + conf := newConf(ts.URL) + + c := conf.Client(NoContext, tok) + req, err := http.NewRequest("GET", ts.URL, nil) + if err != nil { + t.Error(err) + } + _, err = c.Do(req) + if err != nil { + t.Error(err) + } +} diff --git 
a/vendor/golang.org/x/oauth2/odnoklassniki/odnoklassniki.go b/vendor/golang.org/x/oauth2/odnoklassniki/odnoklassniki.go
new file mode 100644
index 0000000..2f7a962
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/odnoklassniki/odnoklassniki.go
@@ -0,0 +1,16 @@
+// Copyright 2015 The oauth2 Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package odnoklassniki provides constants for using OAuth2 to access Odnoklassniki.
+package odnoklassniki // import "golang.org/x/oauth2/odnoklassniki"
+
+import (
+	"golang.org/x/oauth2"
+)
+
+// Endpoint is Odnoklassniki's OAuth 2.0 endpoint.
+var Endpoint = oauth2.Endpoint{
+	AuthURL:  "https://www.odnoklassniki.ru/oauth/authorize",
+	TokenURL: "https://api.odnoklassniki.ru/oauth/token.do",
+}
diff --git a/vendor/golang.org/x/oauth2/paypal/paypal.go b/vendor/golang.org/x/oauth2/paypal/paypal.go
new file mode 100644
index 0000000..baeaa23
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/paypal/paypal.go
@@ -0,0 +1,22 @@
+// Copyright 2015 The oauth2 Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package paypal provides constants for using OAuth2 to access PayPal.
+package paypal // import "golang.org/x/oauth2/paypal"
+
+import (
+	"golang.org/x/oauth2"
+)
+
+// Endpoint is PayPal's OAuth 2.0 endpoint in live (production) environment.
+var Endpoint = oauth2.Endpoint{
+	AuthURL:  "https://www.paypal.com/webapps/auth/protocol/openidconnect/v1/authorize",
+	TokenURL: "https://api.paypal.com/v1/identity/openidconnect/tokenservice",
+}
+
+// SandboxEndpoint is PayPal's OAuth 2.0 endpoint in sandbox (testing) environment.
+var SandboxEndpoint = oauth2.Endpoint{
+	AuthURL:  "https://www.sandbox.paypal.com/webapps/auth/protocol/openidconnect/v1/authorize",
+	TokenURL: "https://api.sandbox.paypal.com/v1/identity/openidconnect/tokenservice",
+}
diff --git a/vendor/golang.org/x/oauth2/token.go b/vendor/golang.org/x/oauth2/token.go
new file mode 100644
index 0000000..4e596f0
--- /dev/null
+++ b/vendor/golang.org/x/oauth2/token.go
@@ -0,0 +1,158 @@
+// Copyright 2014 The oauth2 Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package oauth2
+
+import (
+	"net/http"
+	"net/url"
+	"strconv"
+	"strings"
+	"time"
+
+	"golang.org/x/net/context"
+	"golang.org/x/oauth2/internal"
+)
+
+// expiryDelta determines how much earlier a token should be considered
+// expired than its actual expiration time. It is used to avoid late
+// expirations due to client-server time mismatches.
+const expiryDelta = 10 * time.Second
+
+// Token represents the credentials used to authorize
+// the requests to access protected resources on the OAuth 2.0
+// provider's backend.
+//
+// Most users of this package should not access fields of Token
+// directly. They're exported mostly for use by related packages
+// implementing derivative OAuth2 flows.
+type Token struct {
+	// AccessToken is the token that authorizes and authenticates
+	// the requests.
+	AccessToken string `json:"access_token"`
+
+	// TokenType is the type of token.
+	// The Type method returns either this or "Bearer", the default.
+	TokenType string `json:"token_type,omitempty"`
+
+	// RefreshToken is a token that's used by the application
+	// (as opposed to the user) to refresh the access token
+	// if it expires.
+ RefreshToken string `json:"refresh_token,omitempty"` + + // Expiry is the optional expiration time of the access token. + // + // If zero, TokenSource implementations will reuse the same + // token forever and RefreshToken or equivalent + // mechanisms for that TokenSource will not be used. + Expiry time.Time `json:"expiry,omitempty"` + + // raw optionally contains extra metadata from the server + // when updating a token. + raw interface{} +} + +// Type returns t.TokenType if non-empty, else "Bearer". +func (t *Token) Type() string { + if strings.EqualFold(t.TokenType, "bearer") { + return "Bearer" + } + if strings.EqualFold(t.TokenType, "mac") { + return "MAC" + } + if strings.EqualFold(t.TokenType, "basic") { + return "Basic" + } + if t.TokenType != "" { + return t.TokenType + } + return "Bearer" +} + +// SetAuthHeader sets the Authorization header to r using the access +// token in t. +// +// This method is unnecessary when using Transport or an HTTP Client +// returned by this package. +func (t *Token) SetAuthHeader(r *http.Request) { + r.Header.Set("Authorization", t.Type()+" "+t.AccessToken) +} + +// WithExtra returns a new Token that's a clone of t, but using the +// provided raw extra map. This is only intended for use by packages +// implementing derivative OAuth2 flows. +func (t *Token) WithExtra(extra interface{}) *Token { + t2 := new(Token) + *t2 = *t + t2.raw = extra + return t2 +} + +// Extra returns an extra field. +// Extra fields are key-value pairs returned by the server as a +// part of the token retrieval response. +func (t *Token) Extra(key string) interface{} { + if raw, ok := t.raw.(map[string]interface{}); ok { + return raw[key] + } + + vals, ok := t.raw.(url.Values) + if !ok { + return nil + } + + v := vals.Get(key) + switch s := strings.TrimSpace(v); strings.Count(s, ".") { + case 0: // Contains no "."; try to parse as int + if i, err := strconv.ParseInt(s, 10, 64); err == nil { + return i + } + case 1: // Contains a single "."; try to parse as float + if f, err := strconv.ParseFloat(s, 64); err == nil { + return f + } + } + + return v +} + +// expired reports whether the token is expired. +// t must be non-nil. +func (t *Token) expired() bool { + if t.Expiry.IsZero() { + return false + } + return t.Expiry.Add(-expiryDelta).Before(time.Now()) +} + +// Valid reports whether t is non-nil, has an AccessToken, and is not expired. +func (t *Token) Valid() bool { + return t != nil && t.AccessToken != "" && !t.expired() +} + +// tokenFromInternal maps an *internal.Token struct into +// a *Token struct. +func tokenFromInternal(t *internal.Token) *Token { + if t == nil { + return nil + } + return &Token{ + AccessToken: t.AccessToken, + TokenType: t.TokenType, + RefreshToken: t.RefreshToken, + Expiry: t.Expiry, + raw: t.Raw, + } +} + +// retrieveToken takes a *Config and uses that to retrieve an *internal.Token. +// This token is then mapped from *internal.Token into an *oauth2.Token which is returned along +// with an error.. +func retrieveToken(ctx context.Context, c *Config, v url.Values) (*Token, error) { + tk, err := internal.RetrieveToken(ctx, c.ClientID, c.ClientSecret, c.Endpoint.TokenURL, v) + if err != nil { + return nil, err + } + return tokenFromInternal(tk), nil +} diff --git a/vendor/golang.org/x/oauth2/token_test.go b/vendor/golang.org/x/oauth2/token_test.go new file mode 100644 index 0000000..8344329 --- /dev/null +++ b/vendor/golang.org/x/oauth2/token_test.go @@ -0,0 +1,72 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package oauth2 + +import ( + "testing" + "time" +) + +func TestTokenExtra(t *testing.T) { + type testCase struct { + key string + val interface{} + want interface{} + } + const key = "extra-key" + cases := []testCase{ + {key: key, val: "abc", want: "abc"}, + {key: key, val: 123, want: 123}, + {key: key, val: "", want: ""}, + {key: "other-key", val: "def", want: nil}, + } + for _, tc := range cases { + extra := make(map[string]interface{}) + extra[tc.key] = tc.val + tok := &Token{raw: extra} + if got, want := tok.Extra(key), tc.want; got != want { + t.Errorf("Extra(%q) = %q; want %q", key, got, want) + } + } +} + +func TestTokenExpiry(t *testing.T) { + now := time.Now() + cases := []struct { + name string + tok *Token + want bool + }{ + {name: "12 seconds", tok: &Token{Expiry: now.Add(12 * time.Second)}, want: false}, + {name: "10 seconds", tok: &Token{Expiry: now.Add(expiryDelta)}, want: true}, + {name: "-1 hour", tok: &Token{Expiry: now.Add(-1 * time.Hour)}, want: true}, + } + for _, tc := range cases { + if got, want := tc.tok.expired(), tc.want; got != want { + t.Errorf("expired (%q) = %v; want %v", tc.name, got, want) + } + } +} + +func TestTokenTypeMethod(t *testing.T) { + cases := []struct { + name string + tok *Token + want string + }{ + {name: "bearer-mixed_case", tok: &Token{TokenType: "beAREr"}, want: "Bearer"}, + {name: "default-bearer", tok: &Token{}, want: "Bearer"}, + {name: "basic", tok: &Token{TokenType: "basic"}, want: "Basic"}, + {name: "basic-capitalized", tok: &Token{TokenType: "Basic"}, want: "Basic"}, + {name: "mac", tok: &Token{TokenType: "mac"}, want: "MAC"}, + {name: "mac-caps", tok: &Token{TokenType: "MAC"}, want: "MAC"}, + {name: "mac-mixed_case", tok: &Token{TokenType: "mAc"}, want: "MAC"}, + } + for _, tc := range cases { + if got, want := tc.tok.Type(), tc.want; got != want { + t.Errorf("TokenType(%q) = %v; want %v", tc.name, got, want) + } + } +} diff --git a/vendor/golang.org/x/oauth2/transport.go b/vendor/golang.org/x/oauth2/transport.go new file mode 100644 index 0000000..90db088 --- /dev/null +++ b/vendor/golang.org/x/oauth2/transport.go @@ -0,0 +1,132 @@ +// Copyright 2014 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package oauth2 + +import ( + "errors" + "io" + "net/http" + "sync" +) + +// Transport is an http.RoundTripper that makes OAuth 2.0 HTTP requests, +// wrapping a base RoundTripper and adding an Authorization header +// with a token from the supplied Sources. +// +// Transport is a low-level mechanism. Most code will use the +// higher-level Config.Client method instead. +type Transport struct { + // Source supplies the token to add to outgoing requests' + // Authorization headers. + Source TokenSource + + // Base is the base RoundTripper used to make HTTP requests. + // If nil, http.DefaultTransport is used. + Base http.RoundTripper + + mu sync.Mutex // guards modReq + modReq map[*http.Request]*http.Request // original -> modified +} + +// RoundTrip authorizes and authenticates the request with an +// access token. If no token exists or token is expired, +// tries to refresh/fetch a new token. 
+func (t *Transport) RoundTrip(req *http.Request) (*http.Response, error) { + if t.Source == nil { + return nil, errors.New("oauth2: Transport's Source is nil") + } + token, err := t.Source.Token() + if err != nil { + return nil, err + } + + req2 := cloneRequest(req) // per RoundTripper contract + token.SetAuthHeader(req2) + t.setModReq(req, req2) + res, err := t.base().RoundTrip(req2) + if err != nil { + t.setModReq(req, nil) + return nil, err + } + res.Body = &onEOFReader{ + rc: res.Body, + fn: func() { t.setModReq(req, nil) }, + } + return res, nil +} + +// CancelRequest cancels an in-flight request by closing its connection. +func (t *Transport) CancelRequest(req *http.Request) { + type canceler interface { + CancelRequest(*http.Request) + } + if cr, ok := t.base().(canceler); ok { + t.mu.Lock() + modReq := t.modReq[req] + delete(t.modReq, req) + t.mu.Unlock() + cr.CancelRequest(modReq) + } +} + +func (t *Transport) base() http.RoundTripper { + if t.Base != nil { + return t.Base + } + return http.DefaultTransport +} + +func (t *Transport) setModReq(orig, mod *http.Request) { + t.mu.Lock() + defer t.mu.Unlock() + if t.modReq == nil { + t.modReq = make(map[*http.Request]*http.Request) + } + if mod == nil { + delete(t.modReq, orig) + } else { + t.modReq[orig] = mod + } +} + +// cloneRequest returns a clone of the provided *http.Request. +// The clone is a shallow copy of the struct and its Header map. +func cloneRequest(r *http.Request) *http.Request { + // shallow copy of the struct + r2 := new(http.Request) + *r2 = *r + // deep copy of the Header + r2.Header = make(http.Header, len(r.Header)) + for k, s := range r.Header { + r2.Header[k] = append([]string(nil), s...) + } + return r2 +} + +type onEOFReader struct { + rc io.ReadCloser + fn func() +} + +func (r *onEOFReader) Read(p []byte) (n int, err error) { + n, err = r.rc.Read(p) + if err == io.EOF { + r.runFunc() + } + return +} + +func (r *onEOFReader) Close() error { + err := r.rc.Close() + r.runFunc() + return err +} + +func (r *onEOFReader) runFunc() { + if fn := r.fn; fn != nil { + fn() + r.fn = nil + } +} diff --git a/vendor/golang.org/x/oauth2/transport_test.go b/vendor/golang.org/x/oauth2/transport_test.go new file mode 100644 index 0000000..35cb25e --- /dev/null +++ b/vendor/golang.org/x/oauth2/transport_test.go @@ -0,0 +1,86 @@ +package oauth2 + +import ( + "net/http" + "net/http/httptest" + "testing" + "time" +) + +type tokenSource struct{ token *Token } + +func (t *tokenSource) Token() (*Token, error) { + return t.token, nil +} + +func TestTransportTokenSource(t *testing.T) { + ts := &tokenSource{ + token: &Token{ + AccessToken: "abc", + }, + } + tr := &Transport{ + Source: ts, + } + server := newMockServer(func(w http.ResponseWriter, r *http.Request) { + if r.Header.Get("Authorization") != "Bearer abc" { + t.Errorf("Transport doesn't set the Authorization header from the fetched token") + } + }) + defer server.Close() + client := http.Client{Transport: tr} + client.Get(server.URL) +} + +// Test for case-sensitive token types, per https://github.com/golang/oauth2/issues/113 +func TestTransportTokenSourceTypes(t *testing.T) { + const val = "abc" + tests := []struct { + key string + val string + want string + }{ + {key: "bearer", val: val, want: "Bearer abc"}, + {key: "mac", val: val, want: "MAC abc"}, + {key: "basic", val: val, want: "Basic abc"}, + } + for _, tc := range tests { + ts := &tokenSource{ + token: &Token{ + AccessToken: tc.val, + TokenType: tc.key, + }, + } + tr := &Transport{ + Source: ts, + } + server := 
newMockServer(func(w http.ResponseWriter, r *http.Request) { + if got, want := r.Header.Get("Authorization"), tc.want; got != want { + t.Errorf("Authorization header (%q) = %q; want %q", val, got, want) + } + }) + defer server.Close() + client := http.Client{Transport: tr} + client.Get(server.URL) + } +} + +func TestTokenValidNoAccessToken(t *testing.T) { + token := &Token{} + if token.Valid() { + t.Errorf("Token should not be valid with no access token") + } +} + +func TestExpiredWithExpiry(t *testing.T) { + token := &Token{ + Expiry: time.Now().Add(-5 * time.Hour), + } + if token.Valid() { + t.Errorf("Token should not be valid if it expired in the past") + } +} + +func newMockServer(handler func(w http.ResponseWriter, r *http.Request)) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(handler)) +} diff --git a/vendor/golang.org/x/oauth2/vk/vk.go b/vendor/golang.org/x/oauth2/vk/vk.go new file mode 100644 index 0000000..5acdeb1 --- /dev/null +++ b/vendor/golang.org/x/oauth2/vk/vk.go @@ -0,0 +1,16 @@ +// Copyright 2015 The oauth2 Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package vk provides constants for using OAuth2 to access VK.com. +package vk // import "golang.org/x/oauth2/vk" + +import ( + "golang.org/x/oauth2" +) + +// Endpoint is VK's OAuth 2.0 endpoint. +var Endpoint = oauth2.Endpoint{ + AuthURL: "https://oauth.vk.com/authorize", + TokenURL: "https://oauth.vk.com/access_token", +}
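For orientation only (not part of the vendored tree), here is a minimal sketch of how the pieces above fit together: `Config` builds the authorization URL, `Exchange` trades the authorization code for a `Token`, and `Config.Client` wraps that token in a `Transport` backed by `ReuseTokenSource` so it is refreshed automatically. The provider URLs, client credentials, and authorization code below are placeholder values, not real endpoints.

```go
package main

import (
	"fmt"
	"log"

	"golang.org/x/oauth2"
)

func main() {
	// Configure provider endpoints and client credentials.
	// All values here are placeholders for illustration.
	conf := &oauth2.Config{
		ClientID:     "CLIENT_ID",
		ClientSecret: "CLIENT_SECRET",
		RedirectURL:  "https://example.com/callback",
		Scopes:       []string{"scope1", "scope2"},
		Endpoint: oauth2.Endpoint{
			AuthURL:  "https://provider.example.com/auth",
			TokenURL: "https://provider.example.com/token",
		},
	}

	// Step 1: send the user to the provider's consent page.
	fmt.Println("Visit:", conf.AuthCodeURL("state", oauth2.AccessTypeOffline))

	// Step 2: exchange the code returned on the redirect URL for a token.
	code := "AUTH_CODE" // placeholder
	tok, err := conf.Exchange(oauth2.NoContext, code)
	if err != nil {
		log.Fatal(err)
	}

	// Step 3: conf.Client returns an *http.Client whose Transport attaches
	// the token to each request and refreshes it when it expires.
	client := conf.Client(oauth2.NoContext, tok)
	resp, err := client.Get("https://provider.example.com/api/resource")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
}
```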