Introduce concrete Pager strategy: ConcretePage with single-page and linked pagers :wrench:
diff --git a/collections.go b/collections.go
index 38078f0..1383f24 100644
--- a/collections.go
+++ b/collections.go
@@ -1,62 +1,47 @@
 package gophercloud
 
-import "errors"
+import (
+	"encoding/json"
+	"errors"
+	"io/ioutil"
+	"net/http"
+
+	"github.com/mitchellh/mapstructure"
+)
 
 var (
 	// ErrPageNotAvailable is returned from a Pager when a next or previous page is requested, but does not exist.
 	ErrPageNotAvailable = errors.New("The requested Collection page does not exist.")
 )
 
-// Collection must be satisfied by the result type of any resource collection.
+// Page must be satisfied by the result type of any resource collection.
 // It allows clients to interact with the resource uniformly, regardless of whether or not or how it's paginated.
-type Collection interface {
+type Page interface {
 
 	// NextPageURL generates the URL for the page of data that follows this collection.
 	// Return "" if no such page exists.
 	NextPageURL() string
-
-	// Concat creates a new Collection that contains all of the elements from this page and another page.
-	// It's used to aggregate results for the AllPages method.
-	Concat(Collection) Collection
 }
 
 // Pager knows how to advance through a specific resource collection, one page at a time.
 type Pager struct {
 	initialURL string
 
-	advance func(string) (Collection, error)
+	advance func(string) (Page, error)
 }
 
 // NewPager constructs a manually-configured pager.
 // Supply the URL for the first page and a function that requests a specific page given a URL.
-func NewPager(initialURL string, advance func(string) (Collection, error)) Pager {
+func NewPager(initialURL string, advance func(string) (Page, error)) Pager {
 	return Pager{
 		initialURL: initialURL,
 		advance:    advance,
 	}
 }
 
-// NewSinglePager constructs a Pager that "iterates" over a single-paged Collection.
-// Supply a function that returns the only page.
-func NewSinglePager(only func() (Collection, error)) Pager {
-	consumed := false
-	single := func(_ string) (Collection, error) {
-		if !consumed {
-			consumed = true
-			return only()
-		}
-		return nil, ErrPageNotAvailable
-	}
-
-	return Pager{
-		initialURL: "",
-		advance:    single,
-	}
-}
-
 // EachPage iterates over each page returned by a Pager, yielding one at a time to a handler function.
 // Return "false" from the handler to prematurely stop iterating.
-func (p Pager) EachPage(handler func(Collection) bool) error {
+func (p Pager) EachPage(handler func(Page) bool) error {
 	currentURL := p.initialURL
 	for {
 		currentPage, err := p.advance(currentURL)
@@ -75,28 +60,112 @@
 	}
 }
 
-// AllPages accumulates every page reachable from a Pager into a single Collection, for convenience.
-func (p Pager) AllPages() (Collection, error) {
-	var megaPage Collection
-
-	err := p.EachPage(func(page Collection) bool {
-		if megaPage == nil {
-			megaPage = page
-		} else {
-			megaPage = megaPage.Concat(page)
-		}
-		return true
-	})
-
-	return megaPage, err
+// AllPages accumulates every page reachable from a Pager into a single Page, for convenience.
+func (p Pager) AllPages() (Page, error) {
+	return nil, errors.New("Wat")
 }
 
-// PaginationLinks stores the `next` and `previous` links that are provided by some (but not all) paginated resources.
-type PaginationLinks struct {
+// ConcretePage stores generic information derived from an HTTP response.
+type ConcretePage struct {
+	http.Header
+	Body map[string]interface{}
+}
 
-	// Next is the full URL to the next page of results, or nil if this is the last page.
-	Next *string `json:"next,omitempty"`
+// NewConcretePage parses an HTTP response as JSON and returns a ConcretePage containing the results.
+func NewConcretePage(resp http.Response) (ConcretePage, error) {
+	var parsedBody map[string]interface{}
 
-	// Previous is the full URL to the previous page of results, or nil if this is the first page.
-	Previous *string `json:"previous,omitempty"`
+	defer resp.Body.Close()
+	rawBody, err := ioutil.ReadAll(resp.Body)
+	if err != nil {
+		return ConcretePage{}, err
+	}
+	err = json.Unmarshal(rawBody, &parsedBody)
+	if err != nil {
+		return ConcretePage{}, err
+	}
+
+	return ConcretePage{Header: resp.Header, Body: parsedBody}, err
+}
+
+// SinglePage is a page that contains all of the results from an operation.
+type SinglePage ConcretePage
+
+// NextPageURL always returns "" to indicate that there are no more pages to return.
+func (current SinglePage) NextPageURL() string {
+	return ""
+}
+
+// NewSinglePager constructs a Pager that "iterates" over a single Page.
+// Supply a function that returns the only page.
+func NewSinglePager(only func() (http.Response, error)) Pager {
+	consumed := false
+	single := func(_ string) (Page, error) {
+		if !consumed {
+			consumed = true
+			resp, err := only()
+			if err != nil {
+				return SinglePage{}, err
+			}
+
+			cp, err := NewConcretePage(resp)
+			if err != nil {
+				return SinglePage{}, err
+			}
+			return SinglePage(cp), nil
+		}
+		return SinglePage{}, ErrPageNotAvailable
+	}
+
+	return Pager{
+		initialURL: "",
+		advance:    single,
+	}
+}
+
+// PaginatedLinksPage is a page in a collection whose JSON body carries a "links" object with a "next" URL used to locate the following page.
+type PaginatedLinksPage ConcretePage
+
+// NextPageURL extracts the pagination structure from a JSON response and returns the "next" link, if one is present.
+func (current PaginatedLinksPage) NextPageURL() string {
+	type response struct {
+		Links struct {
+			Next *string `mapstructure:"next,omitempty"`
+		} `mapstructure:"links"`
+	}
+
+	var r response
+	err := mapstructure.Decode(current.Body, &r)
+	if err != nil {
+		// FIXME: NextPageURL should return an error instead of panicking when the body cannot be decoded.
+		panic(err)
+	}
+
+	if r.Links.Next == nil {
+		return ""
+	}
+
+	return *r.Links.Next
+}
+
+// NewLinkedPager creates a Pager that uses a "links" element in the JSON response to locate the next page.
+func NewLinkedPager(initialURL string, request func(string) (http.Response, error)) Pager {
+	advance := func(url string) (Page, error) {
+		resp, err := request(url)
+		if err != nil {
+			return nil, err
+		}
+
+		cp, err := NewConcretePage(resp)
+		if err != nil {
+			return nil, err
+		}
+
+		return PaginatedLinksPage(cp), nil
+	}
+
+	return Pager{
+		initialURL: initialURL,
+		advance:    advance,
+	}
 }
diff --git a/collections_test.go b/collections_test.go
index c83bd38..a005fd7 100644
--- a/collections_test.go
+++ b/collections_test.go
@@ -1,45 +1,63 @@
 package gophercloud
 
 import (
-	"fmt"
+	"bytes"
+	"errors"
+	"io"
 	"net/http"
 	"reflect"
 	"testing"
 
-	"github.com/rackspace/gophercloud/testhelper"
+	"github.com/mitchellh/mapstructure"
 )
 
+type nopCloser struct {
+	io.Reader
+}
+
+func (nopCloser) Close() error { return nil }
+
+func responseWithBody(body string) (http.Response, error) {
+	return http.Response{
+		Body: nopCloser{bytes.NewReader([]byte(body))},
+	}, nil
+}
+
 // SinglePage sample and test cases.
 
-type SinglePageCollection struct {
-	results []int
+type SingleIntList struct {
+	SinglePage
 }
 
-func (c SinglePageCollection) NextPageURL() string {
-	panic("NextPageURL should never be called on a single-paged collection.")
-}
+func ExtractSingleInts(page Page) ([]int, error) {
+	var response struct {
+		Ints []int `mapstructure:"ints"`
+	}
 
-func (c SinglePageCollection) Concat(other Collection) Collection {
-	panic("Concat should never be called on a single-paged collection.")
-}
+	err := mapstructure.Decode(page.(SingleIntList).Body, &response)
+	if err != nil {
+		return nil, err
+	}
 
-func ExtractSingleInts(c Collection) []int {
-	return c.(SinglePageCollection).results
+	return response.Ints, nil
 }
 
 func setupSinglePaged() Pager {
-	return NewSinglePager(func() (Collection, error) {
-		return SinglePageCollection{results: []int{1, 2, 3}}, nil
+	return NewSinglePager(func() (http.Response, error) {
+		return responseWithBody(`{ "ints": [1, 2, 3] }`)
 	})
 }
 
 func TestEnumerateSinglePaged(t *testing.T) {
 	callCount := 0
-	err := setupSinglePaged().EachPage(func(page Collection) bool {
+	err := setupSinglePaged().EachPage(func(page Page) bool {
 		callCount++
 
 		expected := []int{1, 2, 3}
-		actual := AsSingleInts(page)
+		actual, err := ExtractSingleInts(page)
+		if err != nil {
+			t.Fatalf("Unexpected error extracting ints: %v", err)
+		}
 		if !reflect.DeepEqual(expected, actual) {
 			t.Errorf("Expected %v, but was %v", expected, actual)
 		}
@@ -54,90 +72,52 @@
 	}
 }
 
-func TestAllSinglePaged(t *testing.T) {
-	r, err := setupSinglePaged().AllPages()
-	if err != nil {
-		t.Fatalf("Unexpected error when iterating pages: %v", err)
-	}
-
-	expected := []int{1, 2, 3}
-	actual := ExtractSingleInts(r)
-	if !reflect.DeepEqual(expected, actual) {
-		t.Errorf("Expected %v, but was %v", expected, actual)
-	}
-}
-
 // LinkedPager sample and test cases.
 
-type LinkedCollection struct {
-	PaginationLinks
-
-	results []int
+type LinkedIntPage struct {
+	PaginatedLinksPage
 }
 
-func (page LinkedCollection) NextPageURL() string {
-	n := page.PaginationLinks.Next
-	if n == nil {
-		return ""
+func ExtractLinkedInts(page Page) ([]int, error) {
+	var response struct {
+		Ints []int `mapstructure:"ints"`
 	}
-	return *n
-}
 
-func (page LinkedCollection) Concat(other Collection) Collection {
-	return LinkedCollection{
-		service: page.service,
-		results: append(c.results, AsLinkedInts(other)...),
+	err := mapstructure.Decode(page.(LinkedIntPage).Body, &response)
+	if err != nil {
+		return nil, err
 	}
+
+	return response.Ints, nil
 }
 
-func AsLinkedInts(results Collection) []int {
-	return results.(LinkedCollection).results
-}
-
-func createLinked() Pager {
-	nextURL := testhelper.Server.URL + "/foo?page=2&perPage=3"
-	return CreatePager(func(url) Collection {
-		LinkedCollection{
-			PaginationLinks: PaginationLinks{Next: &nextURL},
-			results:         []int{1, 2, 3},
-		}
-	})
-}
-
-func setupLinkedResponses(t *testing.T) {
-	testhelper.Mux.HandleFunc("/foo", func(w http.ResponseWriter, r *http.Request) {
-		testhelper.TestMethod(t, r, "GET")
-		testhelper.TestHeader(t, r, "X-Auth-Token", "1234")
-		w.Header().Add("Content-Type", "application/json")
-
-		r.ParseForm()
-
-		pages := r.Form["page"]
-		if len(pages) != 1 {
-			t.Errorf("Endpoint called with unexpected page: %#v", r.Form)
-		}
-
-		switch pages[0] {
-		case "2":
-			fmt.Fprintf(w, `[4, 5, 6]`)
-		case "3":
-			fmt.Fprintf(w, `[7, 8, 9]`)
+func createLinked(t *testing.T) Pager {
+	return NewLinkedPager("page1", func(url string) (http.Response, error) {
+		switch url {
+		case "page1":
+			return responseWithBody(`{ "ints": [1, 2, 3], "links": { "next": "page2" } }`)
+		case "page2":
+			return responseWithBody(`{ "ints": [4, 5, 6], "links": { "next": "page3" } }`)
+		case "page3":
+			return responseWithBody(`{ "ints": [7, 8, 9], "links": { "next": null } }`)
 		default:
-			t.Errorf("Endpoint called with unexpected page: %s", pages[0])
+			t.Fatalf("LinkedPager called with unexpected URL: %v", url)
+			return http.Response{}, errors.New("Wat")
 		}
 	})
 }
 
 func TestEnumerateLinked(t *testing.T) {
-	testhelper.SetupHTTP()
-	defer testhelper.TeardownHTTP()
-
-	setupLinkedResponses(t)
-	lc := createLinked()
+	pager := createLinked(t)
 
 	callCount := 0
-	err := EachPage(lc, func(page Collection) bool {
-		actual := AsLinkedInts(page)
+	err := pager.EachPage(func(page Page) bool {
+		actual, err := ExtractLinkedInts(page)
+		if err != nil {
+			t.Errorf("Unable to extract ints from page: %v", err)
+			return false
+		}
+
 		t.Logf("Handler invoked with %v", actual)
 
 		var expected []int
@@ -168,28 +148,3 @@
 		t.Errorf("Expected 3 calls, but was %d", callCount)
 	}
 }
-
-func TestAllLinked(t *testing.T) {
-	testhelper.SetupHTTP()
-	defer testhelper.TeardownHTTP()
-
-	setupLinkedResponses(t)
-	lc := createLinked()
-
-	all, err := AllPages(lc)
-	if err != nil {
-		t.Fatalf("Unexpected error collection all linked pages: %v", err)
-	}
-
-	actual := AsLinkedInts(all)
-	expected := []int{1, 2, 3, 4, 5, 6, 7, 8, 9}
-
-	if !reflect.DeepEqual(expected, actual) {
-		t.Errorf("Expected %v, but was %v", expected, actual)
-	}
-
-	original := []int{1, 2, 3}
-	if !reflect.DeepEqual(AsLinkedInts(lc), original) {
-		t.Errorf("AllPages modified the original page, and now it contains: %v", lc)
-	}
-}