openrisc/trunk/gnu-dev/or1k-gcc/libgo/go/exp/types/check_test.go (rev 747)
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// This file implements a typechecker test harness. The packages specified
// in tests are typechecked. Error messages reported by the typechecker are
// compared against the error messages expected in the test files.
//
// Expected errors are indicated in the test files by putting a comment
// of the form /* ERROR "rx" */ immediately following an offending token.
// The harness will verify that an error matching the regular expression
// rx is reported at that source position. Consecutive comments may be
// used to indicate multiple errors for the same token position.
//
// For instance, the following test file indicates that a "not declared"
// error should be reported for the undeclared variable x:
//
//	package p
//	func f() {
//		_ = x /* ERROR "not declared" */ + 1
//	}

package types

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/scanner"
	"go/token"
	"io/ioutil"
	"os"
	"regexp"
	"testing"
)

// The test filenames do not end in .go so that they are invisible
// to gofmt since they contain comments that must not change their
// positions relative to surrounding tokens.

var tests = []struct {
	name  string
	files []string
}{
	{"test0", []string{"testdata/test0.src"}},
}
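
// fset is the file set shared by the parser, the error position
// lookups, and the typechecker.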
var fset = token.NewFileSet()
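
// getFile returns the *token.File in fset with the given filename,
// or nil if there is no such file in the file set.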
func getFile(filename string) (file *token.File) {
	fset.Iterate(func(f *token.File) bool {
		if f.Name() == filename {
			file = f
			return false // end iteration
		}
		return true
	})
	return file
}
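
// getPos returns the token.Pos for the given filename and byte offset,
// or token.NoPos if the file is not in fset.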
func getPos(filename string, offset int) token.Pos {
	if f := getFile(filename); f != nil {
		return f.Pos(offset)
	}
	return token.NoPos
}
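
// parseFiles parses the given files into fset and returns the resulting
// ASTs keyed by filename, together with the parse errors encountered.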
func parseFiles(t *testing.T, testname string, filenames []string) (map[string]*ast.File, error) {
	files := make(map[string]*ast.File)
	var errors scanner.ErrorList
	for _, filename := range filenames {
		if _, exists := files[filename]; exists {
			t.Fatalf("%s: duplicate file %s", testname, filename)
		}
		file, err := parser.ParseFile(fset, filename, nil, parser.DeclarationErrors)
		if file == nil {
			t.Fatalf("%s: could not parse file %s", testname, filename)
		}
		files[filename] = file
		if err != nil {
			// if the parser returns a non-scanner.ErrorList error
			// the file couldn't be read in the first place and
			// file == nil; in that case we shouldn't reach here
			errors = append(errors, err.(scanner.ErrorList)...)
		}
	}
	return files, errors
}

// ERROR comments must be of the form /* ERROR "rx" */ where rx is
// a regular expression that matches the expected error message.
//
var errRx = regexp.MustCompile(`^/\* *ERROR *"([^"]*)" *\*/$`)

// expectedErrors collects the regular expressions of ERROR comments found
// in files and returns them as a map of error positions to error messages.
//
func expectedErrors(t *testing.T, testname string, files map[string]*ast.File) map[token.Pos]string {
	errors := make(map[token.Pos]string)
	for filename := range files {
		src, err := ioutil.ReadFile(filename)
		if err != nil {
			t.Fatalf("%s: could not read %s", testname, filename)
		}

		var s scanner.Scanner
		// file was parsed already - do not add it again to the file
		// set otherwise the position information returned here will
		// not match the position information collected by the parser
		s.Init(getFile(filename), src, nil, scanner.ScanComments)
		var prev token.Pos // position of last non-comment, non-semicolon token

	scanFile:
		for {
			pos, tok, lit := s.Scan()
			switch tok {
			case token.EOF:
				break scanFile
			case token.COMMENT:
				s := errRx.FindStringSubmatch(lit)
				if len(s) == 2 {
					errors[prev] = string(s[1])
				}
			case token.SEMICOLON:
				// ignore automatically inserted semicolon
				if lit == "\n" {
					break
				}
				fallthrough
			default:
				prev = pos
			}
		}
	}
	return errors
}
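
// eliminate matches the reported errors against the expected errors:
// a reported error that matches the expected message at its position
// removes that entry from expected; mismatched or unexpected errors
// are reported as test failures.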
func eliminate(t *testing.T, expected map[token.Pos]string, errors error) {
	if errors == nil {
		return
	}
	for _, error := range errors.(scanner.ErrorList) {
		// error.Pos is a token.Position, but we want
		// a token.Pos so we can do a map lookup
		pos := getPos(error.Pos.Filename, error.Pos.Offset)
		if msg, found := expected[pos]; found {
			// we expect a message at pos; check if it matches
			rx, err := regexp.Compile(msg)
			if err != nil {
				t.Errorf("%s: %v", error.Pos, err)
				continue
			}
			if match := rx.MatchString(error.Msg); !match {
				t.Errorf("%s: %q does not match %q", error.Pos, error.Msg, msg)
				continue
			}
			// we have a match - eliminate this error
			delete(expected, pos)
		} else {
			// To keep in mind when analyzing failed test output:
			// If the same error position occurs multiple times in errors,
			// this message will be triggered (because the first error at
			// the position removes this position from the expected errors).
			t.Errorf("%s: no (multiple?) error expected, but found: %s", error.Pos, error.Msg)
		}
	}
}
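
// check typechecks the given test files and verifies that the parser,
// the resolver, and the typechecker report exactly the errors expected
// in those files.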
func check(t *testing.T, testname string, testfiles []string) {
	// TODO(gri) Eventually all these different phases should be
	//           subsumed into a single function call that takes
	//           a set of files and creates a fully resolved and
	//           type-checked AST.

	files, err := parseFiles(t, testname, testfiles)

	// we are expecting the following errors
	// (collect these after parsing the files so that
	// they are found in the file set)
	errors := expectedErrors(t, testname, files)

	// verify errors returned by the parser
	eliminate(t, errors, err)

	// verify errors returned after resolving identifiers
	pkg, err := ast.NewPackage(fset, files, GcImporter, Universe)
	eliminate(t, errors, err)

	// verify errors returned by the typechecker
	_, err = Check(fset, pkg)
	eliminate(t, errors, err)

	// there should be no expected errors left
	if len(errors) > 0 {
		t.Errorf("%s: %d errors not reported:", testname, len(errors))
		for pos, msg := range errors {
			t.Errorf("%s: %s\n", fset.Position(pos), msg)
		}
	}
}
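
// TestCheck runs the harness over all test packages listed in tests,
// or only over a local test.go file if one is present.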
func TestCheck(t *testing.T) {
	// For easy debugging w/o changing the testing code,
	// if there is a local test file, only test that file.
	const testfile = "test.go"
	if fi, err := os.Stat(testfile); err == nil && !fi.IsDir() {
		fmt.Printf("WARNING: Testing only %s (remove it to run all tests)\n", testfile)
		check(t, testfile, []string{testfile})
		return
	}

	// Otherwise, run all the tests.
	for _, test := range tests {
		check(t, test.name, test.files)
	}
}
