// Package relnote supports working with release note fragments:
// checking that they are valid, and merging them into a single
// release notes document.
package relnote

import (
	"bufio"
	"bytes"
	"errors"
	"fmt"
	"io"
	"io/fs"
	"path"
	"regexp"
	"slices"
	"strconv"
	"strings"

	md "rsc.io/markdown"
)

// NewParser returns a Markdown parser configured for release note documents.
func NewParser() *md.Parser {
	var p md.Parser
	p.HeadingIDs = true
	return &p
}

// CheckFragment reports problems in a release-note fragment: its contents
// must include a complete sentence or a TODO.
func CheckFragment(data string) error {
	doc := NewParser().Parse(data)
	txt := ""
	if len(doc.Blocks) > 0 {
		txt = text(doc)
	}
	if !strings.Contains(txt, "TODO") && !strings.ContainsAny(txt, ".?!") {
		return errors.New("File must contain a complete sentence or a TODO.")
	}
	return nil
}
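
// Usage sketch: validating a fragment's text before committing it.
// The fragment text here is hypothetical.
//
//	if err := relnote.CheckFragment("TODO: describe the new API."); err != nil {
//		log.Fatal(err)
//	}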

// text returns all the text in the block b, without any formatting.
func text(b md.Block) string {
	switch b := b.(type) {
	case *md.Document:
		return blocksText(b.Blocks)
	case *md.Heading:
		return text(b.Text)
	case *md.Text:
		return inlineText(b.Inline)
	case *md.CodeBlock:
		return strings.Join(b.Text, "\n")
	case *md.HTMLBlock:
		return strings.Join(b.Text, "\n")
	case *md.List:
		return blocksText(b.Items)
	case *md.Item:
		return blocksText(b.Blocks)
	case *md.Empty:
		return ""
	case *md.Paragraph:
		return text(b.Text)
	case *md.Quote:
		return blocksText(b.Blocks)
	case *md.ThematicBreak:
		return "---"
	default:
		panic(fmt.Sprintf("unknown block type %T", b))
	}
}

// blocksText returns the text of the given blocks, each followed by a newline.
func blocksText(bs []md.Block) string {
	var d strings.Builder
	for _, b := range bs {
		io.WriteString(&d, text(b))
		fmt.Fprintln(&d)
	}
	return d.String()
}

// inlineText returns the text of the given inline nodes, without formatting.
func inlineText(ins []md.Inline) string {
	var buf bytes.Buffer
	for _, in := range ins {
		in.PrintText(&buf)
	}
	return buf.String()
}

// Merge combines the Markdown files in fsys into a single document.
// Files are merged in lexical order of their paths.
// For fragments under a "*stdlib/*minor" directory, a heading linking to the
// corresponding standard-library package is inserted before the first block
// of that package's fragments.
// Empty blocks and headings with no content are dropped, and block positions
// are adjusted so that line numbers increase through the merged document.
// It is an error for two files to define the same link reference.
func Merge(fsys fs.FS) (*md.Document, error) {
	filenames, err := sortedMarkdownFilenames(fsys)
	if err != nil {
		return nil, err
	}
	doc := &md.Document{Links: map[string]*md.Link{}}
	var prevPkg string
	for _, filename := range filenames {
		newdoc, err := parseMarkdownFile(fsys, filename)
		if err != nil {
			return nil, err
		}
		if len(newdoc.Blocks) == 0 {
			continue
		}
		pkg := stdlibPackage(filename)

		addSymbolLinks(newdoc, pkg)
		if len(doc.Blocks) > 0 {
			// If this fragment is for a different package than the previous
			// one, insert a heading for the package.
			if pkg != "" && pkg != prevPkg {
				h := stdlibPackageHeading(pkg, lastBlock(doc).Pos().EndLine)
				doc.Blocks = append(doc.Blocks, h)
			}
			prevPkg = pkg
			// Shift the new blocks so they begin two lines after the last
			// block already in the document.
			lastLine := lastBlock(doc).Pos().EndLine
			delta := lastLine + 2 - newdoc.Blocks[0].Pos().StartLine
			for _, b := range newdoc.Blocks {
				addLines(b, delta)
			}
		}
		// Append the new blocks, dropping empty ones.
		for _, b := range newdoc.Blocks {
			if _, ok := b.(*md.Empty); !ok {
				doc.Blocks = append(doc.Blocks, b)
			}
		}
		// Merge link reference definitions, rejecting duplicates.
		for key, link := range newdoc.Links {
			if doc.Links[key] != nil {
				return nil, fmt.Errorf("duplicate link reference %q; second in %s", key, filename)
			}
			doc.Links[key] = link
		}
	}
	// Remove headings that have no content under them.
	doc.Blocks = removeEmptySections(doc.Blocks)
	if len(doc.Blocks) > 0 && len(doc.Links) > 0 {
		// Add a trailing empty block to separate the content from the link
		// reference definitions that will be rendered after it.
		lastPos := lastBlock(doc).Pos()
		lastPos.StartLine += 2
		lastPos.EndLine += 2
		doc.Blocks = append(doc.Blocks, &md.Empty{Position: lastPos})
	}
	return doc, nil
}
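
// Usage sketch: merging the fragments under a documentation directory and
// rendering the result. The directory name is hypothetical, and this assumes
// rsc.io/markdown's ToMarkdown renderer.
//
//	doc, err := relnote.Merge(os.DirFS("doc/next"))
//	if err != nil {
//		log.Fatal(err)
//	}
//	fmt.Println(md.ToMarkdown(doc))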

// stdlibPackage returns the standard-library package for the given filename.
// If the filename does not represent a package, it returns the empty string.
// A filename represents package P if it has the form D1/D2/P/F, where D1 ends
// in "stdlib", D2 ends in "minor", and P may itself contain slashes.
func stdlibPackage(filename string) string {
	dir, rest, _ := strings.Cut(filename, "/")
	if !strings.HasSuffix(dir, "stdlib") {
		return ""
	}
	dir, rest, _ = strings.Cut(rest, "/")
	if !strings.HasSuffix(dir, "minor") {
		return ""
	}
	pkg := path.Dir(rest)
	if pkg == "." {
		return ""
	}
	return pkg
}
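
// Illustrative examples (paths are hypothetical):
//
//	stdlibPackage("6-stdlib/99-minor/net/http/12345.md") // "net/http"
//	stdlibPackage("2-language/12345.md")                 // "": not under a *stdlib/*minor directory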

// stdlibPackageHeading returns a heading for the standard-library package pkg,
// linked to its documentation and positioned two lines after lastLine.
func stdlibPackageHeading(pkg string, lastLine int) *md.Heading {
	line := lastLine + 2
	pos := md.Position{StartLine: line, EndLine: line}
	return &md.Heading{
		Position: pos,
		Level:    4,
		Text: &md.Text{
			Position: pos,
			Inline: []md.Inline{
				&md.Link{
					Inner: []md.Inline{&md.Code{Text: pkg}},
					URL:   "/pkg/" + pkg + "/",
				},
			},
		},
	}
}

// removeEmptySections removes headings with no content. A heading has no
// content if there are no blocks between it and the next heading at the same
// or higher level, or the end of the document.
func removeEmptySections(bs []md.Block) []md.Block {
	res := bs[:0]
	delta := 0
	// rem removes all trailing headings of the given level or deeper from res,
	// increasing delta by the number of lines they occupied.
	rem := func(level int) {
		for len(res) > 0 {
			last := res[len(res)-1]
			if lh, ok := last.(*md.Heading); ok && lh.Level >= level {
				res = res[:len(res)-1]
				// The heading spans EndLine-StartLine+1 lines; add one more
				// for the blank line that followed it.
				delta += lh.EndLine - lh.StartLine + 2
			} else {
				break
			}
		}
	}

	for _, b := range bs {
		if h, ok := b.(*md.Heading); ok {
			rem(h.Level)
		}
		// Shift kept blocks up by the number of lines removed so far.
		addLines(b, -delta)
		res = append(res, b)
	}
	// Remove any empty headings left at the end of the document.
	rem(1)
	return res
}

// sortedMarkdownFilenames returns the paths of all .md files in fsys,
// sorted lexically.
func sortedMarkdownFilenames(fsys fs.FS) ([]string, error) {
	var filenames []string
	err := fs.WalkDir(fsys, ".", func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if !d.IsDir() && strings.HasSuffix(path, ".md") {
			filenames = append(filenames, path)
		}
		return nil
	})
	if err != nil {
		return nil, err
	}
	// Sort lexically so the merge order is deterministic; fragment paths are
	// named so that lexical order is the intended document order.
	slices.Sort(filenames)
	return filenames, nil
}

// lastBlock returns the last block in the document.
// It panics if the document has no blocks.
func lastBlock(doc *md.Document) md.Block {
	return doc.Blocks[len(doc.Blocks)-1]
}

// addLines adds n lines to the position of the block b.
// n may be negative.
func addLines(b md.Block, n int) {
	pos := position(b)
	pos.StartLine += n
	pos.EndLine += n
}

func position(b md.Block) *md.Position {
	switch b := b.(type) {
	case *md.Heading:
		return &b.Position
	case *md.Text:
		return &b.Position
	case *md.CodeBlock:
		return &b.Position
	case *md.HTMLBlock:
		return &b.Position
	case *md.List:
		return &b.Position
	case *md.Item:
		return &b.Position
	case *md.Empty:
		return &b.Position
	case *md.Paragraph:
		return &b.Position
	case *md.Quote:
		return &b.Position
	case *md.ThematicBreak:
		return &b.Position
	default:
		panic(fmt.Sprintf("unknown block type %T", b))
	}
}

// parseMarkdownFile parses the Markdown file at path within fsys.
func parseMarkdownFile(fsys fs.FS, path string) (*md.Document, error) {
	f, err := fsys.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	data, err := io.ReadAll(f)
	if err != nil {
		return nil, err
	}
	in := string(data)
	doc := NewParser().Parse(in)
	return doc, nil
}

// An APIFeature is a single addition to the API, as recorded on one line
// of an API file.
type APIFeature struct {
	Package string // package the feature belongs to
	Build   string // parenthesized build context, if any, including the parentheses
	Feature string // the feature description, without the package or issue
	Issue   int    // issue that introduced the feature, or 0 if none was given
}

// apiFileLineRegexp parses a single feature line of an API file: the package,
// an optional parenthesized build context, the feature, and an optional
// trailing issue number.
var apiFileLineRegexp = regexp.MustCompile(`^pkg ([^ \t]+)[ \t]*(\([^)]+\))?, ([^#]*)(#\d+)?$`)

// parseAPIFile parses the API file at filename in fsys and returns a list of
// the features it describes.
// Each line of the file is either blank, a comment beginning with '#', or a
// feature of the form
//
//	pkg PACKAGE[ (BUILD)], FEATURE[ #ISSUE]
func parseAPIFile(fsys fs.FS, filename string) ([]APIFeature, error) {
	f, err := fsys.Open(filename)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	var features []APIFeature
	scan := bufio.NewScanner(f)
	for scan.Scan() {
		line := strings.TrimSpace(scan.Text())
		if line == "" || line[0] == '#' {
			continue
		}
		matches := apiFileLineRegexp.FindStringSubmatch(line)
		if len(matches) == 0 {
			return nil, fmt.Errorf("%s: malformed line %q", filename, line)
		}
		if len(matches) != 5 {
			return nil, fmt.Errorf("wrong number of matches for line %q", line)
		}
		f := APIFeature{
			Package: matches[1],
			Build:   matches[2],
			Feature: strings.TrimSpace(matches[3]),
		}
		if issue := matches[4]; issue != "" {
			var err error
			f.Issue, err = strconv.Atoi(issue[1:])
			if err != nil {
				return nil, err
			}
		}
		features = append(features, f)
	}
	if scan.Err() != nil {
		return nil, scan.Err()
	}
	return features, nil
}

// GroupAPIFeaturesByFile groups the given API features by the name of the
// release-note fragment file that should describe them, of the form
// PACKAGE/ISSUE.md.
// It returns an error if a feature has no issue number.
func GroupAPIFeaturesByFile(fs []APIFeature) (map[string][]APIFeature, error) {
	m := map[string][]APIFeature{}
	for _, f := range fs {
		if f.Issue == 0 {
			return nil, fmt.Errorf("%+v: zero issue", f)
		}
		filename := fmt.Sprintf("%s/%d.md", f.Package, f.Issue)
		m[filename] = append(m[filename], f)
	}
	return m, nil
}
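
// Illustrative example (hypothetical values): a feature in package net/http
// introduced by issue 12345 maps to the fragment file "net/http/12345.md".
//
//	m, _ := GroupAPIFeaturesByFile([]APIFeature{
//		{Package: "net/http", Feature: "func Hypothetical()", Issue: 12345},
//	})
//	// m["net/http/12345.md"] now holds that feature.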

// CheckAPIFile reads the API file at filename in apiFS and checks that each
// feature it describes has a corresponding release-note fragment file under
// the minor-changes directory of docFS.
// docRoot is the path of docFS's root directory, used only in error messages.
func CheckAPIFile(apiFS fs.FS, filename string, docFS fs.FS, docRoot string) error {
	features, err := parseAPIFile(apiFS, filename)
	if err != nil {
		return err
	}
	byFile, err := GroupAPIFeaturesByFile(features)
	if err != nil {
		return err
	}
	var filenames []string
	for fn := range byFile {
		filenames = append(filenames, fn)
	}
	slices.Sort(filenames)
	mcDir, err := minorChangesDir(docFS)
	if err != nil {
		return err
	}
	var errs []error
	for _, fn := range filenames {
		// Fragment files for API changes live under the minor-changes directory.
		fn = path.Join(mcDir, fn)
		// Check that the fragment file exists and is well formed.
		if err := checkFragmentFile(docFS, fn); err != nil {
			errs = append(errs, fmt.Errorf("%s: %v\nSee doc/README.md for more information.", path.Join(docRoot, fn), err))
		}
	}
	return errors.Join(errs...)
}
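
// Usage sketch (paths are hypothetical): check one API file against the
// fragments in a documentation tree.
//
//	err := relnote.CheckAPIFile(os.DirFS("api"), "next/12345.txt", os.DirFS("doc"), "doc")
//	if err != nil {
//		log.Fatal(err)
//	}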

// minorChangesDir returns the unique directory in docFS that matches the glob
// "*stdlib/*minor", which holds fragments describing minor changes to the
// standard library.
func minorChangesDir(docFS fs.FS) (string, error) {
	dirs, err := fs.Glob(docFS, "*stdlib/*minor")
	if err != nil {
		return "", err
	}
	var bad string
	if len(dirs) == 0 {
		bad = "No"
	} else if len(dirs) > 1 {
		bad = "More than one"
	}
	if bad != "" {
		return "", fmt.Errorf("%s directory matches *stdlib/*minor.\nThis shouldn't happen; please file a bug at https://go.dev/issues/new.",
			bad)
	}
	return dirs[0], nil
}

// checkFragmentFile checks that the release-note fragment at filename in fsys
// exists and is valid.
func checkFragmentFile(fsys fs.FS, filename string) error {
	f, err := fsys.Open(filename)
	if err != nil {
		if errors.Is(err, fs.ErrNotExist) {
			err = errors.New("File does not exist. Every API change must have a corresponding release note file.")
		}
		return err
	}
	defer f.Close()
	data, err := io.ReadAll(f)
	if err != nil {
		return err
	}
	return CheckFragment(string(data))
}