diff --git a/build/build.go b/build/build.go
index 62b360c61..2ec712fc7 100644
--- a/build/build.go
+++ b/build/build.go
@@ -21,6 +21,7 @@ import (
 	"sort"
 	"strconv"
 	"strings"
+	"sync"
 	"time"
 
 	"github.com/fsnotify/fsnotify"
@@ -938,23 +939,40 @@ func (s *Session) buildImportPathWithSrcDir(path string, srcDir string) (*Packag
 	return pkg, archive, nil
 }
 
+// getExeModTime will determine the mod time of the GopherJS binary
+// the first time this is called and cache the result for subsequent calls.
+var getExeModTime = func() func() time.Time {
+	var (
+		once   sync.Once
+		result time.Time
+	)
+	getTime := func() {
+		gopherjsBinary, err := os.Executable()
+		if err == nil {
+			var fileInfo os.FileInfo
+			fileInfo, err = os.Stat(gopherjsBinary)
+			if err == nil {
+				result = fileInfo.ModTime()
+				return
+			}
+		}
+		os.Stderr.WriteString("Could not get GopherJS binary's modification timestamp. Please report issue.\n")
+		result = time.Now()
+	}
+	return func() time.Time {
+		once.Do(getTime)
+		return result
+	}
+}()
+
 // BuildPackage compiles an already loaded package.
 func (s *Session) BuildPackage(pkg *PackageData) (*compiler.Archive, error) {
 	if archive, ok := s.UpToDateArchives[pkg.ImportPath]; ok {
 		return archive, nil
 	}
 
-	var fileInfo os.FileInfo
-	gopherjsBinary, err := os.Executable()
-	if err == nil {
-		fileInfo, err = os.Stat(gopherjsBinary)
-		if err == nil && fileInfo.ModTime().After(pkg.SrcModTime) {
-			pkg.SrcModTime = fileInfo.ModTime()
-		}
-	}
-	if err != nil {
-		os.Stderr.WriteString("Could not get GopherJS binary's modification timestamp. Please report issue.\n")
-		pkg.SrcModTime = time.Now()
+	if exeModTime := getExeModTime(); exeModTime.After(pkg.SrcModTime) {
+		pkg.SrcModTime = exeModTime
 	}
 
 	for _, importedPkgPath := range pkg.Imports {
@@ -966,22 +984,18 @@ func (s *Session) BuildPackage(pkg *PackageData) (*compiler.Archive, error) {
 			return nil, err
 		}
 
-		impModTime := importedPkg.SrcModTime
-		if impModTime.After(pkg.SrcModTime) {
+		if impModTime := importedPkg.SrcModTime; impModTime.After(pkg.SrcModTime) {
 			pkg.SrcModTime = impModTime
 		}
 	}
 
-	if pkg.FileModTime().After(pkg.SrcModTime) {
-		pkg.SrcModTime = pkg.FileModTime()
+	if fileModTime := pkg.FileModTime(); fileModTime.After(pkg.SrcModTime) {
+		pkg.SrcModTime = fileModTime
 	}
 
 	if !s.options.NoCache {
-		archive := s.buildCache.LoadArchive(pkg.ImportPath)
-		if archive != nil && !pkg.SrcModTime.After(archive.BuildTime) {
-			if err := archive.RegisterTypes(s.Types); err != nil {
-				panic(fmt.Errorf("failed to load type information from %v: %w", archive, err))
-			}
+		archive := s.buildCache.LoadArchive(pkg.ImportPath, pkg.SrcModTime, s.Types)
+		if archive != nil {
 			s.UpToDateArchives[pkg.ImportPath] = archive
 			// Existing archive is up to date, no need to build it from scratch.
 			return archive, nil
@@ -1021,7 +1035,7 @@ func (s *Session) BuildPackage(pkg *PackageData) (*compiler.Archive, error) {
 		fmt.Println(pkg.ImportPath)
 	}
 
-	s.buildCache.StoreArchive(archive)
+	s.buildCache.StoreArchive(archive, time.Now())
 	s.UpToDateArchives[pkg.ImportPath] = archive
 
 	return archive, nil
diff --git a/build/cache/cache.go b/build/cache/cache.go
index 2c4e5703a..fc0949d67 100644
--- a/build/cache/cache.go
+++ b/build/cache/cache.go
@@ -6,9 +6,11 @@ import (
 	"crypto/sha256"
 	"fmt"
 	"go/build"
+	"go/types"
 	"os"
 	"path"
 	"path/filepath"
+	"time"
 
 	"github.com/gopherjs/gopherjs/compiler"
 	log "github.com/sirupsen/logrus"
@@ -90,7 +92,10 @@ func (bc BuildCache) String() string {
 
 // StoreArchive compiled archive in the cache. Any error inside this method
 // will cause the cache not to be persisted.
-func (bc *BuildCache) StoreArchive(a *compiler.Archive) {
+//
+// The passed in buildTime is used to determine if the archive is out-of-date when reloaded.
+// Typically it should be set to the srcModTime or time.Now().
+func (bc *BuildCache) StoreArchive(a *compiler.Archive, buildTime time.Time) {
 	if bc == nil {
 		return // Caching is disabled.
 	}
@@ -106,7 +111,7 @@ func (bc *BuildCache) StoreArchive(a *compiler.Archive) {
 		return
 	}
 	defer f.Close()
-	if err := compiler.WriteArchive(a, f); err != nil {
+	if err := compiler.WriteArchive(a, buildTime, f); err != nil {
 		log.Warningf("Failed to write build cache archive %q: %v", a, err)
 		// Make sure we don't leave a half-written archive behind.
 		os.Remove(f.Name())
@@ -125,7 +130,10 @@ func (bc *BuildCache) StoreArchive(a *compiler.Archive) {
 //
 // The returned archive would have been built with the same configuration as
 // the build cache was.
-func (bc *BuildCache) LoadArchive(importPath string) *compiler.Archive {
+//
+// The imports map is used to resolve package dependencies and may modify the
+// map to include the package from the read archive. See [gcexportdata.Read].
+func (bc *BuildCache) LoadArchive(importPath string, srcModTime time.Time, imports map[string]*types.Package) *compiler.Archive {
 	if bc == nil {
 		return nil // Caching is disabled.
 	}
@@ -140,12 +148,16 @@ func (bc *BuildCache) LoadArchive(importPath string) *compiler.Archive {
 		return nil // Cache miss.
 	}
 	defer f.Close()
-	a, err := compiler.ReadArchive(importPath, f)
+	a, buildTime, err := compiler.ReadArchive(importPath, f, srcModTime, imports)
 	if err != nil {
 		log.Warningf("Failed to read cached package archive for %q: %v", importPath, err)
 		return nil // Invalid/corrupted archive, cache miss.
 	}
-	log.Infof("Found cached package archive for %q, built at %v.", importPath, a.BuildTime)
+	if a == nil {
+		log.Infof("Found out-of-date package archive for %q, built at %v.", importPath, buildTime)
+		return nil // Archive is out-of-date, cache miss.
+	}
+	log.Infof("Found cached package archive for %q, built at %v.", importPath, buildTime)
 	return a
 }
 
diff --git a/build/cache/cache_test.go b/build/cache/cache_test.go
index fd89ec187..0a0541f64 100644
--- a/build/cache/cache_test.go
+++ b/build/cache/cache_test.go
@@ -1,7 +1,9 @@
 package cache
 
 import (
+	"go/types"
 	"testing"
+	"time"
 
 	"github.com/google/go-cmp/cmp"
 	"github.com/gopherjs/gopherjs/compiler"
@@ -15,21 +17,24 @@ func TestStore(t *testing.T) {
 		Imports:    []string{"fake/dep"},
 	}
 
+	srcModTime := newTime(0.0)
+	buildTime := newTime(5.0)
+	imports := map[string]*types.Package{}
 	bc := BuildCache{}
-	if got := bc.LoadArchive(want.ImportPath); got != nil {
+	if got := bc.LoadArchive(want.ImportPath, srcModTime, imports); got != nil {
 		t.Errorf("Got: %s was found in the cache. Want: empty cache.", got.ImportPath)
 	}
 
-	bc.StoreArchive(want)
-	got := bc.LoadArchive(want.ImportPath)
+	bc.StoreArchive(want, buildTime)
+	got := bc.LoadArchive(want.ImportPath, srcModTime, imports)
 	if got == nil {
-		t.Errorf("Got: %s wan not found in the cache. Want: archive is can be loaded after store.", want.ImportPath)
+		t.Errorf("Got: %s was not found in the cache. Want: archive can be loaded after store.", want.ImportPath)
 	}
 	if diff := cmp.Diff(want, got); diff != "" {
 		t.Errorf("Loaded archive is different from stored (-want,+got):\n%s", diff)
 	}
 
 	// Make sure the package names are a part of the cache key.
-	if got := bc.LoadArchive("fake/other"); got != nil {
+	if got := bc.LoadArchive("fake/other", srcModTime, imports); got != nil {
 		t.Errorf("Got: fake/other was found in cache: %#v. Want: nil for packages that weren't cached.", got)
 	}
 }
@@ -59,20 +64,54 @@ func TestInvalidation(t *testing.T) {
 		},
 	}
 
+	srcModTime := newTime(0.0)
+	buildTime := newTime(5.0)
+	imports := map[string]*types.Package{}
 	for _, test := range tests {
 		a := &compiler.Archive{ImportPath: "package/fake"}
-		test.cache1.StoreArchive(a)
+		test.cache1.StoreArchive(a, buildTime)
 
-		if got := test.cache2.LoadArchive(a.ImportPath); got != nil {
+		if got := test.cache2.LoadArchive(a.ImportPath, srcModTime, imports); got != nil {
 			t.Logf("-cache1,+cache2:\n%s", cmp.Diff(test.cache1, test.cache2))
 			t.Errorf("Got: %v loaded from cache. Want: build parameter change invalidates cache.", got)
 		}
 	}
 }
 
+func TestOldArchive(t *testing.T) {
+	cacheForTest(t)
+
+	want := &compiler.Archive{
+		ImportPath: "fake/package",
+		Imports:    []string{"fake/dep"},
+	}
+
+	buildTime := newTime(5.0)
+	imports := map[string]*types.Package{}
+	bc := BuildCache{}
+	bc.StoreArchive(want, buildTime)
+
+	oldSrcModTime := newTime(2.0) // older than archive build time, so archive is up-to-date
+	got := bc.LoadArchive(want.ImportPath, oldSrcModTime, imports)
+	if got == nil {
+		t.Errorf("Got: %s was nil. Want: up-to-date archive to be loaded.", want.ImportPath)
+	}
+
+	newerSrcModTime := newTime(7.0) // newer than archive build time, so archive is stale
+	got = bc.LoadArchive(want.ImportPath, newerSrcModTime, imports)
+	if got != nil {
+		t.Errorf("Got: %s was not nil. Want: stale archive to not be loaded.", want.ImportPath)
+	}
+}
+
 func cacheForTest(t *testing.T) {
 	t.Helper()
 	originalRoot := cacheRoot
 	t.Cleanup(func() { cacheRoot = originalRoot })
 	cacheRoot = t.TempDir()
 }
+
+func newTime(seconds float64) time.Time {
+	return time.Date(1969, 7, 20, 20, 17, 0, 0, time.UTC).
+		Add(time.Duration(seconds * float64(time.Second)))
+}
diff --git a/compiler/compiler.go b/compiler/compiler.go
index cffd4c86d..96ec390d8 100644
--- a/compiler/compiler.go
+++ b/compiler/compiler.go
@@ -46,45 +46,26 @@ type Archive struct {
 	// A list of full package import paths that the current package imports across
 	// all source files. See go/types.Package.Imports().
 	Imports []string
-	// Serialized contents of go/types.Package in a binary format. This information
-	// is used by the compiler to type-check packages that import this one. See
-	// gcexportdata.Write().
-	//
-	// TODO(nevkontakte): It would be more convenient to store go/types.Package
-	// itself and only serialize it when writing the archive onto disk.
-	ExportData []byte
+	// The package information is used by the compiler to type-check packages
+	// that import this one. See [gcexportdata.Write].
+	Package *types.Package
 	// Compiled package-level symbols.
 	Declarations []*Decl
 	// Concatenated contents of all raw .inc.js of the package.
 	IncJSCode []byte
-	// JSON-serialized contents of go/token.FileSet. This is used to obtain source
-	// code locations for various symbols (e.g. for sourcemap generation). See
-	// token.FileSet.Write().
-	//
-	// TODO(nevkontakte): This is also more convenient to store as the original
-	// object and only serialize before writing onto disk.
-	FileSet []byte
+	// The file set containing the source code locations for various symbols
+	// (e.g. for sourcemap generation). See [token.FileSet.Write].
+	FileSet *token.FileSet
 	// Whether or not the package was compiled with minification enabled.
 	Minified bool
 	// A list of go:linkname directives encountered in the package.
 	GoLinknames []GoLinkname
-	// Time when this archive was built.
-	BuildTime time.Time
 }
 
 func (a Archive) String() string {
 	return fmt.Sprintf("compiler.Archive{%s}", a.ImportPath)
 }
 
-// RegisterTypes adds package type information from the archive into the provided map.
-func (a *Archive) RegisterTypes(packages map[string]*types.Package) error {
-	var err error
-	// TODO(nevkontakte): Should this be shared throughout the build?
-	fset := token.NewFileSet()
-	packages[a.ImportPath], err = gcexportdata.Read(bytes.NewReader(a.ExportData), fset, packages, a.ImportPath)
-	return err
-}
-
 type Dependency struct {
 	Pkg  string
 	Type string
@@ -185,10 +166,7 @@ func WriteProgramCode(pkgs []*Archive, w *SourceMapFilter, goVersion string) err
 
 func WritePkgCode(pkg *Archive, dceSelection map[*Decl]struct{}, gls goLinknameSet, minify bool, w *SourceMapFilter) error {
 	if w.MappingCallback != nil && pkg.FileSet != nil {
-		w.fileSet = token.NewFileSet()
-		if err := w.fileSet.Read(json.NewDecoder(bytes.NewReader(pkg.FileSet)).Decode); err != nil {
-			panic(err)
-		}
+		w.fileSet = pkg.FileSet
 	}
 	if _, err := w.Write(pkg.IncJSCode); err != nil {
 		return err
@@ -277,19 +255,98 @@ func WritePkgCode(pkg *Archive, dceSelection map[*Decl]struct{}, gls goLinknameS
 	return nil
 }
 
+type serializableArchive struct {
+	ImportPath   string
+	Name         string
+	Imports      []string
+	ExportData   []byte
+	Declarations []*Decl
+	IncJSCode    []byte
+	FileSet      []byte
+	Minified     bool
+	GoLinknames  []GoLinkname
+	BuildTime    time.Time
+}
+
 // ReadArchive reads serialized compiled archive of the importPath package.
-func ReadArchive(path string, r io.Reader) (*Archive, error) {
+//
+// The given srcModTime is used to determine if the archive is out-of-date.
+// If the archive is out-of-date, the returned archive is nil.
+// If no error occurred, the returned time is when the archive was built.
+//
+// The imports map is used to resolve package dependencies and may modify the
+// map to include the package from the read archive. See [gcexportdata.Read].
+func ReadArchive(importPath string, r io.Reader, srcModTime time.Time, imports map[string]*types.Package) (*Archive, time.Time, error) {
+	var sa serializableArchive
+	if err := gob.NewDecoder(r).Decode(&sa); err != nil {
+		return nil, time.Time{}, err
+	}
+
+	if srcModTime.After(sa.BuildTime) {
+		// Archive is out-of-date.
+		return nil, sa.BuildTime, nil
+	}
+
 	var a Archive
-	if err := gob.NewDecoder(r).Decode(&a); err != nil {
-		return nil, err
+	fset := token.NewFileSet()
+	if len(sa.ExportData) > 0 {
+		pkg, err := gcexportdata.Read(bytes.NewReader(sa.ExportData), fset, imports, importPath)
+		if err != nil {
+			return nil, sa.BuildTime, err
+		}
+		a.Package = pkg
+	}
+
+	if len(sa.FileSet) > 0 {
+		a.FileSet = token.NewFileSet()
+		if err := a.FileSet.Read(json.NewDecoder(bytes.NewReader(sa.FileSet)).Decode); err != nil {
+			return nil, sa.BuildTime, err
+		}
 	}
 
-	return &a, nil
+	a.ImportPath = sa.ImportPath
+	a.Name = sa.Name
+	a.Imports = sa.Imports
+	a.Declarations = sa.Declarations
+	a.IncJSCode = sa.IncJSCode
+	a.Minified = sa.Minified
+	a.GoLinknames = sa.GoLinknames
+	return &a, sa.BuildTime, nil
 }
 
 // WriteArchive writes compiled package archive on disk for later reuse.
-func WriteArchive(a *Archive, w io.Writer) error {
-	return gob.NewEncoder(w).Encode(a)
+//
+// The passed in buildTime is used to determine if the archive is out-of-date.
+// Typically it should be set to the srcModTime or time.Now() but it is exposed for testing purposes.
+func WriteArchive(a *Archive, buildTime time.Time, w io.Writer) error {
+	exportData := new(bytes.Buffer)
+	if a.Package != nil {
+		if err := gcexportdata.Write(exportData, nil, a.Package); err != nil {
+			return fmt.Errorf("failed to write export data: %w", err)
+		}
+	}
+
+	encodedFileSet := new(bytes.Buffer)
+	if a.FileSet != nil {
+		if err := a.FileSet.Write(json.NewEncoder(encodedFileSet).Encode); err != nil {
+			return err
+		}
+	}
+
+	sa := serializableArchive{
+		ImportPath:   a.ImportPath,
+		Name:         a.Name,
+		Imports:      a.Imports,
+		ExportData:   exportData.Bytes(),
+		Declarations: a.Declarations,
+		IncJSCode:    a.IncJSCode,
+		FileSet:      encodedFileSet.Bytes(),
+		Minified:     a.Minified,
+		GoLinknames:  a.GoLinknames,
+		BuildTime:    buildTime,
+	}
+
+	return gob.NewEncoder(w).Encode(sa)
 }
 
 type SourceMapFilter struct {
diff --git a/compiler/compiler_test.go b/compiler/compiler_test.go
index 65178e986..16789971b 100644
--- a/compiler/compiler_test.go
+++ b/compiler/compiler_test.go
@@ -6,6 +6,7 @@ import (
 	"regexp"
 	"sort"
 	"testing"
+	"time"
 
 	"github.com/google/go-cmp/cmp"
 	"golang.org/x/tools/go/packages"
@@ -109,7 +110,7 @@ func TestDeclSelection_KeepUnusedUnexportedMethodForInterface(t *testing.T) {
 			println("foo2")
 		}
 
-		type IFoo interface { 
+		type IFoo interface {
 			Bar()
 			baz()
 		}
@@ -405,6 +406,33 @@ func TestLengthParenthesizingIssue841(t *testing.T) {
 	}
 }
 
+func TestArchiveSelectionAfterSerialization(t *testing.T) {
+	src := `
+		package main
+		type Foo interface{ int | string }
+
+		type Bar[T Foo] struct{ v T }
+		func (b Bar[T]) Baz() { println(b.v) }
+
+		var ghost = Bar[int]{v: 7} // unused
+
+		func main() {
+			println("do nothing")
+		}`
+	srcFiles := []srctesting.Source{{Name: `main.go`, Contents: []byte(src)}}
+	root := srctesting.ParseSources(t, srcFiles, nil)
+	rootPath := root.PkgPath
+	origArchives := compileProject(t, root, false)
+	readArchives := reloadCompiledProject(t, origArchives, rootPath)
+
+	origJS := renderPackage(t, origArchives[rootPath], false)
+	readJS := renderPackage(t, readArchives[rootPath], false)
+
+	if diff := cmp.Diff(origJS, readJS); diff != "" {
+		t.Errorf("the reloaded files produce different JS:\n%s", diff)
+	}
+}
+
 func compareOrder(t *testing.T, sourceFiles []srctesting.Source, minify bool) {
 	t.Helper()
 	outputNormal := compile(t, sourceFiles, minify)
@@ -416,12 +444,12 @@ func compareOrder(t *testing.T, sourceFiles []srctesting.Source, minify bool) {
 
 	outputReversed := compile(t, sourceFiles, minify)
 
-	if diff := cmp.Diff(string(outputNormal), string(outputReversed)); diff != "" {
+	if diff := cmp.Diff(outputNormal, outputReversed); diff != "" {
 		t.Errorf("files in different order produce different JS:\n%s", diff)
 	}
 }
 
-func compile(t *testing.T, sourceFiles []srctesting.Source, minify bool) []byte {
+func compile(t *testing.T, sourceFiles []srctesting.Source, minify bool) string {
 	t.Helper()
 	rootPkg := srctesting.ParseSources(t, sourceFiles, nil)
 	archives := compileProject(t, rootPkg, minify)
@@ -432,11 +460,7 @@ func compile(t *testing.T, sourceFiles []srctesting.Source, minify bool) []byte
 		t.Fatalf(`root package not found in archives: %s`, path)
 	}
 
-	b := renderPackage(t, a, minify)
-	if len(b) == 0 {
-		t.Fatal(`compile had no output`)
-	}
-	return b
+	return renderPackage(t, a, minify)
 }
 
 // compileProject compiles the given root package and all packages imported by the root.
@@ -481,12 +505,68 @@ func compileProject(t *testing.T, root *packages.Package, minify bool) map[strin
 	return archiveCache
 }
 
-func renderPackage(t *testing.T, archive *Archive, minify bool) []byte {
+// newTime creates an arbitrary time.Time offset by the given number of seconds.
+// This is useful for quickly creating times that are before or after another.
+func newTime(seconds float64) time.Time {
+	return time.Date(1969, 7, 20, 20, 17, 0, 0, time.UTC).
+		Add(time.Duration(seconds * float64(time.Second)))
+}
+
+// reloadCompiledProject persists the given archives into memory then reloads
+// them from memory to simulate a cache reload of a precompiled project.
+func reloadCompiledProject(t *testing.T, archives map[string]*Archive, rootPkgPath string) map[string]*Archive {
+	t.Helper()
+
+	buildTime := newTime(5.0)
+	serialized := map[string][]byte{}
+	for path, a := range archives {
+		buf := &bytes.Buffer{}
+		if err := WriteArchive(a, buildTime, buf); err != nil {
+			t.Fatalf(`failed to write archive for %s: %v`, path, err)
+		}
+		serialized[path] = buf.Bytes()
+	}
+
+	srcModTime := newTime(0.0)
+	reloadCache := map[string]*Archive{}
+	var importContext *ImportContext
+	importContext = &ImportContext{
+		Packages: map[string]*types.Package{},
+		Import: func(path string) (*Archive, error) {
+			// find in local cache
+			if a, ok := reloadCache[path]; ok {
+				return a, nil
+			}
+
+			// deserialize archive
+			buf, ok := serialized[path]
+			if !ok {
+				t.Fatalf(`archive not found for %s`, path)
+			}
+			a, _, err := ReadArchive(path, bytes.NewReader(buf), srcModTime, importContext.Packages)
+			if err != nil {
+				t.Fatalf(`failed to read archive for %s: %v`, path, err)
+			}
+			reloadCache[path] = a
+			return a, nil
+		},
+	}
+
+	_, err := importContext.Import(rootPkgPath)
+	if err != nil {
+		t.Fatal(`failed to reload archives:`, err)
+	}
+	return reloadCache
+}
+
+func renderPackage(t *testing.T, archive *Archive, minify bool) string {
 	t.Helper()
-	selection := make(map[*Decl]struct{})
+
+	sel := &dce.Selector[*Decl]{}
 	for _, d := range archive.Declarations {
-		selection[d] = struct{}{}
+		sel.Include(d, false)
 	}
+	selection := sel.AliveDecls()
 
 	buf := &bytes.Buffer{}
 
@@ -494,7 +574,11 @@ func renderPackage(t *testing.T, archive *Archive, minify bool) {
 		t.Fatal(err)
 	}
 
-	return buf.Bytes()
+	b := buf.String()
+	if len(b) == 0 {
+		t.Fatal(`render package had no output`)
+	}
+	return b
 }
 
 type selectionTester struct {
diff --git a/compiler/decls.go b/compiler/decls.go
index 5419c4c7d..9f6518875 100644
--- a/compiler/decls.go
+++ b/compiler/decls.go
@@ -23,6 +23,9 @@ import (
 //
 // It contains code generated by the compiler for this specific symbol, which is
 // grouped by the execution stage it belongs to in the JavaScript runtime.
+//
+// When adding new fields to this struct, make sure the field is exported
+// so that it Gob serializes correctly for the archive cache.
 type Decl struct {
 	// The package- or receiver-type-qualified name of function or method obj.
 	// See go/types.Func.FullName().
@@ -52,8 +55,8 @@ type Decl struct {
 	// JavaScript code that needs to be executed during the package init phase to
 	// set the symbol up (e.g. initialize package-level variable value).
 	InitCode []byte
-	// dce stores the information for dead-code elimination.
-	dce dce.Info
+	// DCEInfo stores the information for dead-code elimination.
+	DCEInfo dce.Info
 	// Set to true if a function performs a blocking operation (I/O or
 	// synchronization). The compiler will have to generate function code such
 	// that it can be resumed after a blocking operation completes without
 	// blocking the main thread.
@@ -73,7 +76,7 @@ func (d Decl) minify() Decl {
 
 // Dce gets the information for dead-code elimination.
 func (d *Decl) Dce() *dce.Info {
-	return &d.dce
+	return &d.DCEInfo
 }
 
 // topLevelObjects extracts package-level variables, functions and named types
diff --git a/compiler/internal/dce/info.go b/compiler/internal/dce/info.go
index 07d818855..6a45e9ef3 100644
--- a/compiler/internal/dce/info.go
+++ b/compiler/internal/dce/info.go
@@ -1,6 +1,8 @@
 package dce
 
 import (
+	"bytes"
+	"encoding/gob"
 	"fmt"
 	"go/types"
 	"sort"
@@ -117,3 +119,39 @@ func (id *Info) getDeps() []string {
 	sort.Strings(deps)
 	return deps
 }
+
+type serializableInfo struct {
+	Alive        bool
+	ObjectFilter string
+	MethodFilter string
+	Deps         []string
+}
+
+func (id *Info) GobEncode() ([]byte, error) {
+	si := serializableInfo{
+		Alive:        id.alive,
+		ObjectFilter: id.objectFilter,
+		MethodFilter: id.methodFilter,
+		Deps:         id.getDeps(),
+	}
+
+	buf := &bytes.Buffer{}
+	err := gob.NewEncoder(buf).Encode(si)
+	return buf.Bytes(), err
+}
+
+func (id *Info) GobDecode(data []byte) error {
+	var si serializableInfo
+	if err := gob.NewDecoder(bytes.NewReader(data)).Decode(&si); err != nil {
+		return err
+	}
+
+	id.alive = si.Alive
+	id.objectFilter = si.ObjectFilter
+	id.methodFilter = si.MethodFilter
+	id.deps = make(map[string]struct{}, len(si.Deps))
+	for _, dep := range si.Deps {
+		id.deps[dep] = struct{}{}
+	}
+	return nil
+}
diff --git a/compiler/package.go b/compiler/package.go
index 4cd800607..2f6af9c6b 100644
--- a/compiler/package.go
+++ b/compiler/package.go
@@ -1,21 +1,17 @@
 package compiler
 
 import (
-	"bytes"
-	"encoding/json"
 	"fmt"
 	"go/ast"
 	"go/token"
 	"go/types"
 	"strings"
-	"time"
 
 	"github.com/gopherjs/gopherjs/compiler/internal/analysis"
 	"github.com/gopherjs/gopherjs/compiler/internal/dce"
 	"github.com/gopherjs/gopherjs/compiler/internal/typeparams"
 	"github.com/gopherjs/gopherjs/compiler/typesutil"
 	"github.com/gopherjs/gopherjs/internal/experiments"
-	"golang.org/x/tools/go/gcexportdata"
 	"golang.org/x/tools/go/types/typeutil"
 )
 
@@ -285,25 +281,15 @@ func Compile(importPath string, files []*ast.File, fileSet *token.FileSet, impor
 		return nil, rootCtx.pkgCtx.errList
 	}
 
-	exportData := new(bytes.Buffer)
-	if err := gcexportdata.Write(exportData, nil, typesPkg); err != nil {
-		return nil, fmt.Errorf("failed to write export data: %w", err)
-	}
-	encodedFileSet := new(bytes.Buffer)
-	if err := srcs.FileSet.Write(json.NewEncoder(encodedFileSet).Encode); err != nil {
-		return nil, err
-	}
-
 	return &Archive{
 		ImportPath:   srcs.ImportPath,
 		Name:         typesPkg.Name(),
 		Imports:      importedPaths,
-		ExportData:   exportData.Bytes(),
+		Package:      typesPkg,
 		Declarations: allDecls,
-		FileSet:      encodedFileSet.Bytes(),
+		FileSet:      srcs.FileSet,
 		Minified:     minify,
 		GoLinknames:  goLinknames,
-		BuildTime:    time.Now(),
 	}, nil
 }
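
Usage sketch (not part of the patch): the snippet below shows how the new WriteArchive/ReadArchive pair from this diff behaves with respect to buildTime vs. srcModTime, assuming it is built inside the gopherjs module; the "example/pkg" import path and the in-memory buffer are illustrative only. An archive is returned only when the sources are not newer than the recorded build time; otherwise ReadArchive reports a nil archive plus the stored build time, which BuildCache.LoadArchive treats as a cache miss.

```go
package main

import (
	"bytes"
	"fmt"
	"go/types"
	"time"

	"github.com/gopherjs/gopherjs/compiler"
)

func main() {
	// A minimal archive; real archives also carry Package, FileSet, Declarations, etc.
	a := &compiler.Archive{ImportPath: "example/pkg"}
	buildTime := time.Now()

	buf := &bytes.Buffer{}
	if err := compiler.WriteArchive(a, buildTime, buf); err != nil {
		panic(err)
	}

	imports := map[string]*types.Package{}

	// Sources older than the archive: the archive round-trips and is considered up to date.
	fresh, _, err := compiler.ReadArchive("example/pkg",
		bytes.NewReader(buf.Bytes()), buildTime.Add(-time.Minute), imports)
	fmt.Println(fresh != nil, err) // true <nil>

	// Sources newer than the archive: a nil archive signals that it is out-of-date.
	stale, builtAt, err := compiler.ReadArchive("example/pkg",
		bytes.NewReader(buf.Bytes()), buildTime.Add(time.Minute), imports)
	fmt.Println(stale == nil, builtAt, err) // true <stored build time> <nil>
}
```

Performing the staleness check inside ReadArchive, before any export data is decoded, is what lets BuildPackage drop the previous RegisterTypes step: type information is registered into the imports map only for archives that are actually reused.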