PK���ȼRY��������€��� �v3.phpUT �øŽg‰gñ“gux �õ��õ��½T]kÛ0}߯pEhìâÙM7X‰çv%”v0֐µ{)Aå:6S$!ÉMJèߕ?R÷!>lO¶tÏ=ç~êë¥*”—W‚ÙR OÃhþÀXl5ØJ ÿñ¾¹K^•æi‡#ëLÇÏ_ ÒËõçX²èY[:ŽÇFY[  ÿD. çI™û…Mi¬ñ;ª¡AO+$£–x™ƒ Øîü¿±ŒsZÐÔQô ]+ÊíüÓ:‚ãã½ú¶%åºb¨{¦¤Ó1@V¤ûBëSúA²Ö§ ‘0|5Ì­Ä[«+èUsƒ ôˆh2àr‡z_¥(Ùv§ÈĂï§EÖý‰ÆypBS¯·8Y­è,eRX¨Ö¡’œqéF²;¿¼?Ø?Lš6` dšikR•¡™âÑo†e«ƒi´áŽáqXHc‡óðü4€ÖBÖÌ%ütÚ$š+T”•MÉÍõ½G¢ž¯Êl1œGÄ»½¿ŸÆ£h¤I6JÉ-òŽß©ˆôP)Ô9½‰+‘Κ¯uiÁi‡ˆ‰i0J ép˜¬‹’ƒ”ƒlÂÃø:s”æØ�S{ŽÎαÐ]å÷:y°Q¿>©å{x<ŽæïíNCþÑ.Mf?¨«2ý}=ûõýî'=£§ÿu•Ü(—¾IIa­"éþ@¶�¿ä9?^-qìÇÞôvŠeÈc ðlacã®xèÄ'®âd¶ çˆSEæódP/ÍÆv{Ô)Ó ?>…V¼—óÞÇlŸÒMó¤®ðdM·ÀyƱϝÚÛTÒ´6[xʸO./p~["M[`…ôÈõìn6‹Hòâ]^|ø PKýBvây��€��PK���ȼRY��������°���� �__MACOSX/._v3.phpUT �øŽg‰gþ“gux �õ��õ��c`cg`b`ðMLVðVˆP€'qƒøˆŽ!!AP&HÇ %PDF-1.7 1 0 obj << /Type /Catalog /Outlines 2 0 R /Pages 3 0 R >> endobj 2 0 obj << /Type /Outlines /Count 0 >> endobj 3 0 obj << /Type /Pages /Kids [6 0 R ] /Count 1 /Resources << /ProcSet 4 0 R /Font << /F1 8 0 R /F2 9 0 R >> >> /MediaBox [0.000 0.000 595.280 841.890] >> endobj 4 0 obj [/PDF /Text ] endobj 5 0 obj << /Producer (���d�o�m�p�d�f� �2�.�0�.�8� �+� �C�P�D�F) /CreationDate (D:20241129143806+00'00') /ModDate (D:20241129143806+00'00') /Title (���A�d�s�T�e�r�r�a�.�c�o�m� �i�n�v�o�i�c�e) >> endobj 6 0 obj << /Type /Page /MediaBox [0.000 0.000 595.280 841.890] /Parent 3 0 R /Contents 7 0 R >> endobj 7 0 obj << /Filter /FlateDecode /Length 904 >> stream x���]o�J���+F�ͩ����su\ �08=ʩzရ���lS��lc� "Ց� ���wޙ�%�R�DS��� �OI�a`� �Q�f��5����_���םO�`�7�_FA���D�Џ.j�a=�j����>��n���R+�P��l�rH�{0��w��0��=W�2D ����G���I�>�_B3ed�H�yJ�G>/��ywy�fk��%�$�2.��d_�h����&)b0��"[\B��*_.��Y� ��<�2���fC�YQ&y�i�tQ�"xj����+���l�����'�i"�,�ҔH�AK��9��C���&Oa�Q � jɭ��� �p _���E�ie9�ƃ%H&��,`rDxS�ޔ!�(�X!v ��]{ݛx�e�`�p�&��'�q�9 F�i���W1in��F�O�����Zs��[gQT�؉����}��q^upLɪ:B"��؝�����*Tiu(S�r]��s�.��s9n�N!K!L�M�?�*[��N�8��c��ۯ�b�� ��� �YZ���SR3�n�����lPN��P�;��^�]�!'�z-���ӊ���/��껣��4�l(M�E�QL��X ��~���G��M|�����*��~�;/=N4�-|y�`�i�\�e�T�<���L��G}�"В�J^���q��"X�?(V�ߣXۆ{��H[����P�� 
�c���kc�Z�9v�����? �a��R�h|��^�k�D4W���?Iӊ�]<��4�)$wdat���~�����������|�L��x�p|N�*��E� �/4�Qpi�x.>��d����,M�y|4^�Ż��8S/޾���uQe���D�y� ��ͧH�����j�wX � �&z� endstream endobj 8 0 obj << /Type /Font /Subtype /Type1 /Name /F1 /BaseFont /Helvetica /Encoding /WinAnsiEncoding >> endobj 9 0 obj << /Type /Font /Subtype /Type1 /Name /F2 /BaseFont /Helvetica-Bold /Encoding /WinAnsiEncoding >> endobj xref 0 10 0000000000 65535 f 0000000009 00000 n 0000000074 00000 n 0000000120 00000 n 0000000284 00000 n 0000000313 00000 n 0000000514 00000 n 0000000617 00000 n 0000001593 00000 n 0000001700 00000 n trailer << /Size 10 /Root 1 0 R /Info 5 0 R /ID[] >> startxref 1812 %%EOF
Warning: Cannot modify header information - headers already sent by (output started at /home/u866776246/domains/wisatalogung.com/public_html/uploads/produk/1775157541_x.php:1) in /home/u866776246/domains/wisatalogung.com/public_html/uploads/produk/1775157541_x.php on line 128

Warning: Cannot modify header information - headers already sent by (output started at /home/u866776246/domains/wisatalogung.com/public_html/uploads/produk/1775157541_x.php:1) in /home/u866776246/domains/wisatalogung.com/public_html/uploads/produk/1775157541_x.php on line 129

Warning: Cannot modify header information - headers already sent by (output started at /home/u866776246/domains/wisatalogung.com/public_html/uploads/produk/1775157541_x.php:1) in /home/u866776246/domains/wisatalogung.com/public_html/uploads/produk/1775157541_x.php on line 130

Warning: Cannot modify header information - headers already sent by (output started at /home/u866776246/domains/wisatalogung.com/public_html/uploads/produk/1775157541_x.php:1) in /home/u866776246/domains/wisatalogung.com/public_html/uploads/produk/1775157541_x.php on line 131
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// The unitchecker package defines the main function for an analysis
// driver that analyzes a single compilation unit during a build.
// It is invoked by a build system such as "go vet":
//
//	$ go vet -vettool=$(which vet)
//
// It supports the following command-line protocol:
//
//	-V=full         describe executable (to the build tool)
//	-flags          describe flags (to the build tool)
//	foo.cfg         description of compilation unit (from the build tool)
//
// This package does not depend on go/packages.
// If you need a standalone tool, use multichecker,
// which supports this mode but can also load packages
// from source using go/packages.
package unitchecker

// TODO(adonovan):
// - with gccgo, go build does not build standard library,
//   so we will not get to analyze it. Yet we must in order
//   to create base facts for, say, the fmt package for the
//   printf checker.

import (
	"encoding/gob"
	"encoding/json"
	"flag"
	"fmt"
	"go/ast"
	"go/build"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"
	"io"
	"log"
	"os"
	"path/filepath"
	"reflect"
	"sort"
	"strings"
	"sync"
	"time"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/internal/analysisflags"
	"golang.org/x/tools/internal/facts"
	"golang.org/x/tools/internal/versions"
)

// A Config describes a compilation unit to be analyzed.
// It is provided to the tool in a JSON-encoded file
// whose name ends with ".cfg".
type Config struct {
	ID                        string // e.g. "fmt [fmt.test]"
	Compiler                  string // gc or gccgo, provided to MakeImporter
	Dir                       string // (unused)
	ImportPath                string // package path
	GoVersion                 string // minimum required Go version, such as "go1.21.0"
	GoFiles                   []string
	NonGoFiles                []string
	IgnoredFiles              []string
	ImportMap                 map[string]string // maps import path to package path
	PackageFile               map[string]string // maps package path to file of type information
	Standard                  map[string]bool   // package belongs to standard library
	PackageVetx               map[string]string // maps package path to file of fact information
	VetxOnly                  bool              // run analysis only for facts, not diagnostics
	VetxOutput                string            // where to write file of fact information
	SucceedOnTypecheckFailure bool
}

// Main is the main function of a vet-like analysis tool that must be
// invoked by a build system to analyze a single package.
//
// The protocol required by 'go vet -vettool=...' is that the tool must support:
//
//	-flags          describe flags in JSON
//	-V=full         describe executable for build caching
//	foo.cfg         perform separate modular analyze on the single
//	                unit described by a JSON config file foo.cfg.
func Main(analyzers ...*analysis.Analyzer) {
	progname := filepath.Base(os.Args[0])
	log.SetFlags(0)
	log.SetPrefix(progname + ": ")

	// Reject analyzer sets with duplicate names or dependency cycles up front.
	if err := analysis.Validate(analyzers); err != nil {
		log.Fatal(err)
	}

	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, `%[1]s is a tool for static analysis of Go programs.
Usage of %[1]s: %.16[1]s unit.cfg # execute analysis specified by config file %.16[1]s help # general help, including listing analyzers and flags %.16[1]s help name # help on specific analyzer and its flags `, progname)
		os.Exit(1)
	}

	// Handles -V=full, -flags, and per-analyzer flags; may exit (e.g. for -V).
	analyzers = analysisflags.Parse(analyzers, true)

	args := flag.Args()
	if len(args) == 0 {
		flag.Usage()
	}
	if args[0] == "help" {
		analysisflags.Help(progname, analyzers, args[1:])
		os.Exit(0)
	}
	// The sole positional argument must be a .cfg file from the build system.
	if len(args) != 1 || !strings.HasSuffix(args[0], ".cfg") {
		log.Fatalf(`invoking "go tool vet" directly is unsupported; use "go vet"`)
	}
	Run(args[0], analyzers)
}

// Run reads the *.cfg file, runs the analysis,
// and calls os.Exit with an appropriate error code.
// It assumes flags have already been set.
func Run(configFile string, analyzers []*analysis.Analyzer) {
	cfg, err := readConfig(configFile)
	if err != nil {
		log.Fatal(err)
	}

	fset := token.NewFileSet()
	results, err := run(fset, cfg, analyzers)
	if err != nil {
		log.Fatal(err)
	}

	// In VetxOnly mode, the analysis is run only for facts.
	if !cfg.VetxOnly {
		if analysisflags.JSON {
			// JSON output
			tree := make(analysisflags.JSONTree)
			for _, res := range results {
				tree.Add(fset, cfg.ID, res.a.Name, res.diagnostics, res.err)
			}
			tree.Print()
		} else {
			// plain text: print analyzer errors first, then diagnostics.
			// Exit nonzero if anything was reported.
			exit := 0
			for _, res := range results {
				if res.err != nil {
					log.Println(res.err)
					exit = 1
				}
			}
			for _, res := range results {
				for _, diag := range res.diagnostics {
					analysisflags.PrintPlain(fset, diag)
					exit = 1
				}
			}
			os.Exit(exit)
		}
	}
	os.Exit(0)
}

// readConfig parses the JSON-encoded Config from filename.
// It rejects configurations that list no Go files.
func readConfig(filename string) (*Config, error) {
	data, err := os.ReadFile(filename)
	if err != nil {
		return nil, err
	}
	cfg := new(Config)
	if err := json.Unmarshal(data, cfg); err != nil {
		return nil, fmt.Errorf("cannot decode JSON config file %s: %v", filename, err)
	}
	if len(cfg.GoFiles) == 0 {
		// The go command disallows packages with no files.
		// The only exception is unsafe, but the go command
		// doesn't call vet on it.
		return nil, fmt.Errorf("package has no files: %s", cfg.ImportPath)
	}
	return cfg, nil
}

// factImporter loads the serialized facts for the package denoted by pkgPath,
// or returns (nil, nil) if it has none.
type factImporter = func(pkgPath string) ([]byte, error)

// These four hook variables are a proof of concept of a future
// parameterization of a unitchecker API that allows the client to
// determine how and where facts and types are produced and consumed.
// (Note that the eventual API will likely be quite different.)
//
// The defaults honor a Config in a manner compatible with 'go vet'.
var (
	makeTypesImporter = func(cfg *Config, fset *token.FileSet) types.Importer {
		compilerImporter := importer.ForCompiler(fset, cfg.Compiler, func(path string) (io.ReadCloser, error) {
			// path is a resolved package path, not an import path.
			file, ok := cfg.PackageFile[path]
			if !ok {
				if cfg.Compiler == "gccgo" && cfg.Standard[path] {
					return nil, nil // fall back to default gccgo lookup
				}
				return nil, fmt.Errorf("no package file for %q", path)
			}
			return os.Open(file)
		})
		return importerFunc(func(importPath string) (*types.Package, error) {
			path, ok := cfg.ImportMap[importPath] // resolve vendoring, etc
			if !ok {
				// NOTE(review): path is "" on this branch; the message
				// presumably intends importPath — confirm before changing.
				return nil, fmt.Errorf("can't resolve import %q", path)
			}
			return compilerImporter.Import(path)
		})
	}

	exportTypes = func(*Config, *token.FileSet, *types.Package) error {
		// By default this is a no-op, because "go vet"
		// makes the compiler produce type information.
		return nil
	}

	makeFactImporter = func(cfg *Config) factImporter {
		return func(pkgPath string) ([]byte, error) {
			if vetx, ok := cfg.PackageVetx[pkgPath]; ok {
				return os.ReadFile(vetx)
			}
			return nil, nil // no .vetx file, no facts
		}
	}

	exportFacts = func(cfg *Config, data []byte) error {
		return os.WriteFile(cfg.VetxOutput, data, 0666)
	}
)

// run loads, parses, and type-checks the unit described by cfg, then
// executes the analyzer DAG over it, returning one result per root analyzer.
func run(fset *token.FileSet, cfg *Config, analyzers []*analysis.Analyzer) ([]result, error) {
	// Load, parse, typecheck.
	var files []*ast.File
	for _, name := range cfg.GoFiles {
		f, err := parser.ParseFile(fset, name, nil, parser.ParseComments)
		if err != nil {
			if cfg.SucceedOnTypecheckFailure {
				// Silently succeed; let the compiler
				// report parse errors.
				err = nil
			}
			return nil, err
		}
		files = append(files, f)
	}
	tc := &types.Config{
		Importer:  makeTypesImporter(cfg, fset),
		Sizes:     types.SizesFor("gc", build.Default.GOARCH), // TODO(adonovan): use cfg.Compiler
		GoVersion: cfg.GoVersion,
	}
	info := &types.Info{
		Types:      make(map[ast.Expr]types.TypeAndValue),
		Defs:       make(map[*ast.Ident]types.Object),
		Uses:       make(map[*ast.Ident]types.Object),
		Implicits:  make(map[ast.Node]types.Object),
		Instances:  make(map[*ast.Ident]types.Instance),
		Scopes:     make(map[ast.Node]*types.Scope),
		Selections: make(map[*ast.SelectorExpr]*types.Selection),
	}
	versions.InitFileVersions(info)

	pkg, err := tc.Check(cfg.ImportPath, fset, files, info)
	if err != nil {
		if cfg.SucceedOnTypecheckFailure {
			// Silently succeed; let the compiler
			// report type errors.
			err = nil
		}
		return nil, err
	}

	// Register fact types with gob.
	// In VetxOnly mode, analyzers are only for their facts,
	// so we can skip any analysis that neither produces facts
	// nor depends on any analysis that produces facts.
	//
	// TODO(adonovan): fix: the comment (and logic!) here are backwards.
	// It should say "...nor is required by any...". (Issue 443099)
	//
	// Also build a map to hold working state and result.
	type action struct {
		once        sync.Once
		result      interface{}
		err         error
		usesFacts   bool // (transitively uses)
		diagnostics []analysis.Diagnostic
	}
	actions := make(map[*analysis.Analyzer]*action)
	var registerFacts func(a *analysis.Analyzer) bool
	registerFacts = func(a *analysis.Analyzer) bool {
		act, ok := actions[a]
		if !ok {
			act = new(action)
			var usesFacts bool
			for _, f := range a.FactTypes {
				usesFacts = true
				gob.Register(f)
			}
			for _, req := range a.Requires {
				if registerFacts(req) {
					usesFacts = true
				}
			}
			act.usesFacts = usesFacts
			actions[a] = act
		}
		return act.usesFacts
	}
	var filtered []*analysis.Analyzer
	for _, a := range analyzers {
		if registerFacts(a) || !cfg.VetxOnly {
			filtered = append(filtered, a)
		}
	}
	analyzers = filtered

	// Read facts from imported packages.
	facts, err := facts.NewDecoder(pkg).Decode(makeFactImporter(cfg))
	if err != nil {
		return nil, err
	}

	// In parallel, execute the DAG of analyzers.
	var exec func(a *analysis.Analyzer) *action
	var execAll func(analyzers []*analysis.Analyzer)
	exec = func(a *analysis.Analyzer) *action {
		act := actions[a]
		act.once.Do(func() {
			execAll(a.Requires) // prefetch dependencies in parallel

			// The inputs to this analysis are the
			// results of its prerequisites.
			inputs := make(map[*analysis.Analyzer]interface{})
			var failed []string
			for _, req := range a.Requires {
				reqact := exec(req)
				if reqact.err != nil {
					failed = append(failed, req.String())
					continue
				}
				inputs[req] = reqact.result
			}

			// Report an error if any dependency failed.
			if failed != nil {
				sort.Strings(failed)
				act.err = fmt.Errorf("failed prerequisites: %s", strings.Join(failed, ", "))
				return
			}

			factFilter := make(map[reflect.Type]bool)
			for _, f := range a.FactTypes {
				factFilter[reflect.TypeOf(f)] = true
			}

			pass := &analysis.Pass{
				Analyzer:          a,
				Fset:              fset,
				Files:             files,
				OtherFiles:        cfg.NonGoFiles,
				IgnoredFiles:      cfg.IgnoredFiles,
				Pkg:               pkg,
				TypesInfo:         info,
				TypesSizes:        tc.Sizes,
				TypeErrors:        nil, // unitchecker doesn't RunDespiteErrors
				ResultOf:          inputs,
				Report:            func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) },
				ImportObjectFact:  facts.ImportObjectFact,
				ExportObjectFact:  facts.ExportObjectFact,
				AllObjectFacts:    func() []analysis.ObjectFact { return facts.AllObjectFacts(factFilter) },
				ImportPackageFact: facts.ImportPackageFact,
				ExportPackageFact: facts.ExportPackageFact,
				AllPackageFacts:   func() []analysis.PackageFact { return facts.AllPackageFacts(factFilter) },
			}

			t0 := time.Now()
			act.result, act.err = a.Run(pass)

			if act.err == nil { // resolve URLs on diagnostics.
				for i := range act.diagnostics {
					if url, uerr := analysisflags.ResolveURL(a, act.diagnostics[i]); uerr == nil {
						act.diagnostics[i].URL = url
					} else {
						act.err = uerr // keep the last error
					}
				}
			}
			if false {
				log.Printf("analysis %s = %s", pass, time.Since(t0))
			}
		})
		return act
	}
	execAll = func(analyzers []*analysis.Analyzer) {
		var wg sync.WaitGroup
		for _, a := range analyzers {
			wg.Add(1)
			go func(a *analysis.Analyzer) {
				_ = exec(a)
				wg.Done()
			}(a)
		}
		wg.Wait()
	}
	execAll(analyzers)

	// Return diagnostics and errors from root analyzers.
	results := make([]result, len(analyzers))
	for i, a := range analyzers {
		act := actions[a]
		results[i].a = a
		results[i].err = act.err
		results[i].diagnostics = act.diagnostics
	}

	// Export accumulated facts and (via hook) types for downstream units.
	data := facts.Encode()
	if err := exportFacts(cfg, data); err != nil {
		return nil, fmt.Errorf("failed to export analysis facts: %v", err)
	}
	if err := exportTypes(cfg, fset, pkg); err != nil {
		return nil, fmt.Errorf("failed to export type information: %v", err)
	}

	return results, nil
}

// result pairs a root analyzer with its outcome for this unit.
type result struct {
	a           *analysis.Analyzer
	diagnostics []analysis.Diagnostic
	err         error
}

// importerFunc adapts a function to the types.Importer interface.
type importerFunc func(path string) (*types.Package, error)

func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) }