diff --git a/cmd/ipsw/cmd/fw/aop.go b/cmd/ipsw/cmd/fw/aop.go
index 48a436011..2623229d8 100644
--- a/cmd/ipsw/cmd/fw/aop.go
+++ b/cmd/ipsw/cmd/fw/aop.go
@@ -26,6 +26,8 @@ import (
 	"path/filepath"

 	"github.com/apex/log"
+	"github.com/blacktop/go-macho"
+	"github.com/blacktop/ipsw/internal/magic"
 	"github.com/blacktop/ipsw/pkg/bundle"
 	"github.com/spf13/cobra"
 	"github.com/spf13/viper"
@@ -63,13 +65,21 @@ var aopCmd = &cobra.Command{
 		showInfo := viper.GetBool("fw.aop.info")
 		// output := viper.GetString("fw.aop.output")

-		bn, err := bundle.Parse(filepath.Clean(args[0]))
-		if err != nil {
-			return err
-		}
-
 		if showInfo {
-			fmt.Println(bn)
+			if ok, _ := magic.IsMachO(args[0]); ok { /* MachO binary */
+				m, err := macho.Open(filepath.Clean(args[0]))
+				if err != nil {
+					return fmt.Errorf("failed to parse MachO file: %v", err)
+				}
+				defer m.Close()
+				fmt.Println(m.FileTOC.String())
+			} else {
+				bn, err := bundle.Parse(filepath.Clean(args[0]))
+				if err != nil {
+					return err
+				}
+				fmt.Println(bn)
+			}
 		} else {
 			panic("not implemented")
 		}
diff --git a/cmd/ipsw/cmd/fw/dcp.go b/cmd/ipsw/cmd/fw/dcp.go
index 1ee75df7c..3a65b9000 100644
--- a/cmd/ipsw/cmd/fw/dcp.go
+++ b/cmd/ipsw/cmd/fw/dcp.go
@@ -26,6 +26,8 @@ import (
 	"path/filepath"

 	"github.com/apex/log"
+	"github.com/blacktop/go-macho"
+	"github.com/blacktop/ipsw/internal/magic"
 	"github.com/blacktop/ipsw/pkg/bundle"
 	"github.com/spf13/cobra"
 	"github.com/spf13/viper"
@@ -61,13 +63,21 @@ var dcpCmd = &cobra.Command{
 		showInfo := viper.GetBool("fw.dcp.info")
 		// output := viper.GetString("fw.dcp.output")

-		bn, err := bundle.Parse(filepath.Clean(args[0]))
-		if err != nil {
-			return err
-		}
-
 		if showInfo {
-			fmt.Println(bn)
+			if ok, _ := magic.IsMachO(args[0]); ok { /* MachO binary */
+				m, err := macho.Open(filepath.Clean(args[0]))
+				if err != nil {
+					return fmt.Errorf("failed to parse MachO file: %v", err)
+				}
+				defer m.Close()
+				fmt.Println(m.FileTOC.String())
+			} else {
+				bn, err := bundle.Parse(filepath.Clean(args[0]))
+				if err != nil {
+					return err
+				}
+				fmt.Println(bn)
+			}
 		} else {
 			panic("not implemented")
 		}
diff --git a/cmd/ipsw/cmd/fw/exc.go b/cmd/ipsw/cmd/fw/exc.go
index 24af89760..ca59dc337 100644
--- a/cmd/ipsw/cmd/fw/exc.go
+++ b/cmd/ipsw/cmd/fw/exc.go
@@ -23,11 +23,11 @@ package fw

 import (
 	"fmt"
-	"io"
-	"os"
 	"path/filepath"

 	"github.com/apex/log"
+	fwcmd "github.com/blacktop/ipsw/internal/commands/fw"
+	"github.com/blacktop/ipsw/internal/utils"
 	"github.com/blacktop/ipsw/pkg/bundle"
 	"github.com/spf13/cobra"
 	"github.com/spf13/viper"
@@ -51,8 +51,9 @@ var excCmd = &cobra.Command{
 	Use:     "exclave",
 	Aliases: []string{"exc"},
 	Short:   "🚧 Dump MachOs",
+	Args:    cobra.ExactArgs(1),
 	Hidden:  true,
-	RunE: func(cmd *cobra.Command, args []string) error {
+	RunE: func(cmd *cobra.Command, args []string) (err error) {
 		if viper.GetBool("verbose") {
 			log.SetLevel(log.DebugLevel)
 		}
@@ -62,38 +63,25 @@ var excCmd = &cobra.Command{
 		showInfo := viper.GetBool("fw.exclave.info")
 		output := viper.GetString("fw.exclave.output")

-		bn, err := bundle.Parse(filepath.Clean(args[0]))
-		if err != nil {
-			return err
-		}
-
 		if showInfo {
-			fmt.Println(bn)
-		} else {
-			f, err := os.Open(filepath.Clean(args[0]))
+			bn, err := bundle.Parse(filepath.Clean(args[0]))
 			if err != nil {
-				return fmt.Errorf("failed to open file %s: %v", filepath.Clean(args[0]), err)
+				return fmt.Errorf("failed to parse bundle: %v", err)
 			}
-			defer f.Close()
-
-			for _, bf := range bn.Files {
-				fmt.Println(bf)
-
-				fname := filepath.Join(output, bf.Type, bf.Name)
-				if err := os.MkdirAll(filepath.Dir(fname), 0o750); err != nil {
-					return fmt.Errorf("failed to create directory %s: %v", filepath.Dir(fname), err)
-				}
-				of, err := os.Create(fname)
-				if err != nil {
-					return fmt.Errorf("failed to create file %s: %v", fname, err)
-				}
-				defer of.Close()
+			if bn.Type != 3 {
+				return fmt.Errorf("bundle is not an exclave bundle")
+			}

-				for _, seg := range bf.Segments {
-					f.Seek(int64(seg.Offset), io.SeekStart)
-					io.CopyN(of, f, int64(seg.Size))
-				}
+			fmt.Println(bn)
+		} else {
+			log.Info("Extracting Exclave Bundle")
+			out, err := fwcmd.Extract(filepath.Clean(args[0]), output)
+			if err != nil {
+				return fmt.Errorf("failed to extract files from exclave bundle: %v", err)
+			}
+			for _, f := range out {
+				utils.Indent(log.Info, 2)("Created " + f)
 			}
 		}
diff --git a/go.mod b/go.mod
index e474c0b8b..de3138d0f 100644
--- a/go.mod
+++ b/go.mod
@@ -12,7 +12,7 @@ require (
 	github.com/aymanbagabas/go-udiff v0.2.0
 	github.com/blacktop/arm64-cgo v1.0.57
 	github.com/blacktop/go-dwarf v1.0.10
-	github.com/blacktop/go-macho v1.1.212
+	github.com/blacktop/go-macho v1.1.213
 	github.com/blacktop/go-plist v1.0.2
 	github.com/blacktop/lzfse-cgo v1.1.19
 	github.com/blacktop/lzss v0.1.1
diff --git a/go.sum b/go.sum
index 80f3de770..1c3b19ef7 100644
--- a/go.sum
+++ b/go.sum
@@ -67,8 +67,8 @@ github.com/blacktop/cast v1.5.1 h1:gwXxBZ2XlcNnU57Ws+pTDFYHKKtLdxwBqvUI2wS31wg=
 github.com/blacktop/cast v1.5.1/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU=
 github.com/blacktop/go-dwarf v1.0.10 h1:i9zYgcIROETsNZ6V+zZn3uDH21FCG5BLLZ837GitxS0=
 github.com/blacktop/go-dwarf v1.0.10/go.mod h1:4W2FKgSFYcZLDwnR7k+apv5i3nrau4NGl9N6VQ9DSTo=
-github.com/blacktop/go-macho v1.1.212 h1:zjH7MiZcjBXB5LT5gyCohacABY3llnxxIQ6830/i8XU=
-github.com/blacktop/go-macho v1.1.212/go.mod h1:dtlW2AJKQpFzImBVPWiUKZ6OxrQ2MLfWi/BPPe0EONE=
+github.com/blacktop/go-macho v1.1.213 h1:63gQpzIWJzJ7gMJz+1IuAn++udVQTEZsyLjFYF/Tp+M=
+github.com/blacktop/go-macho v1.1.213/go.mod h1:dtlW2AJKQpFzImBVPWiUKZ6OxrQ2MLfWi/BPPe0EONE=
 github.com/blacktop/go-plist v1.0.2 h1:DMX8uBiE308HWZkO9o37j7Z2b0neI3GSzN2caNa2zXk=
 github.com/blacktop/go-plist v1.0.2/go.mod h1:fqVhCVVXVJWsIGY9QPgdK0mDWZD82HrRMfU5PanvdTA=
 github.com/blacktop/lzfse-cgo v1.1.19 h1:1I/1y9/sjyDLomACLrR4f7GFJ1fTaIjhWb1PQrEtKiw=
diff --git a/internal/commands/extract/extract.go b/internal/commands/extract/extract.go
index a1111cd7e..9e552f801 100644
--- a/internal/commands/extract/extract.go
+++ b/internal/commands/extract/extract.go
@@ -6,7 +6,6 @@ import (
 	"bytes"
 	"encoding/json"
 	"fmt"
-	"io"
 	"net/url"
 	"os"
 	"path/filepath"
@@ -16,9 +15,9 @@ import (

 	"github.com/blacktop/go-macho"
 	"github.com/blacktop/go-plist"
+	fwcmd "github.com/blacktop/ipsw/internal/commands/fw"
 	"github.com/blacktop/ipsw/internal/download"
 	"github.com/blacktop/ipsw/internal/utils"
-	"github.com/blacktop/ipsw/pkg/bundle"
 	"github.com/blacktop/ipsw/pkg/dyld"
 	"github.com/blacktop/ipsw/pkg/img4"
 	"github.com/blacktop/ipsw/pkg/info"
@@ -284,38 +283,11 @@ func Exclave(c *Config) ([]string, error) {
 	}

 	for _, exc := range outfiles {
-		bn, err := bundle.Parse(exc)
+		out, err := fwcmd.Extract(exc, filepath.Dir(exc))
 		if err != nil {
-			return nil, fmt.Errorf("failed to parse exclave bundle: %v", err)
-		}
-
-		f, err := os.Open(exc)
-		if err != nil {
-			return nil, fmt.Errorf("failed to open file %s: %v", exc, err)
-		}
-		defer f.Close()
-
-		for _, bf := range bn.Files {
-			fmt.Println(bf)
-
-			fname := filepath.Join(filepath.Dir(exc), bf.Type, bf.Name)
-			if err := os.MkdirAll(filepath.Dir(fname), 0o750); err != nil {
-				return nil, fmt.Errorf("failed to create directory %s: %v", filepath.Dir(fname), err)
-			}
-
-			of, err := os.Create(fname)
-			if err != nil {
-				return nil, fmt.Errorf("failed to create file %s: %v", fname, err)
-			}
-			defer of.Close()
-
-			for _, seg := range bf.Segments {
-				f.Seek(int64(seg.Offset), io.SeekStart)
-				io.CopyN(of, f, int64(seg.Size))
-			}
-
-			outfiles = append(outfiles, fname)
+			return nil, fmt.Errorf("failed to extract files from exclave bundle: %v", err)
 		}
+		outfiles = append(outfiles, out...)
 	}

 	return outfiles, nil
diff --git a/internal/commands/fw/exclave.go b/internal/commands/fw/exclave.go
new file mode 100644
index 000000000..03ad6d4a4
--- /dev/null
+++ b/internal/commands/fw/exclave.go
@@ -0,0 +1,113 @@
+package fw
+
+import (
+	"bytes"
+	"encoding/binary"
+	"fmt"
+	"io"
+	"os"
+	"path/filepath"
+
+	"github.com/blacktop/go-macho"
+	"github.com/blacktop/go-macho/types"
+	"github.com/blacktop/ipsw/pkg/bundle"
+)
+
+func Extract(input, output string) ([]string, error) {
+	var m *macho.File
+	var outfiles []string
+
+	bn, err := bundle.Parse(input)
+	if err != nil {
+		return nil, fmt.Errorf("failed to parse bundle: %v", err)
+	}
+
+	if bn.Type != 3 {
+		return nil, fmt.Errorf("bundle is not an exclave bundle")
+	}
+
+	f, err := os.Open(input)
+	if err != nil {
+		return nil, fmt.Errorf("failed to open file %s: %v", input, err)
+	}
+	defer f.Close()
+
+	for idx, bf := range bn.Files {
+		fname := filepath.Join(output, bf.Type, bf.Name)
+		if err := os.MkdirAll(filepath.Dir(fname), 0o750); err != nil {
+			return nil, fmt.Errorf("failed to create directory %s: %v", filepath.Dir(fname), err)
+		}
+
+		of, err := os.Create(fname)
+		if err != nil {
+			return nil, fmt.Errorf("failed to create file %s: %v", fname, err)
+		}
+		defer of.Close()
+
+		if len(bf.Segments) == 0 { // FIXME: should this be removed?
+			continue
+		}
+
+		// Get MachO header
+		if entry := bn.Config.TOC[idx].GetEntry(); entry != nil && entry.Type == 2 { // kernel (SYSTEM)
+			if _, err := f.Seek(int64(bn.Config.Assets[idx].Offset), io.SeekStart); err != nil {
+				return nil, fmt.Errorf("failed to seek to offset %d: %v", bn.Config.Assets[idx].Offset, err)
+			}
+			mHdrData := make([]byte, bn.Config.Assets[idx].Size) // __MACHOHEADERLC
+			if err := binary.Read(f, binary.LittleEndian, &mHdrData); err != nil {
+				return nil, fmt.Errorf("failed to read data from file %s: %v", fname, err)
+			}
+			m, err = macho.NewFile(bytes.NewReader(mHdrData), macho.FileConfig{
+				LoadIncluding: []types.LoadCmd{types.LC_SEGMENT_64},
+			})
+			if err != nil {
+				return nil, fmt.Errorf("failed to parse MachO file: %v", err)
+			}
+			defer m.Close()
+			// write MACHOHEADERLC to output file
+			if _, err := of.Write(mHdrData); err != nil {
+				return nil, fmt.Errorf("failed to write data to file %s: %v", fname, err)
+			}
+		} else {
+			if text := bf.Segment("TEXT"); text == nil {
+				return nil, fmt.Errorf("failed to find TEXT segment")
+			} else {
+				if _, err := f.Seek(int64(text.Offset), io.SeekStart); err != nil {
+					return nil, fmt.Errorf("failed to seek to offset %d: %v", text.Offset, err)
+				}
+				tdata := make([]byte, text.Size)
+				if err := binary.Read(f, binary.LittleEndian, &tdata); err != nil {
+					return nil, fmt.Errorf("failed to read data from file %s: %v", fname, err)
+				}
+				m, err = macho.NewFile(bytes.NewReader(tdata), macho.FileConfig{
+					LoadIncluding: []types.LoadCmd{types.LC_SEGMENT_64},
+				})
+				if err != nil {
+					return nil, fmt.Errorf("failed to parse MachO file: %v", err)
+				}
+				defer m.Close()
+			}
+		}
+
+		for _, seg := range bf.Segments {
+			if _, err := f.Seek(int64(seg.Offset), io.SeekStart); err != nil {
+				return nil, fmt.Errorf("failed to seek to offset %d: %v", seg.Offset, err)
+			}
+			data := make([]byte, seg.Size)
+			if err := binary.Read(f, binary.LittleEndian, &data); err != nil {
+				return nil, fmt.Errorf("failed to read data from file %s: %v", fname, err)
+			}
+			if s := m.Segment("__" + seg.Name); s == nil { // lookup segment in MachO header
+				return nil, fmt.Errorf("failed to find segment %s", seg.Name)
+			} else {
+				if _, err := of.WriteAt(data, int64(s.Offset)); err != nil {
+					return nil, fmt.Errorf("failed to write data to file %s: %v", fname, err)
+				}
+			}
+		}
+
+		outfiles = append(outfiles, fname)
+	}
+
+	return outfiles, nil
+}
diff --git a/pkg/bundle/bundle.go b/pkg/bundle/bundle.go
index 4f43599de..8193a6fb2 100644
--- a/pkg/bundle/bundle.go
+++ b/pkg/bundle/bundle.go
@@ -44,15 +44,15 @@ func (b Bundle) String() string {
 	s += "  Config:\n"
 	s += fmt.Sprintf("    Unk1: %d\n", b.Config.Unk1)
 	s += fmt.Sprintf("    Unk2: %d\n", b.Config.Unk2)
-	s += "    Header:\n"
-	for i, h := range b.Config.Header {
+	s += "    Assets:\n"
+	for i, h := range b.Config.Assets {
 		s += fmt.Sprintf("      %3s) %s\n", fmt.Sprintf("%d", i+1), h)
 	}
 	s += "    TOC:\n"
 	for _, t := range b.Config.TOC {
 		s += fmt.Sprintf("      %s\n", t)
 	}
-	s += "Files:\n"
+	s += "Compartments:\n"
 	for _, f := range b.Files {
 		s += fmt.Sprintf("%s\n", f)
 	}
@@ -85,6 +85,15 @@ type File struct {
 	Endpoints []Endpoint
 }

+func (f File) Segment(name string) *Segment {
+	for _, seg := range f.Segments {
+		if seg.Name == name {
+			return &seg
+		}
+	}
+	return nil
+}
+
 func (f File) String() string {
 	s := fmt.Sprintf("  %s (%s)\n", f.Name, f.Type)
 	for _, seg := range f.Segments {
@@ -162,44 +171,44 @@ type Type3 struct {
 }

 type Type4 struct {
-	Unk0       uint32     // 1
-	Unk1       uint32     // 0xc == 11
-	_          uint64     // padding ?
-	Unk2       uint64     // F000h
-	_          [4]uint64  // padding ?
-	Unk3       uint64     // C000h
-	_          [2]uint64  // padding ?
-	Unk4       uint64     // 3 ?
-	_          uint64     // padding ?
-	Unk5       uint64     // 16000h ?
-	_          uint64     // padding ?
-	_          uint64     // padding ?
-	NumRanges  uint64     // 0xD == 12 ?
-	UUID       types.UUID // 636B62C3-4647-34F7-9089-A58256078A27
-	_          uint64     // padding ?
-	Unk7       uint64     // 14000h ?
-	Unk7again  uint64     // 14000h ?
-	Unk8       uint64     // 8000000h ?
-	Unk9       uint64     // 1 ?
-	_          uint64     // padding ?
-	Unk10      uint64     // C000h ?
-	Unk10again uint64     // C000h ?
-	Unk11      uint64     // 8014000h ?
-	Unk12      uint64     // 4 ?
-	_          [3]uint64  // padding ?
-	Unk13      uint64     // 8020000h ?
-	Unk14      uint64     // 6 ?
-	Unk15      uint64     // 8003E80h ?
-	_          [36]uint64 // padding ?
-	Unk17      uint64     // 0xa == 10 ?
-	_          uint64     // padding ?
-	Unk18      uint64     // 16000h ?
-	Unk19      uint64     // 1582Ch ?
-	_          uint64     // padding ?
-	Unk20      uint64     // 0xa == 10 ?
-	_          uint64     // padding ?
-	Unk21      uint64     // F000h ?
-	Ranges     [0xD]typ4Range
+	Unk0       uint32         // 1
+	Unk1       uint32         // 0xc == 11
+	_          uint64         // padding ?
+	Unk2       uint64         // F000h
+	_          [4]uint64      // padding ?
+	Unk3       uint64         // C000h
+	_          [2]uint64      // padding ?
+	Unk4       uint64         // 3 ?
+	_          uint64         // padding ?
+	Unk5       uint64         // 16000h ?
+	_          uint64         // padding ?
+	_          uint64         // padding ?
+	NumRanges  uint64         // 0xD == 12 ?
+	UUID       types.UUID     // 636B62C3-4647-34F7-9089-A58256078A27
+	_          uint64         // padding ?
+	Unk7       uint64         // 14000h ?
+	Unk7again  uint64         // 14000h ?
+	Unk8       uint64         // 8000000h ?
+	Unk9       uint64         // 1 ?
+	_          uint64         // padding ?
+	Unk10      uint64         // C000h ?
+	Unk10again uint64         // C000h ?
+	Unk11      uint64         // 8014000h ?
+	Unk12      uint64         // 4 ?
+	_          [3]uint64      // padding ?
+	Unk13      uint64         // 8020000h ?
+	Unk14      uint64         // 6 ?
+	Unk15      uint64         // 8003E80h ?
+	_          [36]uint64     // padding ?
+	Unk17      uint64         // 0xa == 10 ?
+	_          uint64         // padding ?
+	Unk18      uint64         // 16000h ?
+	Unk19      uint64         // 1582Ch ?
+	_          uint64         // padding ?
+	Unk20      uint64         // 0xa == 10 ?
+	_          uint64         // padding ?
+	Unk21      uint64         // F000h ?
+	Ranges     [0xD]typ4Range // FIXME: this should be read AFTER the Type4 header is read
 }

 type typ4Range struct {
@@ -218,14 +227,14 @@ func (t4 typ4Range) String() string {
 }

 type Config struct {
-	Unk1   int
-	Unk2   int
-	Header []hdrPart
-	TOC    []tocEntry
-	Files  []ConfigFile
+	Unk1         int
+	Unk2         int
+	Assets       []Asset
+	TOC          []TocEntry
+	Compartments []Compartment
 }

-type hdrPart struct {
+type Asset struct {
 	Raw    asn1.RawContent
 	Name   asn1.RawValue
 	Type   int
@@ -233,37 +242,44 @@ type hdrPart struct {
 	Size   int
 }

-func (h hdrPart) String() string {
+func (h Asset) String() string {
 	return fmt.Sprintf("%15s type=%d off=%#07x sz=%#x", h.Name.Bytes, h.Type, h.Offset, h.Size)
 }

-type tocEntry struct {
+type TocEntry struct {
 	Index int
 	Entry asn1.RawValue `asn1:"optional"`
 }

-type tocEntryType struct {
+type TocEntryType struct {
 	Name asn1.RawValue
 	Type int
 }

-func (t tocEntry) String() string {
+func (t TocEntry) GetEntry() *TocEntryType {
 	if len(t.Entry.Bytes) > 0 {
-		var typ tocEntryType
+		var typ TocEntryType
 		if _, err := asn1.Unmarshal(t.Entry.Bytes, &typ); err == nil {
-			return fmt.Sprintf("%3d) %15s type=%d", t.Index, typ.Name.Bytes, typ.Type)
+			return &typ
 		}
 	}
+	return nil
+}
+
+func (t TocEntry) String() string {
+	if entry := t.GetEntry(); entry != nil {
+		return fmt.Sprintf("%3d) %15s type=%d", t.Index, entry.Name.Bytes, entry.Type)
+	}
 	return fmt.Sprintf("%3d) %s", t.Index, "nil")
 }

-type ConfigFile struct {
-	Raw   asn1.RawContent
-	Index int
-	Info  []Info
+type Compartment struct {
+	Raw      asn1.RawContent
+	AppUID   int
+	Metadata []metadata
 }

-type Info struct {
+type metadata struct {
 	Raw   asn1.RawContent
 	Key   asn1.RawValue
 	Value asn1.RawValue
@@ -280,64 +296,64 @@ func (e Endpoint) String() string {
 	return string(e.Name.Bytes)
 }

-func (i Info) ParseValue() (any, error) {
-	if bytes.HasPrefix(i.Key.Bytes, []byte("__COMPONENT")) {
-		return string(i.Value.Bytes), nil
+func (md metadata) ParseValue() (any, error) {
+	if bytes.HasPrefix(md.Key.Bytes, []byte("__COMPONENT")) {
+		return string(md.Value.Bytes), nil
 	}
-	if bytes.HasPrefix(i.Key.Bytes, []byte("__ENDPOINT")) {
+	if bytes.HasPrefix(md.Key.Bytes, []byte("__ENDPOINT")) {
 		var e Endpoint
-		if _, err := asn1.Unmarshal(i.Value.Bytes, &e); err == nil {
+		if _, err := asn1.Unmarshal(md.Value.Bytes, &e); err == nil {
 			return e, nil
 		} else {
 			return nil, fmt.Errorf("failed to unmarshal bundle file info value: %v", err)
 		}
 	}
-	if len(i.Value.Bytes) <= 8 {
+	if len(md.Value.Bytes) <= 8 {
 		var num uint64
-		for idx, b := range i.Value.Bytes {
-			num |= uint64(b) << (8 * uint64(len(i.Value.Bytes)-1-idx))
+		for idx, b := range md.Value.Bytes {
+			num |= uint64(b) << (8 * uint64(len(md.Value.Bytes)-1-idx))
 		}
 		return num, nil
 	}
-	return i.Value.Bytes, nil
+	return md.Value.Bytes, nil
 }

-func (i Info) String() string {
-	val, err := i.ParseValue()
+func (md metadata) String() string {
+	val, err := md.ParseValue()
 	if err != nil {
 		return fmt.Sprintf("[ERROR] failed to parse value: %v", err)
 	}
 	switch v := val.(type) {
 	case string:
-		return fmt.Sprintf("%s: %s", string(i.Key.Bytes), v)
+		return fmt.Sprintf("%s: %s", string(md.Key.Bytes), v)
 	case Endpoint:
-		return fmt.Sprintf("%s: %s", string(i.Key.Bytes), v)
+		return fmt.Sprintf("%s: %s", string(md.Key.Bytes), v)
 	case uint64:
-		if len(i.Value.Bytes) == 1 {
-			return fmt.Sprintf("%s: %d", string(i.Key.Bytes), v)
+		if len(md.Value.Bytes) == 1 {
+			return fmt.Sprintf("%s: %d", string(md.Key.Bytes), v)
 		}
-		return fmt.Sprintf("%s: %#x", string(i.Key.Bytes), v)
+		return fmt.Sprintf("%s: %#x", string(md.Key.Bytes), v)
 	default:
-		return fmt.Sprintf("%s: %v", string(i.Key.Bytes), v)
+		return fmt.Sprintf("%s: %v", string(md.Key.Bytes), v)
 	}
 }

 func (b *Bundle) ParseFiles() error {
-	for _, bf := range b.Config.Files {
+	for _, bf := range b.Config.Compartments {
 		var f File
 		var sec Section
 		var seg Segment
 		entpoints := make(map[int]Endpoint, 0)
-		for _, i := range bf.Info {
-			val, err := i.ParseValue()
+		for _, md := range bf.Metadata {
+			val, err := md.ParseValue()
 			if err != nil {
 				return fmt.Errorf("failed to parse bundle file info value: %v", err)
 			}
-			if strings.EqualFold(string(i.Key.Bytes), "__COMPONENTNAME") {
+			if strings.EqualFold(string(md.Key.Bytes), "__COMPONENTNAME") {
 				f.Name = val.(string)
-			} else if strings.EqualFold(string(i.Key.Bytes), "__COMPONENTTYPE") {
+			} else if strings.EqualFold(string(md.Key.Bytes), "__COMPONENTTYPE") {
 				f.Type = val.(string)
-			} else if _, secpart, ok := strings.Cut(string(i.Key.Bytes), "__MACHO__"); ok { // SECTION
+			} else if _, secpart, ok := strings.Cut(string(md.Key.Bytes), "__MACHO__"); ok { // SECTION
 				if name, _, ok := strings.Cut(secpart, "OFF"); ok {
 					sec.Name = name
 					sec.Offset = val.(uint64)
@@ -352,7 +368,7 @@ func (b *Bundle) ParseFiles() error {
 					f.Sections = append(f.Sections, sec)
 					sec = Section{}
 				}
-			} else if _, segpart, ok := strings.Cut(string(i.Key.Bytes), "__MACHO"); ok { // SEGMENT
+			} else if _, segpart, ok := strings.Cut(string(md.Key.Bytes), "__MACHO"); ok { // SEGMENT
 				if name, _, ok := strings.Cut(segpart, "OFF"); ok {
 					seg.Name = name
 					seg.Offset = val.(uint64)
@@ -367,7 +383,7 @@ func (b *Bundle) ParseFiles() error {
 					f.Segments = append(f.Segments, seg)
 					seg = Segment{}
 				}
-			} else if _, idx, ok := strings.Cut(string(i.Key.Bytes), "__ENDPOINT__"); ok { // ENDPOINT
+			} else if _, idx, ok := strings.Cut(string(md.Key.Bytes), "__ENDPOINT__"); ok { // ENDPOINT
 				if i, err := strconv.Atoi(idx); err == nil {
 					entpoints[i] = val.(Endpoint)
 				} else {