2021-04-06 19:39:02 +00:00
|
|
|
// Copyright (c) 2021 The Srpmproc Authors
|
|
|
|
//
|
|
|
|
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
|
|
// of this software and associated documentation files (the "Software"), to deal
|
|
|
|
// in the Software without restriction, including without limitation the rights
|
|
|
|
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
|
|
// copies of the Software, and to permit persons to whom the Software is
|
|
|
|
// furnished to do so, subject to the following conditions:
|
|
|
|
//
|
|
|
|
// The above copyright notice and this permission notice shall be included in all
|
|
|
|
// copies or substantial portions of the Software.
|
|
|
|
//
|
|
|
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
|
|
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
|
|
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
|
|
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
|
|
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
|
|
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
|
|
// SOFTWARE.
|
|
|
|
|
2021-09-03 21:07:02 +00:00
|
|
|
package modes
|
2020-12-20 09:36:13 +00:00
|
|
|
|
|
|
|
import (
|
2023-02-23 04:23:47 +00:00
|
|
|
"bytes"
|
2020-12-20 09:36:13 +00:00
|
|
|
"fmt"
|
2024-06-10 15:31:26 +00:00
|
|
|
"io"
|
2023-02-23 17:31:31 +00:00
|
|
|
"log"
|
2020-12-20 09:36:13 +00:00
|
|
|
"net/http"
|
|
|
|
"path/filepath"
|
|
|
|
"sort"
|
|
|
|
"strings"
|
2023-02-23 04:23:47 +00:00
|
|
|
"text/template"
|
2021-01-01 19:09:23 +00:00
|
|
|
"time"
|
2021-04-15 04:41:12 +00:00
|
|
|
|
2022-11-06 03:53:02 +00:00
|
|
|
"github.com/go-git/go-git/v5/plumbing/transport"
|
|
|
|
"github.com/rocky-linux/srpmproc/pkg/misc"
|
|
|
|
|
2021-04-15 04:41:12 +00:00
|
|
|
"github.com/go-git/go-billy/v5/memfs"
|
|
|
|
"github.com/go-git/go-git/v5"
|
|
|
|
"github.com/go-git/go-git/v5/config"
|
|
|
|
"github.com/go-git/go-git/v5/plumbing"
|
|
|
|
"github.com/go-git/go-git/v5/plumbing/object"
|
|
|
|
"github.com/go-git/go-git/v5/storage/memory"
|
2021-08-19 10:09:53 +00:00
|
|
|
"github.com/rocky-linux/srpmproc/pkg/data"
|
2020-12-20 09:36:13 +00:00
|
|
|
)
|
|
|
|
|
2021-01-01 19:09:23 +00:00
|
|
|
// remoteTarget pairs a remote ref name with the timestamp of its tagger or
// committer, so candidate import refs can be ordered chronologically.
type remoteTarget struct {
	remote string    // full ref name, e.g. "refs/tags/imports/..." or "refs/heads/..."
	when   time.Time // tagger/committer time used for ordering and newest-wins selection
}
|
|
|
|
|
2023-02-23 18:57:21 +00:00
|
|
|
// Lookaside defines the possible template values ( {{.Value}} ) that can be
// substituted into CDN URL strings.
type Lookaside struct {
	Name     string // package name
	Branch   string // branch the sources belong to
	Hash     string // lookaside blob checksum
	Hashtype string // one of "sha512", "sha256", "sha1", "md5"
	Filename string // source file name (without the "SOURCES/" prefix)
}
|
|
|
|
|
2021-01-01 19:09:23 +00:00
|
|
|
// remoteTargetSlice implements sort.Interface so that collected remote
// targets can be sorted oldest-first by timestamp.
type remoteTargetSlice []remoteTarget

// Len returns the number of targets.
func (p remoteTargetSlice) Len() int {
	return len(p)
}

// Less orders targets chronologically by their "when" timestamp.
func (p remoteTargetSlice) Less(i, j int) bool {
	return p[i].when.Before(p[j].when)
}

// Swap exchanges two elements in place.
func (p remoteTargetSlice) Swap(i, j int) {
	p[i], p[j] = p[j], p[i]
}
|
|
|
|
|
2020-12-20 09:36:13 +00:00
|
|
|
// GitMode retrieves and writes package sources using an upstream git
// (dist-git) repository as the origin. It is stateless; all state lives in
// the ProcessData/ModeData passed to its methods.
type GitMode struct{}
|
|
|
|
|
2021-09-03 21:07:02 +00:00
|
|
|
func (g *GitMode) RetrieveSource(pd *data.ProcessData) (*data.ModeData, error) {
|
2020-12-20 09:36:13 +00:00
|
|
|
repo, err := git.Init(memory.NewStorage(), memfs.New())
|
|
|
|
if err != nil {
|
2021-09-03 21:07:02 +00:00
|
|
|
return nil, fmt.Errorf("could not init git Repo: %v", err)
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
w, err := repo.Worktree()
|
|
|
|
if err != nil {
|
2021-09-03 21:07:02 +00:00
|
|
|
return nil, fmt.Errorf("could not get Worktree: %v", err)
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
refspec := config.RefSpec("+refs/heads/*:refs/remotes/*")
|
|
|
|
remote, err := repo.CreateRemote(&config.RemoteConfig{
|
|
|
|
Name: "upstream",
|
2021-04-11 22:36:35 +00:00
|
|
|
URLs: []string{fmt.Sprintf("%s.git", pd.RpmLocation)},
|
2020-12-20 09:36:13 +00:00
|
|
|
Fetch: []config.RefSpec{refspec},
|
|
|
|
})
|
|
|
|
if err != nil {
|
2021-09-03 21:07:02 +00:00
|
|
|
return nil, fmt.Errorf("could not create remote: %v", err)
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
|
|
|
|
2022-03-25 17:40:09 +00:00
|
|
|
fetchOpts := &git.FetchOptions{
|
2022-03-25 16:39:50 +00:00
|
|
|
Auth: pd.Authenticator,
|
2021-02-21 13:51:30 +00:00
|
|
|
RefSpecs: []config.RefSpec{refspec},
|
|
|
|
Tags: git.AllTags,
|
|
|
|
Force: true,
|
2022-03-25 17:40:09 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
err = remote.Fetch(fetchOpts)
|
2020-12-20 09:36:13 +00:00
|
|
|
if err != nil {
|
2022-03-25 17:54:23 +00:00
|
|
|
if err == transport.ErrInvalidAuthMethod || err == transport.ErrAuthenticationRequired {
|
2022-03-25 17:40:09 +00:00
|
|
|
fetchOpts.Auth = nil
|
|
|
|
err = remote.Fetch(fetchOpts)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("could not fetch upstream: %v", err)
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
return nil, fmt.Errorf("could not fetch upstream: %v", err)
|
|
|
|
}
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
|
|
|
|
2021-01-01 19:09:23 +00:00
|
|
|
var branches remoteTargetSlice
|
2020-12-20 09:36:13 +00:00
|
|
|
|
2021-03-08 22:06:52 +00:00
|
|
|
latestTags := map[string]*remoteTarget{}
|
|
|
|
|
2021-02-21 13:51:30 +00:00
|
|
|
tagAdd := func(tag *object.Tag) error {
|
|
|
|
if strings.HasPrefix(tag.Name, fmt.Sprintf("imports/%s%d", pd.ImportBranchPrefix, pd.Version)) {
|
2021-03-08 22:06:52 +00:00
|
|
|
refSpec := fmt.Sprintf("refs/tags/%s", tag.Name)
|
2022-04-21 04:30:33 +00:00
|
|
|
if misc.GetTagImportRegex(pd).MatchString(refSpec) {
|
|
|
|
match := misc.GetTagImportRegex(pd).FindStringSubmatch(refSpec)
|
2021-03-08 22:06:52 +00:00
|
|
|
|
|
|
|
exists := latestTags[match[2]]
|
|
|
|
if exists != nil && exists.when.After(tag.Tagger.When) {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
latestTags[match[2]] = &remoteTarget{
|
|
|
|
remote: refSpec,
|
|
|
|
when: tag.Tagger.When,
|
|
|
|
}
|
|
|
|
}
|
2020-12-22 16:28:04 +00:00
|
|
|
}
|
2021-01-01 19:09:23 +00:00
|
|
|
return nil
|
2021-02-21 13:51:30 +00:00
|
|
|
}
|
|
|
|
|
2022-09-28 03:32:50 +00:00
|
|
|
// In case of "tagless mode", we need to get the head ref of the branch instead
|
|
|
|
// This is a kind of alternative implementation of the above tagAdd assignment
|
|
|
|
refAdd := func(tag *object.Tag) error {
|
|
|
|
if misc.TaglessRefOk(tag.Name, pd) {
|
|
|
|
pd.Log.Printf("Tagless mode: Identified tagless commit for import: %s\n", tag.Name)
|
|
|
|
refSpec := fmt.Sprintf(tag.Name)
|
|
|
|
|
|
|
|
// We split the string by "/", the branch name we're looking for to pass to latestTags is always last
|
|
|
|
// (ex: "refs/heads/c9s" ---> we want latestTags[c9s]
|
|
|
|
tmpRef := strings.Split(refSpec, "/")
|
|
|
|
tmpBranchName := tmpRef[(len(tmpRef) - 1)]
|
|
|
|
|
|
|
|
latestTags[tmpBranchName] = &remoteTarget{
|
|
|
|
remote: refSpec,
|
|
|
|
when: tag.Tagger.When,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2021-02-21 13:51:30 +00:00
|
|
|
tagIter, err := repo.TagObjects()
|
|
|
|
if err != nil {
|
2021-09-03 21:07:02 +00:00
|
|
|
return nil, fmt.Errorf("could not get tag objects: %v", err)
|
2021-02-21 13:51:30 +00:00
|
|
|
}
|
2022-09-28 03:32:50 +00:00
|
|
|
|
|
|
|
// tagless mode means we use "refAdd" (add commit by reference)
|
|
|
|
// normal mode means we can rely on "tagAdd" (the tag should be present for us in the source repo)
|
|
|
|
if pd.TaglessMode {
|
|
|
|
_ = tagIter.ForEach(refAdd)
|
|
|
|
} else {
|
|
|
|
_ = tagIter.ForEach(tagAdd)
|
|
|
|
}
|
2021-02-21 13:51:30 +00:00
|
|
|
|
2022-03-25 17:40:09 +00:00
|
|
|
listOpts := &git.ListOptions{
|
2022-03-25 16:39:50 +00:00
|
|
|
Auth: pd.Authenticator,
|
2022-03-25 17:40:09 +00:00
|
|
|
}
|
|
|
|
list, err := remote.List(listOpts)
|
2021-04-08 16:36:30 +00:00
|
|
|
if err != nil {
|
2022-03-25 17:54:23 +00:00
|
|
|
if err == transport.ErrInvalidAuthMethod || err == transport.ErrAuthenticationRequired {
|
2022-03-25 17:40:09 +00:00
|
|
|
listOpts.Auth = nil
|
|
|
|
list, err = remote.List(listOpts)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("could not list upstream: %v", err)
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
return nil, fmt.Errorf("could not list upstream: %v", err)
|
|
|
|
}
|
2021-04-08 16:36:30 +00:00
|
|
|
}
|
2021-02-21 13:51:30 +00:00
|
|
|
|
2021-04-08 16:36:30 +00:00
|
|
|
for _, ref := range list {
|
|
|
|
if ref.Hash().IsZero() {
|
|
|
|
continue
|
|
|
|
}
|
2021-02-21 13:51:30 +00:00
|
|
|
|
2021-04-08 16:36:30 +00:00
|
|
|
commit, err := repo.CommitObject(ref.Hash())
|
|
|
|
if err != nil {
|
|
|
|
continue
|
2021-02-21 13:51:30 +00:00
|
|
|
}
|
2022-09-28 03:32:50 +00:00
|
|
|
|
|
|
|
// Call refAdd instead of tagAdd in the case of TaglessMode enabled
|
|
|
|
if pd.TaglessMode {
|
|
|
|
_ = refAdd(&object.Tag{
|
|
|
|
Name: string(ref.Name()),
|
|
|
|
Tagger: commit.Committer,
|
|
|
|
})
|
|
|
|
} else {
|
|
|
|
_ = tagAdd(&object.Tag{
|
|
|
|
Name: strings.TrimPrefix(string(ref.Name()), "refs/tags/"),
|
|
|
|
Tagger: commit.Committer,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-02-21 13:51:30 +00:00
|
|
|
}
|
|
|
|
|
2021-03-08 22:06:52 +00:00
|
|
|
for _, branch := range latestTags {
|
2022-01-05 14:42:49 +00:00
|
|
|
pd.Log.Printf("tag: %s", strings.TrimPrefix(branch.remote, "refs/tags/"))
|
2021-03-08 22:06:52 +00:00
|
|
|
branches = append(branches, *branch)
|
|
|
|
}
|
2021-01-01 19:09:23 +00:00
|
|
|
sort.Sort(branches)
|
|
|
|
|
|
|
|
var sortedBranches []string
|
|
|
|
for _, branch := range branches {
|
|
|
|
sortedBranches = append(sortedBranches, branch.remote)
|
2020-12-22 16:28:04 +00:00
|
|
|
}
|
2020-12-20 09:36:13 +00:00
|
|
|
|
2021-02-24 07:27:51 +00:00
|
|
|
return &data.ModeData{
|
2021-09-03 21:07:02 +00:00
|
|
|
Name: filepath.Base(pd.RpmLocation),
|
2021-02-24 07:27:51 +00:00
|
|
|
Repo: repo,
|
|
|
|
Worktree: w,
|
|
|
|
FileWrites: nil,
|
|
|
|
Branches: sortedBranches,
|
2021-09-03 21:07:02 +00:00
|
|
|
}, nil
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
|
|
|
|
2021-09-03 21:07:02 +00:00
|
|
|
func (g *GitMode) WriteSource(pd *data.ProcessData, md *data.ModeData) error {
|
2021-02-24 07:27:51 +00:00
|
|
|
remote, err := md.Repo.Remote("upstream")
|
2022-09-28 03:32:50 +00:00
|
|
|
|
|
|
|
if err != nil && !pd.TaglessMode {
|
2021-09-03 21:07:02 +00:00
|
|
|
return fmt.Errorf("could not get upstream remote: %v", err)
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
|
|
|
|
2020-12-22 16:28:04 +00:00
|
|
|
var refspec config.RefSpec
|
|
|
|
var branchName string
|
|
|
|
|
2022-09-28 03:32:50 +00:00
|
|
|
// In the case of tagless mode, we already have the transformed repo sitting in the worktree,
|
|
|
|
// and don't need to perform any checkout or fetch operations
|
|
|
|
if !pd.TaglessMode {
|
|
|
|
if strings.HasPrefix(md.TagBranch, "refs/heads") {
|
|
|
|
refspec = config.RefSpec(fmt.Sprintf("+%s:%s", md.TagBranch, md.TagBranch))
|
|
|
|
branchName = strings.TrimPrefix(md.TagBranch, "refs/heads/")
|
|
|
|
} else {
|
|
|
|
match := misc.GetTagImportRegex(pd).FindStringSubmatch(md.TagBranch)
|
|
|
|
branchName = match[2]
|
|
|
|
refspec = config.RefSpec(fmt.Sprintf("+refs/heads/%s:%s", branchName, md.TagBranch))
|
|
|
|
fmt.Println("Found branchname that does not start w/ refs/heads :: ", branchName)
|
|
|
|
}
|
|
|
|
pd.Log.Printf("checking out upstream refspec %s", refspec)
|
|
|
|
|
|
|
|
fetchOpts := &git.FetchOptions{
|
|
|
|
Auth: pd.Authenticator,
|
|
|
|
RemoteName: "upstream",
|
|
|
|
RefSpecs: []config.RefSpec{refspec},
|
|
|
|
Tags: git.AllTags,
|
|
|
|
Force: true,
|
|
|
|
}
|
|
|
|
err = remote.Fetch(fetchOpts)
|
|
|
|
if err != nil && err != git.NoErrAlreadyUpToDate {
|
|
|
|
if err == transport.ErrInvalidAuthMethod || err == transport.ErrAuthenticationRequired {
|
|
|
|
fetchOpts.Auth = nil
|
|
|
|
err = remote.Fetch(fetchOpts)
|
|
|
|
if err != nil && err != git.NoErrAlreadyUpToDate {
|
|
|
|
return fmt.Errorf("could not fetch upstream: %v", err)
|
|
|
|
}
|
|
|
|
} else {
|
2022-03-25 17:40:09 +00:00
|
|
|
return fmt.Errorf("could not fetch upstream: %v", err)
|
|
|
|
}
|
|
|
|
}
|
2020-12-20 09:36:13 +00:00
|
|
|
|
2022-09-28 03:32:50 +00:00
|
|
|
err = md.Worktree.Checkout(&git.CheckoutOptions{
|
|
|
|
Branch: plumbing.ReferenceName(md.TagBranch),
|
|
|
|
Force: true,
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("could not checkout source from git: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
_, err = md.Worktree.Add(".")
|
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("could not add Worktree: %v", err)
|
|
|
|
}
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
|
|
|
|
2022-09-28 03:32:50 +00:00
|
|
|
if pd.TaglessMode {
|
|
|
|
branchName = fmt.Sprintf("%s%d%s", pd.ImportBranchPrefix, pd.Version, pd.BranchSuffix)
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
|
|
|
|
2022-05-09 17:57:43 +00:00
|
|
|
metadataPath := ""
|
|
|
|
ls, err := md.Worktree.Filesystem.ReadDir(".")
|
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("could not read directory: %v", err)
|
|
|
|
}
|
|
|
|
for _, f := range ls {
|
|
|
|
if strings.HasSuffix(f.Name(), ".metadata") {
|
|
|
|
if metadataPath != "" {
|
|
|
|
return fmt.Errorf("multiple metadata files found")
|
|
|
|
}
|
|
|
|
metadataPath = f.Name()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if metadataPath == "" {
|
|
|
|
metadataPath = fmt.Sprintf(".%s.metadata", md.Name)
|
|
|
|
}
|
|
|
|
|
|
|
|
metadataFile, err := md.Worktree.Filesystem.Open(metadataPath)
|
2020-12-20 09:36:13 +00:00
|
|
|
if err != nil {
|
2022-01-05 14:42:49 +00:00
|
|
|
pd.Log.Printf("warn: could not open metadata file, so skipping: %v", err)
|
2021-09-03 21:07:02 +00:00
|
|
|
return nil
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
|
|
|
|
2024-06-10 15:31:26 +00:00
|
|
|
fileBytes, err := io.ReadAll(metadataFile)
|
2020-12-20 09:36:13 +00:00
|
|
|
if err != nil {
|
2021-09-03 21:07:02 +00:00
|
|
|
return fmt.Errorf("could not read metadata file: %v", err)
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
client := &http.Client{
|
|
|
|
Transport: &http.Transport{
|
|
|
|
DisableCompression: false,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
fileContent := strings.Split(string(fileBytes), "\n")
|
|
|
|
for _, line := range fileContent {
|
|
|
|
if strings.TrimSpace(line) == "" {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
2020-12-22 05:13:27 +00:00
|
|
|
lineInfo := strings.SplitN(line, " ", 2)
|
|
|
|
hash := strings.TrimSpace(lineInfo[0])
|
|
|
|
path := strings.TrimSpace(lineInfo[1])
|
2020-12-20 09:36:13 +00:00
|
|
|
|
2021-02-19 15:22:36 +00:00
|
|
|
var body []byte
|
|
|
|
|
2021-02-24 07:27:51 +00:00
|
|
|
if md.BlobCache[hash] != nil {
|
|
|
|
body = md.BlobCache[hash]
|
2022-01-05 14:42:49 +00:00
|
|
|
pd.Log.Printf("retrieving %s from cache", hash)
|
2021-02-19 15:22:36 +00:00
|
|
|
} else {
|
2021-09-10 20:31:59 +00:00
|
|
|
fromBlobStorage, err := pd.BlobStorage.Read(hash)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2021-02-24 07:27:51 +00:00
|
|
|
if fromBlobStorage != nil && !pd.NoStorageDownload {
|
2021-02-19 15:22:36 +00:00
|
|
|
body = fromBlobStorage
|
2022-01-05 14:42:49 +00:00
|
|
|
pd.Log.Printf("downloading %s from blob storage", hash)
|
2021-02-19 15:22:36 +00:00
|
|
|
} else {
|
2022-09-28 03:32:50 +00:00
|
|
|
|
|
|
|
url := ""
|
|
|
|
|
2023-02-23 04:23:47 +00:00
|
|
|
// We need to figure out the hashtype for templating purposes:
|
|
|
|
hashType := "sha512"
|
|
|
|
switch len(hash) {
|
|
|
|
case 128:
|
|
|
|
hashType = "sha512"
|
|
|
|
case 64:
|
|
|
|
hashType = "sha256"
|
|
|
|
case 40:
|
|
|
|
hashType = "sha1"
|
|
|
|
case 32:
|
|
|
|
hashType = "md5"
|
2022-09-28 03:32:50 +00:00
|
|
|
}
|
|
|
|
|
2023-02-23 04:23:47 +00:00
|
|
|
// need the name of the file without "SOURCES/":
|
|
|
|
fileName := strings.Split(path, "/")[1]
|
2021-02-19 15:22:36 +00:00
|
|
|
|
2023-02-23 04:23:47 +00:00
|
|
|
// Feed our template info to ProcessUrl and transform to the real values: ( {{.Name}}, {{.Branch}}, {{.Hash}}, {{.Hashtype}}, {{.Filename}} )
|
2023-02-23 17:31:31 +00:00
|
|
|
url, hasTemplate := ProcessUrl(pd.CdnUrl, md.Name, branchName, hash, hashType, fileName)
|
2021-02-19 15:22:36 +00:00
|
|
|
|
2023-02-23 04:23:47 +00:00
|
|
|
var req *http.Request
|
|
|
|
var resp *http.Response
|
|
|
|
|
|
|
|
// Download the --cdn-url given, but *only* if it contains template strings ( {{.Name}} , {{.Hash}} , etc. )
|
|
|
|
// Otherwise we need to fall back to the traditional cdn-url patterns
|
2023-02-23 17:31:31 +00:00
|
|
|
if hasTemplate {
|
2023-02-23 04:23:47 +00:00
|
|
|
pd.Log.Printf("downloading %s", url)
|
|
|
|
|
|
|
|
req, err := http.NewRequest("GET", url, nil)
|
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("could not create new http request: %v", err)
|
|
|
|
}
|
|
|
|
req.Header.Set("Accept-Encoding", "*")
|
|
|
|
|
|
|
|
resp, err = client.Do(req)
|
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("could not download dist-git file: %v", err)
|
|
|
|
}
|
2021-02-19 15:22:36 +00:00
|
|
|
}
|
2023-02-23 04:23:47 +00:00
|
|
|
|
|
|
|
// Default cdn-url: If we don't have a templated download string, try the default <SITE>/<PKG>/<BRANCH>/<HASH> pattern:
|
|
|
|
if resp == nil || resp.StatusCode != http.StatusOK {
|
|
|
|
url = fmt.Sprintf("%s/%s/%s/%s", pd.CdnUrl, md.Name, branchName, hash)
|
|
|
|
pd.Log.Printf("Attempting default URL: %s", url)
|
|
|
|
req, err = http.NewRequest("GET", url, nil)
|
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("could not create new http request: %v", err)
|
|
|
|
}
|
|
|
|
req.Header.Set("Accept-Encoding", "*")
|
|
|
|
resp, err = client.Do(req)
|
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("could not download dist-git file: %v", err)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// If the default URL fails, we have one more pattern to try. The simple <SITE>/<HASH> pattern
|
|
|
|
// If this one fails, we are truly lost, and have to bail out w/ an error:
|
|
|
|
if resp == nil || resp.StatusCode != http.StatusOK {
|
2022-03-25 17:40:09 +00:00
|
|
|
url = fmt.Sprintf("%s/%s", pd.CdnUrl, hash)
|
2023-02-23 04:23:47 +00:00
|
|
|
pd.Log.Printf("Attempting 2nd fallback URL: %s", url)
|
2022-03-25 17:40:09 +00:00
|
|
|
req, err = http.NewRequest("GET", url, nil)
|
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("could not create new http request: %v", err)
|
|
|
|
}
|
|
|
|
req.Header.Set("Accept-Encoding", "*")
|
|
|
|
resp, err = client.Do(req)
|
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("could not download dist-git file: %v", err)
|
|
|
|
}
|
|
|
|
if resp.StatusCode != http.StatusOK {
|
|
|
|
return fmt.Errorf("could not download dist-git file (status code %d): %v", resp.StatusCode, err)
|
|
|
|
}
|
|
|
|
}
|
2021-02-19 15:22:36 +00:00
|
|
|
|
2024-06-10 15:31:26 +00:00
|
|
|
body, err = io.ReadAll(resp.Body)
|
2021-02-19 15:22:36 +00:00
|
|
|
if err != nil {
|
2021-09-03 21:07:02 +00:00
|
|
|
return fmt.Errorf("could not read the whole dist-git file: %v", err)
|
2021-02-19 15:22:36 +00:00
|
|
|
}
|
|
|
|
err = resp.Body.Close()
|
|
|
|
if err != nil {
|
2021-09-03 21:07:02 +00:00
|
|
|
return fmt.Errorf("could not close body handle: %v", err)
|
2021-02-19 15:22:36 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-02-24 07:27:51 +00:00
|
|
|
md.BlobCache[hash] = body
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
|
|
|
|
2021-02-24 07:27:51 +00:00
|
|
|
f, err := md.Worktree.Filesystem.Create(path)
|
2020-12-20 09:36:13 +00:00
|
|
|
if err != nil {
|
2021-09-03 21:07:02 +00:00
|
|
|
return fmt.Errorf("could not open file pointer: %v", err)
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
|
|
|
|
2022-01-05 14:42:49 +00:00
|
|
|
hasher := pd.CompareHash(body, hash)
|
2020-12-20 09:36:13 +00:00
|
|
|
if hasher == nil {
|
2021-09-10 20:31:59 +00:00
|
|
|
return fmt.Errorf("checksum in metadata does not match dist-git file")
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
|
|
|
|
2021-02-24 07:27:51 +00:00
|
|
|
md.SourcesToIgnore = append(md.SourcesToIgnore, &data.IgnoredSource{
|
|
|
|
Name: path,
|
|
|
|
HashFunction: hasher,
|
2020-12-20 09:36:13 +00:00
|
|
|
})
|
|
|
|
|
|
|
|
_, err = f.Write(body)
|
|
|
|
if err != nil {
|
2021-09-03 21:07:02 +00:00
|
|
|
return fmt.Errorf("could not copy dist-git file to in-tree: %v", err)
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
|
|
|
_ = f.Close()
|
|
|
|
}
|
2021-09-03 21:07:02 +00:00
|
|
|
|
|
|
|
return nil
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
|
|
|
|
2021-09-03 21:07:02 +00:00
|
|
|
func (g *GitMode) PostProcess(md *data.ModeData) error {
|
2021-02-24 07:27:51 +00:00
|
|
|
for _, source := range md.SourcesToIgnore {
|
|
|
|
_, err := md.Worktree.Filesystem.Stat(source.Name)
|
2021-01-01 15:55:21 +00:00
|
|
|
if err == nil {
|
2021-02-24 07:27:51 +00:00
|
|
|
err := md.Worktree.Filesystem.Remove(source.Name)
|
2021-01-01 15:55:21 +00:00
|
|
|
if err != nil {
|
2021-09-03 21:07:02 +00:00
|
|
|
return fmt.Errorf("could not remove dist-git file: %v", err)
|
2021-01-01 15:55:21 +00:00
|
|
|
}
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-02-24 07:27:51 +00:00
|
|
|
_, err := md.Worktree.Add(".")
|
2020-12-20 09:36:13 +00:00
|
|
|
if err != nil {
|
2021-09-03 21:07:02 +00:00
|
|
|
return fmt.Errorf("could not add git sources: %v", err)
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
2021-09-03 21:07:02 +00:00
|
|
|
|
|
|
|
return nil
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
|
|
|
|
2021-09-03 21:07:02 +00:00
|
|
|
func (g *GitMode) ImportName(pd *data.ProcessData, md *data.ModeData) string {
|
2022-04-21 04:30:33 +00:00
|
|
|
if misc.GetTagImportRegex(pd).MatchString(md.TagBranch) {
|
|
|
|
match := misc.GetTagImportRegex(pd).FindStringSubmatch(md.TagBranch)
|
2020-12-22 16:28:04 +00:00
|
|
|
return match[3]
|
|
|
|
}
|
|
|
|
|
2021-04-26 02:02:07 +00:00
|
|
|
return strings.Replace(strings.TrimPrefix(md.TagBranch, "refs/heads/"), "%", "_", -1)
|
2020-12-20 09:36:13 +00:00
|
|
|
}
|
2023-02-23 04:23:47 +00:00
|
|
|
|
|
|
|
// ProcessUrl substitutes the template values ( {{.Name}}, {{.Branch}},
// {{.Hash}}, {{.Hashtype}}, {{.Filename}} ) in cdnUrl and returns the result.
// The boolean reports whether templating was applied: it is false when cdnUrl
// contains no "{{ ... }}" placeholders, or when the template cannot be parsed
// or executed — in those cases cdnUrl is returned exactly as given.
func ProcessUrl(cdnUrl string, name string, branch string, hash string, hashtype string, filename string) (string, bool) {
	// Values exposed to the template; field names must match the {{.Xxx}}
	// placeholders documented above (see also the Lookaside struct).
	vals := struct {
		Name     string
		Branch   string
		Hash     string
		Hashtype string
		Filename string
	}{name, branch, hash, hashtype, filename}

	// Return cdnUrl as-is if we don't have any templates ("{{ .Variable }}") to process:
	if !(strings.Contains(cdnUrl, "{{") && strings.Contains(cdnUrl, "}}")) {
		return cdnUrl, false
	}

	// If we run into trouble with our template parsing, we'll just return the cdnUrl, exactly as we found it
	tmpl, err := template.New("").Parse(cdnUrl)
	if err != nil {
		return cdnUrl, false
	}

	var result bytes.Buffer
	if err := tmpl.Execute(&result, vals); err != nil {
		// BUGFIX: this used to be log.Fatalf, which terminated the whole
		// process from library code; warn and fall back to the untouched URL,
		// consistent with the parse-error path above.
		log.Printf("WARN: could not process CDN URL template(s) from URL string: %s\n", cdnUrl)
		return cdnUrl, false
	}

	return result.String(), true
}
|