Merge pull request '[v7.0/forgejo] Add an immutable tarball link to archive download headers for Nix' (#3946) from bp-v7.0/forgejo-6631f56 into v7.0/forgejo

Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/3946
Reviewed-by: Earl Warren <earl-warren@noreply.codeberg.org>
Committed-by: Earl Warren
Date: 2024-05-29 19:22:14 +00:00
Commit: 4af90387d4
4 changed files with 28 additions and 0 deletions


@@ -0,0 +1,5 @@
Support the [Nix tarball fetcher immutable link protocol](https://github.com/nixos/nix/blob/56763ff918eb308db23080e560ed2ea3e00c80a7/doc/manual/src/protocols/tarball-fetcher.md) on archive URLs, so Forgejo-generated tarballs for branches go into Nix's `flake.lock` as their respective commit URLs and `nix flake update` just works. This allows natively fetching Forgejo repositories for Nix flake inputs as tarballs rather than as Git repositories, significantly improving fetch times and avoiding a runtime dependency on Git.
Concretely, Forgejo now returns a `Link` header of the following format from its archive URLs: `Link: <https://my-forgejo/api/v1/repos/someuser/somerepo/archive/some-commit-hash.tar.gz?rev=some-commit-hash>; rel="immutable"`.
Example usage: `inputs.meow.url = "https://my-forgejo/someuser/somerepo/archive/main.tar.gz";` in `flake.nix`. For a private repository, configure `netrc-file` in `nix.conf` and use `https://my-forgejo/api/v1/repos/someuser/somerepo/archive/main.tar.gz` as a URL instead, since the normal archive endpoint doesn't support tokens.
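For illustration only, here is a minimal Go sketch of how a client such as the Nix tarball fetcher can resolve the commit-pinned URL from the advertised `Link` header. The host and repository names are hypothetical and the snippet is not part of this change:

```go
// Sketch: request a branch archive from a hypothetical Forgejo instance and
// extract the commit-pinned URL advertised via `Link: <...>; rel="immutable"`.
package main

import (
	"fmt"
	"log"
	"net/http"
	"regexp"
)

// immutableRe pulls the URL out of a `Link: <...>; rel="immutable"` header value.
var immutableRe = regexp.MustCompile(`<([^>]+)>;\s*rel="immutable"`)

func main() {
	// Branch archive URL (hypothetical host/repo); the response's Link header
	// points at the commit-pinned equivalent that Nix records in flake.lock.
	resp, err := http.Get("https://my-forgejo/api/v1/repos/someuser/somerepo/archive/main.tar.gz")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	m := immutableRe.FindStringSubmatch(resp.Header.Get("Link"))
	if m == nil {
		log.Fatal("no immutable link advertised")
	}
	fmt.Println("pinned archive:", m[1])
}
```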


@@ -326,6 +326,12 @@ func archiveDownload(ctx *context.APIContext) {
func download(ctx *context.APIContext, archiveName string, archiver *repo_model.RepoArchiver) {
downloadName := ctx.Repo.Repository.Name + "-" + archiveName
// Add nix format link header so tarballs lock correctly:
// https://github.com/nixos/nix/blob/56763ff918eb308db23080e560ed2ea3e00c80a7/doc/manual/src/protocols/tarball-fetcher.md
ctx.Resp.Header().Add("Link", fmt.Sprintf("<%s/archive/%s.tar.gz?rev=%s>; rel=\"immutable\"",
ctx.Repo.Repository.APIURL(),
archiver.CommitID, archiver.CommitID))
rPath := archiver.RelativePath()
if setting.RepoArchive.Storage.MinioConfig.ServeDirect {
// If we have a signed url (S3, object storage), redirect to this directly.


@@ -480,6 +480,12 @@ func Download(ctx *context.Context) {
func download(ctx *context.Context, archiveName string, archiver *repo_model.RepoArchiver) {
downloadName := ctx.Repo.Repository.Name + "-" + archiveName
// Add nix format link header so tarballs lock correctly:
// https://github.com/nixos/nix/blob/56763ff918eb308db23080e560ed2ea3e00c80a7/doc/manual/src/protocols/tarball-fetcher.md
ctx.Resp.Header().Add("Link", fmt.Sprintf("<%s/archive/%s.tar.gz?rev=%s>; rel=\"immutable\"",
ctx.Repo.Repository.APIURL(),
archiver.CommitID, archiver.CommitID))
rPath := archiver.RelativePath()
if setting.RepoArchive.Storage.MinioConfig.ServeDirect {
// If we have a signed url (S3, object storage), redirect to this directly.


@ -8,6 +8,7 @@ import (
"io" "io"
"net/http" "net/http"
"net/url" "net/url"
"regexp"
"testing" "testing"
auth_model "code.gitea.io/gitea/models/auth" auth_model "code.gitea.io/gitea/models/auth"
@@ -41,6 +42,16 @@ func TestAPIDownloadArchive(t *testing.T) {
assert.Len(t, bs, 266)
assert.EqualValues(t, "application/gzip", resp.Header().Get("Content-Type"))
// Must return a link to a commit ID as the "immutable" archive link
linkHeaderRe := regexp.MustCompile(`<(?P<url>https?://.*/api/v1/repos/user2/repo1/archive/[a-f0-9]+\.tar\.gz.*)>; rel="immutable"`)
m := linkHeaderRe.FindStringSubmatch(resp.Header().Get("Link"))
assert.NotEmpty(t, m[1])
resp = MakeRequest(t, NewRequest(t, "GET", m[1]).AddTokenAuth(token), http.StatusOK)
bs2, err := io.ReadAll(resp.Body)
assert.NoError(t, err)
// The locked URL should give the same bytes as the non-locked one
assert.EqualValues(t, bs, bs2)
link, _ = url.Parse(fmt.Sprintf("/api/v1/repos/%s/%s/archive/master.bundle", user2.Name, repo.Name))
resp = MakeRequest(t, NewRequest(t, "GET", link.String()).AddTokenAuth(token), http.StatusOK)
bs, err = io.ReadAll(resp.Body)