parent 1e7b368ccf
commit c33cbae7cc
@@ -96,6 +96,6 @@ func TestGetUserHeatmapDataByUser(t *testing.T) {
 // Test JSON rendering
 jsonData, err := json.Marshal(heatmap)
 require.NoError(t, err)
-assert.Equal(t, tc.JSONResult, string(jsonData))
+assert.JSONEq(t, tc.JSONResult, string(jsonData))
 }
 }
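Note: assert.JSONEq parses both arguments as JSON before comparing, so the check no longer depends on key order or whitespace in the expected fixture, whereas assert.Equal compared the raw strings byte for byte. The standalone sketch below (hypothetical package and test names, not part of this commit) illustrates the difference:

package example

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestJSONEqIgnoresFormatting(t *testing.T) {
    expected := `{"a": 1, "b": 2}`
    actual := `{"b":2,"a":1}` // same data, different key order and spacing

    // Comparing the raw strings fails because they differ byte for byte.
    assert.NotEqual(t, expected, actual)

    // JSONEq unmarshals both sides and compares the resulting values.
    assert.JSONEq(t, expected, actual)
}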
@@ -5,7 +5,6 @@ package project
 
 import (
 "fmt"
-"strings"
 "testing"
 
 "code.gitea.io/gitea/models/db"
@@ -124,5 +123,5 @@ func Test_NewColumn(t *testing.T) {
 ProjectID: project1.ID,
 })
 require.Error(t, err)
-assert.True(t, strings.Contains(err.Error(), "maximum number of columns reached"))
+assert.Contains(t, err.Error(), "maximum number of columns reached")
 }
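Note: assert.Contains(t, haystack, needle) checks the same condition as assert.True(t, strings.Contains(haystack, needle)), but on failure it reports the string and the missing substring instead of only saying the condition should be true, and once every call site is converted the "strings" import can be dropped, as in the import hunk above. The inverse rewrite, assert.False(t, strings.Contains(...)) to assert.NotContains(...), appears further down in this commit. A standalone sketch (hypothetical package and test names, not part of this commit):

package example

import (
    "errors"
    "strings" // only needed for the old-style assertion below
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestContainsVsTrue(t *testing.T) {
    err := errors.New("maximum number of columns reached")

    // Old style: a failure only reports that the condition should be true.
    assert.True(t, strings.Contains(err.Error(), "columns"))

    // New style: a failure prints the full string and the expected substring.
    assert.Contains(t, err.Error(), "columns")

    // Inverse form, used later in this commit for the 404-page check.
    assert.NotContains(t, err.Error(), "rows")
}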
@@ -9,7 +9,6 @@ import (
 "io"
 "net/http"
 "net/http/httptest"
-"regexp"
 "testing"
 "time"
 
@@ -119,7 +118,7 @@ func TestActivityPubSignedPost(t *testing.T) {
 
 expected := "BODY"
 srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
-assert.Regexp(t, regexp.MustCompile("^"+setting.Federation.DigestAlgorithm), r.Header.Get("Digest"))
+assert.Regexp(t, "^"+setting.Federation.DigestAlgorithm, r.Header.Get("Digest"))
 assert.Contains(t, r.Header.Get("Signature"), pubID)
 assert.Equal(t, ActivityStreamsContentType, r.Header.Get("Content-Type"))
 body, err := io.ReadAll(r.Body)
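Note: assert.Regexp accepts either a compiled *regexp.Regexp or a plain string pattern, which it compiles internally, so the explicit regexp.MustCompile call is redundant and the "regexp" import can be removed, as in the import hunk above. A standalone sketch (hypothetical package and test names, not part of this commit):

package example

import (
    "regexp"
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestRegexpAcceptsStringPatterns(t *testing.T) {
    digest := "SHA-256=abc123"

    // Both forms are equivalent; the string pattern is compiled by testify.
    assert.Regexp(t, regexp.MustCompile("^SHA-256"), digest)
    assert.Regexp(t, "^SHA-256", digest)
}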
@@ -267,7 +267,7 @@ func TestHTTPClientDownload(t *testing.T) {
 return nil
 })
 if len(c.expectederror) > 0 {
-assert.True(t, strings.Contains(err.Error(), c.expectederror), "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
+assert.Contains(t, err.Error(), c.expectederror, "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
 } else {
 require.NoError(t, err, "case %d", n)
 }
@@ -369,7 +369,7 @@ func TestHTTPClientUpload(t *testing.T) {
 return io.NopCloser(new(bytes.Buffer)), objectError
 })
 if len(c.expectederror) > 0 {
-assert.True(t, strings.Contains(err.Error(), c.expectederror), "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
+assert.Contains(t, err.Error(), c.expectederror, "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
 } else {
 require.NoError(t, err, "case %d", n)
 }
@@ -97,7 +97,7 @@ func TestBasicTransferAdapter(t *testing.T) {
 for n, c := range cases {
 _, err := a.Download(context.Background(), c.link)
 if len(c.expectederror) > 0 {
-assert.True(t, strings.Contains(err.Error(), c.expectederror), "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
+assert.Contains(t, err.Error(), c.expectederror, "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
 } else {
 require.NoError(t, err, "case %d", n)
 }
@@ -130,7 +130,7 @@ func TestBasicTransferAdapter(t *testing.T) {
 for n, c := range cases {
 err := a.Upload(context.Background(), c.link, p, bytes.NewBufferString("dummy"))
 if len(c.expectederror) > 0 {
-assert.True(t, strings.Contains(err.Error(), c.expectederror), "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
+assert.Contains(t, err.Error(), c.expectederror, "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
 } else {
 require.NoError(t, err, "case %d", n)
 }
@@ -163,7 +163,7 @@ func TestBasicTransferAdapter(t *testing.T) {
 for n, c := range cases {
 err := a.Verify(context.Background(), c.link, p)
 if len(c.expectederror) > 0 {
-assert.True(t, strings.Contains(err.Error(), c.expectederror), "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
+assert.Contains(t, err.Error(), c.expectederror, "case %d: '%s' should contain '%s'", n, err.Error(), c.expectederror)
 } else {
 require.NoError(t, err, "case %d", n)
 }
@@ -445,7 +445,7 @@ MINIO_BASE_PATH = /lfs
 require.NoError(t, loadLFSFrom(cfg))
 assert.EqualValues(t, "my_access_key", LFS.Storage.MinioConfig.AccessKeyID)
 assert.EqualValues(t, "my_secret_key", LFS.Storage.MinioConfig.SecretAccessKey)
-assert.True(t, true, LFS.Storage.MinioConfig.UseSSL)
+assert.True(t, LFS.Storage.MinioConfig.UseSSL)
 assert.EqualValues(t, "/lfs", LFS.Storage.MinioConfig.BasePath)
 
 cfg, err = NewConfigProviderFromData(`
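Note: this hunk is a behavior fix rather than a style cleanup. In assert.True(t, true, LFS.Storage.MinioConfig.UseSSL) the literal true is the value under test and the config field is only treated as an extra message argument, so the assertion could never fail; the new form asserts the field itself. A standalone sketch (hypothetical package and test names, not part of this commit):

package example

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestUseSSLAssertion(t *testing.T) {
    useSSL := true // stands in for the parsed MinIO config field

    // Old pattern: passes unconditionally, because the condition is the
    // literal true and useSSL is only consumed as a message argument.
    assert.True(t, true, useSSL)

    // Fixed pattern: actually fails if useSSL is false.
    assert.True(t, useSSL)
}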
@@ -10,7 +10,6 @@ import (
 "crypto/sha256"
 "crypto/x509"
 "encoding/pem"
-"regexp"
 "testing"
 
 "github.com/stretchr/testify/assert"
@@ -24,8 +23,8 @@ func TestKeygen(t *testing.T) {
 assert.NotEmpty(t, priv)
 assert.NotEmpty(t, pub)
 
-assert.Regexp(t, regexp.MustCompile("^-----BEGIN RSA PRIVATE KEY-----.*"), priv)
-assert.Regexp(t, regexp.MustCompile("^-----BEGIN PUBLIC KEY-----.*"), pub)
+assert.Regexp(t, "^-----BEGIN RSA PRIVATE KEY-----.*", priv)
+assert.Regexp(t, "^-----BEGIN PUBLIC KEY-----.*", pub)
 }
 
 func TestSignUsingKeys(t *testing.T) {
@@ -43,7 +43,7 @@ func TestRepository_ContributorsGraph(t *testing.T) {
 dataString, isData := mockCache.Get("key2").(string)
 assert.True(t, isData)
 // Verify that JSON is actually stored in the cache.
-assert.EqualValues(t, `{"ethantkoenig@gmail.com":{"name":"Ethan Koenig","login":"","avatar_link":"https://secure.gravatar.com/avatar/b42fb195faa8c61b8d88abfefe30e9e3?d=identicon","home_link":"","total_commits":1,"weeks":{"1511654400000":{"week":1511654400000,"additions":3,"deletions":0,"commits":1}}},"jimmy.praet@telenet.be":{"name":"Jimmy Praet","login":"","avatar_link":"https://secure.gravatar.com/avatar/93c49b7c89eb156971d11161c9b52795?d=identicon","home_link":"","total_commits":1,"weeks":{"1624752000000":{"week":1624752000000,"additions":2,"deletions":0,"commits":1}}},"jon@allspice.io":{"name":"Jon","login":"","avatar_link":"https://secure.gravatar.com/avatar/00388ce725e6886f3e07c3733007289b?d=identicon","home_link":"","total_commits":1,"weeks":{"1607817600000":{"week":1607817600000,"additions":10,"deletions":0,"commits":1}}},"total":{"name":"Total","login":"","avatar_link":"","home_link":"","total_commits":3,"weeks":{"1511654400000":{"week":1511654400000,"additions":3,"deletions":0,"commits":1},"1607817600000":{"week":1607817600000,"additions":10,"deletions":0,"commits":1},"1624752000000":{"week":1624752000000,"additions":2,"deletions":0,"commits":1}}}}`, dataString)
+assert.JSONEq(t, `{"ethantkoenig@gmail.com":{"name":"Ethan Koenig","login":"","avatar_link":"https://secure.gravatar.com/avatar/b42fb195faa8c61b8d88abfefe30e9e3?d=identicon","home_link":"","total_commits":1,"weeks":{"1511654400000":{"week":1511654400000,"additions":3,"deletions":0,"commits":1}}},"jimmy.praet@telenet.be":{"name":"Jimmy Praet","login":"","avatar_link":"https://secure.gravatar.com/avatar/93c49b7c89eb156971d11161c9b52795?d=identicon","home_link":"","total_commits":1,"weeks":{"1624752000000":{"week":1624752000000,"additions":2,"deletions":0,"commits":1}}},"jon@allspice.io":{"name":"Jon","login":"","avatar_link":"https://secure.gravatar.com/avatar/00388ce725e6886f3e07c3733007289b?d=identicon","home_link":"","total_commits":1,"weeks":{"1607817600000":{"week":1607817600000,"additions":10,"deletions":0,"commits":1}}},"total":{"name":"Total","login":"","avatar_link":"","home_link":"","total_commits":3,"weeks":{"1511654400000":{"week":1511654400000,"additions":3,"deletions":0,"commits":1},"1607817600000":{"week":1607817600000,"additions":10,"deletions":0,"commits":1},"1624752000000":{"week":1624752000000,"additions":2,"deletions":0,"commits":1}}}}`, dataString)
 
 var data map[string]*ContributorData
 require.NoError(t, json.Unmarshal([]byte(dataString), &data))
@@ -40,7 +40,7 @@ func TestCompareTag(t *testing.T) {
 
 req = NewRequest(t, "GET", "/user2/repo1/compare/invalid")
 resp = session.MakeRequest(t, req, http.StatusNotFound)
-assert.False(t, strings.Contains(resp.Body.String(), ">500<"), "expect 404 page not 500")
+assert.NotContains(t, resp.Body.String(), ">500<", "expect 404 page not 500")
 }
 
 // Compare with inferred default branch (master)
@@ -48,7 +48,7 @@ func TestBranchLastUpdatedTime(t *testing.T) {
 {
 buf := ""
 findTextNonNested(t, node, &buf)
-assert.True(t, strings.Contains(buf, "Updated"))
+assert.Contains(t, buf, "Updated")
 }
 
 {
@@ -902,7 +902,7 @@ func TestRepoFollowSymlink(t *testing.T) {
 symlinkURL, ok := htmlDoc.Find(".file-actions .button[data-kind='follow-symlink']").Attr("href")
 if shouldExist {
 assert.True(t, ok)
-assert.EqualValues(t, expectedSymlinkURL, symlinkURL)
+assert.Equal(t, expectedSymlinkURL, symlinkURL)
 } else {
 assert.False(t, ok)
 }
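Note: assert.Equal requires the two operands to have the same type as well as the same value, whereas assert.EqualValues also accepts values that are merely convertible to each other. Both sides here are already strings, so the stricter assert.Equal suffices and would catch an accidental type change. A standalone sketch (hypothetical package and test names, not part of this commit):

package example

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestEqualVsEqualValues(t *testing.T) {
    // EqualValues converts before comparing, so int and int64 match.
    assert.EqualValues(t, int64(1), 1)

    // Equal also compares types; the same pair would fail:
    //   assert.Equal(t, int64(1), 1) // int64 vs int
    assert.Equal(t, "some/link", "some/link") // same type and value
}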