Fix uploaded artifacts should be overwritten (#28726)
Fix `Uploaded artifacts should be overwritten` (https://github.com/go-gitea/gitea/issues/28549). Previously, when different content was uploaded to an already-uploaded artifact, the handler noticed that the new content size did not match the size stored in the artifact's database record and refused the new artifact. Now, when the incoming content size does not match the database record while receiving chunks, the record is updated to follow the latest size values.
Commit ad98ea63ee (parent 49eb168677)
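In effect, the chunk-receiving path stops rejecting a size mismatch and treats it as an overwrite. A condensed sketch of that decision, using a hypothetical storedArtifact type and reconcileSize helper (simplified names, not the exact Gitea code; the real change is in the hunks below):

package main

import "fmt"

// Hypothetical, condensed model of the record fields involved; the real
// struct lives in Gitea's actions model and appears in the hunks below.
type storedArtifact struct {
    FileSize           int64
    FileCompressedSize int64
}

// reconcileSize makes the record follow the latest upload instead of refusing
// when the incoming sizes differ from what was previously recorded.
func reconcileSize(a *storedArtifact, fileRealTotalSize, chunksTotalSize int64) bool {
    if a.FileSize == 0 ||
        a.FileCompressedSize == 0 ||
        a.FileSize != fileRealTotalSize ||
        a.FileCompressedSize != chunksTotalSize {
        a.FileSize = fileRealTotalSize
        a.FileCompressedSize = chunksTotalSize
        return true // caller persists the updated record
    }
    return false
}

func main() {
    a := &storedArtifact{FileSize: 1024, FileCompressedSize: 1024}
    // A re-upload with 4096 bytes is no longer refused; the record is updated.
    fmt.Println(reconcileSize(a, 4096, 4096), a.FileSize, a.FileCompressedSize)
}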
@@ -257,8 +257,11 @@ func (ar artifactRoutes) uploadArtifact(ctx *ArtifactContext) {
         return
     }
 
-    // update artifact size if zero
-    if artifact.FileSize == 0 || artifact.FileCompressedSize == 0 {
+    // update artifact size if zero or not matching; overwrite artifact size
+    if artifact.FileSize == 0 ||
+        artifact.FileCompressedSize == 0 ||
+        artifact.FileSize != fileRealTotalSize ||
+        artifact.FileCompressedSize != chunksTotalSize {
         artifact.FileSize = fileRealTotalSize
         artifact.FileCompressedSize = chunksTotalSize
         artifact.ContentEncoding = ctx.Req.Header.Get("Content-Encoding")
@@ -267,6 +270,8 @@ func (ar artifactRoutes) uploadArtifact(ctx *ArtifactContext) {
             ctx.Error(http.StatusInternalServerError, "Error update artifact")
             return
         }
+        log.Debug("[artifact] update artifact size, artifact_id: %d, size: %d, compressed size: %d",
+            artifact.ID, artifact.FileSize, artifact.FileCompressedSize)
     }
 
     ctx.JSON(http.StatusOK, map[string]string{
@@ -186,7 +186,14 @@ func mergeChunksForArtifact(ctx *ArtifactContext, chunks []*chunkFileItem, st st
     }()
 
     // save storage path to artifact
-    log.Debug("[artifact] merge chunks to artifact: %d, %s", artifact.ID, storagePath)
+    log.Debug("[artifact] merge chunks to artifact: %d, %s, old:%s", artifact.ID, storagePath, artifact.StoragePath)
+    // if artifact is already uploaded, delete the old file
+    if artifact.StoragePath != "" {
+        if err := st.Delete(artifact.StoragePath); err != nil {
+            log.Warn("Error deleting old artifact: %s, %v", artifact.StoragePath, err)
+        }
+    }
+
     artifact.StoragePath = storagePath
     artifact.Status = int64(actions.ArtifactStatusUploadConfirmed)
     if err := actions.UpdateArtifactByID(ctx, artifact.ID, artifact); err != nil {
@@ -287,3 +287,92 @@ func TestActionsArtifactUploadWithRetentionDays(t *testing.T) {
         AddTokenAuth("8061e833a55f6fc0157c98b883e91fcfeeb1a71a")
     MakeRequest(t, req, http.StatusOK)
 }
+
+func TestActionsArtifactOverwrite(t *testing.T) {
+    defer tests.PrepareTestEnv(t)()
+
+    {
+        // download the old artifact uploaded by the tests above; it should be 1024 "A"
+        req := NewRequest(t, "GET", "/api/actions_pipeline/_apis/pipelines/workflows/791/artifacts").
+            AddTokenAuth("8061e833a55f6fc0157c98b883e91fcfeeb1a71a")
+        resp := MakeRequest(t, req, http.StatusOK)
+        var listResp listArtifactsResponse
+        DecodeJSON(t, resp, &listResp)
+
+        idx := strings.Index(listResp.Value[0].FileContainerResourceURL, "/api/actions_pipeline/_apis/pipelines/")
+        url := listResp.Value[0].FileContainerResourceURL[idx+1:] + "?itemPath=artifact"
+        req = NewRequest(t, "GET", url).
+            AddTokenAuth("8061e833a55f6fc0157c98b883e91fcfeeb1a71a")
+        resp = MakeRequest(t, req, http.StatusOK)
+        var downloadResp downloadArtifactResponse
+        DecodeJSON(t, resp, &downloadResp)
+
+        idx = strings.Index(downloadResp.Value[0].ContentLocation, "/api/actions_pipeline/_apis/pipelines/")
+        url = downloadResp.Value[0].ContentLocation[idx:]
+        req = NewRequest(t, "GET", url).
+            AddTokenAuth("8061e833a55f6fc0157c98b883e91fcfeeb1a71a")
+        resp = MakeRequest(t, req, http.StatusOK)
+        body := strings.Repeat("A", 1024)
+        assert.Equal(t, resp.Body.String(), body)
+    }
+
+    {
+        // upload the same artifact again, this time with 4096 "B"
+        req := NewRequestWithJSON(t, "POST", "/api/actions_pipeline/_apis/pipelines/workflows/791/artifacts", getUploadArtifactRequest{
+            Type: "actions_storage",
+            Name: "artifact",
+        }).AddTokenAuth("8061e833a55f6fc0157c98b883e91fcfeeb1a71a")
+        resp := MakeRequest(t, req, http.StatusOK)
+        var uploadResp uploadArtifactResponse
+        DecodeJSON(t, resp, &uploadResp)
+
+        idx := strings.Index(uploadResp.FileContainerResourceURL, "/api/actions_pipeline/_apis/pipelines/")
+        url := uploadResp.FileContainerResourceURL[idx:] + "?itemPath=artifact/abc.txt"
+        body := strings.Repeat("B", 4096)
+        req = NewRequestWithBody(t, "PUT", url, strings.NewReader(body)).
+            AddTokenAuth("8061e833a55f6fc0157c98b883e91fcfeeb1a71a").
+            SetHeader("Content-Range", "bytes 0-4095/4096").
+            SetHeader("x-tfs-filelength", "4096").
+            SetHeader("x-actions-results-md5", "wUypcJFeZCK5T6r4lfqzqg==") // base64(md5(body))
+        MakeRequest(t, req, http.StatusOK)
+
+        // confirm artifact upload
+        req = NewRequest(t, "PATCH", "/api/actions_pipeline/_apis/pipelines/workflows/791/artifacts?artifactName=artifact").
+            AddTokenAuth("8061e833a55f6fc0157c98b883e91fcfeeb1a71a")
+        MakeRequest(t, req, http.StatusOK)
+    }
+
+    {
+        // download the artifact again; it should now be 4096 "B"
+        req := NewRequest(t, "GET", "/api/actions_pipeline/_apis/pipelines/workflows/791/artifacts").
+            AddTokenAuth("8061e833a55f6fc0157c98b883e91fcfeeb1a71a")
+        resp := MakeRequest(t, req, http.StatusOK)
+        var listResp listArtifactsResponse
+        DecodeJSON(t, resp, &listResp)
+
+        var uploadedItem listArtifactsResponseItem
+        for _, item := range listResp.Value {
+            if item.Name == "artifact" {
+                uploadedItem = item
+                break
+            }
+        }
+        assert.Equal(t, uploadedItem.Name, "artifact")
+
+        idx := strings.Index(uploadedItem.FileContainerResourceURL, "/api/actions_pipeline/_apis/pipelines/")
+        url := uploadedItem.FileContainerResourceURL[idx+1:] + "?itemPath=artifact"
+        req = NewRequest(t, "GET", url).
+            AddTokenAuth("8061e833a55f6fc0157c98b883e91fcfeeb1a71a")
+        resp = MakeRequest(t, req, http.StatusOK)
+        var downloadResp downloadArtifactResponse
+        DecodeJSON(t, resp, &downloadResp)
+
+        idx = strings.Index(downloadResp.Value[0].ContentLocation, "/api/actions_pipeline/_apis/pipelines/")
+        url = downloadResp.Value[0].ContentLocation[idx:]
+        req = NewRequest(t, "GET", url).
+            AddTokenAuth("8061e833a55f6fc0157c98b883e91fcfeeb1a71a")
+        resp = MakeRequest(t, req, http.StatusOK)
+        body := strings.Repeat("B", 4096)
+        assert.Equal(t, resp.Body.String(), body)
+    }
+}
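The x-actions-results-md5 header in the test above is described by the test comment as base64(md5(body)). A standalone sketch, using only the Go standard library and not part of the commit, of how such a value can be produced for the 4096-byte body:

package main

import (
    "crypto/md5"
    "encoding/base64"
    "fmt"
    "strings"
)

func main() {
    // Same body as the overwrite test: 4096 bytes of "B".
    body := strings.Repeat("B", 4096)
    // MD5 digest of the body, base64-encoded; this is the format the
    // x-actions-results-md5 header carries.
    sum := md5.Sum([]byte(body))
    fmt.Println(base64.StdEncoding.EncodeToString(sum[:]))
}

Assuming the test comment's base64(md5(body)) description is accurate, running this should print the value the test hard-codes ("wUypcJFeZCK5T6r4lfqzqg==").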