2023-05-19 15:37:57 +02:00
|
|
|
// Copyright 2023 The Gitea Authors. All rights reserved.
|
|
|
|
// SPDX-License-Identifier: MIT
|
|
|
|
|
|
|
|
package actions
|
|
|
|
|
2023-05-21 03:50:53 +02:00
|
|
|
// GitHub Actions Artifacts API Simple Description
|
2023-05-19 15:37:57 +02:00
|
|
|
//
|
|
|
|
// 1. Upload artifact
|
|
|
|
// 1.1. Post upload url
|
|
|
|
// Post: /api/actions_pipeline/_apis/pipelines/workflows/{run_id}/artifacts?api-version=6.0-preview
|
|
|
|
// Request:
|
|
|
|
// {
|
|
|
|
// "Type": "actions_storage",
|
|
|
|
// "Name": "artifact"
|
|
|
|
// }
|
|
|
|
// Response:
|
|
|
|
// {
|
|
|
|
// "fileContainerResourceUrl":"/api/actions_pipeline/_apis/pipelines/workflows/{run_id}/artifacts/{artifact_id}/upload"
|
|
|
|
// }
|
|
|
|
// it acquires an upload url for artifact upload
|
|
|
|
// 1.2. Upload artifact
|
|
|
|
// PUT: /api/actions_pipeline/_apis/pipelines/workflows/{run_id}/artifacts/{artifact_id}/upload?itemPath=artifact%2Ffilename
|
|
|
|
// it uploads chunks with headers:
|
|
|
|
// x-tfs-filelength: 1024 // total file length
|
|
|
|
// content-length: 1024 // chunk length
|
|
|
|
// x-actions-results-md5: md5sum // md5sum of chunk
|
|
|
|
// content-range: bytes 0-1023/1024 // chunk range
|
|
|
|
// we save all chunks to one storage directory after md5sum check
|
|
|
|
// 1.3. Confirm upload
|
|
|
|
// PATCH: /api/actions_pipeline/_apis/pipelines/workflows/{run_id}/artifacts/{artifact_id}/upload?itemPath=artifact%2Ffilename
|
|
|
|
// it confirms the upload and merges all chunks into one file, saving this file to storage
|
|
|
|
//
|
|
|
|
// 2. Download artifact
|
|
|
|
// 2.1 list artifacts
|
|
|
|
// GET: /api/actions_pipeline/_apis/pipelines/workflows/{run_id}/artifacts?api-version=6.0-preview
|
|
|
|
// Response:
|
|
|
|
// {
|
|
|
|
// "count": 1,
|
|
|
|
// "value": [
|
|
|
|
// {
|
|
|
|
// "name": "artifact",
|
|
|
|
// "fileContainerResourceUrl": "/api/actions_pipeline/_apis/pipelines/workflows/{run_id}/artifacts/{artifact_id}/path"
|
|
|
|
// }
|
|
|
|
// ]
|
|
|
|
// }
|
|
|
|
// 2.2 download artifact
|
|
|
|
// GET: /api/actions_pipeline/_apis/pipelines/workflows/{run_id}/artifacts/{artifact_id}/path?api-version=6.0-preview
|
|
|
|
// Response:
|
|
|
|
// {
|
|
|
|
// "value": [
|
|
|
|
// {
|
|
|
|
// "contentLocation": "/api/actions_pipeline/_apis/pipelines/workflows/{run_id}/artifacts/{artifact_id}/download",
|
|
|
|
// "path": "artifact/filename",
|
|
|
|
// "itemType": "file"
|
|
|
|
// }
|
|
|
|
// ]
|
|
|
|
// }
|
|
|
|
// 2.3 download artifact file
|
|
|
|
// GET: /api/actions_pipeline/_apis/pipelines/workflows/{run_id}/artifacts/{artifact_id}/download?itemPath=artifact%2Ffilename
|
|
|
|
// Response:
|
|
|
|
// download file
|
|
|
|
//
|
|
|
|
|
|
|
|
import (
|
|
|
|
"crypto/md5"
|
2024-02-15 21:39:50 +01:00
|
|
|
"errors"
|
2023-05-19 15:37:57 +02:00
|
|
|
"fmt"
|
|
|
|
"net/http"
|
|
|
|
"strconv"
|
|
|
|
"strings"
|
|
|
|
|
|
|
|
"code.gitea.io/gitea/models/actions"
|
2023-11-24 04:49:41 +01:00
|
|
|
"code.gitea.io/gitea/models/db"
|
2023-05-19 15:37:57 +02:00
|
|
|
"code.gitea.io/gitea/modules/json"
|
|
|
|
"code.gitea.io/gitea/modules/log"
|
|
|
|
"code.gitea.io/gitea/modules/setting"
|
|
|
|
"code.gitea.io/gitea/modules/storage"
|
|
|
|
"code.gitea.io/gitea/modules/util"
|
|
|
|
"code.gitea.io/gitea/modules/web"
|
2023-06-18 09:59:09 +02:00
|
|
|
web_types "code.gitea.io/gitea/modules/web/types"
|
2024-02-02 15:25:59 +01:00
|
|
|
actions_service "code.gitea.io/gitea/services/actions"
|
2024-02-27 08:12:22 +01:00
|
|
|
"code.gitea.io/gitea/services/context"
|
2023-05-19 15:37:57 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
// artifactRouteBase is the shared route prefix for every artifact endpoint;
// {run_id} is substituted with the concrete run ID when URLs are built.
const artifactRouteBase = "/_apis/pipelines/workflows/{run_id}/artifacts"

// artifactContextKeyType is a private key type so the context key cannot
// collide with keys from other packages.
type artifactContextKeyType struct{}

// artifactContextKey is the request-context key under which the
// *ArtifactContext is stored by ArtifactContexter.
var artifactContextKey = artifactContextKeyType{}

// ArtifactContext is the per-request context for artifact handlers.
type ArtifactContext struct {
	*context.Base

	// ActionTask is the authenticated runner task performing the
	// artifact upload/download; set by ArtifactContexter.
	ActionTask *actions.ActionTask
}
|
|
|
|
|
|
|
|
func init() {
	// Register a provider so the web framework can obtain the response
	// status from requests handled with an *ArtifactContext.
	web.RegisterResponseStatusProvider[*ArtifactContext](func(req *http.Request) web_types.ResponseStatusProvider {
		return req.Context().Value(artifactContextKey).(*ArtifactContext)
	})
}
|
|
|
|
|
|
|
|
func ArtifactsRoutes(prefix string) *web.Route {
|
2023-05-19 15:37:57 +02:00
|
|
|
m := web.NewRoute()
|
2023-05-21 03:50:53 +02:00
|
|
|
m.Use(ArtifactContexter())
|
2023-05-19 15:37:57 +02:00
|
|
|
|
|
|
|
r := artifactRoutes{
|
|
|
|
prefix: prefix,
|
|
|
|
fs: storage.ActionsArtifacts,
|
|
|
|
}
|
|
|
|
|
|
|
|
m.Group(artifactRouteBase, func() {
|
|
|
|
// retrieve, list and confirm artifacts
|
|
|
|
m.Combo("").Get(r.listArtifacts).Post(r.getUploadArtifactURL).Patch(r.comfirmUploadArtifact)
|
|
|
|
// handle container artifacts list and download
|
2023-07-21 04:42:01 +02:00
|
|
|
m.Put("/{artifact_hash}/upload", r.uploadArtifact)
|
|
|
|
// handle artifacts download
|
|
|
|
m.Get("/{artifact_hash}/download_url", r.getDownloadArtifactURL)
|
|
|
|
m.Get("/{artifact_id}/download", r.downloadArtifact)
|
2023-05-19 15:37:57 +02:00
|
|
|
})
|
|
|
|
|
|
|
|
return m
|
|
|
|
}
|
|
|
|
|
2023-05-21 03:50:53 +02:00
|
|
|
// ArtifactContexter is the middleware that authenticates the act_runner
// (JWT first, legacy token as fallback), loads the running task and its job,
// and attaches an *ArtifactContext to the request for downstream handlers.
func ArtifactContexter() func(next http.Handler) http.Handler {
	return func(next http.Handler) http.Handler {
		return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
			base, baseCleanUp := context.NewBaseContext(resp, req)
			defer baseCleanUp()

			ctx := &ArtifactContext{Base: base}
			ctx.AppendContextValue(artifactContextKey, ctx)

			// action task call server api with Bearer ACTIONS_RUNTIME_TOKEN
			// we should verify the ACTIONS_RUNTIME_TOKEN
			authHeader := req.Header.Get("Authorization")
			if len(authHeader) == 0 || !strings.HasPrefix(authHeader, "Bearer ") {
				ctx.Error(http.StatusUnauthorized, "Bad authorization header")
				return
			}

			// New act_runner uses jwt to authenticate
			tID, err := actions_service.ParseAuthorizationToken(req)

			var task *actions.ActionTask
			if err == nil {
				// Token parsed as a JWT: resolve the task by its ID and
				// reject tasks that are no longer running.
				task, err = actions.GetTaskByID(req.Context(), tID)
				if err != nil {
					log.Error("Error runner api getting task by ID: %v", err)
					ctx.Error(http.StatusInternalServerError, "Error runner api getting task by ID")
					return
				}
				if task.Status != actions.StatusRunning {
					log.Error("Error runner api getting task: task is not running")
					ctx.Error(http.StatusInternalServerError, "Error runner api getting task: task is not running")
					return
				}
			} else {
				// Old act_runner uses GITEA_TOKEN to authenticate
				authToken := strings.TrimPrefix(authHeader, "Bearer ")

				task, err = actions.GetRunningTaskByToken(req.Context(), authToken)
				if err != nil {
					log.Error("Error runner api getting task: %v", err)
					ctx.Error(http.StatusInternalServerError, "Error runner api getting task")
					return
				}
			}

			// Load the job so handlers can rely on task.Job being populated.
			if err := task.LoadJob(req.Context()); err != nil {
				log.Error("Error runner api getting job: %v", err)
				ctx.Error(http.StatusInternalServerError, "Error runner api getting job")
				return
			}

			ctx.ActionTask = task
			next.ServeHTTP(ctx.Resp, ctx.Req)
		})
	}
}
|
|
|
|
|
|
|
|
// artifactRoutes bundles the route prefix and the object storage backend
// shared by all artifact handlers.
type artifactRoutes struct {
	prefix string
	fs     storage.ObjectStorage
}
|
|
|
|
|
2023-07-21 04:42:01 +02:00
|
|
|
func (ar artifactRoutes) buildArtifactURL(runID int64, artifactHash, suffix string) string {
|
2023-05-19 15:37:57 +02:00
|
|
|
uploadURL := strings.TrimSuffix(setting.AppURL, "/") + strings.TrimSuffix(ar.prefix, "/") +
|
|
|
|
strings.ReplaceAll(artifactRouteBase, "{run_id}", strconv.FormatInt(runID, 10)) +
|
2023-07-21 04:42:01 +02:00
|
|
|
"/" + artifactHash + "/" + suffix
|
2023-05-19 15:37:57 +02:00
|
|
|
return uploadURL
|
|
|
|
}
|
|
|
|
|
|
|
|
// getUploadArtifactRequest is the JSON body of the "post upload url" call
// (step 1.1 in the API description above).
type getUploadArtifactRequest struct {
	Type          string
	Name          string
	RetentionDays int64
}

// getUploadArtifactResponse tells the runner where to PUT the artifact chunks.
type getUploadArtifactResponse struct {
	FileContainerResourceURL string `json:"fileContainerResourceUrl"`
}
|
|
|
|
|
|
|
|
// getUploadArtifactURL generates a URL for uploading an artifact
|
2023-05-21 03:50:53 +02:00
|
|
|
func (ar artifactRoutes) getUploadArtifactURL(ctx *ArtifactContext) {
|
2023-07-21 04:42:01 +02:00
|
|
|
_, runID, ok := validateRunID(ctx)
|
2023-05-19 15:37:57 +02:00
|
|
|
if !ok {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
var req getUploadArtifactRequest
|
|
|
|
if err := json.NewDecoder(ctx.Req.Body).Decode(&req); err != nil {
|
|
|
|
log.Error("Error decode request body: %v", err)
|
|
|
|
ctx.Error(http.StatusInternalServerError, "Error decode request body")
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-09-06 09:41:06 +02:00
|
|
|
// set retention days
|
|
|
|
retentionQuery := ""
|
|
|
|
if req.RetentionDays > 0 {
|
|
|
|
retentionQuery = fmt.Sprintf("?retentionDays=%d", req.RetentionDays)
|
|
|
|
}
|
|
|
|
|
2023-07-21 04:42:01 +02:00
|
|
|
// use md5(artifact_name) to create upload url
|
|
|
|
artifactHash := fmt.Sprintf("%x", md5.Sum([]byte(req.Name)))
|
2023-05-19 15:37:57 +02:00
|
|
|
resp := getUploadArtifactResponse{
|
2023-09-06 09:41:06 +02:00
|
|
|
FileContainerResourceURL: ar.buildArtifactURL(runID, artifactHash, "upload"+retentionQuery),
|
2023-05-19 15:37:57 +02:00
|
|
|
}
|
2023-07-21 04:42:01 +02:00
|
|
|
log.Debug("[artifact] get upload url: %s", resp.FileContainerResourceURL)
|
2023-05-19 15:37:57 +02:00
|
|
|
ctx.JSON(http.StatusOK, resp)
|
|
|
|
}
|
|
|
|
|
2023-05-21 03:50:53 +02:00
|
|
|
// uploadArtifact receives one chunk of an artifact file (step 1.2), stores it,
// and keeps the artifact row's recorded sizes in sync with what was uploaded.
func (ar artifactRoutes) uploadArtifact(ctx *ArtifactContext) {
	task, runID, ok := validateRunID(ctx)
	if !ok {
		return
	}
	artifactName, artifactPath, ok := parseArtifactItemPath(ctx)
	if !ok {
		return
	}

	// get upload file size
	fileRealTotalSize, contentLength, err := getUploadFileSize(ctx)
	if err != nil {
		log.Error("Error get upload file size: %v", err)
		ctx.Error(http.StatusInternalServerError, "Error get upload file size")
		return
	}

	// get artifact retention days; a retentionDays query parameter overrides
	// the instance-wide default
	expiredDays := setting.Actions.ArtifactRetentionDays
	if queryRetentionDays := ctx.Req.URL.Query().Get("retentionDays"); queryRetentionDays != "" {
		expiredDays, err = strconv.ParseInt(queryRetentionDays, 10, 64)
		if err != nil {
			log.Error("Error parse retention days: %v", err)
			ctx.Error(http.StatusBadRequest, "Error parse retention days")
			return
		}
	}
	log.Debug("[artifact] upload chunk, name: %s, path: %s, size: %d, retention days: %d",
		artifactName, artifactPath, fileRealTotalSize, expiredDays)

	// create or get artifact with name and path
	artifact, err := actions.CreateArtifact(ctx, task, artifactName, artifactPath, expiredDays)
	if err != nil {
		log.Error("Error create or get artifact: %v", err)
		ctx.Error(http.StatusInternalServerError, "Error create or get artifact")
		return
	}

	// save chunk to storage, if success, return chunk total size
	// if artifact is not gzip when uploading, chunksTotalSize == fileRealTotalSize
	// if artifact is gzip when uploading, chunksTotalSize < fileRealTotalSize
	chunksTotalSize, err := saveUploadChunk(ar.fs, ctx, artifact, contentLength, runID)
	if err != nil {
		log.Error("Error save upload chunk: %v", err)
		ctx.Error(http.StatusInternalServerError, "Error save upload chunk")
		return
	}

	// update artifact size if zero or not match, over write artifact size
	if artifact.FileSize == 0 ||
		artifact.FileCompressedSize == 0 ||
		artifact.FileSize != fileRealTotalSize ||
		artifact.FileCompressedSize != chunksTotalSize {
		artifact.FileSize = fileRealTotalSize
		artifact.FileCompressedSize = chunksTotalSize
		artifact.ContentEncoding = ctx.Req.Header.Get("Content-Encoding")
		if err := actions.UpdateArtifactByID(ctx, artifact.ID, artifact); err != nil {
			log.Error("Error update artifact: %v", err)
			ctx.Error(http.StatusInternalServerError, "Error update artifact")
			return
		}
		log.Debug("[artifact] update artifact size, artifact_id: %d, size: %d, compressed size: %d",
			artifact.ID, artifact.FileSize, artifact.FileCompressedSize)
	}

	ctx.JSON(http.StatusOK, map[string]string{
		"message": "success",
	})
}
|
|
|
|
|
|
|
|
// comfirmUploadArtifact comfirm upload artifact.
|
|
|
|
// if all chunks are uploaded, merge them to one file.
|
2023-05-21 03:50:53 +02:00
|
|
|
func (ar artifactRoutes) comfirmUploadArtifact(ctx *ArtifactContext) {
|
2023-07-21 04:42:01 +02:00
|
|
|
_, runID, ok := validateRunID(ctx)
|
2023-05-19 15:37:57 +02:00
|
|
|
if !ok {
|
|
|
|
return
|
|
|
|
}
|
2023-07-21 04:42:01 +02:00
|
|
|
artifactName := ctx.Req.URL.Query().Get("artifactName")
|
|
|
|
if artifactName == "" {
|
|
|
|
log.Error("Error artifact name is empty")
|
|
|
|
ctx.Error(http.StatusBadRequest, "Error artifact name is empty")
|
|
|
|
return
|
|
|
|
}
|
|
|
|
if err := mergeChunksForRun(ctx, ar.fs, runID, artifactName); err != nil {
|
|
|
|
log.Error("Error merge chunks: %v", err)
|
|
|
|
ctx.Error(http.StatusInternalServerError, "Error merge chunks")
|
2023-05-19 15:37:57 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
ctx.JSON(http.StatusOK, map[string]string{
|
|
|
|
"message": "success",
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
type (
	// listArtifactsResponse is the payload for the artifact listing call (step 2.1).
	listArtifactsResponse struct {
		Count int64                       `json:"count"`
		Value []listArtifactsResponseItem `json:"value"`
	}
	// listArtifactsResponseItem describes one artifact and where to fetch
	// its download URL list.
	listArtifactsResponseItem struct {
		Name                     string `json:"name"`
		FileContainerResourceURL string `json:"fileContainerResourceUrl"`
	}
)
|
|
|
|
|
2023-05-21 03:50:53 +02:00
|
|
|
func (ar artifactRoutes) listArtifacts(ctx *ArtifactContext) {
|
2023-07-21 04:42:01 +02:00
|
|
|
_, runID, ok := validateRunID(ctx)
|
2023-05-19 15:37:57 +02:00
|
|
|
if !ok {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-11-24 04:49:41 +01:00
|
|
|
artifacts, err := db.Find[actions.ActionArtifact](ctx, actions.FindArtifactsOptions{RunID: runID})
|
2023-05-19 15:37:57 +02:00
|
|
|
if err != nil {
|
|
|
|
log.Error("Error getting artifacts: %v", err)
|
|
|
|
ctx.Error(http.StatusInternalServerError, err.Error())
|
|
|
|
return
|
|
|
|
}
|
2023-07-21 04:42:01 +02:00
|
|
|
if len(artifacts) == 0 {
|
|
|
|
log.Debug("[artifact] handleListArtifacts, no artifacts")
|
|
|
|
ctx.Error(http.StatusNotFound)
|
|
|
|
return
|
|
|
|
}
|
2023-05-19 15:37:57 +02:00
|
|
|
|
2023-07-21 04:42:01 +02:00
|
|
|
var (
|
|
|
|
items []listArtifactsResponseItem
|
|
|
|
values = make(map[string]bool)
|
|
|
|
)
|
|
|
|
|
|
|
|
for _, art := range artifacts {
|
|
|
|
if values[art.ArtifactName] {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
artifactHash := fmt.Sprintf("%x", md5.Sum([]byte(art.ArtifactName)))
|
|
|
|
item := listArtifactsResponseItem{
|
|
|
|
Name: art.ArtifactName,
|
|
|
|
FileContainerResourceURL: ar.buildArtifactURL(runID, artifactHash, "download_url"),
|
|
|
|
}
|
|
|
|
items = append(items, item)
|
|
|
|
values[art.ArtifactName] = true
|
|
|
|
|
|
|
|
log.Debug("[artifact] handleListArtifacts, name: %s, url: %s", item.Name, item.FileContainerResourceURL)
|
2023-05-19 15:37:57 +02:00
|
|
|
}
|
2023-07-21 04:42:01 +02:00
|
|
|
|
2023-05-19 15:37:57 +02:00
|
|
|
respData := listArtifactsResponse{
|
2023-07-21 04:42:01 +02:00
|
|
|
Count: int64(len(items)),
|
|
|
|
Value: items,
|
2023-05-19 15:37:57 +02:00
|
|
|
}
|
|
|
|
ctx.JSON(http.StatusOK, respData)
|
|
|
|
}
|
|
|
|
|
|
|
|
type (
	// downloadArtifactResponse is the payload for the artifact path listing (step 2.2).
	downloadArtifactResponse struct {
		Value []downloadArtifactResponseItem `json:"value"`
	}
	// downloadArtifactResponseItem describes one downloadable file of an artifact.
	downloadArtifactResponseItem struct {
		Path            string `json:"path"`
		ItemType        string `json:"itemType"`
		ContentLocation string `json:"contentLocation"`
	}
)
|
|
|
|
|
2023-07-21 04:42:01 +02:00
|
|
|
// getDownloadArtifactURL generates download url for each artifact
// When MinIO serve-direct is enabled it returns pre-signed storage URLs,
// otherwise it falls back to URLs served by this router.
func (ar artifactRoutes) getDownloadArtifactURL(ctx *ArtifactContext) {
	_, runID, ok := validateRunID(ctx)
	if !ok {
		return
	}

	itemPath := util.PathJoinRel(ctx.Req.URL.Query().Get("itemPath"))
	if !validateArtifactHash(ctx, itemPath) {
		return
	}

	artifacts, err := db.Find[actions.ActionArtifact](ctx, actions.FindArtifactsOptions{
		RunID:        runID,
		ArtifactName: itemPath,
	})
	if err != nil {
		log.Error("Error getting artifacts: %v", err)
		ctx.Error(http.StatusInternalServerError, err.Error())
		return
	}
	if len(artifacts) == 0 {
		log.Debug("[artifact] getDownloadArtifactURL, no artifacts")
		ctx.Error(http.StatusNotFound)
		return
	}

	// itemPath was matched by name in the query above; this guards against
	// a lookup returning a different artifact than requested.
	if itemPath != artifacts[0].ArtifactName {
		log.Error("Error dismatch artifact name, itemPath: %v, artifact: %v", itemPath, artifacts[0].ArtifactName)
		ctx.Error(http.StatusBadRequest, "Error dismatch artifact name")
		return
	}

	var items []downloadArtifactResponseItem
	for _, artifact := range artifacts {
		var downloadURL string
		if setting.Actions.ArtifactStorage.MinioConfig.ServeDirect {
			// Try a pre-signed direct storage URL; an unsupported-URL error
			// is expected for some backends and only logged otherwise.
			u, err := ar.fs.URL(artifact.StoragePath, artifact.ArtifactName)
			if err != nil && !errors.Is(err, storage.ErrURLNotSupported) {
				log.Error("Error getting serve direct url: %v", err)
			}
			if u != nil {
				downloadURL = u.String()
			}
		}
		// fall back to the router-served download endpoint
		if downloadURL == "" {
			downloadURL = ar.buildArtifactURL(runID, strconv.FormatInt(artifact.ID, 10), "download")
		}
		item := downloadArtifactResponseItem{
			Path:            util.PathJoinRel(itemPath, artifact.ArtifactPath),
			ItemType:        "file",
			ContentLocation: downloadURL,
		}
		log.Debug("[artifact] getDownloadArtifactURL, path: %s, url: %s", item.Path, item.ContentLocation)
		items = append(items, item)
	}
	respData := downloadArtifactResponse{
		Value: items,
	}
	ctx.JSON(http.StatusOK, respData)
}
|
|
|
|
|
2023-07-21 04:42:01 +02:00
|
|
|
// downloadArtifact downloads artifact content
|
2023-05-21 03:50:53 +02:00
|
|
|
func (ar artifactRoutes) downloadArtifact(ctx *ArtifactContext) {
|
2023-07-21 04:42:01 +02:00
|
|
|
_, runID, ok := validateRunID(ctx)
|
2023-05-19 15:37:57 +02:00
|
|
|
if !ok {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
artifactID := ctx.ParamsInt64("artifact_id")
|
2023-12-25 21:25:29 +01:00
|
|
|
artifact, exist, err := db.GetByID[actions.ActionArtifact](ctx, artifactID)
|
|
|
|
if err != nil {
|
2023-05-19 15:37:57 +02:00
|
|
|
log.Error("Error getting artifact: %v", err)
|
|
|
|
ctx.Error(http.StatusInternalServerError, err.Error())
|
|
|
|
return
|
2023-12-25 21:25:29 +01:00
|
|
|
} else if !exist {
|
|
|
|
log.Error("artifact with ID %d does not exist", artifactID)
|
|
|
|
ctx.Error(http.StatusNotFound, fmt.Sprintf("artifact with ID %d does not exist", artifactID))
|
|
|
|
return
|
2023-05-19 15:37:57 +02:00
|
|
|
}
|
|
|
|
if artifact.RunID != runID {
|
|
|
|
log.Error("Error dismatch runID and artifactID, task: %v, artifact: %v", runID, artifactID)
|
|
|
|
ctx.Error(http.StatusBadRequest, err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
fd, err := ar.fs.Open(artifact.StoragePath)
|
|
|
|
if err != nil {
|
|
|
|
log.Error("Error opening file: %v", err)
|
|
|
|
ctx.Error(http.StatusInternalServerError, err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
defer fd.Close()
|
|
|
|
|
2023-07-21 04:42:01 +02:00
|
|
|
// if artifact is compressed, set content-encoding header to gzip
|
|
|
|
if artifact.ContentEncoding == "gzip" {
|
2023-05-19 15:37:57 +02:00
|
|
|
ctx.Resp.Header().Set("Content-Encoding", "gzip")
|
|
|
|
}
|
2023-07-21 04:42:01 +02:00
|
|
|
log.Debug("[artifact] downloadArtifact, name: %s, path: %s, storage: %s, size: %d", artifact.ArtifactName, artifact.ArtifactPath, artifact.StoragePath, artifact.FileSize)
|
2023-05-19 15:37:57 +02:00
|
|
|
ctx.ServeContent(fd, &context.ServeHeaderOptions{
|
|
|
|
Filename: artifact.ArtifactName,
|
|
|
|
LastModified: artifact.CreatedUnix.AsLocalTime(),
|
|
|
|
})
|
|
|
|
}
|