Manager: more work on Shaman integration

This commit is contained in:
Sybren A. Stüvel 2022-03-21 17:58:49 +01:00
parent 6f35b3303d
commit 2b0d154a07
22 changed files with 503 additions and 960 deletions

View File

@ -86,10 +86,10 @@ type Shaman interface {
// Requirements checks a Shaman Requirements file, and returns the subset // Requirements checks a Shaman Requirements file, and returns the subset
// containing the unknown files. // containing the unknown files.
Requirements(ctx context.Context, requirements api.ShamanRequirements) (api.ShamanRequirements, error) Requirements(ctx context.Context, requirements api.ShamanRequirementsRequest) (api.ShamanRequirementsResponse, error)
// Check the status of a file on the Shaman server. // Check the status of a file on the Shaman server.
FileStoreCheck(ctx context.Context, checksum string, filesize int64) (api.ShamanFileStatusStatus, error) FileStoreCheck(ctx context.Context, checksum string, filesize int64) (api.ShamanFileStatus, error)
// Store a new file on the Shaman server. Note that the Shaman server can // Store a new file on the Shaman server. Note that the Shaman server can
// return early when another client finishes uploading the exact same file, to // return early when another client finishes uploading the exact same file, to

View File

@ -440,10 +440,10 @@ func (mr *MockShamanMockRecorder) FileStore(arg0, arg1, arg2, arg3, arg4, arg5 i
} }
// FileStoreCheck mocks base method. // FileStoreCheck mocks base method.
func (m *MockShaman) FileStoreCheck(arg0 context.Context, arg1 string, arg2 int64) (int, error) { func (m *MockShaman) FileStoreCheck(arg0 context.Context, arg1 string, arg2 int64) (api.ShamanFileStatus, error) {
m.ctrl.T.Helper() m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "FileStoreCheck", arg0, arg1, arg2) ret := m.ctrl.Call(m, "FileStoreCheck", arg0, arg1, arg2)
ret0, _ := ret[0].(int) ret0, _ := ret[0].(api.ShamanFileStatus)
ret1, _ := ret[1].(error) ret1, _ := ret[1].(error)
return ret0, ret1 return ret0, ret1
} }
@ -455,10 +455,10 @@ func (mr *MockShamanMockRecorder) FileStoreCheck(arg0, arg1, arg2 interface{}) *
} }
// Requirements mocks base method. // Requirements mocks base method.
func (m *MockShaman) Requirements(arg0 context.Context, arg1 api.ShamanRequirements) (api.ShamanRequirements, error) { func (m *MockShaman) Requirements(arg0 context.Context, arg1 api.ShamanRequirementsRequest) (api.ShamanRequirementsResponse, error) {
m.ctrl.T.Helper() m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "Requirements", arg0, arg1) ret := m.ctrl.Call(m, "Requirements", arg0, arg1)
ret0, _ := ret[0].(api.ShamanRequirements) ret0, _ := ret[0].(api.ShamanRequirementsResponse)
ret1, _ := ret[1].(error) ret1, _ := ret[1].(error)
return ret0, ret1 return ret0, ret1
} }

View File

@ -47,7 +47,7 @@ func (f *Flamenco) ShamanCheckoutRequirements(e echo.Context) error {
return sendAPIError(e, http.StatusBadRequest, "invalid format") return sendAPIError(e, http.StatusBadRequest, "invalid format")
} }
unknownFiles, err := f.shaman.Requirements(e.Request().Context(), api.ShamanRequirements(reqBody)) unknownFiles, err := f.shaman.Requirements(e.Request().Context(), api.ShamanRequirementsRequest(reqBody))
if err != nil { if err != nil {
logger.Warn().Err(err).Msg("Shaman: checking checkout requirements file") logger.Warn().Err(err).Msg("Shaman: checking checkout requirements file")
return sendAPIError(e, http.StatusInternalServerError, "unexpected error: %v", err) return sendAPIError(e, http.StatusInternalServerError, "unexpected error: %v", err)
@ -71,11 +71,11 @@ func (f *Flamenco) ShamanFileStoreCheck(e echo.Context, checksum string, filesiz
// TODO: actually switch over the actual statuses, see the TODO in the Shaman interface. // TODO: actually switch over the actual statuses, see the TODO in the Shaman interface.
switch status { switch status {
case api.ShamanFileStatusStatusStored: case api.ShamanFileStatusStored:
return e.String(http.StatusOK, "") return e.String(http.StatusOK, "")
case api.ShamanFileStatusStatusUploading: case api.ShamanFileStatusUploading:
return e.String(420 /* Enhance Your Calm */, "") return e.String(420 /* Enhance Your Calm */, "")
case api.ShamanFileStatusStatusUnknown: case api.ShamanFileStatusUnknown:
return e.String(http.StatusNotFound, "") return e.String(http.StatusNotFound, "")
} }

View File

@ -272,13 +272,13 @@ paths:
content: content:
application/json: application/json:
schema: schema:
$ref: "#/components/schemas/ShamanRequirements" $ref: "#/components/schemas/ShamanRequirementsRequest"
responses: responses:
"200": "200":
description: Subset of the posted requirements, indicating the unknown files. description: Subset of the posted requirements, indicating the unknown files.
content: content:
application/json: application/json:
schema: {$ref: "#/components/schemas/ShamanRequirements"} schema: {$ref: "#/components/schemas/ShamanRequirementsResponse"}
default: default:
description: unexpected error description: unexpected error
content: content:
@ -307,6 +307,12 @@ paths:
responses: responses:
"204": "204":
description: Checkout was created successfully. description: Checkout was created successfully.
"424":
description: There were files missing. Use `shamanCheckoutRequirements` to figure out which ones.
content:
application/json:
schema:
$ref: '#/components/schemas/Error'
"409": "409":
description: Checkout already exists. description: Checkout already exists.
content: content:
@ -344,7 +350,12 @@ paths:
content: content:
application/json: application/json:
schema: schema:
$ref: '#/components/schemas/ShamanFileStatus' type: object
description: Status of a file in the Shaman storage.
properties:
"status": {$ref: "#/components/schemas/ShamanFileStatus"}
required: [status]
default: default:
description: unexpected error description: unexpected error
content: content:
@ -677,57 +688,80 @@ components:
properties: properties:
message: {type: string} message: {type: string}
ShamanRequirements: ShamanRequirementsRequest:
type: object type: object
description: Set of files with their SHA256 checksum and size in bytes. description: Set of files with their SHA256 checksum and size in bytes.
properties: properties:
"req": "files":
type: array type: array
items: items: {$ref: "#/components/schemas/ShamanFileSpec"}
type: object required: [files]
properties:
"c": {type: string, description: "SHA256 checksum of the file"}
"s": {type: integer, description: "File size in bytes"}
required: [c, s]
required: [req]
example: example:
req: files:
- c: 35b0491c27b0333d1fb45fc0789a12ca06b1d640d2569780b807de504d7029e0 - sha: 35b0491c27b0333d1fb45fc0789a12ca06b1d640d2569780b807de504d7029e0
s: 1424 size: 1424
- c: 63b72c63b9424fd13b9370fb60069080c3a15717cf3ad442635b187c6a895079 - sha: 63b72c63b9424fd13b9370fb60069080c3a15717cf3ad442635b187c6a895079
s: 127 size: 127
ShamanRequirementsResponse:
type: object
description: The files from a requirements request, with their status on the Shaman server. Files that are known to Shaman are excluded from the response.
properties:
"files":
type: array
items: {$ref: "#/components/schemas/ShamanFileSpecWithStatus"}
required: [files]
example:
files:
- sha: 35b0491c27b0333d1fb45fc0789a12ca06b1d640d2569780b807de504d7029e0
size: 1424
status: unknown
- sha: 63b72c63b9424fd13b9370fb60069080c3a15717cf3ad442635b187c6a895079
size: 127
status: uploading
ShamanFileSpec:
type: object
description: Specification of a file in the Shaman storage.
properties:
"sha": {type: string, description: "SHA256 checksum of the file"}
"size": {type: integer, description: "File size in bytes"}
required: [sha, size]
ShamanFileSpecWithStatus:
allOf:
- $ref: '#/components/schemas/ShamanFileSpec'
- properties:
"status": {$ref: "#/components/schemas/ShamanFileStatus"}
required: [status]
ShamanFileSpecWithPath:
allOf:
- $ref: '#/components/schemas/ShamanFileSpec'
- properties:
"path": {type: string, description: Location of the file in the checkout}
required: [path]
ShamanCheckout: ShamanCheckout:
type: object type: object
description: Set of files with their SHA256 checksum, size in bytes, and desired location in the checkout directory. description: Set of files with their SHA256 checksum, size in bytes, and desired location in the checkout directory.
properties: properties:
"req": "files":
type: array type: array
items: items: {$ref: "#/components/schemas/ShamanFileSpecWithPath"}
type: object required: [files]
properties:
"c": {type: string, description: "SHA256 checksum of the file"}
"s": {type: integer, description: "File size in bytes"}
"p": {type: string, description: "File checkout path"}
required: [c, s, p]
required: [req]
example: example:
req: files:
- c: 35b0491c27b0333d1fb45fc0789a12ca06b1d640d2569780b807de504d7029e0 - sha: 35b0491c27b0333d1fb45fc0789a12ca06b1d640d2569780b807de504d7029e0
s: 1424 size: 1424
p: definition.go path: definition.go
- c: 63b72c63b9424fd13b9370fb60069080c3a15717cf3ad442635b187c6a895079 - sha: 63b72c63b9424fd13b9370fb60069080c3a15717cf3ad442635b187c6a895079
s: 127 size: 127
p: logging.go path: logging.go
ShamanFileStatus: ShamanFileStatus:
type: object
description: Status of a file in the Shaman storage.
properties:
"status":
type: string type: string
enum: [unknown, uploading, stored] enum: [unknown, uploading, stored]
required: [status]
securitySchemes: securitySchemes:
worker_auth: worker_auth:

View File

@ -1338,6 +1338,7 @@ type ShamanCheckoutResponse struct {
Body []byte Body []byte
HTTPResponse *http.Response HTTPResponse *http.Response
JSON409 *Error JSON409 *Error
JSON424 *Error
JSONDefault *Error JSONDefault *Error
} }
@ -1360,7 +1361,7 @@ func (r ShamanCheckoutResponse) StatusCode() int {
type ShamanCheckoutRequirementsResponse struct { type ShamanCheckoutRequirementsResponse struct {
Body []byte Body []byte
HTTPResponse *http.Response HTTPResponse *http.Response
JSON200 *ShamanRequirements JSON200 *ShamanRequirementsResponse
JSONDefault *Error JSONDefault *Error
} }
@ -1383,7 +1384,9 @@ func (r ShamanCheckoutRequirementsResponse) StatusCode() int {
type ShamanFileStoreCheckResponse struct { type ShamanFileStoreCheckResponse struct {
Body []byte Body []byte
HTTPResponse *http.Response HTTPResponse *http.Response
JSON200 *ShamanFileStatus JSON200 *struct {
Status ShamanFileStatus `json:"status"`
}
JSONDefault *Error JSONDefault *Error
} }
@ -1406,7 +1409,6 @@ func (r ShamanFileStoreCheckResponse) StatusCode() int {
type ShamanFileStoreResponse struct { type ShamanFileStoreResponse struct {
Body []byte Body []byte
HTTPResponse *http.Response HTTPResponse *http.Response
JSON409 *Error
JSONDefault *Error JSONDefault *Error
} }
@ -1966,6 +1968,13 @@ func ParseShamanCheckoutResponse(rsp *http.Response) (*ShamanCheckoutResponse, e
} }
response.JSON409 = &dest response.JSON409 = &dest
case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 424:
var dest Error
if err := json.Unmarshal(bodyBytes, &dest); err != nil {
return nil, err
}
response.JSON424 = &dest
case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true:
var dest Error var dest Error
if err := json.Unmarshal(bodyBytes, &dest); err != nil { if err := json.Unmarshal(bodyBytes, &dest); err != nil {
@ -1993,7 +2002,7 @@ func ParseShamanCheckoutRequirementsResponse(rsp *http.Response) (*ShamanCheckou
switch { switch {
case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200:
var dest ShamanRequirements var dest ShamanRequirementsResponse
if err := json.Unmarshal(bodyBytes, &dest); err != nil { if err := json.Unmarshal(bodyBytes, &dest); err != nil {
return nil, err return nil, err
} }
@ -2026,7 +2035,9 @@ func ParseShamanFileStoreCheckResponse(rsp *http.Response) (*ShamanFileStoreChec
switch { switch {
case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200:
var dest ShamanFileStatus var dest struct {
Status ShamanFileStatus `json:"status"`
}
if err := json.Unmarshal(bodyBytes, &dest); err != nil { if err := json.Unmarshal(bodyBytes, &dest); err != nil {
return nil, err return nil, err
} }
@ -2058,13 +2069,6 @@ func ParseShamanFileStoreResponse(rsp *http.Response) (*ShamanFileStoreResponse,
} }
switch { switch {
case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409:
var dest Error
if err := json.Unmarshal(bodyBytes, &dest); err != nil {
return nil, err
}
response.JSON409 = &dest
case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true:
var dest Error var dest Error
if err := json.Unmarshal(bodyBytes, &dest); err != nil { if err := json.Unmarshal(bodyBytes, &dest); err != nil {

View File

@ -18,86 +18,90 @@ import (
// Base64 encoded, gzipped, json marshaled Swagger object // Base64 encoded, gzipped, json marshaled Swagger object
var swaggerSpec = []string{ var swaggerSpec = []string{
"H4sIAAAAAAAC/+w86W4cR3qvUugN4DXSc/AQKfFXuJJlU7AtwqTWASyCqu7+ZqbE6qp2VTVHY4HAPkTe", "H4sIAAAAAAAC/+Q8224cN5a/QtQskATbN11s2XpajR0nMpJYiOTJArEhsapOddNikRWSpXbHEDAfsX+y",
"JFkgP7K/8gLaNwq+OvqYriFHNilrk/iHMeyu47vv1vskl2UlBQijk6P3ic4XUFL781hrNhdQnFN9hX8X", "O8A+7DztD3j+aHFIVhWriy21Esnj2fGD0epiHR6e+439IclkWUkBwujk8EOiswWU1H480prNBeRnVF/i",
"oHPFKsOkSI56bwnThBKDv6gmzODfCnJg11CQbEXMAsiPUl2BGidpUilZgTIM7C25LEsqCvubGSjtj39S", "3znoTLHKMCmSw95TwjShxOAnqgkz+LeCDNgV5CRdEbMA8pNUl6AmySiplKxAGQZ2l0yWJRW5/cwMlPbD",
"MEuOkj9MWuAmHrLJU7chuUkTs6ogOUqoUnSFf7+VGe72j7VRTMz988tKMamYWXUWMGFgDiqscE8j2wUt", "vygoksPkD9MOuanHbPrMvZBcjxKzqiA5TKhSdIV/v5Mpvu2/1kYxMfffn1eKScXMKljAhIE5qGaF+zby",
"4y9uP1Mbauo70UH6nbmViBHVV5sBqWtW4IuZVCU1yZF7kK4vvEkTBT/XTEGRHP0UFiFxPC4NbB0U1qjU", "uqBl/MHNMLWhpr71OEi/U7cST0T15WZE6prl+KCQqqQmOXRfjNYXXo8SBb/UTEGeHP7cLELi+LO0uAVH",
"IUkXqrTl10Vzr8zeQm4QwONryjjNOLyQ2RkYg+AMJOeMiTkHot17ImeEkhcyI3iajgjIQrLc/eyf8+MC", "WKNSQJIQq1HHr7ftvjJ9B5lBBI+uKOM05fBSpqdgDKIzkJxTJuYciHbPiSwIJS9lShCajgjIQrLMfezD",
"BJmzaxAp4axkxsrZNeWswP/XoImR+EwD8YeMyUvBV6TWCCNZMrMgjmj2cry7EcEB8deFrYAZrbkZwnW+", "+WkBgszZFYgR4axkxsrZFeUsx/9r0MRI/E4D8UAm5JXgK1JrxJEsmVkQRzS7Oe7diuCA+OvClkNBa26G",
"AOJfOjiIXsil8MCQWoMiS4S9AAOqZMLev2A6kGTsju+cGb+ieTIxUnLDKn8RE+1FKI9qRnOwh0LBDKLu", "eJ0tgPiHDg+iF3IpPDKk1qDIEnHPwYAqmbD7L5huSDJx4AOY8S3ab6ZGSm5Y5TdiotsI5VEVNAMLFHJm",
"TvTwzyjXkA6JaxagEGjKuVwS3LoOKKEzg2sWQN7KjCyoJhmAILrOSmYMFGPyo6x5QVhZ8RUpgIPbxjmB", "8OgOose/oFzDaEhcswCFSFPO5ZLgq+uIEloYXLMA8k6mZEE1SQEE0XVaMmMgn5CfZM1zwsqKr0gOHNxr",
"d0y7A6m+0mQmlTv6rcxSQkWBBkSWFeO4hpnxa9EKeiYlByosRteUD+lzujILKQi8qxRozaQlfgYEV9fU", "nBN4z7QDSPWlJoVUDvQ7mY4IFTkaEFlWjOMaZiZvRCfoqZQcqLAnuqJ8SJ+TlVlIQeB9pUBrJi3xUyC4",
"QIE0kqpwCAY+gMWkz7oGroY36VA0rmA1hOGkAGHYjIHyhzQin5Ky1gbhqQX7uXaC6Jn21itC9B5UDKrm", "uqYGcqSRVLk7YMMHsCfps67Fq+XNaCgal7Aa4nCcgzCsYKA8kFbkR6SstUF8asF+qZ0geqa984oQ3QcV",
"EV04FisC74yihKp5XaKFCfKWVasxbtTjM1nCqdOt1R+/JDmyodZQ4MpcATXgUPX6t+rA0Kp4a1k+QoRY", "g6p5RBeOxIrAe6MooWpel2hhGnlLq9UEX9STU1nCidOt1ZdfkQzZUGvIcWWmgBpwR/X6twpw6FS8syx3",
"WULBqAG+IgrwKEItqgXMmGC4IUVDYK/HK1NLE1kbDxFVhuU1p6rhwwZ50HUWzOdtVjdiqM78zkbVP/qE", "ECFWlpAzaoCviAIERag9ag4FEwxfGKEhsNvjliNLE1kbjxFVhmU1p6rlwwZ50HXamM+brG7EUJ36N1tV",
"c7/9mmm2rmRG1bcRCBW3r1peHl6dOAOJxApqpcgfObsCQsmfOAgUYloUIym+HJMzMHjcG8uQN87MOH9M", "vzOEM//6FdNsXcmMqm8iECpuX7W8PLw+dgYSidWolSJfcnYJhJI/chAoxDTPx1J8NSGnYBDchWXIhTMz",
"hbMFgvLmDrOgBq+ueSG+sALZWCoQhTUgOk7oNReDCuAXbekWzlo+rXmHOhvhGycOTiECz8nTWikQhq+I", "zh9T4WyBoLzdwyyowa1rnosvrEC2lgpEbg2IjhN6zcWgAvhFW7qF045Pa96hTsf4xImDU4iG5+RZrRQI",
"RDtOw7lWwzqWXI/Jm2+Oz7756tnl85Nvv7o8PT7/5o2LUgqmIDdSrUhFzYL8M3nzOpn8wf73OnlDaFUh", "w1dEoh2nDVyrYYEl1xNy8e3R6bdfPz9/cfzd1+cnR2ffXrgoJWcKMiPVilTULMi/kos3yfQP9t+b5ILQ",
"SQuHNoi6RPxmjMMlrk/SpGAq/LSPvUddUL2A4rJdeRFR4E1CMzTwngId7DtWw7kvqsnJs6DPFm0UGi8S", "qkKS5u7YIOoSz1cwDue4PhklOVPNR/u196gLqheQn3cr30YUeJPQDA28p0Bw+sBqOPdFNTl+3uizPTYK",
"Y/K9JAI02jptVJ2bWoEmf7TuS6ekYDleRRUD/SWhCoiuq0oqs466Bz7FyGZvF5HmkpoktbJwJ5Jx7IK3", "jReJCflBEgEabZ02qs5MrUCTL6370iOSswy3ooqB/opQBUTXVSWVWT+6R36Ekc3eLh6aS2qSkZWFWw8Z",
"b+90USLT5Dsq6ByUcwHMWNWnJRroSGjAaQb840I2T8ztw81YSDOIBtbUwYuEA69z5126gdSKGPdvmTZB", "P13j7bs9XZTINPmeCjoH5VwAM1b1aYkGOhIacJoCv1vI5om5fbgZC2kG0cCaOniRcOgFe96mG0itiHH/",
"GKx0b6bbkEYhjPt1GJ/3LOIGdNsrYgiGeH2Aln9BFKCXti6LEu2CQx9lWkv0DvLawF15xOYgvRGgzusA", "jmnTCIOV7s10G9KoCeN+24nPehZxw3G7LWIHbOL1wbH8A6IAvbR1WZRoFxz6KNNaoveQ1QZuyyM2B+mt",
"XpxxnS0xjL5SSio8bD2TKaAXnQeNGaYGJWhN5zF41wCyZ7brY9A857QEkcs/g9I+WNySMtftjtuhCAu9", "AAWPG/TijAteiZ3oa6WkQmDrmUwOvei80ZhhalCC1nQew3cNIQuzWx/D5gWnJYhM/gmU9sHilpS56t64",
"XsWgeOFSL8r5y1ly9NPtEnYW4kPcdZMOCGljkZjE4AsbzbEStKFlhfYokLugBkb4JhY6schxr16dPAtu", "GYtmoderGBYvXepFOX9VJIc/3yxhp018iG9djwaEtLFITGLwgY3mWAna0LJCe9SQO6cGxvgkFjqxCLjX",
"5oXNju5IrLbN6dBUNCldXRX3jM0adyykgWbtfQ2wFzcXjkHfgaEFNdQyqihs2EX5aY/2A4zX4kyVMaOo", "r4+fN27mpc2Obkmsts3p0FS0KV1d5fd8mjXuWEwbmnX7tci+vX7rGPQ9GJpTQy2j8tyGXZSf9Gg/OPFa",
"WpHSH+bdrh6T76SyiltxeNf1OTkV6LVKifG/tVg1ajl5Q8fZOH9DhDSODiFMvgIbesI7imd5gbaCdpSc", "nKlSZhRVK1J6YN7t6gn5XiqruBWH96HPyahAr1VKjP+txapRy8kFnaST7IIIaRwdmjD5EmzoCe8pwvIC",
"VYoZIM8Vmy/QC2GMMoaSMo5QrzIF4l8y7wKlmocVTgeSM7uAnJn//q9r4B3D1hPks46PiNPJRXPRvY2A", "bQXtMDmtFDNAXig2X6AXwhhlAiVlHLFepQrEv6XeBUo1b1Y4HUhO7QJyav73f66AB4atJ8ingY+I08lF",
"BAdKc8OubeZMRY4UcEl0xcH438IRi0kxmlHmVjQ/KoohepImP9dQ2x9U5Qt23fnp/LM7foSSYd2+P6T3", "c9F3WwFpHCjNDLuymTMVGVLAJdEVB+M/C0csJsW4oMytaD9UFEP0ZJT8UkNtP1CVLdhV8NH5Zwd+jJJh",
"wP52p9RIolH38iRNltQmeaOZVCOMZHTUwf8Ac6YNKCicMR6aHFoUmHhFBYpTbS4tUfqVk47zZvnVZnPO", "3b4H0vvCfnZQaiTRONw8GSVLapO8cSHVGCMZHXXwP8KcaQMKcmeMhyaH5jkmXlGB4lSbc0uUfuUkcN4s",
"qUEliXt3OTNLqja4/q1016HUqm/jai+bKkjfld5ZKPhNVZuGFmlD1G71JhAjTXIXGlsok3UqdyizAaOY", "u9xszjk1qCRx7y4Ls6Rqg+vfSnfdkTr1bV3teVsF6bvSWwsFv6tq09Ji1BI1rN40xBglmQuNLZbJOpUD",
"TT+DvFbMrDb4u62d2G3e62xBSyqeLiC/knWkmIIJjZwRK4yuYGMWwBQ5++Z499EByXGjrsuUaPaLjX+z", "ymw4Ucymn0JWK2ZWG/zd1k7sJu91uqAlFc8WkF3KOlJMwYRGFsQKoyvYmAUwRU6/Pdp99Jhk+KKuyxHR",
"lQHtwscCNIJAuMydgfE5Ve5va3OBNXOj4Gfrw/LkKNl7lE33n+zku4fZdG9vr9iZZfuPZvn08PETurOb", "7Fcb/6YrA9qFjzloRIFwmTkD43OqzO/W5QJr5saJPnoxG8kfJl2aOplLJOGCJofJ3qN0tv90J9s9SGd7",
"0+lBtlMc7E+L3UcHTw4fT7PH08MCHk33i8Pp7hOYIpfQnDeJ7ngukQnJ0c7+7j46P7zlYC873M0P9rIn", "e3v5TpHuPyqy2cGTp3RnN6Ozx+lO/nh/lu8+evz04MksfTI7yOHRbD8/mO0+hRkCYr9Ccrizv7tv3aDb",
"+7v7s2JnL3uydzidZQfT6cGT6eNpvkd3Hh3uHOazPVrs7+8e7D3Kdh4f5gf08ZNH08Mn/hYu53NMlJor", "jcv5HNOdYKvHe+nBbvZ4L326v7tf5Dt76dO9g1mRPp7NHj+dPZlle3Tn0cHOQVbs0Xx/f/fx3qN058lB",
"dg+t8e8zzqLXkeA17xthQp/YwWciU6JliuEJzxnv0N2nT0MN3bCxx9tIQLUeNFnkEY6YxN0ayiJpNosp", "9pg+efpodvC022r3wLqDPiv9gbcMEB3PXjAOpxVkPzGzOEHEbwsT1w3KuhA0ACNCUEHGCua5aPNhBNbw",
"AtMa+DUa2ecuWUfCBGFzO4k2UtE5DONWPXAYtbgScimsK+eSFs684wFQROzxGgbB6W9E4ge3uAwV/1+l", "071OtJGKzmEYGloSDmD2BaiJAxBu1PFb6q0DQZT7khcJ99bIgNh4eLfToiXu9rFVn5bD6Kry4NZyDNlR",
"b1a/emx5WBW6f3X53bTjXoX8/gS8G5JHE/bWgrX1XczPQv6xJgBlJ9h8uPDNv9j78G/k73/58NcPf/vw", "t6HDur7carNc3mvla3iMzjPe00G2s+cBGG/T1/kRRkmD1YEjr8WlkEthQywuae7cLkod5FE/6YD96Pay",
"Hx/++ve/fPjPD3/78O/d9sbRo2k/+fe3XOZlkRwl7/2fN+hJF7W4ukR2JEd7iJNRNDeXtC6YDKEfMtda", "NbkfnS/+zWbOmrWevG20XA9koj57c/S7zVCfX7qSQkO8Au+4VShZEkpU8BrxMdcoZKWTMyL7NgvUFagJ",
"saNkouzOiZ5N3soMAwkQsLO7N7ZHdkP60++/xj8rnRzt7qfJDJNMnRwlO6MdlHFW0jnoS6kur1kBEnlv", "eWFB2XocVUCsoGHy65fhd/A+43UOudsQYSiP3aeUgS6MavXhYcQi3KhVtwdwXUHv7TdKTZjMRktdXfTQ",
"nyRpImtT1cYVl+CdAeHytmRcWdvgILh0q/oguUsaoDryoRmyauQRH7ktyUAduny8I+dp8otte2ZNdRSZ", "dUaQuU3mvsa/MkjTHi7x8Q/2Pv4H+dufP/7l418//tfHv/ztzx//++NfP/5n2Bg8fDTrl838LudZmSeH",
"E2mgddh1V7oVlnaqt7fbSR9U+a5WA1VMNzotuo+I65sIvgm5MQZrI/xIvO5j/VjMjTC8spldpFjYvCO2", "yQf/5zXGoItaXJ47Fu7hmYyimTmndc5kkzQhIX2cM1X2zakupu9kikwCATu7exMLMkyGT374Bv+sdHKI",
"gCwMJlnUl0pQR11O6HoQzk+9rqfT3QPC5dz7LNu9ZeYL7QsuvtexFtd3wvY+DC8FjDgTvtwvCpbjhcsF", "IlgoWiLTk53xDoonK+kc9LlU51csB4nO136TjBJZm6o2riwL7w0IV/FIJpW13g6Dc7eqj5LbpEUqkBDN",
"xRPzpmy7sPVVzP6Cb7EXj8nLa1BLtA2aVAqumaw1XzlcwqVNphmzqlxGOo3fyjlBoDrtJbwtJUvGOeak", "kFVjf/CxeyUZSGbIx1uqBW1mvm23ue0rIHMireeAXbcVKpqlQd/jZi/v0xHfD26xiulG0Ny+Q0bc5r5t",
"odqLQFtS2AuBKs5cjWkY3PdkYdvGbizRdNxxuZSiJl66+fWZEOQKTPzVb8xo1gMOd1MvGYle0UlmLjbS", "sorZS5cbRzJdnyXHvDDi8NrWRCJWvH1GbOtFGJKuCPVFRtRRV01x3TtnwN/Us9nuY8LlXLtQ1M49MPOF",
"44zNxcuPpURIbi43V7TuHe1OYrYB2wFUt2BtqIGnCyrmMETdaexlayg+KoONhoedw7YCqtgE1T3AcgcE", "9qVK3yVcy4iDhLePwysBY86Eb5SJHMNcIMsFRYhZ2/BY2M4EE/PWp9iNJ+TVFagl2gZNKgVXTNaar9xZ",
"faOrDVXGBcR0Sa9sWqw5QIXBh01T00QvalO4ANqA9qvlbIaWIB4/a5/oniHUDr2lBeCS1ujjB4VDDQp5", "mk3bGk0ssuVyHgsH5wSRChqzuBs6NM5JCm2fBJG2pLAbAlWcuersMC3uycK2IxGxEo3jjqtCKGriRc/f",
"j+YWTZhbTE6epaSiWi+lKsIrpx1uTIFQE5aqjtqjnbH0sh02qlneGp6FMVVygzAyMZOuyiwMzU1b2G0K", "XkOATIGJP/qdtYD1yNPt1Evjo1sEZYC3G+lxyubi1V0p0ZQFzjfXgu/92EFJY8NpB1jdcGpDDTxbUDGH",
"wOQcKCpfrbjfqY8mk1kIz5icDOt5P7j+4XOqSlK6FgI5Pj1J0oSzHISGzj1fn357vTc4f7lcjueixmht", "4dGdxp53huJOtZ9onhAA2wqpfBNW94DLLRj0ja42VBmXstAlvbQFJc0BKgw+bIFnlOhFbXKX4hjQfrUs",
"4vfoybzio73xdAxivDClK7Qxw3vQ+uuSTh062RlPx1NcLSsQtGIY2tlH6BvNwnJmQitmIy0rk1JbUqBk", "CrQEEdvqlMWWiE4Ra3e8pUXgnNaxNPO1BoW8R3OLJswtJsfPR6SiWi+lyptHTjvcgA+hplmqArVHO2Pp",
"WmKeFK6HWDLjSrpe0v8ki1UgHwi7h1YVZ64yMHmrndVwcnuXVPfr1zcDqtr+lvRhctIVeowerRboSiKl", "ZXvTVLOsMzwLY6rkGnFkopCuPyMMzUzXEmlbJ+QMKCpfrbh/Ux9Op0UTnjE5HVbCf3Sd9xdUlaR0zTdy",
"8Kbd6fTeILsFoCXVRNd5DnpWc74ibrrCjkJ4l33NippyN5AxXhtxuRfoXCEpAp99QUKdyKpkXZZUrRpm", "dHKcjBLOMvA5g9/nm5PvrvYG8JfL5WQuaozWpv4dPZ1XfLw3mU1ATBamdCVqZngPW79dEnRwkp3JbDLD",
"EkoELG0LDH15I0W+79VpFFm3TTFqtJ0pnVz0jnsRGuluLgREUUkmjMW3Ea1J4x3mEJGvr8E03boHZOaw", "1bICQSuGoZ39amSzcsuZKa2YjbSsTEqXh6JkWmIe5677XjLjmiFe0v8o81VDPhD2HVpV3Fdjpu+0sxpO",
"NRghXbOobQ+uEfBrMIQPWoi2u2ZT+36H9RbStVc15H/bzm316Pf+rcwuWXGzkYTPweQLp6HdBt1P7xOG", "bm+Nv3udn+sBVW1nWPowOQmFHqNHqwUu57Fn2J3N7g2zGxBaUk10nWWgi5rzFXFzSXaIyLvsK5bXlLtR",
"WPkKkbc87rCBIqUdOt5VXL34fZTOWu0+Oyzm9gWhmZtwsbzbQm7dJlF421ki5IHsndBnk8z+uWnjPRgp", "psnacNi9YOdKsBH87APSVFitStZlSdWqZSahRMDSNo/Rl7dS5DvGQYvVum2KUaPt6erkbQ/cy2YExU1U",
"1puREbII5BQnAYSIsCJBGgnzeDXTQd81biMQCzPUNWK58MG1nmrtx5qMdDUa9xfTmFjUFE0hba/ToK4x", "gcgryYSx521Fa9p6hzlE5OsbMG2f+wGZOWyqR0jXLuoa62sE/AYM4YPmu+1L25S+P5twA+m6rVryv+sm",
"9A9kdf56onzLY7RsOx5R1xN6I74z8jD+J5I6RAjdpn8B+k/qigZdom1k4RP6nFrAuwpyAwUBv6YrQgF8", "Hnv0+/BOpucsv95IwhdgsoXT0LC1/fOHhOGp/GiKtzwO2ECRRgEdb2tLvP37KJ212n122JPbB4SmbjbM",
"73iWgZ9B6vyDi8gmxxKU2HanXpcozeZiJGezW6IYTIVms6G67g8j0s+PkD6ktia9F0z/dIHGuKXZd1Rd", "8m4LuXUvidzbzhIxb8gehD6bZPZPbQP8wUix3saPkEUgp3hb9okIKxKklTB/rnau7vvWbTTEwgx1jVgu",
"daNoqkkI1u+g9lPKfUM56Dum8d6AhMDgStjJOlh9oYDMpZs4tseP4ywRd3BEPKhS+ys2q3NTj/uUujzM", "fHBN21r7gUAjXQXS/cU0JhY1RVNIu+18Kaslq/PXU+WbheNl1yuMup6mq+h7ig/jfyKpQ4TQXfrXYP9J",
"Uv8hlHlrGTyuzQKEcUUrXxpDaQhNnGUzdHTPAqmAFitchee5rmWvXMdahg/F1fhqYNTfd1iW/N6SYSEl", "XdGgv7qNLHxCn1MLeF9BZiAn4NeEItSg7x3PsuFnI3X+i7eRl1Tbg+je1OsSpdlcjGVR3BDFYCpUFEN1",
"uX1P2tLDTbrJmJHNOz5vkfp48XAhyTKMAi9AgRvXXW0gQlwORnmnUBM1XpGizoMasu5FEfJ+37hGh+cW", "3R9GpJ8fIX1IbU16L5j++S0a445m31N1GUbRVJMmWL+F2s8o96MYjb5jGu8NSBMYXAo7kwqrLxSQuXSz",
"9ux/l9/z9tzzzRFhTM4xNs3tRxOZHfGlORoMDoWL912x3tuStnnQk5WUSIWWK1Al2BdQIy5zyq1po1zf", "+hb8JM4ScQtHxIMqtd9iszq39bhPqcvDLPUfQpm3lsGj2ixAGFe08qUxlIamd7hsx/XuWSAV0HyFqxCe",
"tz27hh42tR6IqvGfkm1wr/kCiprDuRthebi8uvthW4Sx9pO2bkFhk6H6XvqvV/qD6Da/CHOqN2myP927", "6/f3ynWsY/hQXI2vBkb9fcCy5O8tGRZTktnnpCs9XI82GTOy+Y3PW6TuLh4uJFk2Q/QLUOAG3VcbiBCX",
"v9JTbyYnAvwpqFDbeAaCOaO5P30S+X7KCSDTREgTPJ3rajlxSomW4bX9CAh6A7kOddvJJUIuHaq7e5/W", "g3EWFGqixitS1HlQQxZuFCHvD61rdOfcwp79//J73p57vjkiTMgZxqaZvW6U2uF4mqHB4JC7eN8V670t",
"tQQtogKhlJmhTNiw20KXkqw2bm5+Lu23TEJaO+u07SM19qU7nTbnd6hxlypZmdJewFWk7NTRkMl720fw", "6ZoHPVkZEanQcjVUaewLqDGXGeXWtFGu79ueXUHvNLUeiKrxlzA3uNdsAXnN4cwNfz1cXh1eCY0w1l4G",
"5ZO4rnT6gdtUUPyBv72Ecv/uooPJJl308RATDsRQw/hob3G+gHDW0prWHKrgUaMqcu77k9Yje6vRFSPH", "DQsKmwzVD9Lf++pf4bD5RTPhfT1K9md791d66k2zRZA/AdXUNp6DYM5o7s+eRm4eOgFkmghpGk/nulpO",
"NKsnpn+21Znu+f8obulV2yp2vVKzqlhuyyTdzm6l5FyB1qmf+PWfcCkyo4zXCu70LcGjaBBFrxqG5A6n", "nEZEy+axvT4HvVF2d3TbySVCLt1Rd/c+rWtptIgKxFKmhjJhw26L3YiktXE3TubS3gIU0tpZp2131NhX",
"oxXDiMipibbzS5MwRjZxM8+T9+HBybNbFGZt0HAbpWnPvVVvPpGerCEQ4XxvdqsJLWVtxr9OXcJdVqD9", "Djpt4QfUuE2VrExpL+AqUnYKNGT6wfYRfPkkritBP3CbCooH+PtLKPfvLoKTbNJFHw8x4VBsahh39hZn",
"gHm3hdDVm4eV6QYSyl3GZL8e1ePPqWD01H1B2fnCzOVzelVyJlyJIzDB88h1XgxGfeAfWV9FOScLeg3u", "C2hgLa1pzaBqPGpURc58f9J6ZG81QjFyTLN6Yvqwrc6E8P9R3NLrrlXseqVmVbHMlknCzm6l5FyB1iM/",
"a1o3BegMj58mzGBmh+0p5803ua1LaTXPacya5p15gCjRXYGxwPTGY6kCGtc8tTZDuI3K9eYOH1JFehdt", "K+8vPypSUMZrBbf6lsajaBB5rxqG5G6goxXDiMipibZjL9NmkG/qbgtMPzRfHD+/QWHWRnS3UZoO7o16",
"qyaftJyyJZx15hmEXEESQ0G6lE+DOXYyAMRPjjrUPi/lsMORhAYB7uJuwQ3ffFdSGU2WC5YvPIeoahC7", "84n0ZO0AEc73xu/a0FLWZvLb1KXZywq0v5oRthBCvXlYmW4xodxlTPbetfYuZ//hETizcfkS/3PktT5W",
"U7SPMUrFa3LOMFnq5Nf9A9uAXQrfp1fX4bNDt1gKv1YbxnkLQkcf7Hne/+i6vJm8t0/YL3DjtAGJozcp", "zCfktQZyoXu8CWfvLpARBZvXCogl5YJlCyIF6MnnVO165i5OBxdLXTKqVyVn4tIP6jkJ8hRwbSODIWtL",
"hpsmlgqeegFc80hbT5naf7dgg/vSdflRzisd/msNv0D3st7Ab+TWQIFtbm1nWy8eXNs6s9uxakS/jvD5", "FHS0lHOyoFfgLtG7qTdnNf0YYAqFvWNDOW+v4nf+sDMbjqhrZuPUI0SJDqXdItObiqcKaNxshDOO29qL",
"6U53dq+dMZf9GXPXu3IfbA/V5DYj3cji/20xTGMJgLclIfTV7rN5Zmx3sYAZKNIM6jtXbKlhnfrrZHf6", "kKXJQ+p3bM52W1X/pCWhG8ZMY/jWqecXMgkpDnlv2HTUuBYnEkD8XKY74uelK3aMmdBGnkMaWHSbX36o",
"+HXSlmLs5KFNVQVfkQxDAlMrTCvst/oterqJddy4hrPs3kX3GO6SXMq1dGdoWYIUQIBre047fRkD00qL", "pDLaa7zjFFXtwW6V9COMuHGbjDNM/IJaQR9gl3z4qVzXw3BYdPbGXY82jPMOhUA9LDzvS3VdXk8/2G/Y",
"JeACaGFbXJ6E/zpy14yeUjF6hniOXtkDkggNO/8yQIyGUrE5E5TbO/H8MTmZ+fFOLrvjoD4uSJHAYUyT", "r3DtlAOJozfpiZs1lwqeeUFc865b31qwv16ywRXruryTIx4Nf7PlV1i/GtDOn0d2bSiwza7dRYnf22pc",
"iZzXhftXO1pjbSc2U2+tkReU2RUFZLX7DGYL3F56wEbPPWDJvYT2MjdgRtoooHY+sxmMT15ggGfDLLfT", "w9kPXN/5usj93isYzOhEKjr9Wsznp7Ph/GNHz+gou/u5iKF63uQrWh345xb/USyJ8jasSR/8JQFmbIc2",
"qbNYeRWKZMBeJL7QzZaHjOd3p4/jebAVywVt5bIXnI7J99KmjdT/kx2WISiTGTg+e/n2ctcXTM/XSskc", "hwIUaefjXURgqWFjizfJ7uzJm6QrZ9npTZvuC74iKUYmplaYmtlfCumOp9t40Y28tBcSBgx3hQLKtXQw",
"tKVIBiim4XTnl99slMgjgkR442acnLJ2hQkF4f9TlU4GgJbe17E2+5EeR9deWvbOpMpZxlck51K7ksE3", "tCxBCiDAtYXTTbDG0LTSYgm4AJrbNqEn4b+P3TbjZ1SMn+M5x68tgCRCw+B3SWI0lIrNmaDc7onwJ+S4",
"5+enKKYC7CeljumhWuIN6YwJphege+YICLyjuSGaluADQiPtKDhuKWSNsZrboDd6uG4VA1e2Mf4QO18W", "8COyXIYjte3FDWbaUVcm/MULFjoJO/U68l4CeUGZXZFDWrvre1uc7ZVHbPzCI5bcS3okMwNmrI0Camdc",
"wd/Oz7mxxknSaeMM/kmf/tDOYAiNGQ18Nm7NjB1NGVrEFzILXUZb7vi5BsVAp53BtHRtzmfcm4bSkUOP", "28sFyUuMM22059506ixWXoWi4bt99oVuX3nInGh39mTzXRv7A1ONXPZi5An5QdrUm/ofDLIMQZlMwfHZ",
"T0/6o3HdJpMsy1r4mXy0tOugd4731ZqIC3b0Oz49Se1FVnJaHnqErF3Bv9/KrEklded8z6+bi5v/CQAA", "y7eXu75ger5WSmagLUVSQDFtoLt44GKjRB4SJMKFmxNzyhoKEwrCpjLJjenZzkGUFsrXYDCTKqnJFiQF",
"//+lKaE0fE4AAA==", "swTobRrMJjUDS35qwxHAXiKTamBK26zDC8Tn44isg/AlxM3upycIaw+tVBRSZSzlK5JxqV215tuzsxOU",
"bgH2HryTlaZQ5e1vwQTTC9A9KwYE3tPMEE1L8PGrkXYKH1/JZY2hpXtBb3SMYQEJV3apyfB0viKFn517",
"dBOl0yTooA1+h6w/LzWY/2NGAy8mnXWyU0FDQ/pSpk2D11aafqlBMdCjYCZwtDZiNekNoukI0KOT4/5U",
"Ytjfk2VZC38dAg30OuoBeF8oi3huR7+jk+OR3chKTsdDfyBrjvDvdzJtE2EdwPf8un57/X8BAAD//wca",
"4NAxUwAA",
} }
// GetSwagger returns the content of the embedded swagger specification file // GetSwagger returns the content of the embedded swagger specification file

View File

@ -66,13 +66,13 @@ const (
JobStatusWaitingForFiles JobStatus = "waiting-for-files" JobStatusWaitingForFiles JobStatus = "waiting-for-files"
) )
// Defines values for ShamanFileStatusStatus. // Defines values for ShamanFileStatus.
const ( const (
ShamanFileStatusStatusStored ShamanFileStatusStatus = "stored" ShamanFileStatusStored ShamanFileStatus = "stored"
ShamanFileStatusStatusUnknown ShamanFileStatusStatus = "unknown" ShamanFileStatusUnknown ShamanFileStatus = "unknown"
ShamanFileStatusStatusUploading ShamanFileStatusStatus = "uploading" ShamanFileStatusUploading ShamanFileStatus = "uploading"
) )
// Defines values for TaskStatus. // Defines values for TaskStatus.
@ -244,35 +244,46 @@ type SecurityError struct {
// Set of files with their SHA256 checksum, size in bytes, and desired location in the checkout directory. // Set of files with their SHA256 checksum, size in bytes, and desired location in the checkout directory.
type ShamanCheckout struct { type ShamanCheckout struct {
Req []struct { Files []ShamanFileSpecWithPath `json:"files"`
// SHA256 checksum of the file }
C string `json:"c"`
// File checkout path // Specification of a file in the Shaman storage.
P string `json:"p"` type ShamanFileSpec struct {
// SHA256 checksum of the file
Sha string `json:"sha"`
// File size in bytes // File size in bytes
S int `json:"s"` Size int `json:"size"`
} `json:"req"`
} }
// Status of a file in the Shaman storage. // ShamanFileSpecWithPath defines model for ShamanFileSpecWithPath.
type ShamanFileStatus struct { type ShamanFileSpecWithPath struct {
Status ShamanFileStatusStatus `json:"status"` // Embedded struct due to allOf(#/components/schemas/ShamanFileSpec)
ShamanFileSpec `yaml:",inline"`
// Embedded fields due to inline allOf schema
// Location of the file in the checkout
Path string `json:"path"`
} }
// ShamanFileStatusStatus defines model for ShamanFileStatus.Status. // ShamanFileSpecWithStatus defines model for ShamanFileSpecWithStatus.
type ShamanFileStatusStatus string type ShamanFileSpecWithStatus struct {
// Embedded struct due to allOf(#/components/schemas/ShamanFileSpec)
ShamanFileSpec `yaml:",inline"`
// Embedded fields due to inline allOf schema
Status ShamanFileStatus `json:"status"`
}
// ShamanFileStatus defines model for ShamanFileStatus.
type ShamanFileStatus string
// Set of files with their SHA256 checksum and size in bytes. // Set of files with their SHA256 checksum and size in bytes.
type ShamanRequirements struct { type ShamanRequirementsRequest struct {
Req []struct { Files []ShamanFileSpec `json:"files"`
// SHA256 checksum of the file }
C string `json:"c"`
// File size in bytes // The files from a requirements request, with their status on the Shaman server. Files that are known to Shaman are excluded from the response.
S int `json:"s"` type ShamanRequirementsResponse struct {
} `json:"req"` Files []ShamanFileSpecWithStatus `json:"files"`
} }
// Job definition submitted to Flamenco. // Job definition submitted to Flamenco.
@ -345,7 +356,7 @@ type TaskUpdateJSONBody TaskUpdate
type ShamanCheckoutJSONBody ShamanCheckout type ShamanCheckoutJSONBody ShamanCheckout
// ShamanCheckoutRequirementsJSONBody defines parameters for ShamanCheckoutRequirements. // ShamanCheckoutRequirementsJSONBody defines parameters for ShamanCheckoutRequirements.
type ShamanCheckoutRequirementsJSONBody ShamanRequirements type ShamanCheckoutRequirementsJSONBody ShamanRequirementsRequest
// ShamanFileStoreParams defines parameters for ShamanFileStore. // ShamanFileStoreParams defines parameters for ShamanFileStore.
type ShamanFileStoreParams struct { type ShamanFileStoreParams struct {

View File

@ -0,0 +1,52 @@
package checkout
// SPDX-License-Identifier: GPL-3.0-or-later
import (
"context"
"errors"
"fmt"
"git.blender.org/flamenco/pkg/api"
"git.blender.org/flamenco/pkg/shaman/filestore"
"github.com/rs/zerolog"
)
var (
	// ErrMissingFiles is returned by Checkout when the request references files
	// that are not (yet) stored on this Shaman server; the client should upload
	// them and retry.
	ErrMissingFiles = errors.New("unknown files requested in checkout")
)
// Checkout creates the checkout directory identified by checkoutID and fills
// it with symlinks to the stored blobs for every file in the request.
//
// It returns ErrMissingFiles when any requested file is not fully stored yet;
// in that case (and on any other error) the partially created checkout
// directory is erased again before returning.
func (m *Manager) Checkout(ctx context.Context, checkoutID string, checkout api.ShamanCheckout) error {
	logger := *zerolog.Ctx(ctx)
	logger.Debug().Msg("shaman: user requested checkout creation")

	// Set up the checkout directory itself.
	checkoutInfo, err := m.PrepareCheckout(checkoutID)
	if err != nil {
		return err
	}

	// From here on, any failure must clean up the directory we just created.
	checkoutOK := false
	defer func() {
		if !checkoutOK {
			m.EraseCheckout(checkoutID)
		}
	}()

	for _, spec := range checkout.Files {
		blobPath, status := m.fileStore.ResolveFile(spec.Sha, int64(spec.Size), filestore.ResolveStoredOnly)
		if status != filestore.StatusStored {
			// The caller has to upload this file before the checkout can be created.
			return ErrMissingFiles
		}

		if err := m.SymlinkToCheckout(blobPath, checkoutInfo.absolutePath, spec.Path); err != nil {
			return fmt.Errorf("symlinking %q to checkout: %w", spec.Path, err)
		}
	}

	// Success: keep the deferred cleanup from erasing the checkout directory.
	checkoutOK = true
	logger.Info().Msg("shaman: checkout created")
	return nil
}

View File

@ -1,168 +0,0 @@
/* (c) 2019, Blender Foundation - Sybren A. Stüvel
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package checkout
import (
"bufio"
"context"
"fmt"
"io"
"path"
"regexp"
"strconv"
"strings"
"github.com/sirupsen/logrus"
)
/* Checkout Definition files contain a line for each to-be-checked-out file.
* Each line consists of three fields: checksum, file size, path in the checkout.
*/
// FileInvalidError is returned when there is an invalid line in a checkout definition file.
type FileInvalidError struct {
	lineNumber int // base-1 line number that's bad
	innerErr   error
	reason     string
}

// Error returns a human-readable description of the invalid line.
func (cfie FileInvalidError) Error() string {
	return fmt.Sprintf("invalid line %d: %s", cfie.lineNumber, cfie.reason)
}

// Unwrap exposes the underlying error (if any), so that callers can inspect
// the cause with errors.Is() / errors.As(). Without this, the innerErr field
// was stored but unreachable from outside the package.
func (cfie FileInvalidError) Unwrap() error {
	return cfie.innerErr
}
// DefinitionLine is a single line in a checkout definition file.
// The fields correspond to the three space-separated parts of such a line.
type DefinitionLine struct {
	Checksum string // hex checksum of the file contents
	FileSize int64  // file size in bytes
	FilePath string // relative path of the file inside the checkout
}

// DefinitionReader reads and parses a checkout definition
// line by line; parsed lines are delivered via the channel returned by Read().
type DefinitionReader struct {
	ctx     context.Context
	channel chan *DefinitionLine
	reader  *bufio.Reader

	Err        error // set when reading/parsing aborts; nil otherwise
	LineNumber int   // base-1 number of the last line read
}

var (
	// This is a wider range than used in SHA256 sums, but there is no harm in accepting a few more ASCII letters.
	validChecksumRegexp = regexp.MustCompile("^[a-zA-Z0-9]{16,}$")
)

// NewDefinitionReader creates a new DefinitionReader for the given reader.
// The context is checked between lines, so parsing can be aborted early.
func NewDefinitionReader(ctx context.Context, reader io.Reader) *DefinitionReader {
	return &DefinitionReader{
		ctx:     ctx,
		channel: make(chan *DefinitionLine),
		reader:  bufio.NewReader(reader),
	}
}
// Read spins up a new goroutine for parsing the checkout definition.
// The returned channel will receive definition lines.
// The channel is closed when the input is exhausted, when the context is
// done, or when a line fails to parse; in the latter two cases fr.Err is set
// before the channel closes.
func (fr *DefinitionReader) Read() <-chan *DefinitionLine {
	go func() {
		defer close(fr.channel)
		defer logrus.Debug("done reading request")

		for {
			line, err := fr.reader.ReadString('\n')
			if err != nil && err != io.EOF {
				// Genuine I/O error; report it with the current line number.
				fr.Err = FileInvalidError{
					lineNumber: fr.LineNumber,
					innerErr:   err,
					reason:     fmt.Sprintf("I/O error: %v", err),
				}
				return
			}
			if err == io.EOF && line == "" {
				// Clean end of input: nothing left to parse.
				return
			}

			// Context cancellation is only noticed here, after the blocking
			// read above has returned.
			if contextError := fr.ctx.Err(); contextError != nil {
				fr.Err = fr.ctx.Err()
				return
			}

			fr.LineNumber++
			logrus.WithFields(logrus.Fields{
				"line":   line,
				"number": fr.LineNumber,
			}).Debug("read line")

			line = strings.TrimSpace(line)
			if line == "" {
				// Skip blank lines.
				continue
			}

			definitionLine, err := fr.parseLine(line)
			if err != nil {
				fr.Err = err
				return
			}

			// NOTE(review): this send blocks until the consumer receives; if
			// the consumer stops reading, this goroutine only exits once
			// context cancellation is noticed on the next iteration — confirm
			// callers always cancel the context when abandoning the channel.
			fr.channel <- definitionLine
		}
	}()

	return fr.channel
}
// parseLine parses a single, already-trimmed line of a checkout definition
// into its checksum, file size, and relative file path. A FileInvalidError
// is returned when the line does not consist of three space-separated parts,
// when the checksum or file size is malformed, or when the path is absolute
// or not clean.
func (fr *DefinitionReader) parseLine(line string) (*DefinitionLine, error) {
	fields := strings.SplitN(strings.TrimSpace(line), " ", 3)
	if len(fields) != 3 {
		return nil, FileInvalidError{
			lineNumber: fr.LineNumber,
			reason: fmt.Sprintf("line should consist of three space-separated parts, not %d: %v",
				len(fields), line),
		}
	}

	checksum, sizeField, pathField := fields[0], fields[1], fields[2]

	if !validChecksumRegexp.MatchString(checksum) {
		return nil, FileInvalidError{fr.LineNumber, nil, "invalid checksum"}
	}

	fileSize, err := strconv.ParseInt(sizeField, 10, 64)
	if err != nil {
		return nil, FileInvalidError{fr.LineNumber, err, "invalid file size"}
	}

	filePath := strings.TrimSpace(pathField)
	switch {
	case path.IsAbs(filePath):
		return nil, FileInvalidError{fr.LineNumber, err, "no absolute paths allowed"}
	case filePath != path.Clean(filePath), strings.Contains(filePath, ".."):
		return nil, FileInvalidError{fr.LineNumber, err, "paths must be clean and not have any .. in them."}
	}

	return &DefinitionLine{
		Checksum: checksum,
		FileSize: fileSize,
		FilePath: filePath,
	}, nil
}

View File

@ -1,86 +0,0 @@
/* (c) 2019, Blender Foundation - Sybren A. Stüvel
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package checkout
import (
"bytes"
"context"
"os"
"testing"
"github.com/stretchr/testify/assert"
)
// TestDefinitionReader checks the happy flow of reading a checkout
// definition file, and that cancelling the context aborts the reader.
func TestDefinitionReader(t *testing.T) {
	file, err := os.Open("definition_test_example.txt")
	if err != nil {
		panic(err)
	}
	defer file.Close()

	ctx, cancelFunc := context.WithCancel(context.Background())
	reader := NewDefinitionReader(ctx, file)
	readChan := reader.Read()

	// First line of the example file.
	line := <-readChan
	assert.Equal(t, "35b0491c27b0333d1fb45fc0789a12ca06b1d640d2569780b807de504d7029e0", line.Checksum)
	assert.Equal(t, int64(1424), line.FileSize)
	assert.Equal(t, "definition.go", line.FilePath)

	// Second line of the example file.
	line = <-readChan
	assert.Equal(t, "63b72c63b9424fd13b9370fb60069080c3a15717cf3ad442635b187c6a895079", line.Checksum)
	assert.Equal(t, int64(127), line.FileSize)
	assert.Equal(t, "logging.go", line.FilePath)
	assert.Nil(t, reader.Err)

	// Cancelling is only found out after the next read.
	cancelFunc()
	line = <-readChan
	assert.Nil(t, line)
	assert.Equal(t, context.Canceled, reader.Err)
	assert.Equal(t, 2, reader.LineNumber)
}
// TestDefinitionReaderBadRequests verifies that malformed definition lines
// (invalid checksums, absolute paths, or non-clean paths) are rejected.
func TestDefinitionReaderBadRequests(t *testing.T) {
	ctx := context.Background()

	cases := []struct {
		checksum string
		path     string
	}{
		{"35b0491c27b0333d1fb45fc0789a12c", "/etc/passwd"},                  // absolute
		{"35b0491c27b0333d1fb45fc0789a12c", "../../../../../../etc/passwd"}, // ../ in there that path.Clean() will keep
		{"35b0491c27b0333d1fb45fc0789a12c", "some/path/../etc/passwd"},      // ../ in there that path.Clean() will remove
		{"35b", "some/path"},                             // checksum way too short
		{"35b0491c.7b0333d1fb45fc0789a12c", "some/path"}, // checksum invalid
		{"35b0491c/7b0333d1fb45fc0789a12c", "some/path"}, // checksum invalid
	}

	for _, testCase := range cases {
		buffer := bytes.NewReader([]byte(testCase.checksum + " 30 " + testCase.path))
		reader := NewDefinitionReader(ctx, buffer)
		readChan := reader.Read()

		line := <-readChan
		assert.Nil(t, line)
		assert.NotNil(t, reader.Err)
		assert.Equal(t, 1, reader.LineNumber)
	}
}

View File

@ -1,5 +0,0 @@
35b0491c27b0333d1fb45fc0789a12ca06b1d640d2569780b807de504d7029e0 1424 definition.go
63b72c63b9424fd13b9370fb60069080c3a15717cf3ad442635b187c6a895079 127 logging.go
9f1470441beb98dbb66e3339e7da697d9c2312999a6a5610c461cbf55040e210 795 manager.go
59c6bd72af62aa860343adcafd46e3998934a9db2997ce08514b4361f099fa58 1134 routes.go
59c6bd72af62aa860343adcafd46e3998934a9db2997ce08514b4361f099fa58 1134 another-routes.go

View File

@ -1,29 +0,0 @@
/* (c) 2019, Blender Foundation - Sybren A. Stüvel
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package checkout

import (
	"github.com/sirupsen/logrus"
)

// packageLogger is the shared logger for this package, pre-tagged with the
// package name so log lines can be filtered per package.
var packageLogger = logrus.WithField("package", "shaman/checkout")

View File

@ -30,7 +30,7 @@ import (
"sync" "sync"
"time" "time"
"github.com/sirupsen/logrus" "github.com/rs/zerolog/log"
"git.blender.org/flamenco/pkg/shaman/config" "git.blender.org/flamenco/pkg/shaman/config"
"git.blender.org/flamenco/pkg/shaman/filestore" "git.blender.org/flamenco/pkg/shaman/filestore"
@ -62,12 +62,12 @@ var (
// NewManager creates and returns a new Checkout Manager. // NewManager creates and returns a new Checkout Manager.
func NewManager(conf config.Config, fileStore filestore.Storage) *Manager { func NewManager(conf config.Config, fileStore filestore.Storage) *Manager {
logger := packageLogger.WithField("checkoutDir", conf.CheckoutPath) logger := log.With().Str("checkoutDir", conf.CheckoutPath).Logger()
logger.Info("opening checkout directory") logger.Info().Msg("opening checkout directory")
err := os.MkdirAll(conf.CheckoutPath, 0777) err := os.MkdirAll(conf.CheckoutPath, 0777)
if err != nil { if err != nil {
logger.WithError(err).Fatal("unable to create checkout directory") logger.Error().Err(err).Msg("unable to create checkout directory")
} }
return &Manager{conf.CheckoutPath, fileStore, sync.WaitGroup{}} return &Manager{conf.CheckoutPath, fileStore, sync.WaitGroup{}}
@ -75,7 +75,7 @@ func NewManager(conf config.Config, fileStore filestore.Storage) *Manager {
// Close waits for still-running touch() calls to finish, then returns. // Close waits for still-running touch() calls to finish, then returns.
func (m *Manager) Close() { func (m *Manager) Close() {
packageLogger.Info("shutting down Checkout manager") log.Info().Msg("shutting down Checkout manager")
m.wg.Wait() m.wg.Wait()
} }
@ -106,31 +106,31 @@ func (m *Manager) PrepareCheckout(checkoutID string) (ResolvedCheckoutInfo, erro
return ResolvedCheckoutInfo{}, err return ResolvedCheckoutInfo{}, err
} }
logger := logrus.WithFields(logrus.Fields{ logger := log.With().
"checkoutPath": checkoutPaths.absolutePath, Str("checkoutPath", checkoutPaths.absolutePath).
"checkoutID": checkoutID, Str("checkoutID", checkoutID).
}) Logger()
if stat, err := os.Stat(checkoutPaths.absolutePath); !os.IsNotExist(err) { if stat, err := os.Stat(checkoutPaths.absolutePath); !os.IsNotExist(err) {
if err == nil { if err == nil {
if stat.IsDir() { if stat.IsDir() {
logger.Debug("checkout path exists") logger.Debug().Msg("checkout path exists")
} else { } else {
logger.Error("checkout path exists but is not a directory") logger.Error().Msg("checkout path exists but is not a directory")
} }
// No error stat'ing this path, indicating it's an existing checkout. // No error stat'ing this path, indicating it's an existing checkout.
return ResolvedCheckoutInfo{}, ErrCheckoutAlreadyExists return ResolvedCheckoutInfo{}, ErrCheckoutAlreadyExists
} }
// If it's any other error, it's really a problem on our side. // If it's any other error, it's really a problem on our side.
logger.WithError(err).Error("unable to stat checkout directory") logger.Error().Err(err).Msg("unable to stat checkout directory")
return ResolvedCheckoutInfo{}, err return ResolvedCheckoutInfo{}, err
} }
if err := os.MkdirAll(checkoutPaths.absolutePath, 0777); err != nil { if err := os.MkdirAll(checkoutPaths.absolutePath, 0777); err != nil {
logger.WithError(err).Fatal("unable to create checkout directory") logger.Error().Err(err).Msg("unable to create checkout directory")
} }
logger.WithField("relPath", checkoutPaths.RelativePath).Info("created checkout directory") logger.Info().Str("relPath", checkoutPaths.RelativePath).Msg("created checkout directory")
return checkoutPaths, nil return checkoutPaths, nil
} }
@ -141,19 +141,19 @@ func (m *Manager) EraseCheckout(checkoutID string) error {
return err return err
} }
logger := logrus.WithFields(logrus.Fields{ logger := log.With().
"checkoutPath": checkoutPaths.absolutePath, Str("checkoutPath", checkoutPaths.absolutePath).
"checkoutID": checkoutID, Str("checkoutID", checkoutID).
}) Logger()
if err := os.RemoveAll(checkoutPaths.absolutePath); err != nil { if err := os.RemoveAll(checkoutPaths.absolutePath); err != nil {
logger.WithError(err).Error("unable to remove checkout directory") logger.Error().Err(err).Msg("unable to remove checkout directory")
return err return err
} }
// Try to remove the parent path as well, to not keep the dangling two-letter dirs. // Try to remove the parent path as well, to not keep the dangling two-letter dirs.
// Failure is fine, though, because there is no guarantee it's empty anyway. // Failure is fine, though, because there is no guarantee it's empty anyway.
os.Remove(path.Dir(checkoutPaths.absolutePath)) os.Remove(path.Dir(checkoutPaths.absolutePath))
logger.Info("removed checkout directory") logger.Info().Msg("removed checkout directory")
return nil return nil
} }
@ -161,18 +161,18 @@ func (m *Manager) EraseCheckout(checkoutID string) error {
// It does *not* do any validation of the validity of the paths! // It does *not* do any validation of the validity of the paths!
func (m *Manager) SymlinkToCheckout(blobPath, checkoutPath, symlinkRelativePath string) error { func (m *Manager) SymlinkToCheckout(blobPath, checkoutPath, symlinkRelativePath string) error {
symlinkPath := path.Join(checkoutPath, symlinkRelativePath) symlinkPath := path.Join(checkoutPath, symlinkRelativePath)
logger := logrus.WithFields(logrus.Fields{ logger := log.With().
"blobPath": blobPath, Str("blobPath", blobPath).
"symlinkPath": symlinkPath, Str("symlinkPath", symlinkPath).
}) Logger()
blobPath, err := filepath.Abs(blobPath) blobPath, err := filepath.Abs(blobPath)
if err != nil { if err != nil {
logger.WithError(err).Error("unable to make blobPath absolute") logger.Error().Err(err).Msg("unable to make blobPath absolute")
return err return err
} }
logger.Debug("creating symlink") logger.Debug().Msg("creating symlink")
// This is expected to fail sometimes, because we don't create parent directories yet. // This is expected to fail sometimes, because we don't create parent directories yet.
// We only create those when we get a failure from symlinking. // We only create those when we get a failure from symlinking.
@ -181,20 +181,20 @@ func (m *Manager) SymlinkToCheckout(blobPath, checkoutPath, symlinkRelativePath
return err return err
} }
if !os.IsNotExist(err) { if !os.IsNotExist(err) {
logger.WithError(err).Error("unable to create symlink") logger.Error().Err(err).Msg("unable to create symlink")
return err return err
} }
logger.Debug("creating parent directory") logger.Debug().Msg("creating parent directory")
dir := path.Dir(symlinkPath) dir := path.Dir(symlinkPath)
if err := os.MkdirAll(dir, 0777); err != nil { if err := os.MkdirAll(dir, 0777); err != nil {
logger.WithError(err).Error("unable to create parent directory") logger.Error().Err(err).Msg("unable to create parent directory")
return err return err
} }
if err := os.Symlink(blobPath, symlinkPath); err != nil { if err := os.Symlink(blobPath, symlinkPath); err != nil {
logger.WithError(err).Error("unable to create symlink, after creating parent directory") logger.Error().Err(err).Msg("unable to create symlink, after creating parent directory")
return err return err
} }
@ -215,23 +215,17 @@ func touchFile(blobPath string) error {
} }
now := time.Now() now := time.Now()
logger := logrus.WithField("file", blobPath)
logger.Debug("touching")
err := touch.Touch(blobPath) err := touch.Touch(blobPath)
logLevel := logrus.DebugLevel
if err != nil { if err != nil {
logger = logger.WithError(err) return err
logLevel = logrus.WarnLevel
} }
duration := time.Now().Sub(now) duration := time.Now().Sub(now)
logger = logger.WithField("duration", duration) logger := log.With().Str("file", blobPath).Logger()
if duration < 1*time.Second { if duration > 1*time.Second {
logger.Log(logLevel, "done touching") logger.Warn().Str("duration", duration.String()).Msg("done touching but took a long time")
} else {
logger.Log(logLevel, "done touching but took a long time")
} }
logger.Debug().Msg("done touching")
return err return err
} }

View File

@ -0,0 +1,63 @@
package checkout
// SPDX-License-Identifier: GPL-3.0-or-later
import (
"context"
"fmt"
"git.blender.org/flamenco/pkg/api"
"git.blender.org/flamenco/pkg/shaman/filestore"
"github.com/rs/zerolog"
)
// ReportRequirements returns the subset of the requested files the caller
// still needs to act on: files unknown to this Shaman server (to upload now)
// and files currently being uploaded by another client (to postpone). Files
// that are already stored are 'touched' to protect them from garbage
// collection and are omitted from the response. Duplicate (checksum, size)
// entries are only reported once.
func (m *Manager) ReportRequirements(ctx context.Context, requirements api.ShamanRequirementsRequest) (api.ShamanRequirementsResponse, error) {
	logger := zerolog.Ctx(ctx)
	logger.Debug().Msg("user requested checkout requirements")

	response := api.ShamanRequirementsResponse{}
	seen := map[string]bool{}

	for _, spec := range requirements.Files {
		key := fmt.Sprintf("%s/%d", spec.Sha, spec.Size)
		if seen[key] {
			// User asked for this (checksum, filesize) tuple already.
			continue
		}

		path, status := m.fileStore.ResolveFile(spec.Sha, int64(spec.Size), filestore.ResolveEverything)

		var specStatus api.ShamanFileStatus
		switch status {
		case filestore.StatusDoesNotExist:
			// Caller can upload this file immediately.
			specStatus = api.ShamanFileStatusUnknown
		case filestore.StatusUploading:
			// Caller should postpone uploading this file until all 'unknown' files have been uploaded.
			specStatus = api.ShamanFileStatusUploading
		case filestore.StatusStored:
			// Already stored; 'touch' it so it won't be GC'd in the mean
			// time. Nothing for the caller to do, so no response entry.
			go touchFile(path)
			continue
		default:
			logger.Error().
				Str("path", path).
				Str("status", status.String()).
				Str("checksum", spec.Sha).
				Int("filesize", spec.Size).
				Msg("invalid status returned by ResolveFile")
			continue
		}

		seen[key] = true
		response.Files = append(response.Files, api.ShamanFileSpecWithStatus{
			ShamanFileSpec: spec,
			Status:         specStatus,
		})
	}

	return response, nil
}

View File

@ -1,191 +0,0 @@
/* (c) 2019, Blender Foundation - Sybren A. Stüvel
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package checkout
import (
"fmt"
"net/http"
"strings"
"git.blender.org/flamenco/pkg/shaman/filestore"
"git.blender.org/flamenco/pkg/shaman/httpserver"
"git.blender.org/flamenco/pkg/shaman/jwtauth"
"github.com/gorilla/mux"
"github.com/sirupsen/logrus"
)
// Responses for each line of a checkout definition file.
const (
	responseFileUnkown       = "file-unknown"      // caller must upload this file
	responseAlreadyUploading = "already-uploading" // another client is uploading this file
	responseError            = "ERROR"             // reading/parsing the definition failed
)

// AddRoutes adds HTTP routes to the muxer.
// Both endpoints accept POSTed plain-text checkout definition files and are
// wrapped in the given JWT authenticator.
func (m *Manager) AddRoutes(router *mux.Router, auther jwtauth.Authenticator) {
	router.Handle("/checkout/requirements", auther.WrapFunc(m.reportRequirements)).Methods("POST")
	router.Handle("/checkout/create/{checkoutID}", auther.WrapFunc(m.createCheckout)).Methods("POST")
}
// reportRequirements handles a POSTed plain-text checkout definition and
// responds with one line per file the caller still needs to act on:
// "file-unknown <path>" or "already-uploading <path>". Files that are
// already stored are touched (to protect them from garbage collection) and
// not reported. Duplicate (checksum, filesize) tuples are reported once.
func (m *Manager) reportRequirements(w http.ResponseWriter, r *http.Request) {
	logger := packageLogger.WithFields(jwtauth.RequestLogFields(r))
	logger.Debug("user requested checkout requirements")

	w.Header().Set("Content-Type", "text/plain; charset=utf-8")
	if r.Header.Get("Content-Type") != "text/plain" {
		http.Error(w, "Expecting text/plain content type", http.StatusBadRequest)
		return
	}

	// The body may be gzip-compressed; get a transparently-decompressed reader.
	bodyReader, err := httpserver.DecompressedReader(r)
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	defer bodyReader.Close()

	// Unfortunately, Golang doesn't allow us (for good reason) to send a reply while
	// still reading the response. See https://github.com/golang/go/issues/4637
	responseLines := []string{}
	alreadyRequested := map[string]bool{}
	reader := NewDefinitionReader(r.Context(), bodyReader)
	for line := range reader.Read() {
		fileKey := fmt.Sprintf("%s/%d", line.Checksum, line.FileSize)
		if alreadyRequested[fileKey] {
			// User asked for this (checksum, filesize) tuple already.
			continue
		}

		path, status := m.fileStore.ResolveFile(line.Checksum, line.FileSize, filestore.ResolveEverything)

		response := ""
		switch status {
		case filestore.StatusDoesNotExist:
			// Caller can upload this file immediately.
			response = responseFileUnkown
		case filestore.StatusUploading:
			// Caller should postpone uploading this file until all 'does-not-exist' files have been uploaded.
			response = responseAlreadyUploading
		case filestore.StatusStored:
			// We expect this file to be sent soon, though, so we need to
			// 'touch' it to make sure it won't be GC'd in the mean time.
			go touchFile(path)
			// Only send a response when the caller needs to do something.
			continue
		default:
			logger.WithFields(logrus.Fields{
				"path":     path,
				"status":   status,
				"checksum": line.Checksum,
				"filesize": line.FileSize,
			}).Error("invalid status returned by ResolveFile")
			continue
		}

		alreadyRequested[fileKey] = true
		responseLines = append(responseLines, fmt.Sprintf("%s %s\n", response, line.FilePath))
	}
	if reader.Err != nil {
		logger.WithError(reader.Err).Warning("error reading checkout definition")
		http.Error(w, fmt.Sprintf("%s %v\n", responseError, reader.Err), http.StatusBadRequest)
		return
	}

	w.WriteHeader(http.StatusOK)
	w.Write([]byte(strings.Join(responseLines, "")))
}
// createCheckout handles a POSTed plain-text checkout definition and creates
// the checkout directory for the checkout ID from the URL, symlinking every
// listed file from the file store. On success the response body is the
// checkout's relative path. When any file is missing, the partially-created
// checkout directory is erased again and a "file-unknown" line per missing
// file is returned with HTTP 400.
func (m *Manager) createCheckout(w http.ResponseWriter, r *http.Request) {
	checkoutID := mux.Vars(r)["checkoutID"]
	logger := packageLogger.WithFields(jwtauth.RequestLogFields(r)).WithField("checkoutID", checkoutID)
	logger.Debug("user requested checkout creation")

	w.Header().Set("Content-Type", "text/plain; charset=utf-8")
	if r.Header.Get("Content-Type") != "text/plain" {
		http.Error(w, "Expecting text/plain content type", http.StatusBadRequest)
		return
	}

	// The body may be gzip-compressed; get a transparently-decompressed reader.
	bodyReader, err := httpserver.DecompressedReader(r)
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	defer bodyReader.Close()

	// Actually create the checkout.
	resolvedCheckoutInfo, err := m.PrepareCheckout(checkoutID)
	if err != nil {
		// Map the well-known preparation errors onto appropriate HTTP statuses.
		switch err {
		case ErrInvalidCheckoutID:
			http.Error(w, fmt.Sprintf("invalid checkout ID '%s'", checkoutID), http.StatusBadRequest)
		case ErrCheckoutAlreadyExists:
			http.Error(w, fmt.Sprintf("checkout '%s' already exists", checkoutID), http.StatusConflict)
		default:
			http.Error(w, err.Error(), http.StatusInternalServerError)
		}
		return
	}

	// The checkout directory was created, so if anything fails now, it should be erased.
	var checkoutOK bool
	defer func() {
		if !checkoutOK {
			m.EraseCheckout(checkoutID)
		}
	}()

	responseLines := []string{}
	reader := NewDefinitionReader(r.Context(), bodyReader)
	for line := range reader.Read() {
		blobPath, status := m.fileStore.ResolveFile(line.Checksum, line.FileSize, filestore.ResolveStoredOnly)
		if status != filestore.StatusStored {
			// Caller should upload this file before we can create the checkout.
			responseLines = append(responseLines, fmt.Sprintf("%s %s\n", responseFileUnkown, line.FilePath))
			continue
		}

		if err := m.SymlinkToCheckout(blobPath, resolvedCheckoutInfo.absolutePath, line.FilePath); err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
	}
	if reader.Err != nil {
		http.Error(w, fmt.Sprintf("ERROR %v\n", reader.Err), http.StatusBadRequest)
		return
	}

	// If there was any file missing, we should just stop now.
	if len(responseLines) > 0 {
		http.Error(w, strings.Join(responseLines, ""), http.StatusBadRequest)
		return
	}

	w.WriteHeader(http.StatusOK)
	w.Write([]byte(resolvedCheckoutInfo.RelativePath))

	checkoutOK = true // Prevent the checkout directory from being erased again.
	logger.Info("checkout created")
}

View File

@ -23,18 +23,10 @@
package checkout package checkout
import ( import (
"io/ioutil" "context"
"net/http"
"net/http/httptest"
"os"
"path"
"strings"
"testing" "testing"
"git.blender.org/flamenco/pkg/shaman/filestore" "git.blender.org/flamenco/pkg/api"
"git.blender.org/flamenco/pkg/shaman/httpserver"
"github.com/gorilla/mux"
"github.com/sirupsen/logrus"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
@ -42,84 +34,72 @@ func TestReportRequirements(t *testing.T) {
manager, cleanup := createTestManager() manager, cleanup := createTestManager()
defer cleanup() defer cleanup()
defFile, err := ioutil.ReadFile("definition_test_example.txt") required := api.ShamanRequirementsRequest{
assert.Nil(t, err) Files: []api.ShamanFileSpec{
compressedDefFile := httpserver.CompressBuffer(defFile) {"63b72c63b9424fd13b9370fb60069080c3a15717cf3ad442635b187c6a895079", 127},
{"9f1470441beb98dbb66e3339e7da697d9c2312999a6a5610c461cbf55040e210", 795},
// 5 files, all ending in newline, so defFileLines has trailing "" element. {"59c6bd72af62aa860343adcafd46e3998934a9db2997ce08514b4361f099fa58", 1134},
defFileLines := strings.Split(string(defFile), "\n") {"59c6bd72af62aa860343adcafd46e3998934a9db2997ce08514b4361f099fa58", 1134}, // duplicate of the above
assert.Equal(t, 6, len(defFileLines), defFileLines) },
respRec := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/checkout/requirement", compressedDefFile)
req.Header.Set("Content-Type", "text/plain")
req.Header.Set("Content-Encoding", "gzip")
manager.reportRequirements(respRec, req)
bodyBytes, err := ioutil.ReadAll(respRec.Body)
assert.Nil(t, err)
body := string(bodyBytes)
assert.Equal(t, respRec.Code, http.StatusOK, body)
// We should not be required to upload the same file twice,
// so another-routes.go should not be in the response.
lines := strings.Split(body, "\n")
expectLines := []string{
"file-unknown definition.go",
"file-unknown logging.go",
"file-unknown manager.go",
"file-unknown routes.go",
"",
} }
assert.EqualValues(t, expectLines, lines)
response, err := manager.ReportRequirements(context.Background(), required)
assert.NoError(t, err)
// We should not be required to upload the same file twice, so the duplicate
// should not be in the response.
assert.Equal(t, []api.ShamanFileSpecWithStatus{
{api.ShamanFileSpec{"63b72c63b9424fd13b9370fb60069080c3a15717cf3ad442635b187c6a895079", 127}, api.ShamanFileStatusUnknown},
{api.ShamanFileSpec{"9f1470441beb98dbb66e3339e7da697d9c2312999a6a5610c461cbf55040e210", 795}, api.ShamanFileStatusUnknown},
{api.ShamanFileSpec{"59c6bd72af62aa860343adcafd46e3998934a9db2997ce08514b4361f099fa58", 1134}, api.ShamanFileStatusUnknown},
}, response.Files)
} }
func TestCreateCheckout(t *testing.T) { // func TestCreateCheckout(t *testing.T) {
manager, cleanup := createTestManager() // manager, cleanup := createTestManager()
defer cleanup() // defer cleanup()
filestore.LinkTestFileStore(manager.fileStore.BasePath()) // filestore.LinkTestFileStore(manager.fileStore.BasePath())
defFile, err := ioutil.ReadFile("../_test_file_store/checkout_definition.txt") // defFile, err := ioutil.ReadFile("../_test_file_store/checkout_definition.txt")
assert.Nil(t, err) // assert.Nil(t, err)
compressedDefFile := httpserver.CompressBuffer(defFile) // compressedDefFile := httpserver.CompressBuffer(defFile)
respRec := httptest.NewRecorder() // respRec := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/checkout/create/{checkoutID}", compressedDefFile) // req := httptest.NewRequest("POST", "/checkout/create/{checkoutID}", compressedDefFile)
req = mux.SetURLVars(req, map[string]string{ // req = mux.SetURLVars(req, map[string]string{
"checkoutID": "jemoeder", // "checkoutID": "jemoeder",
}) // })
req.Header.Set("Content-Type", "text/plain") // req.Header.Set("Content-Type", "text/plain")
req.Header.Set("Content-Encoding", "gzip") // req.Header.Set("Content-Encoding", "gzip")
logrus.SetLevel(logrus.DebugLevel) // logrus.SetLevel(logrus.DebugLevel)
manager.createCheckout(respRec, req) // manager.createCheckout(respRec, req)
bodyBytes, err := ioutil.ReadAll(respRec.Body) // bodyBytes, err := ioutil.ReadAll(respRec.Body)
assert.Nil(t, err) // assert.Nil(t, err)
body := string(bodyBytes) // body := string(bodyBytes)
assert.Equal(t, http.StatusOK, respRec.Code, body) // assert.Equal(t, http.StatusOK, respRec.Code, body)
// Check the symlinks of the checkout // // Check the symlinks of the checkout
coPath := path.Join(manager.checkoutBasePath, "er", "jemoeder") // coPath := path.Join(manager.checkoutBasePath, "er", "jemoeder")
assert.FileExists(t, path.Join(coPath, "subdir", "replacer.py")) // assert.FileExists(t, path.Join(coPath, "subdir", "replacer.py"))
assert.FileExists(t, path.Join(coPath, "feed.py")) // assert.FileExists(t, path.Join(coPath, "feed.py"))
assert.FileExists(t, path.Join(coPath, "httpstuff.py")) // assert.FileExists(t, path.Join(coPath, "httpstuff.py"))
assert.FileExists(t, path.Join(coPath, "filesystemstuff.py")) // assert.FileExists(t, path.Join(coPath, "filesystemstuff.py"))
storePath := manager.fileStore.StoragePath() // storePath := manager.fileStore.StoragePath()
assertLinksTo(t, path.Join(coPath, "subdir", "replacer.py"), // assertLinksTo(t, path.Join(coPath, "subdir", "replacer.py"),
path.Join(storePath, "59", "0c148428d5c35fab3ebad2f3365bb469ab9c531b60831f3e826c472027a0b9", "3367.blob")) // path.Join(storePath, "59", "0c148428d5c35fab3ebad2f3365bb469ab9c531b60831f3e826c472027a0b9", "3367.blob"))
assertLinksTo(t, path.Join(coPath, "feed.py"), // assertLinksTo(t, path.Join(coPath, "feed.py"),
path.Join(storePath, "80", "b749c27b2fef7255e7e7b3c2029b03b31299c75ff1f1c72732081c70a713a3", "7488.blob")) // path.Join(storePath, "80", "b749c27b2fef7255e7e7b3c2029b03b31299c75ff1f1c72732081c70a713a3", "7488.blob"))
assertLinksTo(t, path.Join(coPath, "httpstuff.py"), // assertLinksTo(t, path.Join(coPath, "httpstuff.py"),
path.Join(storePath, "91", "4853599dd2c351ab7b82b219aae6e527e51518a667f0ff32244b0c94c75688", "486.blob")) // path.Join(storePath, "91", "4853599dd2c351ab7b82b219aae6e527e51518a667f0ff32244b0c94c75688", "486.blob"))
assertLinksTo(t, path.Join(coPath, "filesystemstuff.py"), // assertLinksTo(t, path.Join(coPath, "filesystemstuff.py"),
path.Join(storePath, "d6", "fc7289b5196cc96748ea72f882a22c39b8833b457fe854ef4c03a01f5db0d3", "7217.blob")) // path.Join(storePath, "d6", "fc7289b5196cc96748ea72f882a22c39b8833b457fe854ef4c03a01f5db0d3", "7217.blob"))
} // }
func assertLinksTo(t *testing.T, linkPath, expectedTarget string) { // func assertLinksTo(t *testing.T, linkPath, expectedTarget string) {
actualTarget, err := os.Readlink(linkPath) // actualTarget, err := os.Readlink(linkPath)
assert.Nil(t, err) // assert.Nil(t, err)
assert.Equal(t, expectedTarget, actualTarget) // assert.Equal(t, expectedTarget, actualTarget)
} // }

View File

@ -23,17 +23,16 @@
package fileserver package fileserver
import ( import (
"bytes"
"context"
"io"
"io/ioutil" "io/ioutil"
"net/http"
"net/http/httptest"
"strconv"
"testing" "testing"
"git.blender.org/flamenco/pkg/shaman/config"
"git.blender.org/flamenco/pkg/shaman/hasher" "git.blender.org/flamenco/pkg/shaman/hasher"
"git.blender.org/flamenco/pkg/shaman/httpserver"
"git.blender.org/flamenco/pkg/shaman/filestore" "git.blender.org/flamenco/pkg/shaman/filestore"
"github.com/gorilla/mux"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
@ -46,37 +45,42 @@ func TestStoreFile(t *testing.T) {
assert.EqualValues(t, []byte("h\xc3\xa4h\xc3\xa4h\xc3\xa4"), payload) assert.EqualValues(t, []byte("h\xc3\xa4h\xc3\xa4h\xc3\xa4"), payload)
filesize := int64(len(payload)) filesize := int64(len(payload))
correctChecksum := hasher.Checksum(payload)
testWithChecksum := func(checksum string) *httptest.ResponseRecorder { testWithChecksum := func(checksum string, reportSize int64) error {
compressedPayload := httpserver.CompressBuffer(payload) buffer := io.NopCloser(bytes.NewBuffer(payload))
respRec := httptest.NewRecorder() return server.ReceiveFile(context.Background(), buffer, checksum, reportSize, false)
req := httptest.NewRequest("POST", "/files/{checksum}/{filesize}", compressedPayload)
req = mux.SetURLVars(req, map[string]string{
"checksum": checksum,
"filesize": strconv.FormatInt(filesize, 10),
})
req.Header.Set("Content-Encoding", "gzip")
req.Header.Set("X-Shaman-Original-Filename", "in-memory-file.txt")
server.ServeHTTP(respRec, req)
return respRec
} }
var respRec *httptest.ResponseRecorder var err error
var path string var path string
var status filestore.FileStatus var status filestore.FileStatus
// A bad checksum should be rejected. // A bad checksum should be rejected.
badChecksum := "da-checksum-is-long-enough-like-this" badChecksum := "da-checksum-is-long-enough-like-this"
respRec = testWithChecksum(badChecksum) err = testWithChecksum(badChecksum, filesize)
assert.Equal(t, http.StatusExpectationFailed, respRec.Code) assert.ErrorIs(t, err, ErrFileChecksumMismatch{
DeclaredChecksum: badChecksum,
ActualChecksum: correctChecksum,
})
path, status = server.fileStore.ResolveFile(badChecksum, filesize, filestore.ResolveEverything)
assert.Equal(t, filestore.StatusDoesNotExist, status)
assert.Equal(t, "", path)
// A bad file size should be rejected.
err = testWithChecksum(correctChecksum, filesize+1)
assert.ErrorIs(t, err, ErrFileSizeMismatch{
DeclaredSize: filesize + 1,
ActualSize: filesize,
})
path, status = server.fileStore.ResolveFile(badChecksum, filesize, filestore.ResolveEverything) path, status = server.fileStore.ResolveFile(badChecksum, filesize, filestore.ResolveEverything)
assert.Equal(t, filestore.StatusDoesNotExist, status) assert.Equal(t, filestore.StatusDoesNotExist, status)
assert.Equal(t, "", path) assert.Equal(t, "", path)
// The correct checksum should be accepted. // The correct checksum should be accepted.
correctChecksum := hasher.Checksum(payload) err = testWithChecksum(correctChecksum, filesize)
respRec = testWithChecksum(correctChecksum) assert.NoError(t, err)
assert.Equal(t, http.StatusNoContent, respRec.Code)
path, status = server.fileStore.ResolveFile(correctChecksum, filesize, filestore.ResolveEverything) path, status = server.fileStore.ResolveFile(correctChecksum, filesize, filestore.ResolveEverything)
assert.Equal(t, filestore.StatusStored, status) assert.Equal(t, filestore.StatusStored, status)
assert.FileExists(t, path) assert.FileExists(t, path)
@ -85,3 +89,17 @@ func TestStoreFile(t *testing.T) {
assert.Nil(t, err) assert.Nil(t, err)
assert.EqualValues(t, payload, savedContent, "The file should be saved uncompressed") assert.EqualValues(t, payload, savedContent, "The file should be saved uncompressed")
} }
func createTestServer() (server *FileServer, cleanup func()) {
config, configCleanup := config.CreateTestConfig()
store := filestore.New(config)
server = New(store)
server.Go()
cleanup = func() {
server.Close()
configCleanup()
}
return
}

View File

@ -25,6 +25,8 @@ package fileserver
import ( import (
"fmt" "fmt"
"time" "time"
"github.com/rs/zerolog/log"
) )
// Returns a channel that is open while the given file is being received. // Returns a channel that is open while the given file is being received.
@ -68,10 +70,10 @@ func (fs *FileServer) receiveListenerPeriodicCheck() {
numChans := len(fs.receiverChannels) numChans := len(fs.receiverChannels)
if numChans == 0 { if numChans == 0 {
if lastReportedChans != 0 { if lastReportedChans != 0 {
packageLogger.Debug("no receive listener channels") log.Debug().Msg("no receive listener channels")
} }
} else { } else {
packageLogger.WithField("num_receiver_channels", numChans).Debug("receiving files") log.Debug().Int("num_receiver_channels", numChans).Msg("receiving files")
} }
lastReportedChans = numChans lastReportedChans = numChans
} }
@ -79,7 +81,7 @@ func (fs *FileServer) receiveListenerPeriodicCheck() {
for { for {
select { select {
case <-fs.ctx.Done(): case <-fs.ctx.Done():
packageLogger.Debug("stopping receive listener periodic check") log.Debug().Msg("stopping receive listener periodic check")
return return
case <-time.After(1 * time.Minute): case <-time.After(1 * time.Minute):
doCheck() doCheck()

View File

@ -1,83 +0,0 @@
/* (c) 2019, Blender Foundation - Sybren A. Stüvel
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package fileserver
import (
"context"
"fmt"
"io"
"net/http"
"os"
"strconv"
"github.com/sirupsen/logrus"
"git.blender.org/flamenco/pkg/shaman/filestore"
)
// serveFile only serves stored files (not 'uploading' or 'checking')
func (fs *FileServer) serveFile(ctx context.Context, w http.ResponseWriter, checksum string, filesize int64) {
path, status := fs.fileStore.ResolveFile(checksum, filesize, filestore.ResolveStoredOnly)
if status != filestore.StatusStored {
http.Error(w, "File Not Found", http.StatusNotFound)
return
}
logger := packageLogger.WithField("path", path)
stat, err := os.Stat(path)
if err != nil {
logger.WithError(err).Error("unable to stat file")
http.Error(w, "File Not Found", http.StatusNotFound)
return
}
if stat.Size() != filesize {
logger.WithFields(logrus.Fields{
"realSize": stat.Size(),
"expectedSize": filesize,
}).Error("file size in storage is corrupt")
http.Error(w, "File Size Incorrect", http.StatusInternalServerError)
return
}
infile, err := os.Open(path)
if err != nil {
logger.WithError(err).Error("unable to read file")
http.Error(w, "File Not Found", http.StatusNotFound)
return
}
filesizeStr := strconv.FormatInt(filesize, 10)
w.Header().Set("Content-Type", "application/binary")
w.Header().Set("Content-Length", filesizeStr)
w.Header().Set("ETag", fmt.Sprintf("'%s-%s'", checksum, filesizeStr))
w.Header().Set("X-Shaman-Checksum", checksum)
written, err := io.Copy(w, infile)
if err != nil {
logger.WithError(err).Error("unable to copy file to writer")
// Anything could have been sent by now, so just close the connection.
return
}
logger.WithField("written", written).Debug("file send to writer")
}

View File

@ -1,71 +0,0 @@
/* (c) 2019, Blender Foundation - Sybren A. Stüvel
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package fileserver
import (
"net/http"
"net/http/httptest"
"strconv"
"testing"
"git.blender.org/flamenco/pkg/shaman/config"
"git.blender.org/flamenco/pkg/shaman/filestore"
"github.com/gorilla/mux"
"github.com/stretchr/testify/assert"
)
func createTestServer() (server *FileServer, cleanup func()) {
config, configCleanup := config.CreateTestConfig()
store := filestore.New(config)
server = New(store)
server.Go()
cleanup = func() {
server.Close()
configCleanup()
}
return
}
func TestServeFile(t *testing.T) {
server, cleanup := createTestServer()
defer cleanup()
payload := []byte("hähähä")
checksum := "da-checksum-is-long-enough-like-this"
filesize := int64(len(payload))
server.fileStore.(*filestore.Store).MustStoreFileForTest(checksum, filesize, payload)
respRec := httptest.NewRecorder()
req := httptest.NewRequest("GET", "/files/{checksum}/{filesize}", nil)
req = mux.SetURLVars(req, map[string]string{
"checksum": checksum,
"filesize": strconv.FormatInt(filesize, 10),
})
server.ServeHTTP(respRec, req)
assert.Equal(t, http.StatusOK, respRec.Code)
assert.EqualValues(t, payload, respRec.Body.Bytes())
}

View File

@ -24,6 +24,7 @@ package filestore
import ( import (
"errors" "errors"
"fmt"
"os" "os"
) )
@ -65,6 +66,19 @@ const (
StatusStored StatusStored
) )
func (fs FileStatus) String() string {
switch fs {
case StatusDoesNotExist:
return "DoesNotExist"
case StatusUploading:
return "Uploading"
case StatusStored:
return "Stored"
default:
return fmt.Sprintf("invalid(%d)", int(fs))
}
}
// StoredOnly indicates whether to resolve only 'stored' files or also 'uploading' or 'checking'. // StoredOnly indicates whether to resolve only 'stored' files or also 'uploading' or 'checking'.
type StoredOnly bool type StoredOnly bool

View File

@ -104,24 +104,24 @@ func (s *Server) Close() {
// Checkout creates a directory, and symlinks the required files into it. The // Checkout creates a directory, and symlinks the required files into it. The
// files must all have been uploaded to Shaman before calling this. // files must all have been uploaded to Shaman before calling this.
func (s *Server) Checkout(ctx context.Context, checkoutID string, checkout api.ShamanCheckout) error { func (s *Server) Checkout(ctx context.Context, checkoutID string, checkout api.ShamanCheckout) error {
return nil return s.checkoutMan.Checkout(ctx, checkoutID, checkout)
} }
// Requirements checks a Shaman Requirements file, and returns the subset // Requirements checks a Shaman Requirements file, and returns the subset
// containing the unknown files. // containing the unknown files.
func (s *Server) Requirements(ctx context.Context, requirements api.ShamanRequirements) (api.ShamanRequirements, error) { func (s *Server) Requirements(ctx context.Context, requirements api.ShamanRequirementsRequest) (api.ShamanRequirementsResponse, error) {
return requirements, nil return s.checkoutMan.ReportRequirements(ctx, requirements)
} }
var fsStatusToApiStatus = map[filestore.FileStatus]api.ShamanFileStatusStatus{ var fsStatusToApiStatus = map[filestore.FileStatus]api.ShamanFileStatus{
filestore.StatusDoesNotExist: api.ShamanFileStatusStatusUnknown, filestore.StatusDoesNotExist: api.ShamanFileStatusUnknown,
filestore.StatusUploading: api.ShamanFileStatusStatusUploading, filestore.StatusUploading: api.ShamanFileStatusUploading,
filestore.StatusStored: api.ShamanFileStatusStatusStored, filestore.StatusStored: api.ShamanFileStatusStored,
} }
// Check the status of a file on the Shaman server. // Check the status of a file on the Shaman server.
// status (stored, currently being uploaded, unknown). // status (stored, currently being uploaded, unknown).
func (s *Server) FileStoreCheck(ctx context.Context, checksum string, filesize int64) api.ShamanFileStatusStatus { func (s *Server) FileStoreCheck(ctx context.Context, checksum string, filesize int64) api.ShamanFileStatus {
status := s.fileServer.CheckFile(checksum, filesize) status := s.fileServer.CheckFile(checksum, filesize)
apiStatus, ok := fsStatusToApiStatus[status] apiStatus, ok := fsStatusToApiStatus[status]
if !ok { if !ok {
@ -130,7 +130,7 @@ func (s *Server) FileStoreCheck(ctx context.Context, checksum string, filesize i
Int64("filesize", filesize). Int64("filesize", filesize).
Int("fileserverStatus", int(status)). Int("fileserverStatus", int(status)).
Msg("shaman: unknown status on fileserver") Msg("shaman: unknown status on fileserver")
return api.ShamanFileStatusStatusUnknown return api.ShamanFileStatusUnknown
} }
return apiStatus return apiStatus
} }