Changes in commit 33dd7c5ffc (parent 86ab1a451b)

.vscode/settings.json (vendored, 7 changed lines)
@@ -1,18 +1,25 @@
{
    "cSpell.words": [
        "apidoc",
        "apikey",
        "axum",
        "chrono",
        "color",
        "colored",
        "Conn",
        "distr",
        "dotenv",
        "draftfiles",
        "exop",
        "hmac",
        "kellnr",
        "ldaps",
        "minisign",
        "oneshot",
        "openapi",
        "postgres",
        "recv",
        "redoc",
        "repr",
        "serde",
        "Servable",
.yaak/yaak.fl_Ffdms6jtTs.yaml (new file, 10 lines)
@@ -0,0 +1,10 @@
type: folder
model: folder
id: fl_Ffdms6jtTs
createdAt: 2025-04-29T13:36:18.082441
updatedAt: 2025-05-04T06:56:59.387677
workspaceId: wk_SlydsyH2WI
folderId: fl_1twfKS3Z0A
name: Files
description: ''
sortPriority: 4000.0
.yaak/yaak.fl_MkZHYA6WTy.yaml (new file, 10 lines)
@@ -0,0 +1,10 @@
type: folder
model: folder
id: fl_MkZHYA6WTy
createdAt: 2025-04-29T13:37:56.870080
updatedAt: 2025-05-04T06:56:59.387728
workspaceId: wk_SlydsyH2WI
folderId: fl_1twfKS3Z0A
name: Units
description: ''
sortPriority: 6000.0
.yaak/yaak.fl_N4bxVTBvxq.yaml (new file, 10 lines)
@@ -0,0 +1,10 @@
type: folder
model: folder
id: fl_N4bxVTBvxq
createdAt: 2025-04-29T13:37:22.809630
updatedAt: 2025-05-04T06:56:59.387056
workspaceId: wk_SlydsyH2WI
folderId: fl_1twfKS3Z0A
name: Signature
description: ''
sortPriority: 2000.0
.yaak/yaak.fl_bbQJBGXney.yaml (new file, 10 lines)
@@ -0,0 +1,10 @@
type: folder
model: folder
id: fl_bbQJBGXney
createdAt: 2025-04-29T13:36:00.949626
updatedAt: 2025-05-04T06:56:59.387075
workspaceId: wk_SlydsyH2WI
folderId: fl_1twfKS3Z0A
name: Users
description: ''
sortPriority: 3000.0
.yaak/yaak.fl_cD4DDJcE6q.yaml (new file, 10 lines)
@@ -0,0 +1,10 @@
type: folder
model: folder
id: fl_cD4DDJcE6q
createdAt: 2025-05-04T06:20:41.227029
updatedAt: 2025-05-04T06:56:59.399015
workspaceId: wk_SlydsyH2WI
folderId: fl_1twfKS3Z0A
name: Vendors
description: ''
sortPriority: 7000.0
.yaak/yaak.fl_cdHsBtENVb.yaml (new file, 10 lines)
@@ -0,0 +1,10 @@
type: folder
model: folder
id: fl_cdHsBtENVb
createdAt: 2025-04-29T13:36:56.735465
updatedAt: 2025-05-04T06:56:59.387465
workspaceId: wk_SlydsyH2WI
folderId: fl_1twfKS3Z0A
name: Departments
description: ''
sortPriority: 5000.0
.yaak/yaak.fl_qcKuCivw9q.yaml (new file, 10 lines)
@@ -0,0 +1,10 @@
type: folder
model: folder
id: fl_qcKuCivw9q
createdAt: 2025-05-04T06:56:56.471277
updatedAt: 2025-05-04T06:56:59.400306
workspaceId: wk_SlydsyH2WI
folderId: fl_1twfKS3Z0A
name: Data Hub
description: ''
sortPriority: 8000.0
@ -2,9 +2,9 @@ type: http_request
|
||||
model: http_request
|
||||
id: rq_1YHYKIkG8x
|
||||
createdAt: 2025-01-06T11:35:07
|
||||
updatedAt: 2025-03-31T17:29:24.409065
|
||||
updatedAt: 2025-04-29T13:37:39.099949
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_1twfKS3Z0A
|
||||
folderId: fl_N4bxVTBvxq
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
@ -35,6 +35,6 @@ headers:
|
||||
id: null
|
||||
method: GET
|
||||
name: Verify Something
|
||||
sortPriority: 6000.0
|
||||
sortPriority: 2000.0
|
||||
url: http://localhost:8080/api/signature/verify
|
||||
urlParameters: []
|
||||
|
.yaak/yaak.rq_5V5Ana9beE.yaml (new file, 54 lines)
@@ -0,0 +1,54 @@
|
||||
type: http_request
|
||||
model: http_request
|
||||
id: rq_5V5Ana9beE
|
||||
createdAt: 2025-04-29T13:46:49.596651
|
||||
updatedAt: 2025-04-29T13:47:38.677134
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_cdHsBtENVb
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
body:
|
||||
text: |-
|
||||
{
|
||||
"id": 1,
|
||||
"name": "Test2",
|
||||
"description": "Bla bla bla",
|
||||
"manager": {
|
||||
"user_id": "einstein",
|
||||
"name": "Albert",
|
||||
"surname": "EINSTEIN",
|
||||
"email": "albert.einstein@uno.com"
|
||||
},
|
||||
"vice_manager": {
|
||||
"user_id": "einstein",
|
||||
"name": "Max",
|
||||
"surname": "Mustermann",
|
||||
"email": ""
|
||||
},
|
||||
"active": true
|
||||
}
|
||||
bodyType: application/json
|
||||
description: ''
|
||||
headers:
|
||||
- enabled: true
|
||||
name: x-api-key
|
||||
value: ${[ ApiKey ]}
|
||||
id: null
|
||||
- enabled: false
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: Content-Type
|
||||
value: application/json
|
||||
id: FWIGbxmTAH
|
||||
method: PUT
|
||||
name: Departments
|
||||
sortPriority: 1000.001
|
||||
url: http://localhost:8080/api/departments
|
||||
urlParameters: []
|
.yaak/yaak.rq_7SWbqNW2zP.yaml (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
type: http_request
|
||||
model: http_request
|
||||
id: rq_7SWbqNW2zP
|
||||
createdAt: 2025-04-29T13:37:56.872905
|
||||
updatedAt: 2025-05-04T06:22:35.588966
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_MkZHYA6WTy
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
body:
|
||||
text: ''
|
||||
bodyType: application/json
|
||||
description: ''
|
||||
headers:
|
||||
- enabled: true
|
||||
name: x-api-key
|
||||
value: ${[ ApiKey ]}
|
||||
id: null
|
||||
- enabled: false
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: Content-Type
|
||||
value: application/json
|
||||
id: FWIGbxmTAH
|
||||
method: GET
|
||||
name: Units
|
||||
sortPriority: 0.0
|
||||
url: http://localhost:8080/api/units
|
||||
urlParameters: []
|
@ -2,9 +2,9 @@ type: http_request
|
||||
model: http_request
|
||||
id: rq_A81dnTkhL5
|
||||
createdAt: 2024-12-31T10:05:25
|
||||
updatedAt: 2025-03-31T17:28:59.911928
|
||||
updatedAt: 2025-04-29T13:36:05.874583
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_1twfKS3Z0A
|
||||
folderId: fl_bbQJBGXney
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
@ -43,6 +43,6 @@ headers:
|
||||
id: null
|
||||
method: GET
|
||||
name: Users
|
||||
sortPriority: 2000.0
|
||||
sortPriority: 0.0
|
||||
url: http://localhost:8080/api/users
|
||||
urlParameters: []
|
||||
|
.yaak/yaak.rq_CBoUYLuHbS.yaml (new file, 48 lines)
@@ -0,0 +1,48 @@
|
||||
type: http_request
|
||||
model: http_request
|
||||
id: rq_CBoUYLuHbS
|
||||
createdAt: 2025-05-04T06:22:32.817130
|
||||
updatedAt: 2025-05-04T20:15:35.856146
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_cD4DDJcE6q
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
body:
|
||||
form:
|
||||
- enabled: true
|
||||
id: znUUa4C4eQ
|
||||
name: file
|
||||
value: ''
|
||||
- enabled: true
|
||||
id: 27IzGGTVYi
|
||||
name: ''
|
||||
value: ''
|
||||
bodyType: multipart/form-data
|
||||
description: ''
|
||||
headers:
|
||||
- enabled: true
|
||||
name: x-api-key
|
||||
value: ${[ ApiKey ]}
|
||||
id: null
|
||||
- enabled: false
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: Content-Type
|
||||
value: multipart/form-data
|
||||
id: rf64n05TdT
|
||||
method: POST
|
||||
name: Import Vendor
|
||||
sortPriority: 1000.0
|
||||
url: http://localhost:8080/api/vendors/import
|
||||
urlParameters:
|
||||
- enabled: true
|
||||
name: ''
|
||||
value: ''
|
||||
id: psbQxyKQgT
|
@ -2,7 +2,7 @@ type: http_request
|
||||
model: http_request
|
||||
id: rq_Eb5hJAiaKG
|
||||
createdAt: 2024-12-01T08:39:12
|
||||
updatedAt: 2025-03-02T07:37:25.323717
|
||||
updatedAt: 2025-05-04T07:46:47.954899
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_1twfKS3Z0A
|
||||
authentication: {}
|
||||
|
@ -2,9 +2,9 @@ type: http_request
|
||||
model: http_request
|
||||
id: rq_FJT5RHtZ2B
|
||||
createdAt: 2025-04-05T16:17:53.461290
|
||||
updatedAt: 2025-04-05T16:52:50.453961
|
||||
updatedAt: 2025-05-02T06:21:14.891856
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_1twfKS3Z0A
|
||||
folderId: fl_Ffdms6jtTs
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
@ -33,7 +33,7 @@ headers:
|
||||
value: application/json
|
||||
id: FWIGbxmTAH
|
||||
method: GET
|
||||
name: Get attached Draft Files
|
||||
sortPriority: 6000.002
|
||||
name: Draft - Get attached Files
|
||||
sortPriority: 1000.0
|
||||
url: http://localhost:8080/api/files/draft
|
||||
urlParameters: []
|
||||
|
@ -2,9 +2,9 @@ type: http_request
|
||||
model: http_request
|
||||
id: rq_FmsM38yvHM
|
||||
createdAt: 2025-01-06T10:53:14
|
||||
updatedAt: 2025-03-31T17:29:12.196535
|
||||
updatedAt: 2025-04-29T13:37:39.099430
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_1twfKS3Z0A
|
||||
folderId: fl_N4bxVTBvxq
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
@ -35,6 +35,6 @@ headers:
|
||||
id: null
|
||||
method: PUT
|
||||
name: Verify Signing Key
|
||||
sortPriority: 4000.0
|
||||
sortPriority: 0.0
|
||||
url: http://localhost:8080/api/signature/key
|
||||
urlParameters: []
|
||||
|
@ -2,9 +2,9 @@ type: http_request
|
||||
model: http_request
|
||||
id: rq_G6OxWcR5j2
|
||||
createdAt: 2025-01-06T10:50:59
|
||||
updatedAt: 2025-03-31T17:29:16.792639
|
||||
updatedAt: 2025-04-29T13:37:41.199403
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_1twfKS3Z0A
|
||||
folderId: fl_N4bxVTBvxq
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
@ -43,6 +43,6 @@ headers:
|
||||
id: null
|
||||
method: GET
|
||||
name: Verify Signing Key
|
||||
sortPriority: 4500.0
|
||||
sortPriority: 500.0
|
||||
url: http://localhost:8080/api/signature/key
|
||||
urlParameters: []
|
||||
|
.yaak/yaak.rq_H3BCA2ycfz.yaml (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
type: http_request
|
||||
model: http_request
|
||||
id: rq_H3BCA2ycfz
|
||||
createdAt: 2025-05-04T06:20:58.170971
|
||||
updatedAt: 2025-05-04T06:22:39.018953
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_cD4DDJcE6q
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
body:
|
||||
text: ''
|
||||
bodyType: application/json
|
||||
description: ''
|
||||
headers:
|
||||
- enabled: true
|
||||
name: x-api-key
|
||||
value: ${[ ApiKey ]}
|
||||
id: null
|
||||
- enabled: false
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: Content-Type
|
||||
value: application/json
|
||||
id: FWIGbxmTAH
|
||||
method: GET
|
||||
name: Vendors
|
||||
sortPriority: 0.0
|
||||
url: http://localhost:8080/api/vendors
|
||||
urlParameters: []
|
.yaak/yaak.rq_Hy5iPWhu2m.yaml (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
type: http_request
|
||||
model: http_request
|
||||
id: rq_Hy5iPWhu2m
|
||||
createdAt: 2025-04-29T13:50:03.769054
|
||||
updatedAt: 2025-05-04T06:22:35.589567
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_MkZHYA6WTy
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
body:
|
||||
text: |-
|
||||
{
|
||||
"id": 1,
|
||||
"active": false,
|
||||
"description": "nicest units of all",
|
||||
"display": "Millimeter",
|
||||
"abbreviation": "mm"
|
||||
}
|
||||
bodyType: application/json
|
||||
description: ''
|
||||
headers:
|
||||
- enabled: true
|
||||
name: x-api-key
|
||||
value: ${[ ApiKey ]}
|
||||
id: null
|
||||
- enabled: false
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: Content-Type
|
||||
value: application/json
|
||||
id: FWIGbxmTAH
|
||||
method: PUT
|
||||
name: Units
|
||||
sortPriority: 3000.0
|
||||
url: http://localhost:8080/api/units
|
||||
urlParameters: []
|
.yaak/yaak.rq_JErbH8SXBa.yaml (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
type: http_request
|
||||
model: http_request
|
||||
id: rq_JErbH8SXBa
|
||||
createdAt: 2025-04-29T09:16:11.511514
|
||||
updatedAt: 2025-04-29T13:37:06.916430
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_cdHsBtENVb
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
body:
|
||||
text: ''
|
||||
bodyType: application/json
|
||||
description: ''
|
||||
headers:
|
||||
- enabled: true
|
||||
name: x-api-key
|
||||
value: ${[ ApiKey ]}
|
||||
id: null
|
||||
- enabled: false
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: Content-Type
|
||||
value: application/json
|
||||
id: FWIGbxmTAH
|
||||
method: GET
|
||||
name: Departments
|
||||
sortPriority: 0.0
|
||||
url: http://localhost:8080/api/departments
|
||||
urlParameters: []
|
.yaak/yaak.rq_LfNKQYDoYS.yaml (new file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
type: http_request
|
||||
model: http_request
|
||||
id: rq_LfNKQYDoYS
|
||||
createdAt: 2025-04-29T10:03:58.391323
|
||||
updatedAt: 2025-05-04T08:16:15.236391
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_cdHsBtENVb
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
body:
|
||||
text: |-
|
||||
{
|
||||
"status": "Active",
|
||||
"description": "So so happy to be here",
|
||||
"id": 1,
|
||||
"manager": {
|
||||
"user_id": "einstein",
|
||||
"name": "Albert",
|
||||
"surname": "EINSTEIN",
|
||||
"email": "albert.einstein@uno.com"
|
||||
},
|
||||
"name": "SUper Department",
|
||||
"vice_manager": {
|
||||
"user_id": "einstein",
|
||||
"name": "Albert",
|
||||
"surname": "EINSTEIN",
|
||||
"email": "albert.einstein@uno.com"}
|
||||
}
|
||||
bodyType: application/json
|
||||
description: ''
|
||||
headers:
|
||||
- enabled: true
|
||||
name: x-api-key
|
||||
value: ${[ ApiKey ]}
|
||||
id: null
|
||||
- enabled: false
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: Content-Type
|
||||
value: application/json
|
||||
id: FWIGbxmTAH
|
||||
method: POST
|
||||
name: Departments
|
||||
sortPriority: 1000.0
|
||||
url: http://localhost:8080/api/departments
|
||||
urlParameters: []
|
.yaak/yaak.rq_PpkPSxxoTY.yaml (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
type: http_request
|
||||
model: http_request
|
||||
id: rq_PpkPSxxoTY
|
||||
createdAt: 2025-05-04T06:57:09.002303
|
||||
updatedAt: 2025-05-04T06:57:24.275722
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_qcKuCivw9q
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
body:
|
||||
text: ''
|
||||
bodyType: application/json
|
||||
description: ''
|
||||
headers:
|
||||
- enabled: true
|
||||
name: x-api-key
|
||||
value: ${[ ApiKey ]}
|
||||
id: null
|
||||
- enabled: false
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: Content-Type
|
||||
value: application/json
|
||||
id: FWIGbxmTAH
|
||||
method: GET
|
||||
name: SSE Stream
|
||||
sortPriority: 0.0
|
||||
url: http://localhost:8080/api/dataHub
|
||||
urlParameters: []
|
@ -2,9 +2,9 @@ type: http_request
|
||||
model: http_request
|
||||
id: rq_SFA4nX8S0k
|
||||
createdAt: 2025-01-06T10:55:51
|
||||
updatedAt: 2025-03-31T17:29:07.550911
|
||||
updatedAt: 2025-04-29T13:37:39.099633
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_1twfKS3Z0A
|
||||
folderId: fl_N4bxVTBvxq
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
@ -34,6 +34,6 @@ headers:
|
||||
id: null
|
||||
method: POST
|
||||
name: Create Signing Key
|
||||
sortPriority: 3000.0
|
||||
sortPriority: 1000.0
|
||||
url: http://localhost:8080/api/signature/key
|
||||
urlParameters: []
|
||||
|
.yaak/yaak.rq_arfy5H4MQ7.yaml (new file, 40 lines)
@@ -0,0 +1,40 @@
|
||||
type: http_request
|
||||
model: http_request
|
||||
id: rq_arfy5H4MQ7
|
||||
createdAt: 2025-05-02T06:21:07.107918
|
||||
updatedAt: 2025-05-02T06:22:04.577841
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_Ffdms6jtTs
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
body:
|
||||
text: |-
|
||||
{
|
||||
"order_id": 1,
|
||||
"position_id": 1
|
||||
}
|
||||
bodyType: application/json
|
||||
description: ''
|
||||
headers:
|
||||
- enabled: true
|
||||
name: x-api-key
|
||||
value: ${[ ApiKey ]}
|
||||
id: null
|
||||
- enabled: false
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: Content-Type
|
||||
value: application/json
|
||||
id: FWIGbxmTAH
|
||||
method: GET
|
||||
name: Order - Get attached Files
|
||||
sortPriority: 2500.0
|
||||
url: http://localhost:8080/api/files/order
|
||||
urlParameters: []
|
@ -2,16 +2,16 @@ type: http_request
|
||||
model: http_request
|
||||
id: rq_eFkZk6Z6QS
|
||||
createdAt: 2025-04-05T16:52:35.932090
|
||||
updatedAt: 2025-04-05T16:55:08.986210
|
||||
updatedAt: 2025-05-02T06:21:14.892405
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_1twfKS3Z0A
|
||||
folderId: fl_Ffdms6jtTs
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
body:
|
||||
text: |-
|
||||
{
|
||||
"draft_id": "2015-09-05 23:56:04",
|
||||
"name": "My Favourite Attachment",
|
||||
"hash": "9CE71B15548BEAC9745F464531B70BEE30F71F310908D3A0A86E73E07F61ED61"
|
||||
}
|
||||
bodyType: application/json
|
||||
@ -34,7 +34,7 @@ headers:
|
||||
value: application/json
|
||||
id: FWIGbxmTAH
|
||||
method: GET
|
||||
name: Download Draft File
|
||||
sortPriority: 6000.003
|
||||
url: http://localhost:8080/api/files/draft/file
|
||||
name: Download File
|
||||
sortPriority: 4000.0
|
||||
url: http://localhost:8080/api/files/file
|
||||
urlParameters: []
|
||||
|
@ -2,7 +2,7 @@ type: http_request
|
||||
model: http_request
|
||||
id: rq_gU10vzCIxt
|
||||
createdAt: 2025-01-04T18:07:52
|
||||
updatedAt: 2025-03-31T17:28:46.667681
|
||||
updatedAt: 2025-05-04T06:56:59.386563
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_1twfKS3Z0A
|
||||
authentication:
|
||||
|
@ -2,9 +2,9 @@ type: http_request
|
||||
model: http_request
|
||||
id: rq_lGuHoFSNGa
|
||||
createdAt: 2025-01-06T10:56:17
|
||||
updatedAt: 2025-03-31T17:29:20.133348
|
||||
updatedAt: 2025-04-29T13:37:45.317110
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_1twfKS3Z0A
|
||||
folderId: fl_N4bxVTBvxq
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
@ -34,6 +34,6 @@ headers:
|
||||
id: null
|
||||
method: POST
|
||||
name: Sign Something
|
||||
sortPriority: 5000.0
|
||||
sortPriority: 1500.0
|
||||
url: http://localhost:8080/api/signature/sign
|
||||
urlParameters: []
|
||||
|
.yaak/yaak.rq_sx5QjHrLQG.yaml (new file, 48 lines)
@@ -0,0 +1,48 @@
|
||||
type: http_request
|
||||
model: http_request
|
||||
id: rq_sx5QjHrLQG
|
||||
createdAt: 2025-05-03T04:14:02.035480
|
||||
updatedAt: 2025-05-03T04:14:58.095730
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_Ffdms6jtTs
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
body:
|
||||
text: |-
|
||||
{
|
||||
"draft_id": "2015-09-05 23:56:04",
|
||||
"hash": "7DE9EFF4B557242E72B10AA5E959A9F60B758E3FC67B7A329C758EFE25F8789D"
|
||||
}
|
||||
bodyType: application/json
|
||||
description: ''
|
||||
headers:
|
||||
- enabled: true
|
||||
name: x-api-key
|
||||
value: ${[ ApiKey ]}
|
||||
id: null
|
||||
- enabled: false
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: Content-Type
|
||||
value: application/json
|
||||
id: vhM5fHsm8c
|
||||
method: DELETE
|
||||
name: Draft- Upload File
|
||||
sortPriority: 2000.001
|
||||
url: http://localhost:8080/api/files/draft
|
||||
urlParameters:
|
||||
- enabled: true
|
||||
name: draft_id
|
||||
value: 2015-09-05 23:56:04
|
||||
id: WVUgAXzXtu
|
||||
- enabled: true
|
||||
name: ''
|
||||
value: ''
|
||||
id: QtKYTXkc27
|
@ -2,28 +2,28 @@ type: http_request
|
||||
model: http_request
|
||||
id: rq_t5k4XC8Poe
|
||||
createdAt: 2025-04-02T16:41:11.369047
|
||||
updatedAt: 2025-04-05T17:00:29.492513
|
||||
updatedAt: 2025-05-03T04:21:12.206197
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_1twfKS3Z0A
|
||||
folderId: fl_Ffdms6jtTs
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
body:
|
||||
form:
|
||||
- enabled: true
|
||||
file: ''
|
||||
id: u6yikyzRTH
|
||||
name: file
|
||||
value: ''
|
||||
- enabled: true
|
||||
id: njtBaKefpN
|
||||
name: name
|
||||
value: '"TestValue"'
|
||||
- enabled: true
|
||||
- enabled: false
|
||||
id: Iav8TimHS8
|
||||
name: draft_id
|
||||
value: 2015-09-05 23:56:04
|
||||
value: ''
|
||||
- enabled: true
|
||||
id: d57ZzimswD
|
||||
id: HZuvdvDs9F
|
||||
name: ''
|
||||
value: ''
|
||||
bodyType: multipart/form-data
|
||||
@ -46,7 +46,15 @@ headers:
|
||||
value: multipart/form-data
|
||||
id: rf64n05TdT
|
||||
method: POST
|
||||
name: Upload Draft File
|
||||
sortPriority: 6000.001
|
||||
name: Draft- Upload File
|
||||
sortPriority: 2000.0
|
||||
url: http://localhost:8080/api/files/draft
|
||||
urlParameters: []
|
||||
urlParameters:
|
||||
- enabled: true
|
||||
name: draft_id
|
||||
value: 2015-09-05 23:56:04
|
||||
id: WVUgAXzXtu
|
||||
- enabled: true
|
||||
name: ''
|
||||
value: ''
|
||||
id: QtKYTXkc27
|
||||
|
.yaak/yaak.rq_uBUf3zBdwm.yaml (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
type: http_request
|
||||
model: http_request
|
||||
id: rq_uBUf3zBdwm
|
||||
createdAt: 2025-04-29T13:37:56.871629
|
||||
updatedAt: 2025-05-04T08:10:14.530064
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_MkZHYA6WTy
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
body:
|
||||
text: |-
|
||||
{
|
||||
"id": 0,
|
||||
"status": "Active",
|
||||
"description": "nicest units of all",
|
||||
"display": "Kilogram",
|
||||
"abbreviation": "kg"
|
||||
}
|
||||
bodyType: application/json
|
||||
description: ''
|
||||
headers:
|
||||
- enabled: true
|
||||
name: x-api-key
|
||||
value: ${[ ApiKey ]}
|
||||
id: null
|
||||
- enabled: false
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: Content-Type
|
||||
value: application/json
|
||||
id: FWIGbxmTAH
|
||||
method: POST
|
||||
name: Units
|
||||
sortPriority: 1000.0
|
||||
url: http://localhost:8080/api/units
|
||||
urlParameters: []
|
.yaak/yaak.rq_wm8CVPbpGM.yaml (new file, 40 lines)
@@ -0,0 +1,40 @@
|
||||
type: http_request
|
||||
model: http_request
|
||||
id: rq_wm8CVPbpGM
|
||||
createdAt: 2025-05-03T05:02:48.253678
|
||||
updatedAt: 2025-05-03T05:55:14.935036
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_Ffdms6jtTs
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
body:
|
||||
text: |-
|
||||
{
|
||||
"order_id": 1,
|
||||
"position_id": 1
|
||||
}
|
||||
bodyType: application/json
|
||||
description: ''
|
||||
headers:
|
||||
- enabled: true
|
||||
name: x-api-key
|
||||
value: ${[ ApiKey ]}
|
||||
id: null
|
||||
- enabled: false
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: Content-Type
|
||||
value: application/json
|
||||
id: FWIGbxmTAH
|
||||
method: GET
|
||||
name: Order - Download ZIP bundle
|
||||
sortPriority: 3500.0
|
||||
url: http://localhost:8080/api/files/order/bundle
|
||||
urlParameters: []
|
.yaak/yaak.rq_zzXmXLHsVP.yaml (new file, 56 lines)
@@ -0,0 +1,56 @@
|
||||
type: http_request
|
||||
model: http_request
|
||||
id: rq_zzXmXLHsVP
|
||||
createdAt: 2025-04-29T12:23:07.340483
|
||||
updatedAt: 2025-05-03T05:55:43.915850
|
||||
workspaceId: wk_SlydsyH2WI
|
||||
folderId: fl_Ffdms6jtTs
|
||||
authentication:
|
||||
token: ${[ response.body.raw(request='rq_Eb5hJAiaKG') ]}
|
||||
authenticationType: bearer
|
||||
body:
|
||||
form:
|
||||
- enabled: true
|
||||
id: u6yikyzRTH
|
||||
name: file
|
||||
value: ''
|
||||
- enabled: true
|
||||
id: sQqxfBX5aF
|
||||
name: ''
|
||||
value: ''
|
||||
bodyType: multipart/form-data
|
||||
description: ''
|
||||
headers:
|
||||
- enabled: true
|
||||
name: x-api-key
|
||||
value: ${[ ApiKey ]}
|
||||
id: null
|
||||
- enabled: false
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: ''
|
||||
value: ''
|
||||
id: null
|
||||
- enabled: true
|
||||
name: Content-Type
|
||||
value: multipart/form-data
|
||||
id: rf64n05TdT
|
||||
method: POST
|
||||
name: Order - Upload File
|
||||
sortPriority: 3000.0
|
||||
url: http://localhost:8080/api/files/order
|
||||
urlParameters:
|
||||
- enabled: true
|
||||
name: order_id
|
||||
value: '1'
|
||||
id: bNhFCvXvE2
|
||||
- enabled: true
|
||||
name: position_id
|
||||
value: '1'
|
||||
id: 8TgIHPNtwV
|
||||
- enabled: true
|
||||
name: ''
|
||||
value: ''
|
||||
id: SBj7PWgvYs
|
Cargo.lock (generated, 363 changed lines)
@ -17,6 +17,17 @@ version = "2.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
|
||||
|
||||
[[package]]
|
||||
name = "aes"
|
||||
version = "0.8.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"cipher",
|
||||
"cpufeatures",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "aho-corasick"
|
||||
version = "1.1.3"
|
||||
@ -157,9 +168,9 @@ checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
|
||||
|
||||
[[package]]
|
||||
name = "axum"
|
||||
version = "0.8.3"
|
||||
version = "0.8.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "de45108900e1f9b9242f7f2e254aa3e2c029c921c258fe9e6b4217eeebd54288"
|
||||
checksum = "021e862c184ae977658b36c4500f7feac3221ca5da43e3f25bd04ab6c79a29b5"
|
||||
dependencies = [
|
||||
"axum-core",
|
||||
"axum-macros",
|
||||
@ -203,11 +214,12 @@ dependencies = [
|
||||
"config",
|
||||
"dotenv",
|
||||
"error-stack",
|
||||
"futures",
|
||||
"hmac",
|
||||
"ldap3",
|
||||
"minisign",
|
||||
"once_cell",
|
||||
"rand 0.9.0",
|
||||
"rand 0.9.1",
|
||||
"rust-argon2",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@ -215,6 +227,7 @@ dependencies = [
|
||||
"sqlx",
|
||||
"strum",
|
||||
"tokio",
|
||||
"tokio-stream",
|
||||
"tokio-util",
|
||||
"tracing",
|
||||
"tracing-appender",
|
||||
@ -227,7 +240,9 @@ dependencies = [
|
||||
"utoipa-scalar",
|
||||
"utoipa-swagger-ui",
|
||||
"uuid",
|
||||
"validator",
|
||||
"windows-service",
|
||||
"zip",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -358,12 +373,33 @@ version = "1.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b"
|
||||
|
||||
[[package]]
|
||||
name = "bzip2"
|
||||
version = "0.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "49ecfb22d906f800d4fe833b6282cf4dc1c298f5057ca0b5445e5c209735ca47"
|
||||
dependencies = [
|
||||
"bzip2-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bzip2-sys"
|
||||
version = "0.1.13+1.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "225bff33b2141874fe80d71e07d6eec4f85c5c216453dd96388240f96e1acc14"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"pkg-config",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.2.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a012a0df96dd6d06ba9a1b29d6402d1a5d77c6befd2566afdc26e10603dc93d7"
|
||||
dependencies = [
|
||||
"jobserver",
|
||||
"libc",
|
||||
"shlex",
|
||||
]
|
||||
|
||||
@ -375,9 +411,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||
|
||||
[[package]]
|
||||
name = "chrono"
|
||||
version = "0.4.40"
|
||||
version = "0.4.41"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c"
|
||||
checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d"
|
||||
dependencies = [
|
||||
"android-tzdata",
|
||||
"iana-time-zone",
|
||||
@ -400,9 +436,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.35"
|
||||
version = "4.5.37"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d8aa86934b44c19c50f87cc2790e19f54f7a67aedb64101c2e1a2e5ecfb73944"
|
||||
checksum = "eccb054f56cbd38340b380d4a8e69ef1f02f1af43db2f0cc817a4774d80ae071"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
@ -410,9 +446,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.35"
|
||||
version = "4.5.37"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2414dbb2dd0695280da6ea9261e327479e9d37b0630f6b53ba2a11c60c679fd9"
|
||||
checksum = "efd9466fac8543255d3b1fcad4762c5e116ffe808c8a3043d4263cd4fd4862a2"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
@ -617,6 +653,47 @@ version = "1.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b916ba8ce9e4182696896f015e8a5ae6081b305f74690baa8465e35f5a142ea4"
|
||||
|
||||
[[package]]
|
||||
name = "darling"
|
||||
version = "0.20.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee"
|
||||
dependencies = [
|
||||
"darling_core",
|
||||
"darling_macro",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "darling_core"
|
||||
version = "0.20.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e"
|
||||
dependencies = [
|
||||
"fnv",
|
||||
"ident_case",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"strsim",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "darling_macro"
|
||||
version = "0.20.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead"
|
||||
dependencies = [
|
||||
"darling_core",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deflate64"
|
||||
version = "0.1.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "da692b8d1080ea3045efaab14434d40468c3d8657e42abddfffca87b428f4c1b"
|
||||
|
||||
[[package]]
|
||||
name = "der"
|
||||
version = "0.7.9"
|
||||
@ -951,9 +1028,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"js-sys",
|
||||
"libc",
|
||||
"r-efi",
|
||||
"wasi 0.14.2+wasi-0.2.4",
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -1249,6 +1328,12 @@ dependencies = [
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ident_case"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "1.0.3"
|
||||
@ -1303,10 +1388,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674"
|
||||
|
||||
[[package]]
|
||||
name = "js-sys"
|
||||
version = "0.3.76"
|
||||
name = "jobserver"
|
||||
version = "0.1.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7"
|
||||
checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "js-sys"
|
||||
version = "0.3.77"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"wasm-bindgen",
|
||||
@ -1437,6 +1531,27 @@ version = "0.4.22"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
|
||||
|
||||
[[package]]
|
||||
name = "lzma-rs"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "297e814c836ae64db86b36cf2a557ba54368d03f6afcd7d947c266692f71115e"
|
||||
dependencies = [
|
||||
"byteorder",
|
||||
"crc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lzma-sys"
|
||||
version = "0.1.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5fda04ab3764e6cde78b9974eec4f779acaba7c4e84b36eca3cf77c581b85d27"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"libc",
|
||||
"pkg-config",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "matchers"
|
||||
version = "0.1.0"
|
||||
@ -1895,7 +2010,29 @@ version = "0.2.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04"
|
||||
dependencies = [
|
||||
"zerocopy 0.7.35",
|
||||
"zerocopy",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro-error-attr2"
|
||||
version = "2.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro-error2"
|
||||
version = "2.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802"
|
||||
dependencies = [
|
||||
"proc-macro-error-attr2",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -1935,13 +2072,12 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.9.0"
|
||||
version = "0.9.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94"
|
||||
checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97"
|
||||
dependencies = [
|
||||
"rand_chacha 0.9.0",
|
||||
"rand_core 0.9.3",
|
||||
"zerocopy 0.8.24",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -2348,9 +2484,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "sha2"
|
||||
version = "0.10.8"
|
||||
version = "0.10.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
|
||||
checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"cpufeatures",
|
||||
@ -2458,9 +2594,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "sqlx"
|
||||
version = "0.8.3"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4410e73b3c0d8442c5f99b425d7a435b5ee0ae4167b3196771dd3f7a01be745f"
|
||||
checksum = "f3c3a85280daca669cfd3bcb68a337882a8bc57ec882f72c5d13a430613a738e"
|
||||
dependencies = [
|
||||
"sqlx-core",
|
||||
"sqlx-macros",
|
||||
@ -2471,10 +2607,11 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "sqlx-core"
|
||||
version = "0.8.3"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6a007b6936676aa9ab40207cde35daab0a04b823be8ae004368c0793b96a61e0"
|
||||
checksum = "f743f2a3cea30a58cd479013f75550e879009e3a02f616f18ca699335aa248c3"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"bytes",
|
||||
"chrono",
|
||||
"crc",
|
||||
@ -2505,9 +2642,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "sqlx-macros"
|
||||
version = "0.8.3"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3112e2ad78643fef903618d78cf0aec1cb3134b019730edb039b69eaf531f310"
|
||||
checksum = "7f4200e0fde19834956d4252347c12a083bdcb237d7a1a1446bffd8768417dce"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@ -2518,9 +2655,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "sqlx-macros-core"
|
||||
version = "0.8.3"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4e9f90acc5ab146a99bf5061a7eb4976b573f560bc898ef3bf8435448dd5e7ad"
|
||||
checksum = "882ceaa29cade31beca7129b6beeb05737f44f82dbe2a9806ecea5a7093d00b7"
|
||||
dependencies = [
|
||||
"dotenvy",
|
||||
"either",
|
||||
@ -2544,9 +2681,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "sqlx-mysql"
|
||||
version = "0.8.3"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4560278f0e00ce64938540546f59f590d60beee33fffbd3b9cd47851e5fff233"
|
||||
checksum = "0afdd3aa7a629683c2d750c2df343025545087081ab5942593a5288855b1b7a7"
|
||||
dependencies = [
|
||||
"atoi",
|
||||
"base64 0.22.1",
|
||||
@ -2587,9 +2724,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "sqlx-postgres"
|
||||
version = "0.8.3"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c5b98a57f363ed6764d5b3a12bfedf62f07aa16e1856a7ddc2a0bb190a959613"
|
||||
checksum = "a0bedbe1bbb5e2615ef347a5e9d8cd7680fb63e77d9dafc0f29be15e53f1ebe6"
|
||||
dependencies = [
|
||||
"atoi",
|
||||
"base64 0.22.1",
|
||||
@ -2625,9 +2762,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "sqlx-sqlite"
|
||||
version = "0.8.3"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f85ca71d3a5b24e64e1d08dd8fe36c6c95c339a896cc33068148906784620540"
|
||||
checksum = "c26083e9a520e8eb87a06b12347679b142dc2ea29e6e409f805644a7a979a5bc"
|
||||
dependencies = [
|
||||
"atoi",
|
||||
"chrono",
|
||||
@ -2643,6 +2780,7 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_urlencoded",
|
||||
"sqlx-core",
|
||||
"thiserror 2.0.9",
|
||||
"tracing",
|
||||
"url",
|
||||
]
|
||||
@ -2868,9 +3006,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
|
||||
|
||||
[[package]]
|
||||
name = "tokio"
|
||||
version = "1.44.1"
|
||||
version = "1.44.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f382da615b842244d4b8738c82ed1275e6c5dd90c459a30941cd07080b06c91a"
|
||||
checksum = "e6b88822cbe49de4185e3a4cbf8321dd487cf5fe0c5c65695fef6346371e9c48"
|
||||
dependencies = [
|
||||
"backtrace",
|
||||
"bytes",
|
||||
@ -2914,19 +3052,20 @@ dependencies = [
|
||||
"futures-core",
|
||||
"pin-project-lite",
|
||||
"tokio",
|
||||
"tokio-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-util"
|
||||
version = "0.7.14"
|
||||
version = "0.7.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6b9590b93e6fcc1739458317cccd391ad3955e2bde8913edf6f95f9e65a8f034"
|
||||
checksum = "66a539a9ad6d5d281510d5bd368c973d636c02dbf8a67300bfb6b950696ad7df"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"futures-core",
|
||||
"futures-sink",
|
||||
"futures-util",
|
||||
"hashbrown 0.14.5",
|
||||
"hashbrown 0.15.2",
|
||||
"pin-project-lite",
|
||||
"tokio",
|
||||
]
|
||||
@ -3256,9 +3395,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "utoipa-swagger-ui"
|
||||
version = "9.0.0"
|
||||
version = "9.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "161166ec520c50144922a625d8bc4925cc801b2dda958ab69878527c0e5c5d61"
|
||||
checksum = "d29519b3c485df6b13f4478ac909a491387e9ef70204487c3b64b53749aec0be"
|
||||
dependencies = [
|
||||
"axum",
|
||||
"base64 0.22.1",
|
||||
@ -3281,6 +3420,36 @@ dependencies = [
|
||||
"getrandom 0.3.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "validator"
|
||||
version = "0.20.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "43fb22e1a008ece370ce08a3e9e4447a910e92621bb49b85d6e48a45397e7cfa"
|
||||
dependencies = [
|
||||
"idna",
|
||||
"once_cell",
|
||||
"regex",
|
||||
"serde",
|
||||
"serde_derive",
|
||||
"serde_json",
|
||||
"url",
|
||||
"validator_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "validator_derive"
|
||||
version = "0.20.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b7df16e474ef958526d1205f6dda359fdfab79d9aa6d54bafcb92dcd07673dca"
|
||||
dependencies = [
|
||||
"darling",
|
||||
"once_cell",
|
||||
"proc-macro-error2",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "valuable"
|
||||
version = "0.1.0"
|
||||
@ -3332,20 +3501,21 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b"
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen"
|
||||
version = "0.2.99"
|
||||
version = "0.2.100"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396"
|
||||
checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"once_cell",
|
||||
"rustversion",
|
||||
"wasm-bindgen-macro",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-backend"
|
||||
version = "0.2.99"
|
||||
version = "0.2.100"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79"
|
||||
checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"log",
|
||||
@ -3357,9 +3527,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro"
|
||||
version = "0.2.99"
|
||||
version = "0.2.100"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe"
|
||||
checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"wasm-bindgen-macro-support",
|
||||
@ -3367,9 +3537,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro-support"
|
||||
version = "0.2.99"
|
||||
version = "0.2.100"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2"
|
||||
checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@ -3380,9 +3550,12 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-shared"
|
||||
version = "0.2.99"
|
||||
version = "0.2.100"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6"
|
||||
checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "whoami"
|
||||
@ -3644,6 +3817,15 @@ version = "0.5.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51"
|
||||
|
||||
[[package]]
|
||||
name = "xz2"
|
||||
version = "0.1.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "388c44dc09d76f1536602ead6d325eb532f5c122f17782bd57fb47baeeb767e2"
|
||||
dependencies = [
|
||||
"lzma-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "yaml-rust2"
|
||||
version = "0.10.0"
|
||||
@ -3686,16 +3868,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0"
|
||||
dependencies = [
|
||||
"byteorder",
|
||||
"zerocopy-derive 0.7.35",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy"
|
||||
version = "0.8.24"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2586fea28e186957ef732a5f8b3be2da217d65c5969d4b1e17f973ebbe876879"
|
||||
dependencies = [
|
||||
"zerocopy-derive 0.8.24",
|
||||
"zerocopy-derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -3709,17 +3882,6 @@ dependencies = [
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy-derive"
|
||||
version = "0.8.24"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a996a8f63c5c4448cd959ac1bab0aaa3306ccfd060472f85943ee0750f0169be"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerofrom"
|
||||
version = "0.1.5"
|
||||
@ -3746,6 +3908,20 @@ name = "zeroize"
|
||||
version = "1.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde"
|
||||
dependencies = [
|
||||
"zeroize_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zeroize_derive"
|
||||
version = "1.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerovec"
|
||||
@ -3771,19 +3947,30 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zip"
|
||||
version = "2.2.2"
|
||||
version = "2.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ae9c1ea7b3a5e1f4b922ff856a129881167511563dc219869afe3787fc0c1a45"
|
||||
checksum = "1dcb24d0152526ae49b9b96c1dcf71850ca1e0b882e4e28ed898a93c41334744"
|
||||
dependencies = [
|
||||
"aes",
|
||||
"arbitrary",
|
||||
"bzip2",
|
||||
"constant_time_eq",
|
||||
"crc32fast",
|
||||
"crossbeam-utils",
|
||||
"displaydoc",
|
||||
"deflate64",
|
||||
"flate2",
|
||||
"getrandom 0.3.2",
|
||||
"hmac",
|
||||
"indexmap",
|
||||
"lzma-rs",
|
||||
"memchr",
|
||||
"thiserror 2.0.9",
|
||||
"pbkdf2",
|
||||
"sha1",
|
||||
"time",
|
||||
"xz2",
|
||||
"zeroize",
|
||||
"zopfli",
|
||||
"zstd",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -3799,3 +3986,31 @@ dependencies = [
|
||||
"once_cell",
|
||||
"simd-adler32",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zstd"
|
||||
version = "0.13.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a"
|
||||
dependencies = [
|
||||
"zstd-safe",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zstd-safe"
|
||||
version = "7.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d"
|
||||
dependencies = [
|
||||
"zstd-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zstd-sys"
|
||||
version = "2.0.15+zstd.1.5.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "eb81183ddd97d0c74cedf1d50d85c8d08c1b8b68ee863bdee9e706eedba1a237"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"pkg-config",
|
||||
]
|
||||
|
Cargo.toml (22 changed lines)
@@ -18,31 +18,32 @@ error-stack = "0.5.0"
# CLI
# ========================================
dotenv = "0.15"
clap = { version = "4.5.35", features = ["derive"] }
clap = { version = "4.5.37", features = ["derive"] }
config = "0.15.11"
colored = "3.0.0"

# User Authentication
# ========================================
uuid = { version = "1.16.0", features = ["v4"] }
sha2 = "0.10.8"
sha2 = "0.10.9"
hmac = "0.12.1"
minisign = "0.7.9"
# axum-jwt-login = { path = "../axum-login-jwt" }
axum-jwt-login = { version = "0.1.0", registry = "kellnr" }
rust-argon2 = "2.1.0"
rand = "0.9.0"
rand = "0.9.1"
ldap3 = "0.11.5"

# Service
# ========================================
windows-service = "0.8.0"

axum = { version = "0.8.3", features = ["macros", "multipart"] }
axum = { version = "0.8.4", features = ["macros", "multipart"] }
validator = { version = "0.20.0", features = ["derive"] }
strum = { version = "0.27", features = ["derive"] }
utoipa = { version = "5.3.1", features = ["axum_extras"] }
utoipa-axum = "0.2.0"
utoipa-swagger-ui = { version = "9.0.0", features = ["axum"] }
utoipa-swagger-ui = { version = "9.0.1", features = ["axum"] }
utoipa-redoc = { version = "6.0.0", features = ["axum"] }
utoipa-scalar = { version = "0.3.0", features = ["axum"] }
ts-rs = { version = "10.1.0", features = ["chrono-impl"] }
@@ -51,8 +52,11 @@ ts-rs = { version = "10.1.0", features = ["chrono-impl"] }
# ========================================
serde = { version = "1.0.219", features = ["derive"] }
serde_json = "1.0.140"
tokio = { version = "1.44.1", features = ["full"] }
tokio-util = { version = "0.7.14", features = ["rt"] }
tokio = { version = "1.44.2", features = ["full"] }
tokio-util = { version = "0.7.15", features = ["rt"] }
tokio-stream = { version = "0.1.17", features = ["sync"] }
futures = "0.3"
once_cell = "1.21.3"
sqlx = { version = "0.8.3", features = ["runtime-tokio", "postgres", "chrono"] }
chrono = { version = "0.4.40", features = ["serde"] }
sqlx = { version = "0.8.5", features = ["runtime-tokio", "postgres", "chrono"] }
chrono = { version = "0.4.41", features = ["serde"] }
zip = "2.6.1"
@@ -178,7 +178,7 @@ mod test {

        println!("{res:?}");

        let (res, re) = ldap
        let (res, _re) = ldap
            .search(
                // "CN=Abel Austin,OU=Accounting,OU=Mylab Users,DC=mylab,DC=local",
                "OU=DevUsers,DC=example,DC=org",
@@ -3,13 +3,9 @@ use ldap::LDAPBackend;
use private_key_cache::PrivateKeyCache;
use sqlx::PgPool;

use crate::{
    api::routes::users::{models::UserStatus, sql::get_users},
    config::Configuration,
    errors::ApiError,
};
use crate::{api::routes::users::sql::get_users, config::Configuration, errors::ApiError};

use super::routes::{auth::models::Credentials, users::models::User};
use super::routes::{auth::models::Credentials, models::Status, users::models::User};

pub mod ldap;
pub mod private_key_cache;
@@ -52,7 +48,7 @@ impl AuthBackend<User> for ApiBackend {
        Credentials { id, password }: Self::Credentials,
    ) -> Result<Option<User>, Self::Error> {
        // get user from Database
        let user = get_users(&self.pool, Some(UserStatus::Active), Some(id)).await?;
        let user = get_users(&self.pool, Some(Status::Active), Some(id)).await?;
        let user = user.first().ok_or(ApiError::InvalidCredentials)?;

        // authenticate user
@@ -5,10 +5,14 @@ use utoipa::{

pub const AUTH_TAG: &str = "Authentication";
pub const USERS_TAG: &str = "Users";
pub const ORDER_TAG: &str = "order";
pub const ORDER_TAG: &str = "Orders";
pub const API_KEY_TAG: &str = "API Keys";
pub const SIGNATURE_TAG: &str = "Signature";
pub const FILE_TAG: &str = "Files";
pub const DEPARTMENTS_TAG: &str = "Departments";
pub const UNITS_TAG: &str = "Units";
pub const VENDORS_TAG: &str = "Vendors";
pub const DATA_HUB_TAG: &str = "Data Hub";

#[derive(OpenApi)]
#[openapi(
@@ -16,7 +20,14 @@ pub const FILE_TAG: &str = "Files";
    tags(
        (name = AUTH_TAG, description = "API Authentication endpoints"),
        (name = FILE_TAG, description = "Upload and Download Files"),
        (name = ORDER_TAG, description = "Order API endpoints")
        (name = ORDER_TAG, description = "Order API endpoints"),
        (name = API_KEY_TAG, description = "API Key endpoints"),
        (name = SIGNATURE_TAG, description = "API endpoints for signing"),
        (name = FILE_TAG, description = "File attachment API endpoints"),
        (name = DEPARTMENTS_TAG, description = "Departments API endpoints"),
        (name = UNITS_TAG, description = "Units API endpoints"),
        (name = VENDORS_TAG, description = "Vendors API endpoints"),
        (name = DATA_HUB_TAG, description = "Data Hub API endpoints")
    ),
)]
pub struct ApiDocumentation;
@ -66,99 +66,3 @@ async fn shutdown_signal(stop_signal: CancellationToken) {
|
||||
|
||||
info!("Shutting down {APP_NAME}...");
|
||||
}
|
||||
|
||||
// // Set Report Colour Mode to NONE
|
||||
// Report::set_color_mode(error_stack::fmt::ColorMode::None);
|
||||
|
||||
// // Enable the `INFO` level for anything in `darl` as default
|
||||
// let level_filter =
|
||||
// filter::Targets::new().with_target(env!("CARGO_PKG_NAME"), DEFAULT_LOG_LEVEL_FILTER);
|
||||
// let (level_filter, tracing_target_reload_handle) = reload::Layer::new(level_filter);
|
||||
|
||||
// // Prepare logging to file
|
||||
// let file_appender = RollingFileAppenderBase::new(
|
||||
// ROOT_PATH.with_file_name(format!("{}.log", env!("CARGO_PKG_NAME"))),
|
||||
// RollingConditionBase::new().max_size(1024 * 1024 * 2),
|
||||
// 5,
|
||||
// )
|
||||
// .change_context(ServiceError::Starting)?;
|
||||
// let (non_blocking, _guard) = tracing_appender::non_blocking(file_appender);
|
||||
|
||||
// // Prepare live logging to config webserver
|
||||
// let (log_receiver, live_log_layer) = LiveLogLayer::new();
|
||||
|
||||
// // prepare initialization of logging
|
||||
// let log_layers = tracing_subscriber::registry()
|
||||
// .with(level_filter)
|
||||
// // .with(filter::LevelFilter::DEBUG)
|
||||
// .with(live_log_layer)
|
||||
// .with(
|
||||
// fmt::Layer::default()
|
||||
// .with_target(false)
|
||||
// .with_ansi(false)
|
||||
// .with_writer(non_blocking),
|
||||
// );
|
||||
|
||||
// // also log to console in debug mode
|
||||
// #[cfg(debug_assertions)]
|
||||
// let stdout_log = tracing_subscriber::fmt::layer().pretty();
|
||||
// #[cfg(debug_assertions)]
|
||||
// let log_layers = log_layers.with(stdout_log);
|
||||
|
||||
// // Initialize logging
|
||||
// log_layers.init();
|
||||
|
||||
// // Initialize local database
|
||||
// let local_database = LocalDatabase::init()
|
||||
// .await
|
||||
// .change_context(ServiceError::Starting)?;
|
||||
|
||||
// // Load configuration from config files
|
||||
// let (config, external_database) = Configuration::initialize(&local_database)
|
||||
// .await
|
||||
// .change_context(ServiceError::Starting)?;
|
||||
// let standalone_external_db = StandaloneExternalDatabase::from(&external_database);
|
||||
|
||||
// // change log level to configured value
|
||||
// if let Err(error) = tracing_target_reload_handle.modify(|filter| {
|
||||
// *filter = filter::Targets::new().with_target(env!("CARGO_PKG_NAME"), &config.log_level)
|
||||
// }) {
|
||||
// error!("{error}");
|
||||
// }
|
||||
|
||||
// // prepare and start connections
|
||||
// let connections = MachineConnections::init(&local_database, &standalone_external_db, &config)
|
||||
// .await
|
||||
// .change_context(ServiceError::Starting)?;
|
||||
|
||||
// // start config server
|
||||
// ConfigServer::start(
|
||||
// config.extended_config.webserver.config_server_port,
|
||||
// config.opc_configuration.clone(),
|
||||
// &local_database,
|
||||
// standalone_external_db,
|
||||
// connections,
|
||||
// tracing_target_reload_handle,
|
||||
// log_receiver,
|
||||
// )
|
||||
// .await;
|
||||
|
||||
// // start webserver
|
||||
// WebServer::start(&config, &local_database).await;
|
||||
|
||||
// // initialize Logging to external database
|
||||
// external_database.start_writer().await;
|
||||
|
||||
// info!("{APP_NAME} service is now running...");
|
||||
|
||||
// // block thread
|
||||
// loop {
|
||||
// // Poll shutdown event.
|
||||
// if (stop_signal.recv().await).is_some() {
|
||||
// // Break the loop either upon stop or channel disconnect
|
||||
// info!("Shutting down {APP_NAME} service");
|
||||
// break;
|
||||
// };
|
||||
// }
|
||||
|
||||
// Ok(())
|
||||
|
@ -11,7 +11,10 @@ use utoipa::ToSchema;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{
|
||||
api::routes::users::permissions::{Permission, PermissionContainer},
|
||||
api::routes::{
|
||||
models::Status,
|
||||
users::permissions::{Permission, PermissionContainer},
|
||||
},
|
||||
config::Configuration,
|
||||
errors::ApiError,
|
||||
utils::create_random,
|
||||
@ -37,6 +40,7 @@ pub struct ApiKey {
|
||||
#[serde(default)]
|
||||
#[schema(value_type = String, read_only)]
|
||||
pub last_change: Option<NaiveDateTime>,
|
||||
pub status: Status,
|
||||
}
|
||||
|
||||
impl ApiKey {
|
||||
@ -80,6 +84,7 @@ impl ApiKey {
|
||||
api_config_secret: Some(config.token_secret.clone()),
|
||||
creation_date: None,
|
||||
last_change: None,
|
||||
status: Status::Active,
|
||||
},
|
||||
))
|
||||
}
|
||||
|
@ -1,8 +1,7 @@
|
||||
use sqlx::{PgPool, Postgres, Transaction};
|
||||
|
||||
use crate::errors::ApiError;
|
||||
|
||||
use super::models::ApiKey;
|
||||
use crate::{api::routes::models::Status, errors::ApiError};
|
||||
|
||||
pub async fn get_api_keys(pool: &PgPool) -> Result<Vec<ApiKey>, ApiError> {
|
||||
Ok(sqlx::query_as!(
|
||||
@ -14,6 +13,7 @@ pub async fn get_api_keys(pool: &PgPool) -> Result<Vec<ApiKey>, ApiError> {
|
||||
APIKEYS."UserAuthRequired" as auth_required,
|
||||
APIKEYS."CreationDate" as "creation_date?",
|
||||
APIKEYS."LastChanged" as "last_change?",
|
||||
APIKEYS."Status" as "status: Status",
|
||||
NULL as api_config_secret,
|
||||
array_remove(ARRAY_AGG(APIKEY_PERMISSIONS."Permission"), NULL) AS permissions
|
||||
FROM
|
||||
|
53
src/api/routes/data_hub/handlers/data_hub_get.rs
Normal file
@ -0,0 +1,53 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use axum::{
|
||||
debug_handler,
|
||||
response::{
|
||||
sse::{Event, KeepAlive},
|
||||
Sse,
|
||||
},
|
||||
Extension,
|
||||
};
|
||||
use futures::stream::Stream;
|
||||
|
||||
use tokio_stream::{wrappers::BroadcastStream, StreamExt as _};
|
||||
|
||||
use crate::{
|
||||
api::{
|
||||
description::DATA_HUB_TAG,
|
||||
routes::{
|
||||
data_hub::models::{DataHub, DataHubStream},
|
||||
AuthBackendType,
|
||||
},
|
||||
},
|
||||
errors::ApiError,
|
||||
};
|
||||
|
||||
#[debug_handler]
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/dataHub",
|
||||
summary = "Subscribe to real-time data updates",
|
||||
description = "This endpoint streams real-time data updates to the client using server-sent events (SSE).
|
||||
The client must establish a persistent HTTP connection to receive updates.",
|
||||
responses(
|
||||
(status = OK, body = DataHubStream, description = "Stream of real-time data updates", content_type = "text/event-stream"),
|
||||
),
|
||||
tag = DATA_HUB_TAG)]
|
||||
pub async fn sse_data_hub(
|
||||
_auth_session: AuthBackendType,
|
||||
Extension(data_hub): Extension<Arc<DataHub>>,
|
||||
) -> Sse<impl Stream<Item = Result<Event, ApiError>>> {
|
||||
println!("{data_hub:?}");
|
||||
// use BroadcastStream to convert Receiver into Stream
|
||||
// alternatively use async-stream (https://github.com/tokio-rs/axum/discussions/1060)
|
||||
let rx = data_hub.receiver.resubscribe();
|
||||
|
||||
// convert stream values into SSE events
|
||||
let stream = BroadcastStream::from(rx).map(|value| match value {
|
||||
Ok(value) => Ok(value.into()),
|
||||
Err(_) => Ok(Event::default()), // receiver lagged behind: emit an empty event instead of closing the stream
|
||||
});
|
||||
|
||||
Sse::new(stream).keep_alive(KeepAlive::default())
|
||||
}
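
A minimal, hedged sketch of the broadcast-to-stream pattern the handler above relies on (assuming tokio plus tokio-stream with its sync feature, which this handler already pulls in): every value sent on the broadcast channel surfaces on a (re)subscribed receiver once it is wrapped into a Stream, and a lagged receiver shows up as an Err value rather than ending the stream.

// Standalone sketch of the DataHub fan-out used by sse_data_hub (values are illustrative).
use tokio_stream::{wrappers::BroadcastStream, StreamExt as _};

#[tokio::main]
async fn main() {
    // Same channel type and capacity as DataHub::new() below.
    let (tx, rx) = tokio::sync::broadcast::channel::<String>(15);

    // Each SSE client wraps its own (re)subscribed receiver into a Stream.
    let mut stream = BroadcastStream::new(rx);

    tx.send("units updated".to_string()).unwrap();

    // Ok values become SSE events; Err (lagged receiver) is mapped to an empty Event above.
    if let Some(Ok(update)) = stream.next().await {
        println!("would be forwarded as an SSE event: {update}");
    }
}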
|
1
src/api/routes/data_hub/handlers/mod.rs
Normal file
@ -0,0 +1 @@
|
||||
pub mod data_hub_get;
|
17
src/api/routes/data_hub/mod.rs
Normal file
@ -0,0 +1,17 @@
|
||||
use utoipa_axum::{router::OpenApiRouter, routes};
|
||||
|
||||
use crate::login_required;
|
||||
|
||||
use handlers::data_hub_get::*;
|
||||
|
||||
mod handlers;
|
||||
pub mod models;
|
||||
|
||||
// expose the OpenAPI to parent module
|
||||
pub fn router() -> OpenApiRouter {
|
||||
let read = OpenApiRouter::new()
|
||||
.routes(routes!(sse_data_hub))
|
||||
.route_layer(login_required!());
|
||||
|
||||
OpenApiRouter::new().merge(read)
|
||||
}
|
67
src/api/routes/data_hub/models.rs
Normal file
@ -0,0 +1,67 @@
|
||||
use axum::response::sse;
|
||||
use serde::Serialize;
|
||||
use tokio::sync::broadcast::{self, Receiver, Sender};
|
||||
use ts_rs::TS;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::api::routes::{units::models::Unit, vendors::models::Vendor};
|
||||
|
||||
#[derive(Debug, Clone, Serialize, TS, ToSchema)]
|
||||
#[ts(export)]
|
||||
pub struct DataHubStream {
|
||||
/// Unique identifier for the data update event
|
||||
id: String,
|
||||
/// Event Type
|
||||
event: EventType,
|
||||
/// Data content
|
||||
data: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, TS, ToSchema, strum::Display)]
|
||||
#[ts(export)]
|
||||
pub enum EventType {
|
||||
Units,
|
||||
Vendors,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct DataHub {
|
||||
pub sender: Sender<DataHubData>,
|
||||
pub receiver: Receiver<DataHubData>,
|
||||
}
|
||||
|
||||
impl DataHub {
|
||||
pub fn new() -> Self {
|
||||
let (sender, receiver) = broadcast::channel(15);
|
||||
|
||||
Self {
|
||||
sender,
|
||||
receiver,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize)]
|
||||
pub enum DataHubData {
|
||||
Units(Vec<Unit>),
|
||||
Vendors(Vec<Vendor>),
|
||||
}
|
||||
|
||||
impl From<DataHubData> for sse::Event {
|
||||
fn from(value: DataHubData) -> Self {
|
||||
let (event_type, data) = match value {
|
||||
DataHubData::Units(units) => (
|
||||
EventType::Units,
|
||||
serde_json::to_string(&units).unwrap_or_default(),
|
||||
),
|
||||
DataHubData::Vendors(vendors) => (
|
||||
EventType::Vendors,
|
||||
serde_json::to_string(&vendors).unwrap_or_default(),
|
||||
),
|
||||
};
|
||||
|
||||
sse::Event::default()
|
||||
.event(event_type.to_string())
|
||||
.data(data)
|
||||
}
|
||||
}
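
For orientation, a small sketch of what the From<DataHubData> conversion above yields (the JSON payload and field values here are illustrative, not taken from the codebase): axum frames the builder output as an event: line carrying the EventType name, followed by a data: line with the serialized payload.

use axum::response::sse::Event;

fn main() {
    // Roughly what DataHubData::Units(...) turns into; the JSON body is illustrative.
    let event = Event::default()
        .event("Units")
        .data(r#"[{"id":1,"display":"Kilogram","abbreviation":"kg"}]"#);
    // On the wire this is framed as:
    //   event: Units
    //   data: [{"id":1,"display":"Kilogram","abbreviation":"kg"}]
    let _ = event;
}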
|
28
src/api/routes/departments/handlers/departments_get.rs
Normal file
@ -0,0 +1,28 @@
|
||||
use axum::{debug_handler, Extension, Json};
|
||||
|
||||
use super::super::sql;
|
||||
use crate::{
|
||||
api::{
|
||||
backend::ApiBackend, description::DEPARTMENTS_TAG, routes::departments::models::Department,
|
||||
},
|
||||
errors::ApiError,
|
||||
};
|
||||
|
||||
#[debug_handler]
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/departments",
|
||||
summary = "Get all Departments",
|
||||
description = "Get a list of all Departments.",
|
||||
responses(
|
||||
(status = OK, body = Vec<Department>, description = "List of departments"),
|
||||
),
|
||||
security(
|
||||
("user_auth" = ["read:departments",]),
|
||||
),
|
||||
tag = DEPARTMENTS_TAG)]
|
||||
pub async fn get_departments(
|
||||
Extension(backend): Extension<ApiBackend>,
|
||||
) -> Result<Json<Vec<Department>>, ApiError> {
|
||||
Ok(Json(sql::get_departments(backend.pool(), None).await?))
|
||||
}
|
33
src/api/routes/departments/handlers/departments_post.rs
Normal file
@ -0,0 +1,33 @@
|
||||
use axum::{debug_handler, Extension, Json};
|
||||
use validator::Validate;
|
||||
|
||||
use super::super::sql;
|
||||
use crate::{
|
||||
api::{
|
||||
backend::ApiBackend, description::DEPARTMENTS_TAG, routes::departments::models::Department,
|
||||
},
|
||||
errors::ApiError,
|
||||
};
|
||||
|
||||
#[debug_handler]
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "/departments",
|
||||
summary = "Create new Department",
|
||||
description = "Create new Department.",
|
||||
responses(
|
||||
(status = OK, description = "Creation succeeded"),
|
||||
),
|
||||
security(
|
||||
("user_auth" = ["write:departments",]),
|
||||
),
|
||||
tag = DEPARTMENTS_TAG)]
|
||||
pub async fn create_department(
|
||||
Extension(backend): Extension<ApiBackend>,
|
||||
Json(department): Json<Department>,
|
||||
) -> Result<(), ApiError> {
|
||||
// validate request
|
||||
department.validate()?;
|
||||
// insert new department
|
||||
sql::create_department(backend.pool(), department).await
|
||||
}
|
33
src/api/routes/departments/handlers/departments_put.rs
Normal file
@ -0,0 +1,33 @@
|
||||
use axum::{debug_handler, Extension, Json};
|
||||
use validator::Validate;
|
||||
|
||||
use super::super::sql;
|
||||
use crate::{
|
||||
api::{
|
||||
backend::ApiBackend, description::DEPARTMENTS_TAG, routes::departments::models::Department,
|
||||
},
|
||||
errors::ApiError,
|
||||
};
|
||||
|
||||
#[debug_handler]
|
||||
#[utoipa::path(
|
||||
put,
|
||||
path = "/departments",
|
||||
summary = "Update Department",
|
||||
description = "Change details of Departments.",
|
||||
responses(
|
||||
(status = OK, description = "Update succeeded"),
|
||||
),
|
||||
security(
|
||||
("user_auth" = ["write:departments",]),
|
||||
),
|
||||
tag = DEPARTMENTS_TAG)]
|
||||
pub async fn update_department(
|
||||
Extension(backend): Extension<ApiBackend>,
|
||||
Json(department): Json<Department>,
|
||||
) -> Result<(), ApiError> {
|
||||
// validate request
|
||||
department.validate()?;
|
||||
// update existing department
|
||||
sql::update_department(backend.pool(), department).await
|
||||
}
|
3
src/api/routes/departments/handlers/mod.rs
Normal file
@ -0,0 +1,3 @@
|
||||
pub mod departments_get;
|
||||
pub mod departments_post;
|
||||
pub mod departments_put;
|
28
src/api/routes/departments/mod.rs
Normal file
@ -0,0 +1,28 @@
|
||||
use utoipa_axum::{router::OpenApiRouter, routes};
|
||||
|
||||
use crate::{
|
||||
api::routes::users::permissions::{Permission, PermissionDetail},
|
||||
login_required, permission_required,
|
||||
};
|
||||
|
||||
use handlers::departments_get::*;
|
||||
use handlers::departments_post::*;
|
||||
use handlers::departments_put::*;
|
||||
|
||||
mod handlers;
|
||||
pub mod models;
|
||||
pub mod sql;
|
||||
|
||||
// expose the OpenAPI to parent module
|
||||
pub fn router() -> OpenApiRouter {
|
||||
let read = OpenApiRouter::new()
|
||||
.routes(routes!(get_departments))
|
||||
.route_layer(login_required!());
|
||||
let write = OpenApiRouter::new()
|
||||
.routes(routes!(create_department, update_department))
|
||||
.route_layer(permission_required!(Permission::Write(
|
||||
PermissionDetail::Departments
|
||||
)));
|
||||
|
||||
OpenApiRouter::new().merge(read).merge(write)
|
||||
}
|
59
src/api/routes/departments/models.rs
Normal file
@ -0,0 +1,59 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::types::JsonValue;
|
||||
use tracing::error;
|
||||
use ts_rs::TS;
|
||||
use utoipa::ToSchema;
|
||||
use validator::{Validate, ValidationError};
|
||||
|
||||
use crate::api::routes::{models::Status, users::models::ShortUser};
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, TS, ToSchema, Validate)]
|
||||
#[ts(export)]
|
||||
pub struct Department {
|
||||
pub id: i32,
|
||||
pub name: String,
|
||||
pub description: Option<String>,
|
||||
#[schema(inline)]
|
||||
#[validate(custom(function = "validate_manager", message = "Missing Manager"))]
|
||||
pub manager: Manager,
|
||||
#[schema(inline)]
|
||||
#[validate(custom(function = "validate_manager", message = "Missing Vice Manager"))]
|
||||
pub vice_manager: Manager,
|
||||
pub status: Status,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, TS, ToSchema)]
|
||||
pub struct Manager(pub Option<ShortUser>);
|
||||
|
||||
impl From<Option<JsonValue>> for Manager {
|
||||
fn from(value: Option<JsonValue>) -> Self {
|
||||
Self(
|
||||
value
|
||||
.map(|value| -> Option<ShortUser> {
|
||||
match serde_json::from_value::<ShortUser>(value) {
|
||||
Ok(manager) => Some(manager),
|
||||
Err(err) => {
|
||||
error!("{err:?}");
|
||||
None
|
||||
}
|
||||
}
|
||||
})
|
||||
.flatten(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn validate_manager(value: &Manager) -> Result<(), ValidationError> {
|
||||
match value.0 {
|
||||
Some(_) => Ok(()),
|
||||
None => Err(ValidationError::new("Missing Manager")),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, TS, ToSchema)]
|
||||
#[ts(export)]
|
||||
pub struct ShortDepartment {
|
||||
pub id: i32,
|
||||
pub name: String,
|
||||
pub description: Option<String>,
|
||||
}
|
91
src/api/routes/departments/sql.rs
Normal file
@ -0,0 +1,91 @@
|
||||
use sqlx::PgPool;
|
||||
|
||||
use crate::{api::routes::models::Status, errors::ApiError};
|
||||
|
||||
use super::models::Department;
|
||||
|
||||
pub async fn get_departments(
|
||||
pool: &PgPool,
|
||||
filter_active: Option<Status>,
|
||||
) -> Result<Vec<Department>, ApiError> {
|
||||
Ok(sqlx::query_as!(
|
||||
Department,
|
||||
r#"SELECT
|
||||
DEPARTMENTS."DepartmentID" as id,
|
||||
DEPARTMENTS."DepartmentName" as name,
|
||||
DEPARTMENTS."Description" as description,
|
||||
DEPARTMENTS."Status" as "status: Status",
|
||||
json_build_object(
|
||||
'user_id', MANAGER."UserID",
|
||||
'name', MANAGER."Name",
|
||||
'surname', MANAGER."Surname",
|
||||
'email', MANAGER."Email"
|
||||
) AS manager,
|
||||
json_build_object(
|
||||
'user_id', vManager."UserID",
|
||||
'name', vManager."Name",
|
||||
'surname', vManager."Surname",
|
||||
'email', vManager."Email"
|
||||
) AS vice_manager
|
||||
FROM
|
||||
departments
|
||||
LEFT JOIN
|
||||
USERS MANAGER ON DEPARTMENTS."ManagerID" = MANAGER."UserID"
|
||||
LEFT JOIN
|
||||
USERS vManager ON DEPARTMENTS."ViceManagerID" = vManager."UserID"
|
||||
WHERE
|
||||
($1::smallint IS NULL OR DEPARTMENTS."Status" = $1)"#,
|
||||
filter_active.map(|v| v as i16),
|
||||
)
|
||||
.fetch_all(pool)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn create_department(pool: &PgPool, department: Department) -> Result<(), ApiError> {
|
||||
let mut transaction = pool.begin().await?;
|
||||
|
||||
sqlx::query!(
|
||||
r#"INSERT INTO departments
|
||||
("DepartmentName", "Description", "Status", "ManagerID", "ViceManagerID")
|
||||
VALUES ($1, $2, $3, $4, $5)"#,
|
||||
department.name,
|
||||
department.description,
|
||||
department.status as i16,
|
||||
department.manager.0.unwrap_or_default().user_id,
|
||||
department.vice_manager.0.unwrap_or_default().user_id
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
// commit transaction
|
||||
transaction.commit().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn update_department(pool: &PgPool, department: Department) -> Result<(), ApiError> {
|
||||
let mut transaction = pool.begin().await?;
|
||||
|
||||
sqlx::query!(
|
||||
r#"UPDATE departments SET
|
||||
"DepartmentName" = $2,
|
||||
"Description" = $3,
|
||||
"Status" = $4,
|
||||
"ManagerID" = $5,
|
||||
"ViceManagerID" = $6
|
||||
WHERE "DepartmentID" = $1"#,
|
||||
department.id,
|
||||
department.name,
|
||||
department.description,
|
||||
department.status as i16,
|
||||
department.manager.0.unwrap_or_default().user_id,
|
||||
department.vice_manager.0.unwrap_or_default().user_id,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
// commit transaction
|
||||
transaction.commit().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
48
src/api/routes/files/handlers/files_draft_delete.rs
Normal file
@ -0,0 +1,48 @@
|
||||
use axum::{debug_handler, Json};
|
||||
use chrono::NaiveDateTime;
|
||||
use serde::Deserialize;
|
||||
use ts_rs::TS;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::{
|
||||
api::{
|
||||
description::FILE_TAG,
|
||||
routes::{files::sql, AuthBackendType},
|
||||
},
|
||||
errors::ApiError,
|
||||
};
|
||||
|
||||
#[derive(Debug, Deserialize, TS, ToSchema)]
|
||||
#[ts(export)]
|
||||
pub struct GetDeleteFileRequest {
|
||||
draft_id: String,
|
||||
hash: String,
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
#[utoipa::path(
|
||||
delete,
|
||||
path = "/files/draft",
|
||||
summary = "Delete specified file attached to Draft",
|
||||
description = "Delete the specified draft file.",
|
||||
request_body(content = GetDeleteFileRequest, description = "Request for file deletion", content_type = "application/json"),
|
||||
responses(
|
||||
(status = OK, description = "File successfully deleted"),
|
||||
),
|
||||
tag = FILE_TAG)]
|
||||
pub async fn delete_draft_file(
|
||||
auth_session: AuthBackendType,
|
||||
Json(request): Json<GetDeleteFileRequest>,
|
||||
) -> Result<(), ApiError> {
|
||||
let backend = auth_session.backend();
|
||||
let user = auth_session
|
||||
.is_authenticated()
|
||||
.ok_or(ApiError::AccessDenied)?;
|
||||
|
||||
let draft_id =
|
||||
NaiveDateTime::parse_from_str(&request.draft_id, "%Y-%m-%d %H:%M:%S").map_err(|_| {
|
||||
ApiError::InvalidRequest("Could not parse NaiveDateTime from draft id".to_string())
|
||||
})?;
|
||||
|
||||
sql::delete_draft_file(backend.pool(), user, draft_id, request.hash).await
|
||||
}
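
The draft id is transported as a plain timestamp string; a minimal sketch of the parse performed by the delete and upload handlers (the timestamp value is illustrative):

use chrono::NaiveDateTime;

fn main() {
    // Same format string the handlers use for request.draft_id.
    let draft_id = NaiveDateTime::parse_from_str("2025-05-04 06:56:59", "%Y-%m-%d %H:%M:%S");
    assert!(draft_id.is_ok());
}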
|
@ -17,7 +17,7 @@ use crate::{
|
||||
|
||||
#[derive(Debug, Deserialize, TS, ToSchema)]
|
||||
#[ts(export)]
|
||||
pub struct GetAttachedFilesRequest {
|
||||
pub struct GetAttachedDraftFilesRequest {
|
||||
draft_id: String,
|
||||
}
|
||||
|
||||
@ -27,14 +27,14 @@ pub struct GetAttachedFilesRequest {
|
||||
path = "/files/draft",
|
||||
summary = "Get Files attached to Draft",
|
||||
description = "Get list of all files that are attached to a specified draft.",
|
||||
request_body(content = GetAttachedFilesRequest, description = "Request for attached files", content_type = "application/json"),
|
||||
request_body(content = GetAttachedDraftFilesRequest, description = "Request for attached files", content_type = "application/json"),
|
||||
responses(
|
||||
(status = OK, body = Vec<AttachedFile>, description = "Attached Files List", content_type = "application/json"),
|
||||
),
|
||||
tag = FILE_TAG)]
|
||||
pub async fn get_attached_draft_files(
|
||||
auth_session: AuthBackendType,
|
||||
Json(request): Json<GetAttachedFilesRequest>,
|
||||
Json(request): Json<GetAttachedDraftFilesRequest>,
|
||||
) -> Result<Json<Vec<AttachedFile>>, ApiError> {
|
||||
let backend = auth_session.backend();
|
||||
let user = auth_session
|
||||
|
@ -1,15 +1,17 @@
|
||||
use axum::{debug_handler, extract::Multipart, http::StatusCode};
|
||||
use axum::{
|
||||
debug_handler,
|
||||
extract::{Multipart, Query},
|
||||
};
|
||||
use chrono::NaiveDateTime;
|
||||
use serde::Deserialize;
|
||||
use tokio_util::bytes::Bytes;
|
||||
use ts_rs::TS;
|
||||
use utoipa::ToSchema;
|
||||
use utoipa::{schema, IntoParams, ToSchema};
|
||||
|
||||
use crate::{
|
||||
api::{
|
||||
description::FILE_TAG,
|
||||
routes::{
|
||||
files::{models::File, sql},
|
||||
files::{handlers::parse_file_from_multipart_form, sql},
|
||||
AuthBackendType,
|
||||
},
|
||||
},
|
||||
@ -23,18 +25,22 @@ use super::super::FILE_SIZE_LIMIT_MB;
|
||||
#[allow(unused)]
|
||||
pub struct FileUploadRequest {
|
||||
name: Option<String>,
|
||||
#[schema(value_type = String)]
|
||||
draft_id: NaiveDateTime,
|
||||
#[schema(format = Binary, content_media_type = "application/octet-stream")]
|
||||
file: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, IntoParams, ToSchema)]
|
||||
pub struct DraftFileUploadQueryParams {
|
||||
draft_id: String,
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "/files/draft",
|
||||
summary = "Draft File Upload",
|
||||
description = "Upload a file as draft file.",
|
||||
params(DraftFileUploadQueryParams),
|
||||
request_body(content = FileUploadRequest, description = "File Data", content_type = "multipart/form-data"),
|
||||
responses(
|
||||
(status = OK, body = String, description = "Successfully uploaded and stored file"),
|
||||
@ -43,6 +49,7 @@ pub struct FileUploadRequest {
|
||||
tag = FILE_TAG)]
|
||||
pub async fn upload_draft_file(
|
||||
auth_session: AuthBackendType,
|
||||
Query(parameters): Query<DraftFileUploadQueryParams>,
|
||||
mut multipart: Multipart,
|
||||
) -> Result<(), ApiError> {
|
||||
let backend = auth_session.backend();
|
||||
@ -50,63 +57,14 @@ pub async fn upload_draft_file(
|
||||
.is_authenticated()
|
||||
.ok_or(ApiError::AccessDenied)?;
|
||||
|
||||
let mut name: Option<String> = None;
|
||||
let mut draft_id: Option<NaiveDateTime> = None;
|
||||
// get draft id
|
||||
let draft_id = NaiveDateTime::parse_from_str(¶meters.draft_id, "%Y-%m-%d %H:%M:%S")
|
||||
.map_err(|_| {
|
||||
ApiError::InvalidRequest("Could not parse NaiveDateTime from draft id".to_string())
|
||||
})?;
|
||||
|
||||
let mut content_type: Option<String> = None;
|
||||
let mut file_name: Option<String> = None;
|
||||
let mut bytes: Option<Bytes> = None;
|
||||
let mut size: Option<i32> = None;
|
||||
let file = parse_file_from_multipart_form(&mut multipart).await?;
|
||||
sql::insert_new_draft_file(backend.pool(), user, draft_id, &file).await?;
|
||||
|
||||
while let Some(field) = multipart.next_field().await.unwrap() {
|
||||
let field_name = field.name();
|
||||
|
||||
match &field_name {
|
||||
Some("name") => name = Some(field.text().await?),
|
||||
Some("draft_id") => {
|
||||
draft_id = Some(
|
||||
NaiveDateTime::parse_from_str(&field.text().await?, "%Y-%m-%d %H:%M:%S")
|
||||
.map_err(|_| {
|
||||
ApiError::InvalidRequest(
|
||||
"Could not parse NaiveDateTime from draft id".to_string(),
|
||||
)
|
||||
})?,
|
||||
)
|
||||
}
|
||||
Some("file") => {
|
||||
file_name = field.file_name().map(ToString::to_string);
|
||||
content_type = field.content_type().map(ToString::to_string);
|
||||
let _bytes = field.bytes().await?;
|
||||
size = Some(_bytes.len() as i32);
|
||||
bytes = Some(_bytes);
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
}
|
||||
|
||||
// store file in database
|
||||
if let (Some(data), Some(draft_id), Some(content_type), Some(filename)) =
|
||||
(bytes, draft_id, content_type, file_name)
|
||||
{
|
||||
sql::insert_new_draft_file(
|
||||
backend.pool(),
|
||||
user,
|
||||
File {
|
||||
name: match name {
|
||||
Some(name) => name,
|
||||
None => filename,
|
||||
},
|
||||
draft_id,
|
||||
content_type,
|
||||
data,
|
||||
size: size.unwrap_or_default(),
|
||||
},
|
||||
)
|
||||
.await
|
||||
} else {
|
||||
Err(ApiError::MultipartForm(
|
||||
StatusCode::BAD_REQUEST,
|
||||
"Missing fields in request".to_string(),
|
||||
))
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -5,7 +5,6 @@ use axum::{
|
||||
response::{IntoResponse, Response},
|
||||
Json,
|
||||
};
|
||||
use chrono::NaiveDateTime;
|
||||
use serde::Deserialize;
|
||||
use ts_rs::TS;
|
||||
use utoipa::ToSchema;
|
||||
@ -21,36 +20,28 @@ use crate::{
|
||||
#[derive(Debug, Deserialize, TS, ToSchema)]
|
||||
#[ts(export)]
|
||||
pub struct GetFileRequest {
|
||||
draft_id: String,
|
||||
name: String,
|
||||
hash: String,
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/files/draft/file",
|
||||
summary = "Download specified file",
|
||||
path = "/files/file",
|
||||
summary = "File Download",
|
||||
description = "Download specified file.",
|
||||
request_body(content = GetFileRequest, description = "Request to download specified file", content_type = "application/json"),
|
||||
responses(
|
||||
(status = OK, description = "File Data", content_type = "application/octet-stream"),
|
||||
),
|
||||
tag = FILE_TAG)]
|
||||
pub async fn get_specified_draft_file(
|
||||
pub async fn get_specified_file(
|
||||
auth_session: AuthBackendType,
|
||||
Json(request): Json<GetFileRequest>,
|
||||
) -> Result<impl IntoResponse, ApiError> {
|
||||
let backend = auth_session.backend();
|
||||
let user = auth_session
|
||||
.is_authenticated()
|
||||
.ok_or(ApiError::AccessDenied)?;
|
||||
|
||||
let draft_id =
|
||||
NaiveDateTime::parse_from_str(&request.draft_id, "%Y-%m-%d %H:%M:%S").map_err(|_| {
|
||||
ApiError::InvalidRequest("Could not parse NaiveDateTime from draft id".to_string())
|
||||
})?;
|
||||
|
||||
match sql::get_specified_draft_file(backend.pool(), user, draft_id, request.hash).await? {
|
||||
match sql::get_specified_file(backend.pool(), request.name, request.hash).await? {
|
||||
Some(file) => Ok(Response::builder()
|
||||
.header(header::CONTENT_TYPE, file.content_type)
|
||||
.header(
|
66
src/api/routes/files/handlers/files_order_bundle_get.rs
Normal file
@ -0,0 +1,66 @@
|
||||
use axum::{
|
||||
body::Body,
|
||||
debug_handler,
|
||||
http::{header, StatusCode},
|
||||
response::{IntoResponse, Response},
|
||||
Json,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
api::{
|
||||
description::FILE_TAG,
|
||||
routes::{
|
||||
files::{
|
||||
handlers::{create_zip_bundle, files_order_get::GetAttachedOrderFilesRequest},
|
||||
sql,
|
||||
},
|
||||
AuthBackendType,
|
||||
},
|
||||
},
|
||||
errors::ApiError,
|
||||
};
|
||||
|
||||
#[debug_handler]
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/files/order/bundle",
|
||||
summary = "Get Files attached to Order Position as ZIP bundle",
|
||||
description = "Downloads a ZIP bundle of alle the files attached to the order.",
|
||||
request_body(content = GetAttachedOrderFilesRequest, description = "Request for attached files ZIP bundle", content_type = "application/json"),
|
||||
responses(
|
||||
(status = OK, description = "ZIP bundle Data", content_type = "application/octet-stream"),
|
||||
),
|
||||
tag = FILE_TAG)]
|
||||
pub async fn get_attached_order_files_zip_bundle(
|
||||
auth_session: AuthBackendType,
|
||||
Json(request): Json<GetAttachedOrderFilesRequest>,
|
||||
) -> Result<impl IntoResponse, ApiError> {
|
||||
let backend = auth_session.backend();
|
||||
|
||||
// get all attached files with data
|
||||
let files =
|
||||
sql::get_order_attached_files_data(backend.pool(), request.order_id, request.position_id)
|
||||
.await?;
|
||||
|
||||
if files.is_empty() {
|
||||
return Ok(Response::builder()
|
||||
.status(StatusCode::NO_CONTENT)
|
||||
.body(Body::from(""))
|
||||
.unwrap_or_default());
|
||||
}
|
||||
|
||||
// build zip file
|
||||
let zip_data = create_zip_bundle(files)?;
|
||||
|
||||
Ok(Response::builder()
|
||||
.header(header::CONTENT_TYPE, "application/x-zip-compressed")
|
||||
.header(
|
||||
header::CONTENT_DISPOSITION,
|
||||
format!(
"attachment; filename=\"{}-{}.zip\"",
request.order_id, request.position_id
),
|
||||
)
|
||||
.body(Body::from(zip_data))
|
||||
.unwrap_or_default())
|
||||
}
|
45
src/api/routes/files/handlers/files_order_get.rs
Normal file
@ -0,0 +1,45 @@
|
||||
use axum::{debug_handler, Json};
|
||||
use serde::Deserialize;
|
||||
use ts_rs::TS;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::{
|
||||
api::{
|
||||
description::FILE_TAG,
|
||||
routes::{
|
||||
files::{models::AttachedFile, sql},
|
||||
AuthBackendType,
|
||||
},
|
||||
},
|
||||
errors::ApiError,
|
||||
};
|
||||
|
||||
#[derive(Debug, Deserialize, TS, ToSchema)]
|
||||
#[ts(export)]
|
||||
pub struct GetAttachedOrderFilesRequest {
|
||||
pub order_id: i32,
|
||||
pub position_id: i32,
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/files/order",
|
||||
summary = "Get Files attached to Order Position",
|
||||
description = "Get list of all files that are attached to a specified Position.",
|
||||
request_body(content = GetAttachedOrderFilesRequest, description = "Request for attached files", content_type = "application/json"),
|
||||
responses(
|
||||
(status = OK, body = Vec<AttachedFile>, description = "Attached Files List", content_type = "application/json"),
|
||||
),
|
||||
tag = FILE_TAG)]
|
||||
pub async fn get_attached_order_files(
|
||||
auth_session: AuthBackendType,
|
||||
Json(request): Json<GetAttachedOrderFilesRequest>,
|
||||
) -> Result<Json<Vec<AttachedFile>>, ApiError> {
|
||||
let backend = auth_session.backend();
|
||||
|
||||
Ok(Json(
|
||||
sql::get_order_attached_files(backend.pool(), request.order_id, request.position_id)
|
||||
.await?,
|
||||
))
|
||||
}
|
51
src/api/routes/files/handlers/files_order_post.rs
Normal file
@ -0,0 +1,51 @@
|
||||
use axum::{
|
||||
debug_handler,
|
||||
extract::{Multipart, Query},
|
||||
};
|
||||
use serde::Deserialize;
|
||||
use utoipa::{IntoParams, ToSchema};
|
||||
|
||||
use crate::{
|
||||
api::{
|
||||
description::FILE_TAG,
|
||||
routes::{
|
||||
files::{handlers::files_draft_post::FileUploadRequest, sql},
|
||||
AuthBackendType,
|
||||
},
|
||||
},
|
||||
errors::ApiError,
|
||||
};
|
||||
|
||||
use super::{super::FILE_SIZE_LIMIT_MB, parse_file_from_multipart_form};
|
||||
|
||||
#[derive(Debug, Deserialize, IntoParams, ToSchema)]
|
||||
pub struct OrderFileUploadQueryParams {
|
||||
pub order_id: i32,
|
||||
pub position_id: i32,
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "/files/order",
|
||||
summary = "Order File Upload",
|
||||
description = "Upload a file to an Order.",
|
||||
params(OrderFileUploadQueryParams),
|
||||
request_body(content = FileUploadRequest, description = "File Data", content_type = "multipart/form-data"),
|
||||
responses(
|
||||
(status = OK, body = String, description = "Successfully uploaded and stored file"),
|
||||
(status = 413, description = format!("The size of the uploaded file is too large (max {FILE_SIZE_LIMIT_MB} MB)"))
|
||||
),
|
||||
tag = FILE_TAG)]
|
||||
pub async fn upload_order_file(
|
||||
auth_session: AuthBackendType,
|
||||
Query(parameters): Query<OrderFileUploadQueryParams>,
|
||||
mut multipart: Multipart,
|
||||
) -> Result<String, ApiError> {
|
||||
let backend = auth_session.backend();
|
||||
|
||||
let file = parse_file_from_multipart_form(&mut multipart).await?;
|
||||
sql::insert_new_order_file(backend.pool(), parameters, &file).await?;
|
||||
|
||||
Ok(file.hash())
|
||||
}
|
@ -1,3 +1,74 @@
|
||||
pub mod files_draft_file_get;
|
||||
use std::io::Write;
|
||||
|
||||
use axum::{extract::Multipart, http::StatusCode};
|
||||
use tokio_util::bytes::Bytes;
|
||||
use zip::{write::SimpleFileOptions, ZipWriter};
|
||||
|
||||
use crate::errors::ApiError;
|
||||
|
||||
use super::models::File;
|
||||
|
||||
pub mod files_draft_delete;
|
||||
pub mod files_draft_get;
|
||||
pub mod files_draft_post;
|
||||
pub mod files_file_get;
|
||||
pub mod files_order_bundle_get;
|
||||
pub mod files_order_get;
|
||||
pub mod files_order_post;
|
||||
|
||||
pub async fn parse_file_from_multipart_form(multipart: &mut Multipart) -> Result<File, ApiError> {
|
||||
let mut name: Option<String> = None;
|
||||
|
||||
let mut content_type: Option<String> = None;
|
||||
let mut file_name: Option<String> = None;
|
||||
let mut bytes: Option<Bytes> = None;
|
||||
let mut size: Option<i32> = None;
|
||||
|
||||
while let Some(field) = multipart.next_field().await? {
|
||||
let field_name = field.name();
|
||||
|
||||
match &field_name {
|
||||
Some("name") => name = Some(field.text().await?),
|
||||
Some("file") => {
|
||||
file_name = field.file_name().map(ToString::to_string);
|
||||
content_type = field.content_type().map(ToString::to_string);
|
||||
let _bytes = field.bytes().await?;
|
||||
size = Some(_bytes.len() as i32);
|
||||
bytes = Some(_bytes);
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
}
|
||||
|
||||
if let (Some(data), Some(content_type), Some(filename)) = (bytes, content_type, file_name) {
|
||||
Ok(File {
|
||||
name: match name {
|
||||
Some(name) => name,
|
||||
None => filename,
|
||||
},
|
||||
content_type,
|
||||
data,
|
||||
size: size.unwrap_or_default(),
|
||||
})
|
||||
} else {
|
||||
Err(ApiError::MultipartForm(
|
||||
StatusCode::BAD_REQUEST,
|
||||
"Missing fields in request".to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn create_zip_bundle(files: Vec<File>) -> Result<Vec<u8>, ApiError> {
|
||||
let mut data = std::io::Cursor::new(Vec::new());
|
||||
let mut zip = ZipWriter::new(&mut data);
|
||||
let options = SimpleFileOptions::default().compression_method(zip::CompressionMethod::Deflated);
|
||||
|
||||
for file in files {
|
||||
zip.start_file(file.name, options)?;
|
||||
zip.write_all(&file.data)?;
|
||||
}
|
||||
|
||||
zip.finish()?;
|
||||
|
||||
Ok(data.into_inner())
|
||||
}
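
A self-contained sketch of the same in-memory ZIP approach used by create_zip_bundle above (file name and contents are illustrative), assuming the zip crate already imported in this module:

use std::io::Write;
use zip::{write::SimpleFileOptions, ZipWriter};

fn main() -> zip::result::ZipResult<()> {
    // Write every attachment into an in-memory cursor, then hand the raw bytes
    // to the HTTP response builder (see the order bundle handler).
    let mut cursor = std::io::Cursor::new(Vec::new());
    let mut zip = ZipWriter::new(&mut cursor);
    let options = SimpleFileOptions::default().compression_method(zip::CompressionMethod::Deflated);

    zip.start_file("example.txt", options)?;
    zip.write_all(b"attached file contents")?;
    zip.finish()?;

    let bytes = cursor.into_inner();
    assert!(!bytes.is_empty());
    Ok(())
}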
|
||||
|
@ -1,26 +1,30 @@
|
||||
mod handlers;
|
||||
pub mod handlers;
|
||||
mod models;
|
||||
mod sql;
|
||||
|
||||
use axum::extract::DefaultBodyLimit;
|
||||
use utoipa_axum::{router::OpenApiRouter, routes};
|
||||
|
||||
use crate::{
|
||||
api::routes::users::permissions::{Permission, PermissionDetail},
|
||||
permission_required,
|
||||
};
|
||||
use crate::login_required;
|
||||
|
||||
use handlers::{files_draft_file_get::*, files_draft_get::*, files_draft_post::*};
|
||||
use handlers::{
|
||||
files_draft_delete::*, files_draft_get::*, files_draft_post::*, files_file_get::*,
|
||||
files_order_bundle_get::*, files_order_get::*, files_order_post::*,
|
||||
};
|
||||
|
||||
const FILE_SIZE_LIMIT_MB: usize = 20;
|
||||
|
||||
// expose the OpenAPI to parent module
|
||||
pub fn router() -> OpenApiRouter {
|
||||
OpenApiRouter::new()
|
||||
.routes(routes!(get_specified_draft_file))
|
||||
.routes(routes!(upload_draft_file, get_attached_draft_files))
|
||||
.routes(routes!(get_specified_file))
|
||||
.routes(routes!(get_attached_order_files_zip_bundle))
|
||||
.routes(routes!(
|
||||
upload_draft_file,
|
||||
get_attached_draft_files,
|
||||
delete_draft_file
|
||||
))
|
||||
.routes(routes!(upload_order_file, get_attached_order_files))
|
||||
.layer(DefaultBodyLimit::max(FILE_SIZE_LIMIT_MB * 1000 * 1000))
|
||||
// .route_layer(permission_required!(Permission::Write(
|
||||
// PermissionDetail::Users // TODO adjust permissions
|
||||
// )))
|
||||
.route_layer(login_required!())
|
||||
}
|
||||
|
@ -1,4 +1,3 @@
|
||||
use chrono::NaiveDateTime;
|
||||
use serde::Serialize;
|
||||
use sha2::{Digest, Sha256};
|
||||
use tokio_util::bytes::Bytes;
|
||||
@ -14,9 +13,9 @@ pub struct AttachedFile {
|
||||
pub size: i32,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct File {
|
||||
pub name: String,
|
||||
pub draft_id: NaiveDateTime,
|
||||
pub content_type: String,
|
||||
pub data: Bytes,
|
||||
pub size: i32,
|
||||
|
@ -1,24 +1,33 @@
|
||||
use chrono::NaiveDateTime;
|
||||
use sqlx::PgPool;
|
||||
use sqlx::{PgPool, Postgres, Transaction};
|
||||
|
||||
use crate::{api::routes::users::models::User, errors::ApiError};
|
||||
|
||||
use super::models::{AttachedFile, File};
|
||||
use super::{
|
||||
handlers::files_order_post::OrderFileUploadQueryParams,
|
||||
models::{AttachedFile, File},
|
||||
};
|
||||
|
||||
pub async fn insert_new_draft_file(pool: &PgPool, user: &User, file: File) -> Result<(), ApiError> {
|
||||
pub async fn insert_new_draft_file(
|
||||
pool: &PgPool,
|
||||
user: &User,
|
||||
draft_id: NaiveDateTime,
|
||||
file: &File,
|
||||
) -> Result<(), ApiError> {
|
||||
let mut transaction = pool.begin().await?;
|
||||
|
||||
let affected_rows = sqlx::query!(
|
||||
r#"INSERT INTO draftfiles
|
||||
("UserID", "DraftID", "Hash", "ContentType", "Name", "Data", "Size")
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7) ON CONFLICT DO NOTHING"#,
|
||||
user.user_id,
|
||||
file.draft_id,
|
||||
// add file
|
||||
let affected_rows = insert_new_file(&mut transaction, file).await?;
|
||||
|
||||
// add file <-> draft connection
|
||||
let added_connections = sqlx::query!(
|
||||
r#"INSERT INTO drafts_files
|
||||
("FileHash", "FileName", "DraftIDTime", "DraftIDUser")
|
||||
VALUES ($1, $2, $3, $4) ON CONFLICT DO NOTHING"#,
|
||||
file.hash(),
|
||||
file.content_type,
|
||||
file.name,
|
||||
&file.data.to_vec(),
|
||||
file.size
|
||||
draft_id,
|
||||
user.user_id
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?
|
||||
@ -38,15 +47,17 @@ pub async fn get_draft_attached_files(
|
||||
Ok(sqlx::query_as!(
|
||||
AttachedFile,
|
||||
r#"SELECT
|
||||
"Hash" as hash,
|
||||
"FileHash" as hash,
|
||||
"ContentType" as content_type,
|
||||
"Name" as name,
|
||||
"FileName" as name,
|
||||
"Size" as size
|
||||
FROM
|
||||
"draftfiles"
|
||||
"drafts_files"
|
||||
LEFT JOIN files ON
|
||||
FILES."Hash" = DRAFTS_FILES."FileHash"
|
||||
WHERE
|
||||
"UserID" = $1 AND "DraftID" = $2
|
||||
ORDER BY "Name" ASC"#,
|
||||
DRAFTS_FILES."DraftIDUser" = $1 AND "DraftIDTime" = $2
|
||||
ORDER BY "FileName" ASC"#,
|
||||
user.user_id,
|
||||
draft_id
|
||||
)
|
||||
@ -54,31 +65,187 @@ pub async fn get_draft_attached_files(
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn get_specified_draft_file(
|
||||
pub async fn delete_draft_file(
|
||||
pool: &PgPool,
|
||||
user: &User,
|
||||
draft_id: NaiveDateTime,
|
||||
hash: String,
|
||||
) -> Result<(), ApiError> {
|
||||
let mut transaction = pool.begin().await?;
|
||||
|
||||
// delete draft file
|
||||
sqlx::query!(
|
||||
r#"DELETE FROM
|
||||
"drafts_files"
|
||||
WHERE
|
||||
"DraftIDUser" = $1
|
||||
AND "DraftIDTime" = $2
|
||||
AND "FileHash" = $3"#,
|
||||
user.user_id,
|
||||
draft_id,
|
||||
hash
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
// check if file is now dangling and can be deleted
|
||||
if check_dangling_file(&mut transaction, &hash).await? {
|
||||
delete_file(&mut transaction, &hash).await?;
|
||||
}
|
||||
|
||||
// commit transaction
|
||||
transaction.commit().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_specified_file(
|
||||
pool: &PgPool,
|
||||
name: String,
|
||||
hash: String,
|
||||
) -> Result<Option<File>, ApiError> {
|
||||
let file = sqlx::query_as!(
|
||||
File,
|
||||
r#"SELECT
|
||||
"ContentType" as content_type,
|
||||
"Name" as name,
|
||||
$2 as "name!: String",
|
||||
"Data" as data,
|
||||
"DraftID" as draft_id,
|
||||
"Size" as size
|
||||
FROM
|
||||
"draftfiles"
|
||||
files
|
||||
WHERE
|
||||
"UserID" = $1 AND "DraftID" = $2 AND "Hash" = $3
|
||||
ORDER BY "Name" ASC"#,
|
||||
user.user_id,
|
||||
draft_id,
|
||||
hash
|
||||
"Hash" = $1"#,
|
||||
hash,
|
||||
name
|
||||
)
|
||||
.fetch_optional(pool)
|
||||
.await?;
|
||||
|
||||
Ok(file)
|
||||
}
|
||||
|
||||
pub async fn insert_new_order_file(
|
||||
pool: &PgPool,
|
||||
order: OrderFileUploadQueryParams,
|
||||
file: &File,
|
||||
) -> Result<(), ApiError> {
|
||||
let mut transaction = pool.begin().await?;
|
||||
// add file
|
||||
insert_new_file(&mut transaction, file).await?;
|
||||
|
||||
// add file <-> order position connection
|
||||
let _added_connections = sqlx::query!(
|
||||
r#"INSERT INTO "orderPositions_files"
|
||||
("FileHash", "FileName", "OrderID", "PositionID")
|
||||
VALUES ($1, $2, $3, $4) ON CONFLICT DO NOTHING"#,
|
||||
file.hash(),
|
||||
file.name,
|
||||
order.order_id,
|
||||
order.position_id,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?
|
||||
.rows_affected();
|
||||
|
||||
// commit transaction
|
||||
transaction.commit().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_order_attached_files(
|
||||
pool: &PgPool,
|
||||
order_id: i32,
|
||||
position_id: i32,
|
||||
) -> Result<Vec<AttachedFile>, ApiError> {
|
||||
Ok(sqlx::query_as!(
|
||||
AttachedFile,
|
||||
r#"SELECT
|
||||
"FileHash" as hash,
|
||||
"ContentType" as content_type,
|
||||
"FileName" as name,
|
||||
"Size" as size
|
||||
FROM
|
||||
"orderPositions_files"
|
||||
LEFT JOIN files ON
|
||||
FILES."Hash" = "orderPositions_files"."FileHash"
|
||||
WHERE
|
||||
"orderPositions_files"."OrderID" = $1 AND "orderPositions_files"."PositionID" = $2
|
||||
ORDER BY "FileName" ASC"#,
|
||||
order_id,
|
||||
position_id
|
||||
)
|
||||
.fetch_all(pool)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn get_order_attached_files_data(
|
||||
pool: &PgPool,
|
||||
order_id: i32,
|
||||
position_id: i32,
|
||||
) -> Result<Vec<File>, ApiError> {
|
||||
Ok(sqlx::query_as!(
|
||||
File,
|
||||
r#"SELECT
|
||||
"ContentType" as content_type,
|
||||
"FileName" as name,
|
||||
"Size" as size,
|
||||
"Data" as data
|
||||
FROM
|
||||
"orderPositions_files"
|
||||
LEFT JOIN files ON
|
||||
FILES."Hash" = "orderPositions_files"."FileHash"
|
||||
WHERE
|
||||
"orderPositions_files"."OrderID" = $1 AND "orderPositions_files"."PositionID" = $2
|
||||
ORDER BY "FileName" ASC"#,
|
||||
order_id,
|
||||
position_id
|
||||
)
|
||||
.fetch_all(pool)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn insert_new_file(
|
||||
transaction: &mut Transaction<'static, Postgres>,
|
||||
file: &File,
|
||||
) -> Result<u64, ApiError> {
|
||||
Ok(sqlx::query!(
|
||||
r#"INSERT INTO files
|
||||
("Hash", "ContentType", "Data", "Size")
|
||||
VALUES ($1, $2, $3, $4) ON CONFLICT DO NOTHING"#,
|
||||
file.hash(),
|
||||
file.content_type,
|
||||
&file.data.to_vec(),
|
||||
file.size
|
||||
)
|
||||
.execute(&mut **transaction)
|
||||
.await?
|
||||
.rows_affected())
|
||||
}
|
||||
|
||||
pub async fn check_dangling_file(
|
||||
transaction: &mut Transaction<'static, Postgres>,
|
||||
hash: &str,
|
||||
) -> Result<bool, ApiError> {
|
||||
Ok(sqlx::query_scalar!(
|
||||
r#"SELECT COUNT(*) as "count!" FROM
|
||||
(SELECT "FileHash" FROM drafts_files WHERE "FileHash" = $1
|
||||
UNION
|
||||
SELECT "FileHash" FROM "orderPositions_files" WHERE "FileHash" = $1)"#,
|
||||
hash
|
||||
)
|
||||
.fetch_one(&mut **transaction)
|
||||
.await?
|
||||
== 0)
|
||||
}
|
||||
|
||||
pub async fn delete_file(
|
||||
transaction: &mut Transaction<'static, Postgres>,
|
||||
hash: &str,
|
||||
) -> Result<(), ApiError> {
|
||||
sqlx::query!(r#"DELETE FROM files WHERE "Hash" = $1"#, hash)
|
||||
.execute(&mut **transaction)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -1,12 +1,20 @@
|
||||
pub mod api_keys;
|
||||
pub mod auth;
|
||||
mod data_hub;
|
||||
mod departments;
|
||||
mod files;
|
||||
pub mod models;
|
||||
mod signature;
|
||||
mod units;
|
||||
pub mod users;
|
||||
pub mod vendors;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use api_keys::models::ApiKey as APIKey;
|
||||
use axum::{Extension, Router};
|
||||
use axum_jwt_login::AuthSessionWithApiKey;
|
||||
use data_hub::models::DataHub;
|
||||
use users::models::User;
|
||||
use utoipa::OpenApi;
|
||||
use utoipa_axum::router::OpenApiRouter;
|
||||
@ -22,7 +30,11 @@ type AuthBackendType = AuthSessionWithApiKey<User, ApiBackend, APIKey>;
|
||||
#[macro_export]
|
||||
macro_rules! login_required {
|
||||
() => {
|
||||
axum_jwt_login::login_required!(User, ApiBackend, APIKey)
|
||||
axum_jwt_login::login_required!(
|
||||
crate::api::routes::User,
|
||||
crate::api::ApiBackend,
|
||||
crate::api::routes::APIKey
|
||||
)
|
||||
};
|
||||
}
|
||||
#[macro_export]
|
||||
@ -46,6 +58,10 @@ pub fn create_routes(session: AuthBackendType) -> Router {
|
||||
.nest(API_BASE, api_keys::router())
|
||||
.nest(API_BASE, signature::router())
|
||||
.nest(API_BASE, files::router())
|
||||
.nest(API_BASE, departments::router())
|
||||
.nest(API_BASE, units::router())
|
||||
.nest(API_BASE, vendors::router())
|
||||
.nest(API_BASE, data_hub::router())
|
||||
// .nest(
|
||||
// "/api/order",
|
||||
// // order::router().route_layer(crate::login_required!(AuthenticationBackend<ApiKey>)),
|
||||
@ -59,6 +75,7 @@ pub fn create_routes(session: AuthBackendType) -> Router {
|
||||
// // .layer(auth_layer)
|
||||
.layer(session.into_layer())
|
||||
.layer(Extension(backend))
|
||||
.layer(Extension(Arc::new(DataHub::new())))
|
||||
.split_for_parts();
|
||||
|
||||
router
|
||||
|
11
src/api/routes/models.rs
Normal file
@ -0,0 +1,11 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use ts_rs::TS;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
#[derive(Debug, Clone, Copy, Serialize, Deserialize, sqlx::Type, TS, ToSchema)]
|
||||
#[repr(i16)]
|
||||
pub enum Status {
|
||||
Deleted = -1,
|
||||
Deactivated = 0,
|
||||
Active = 1,
|
||||
}
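
The #[repr(i16)] discriminants are what the query layer binds as ::smallint; a tiny sketch of that cast, mirroring the filter_active.map(|v| v as i16) calls in the sql modules (the local enum copy is only there to keep the example self-contained):

#[allow(dead_code)]
#[derive(Clone, Copy)]
#[repr(i16)]
enum Status {
    Deleted = -1,
    Deactivated = 0,
    Active = 1,
}

fn main() {
    // What `filter_active.map(|v| v as i16)` hands to the $1::smallint parameter.
    let filter = Some(Status::Active);
    assert_eq!(filter.map(|v| v as i16), Some(1));
}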
|
3
src/api/routes/units/handlers/mod.rs
Normal file
@ -0,0 +1,3 @@
|
||||
pub mod units_get;
|
||||
pub mod units_post;
|
||||
pub mod units_put;
|
26
src/api/routes/units/handlers/units_get.rs
Normal file
@ -0,0 +1,26 @@
|
||||
use axum::{debug_handler, Extension, Json};
|
||||
|
||||
use super::super::sql;
|
||||
use crate::{
|
||||
api::{backend::ApiBackend, description::UNITS_TAG, routes::units::models::Unit},
|
||||
errors::ApiError,
|
||||
};
|
||||
|
||||
#[debug_handler]
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/units",
|
||||
summary = "Get all Units",
|
||||
description = "Get a list of all Units.",
|
||||
responses(
|
||||
(status = OK, body = Vec<Unit>, description = "List of Units"),
|
||||
),
|
||||
security(
|
||||
("user_auth" = ["read:units",]),
|
||||
),
|
||||
tag = UNITS_TAG)]
|
||||
pub async fn get_units(
|
||||
Extension(backend): Extension<ApiBackend>,
|
||||
) -> Result<Json<Vec<Unit>>, ApiError> {
|
||||
Ok(Json(sql::get_units(backend.pool(), None).await?))
|
||||
}
|
44
src/api/routes/units/handlers/units_post.rs
Normal file
@ -0,0 +1,44 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use axum::{debug_handler, Extension, Json};
|
||||
|
||||
use super::super::sql;
|
||||
use crate::{
|
||||
api::{
|
||||
backend::ApiBackend,
|
||||
description::UNITS_TAG,
|
||||
routes::{
|
||||
data_hub::models::{DataHub, DataHubData},
|
||||
units::models::Unit,
|
||||
},
|
||||
},
|
||||
errors::ApiError,
|
||||
};
|
||||
|
||||
#[debug_handler]
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "/units",
|
||||
summary = "Create new Unit",
|
||||
description = "Create new Unit.",
|
||||
responses(
|
||||
(status = OK, description = "Creation succeeded"),
|
||||
),
|
||||
security(
|
||||
("user_auth" = ["write:units",]),
|
||||
),
|
||||
tag = UNITS_TAG)]
|
||||
pub async fn create_unit(
|
||||
Extension(backend): Extension<ApiBackend>,
|
||||
Extension(data_hub): Extension<Arc<DataHub>>,
|
||||
Json(unit): Json<Unit>,
|
||||
) -> Result<(), ApiError> {
|
||||
// insert new unit
|
||||
sql::create_unit(backend.pool(), unit).await?;
|
||||
|
||||
// get updated unit list
|
||||
let units = sql::get_units(backend.pool(), None).await?;
|
||||
let _ = data_hub.sender.send(DataHubData::Units(units));
|
||||
|
||||
Ok(())
|
||||
}
|
27
src/api/routes/units/handlers/units_put.rs
Normal file
@ -0,0 +1,27 @@
|
||||
use axum::{debug_handler, Extension, Json};
|
||||
|
||||
use super::super::sql;
|
||||
use crate::{
|
||||
api::{backend::ApiBackend, description::UNITS_TAG, routes::units::models::Unit},
|
||||
errors::ApiError,
|
||||
};
|
||||
|
||||
#[debug_handler]
|
||||
#[utoipa::path(
|
||||
put,
|
||||
path = "/units",
|
||||
summary = "Update Unit",
|
||||
description = "Change details of Units.",
|
||||
responses(
|
||||
(status = OK, description = "Update succeeded"),
|
||||
),
|
||||
security(
|
||||
("user_auth" = ["write:units",]),
|
||||
),
|
||||
tag = UNITS_TAG)]
|
||||
pub async fn update_unit(
|
||||
Extension(backend): Extension<ApiBackend>,
|
||||
Json(unit): Json<Unit>,
|
||||
) -> Result<(), ApiError> {
|
||||
sql::update_unit(backend.pool(), unit).await
|
||||
}
|
31
src/api/routes/units/mod.rs
Normal file
@ -0,0 +1,31 @@
|
||||
use utoipa_axum::{router::OpenApiRouter, routes};
|
||||
|
||||
use crate::{
|
||||
api::routes::users::permissions::{Permission, PermissionDetail},
|
||||
login_required, permission_required,
|
||||
};
|
||||
|
||||
use handlers::units_get::*;
|
||||
use handlers::units_post::*;
|
||||
use handlers::units_put::*;
|
||||
|
||||
mod handlers;
|
||||
pub mod models;
|
||||
pub mod sql;
|
||||
|
||||
// expose the OpenAPI to parent module
|
||||
pub fn router() -> OpenApiRouter {
|
||||
let read = OpenApiRouter::new()
|
||||
.routes(routes!(get_units))
|
||||
// .route_layer(permission_required!(Permission::Read(
|
||||
// PermissionDetail::units
|
||||
// )));
|
||||
.route_layer(login_required!());
|
||||
let write = OpenApiRouter::new()
|
||||
.routes(routes!(create_unit, update_unit))
|
||||
.route_layer(permission_required!(Permission::Write(
|
||||
PermissionDetail::Units
|
||||
)));
|
||||
|
||||
OpenApiRouter::new().merge(read).merge(write)
|
||||
}
|
15
src/api/routes/units/models.rs
Normal file
@ -0,0 +1,15 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use ts_rs::TS;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::api::routes::models::Status;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, TS, ToSchema)]
|
||||
#[ts(export)]
|
||||
pub struct Unit {
|
||||
pub id: i32,
|
||||
pub display: String,
|
||||
pub abbreviation: String,
|
||||
pub description: Option<String>,
|
||||
pub status: Status,
|
||||
}
|
73
src/api/routes/units/sql.rs
Normal file
@ -0,0 +1,73 @@
|
||||
use sqlx::PgPool;
|
||||
|
||||
use crate::{api::routes::models::Status, errors::ApiError};
|
||||
|
||||
use super::models::Unit;
|
||||
|
||||
pub async fn get_units(
|
||||
pool: &PgPool,
|
||||
filter_active: Option<Status>,
|
||||
) -> Result<Vec<Unit>, ApiError> {
|
||||
Ok(sqlx::query_as!(
|
||||
Unit,
|
||||
r#"SELECT
|
||||
UNITS."UnitID" as id,
|
||||
UNITS."Display" as display,
|
||||
UNITS."Description" as description,
|
||||
UNITS."Abbreviation" as abbreviation,
|
||||
UNITS."Status" as "status: Status"
|
||||
FROM
|
||||
units
|
||||
WHERE
|
||||
($1::smallint IS NULL OR UNITS."Status" = $1)"#,
|
||||
filter_active.map(|v| v as i16),
|
||||
)
|
||||
.fetch_all(pool)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn create_unit(pool: &PgPool, unit: Unit) -> Result<(), ApiError> {
|
||||
let mut transaction = pool.begin().await?;
|
||||
|
||||
sqlx::query!(
|
||||
r#"INSERT INTO units
|
||||
("Display", "Description", "Status", "Abbreviation")
|
||||
VALUES ($1, $2, $3, $4)"#,
|
||||
unit.display,
|
||||
unit.description,
|
||||
unit.status as i16,
|
||||
unit.abbreviation
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
// commit transaction
|
||||
transaction.commit().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn update_unit(pool: &PgPool, unit: Unit) -> Result<(), ApiError> {
|
||||
let mut transaction = pool.begin().await?;
|
||||
|
||||
sqlx::query!(
|
||||
r#"UPDATE units SET
|
||||
"Display" = $2,
|
||||
"Description" = $3,
|
||||
"Status" = $4,
|
||||
"Abbreviation" = $5
|
||||
WHERE "UnitID" = $1"#,
|
||||
unit.id,
|
||||
unit.display,
|
||||
unit.description,
|
||||
unit.status as i16,
|
||||
unit.abbreviation
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
// commit transaction
|
||||
transaction.commit().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
@ -1,27 +1,26 @@
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
fmt,
|
||||
};
|
||||
|
||||
use axum_jwt_login::UserPermissions;
|
||||
use chrono::NaiveDateTime;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
use sqlx::{types::JsonValue, PgPool};
|
||||
use tracing::error;
|
||||
use ts_rs::TS;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::errors::ApiError;
|
||||
use crate::{
|
||||
api::routes::{departments::models::ShortDepartment, models::Status},
|
||||
errors::ApiError,
|
||||
};
|
||||
|
||||
use super::{
|
||||
permissions::{Permission, PermissionContainer},
|
||||
sql,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, Copy, Serialize, Deserialize, sqlx::Type, TS, ToSchema)]
|
||||
#[repr(i16)]
|
||||
pub enum UserStatus {
|
||||
Deleted = -1,
|
||||
Deactivated = 0,
|
||||
Active = 1,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, TS, ToSchema)]
|
||||
pub struct GroupContainer(pub HashSet<i32>);
|
||||
|
||||
@ -37,6 +36,27 @@ impl From<Option<Vec<i32>>> for GroupContainer {
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, TS, ToSchema)]
|
||||
pub struct DepartmentContainer(pub Option<ShortDepartment>);
|
||||
|
||||
impl From<Option<JsonValue>> for DepartmentContainer {
|
||||
fn from(value: Option<JsonValue>) -> Self {
|
||||
Self(
|
||||
value
|
||||
.map(|value| -> Option<ShortDepartment> {
|
||||
match serde_json::from_value::<ShortDepartment>(value) {
|
||||
Ok(dep) => Some(dep),
|
||||
Err(err) => {
|
||||
error!("{err:?}");
|
||||
None
|
||||
}
|
||||
}
|
||||
})
|
||||
.flatten(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, TS, ToSchema)]
|
||||
#[ts(export)]
|
||||
pub struct User {
|
||||
pub user_id: String,
|
||||
@ -47,12 +67,14 @@ pub struct User {
|
||||
pub surname: String,
|
||||
pub email: String,
|
||||
#[schema(inline)]
|
||||
pub department: DepartmentContainer,
|
||||
#[schema(inline)]
|
||||
pub groups: GroupContainer,
|
||||
#[schema(inline)]
|
||||
pub group_permissions: PermissionContainer,
|
||||
#[schema(inline)]
|
||||
pub permissions: PermissionContainer,
|
||||
pub status_flag: UserStatus,
|
||||
pub status_flag: Status,
|
||||
#[serde(default)]
|
||||
#[schema(value_type = String, read_only)]
|
||||
pub creation_date: NaiveDateTime,
|
||||
@ -61,6 +83,27 @@ pub struct User {
|
||||
pub last_change: NaiveDateTime,
|
||||
}
|
||||
|
||||
/// Manually implement Debug to prevent the password from leaking
|
||||
impl fmt::Debug for User {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("User")
|
||||
.field("user_id", &self.user_id)
|
||||
.field("active_directory_auth", &self.active_directory_auth)
|
||||
.field("password", &"[redacted]")
|
||||
.field("name", &self.name)
|
||||
.field("surname", &self.surname)
|
||||
.field("email", &self.email)
|
||||
.field("department", &self.department)
|
||||
.field("groups", &self.groups)
|
||||
.field("group_permissions", &self.group_permissions)
|
||||
.field("permissions", &self.permissions)
|
||||
.field("status_flag", &self.status_flag)
|
||||
.field("creation_date", &self.creation_date)
|
||||
.field("last_change", &self.last_change)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
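A standalone sketch (not code from this commit) of the same redaction pattern, showing what the manual Debug impl buys: the secret never reaches log output. The Credentials struct and values below are invented for illustration.

use std::fmt;

struct Credentials {
    user_id: String,
    password: String,
}

// Same technique as the User impl above: print a placeholder instead of the secret.
impl fmt::Debug for Credentials {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Credentials")
            .field("user_id", &self.user_id)
            .field("password", &"[redacted]")
            .finish()
    }
}

fn main() {
    let c = Credentials {
        user_id: "jdoe".into(),
        password: "hunter2".into(),
    };
    let rendered = format!("{c:?}");
    // Prints: Credentials { user_id: "jdoe", password: "[redacted]" }
    println!("{rendered}");
    assert!(!rendered.contains("hunter2"));
}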
impl UserPermissions for User {
    type Error = ApiError;
    type Permission = Permission;
@@ -116,3 +159,12 @@ impl User {
        Ok(())
    }
}

#[derive(Debug, Clone, Default, Serialize, Deserialize, TS, ToSchema)]
#[ts(export)]
pub struct ShortUser {
    pub user_id: String,
    pub name: String,
    pub surname: String,
    pub email: String,
}
@@ -62,6 +62,9 @@ pub enum PermissionDetail {
    Users,
    APIKeys,
    Signature,
    Departments,
    Units,
    Vendors,
    #[default]
    None,
}
@@ -1,12 +1,12 @@
use sqlx::{PgPool, Postgres, Transaction};

use crate::errors::ApiError;
use crate::{api::routes::models::Status, errors::ApiError};

use super::models::{User, UserStatus};
use super::models::{ShortUser, User};

pub async fn get_users(
    pool: &PgPool,
    filter_status: Option<UserStatus>,
    filter_status: Option<Status>,
    filter_id: Option<String>,
) -> Result<Vec<User>, ApiError> {
    Ok(sqlx::query_as!(
@@ -18,21 +18,27 @@ pub async fn get_users(
            USERS."Name" as name,
            USERS."Surname" as surname,
            USERS."Email" as email,
            USERS."StatusFlag" as "status_flag: UserStatus",
            USERS."Status" as "status_flag: Status",
            USERS."CreationDate" as "creation_date",
            USERS."LastChanged" as "last_change",
            json_build_object(
                'id', DEPARTMENTS."DepartmentID",
                'name', DEPARTMENTS."DepartmentName",
                'description', DEPARTMENTS."Description"
            ) AS department,
            array_remove(ARRAY_AGG(USERS_GROUPS."GroupID"), NULL) AS groups,
            array_remove(ARRAY_AGG(USER_PERMISSIONS."Permission"), NULL) AS permissions,
            array_remove(ARRAY_AGG(GROUP_PERMISSIONS."Permission"), NULL) AS group_permissions
        FROM
            users
            LEFT JOIN DEPARTMENTS ON DEPARTMENTS."DepartmentID" = USERS."DepartmentID"
            LEFT JOIN PUBLIC.USER_PERMISSIONS ON USER_PERMISSIONS."UserID" = USERS."UserID"
            LEFT JOIN USERS_GROUPS ON USERS."UserID" = USERS_GROUPS."UserID"
            LEFT JOIN GROUP_PERMISSIONS ON GROUP_PERMISSIONS."GroupID" = USERS_GROUPS."GroupID"
        WHERE
            ($1::smallint IS NULL OR USERS."StatusFlag" = $1)
            ($1::smallint IS NULL OR USERS."Status" = $1)
            AND ($2::varchar IS NULL OR USERS."UserID" = $2)
        GROUP BY USERS."UserID""#,
        GROUP BY USERS."UserID", DEPARTMENTS."DepartmentID""#,
        filter_status.map(|s| s as i16),
        filter_id
    )
@@ -47,7 +53,7 @@ pub async fn update_user(pool: &PgPool, user: &User) -> Result<(), ApiError> {
    sqlx::query!(
        r#"UPDATE users SET
            "ActiveDirectoryAuth" = $2,
            "StatusFlag" = $3,
            "Status" = $3,
            "LastChanged" = NOW()
        WHERE "UserID" = $1"#,
        user.user_id,
@@ -182,3 +188,24 @@ pub async fn create_new_user(

    Ok(())
}

pub async fn get_short_user_list(
    pool: &PgPool,
    filter_status: Option<Status>,
) -> Result<Vec<ShortUser>, ApiError> {
    Ok(sqlx::query_as!(
        ShortUser,
        r#"SELECT
            USERS."UserID" as user_id,
            USERS."Name" as name,
            USERS."Surname" as surname,
            USERS."Email" as email
        FROM
            users
        WHERE
            ($1::smallint IS NULL OR USERS."Status" = $1)"#,
        filter_status.map(|s| s as i16),
    )
    .fetch_all(pool)
    .await?)
}
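A hedged sketch of how this helper might be exposed over HTTP, following the handler pattern used for the vendor routes below; the route itself, the Status::Active variant, and the availability of ApiBackend::pool() here are assumptions, not part of this diff.

use axum::{Extension, Json};

// Hypothetical handler: list active users in their short form.
pub async fn get_short_users(
    Extension(backend): Extension<ApiBackend>,
) -> Result<Json<Vec<ShortUser>>, ApiError> {
    // Pass None instead of Some(Status::Active) to include every status.
    let users = get_short_user_list(backend.pool(), Some(Status::Active)).await?;
    Ok(Json(users))
}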

2 src/api/routes/vendors/handlers/mod.rs vendored Normal file
@@ -0,0 +1,2 @@
pub mod vendors_get;
pub mod vendors_import_post;

23 src/api/routes/vendors/handlers/vendors_get.rs vendored Normal file
@@ -0,0 +1,23 @@
use axum::{debug_handler, Extension, Json};

use super::super::sql;
use crate::{
    api::{backend::ApiBackend, description::VENDORS_TAG, routes::vendors::models::Vendor},
    errors::ApiError,
};

#[debug_handler]
#[utoipa::path(
    get,
    path = "/vendors",
    summary = "Get all Vendors",
    description = "Get a list of all Vendors.",
    responses(
        (status = OK, body = Vec<Vendor>, description = "List of vendors"),
    ),
    tag = VENDORS_TAG)]
pub async fn get_vendors(
    Extension(backend): Extension<ApiBackend>,
) -> Result<Json<Vec<Vendor>>, ApiError> {
    Ok(Json(sql::get_vendors(backend.pool()).await?))
}
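A client-side sketch of calling the new endpoint, for orientation only; the base URL is an assumption, the reqwest/tokio/serde dependencies are not part of this diff, and the session expected by login_required!() is omitted here.

// Fetch the vendor list from a locally running instance (hypothetical setup).
#[derive(serde::Deserialize, Debug)]
struct Vendor {
    id: String,
    name: String,
}

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let vendors: Vec<Vendor> = reqwest::Client::new()
        .get("http://localhost:3000/vendors")
        .send()
        .await?
        .json()
        .await?;
    println!("loaded {} vendors: {vendors:?}", vendors.len());
    Ok(())
}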

72 src/api/routes/vendors/handlers/vendors_import_post.rs vendored Normal file
@@ -0,0 +1,72 @@
use std::sync::Arc;

use axum::{debug_handler, extract::Multipart, Extension};
use tracing::error;

use super::super::sql;
use crate::{
    api::{
        backend::ApiBackend,
        description::VENDORS_TAG,
        routes::{
            data_hub::models::{DataHub, DataHubData},
            files::handlers::parse_file_from_multipart_form,
        },
    },
    errors::ApiError,
};

const SKIP_TOP: usize = 4;
const SKIP_BOTTOM: usize = 3;

const COLUMN_ID: usize = 1;
const COLUMN_NAME: usize = 2;

#[debug_handler]
#[utoipa::path(
    post,
    path = "/vendors/import",
    summary = "Import Vendors from CSV",
    description = "Import Vendors from an uploaded CSV file.",
    responses(
        (status = OK, description = "Successfully imported vendors"),
    ),
    tag = VENDORS_TAG)]
pub async fn import_vendors(
    Extension(backend): Extension<ApiBackend>,
    Extension(data_hub): Extension<Arc<DataHub>>,
    mut multipart: Multipart,
) -> Result<(), ApiError> {
    let file = parse_file_from_multipart_form(&mut multipart).await?;
    let file_content = String::from_utf8(file.data.to_vec()).map_err(|e| {
        error!("{e}");
        ApiError::InvalidRequest("Invalid file content".to_string())
    })?;

    // skip header lines
    let lines = file_content.lines().into_iter().skip(SKIP_TOP.into());
    let line_count = lines.clone().count();

    // begin transaction
    let mut transaction = backend.pool().begin().await?;

    // get data lines (skipping footer lines)
    for line in lines.take(line_count - SKIP_BOTTOM as usize) {
        // get columns
        let columns = line.split('\t').collect::<Vec<&str>>();

        let id = columns[COLUMN_ID];
        let name = columns[COLUMN_NAME];

        sql::import_vendor_line(&mut transaction, id, name).await?;
    }

    // commit to db
    transaction.commit().await?;

    // get updated vendor list
    let vendors = sql::get_vendors(backend.pool()).await?;
    let _ = data_hub.sender.send(DataHubData::Vendors(vendors));

    Ok(())
}
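A hedged illustration of the column layout the import loop above assumes: a tab-separated export where the vendor ID sits in column 1 and the display name in column 2, with SKIP_TOP header rows and SKIP_BOTTOM footer rows around the data. The sample line is invented; the real export format is not shown in this diff.

fn main() {
    const COLUMN_ID: usize = 1;
    const COLUMN_NAME: usize = 2;

    // One hypothetical data row after the header rows have been skipped.
    let line = "0001\tV-1000\tACME Tools GmbH\tEUR";
    let columns: Vec<&str> = line.split('\t').collect();

    assert_eq!(columns[COLUMN_ID], "V-1000");
    assert_eq!(columns[COLUMN_NAME], "ACME Tools GmbH");
}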

26 src/api/routes/vendors/mod.rs vendored Normal file
@@ -0,0 +1,26 @@
use utoipa_axum::{router::OpenApiRouter, routes};

use crate::{
    api::routes::users::permissions::{Permission, PermissionDetail},
    login_required, permission_required,
};

use handlers::{vendors_get::*, vendors_import_post::*};

mod handlers;
pub mod models;
pub mod sql;

// expose the OpenAPI router to the parent module
pub fn router() -> OpenApiRouter {
    let read = OpenApiRouter::new()
        .routes(routes!(get_vendors))
        .route_layer(login_required!());
    let write = OpenApiRouter::new()
        .routes(routes!(import_vendors))
        .route_layer(permission_required!(Permission::Write(
            PermissionDetail::Vendors
        )));

    OpenApiRouter::new().merge(read).merge(write)
}
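For orientation, a hedged sketch of how this module-level router is presumably mounted next to the other route modules; the aggregator function and the sibling modules named here are assumptions based on the pattern, not code from this diff.

// Hypothetical parent-level aggregation in api::routes.
pub fn routes() -> OpenApiRouter {
    OpenApiRouter::new()
        .merge(users::router())
        .merge(units::router())
        .merge(vendors::router())
}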

12 src/api/routes/vendors/models.rs vendored Normal file
@@ -0,0 +1,12 @@
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use utoipa::ToSchema;

#[derive(Clone, Debug, Serialize, Deserialize, TS, ToSchema)]
#[ts(export)]
pub struct Vendor {
    /// Vendor ID
    pub id: String,
    /// Name of vendor
    pub name: String,
}

40 src/api/routes/vendors/sql.rs vendored Normal file
@@ -0,0 +1,40 @@
use sqlx::{PgPool, Postgres, Transaction};

use crate::errors::ApiError;

use super::models::Vendor;

pub async fn get_vendors(pool: &PgPool) -> Result<Vec<Vendor>, ApiError> {
    Ok(sqlx::query_as!(
        Vendor,
        r#"SELECT
            "VendorID" as id,
            "Name" as name
        FROM
            vendors
        ORDER BY "Name" ASC"#
    )
    .fetch_all(pool)
    .await?)
}

pub async fn import_vendor_line(
    transaction: &mut Transaction<'static, Postgres>,
    id: &str,
    name: &str,
) -> Result<(), ApiError> {
    sqlx::query!(
        r#"INSERT INTO vendors
            ("VendorID", "Name")
        VALUES ($1, $2)
        ON CONFLICT ("VendorID") DO UPDATE
            SET "Name" = $2;
        "#,
        id,
        name
    )
    .execute(&mut **transaction)
    .await?;

    Ok(())
}
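Because of the ON CONFLICT clause, re-importing a file with the same VendorID values simply refreshes the names instead of failing on duplicate keys. A hedged sketch of that behaviour (the pool setup and the IDs are illustrative only):

async fn reimport_example(pool: &sqlx::PgPool) -> Result<(), ApiError> {
    let mut tx = pool.begin().await?;
    import_vendor_line(&mut tx, "V-1000", "ACME Tools").await?;
    // Same key again: the existing row is updated, no duplicate is inserted.
    import_vendor_line(&mut tx, "V-1000", "ACME Tools GmbH").await?;
    tx.commit().await?;
    Ok(())
}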
@@ -6,12 +6,13 @@ use axum::{
    response::{IntoResponse, Response},
    Json,
};
use chrono::{NaiveDateTime, ParseError, ParseResult};
use error_stack::Context;
use minisign::PError;
use serde::Serialize;
use sha2::digest::InvalidLength;
use tracing::error;
use validator::ValidationErrors;
use zip::result::ZipError;

use crate::api::{backend::ApiBackend, routes::users::models::User};

@@ -41,6 +42,8 @@ pub enum ApiError {
    MultipartForm(StatusCode, String),
    InvalidRequest(String),
    FileNotFound,
    ValidationError(String),
    ZIPArchive(String),
}

impl ApiError {
@@ -64,16 +67,13 @@ impl ApiError {
            ),
            Self::MultipartForm(c, s) => (*c, "Multipart Error", Some(s)),
            Self::FileNotFound => (StatusCode::NOT_FOUND, "File not found", None),
            Self::AccessDenied => (StatusCode::FORBIDDEN, "Access Denied", None), // ApiError::WrongCredentials => (StatusCode::UNAUTHORIZED, "Wrong credentials"),
            // ApiError::MissingCredentials => (StatusCode::BAD_REQUEST, "Missing credentials"),
            // ApiError::TokenCreation => (StatusCode::INTERNAL_SERVER_ERROR, "Token creation error"),
            // ApiError::InvalidToken => (StatusCode::BAD_REQUEST, "Invalid token"),
            // ApiError::InvalidApiKey => (StatusCode::BAD_REQUEST, "Invalid api key"),
            // ApiError::InternalServerError => {
            //     (StatusCode::INTERNAL_SERVER_ERROR, "Unspecified error")
            // }
            // ApiError::AccessDenied => (StatusCode::UNAUTHORIZED, "Access denied"),
            // ApiError::InvalidPermissions => (StatusCode::BAD_REQUEST, "Invalid permissions"),
            Self::ValidationError(s) => (StatusCode::BAD_REQUEST, "Bad request", Some(s)),
            Self::AccessDenied => (StatusCode::FORBIDDEN, "Access Denied", None),
            Self::ZIPArchive(s) => (
                StatusCode::INTERNAL_SERVER_ERROR,
                "Error creating ZIP bundle",
                Some(s),
            ),
        };

        (
@@ -141,6 +141,24 @@ impl From<MultipartError> for ApiError {
    }
}

impl From<ValidationErrors> for ApiError {
    fn from(value: ValidationErrors) -> Self {
        Self::ValidationError(format!("{value}"))
    }
}

impl From<ZipError> for ApiError {
    fn from(value: ZipError) -> Self {
        Self::ZIPArchive(value.to_string())
    }
}

impl From<std::io::Error> for ApiError {
    fn from(value: std::io::Error) -> Self {
        Self::ZIPArchive(value.to_string())
    }
}

impl Display for ApiError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let error_info = self.as_error_info().1;
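What the new From impls buy in handler code, as a hedged sketch: zip and std::io errors can now bubble up with plain `?` and are reported via the ZIPArchive variant (HTTP 500, "Error creating ZIP bundle"). The function and path below are invented for illustration.

// Hypothetical helper: read a file that will go into a ZIP bundle.
async fn read_bundle_input() -> Result<Vec<u8>, ApiError> {
    // std::io::Error converts into ApiError::ZIPArchive via the impl above.
    let bytes = tokio::fs::read("bundle/input.bin").await?;
    Ok(bytes)
}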
@@ -7,9 +7,10 @@ CREATE TABLE IF NOT EXISTS public.apikeys
    "KeyID" character varying(32) COLLATE pg_catalog."default" NOT NULL,
    "Name" character varying(255) COLLATE pg_catalog."default" NOT NULL,
    "UserAuthRequired" boolean NOT NULL DEFAULT true,
    "Hash" bytea NOT NULL,
    "CreationDate" timestamp without time zone NOT NULL DEFAULT now(),
    "LastChanged" timestamp without time zone NOT NULL DEFAULT now(),
    "Hash" bytea NOT NULL,
    "Status" smallint NOT NULL DEFAULT 1,
    CONSTRAINT apikeys_pkey PRIMARY KEY ("KeyID")
)

@@ -24,9 +25,6 @@ COMMENT ON COLUMN public.apikeys."KeyID"
COMMENT ON COLUMN public.apikeys."Name"
    IS 'Name/Description of API Key';

COMMENT ON COLUMN public.apikeys."Hash"
    IS 'Hashed value of API Key';

COMMENT ON COLUMN public.apikeys."UserAuthRequired"
    IS 'Indication if this api key requires additional user authentication';

@@ -1,17 +1,29 @@
CREATE TABLE public.users
-- Table: public.users

-- DROP TABLE IF EXISTS public.users;

CREATE TABLE IF NOT EXISTS public.users
(
    "UserID" character varying(10)[] NOT NULL,
    "UserID" character varying(10) COLLATE pg_catalog."default" NOT NULL,
    "ActiveDirectoryAuth" boolean NOT NULL DEFAULT false,
    "Name" character varying(250) NOT NULL DEFAULT '',
    "Surname" character varying(250) NOT NULL DEFAULT '',
    "Email" character varying(500) NOT NULL DEFAULT '',
    "Password" character varying(255) NOT NULL DEFAULT '',
    "PrivateKey" text COLLATE,
    "CreationDate" timestamp without time zone NOT NULL DEFAULT NOW(),
    "LastChanged" timestamp without time zone NOT NULL DEFAULT NOW(),
    "StatusFlag" smallint NOT NULL,
    PRIMARY KEY ("UserID")
);
    "Name" character varying(250) COLLATE pg_catalog."default" NOT NULL DEFAULT ''::character varying,
    "Surname" character varying(250) COLLATE pg_catalog."default" NOT NULL DEFAULT ''::character varying,
    "Email" character varying(500) COLLATE pg_catalog."default" NOT NULL DEFAULT ''::character varying,
    "Password" character varying(255) COLLATE pg_catalog."default" NOT NULL DEFAULT ''::character varying,
    "CreationDate" timestamp without time zone NOT NULL DEFAULT now(),
    "LastChanged" timestamp without time zone NOT NULL DEFAULT now(),
    "PrivateKey" text COLLATE pg_catalog."default",
    "DepartmentID" integer NOT NULL,
    "Status" smallint NOT NULL DEFAULT 1,
    CONSTRAINT users_pkey PRIMARY KEY ("UserID"),
    CONSTRAINT "DepartmentID" FOREIGN KEY ("DepartmentID")
        REFERENCES public.departments ("DepartmentID") MATCH SIMPLE
        ON UPDATE NO ACTION
        ON DELETE NO ACTION
        NOT VALID
)

TABLESPACE pg_default;

ALTER TABLE IF EXISTS public.users
    OWNER to postgres;
@@ -1,21 +0,0 @@
CREATE TABLE IF NOT EXISTS public.draftfiles
(
    "DraftID" timestamp without time zone NOT NULL,
    "UserID" character varying(10) COLLATE pg_catalog."default" NOT NULL,
    "ContentType" character varying(255) COLLATE pg_catalog."default" NOT NULL,
    "Name" character varying COLLATE pg_catalog."default" NOT NULL,
    "Hash" character varying COLLATE pg_catalog."default" NOT NULL,
    "Data" bytea NOT NULL,
    "Size" integer NOT NULL,
    CONSTRAINT draftfiles_pkey PRIMARY KEY ("DraftID", "UserID", "Hash"),
    CONSTRAINT "UserID" FOREIGN KEY ("UserID")
        REFERENCES public.users ("UserID") MATCH SIMPLE
        ON UPDATE NO ACTION
        ON DELETE NO ACTION
        NOT VALID
)

TABLESPACE pg_default;

ALTER TABLE IF EXISTS public.draftfiles
    OWNER to postgres;

19 src/migrations/07_drafts.sql Normal file
@@ -0,0 +1,19 @@
-- Table: public.drafts

-- DROP TABLE IF EXISTS public.drafts;

CREATE TABLE IF NOT EXISTS public.drafts
(
    "DraftIDTime" timestamp without time zone NOT NULL DEFAULT now(),
    "DraftIDUser" character varying COLLATE pg_catalog."default" NOT NULL,
    CONSTRAINT drafts_pkey PRIMARY KEY ("DraftIDUser", "DraftIDTime"),
    CONSTRAINT "drafts_DraftIDUser_fkey" FOREIGN KEY ("DraftIDUser")
        REFERENCES public.users ("UserID") MATCH SIMPLE
        ON UPDATE NO ACTION
        ON DELETE NO ACTION
)

TABLESPACE pg_default;

ALTER TABLE IF EXISTS public.drafts
    OWNER to postgres;

17 src/migrations/08_files.sql Normal file
@@ -0,0 +1,17 @@
-- Table: public.files

-- DROP TABLE IF EXISTS public.files;

CREATE TABLE IF NOT EXISTS public.files
(
    "Hash" character varying COLLATE pg_catalog."default" NOT NULL,
    "ContentType" character varying(255) COLLATE pg_catalog."default" NOT NULL,
    Size integer NOT NULL,
    Data bytea NOT NULL,
    CONSTRAINT files_pkey PRIMARY KEY ("Hash")
)

TABLESPACE pg_default;

ALTER TABLE IF EXISTS public.files
    OWNER to postgres;

27 src/migrations/09_departments.sql Normal file
@@ -0,0 +1,27 @@
-- Table: public.departments

-- DROP TABLE IF EXISTS public.departments;

CREATE TABLE IF NOT EXISTS public.departments
(
    "DepartmentID" integer NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 MINVALUE 1 MAXVALUE 2147483647 CACHE 1 ),
    "DepartmentName" character varying COLLATE pg_catalog."default" NOT NULL,
    "Description" character varying COLLATE pg_catalog."default",
    "ManagerID" character varying COLLATE pg_catalog."default" NOT NULL,
    "ViceManagerID" character varying COLLATE pg_catalog."default" NOT NULL,
    "Status" smallint NOT NULL DEFAULT 1,
    CONSTRAINT departments_pkey PRIMARY KEY ("DepartmentID"),
    CONSTRAINT "ForeignKeyManager" FOREIGN KEY ("ManagerID")
        REFERENCES public.users ("UserID") MATCH SIMPLE
        ON UPDATE NO ACTION
        ON DELETE NO ACTION,
    CONSTRAINT "ForeignKeyViceManager" FOREIGN KEY ("ViceManagerID")
        REFERENCES public.users ("UserID") MATCH SIMPLE
        ON UPDATE NO ACTION
        ON DELETE NO ACTION
)

TABLESPACE pg_default;

ALTER TABLE IF EXISTS public.departments
    OWNER to postgres;

18 src/migrations/10_uits.sql Normal file
@@ -0,0 +1,18 @@
-- Table: public.units

-- DROP TABLE IF EXISTS public.units;

CREATE TABLE IF NOT EXISTS public.units
(
    "UnitID" integer NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 MINVALUE 1 MAXVALUE 2147483647 CACHE 1 ),
    "Display" character varying(255) COLLATE pg_catalog."default" NOT NULL,
    "Abbreviation" character varying(10) COLLATE pg_catalog."default" NOT NULL,
    "Description" character varying COLLATE pg_catalog."default",
    "Status" smallint NOT NULL DEFAULT 1,
    CONSTRAINT units_pkey PRIMARY KEY ("UnitID")
)

TABLESPACE pg_default;

ALTER TABLE IF EXISTS public.units
    OWNER to postgres;

0 src/migrations/11_orders.sql Normal file

15 src/migrations/12_orderPositions.sql Normal file
@@ -0,0 +1,15 @@
-- Table: public.orderPositions

-- DROP TABLE IF EXISTS public."orderPositions";

CREATE TABLE IF NOT EXISTS public."orderPositions"
(
    "OrderID" integer NOT NULL,
    "PositionID" integer NOT NULL,
    CONSTRAINT "oderPositions_pkey" PRIMARY KEY ("PositionID", "OrderID")
)

TABLESPACE pg_default;

ALTER TABLE IF EXISTS public."orderPositions"
    OWNER to postgres;

25 src/migrations/13_drafts_files.sql Normal file
@@ -0,0 +1,25 @@
-- Table: public.drafts_files

-- DROP TABLE IF EXISTS public.drafts_files;

CREATE TABLE IF NOT EXISTS public.drafts_files
(
    "DraftIDTime" timestamp without time zone NOT NULL,
    "DraftIDUser" character varying COLLATE pg_catalog."default" NOT NULL,
    "FileHash" character varying COLLATE pg_catalog."default" NOT NULL,
    "FileName" character varying COLLATE pg_catalog."default" NOT NULL,
    CONSTRAINT drafts_files_pkey PRIMARY KEY ("DraftIDTime", "DraftIDUser", "FileHash"),
    CONSTRAINT "drafts_files_DraftIDTime_DraftIDUser_fkey" FOREIGN KEY ("DraftIDTime", "DraftIDUser")
        REFERENCES public.drafts ("DraftIDTime", "DraftIDUser") MATCH SIMPLE
        ON UPDATE NO ACTION
        ON DELETE CASCADE,
    CONSTRAINT "drafts_files_FileHash_fkey" FOREIGN KEY ("FileHash")
        REFERENCES public.files ("Hash") MATCH SIMPLE
        ON UPDATE NO ACTION
        ON DELETE CASCADE
)

TABLESPACE pg_default;

ALTER TABLE IF EXISTS public.drafts_files
    OWNER to postgres;

36 src/migrations/14_orderPosition_files.sql Normal file
@@ -0,0 +1,36 @@
-- Table: public.orderPositions_files

-- DROP TABLE IF EXISTS public."orderPositions_files";

CREATE TABLE IF NOT EXISTS public."orderPositions_files"
(
    "OrderID" integer NOT NULL,
    "PositionID" integer NOT NULL,
    "FileHash" character varying COLLATE pg_catalog."default" NOT NULL,
    "FileName" character varying COLLATE pg_catalog."default" NOT NULL,
    CONSTRAINT "orderPositions_files_pkey" PRIMARY KEY ("OrderID", "PositionID", "FileHash"),
    CONSTRAINT "orderPositions_files_FileHash_fkey" FOREIGN KEY ("FileHash")
        REFERENCES public.files ("Hash") MATCH SIMPLE
        ON UPDATE NO ACTION
        ON DELETE CASCADE
        NOT VALID,
    CONSTRAINT "orderPositions_files_OrderID_PositionID_fkey" FOREIGN KEY ("OrderID", "PositionID")
        REFERENCES public."orderPositions" ("OrderID", "PositionID") MATCH SIMPLE
        ON UPDATE NO ACTION
        ON DELETE CASCADE
        NOT VALID
)

TABLESPACE pg_default;

ALTER TABLE IF EXISTS public."orderPositions_files"
    OWNER to postgres;
-- Index: FileHashIndex

-- DROP INDEX IF EXISTS public."FileHashIndex";

CREATE INDEX IF NOT EXISTS "FileHashIndex"
    ON public."orderPositions_files" USING btree
    ("FileHash" COLLATE pg_catalog."default" ASC NULLS LAST)
    WITH (deduplicate_items=True)
    TABLESPACE pg_default;

24 src/migrations/15_rulesets.sql Normal file
@@ -0,0 +1,24 @@
-- Table: public.rulesets

-- DROP TABLE IF EXISTS public.rulesets;

CREATE TABLE IF NOT EXISTS public.rulesets
(
    "RulesetID" integer NOT NULL,
    "RulesetHash" character varying COLLATE pg_catalog."default" NOT NULL,
    "CreatedBy" character varying COLLATE pg_catalog."default" NOT NULL,
    "Timestamp" time with time zone NOT NULL DEFAULT now(),
    "Status" smallint NOT NULL DEFAULT 0,
    "Signature" character varying COLLATE pg_catalog."default" NOT NULL,
    "PublicKeyID" integer NOT NULL,
    CONSTRAINT rulesets_pkey PRIMARY KEY ("RulesetID"),
    CONSTRAINT "rulesets_CreatedBy_fkey" FOREIGN KEY ("CreatedBy")
        REFERENCES public.users ("UserID") MATCH SIMPLE
        ON UPDATE NO ACTION
        ON DELETE NO ACTION
)

TABLESPACE pg_default;

ALTER TABLE IF EXISTS public.rulesets
    OWNER to postgres;

15 src/migrations/16_vendors.sql Normal file
@@ -0,0 +1,15 @@
-- Table: public.vendors

-- DROP TABLE IF EXISTS public.vendors;

CREATE TABLE IF NOT EXISTS public.vendors
(
    "VendorID" character varying(20) COLLATE pg_catalog."default" NOT NULL,
    "Name" character varying(100) COLLATE pg_catalog."default" NOT NULL,
    CONSTRAINT vendors_pkey PRIMARY KEY ("VendorID")
)

TABLESPACE pg_default;

ALTER TABLE IF EXISTS public.vendors
    OWNER to postgres;