First working version
This commit is contained in:
1
.gitignore
vendored
1
.gitignore
vendored
@@ -8,3 +8,4 @@ __pycache__
|
||||
# CDK asset staging directory
|
||||
.cdk.staging
|
||||
cdk.out
|
||||
.build
|
||||
|
||||
20
Pipfile
Normal file
20
Pipfile
Normal file
@@ -0,0 +1,20 @@
|
||||
[[source]]
|
||||
name = "pypi"
|
||||
url = "https://pypi.org/simple"
|
||||
verify_ssl = true
|
||||
|
||||
[dev-packages]
|
||||
|
||||
[packages]
|
||||
setuptools = {editable = true, file = "file:///Users/domenico/dev/once"}
|
||||
"aws-cdk.core" = "*"
|
||||
"aws-cdk.aws-apigatewayv2" = "*"
|
||||
"aws-cdk.aws-dynamodb" = "*"
|
||||
"aws-cdk.aws-lambda" = "*"
|
||||
"aws-cdk.aws-s3" = "*"
|
||||
click = "*"
|
||||
requests = "*"
|
||||
Pygments = "*"
|
||||
|
||||
[requires]
|
||||
python_version = "3.7"
|
||||
329
Pipfile.lock
generated
Normal file
329
Pipfile.lock
generated
Normal file
@@ -0,0 +1,329 @@
|
||||
{
|
||||
"_meta": {
|
||||
"hash": {
|
||||
"sha256": "41f21128886eed53df2092337d629cdfa157f5e32f93ddf8faf59d51c3b8da3e"
|
||||
},
|
||||
"pipfile-spec": 6,
|
||||
"requires": {
|
||||
"python_version": "3.7"
|
||||
},
|
||||
"sources": [
|
||||
{
|
||||
"name": "pypi",
|
||||
"url": "https://pypi.org/simple",
|
||||
"verify_ssl": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"attrs": {
|
||||
"hashes": [
|
||||
"sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",
|
||||
"sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||
"version": "==19.3.0"
|
||||
},
|
||||
"aws-cdk.assets": {
|
||||
"hashes": [
|
||||
"sha256:38866f982420c5d1a26dc8fb6d88cdd20d0fe6febb94994cfd80e60a1706aa6c",
|
||||
"sha256:fd07d1b704aac06e05b45490a1a1fe86abf34c33e160b2a5ecabb7696e1b0c83"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.aws-apigatewayv2": {
|
||||
"hashes": [
|
||||
"sha256:c1d21aec9633556f85d89662bd1793b87caa65081ed50fb5c3805e8f4cc809cf",
|
||||
"sha256:df54154ce6acc664f3ab93d0875c824d0bee10fd38d184f5f4dfc09b2309d473"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.aws-applicationautoscaling": {
|
||||
"hashes": [
|
||||
"sha256:2eea9539f8068f162066ce1b5e2f63bc2e3c359ac278666e84ed9e8962716c7e",
|
||||
"sha256:ff38d2a0c88848147a4cd7eb698d0e954d752435a68fc0493bb21e57ab7e81ac"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.aws-autoscaling-common": {
|
||||
"hashes": [
|
||||
"sha256:c72dcb3ad1b30820ae46e49a3f5812a2c31f70a054a4b224905c4cf26c2217aa",
|
||||
"sha256:fd0292e2924f60914407c580da69c827c9ce5b4efdfdb033ae4b4996c1c1ad9e"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.aws-cloudformation": {
|
||||
"hashes": [
|
||||
"sha256:89ac7fd0fee061e7843aed6b03ca4bfc93eb645c64694a24868ee2b4c7982e9f",
|
||||
"sha256:d91ca34e399c542cea35717d6490550ccbbe2b187e4d364c714ba5bf5aae8d1e"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.aws-cloudwatch": {
|
||||
"hashes": [
|
||||
"sha256:24d1b6d6ecd16327c24f906d31795a1822bdb08bbf67b0c59e9803d0b3801880",
|
||||
"sha256:41ceba4eb2f8646de7296616289053f82a715317840fc962c2dba638308ad8f3"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.aws-dynamodb": {
|
||||
"hashes": [
|
||||
"sha256:4c94ae9b43749ed616c82372cfb6e26a5cc2c6928badd8433cf455d38d0183d1",
|
||||
"sha256:5b6394360d903918d2d9f381ac9e9b8c3198a92bcd591e3b54ce00c743ee82ca"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.aws-ec2": {
|
||||
"hashes": [
|
||||
"sha256:0cbfe1ad838849bebd2dfc2fb18f6ab120525617e7d1195b2cfa12adb00eb28f",
|
||||
"sha256:78e24d71496c01b172adf231e4b156e46dc37a513906d87616e108d9998dbffb"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.aws-events": {
|
||||
"hashes": [
|
||||
"sha256:9a273514ac6f3070435f83efadd154b3a367f99c43c9eaa2729eae348ac66f40",
|
||||
"sha256:e490e508461d6f13424fa1b04d581565c9ce9e3eaa5ea7bc2d50d4965f710a77"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.aws-iam": {
|
||||
"hashes": [
|
||||
"sha256:b1372ac47af2399d97cae3c94e5e3800211e39b89dbf58467b62462b3e4b40d8",
|
||||
"sha256:fcb11db48211113554a7fd0c17ebc239226cc6bcfed96fa8c740efd76e252a6d"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.aws-kms": {
|
||||
"hashes": [
|
||||
"sha256:ad2cc8426c384681071164842bc3b6e7f298258dad794553d4016a2b8e1f7b39",
|
||||
"sha256:d1cd82adab05f6ac9dda200e7305b7c142e62d5c803610cd7fa147a8b2ce5c55"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.aws-lambda": {
|
||||
"hashes": [
|
||||
"sha256:f209f53c8324f92cbcdb124db63b6f008fcc496fc57f56ff953d68841b385554",
|
||||
"sha256:f2116c7184de3d0f521016d042b21f9351f8af54fffc2691190eae95a708733f"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.aws-logs": {
|
||||
"hashes": [
|
||||
"sha256:7d2d33f7f90800dd93e6159e67c119311c52534b8618d96766423af230a7b667",
|
||||
"sha256:f4a52f8b834c96825938c8a70eb190924e64bb52e4896ecbc3aeeb48a92dee6d"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.aws-s3": {
|
||||
"hashes": [
|
||||
"sha256:9c4411ea3f0efa4b5359d2681b2b60440fc8f43972862d16b66cabea2b1ee8a0",
|
||||
"sha256:f3fec357cf831c95e22fd1ae6666524f2493a99cc125e86f784b28040694e4bf"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.aws-s3-assets": {
|
||||
"hashes": [
|
||||
"sha256:43cb7c804976140ad04ab456f595d4cad7c713feb3b78a678eb8e0c68d4a6dda",
|
||||
"sha256:b4a69afcbe08e8ff6bf06508fbcb9ca7c253723b2df79a88fba69612c767f852"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.aws-sns": {
|
||||
"hashes": [
|
||||
"sha256:235467a8a9b1f8824d967201f932231f105dabababa09384c0ad68e2bf26593a",
|
||||
"sha256:5df1e11298b55ad4d1c2fa6d11143e73ecbae8311b1cc78cb402c62d2e5b4e4b"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.aws-sqs": {
|
||||
"hashes": [
|
||||
"sha256:29ed98a3f447725e3a8f8542295696d5ba41dbce708d6acf87f65796e699b97d",
|
||||
"sha256:f90a98d73918e986127e8bb32ab27b94890f6ba2b72f2c6b847511a480b484bc"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.aws-ssm": {
|
||||
"hashes": [
|
||||
"sha256:59d7bec1fbc9d3795ec2376f1cd50d34e789c3d04791364c6b15200d426d17c5",
|
||||
"sha256:9ebb5c0729560a72d5982fae874125758dbc81bf379bfd532272479dc989e5b8"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.cdk-assets-schema": {
|
||||
"hashes": [
|
||||
"sha256:056a1612932a35cd05b252d346403c7861fd4cd2475c6dcbd2da84ead2392b27",
|
||||
"sha256:58b0e2fcdc43da88ef605a595c4d546f442f060606adfa8986bc3d0a15ab26f7"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.cloud-assembly-schema": {
|
||||
"hashes": [
|
||||
"sha256:0f3b8cfbe8eeca7e29283a01d4c55921de0b412c71e845b105ee676f37981575",
|
||||
"sha256:d71cbee43f5d60f67c72cacc01d07676af83f373cbb938b22fdca55859c18a85"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.core": {
|
||||
"hashes": [
|
||||
"sha256:589ae5c0fecb5aa5ba59873da1e6a51b49b85c2cfa35b957c8a0734adc1ed510",
|
||||
"sha256:948cb0c63149a3d0dba739047a2705149c8ae4b69e725ade9c4f0f5b168a3673"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.custom-resources": {
|
||||
"hashes": [
|
||||
"sha256:20a27c05624c8c047e5dd3fef84242c1c17c9d1440767d40b43889176cfd90f6",
|
||||
"sha256:e972f346e61069f345003caa42defe274b348359a7b07304246dc7dce14411d3"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.cx-api": {
|
||||
"hashes": [
|
||||
"sha256:04af7a26bd19e28115fdf8bf78d02bb08d1c13786932026003ecd1c7c5d2bf88",
|
||||
"sha256:af386b4d6d9e26809c87be0d71af1183f9f6a338a060a393e8cf233b99b6f792"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"aws-cdk.region-info": {
|
||||
"hashes": [
|
||||
"sha256:09079b8dd2b69299a13312515bc465f2f43a7a400b4a334216bb4148b7323605",
|
||||
"sha256:c4caa4c2ebf7cd9959105038972ddfae2c827c40976afdafec47a97cdcfef514"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.44.0"
|
||||
},
|
||||
"cattrs": {
|
||||
"hashes": [
|
||||
"sha256:616972ae3dfa6e623a40ad3cb845420e64942989152774ab055e5c2b2f89f997",
|
||||
"sha256:b7ab5cf8ad127c42eefd01410c1c6e28569a45a255ea80ed968511873c433c7a"
|
||||
],
|
||||
"version": "==1.0.0"
|
||||
},
|
||||
"certifi": {
|
||||
"hashes": [
|
||||
"sha256:1d987a998c75633c40847cc966fcf5904906c920a7f17ef374f5aa4282abd304",
|
||||
"sha256:51fcb31174be6e6664c5f69e3e1691a2d72a1a12e90f872cbdb1567eb47b6519"
|
||||
],
|
||||
"version": "==2020.4.5.1"
|
||||
},
|
||||
"chardet": {
|
||||
"hashes": [
|
||||
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
|
||||
"sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
|
||||
],
|
||||
"version": "==3.0.4"
|
||||
},
|
||||
"click": {
|
||||
"hashes": [
|
||||
"sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a",
|
||||
"sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==7.1.2"
|
||||
},
|
||||
"constructs": {
|
||||
"hashes": [
|
||||
"sha256:ba2e0c3ff46e08095307ba45e9dcbe2e16599c2761ca661c62024121fb331cb9",
|
||||
"sha256:ce96789470fe6a05c3ba168f6fbf4bd41e6daa01b62731bb7728941ce1d3312a"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==3.0.3"
|
||||
},
|
||||
"idna": {
|
||||
"hashes": [
|
||||
"sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb",
|
||||
"sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||
"version": "==2.9"
|
||||
},
|
||||
"jsii": {
|
||||
"hashes": [
|
||||
"sha256:953d81df16cd292f32512e024acd3409c26b16e0b91dfb73090a78974d362d54",
|
||||
"sha256:f3950287f6cb592931963eeaedc5c223ba4a2f02f0c45108584affcd66685257"
|
||||
],
|
||||
"markers": "python_version >= '3.6'",
|
||||
"version": "==1.6.0"
|
||||
},
|
||||
"publication": {
|
||||
"hashes": [
|
||||
"sha256:0248885351febc11d8a1098d5c8e3ab2dabcf3e8c0c96db1e17ecd12b53afbe6",
|
||||
"sha256:68416a0de76dddcdd2930d1c8ef853a743cc96c82416c4e4d3b5d901c6276dc4"
|
||||
],
|
||||
"version": "==0.0.3"
|
||||
},
|
||||
"pygments": {
|
||||
"hashes": [
|
||||
"sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44",
|
||||
"sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.6.1"
|
||||
},
|
||||
"python-dateutil": {
|
||||
"hashes": [
|
||||
"sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c",
|
||||
"sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||
"version": "==2.8.1"
|
||||
},
|
||||
"requests": {
|
||||
"hashes": [
|
||||
"sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee",
|
||||
"sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.23.0"
|
||||
},
|
||||
"setuptools": {
|
||||
"editable": true,
|
||||
"file": "file:///Users/domenico/dev/once"
|
||||
},
|
||||
"six": {
|
||||
"hashes": [
|
||||
"sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
|
||||
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
|
||||
],
|
||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
|
||||
"version": "==1.15.0"
|
||||
},
|
||||
"typing-extensions": {
|
||||
"hashes": [
|
||||
"sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5",
|
||||
"sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae",
|
||||
"sha256:f8d2bd89d25bc39dabe7d23df520442fa1d8969b82544370e03d88b5a591c392"
|
||||
],
|
||||
"version": "==3.7.4.2"
|
||||
},
|
||||
"urllib3": {
|
||||
"editable": true,
|
||||
"file": "file:///Users/domenico/dev/once"
|
||||
}
|
||||
},
|
||||
"develop": {}
|
||||
}
|
||||
69
README.md
69
README.md
@@ -1,58 +1,33 @@
|
||||
# once: a one-time file sharing personal service
|
||||
|
||||
# Welcome to your CDK Python project!
|
||||
It happens that I want to share a file with someone which is sensitive enough
|
||||
that I don't want to upload on a public free service.
|
||||
|
||||
This is a blank project for Python development with CDK.
|
||||
I would like to have something like transfer.sh, running
|
||||
as a personal service, with the following features:
|
||||
|
||||
The `cdk.json` file tells the CDK Toolkit how to execute your app.
|
||||
- it must be serverless (I'm not willing to pay except for the actual file storage, and only for the time strictly required)
|
||||
- it must return a link that I can share to anyone
|
||||
- file must be deleted as soon as it gets *successfully downloaded*
|
||||
- it must expose a simple HTTP API, so *curl* should suffice to share a file
|
||||
- it must be protected with some form of authentication
|
||||
|
||||
This project is set up like a standard Python project. The initialization
|
||||
process also creates a virtualenv within this project, stored under the .env
|
||||
directory. To create the virtualenv it assumes that there is a `python3`
|
||||
(or `python` for Windows) executable in your path with access to the `venv`
|
||||
package. If for any reason the automatic creation of the virtualenv fails,
|
||||
you can create the virtualenv manually.
|
||||
With CDK I could create the following resources:
|
||||
|
||||
To manually create a virtualenv on MacOS and Linux:
|
||||
- An S3 bucket to host the uploaded files
|
||||
- A Lambda function to implement the 'get-upload-ticket'
|
||||
- A Dynamodb table to store the information about the entries
|
||||
- Another Lambda function to implement a "smart" download handler, to delete the file after the very first successful transfer.
|
||||
|
||||
```
|
||||
$ python3 -m venv .env
|
||||
```
|
||||
I will use API Gateway to expose the lambda functions as an HTTP API.
|
||||
|
||||
After the init process completes and the virtualenv is created, you can use the following
|
||||
step to activate your virtualenv.
|
||||
|
||||
```
|
||||
$ source .env/bin/activate
|
||||
```
|
||||
HERE BE DIAGRAM!
|
||||
|
||||
If you are on a Windows platform, you would activate the virtualenv like this:
|
||||
|
||||
```
|
||||
% .env\Scripts\activate.bat
|
||||
```
|
||||
mkdir once
|
||||
cd once
|
||||
cdk init app
|
||||
|
||||
Once the virtualenv is activated, you can install the required dependencies.
|
||||
|
||||
```
|
||||
$ pip install -r requirements.txt
|
||||
```
|
||||
|
||||
At this point you can now synthesize the CloudFormation template for this code.
|
||||
|
||||
```
|
||||
$ cdk synth
|
||||
```
|
||||
|
||||
To add additional dependencies, for example other CDK libraries, just add
|
||||
them to your `setup.py` file and rerun the `pip install -r requirements.txt`
|
||||
command.
|
||||
|
||||
## Useful commands
|
||||
|
||||
* `cdk ls` list all stacks in the app
|
||||
* `cdk synth` emits the synthesized CloudFormation template
|
||||
* `cdk deploy` deploy this stack to your default AWS account/region
|
||||
* `cdk diff` compare deployed stack with current state
|
||||
* `cdk docs` open CDK documentation
|
||||
|
||||
Enjoy!
|
||||
Then it should be easy to start organizing the project layout.
|
||||
One single stack, one folder for each lambda function.
|
||||
|
||||
68
client/once.py
Normal file
68
client/once.py
Normal file
@@ -0,0 +1,68 @@
|
||||
'''
|
||||
Simple command to share one-time files
|
||||
'''
|
||||
|
||||
import os
|
||||
import json
|
||||
import time
|
||||
|
||||
import click
|
||||
import requests
|
||||
from pygments import highlight, lexers, formatters
|
||||
|
||||
|
||||
ONCE_API_URL = os.getenv('ONCE_API_URL')
|
||||
|
||||
|
||||
def highlight_json(obj):
    '''Serialize `obj` to pretty-printed JSON and colorize it for the terminal.'''
    pretty = json.dumps(obj, sort_keys=True, indent=4)
    return highlight(pretty, lexers.JsonLexer(), formatters.TerminalFormatter())
|
||||
|
||||
|
||||
def echo_obj(obj):
    '''Write `obj` to the terminal as syntax-highlighted JSON.'''
    click.echo(highlight_json(obj))
|
||||
|
||||
|
||||
def api_req(method: str, url: str, verbose: bool = False, **kwargs):
    '''
    Perform an HTTP request against the configured once API.

    :param method: HTTP method name, 'GET' or 'POST' (case-insensitive).
    :param url: path appended to ONCE_API_URL to form the target URL.
    :param verbose: when True, print the request line, the response
        status and the pretty-printed JSON body.
    :param kwargs: forwarded verbatim to the underlying requests call.
    :return: the requests Response object.
    :raises ValueError: for any HTTP method other than GET/POST.
    '''
    verb = method.lower()
    if verb not in ('get', 'post'):
        raise ValueError(f'Unsupported HTTP method "{verb}"')

    target = f'{ONCE_API_URL}{url}'

    if verbose:
        print(f'{verb.upper()} {target}')

    # Dispatch to requests.get / requests.post by name.
    response = getattr(requests, verb)(target, **kwargs)

    if verbose:
        print(f'Server response status: {response.status_code}')
        echo_obj(response.json())

    return response
|
||||
|
||||
|
||||
@click.command('share')
@click.argument('file', type=click.File(mode='rb'), required=True)
@click.option('--verbose', '-v', is_flag=True, default=False, help='Enables verbose output.')
def share(file: click.File, verbose: bool):
    '''Share FILE through the once service and print its one-time URL.'''
    # Ask the API for a new one-time entry plus a pre-signed S3 POST.
    entry = api_req('GET', '/',
                    params={'f': os.path.basename(file.name)},
                    verbose=verbose).json()

    once_url = entry['once_url']
    upload_data = entry['presigned_post']
    files = {'file': file}

    upload_started = time.time()
    response = requests.post(upload_data['url'],
                             data=upload_data['fields'],
                             files=files)
    # BUGFIX: the upload status was never checked, so a failed S3 POST
    # still printed a "success" message and a dead link. Fail loudly.
    response.raise_for_status()

    upload_time = time.time() - upload_started
    print(f"File uploaded in {upload_time}s")
    print(f"File can be downloaded once at: {once_url}")
|
||||
|
||||
|
||||
# Script entry point: delegate to the click command.
if __name__ == '__main__':
    share()
|
||||
3
client/requirements.txt
Normal file
3
client/requirements.txt
Normal file
@@ -0,0 +1,3 @@
|
||||
click
|
||||
pygments
|
||||
requests
|
||||
72
once/download-and-delete/handler.py
Normal file
72
once/download-and-delete/handler.py
Normal file
@@ -0,0 +1,72 @@
|
||||
import os
|
||||
import io
|
||||
import json
|
||||
import logging
|
||||
|
||||
import boto3
|
||||
|
||||
|
||||
def is_debug_enabled() -> bool:
    '''Interpret the DEBUG environment variable as a boolean flag.

    'false' and '0' (case-insensitive) disable debug; any other
    non-empty value enables it; an unset variable means disabled.
    '''
    flag = os.getenv('DEBUG', 'false').lower()
    return False if flag in ('false', '0') else bool(flag)
|
||||
|
||||
|
||||
DEBUG = is_debug_enabled()
|
||||
FILES_BUCKET = os.getenv('FILES_BUCKET')
|
||||
FILES_TABLE_NAME = os.getenv('FILES_TABLE_NAME')
|
||||
PRESIGNED_URL_EXPIRES_IN = int(os.getenv('PRESIGNED_URL_EXPIRES_IN', 20))
|
||||
|
||||
|
||||
log = logging.getLogger()
|
||||
if DEBUG:
|
||||
log.setLevel(logging.DEBUG)
|
||||
else:
|
||||
log.setLevel(logging.INFO)
|
||||
|
||||
|
||||
def on_event(event, context):
    '''Lambda handler: redirect to a pre-signed S3 URL for a shared file,
    marking the entry as consumed so it can only be downloaded once.

    :param event: API Gateway (HTTP API) proxy event; must carry
        `pathParameters.entry_id` and `pathParameters.filename`.
    :param context: Lambda context object (logged only).
    :return: proxy response dict — 301 redirect to the pre-signed URL on
        success, 404 when the entry is unknown or already downloaded.
    '''
    log.info(f'Event received: {event}')
    log.info(f'Context is: {context}')
    log.debug(f'Debug mode is {DEBUG}')
    log.debug(f'Files bucket is "{FILES_BUCKET}"')

    entry_id = event['pathParameters']['entry_id']
    filename = event['pathParameters']['filename']
    # BUGFIX: `filename` was read but never used and the object key
    # contained a literal "(unknown)" placeholder; the key must match
    # the object uploaded via the upload ticket.
    object_name = f'{entry_id}/{filename}'

    dynamodb = boto3.client('dynamodb')
    entry = dynamodb.get_item(
        TableName=FILES_TABLE_NAME,
        Key={'id': {'S': entry_id}})

    log.debug(f'This is the GET_ITEM response: {entry}')

    # Unknown entries and already-consumed entries look identical to the
    # caller: both are a 404.
    if 'Item' not in entry or 'deleted' in entry['Item']:
        error_message = f'Entry not found: {object_name}'
        log.info(error_message)
        return {'statusCode': 404, 'body': error_message}

    s3 = boto3.client('s3')
    # Short-lived URL: the client follows the redirect immediately, so
    # the expiration only needs to cover the redirect round-trip.
    download_url = s3.generate_presigned_url(
        'get_object',
        Params={'Bucket': FILES_BUCKET, 'Key': object_name},
        ExpiresIn=PRESIGNED_URL_EXPIRES_IN)

    # Mark the entry as consumed *before* redirecting, so a concurrent
    # second request cannot race a successful download.
    dynamodb.update_item(
        TableName=FILES_TABLE_NAME,
        Key={'id': {'S': entry_id}},
        UpdateExpression='SET deleted = :deleted',
        ExpressionAttributeValues={':deleted': {'BOOL': True}})

    log.info(f'Entry {object_name} marked as deleted')

    return {
        'statusCode': 301,
        'headers': {
            'Location': download_url
        }
    }
|
||||
|
||||
167
once/get-upload-ticket/handler.py
Normal file
167
once/get-upload-ticket/handler.py
Normal file
@@ -0,0 +1,167 @@
|
||||
import base64
|
||||
import hashlib
|
||||
import hmac
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import string
|
||||
from typing import Dict
|
||||
from urllib.parse import quote_plus, urlencode
|
||||
|
||||
import boto3
|
||||
import requests
|
||||
from botocore.config import Config
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
|
||||
def is_debug_enabled() -> bool:
    '''Read the DEBUG environment variable and interpret it as a boolean.

    'false'/'0' (case-insensitive) and an unset variable mean disabled;
    any other non-empty value means enabled.
    '''
    raw = os.getenv('DEBUG', 'false').lower()
    if raw in ('false', '0'):
        return False
    return bool(raw)
|
||||
|
||||
|
||||
DEBUG = is_debug_enabled()
|
||||
APP_URL = os.getenv('APP_URL')
|
||||
EXPIRATION_TIMEOUT = int(os.getenv('EXPIRATION_TIMEOUT', 60*5))
|
||||
FILES_BUCKET = os.getenv('FILES_BUCKET')
|
||||
FILES_TABLE_NAME = os.getenv('FILES_TABLE_NAME')
|
||||
S3_REGION_NAME = os.getenv('S3_REGION_NAME', 'eu-west-1')
|
||||
S3_SIGNATURE_VERSION = os.getenv('S3_SIGNATURE_VERSION', 's3v4')
|
||||
|
||||
|
||||
log = logging.getLogger()
|
||||
if DEBUG:
|
||||
log.setLevel(logging.DEBUG)
|
||||
else:
|
||||
log.setLevel(logging.INFO)
|
||||
|
||||
|
||||
class BadRequestError(Exception):
    '''Raised when the incoming request is malformed (mapped to HTTP 400).'''
|
||||
|
||||
|
||||
class UnauthorizedError(Exception):
    '''Raised when the request fails authentication (mapped to HTTP 401).'''
|
||||
|
||||
|
||||
def on_event(event, context):
    '''Lambda handler: create a one-time sharing entry and return a
    pre-signed S3 POST the client can use to upload the file.

    The filename is taken from the `f` query string parameter.

    :param event: API Gateway (HTTP API) proxy event.
    :param context: Lambda context object (logged only).
    :return: proxy response dict whose JSON body contains `once_url` and
        `presigned_post` on success (200), or a `message` on error
        (400 bad request, 401 unauthorized, 500 anything else).
    '''
    log.info(f'Event received: {event}')
    log.info(f'Context is: {context}')
    log.info(f'Requests library version: {requests.__version__}')

    log.debug(f'Debug mode is {DEBUG}')
    log.debug(f'App URL is "{APP_URL}"')
    log.debug(f'Files bucket is "{FILES_BUCKET}"')
    log.debug(f'Files Dynamodb table name is "{FILES_TABLE_NAME}"')
    log.debug(f'S3 region name is: "{S3_REGION_NAME}"')
    log.debug(f'S3 signature algorithm version is "{S3_SIGNATURE_VERSION}"')
    log.debug(f'Pre-signed urls will expire after {EXPIRATION_TIMEOUT} seconds')

    # BUGFIX: the HTTP API can deliver queryStringParameters as None,
    # which would crash the .get() below.
    q = event.get('queryStringParameters') or {}
    filename = q.get('f')
    response_code = 200
    response = {}
    try:
        if filename is None:
            # BUGFIX: the message used to mention `filename_prefix`,
            # but the parameter actually read is `f`.
            raise BadRequestError(
                'Please provide a valid value for the `f` '
                'query parameter')

        # 6 random alphanumeric characters as the public entry id.
        # NOTE(review): random is not cryptographically strong; consider
        # secrets.choice for a security-sensitive token.
        domain = string.ascii_uppercase + string.ascii_lowercase + string.digits
        entry_id = ''.join(random.choice(domain) for _ in range(6))
        # BUGFIX: the object key and the public URL must embed the
        # actual filename instead of a literal "(unknown)" placeholder,
        # matching what the download handler reconstructs.
        object_name = f'{entry_id}/{filename}'
        response['once_url'] = f'{APP_URL}{entry_id}/{filename}'

        dynamodb = boto3.client('dynamodb')
        dynamodb.put_item(
            TableName=FILES_TABLE_NAME,
            Item={
                'id': {'S': entry_id},
                'object_name': {'S': object_name}
            })

        log.debug(f'Creating pre-signed post for {object_name} on '
                  f'{FILES_BUCKET} (expiration={EXPIRATION_TIMEOUT})')

        presigned_post = create_presigned_post(
            bucket_name=FILES_BUCKET,
            object_name=object_name,
            expiration=EXPIRATION_TIMEOUT)

        log.debug(f'Presigned-Post response: {presigned_post}')

        # Long life and prosperity!
        log.info(f'Authorized upload request for {object_name}')
        response['presigned_post'] = presigned_post
    except BadRequestError as e:
        response_code = 400
        response = dict(message=str(e))
    except UnauthorizedError as e:
        # BUGFIX: `e` was not bound here ("except UnauthorizedError:"),
        # so every 401 path raised NameError and became a 500.
        response_code = 401
        response = dict(message=str(e))
    except Exception as e:
        response_code = 500
        response = dict(message=str(e))

    # BUGFIX: the return used to live in a `finally` block, which
    # silently swallows any in-flight exception (including SystemExit).
    # A plain return after the try block has the intended behavior.
    return {
        'statusCode': response_code,
        'body': json.dumps(response)
    }
|
||||
|
||||
|
||||
|
||||
# def validate_request(event: Dict, secret_key: str) -> bool:
|
||||
# '''
|
||||
# Validates the HMAC(SHA256) signature against the given `request`.
|
||||
# '''
|
||||
|
||||
# # discard any url prefix before '/v1/'
|
||||
# path = event['rawPath']
|
||||
# canonicalized_url = path[path.find('/v1/'):]
|
||||
|
||||
# if 'queryStringParameters' in event:
|
||||
# qs = urlencode(event['queryStringParameters'], quote_via=quote_plus)
|
||||
# canonicalized_url = f'{canonicalized_url}?{qs}'
|
||||
|
||||
# plain_text = canonicalized_url.encode('utf-8')
|
||||
# log.debug(f'Plain text: {plain_text}')
|
||||
|
||||
# encoded_signature = event['headers'][HMAC_SIGNATURE_HEADER]
|
||||
# log.debug(f'Received signature: {encoded_signature}')
|
||||
|
||||
# signature_value = base64.b64decode(encoded_signature)
|
||||
|
||||
# hmac_obj = hmac.new(base64.b64decode(secret_key),
|
||||
# msg=plain_text,
|
||||
# digestmod=hashlib.sha256)
|
||||
|
||||
# calculated_signature = hmac_obj.digest()
|
||||
# return calculated_signature == signature_value
|
||||
|
||||
|
||||
def create_presigned_post(bucket_name: str, object_name: str,
                          fields=None, conditions=None, expiration=3600):
    """Generate a presigned URL S3 POST request to upload a file.

    Thin wrapper around boto3's ``generate_presigned_post`` that pins
    the region and signature version from the module configuration.
    Note: any boto3/botocore error propagates to the caller — there is
    no error handling here.

    :param bucket_name: name of the target S3 bucket
    :param object_name: key under which the uploaded file will be stored
    :param fields: Dictionary of prefilled form fields
    :param conditions: List of conditions to include in the policy
    :param expiration: Time in seconds for the presigned URL to remain valid
    :return: Dictionary with the following keys:
        url: URL to post to
        fields: Dictionary of form fields and values to submit with the POST
    """
    # Signature version comes from S3_SIGNATURE_VERSION (default 's3v4').
    s3_client = boto3.client('s3',
                             region_name=S3_REGION_NAME,
                             config=Config(signature_version=S3_SIGNATURE_VERSION))

    return s3_client.generate_presigned_post(
        bucket_name, object_name,
        Fields=fields,
        Conditions=conditions,
        ExpiresIn=expiration)
|
||||
1
once/get-upload-ticket/requirements.txt
Normal file
1
once/get-upload-ticket/requirements.txt
Normal file
@@ -0,0 +1 @@
|
||||
requests
|
||||
@@ -1,9 +1,76 @@
|
||||
from aws_cdk import core
|
||||
import os
|
||||
|
||||
from aws_cdk import(
|
||||
core,
|
||||
aws_apigatewayv2 as apigw,
|
||||
aws_dynamodb as dynamodb,
|
||||
aws_lambda as lambda_,
|
||||
aws_s3 as s3)
|
||||
|
||||
from .utils import make_python_zip_bundle
|
||||
|
||||
|
||||
BASE_PATH = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
class OnceStack(core.Stack):
    '''CDK stack for the "once" one-time file sharing service.

    Wires together: an S3 bucket for the shared files, a DynamoDB table
    tracking each entry, a Lambda that issues upload tickets, a Lambda
    that serves-then-invalidates downloads, and an HTTP API in front.
    '''

    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Private, encrypted bucket for the uploaded files; destroyed
        # with the stack since the shared files are throw-away by design.
        self.files_bucket = s3.Bucket(self, 'files-bucket',
                                      bucket_name='once-shared-files',
                                      block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
                                      encryption=s3.BucketEncryption.S3_MANAGED,
                                      removal_policy=core.RemovalPolicy.DESTROY)

        # One item per shared file, keyed by the random entry id.
        self.files_table = dynamodb.Table(self, 'once-files-table',
                                          table_name='once-files',
                                          partition_key=dynamodb.Attribute(name='id', type=dynamodb.AttributeType.STRING),
                                          billing_mode=dynamodb.BillingMode.PAY_PER_REQUEST,
                                          removal_policy=core.RemovalPolicy.DESTROY)

        self.api = apigw.HttpApi(self, 'once-api', api_name='once-api')

        # Issues pre-signed upload tickets; bundled together with its
        # pip dependencies via make_python_zip_bundle.
        self.get_upload_ticket_function = lambda_.Function(self, 'get-upload-ticket-function',
                                                           function_name='once-get-upload-ticket',
                                                           runtime=lambda_.Runtime.PYTHON_3_7,
                                                           code=make_python_zip_bundle(os.path.join(BASE_PATH, 'get-upload-ticket')),
                                                           handler='handler.on_event',
                                                           description='Returns a pre-signed request to share a file',
                                                           environment={
                                                               'APP_URL': self.api.url,
                                                               'FILES_TABLE_NAME': self.files_table.table_name,
                                                               'FILES_BUCKET': self.files_bucket.bucket_name
                                                           })

        self.files_bucket.grant_put(self.get_upload_ticket_function)
        self.files_table.grant_read_write_data(self.get_upload_ticket_function)

        # Serves a one-time download and marks the entry as consumed;
        # it has no third-party deps, so a plain asset folder suffices.
        self.download_and_delete_function = lambda_.Function(self, 'download-and-delete-function',
                                                             function_name='once-download-and-delete',
                                                             runtime=lambda_.Runtime.PYTHON_3_7,
                                                             code=lambda_.Code.from_asset(os.path.join(BASE_PATH, 'download-and-delete')),
                                                             handler='handler.on_event',
                                                             description='Serves a file from S3 and deletes it as soon as it has been successfully transferred.',
                                                             environment={
                                                                 'FILES_BUCKET': self.files_bucket.bucket_name,
                                                                 'FILES_TABLE_NAME': self.files_table.table_name
                                                             })

        self.files_bucket.grant_read(self.download_and_delete_function)
        self.files_bucket.grant_delete(self.download_and_delete_function)
        self.files_table.grant_read_write_data(self.download_and_delete_function)

        get_upload_ticket_integration = apigw.LambdaProxyIntegration(handler=self.get_upload_ticket_function)
        self.api.add_routes(
            path='/',
            methods=[apigw.HttpMethod.GET],
            integration=get_upload_ticket_integration)

        download_and_delete_integration = apigw.LambdaProxyIntegration(handler=self.download_and_delete_function)
        self.api.add_routes(
            # BUGFIX: the download handler reads pathParameters.filename,
            # so the route must declare a {filename} path parameter.
            path='/{entry_id}/{filename}',
            methods=[apigw.HttpMethod.GET],
            integration=download_and_delete_integration)

        core.CfnOutput(self, 'api-url', value=self.api.url)
|
||||
|
||||
154
once/utils.py
Normal file
154
once/utils.py
Normal file
@@ -0,0 +1,154 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
import hashlib
|
||||
import logging
|
||||
import shutil
|
||||
import subprocess
|
||||
import zipfile
|
||||
|
||||
from typing import Dict, List, Union
|
||||
from aws_cdk import aws_lambda as _lambda
|
||||
|
||||
|
||||
class MissingPrerequisiteCommand(Exception):
    '''Raised when a required system command is missing.'''
|
||||
|
||||
|
||||
def add_folder_to_zip(zip_obj: zipfile.ZipFile, folder: str,
                      ignore_names: Union[List[str], None] = None,
                      ignore_dotfiles: bool = True):
    '''Recursively add the contents of `folder` to an open ZipFile.

    Archive paths are stored relative to `folder` itself.

    :param zip_obj: an open zipfile.ZipFile in write/append mode.
    :param folder: root directory to walk.
    :param ignore_names: exact file/directory names to skip at any depth.
    :param ignore_dotfiles: when True, skip entries starting with '.'.
    '''
    # BUGFIX: the default used to be a mutable list literal ([]), which
    # is shared across calls; use None as the sentinel instead.
    if ignore_names is None:
        ignore_names = []

    for root, dirs, files in os.walk(folder):
        if ignore_dotfiles:
            # Pruning `dirs` in place stops os.walk from descending.
            dirs[:] = [d for d in dirs if not d.startswith('.')]
            files[:] = [f for f in files if not f.startswith('.')]

        dirs[:] = [d for d in dirs if d not in ignore_names]
        files[:] = [f for f in files if f not in ignore_names]

        logging.debug(f'FILES: {files}, DIRS: {dirs}')

        if root == folder:
            # Top level: files go straight into the archive root, and no
            # directory entry is written for the root itself.
            archive_folder_name = ''
        else:
            archive_folder_name = os.path.relpath(root, folder)
            zip_obj.write(root, arcname=archive_folder_name)

        for filename in files:
            f = os.path.join(root, filename)
            d = os.path.join(archive_folder_name, filename)
            zip_obj.write(f, arcname=d)
|
||||
|
||||
|
||||
def execute_shell_command(command: Union[str, List[str]],
                          env: Union[Dict, None] = None) -> str:
    '''Run `command` through the shell and return its stripped stdout.

    :param command: shell command, either a single string or a list of
        tokens that are joined with spaces before execution.
    :param env: optional environment mapping for the child process.
    :return: the command's stdout, stripped and decoded as UTF-8.
    :raises subprocess.CalledProcessError: on a non-zero exit status
        (because of check=True).
    '''
    cmd_line = command if isinstance(command, str) else ' '.join(command)

    logging.debug(f'Executing command: {cmd_line}')

    proc = subprocess.run(cmd_line,
                          env=env,
                          shell=True,
                          check=True,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)

    logging.debug(proc)
    return proc.stdout.strip().decode('utf-8')
|
||||
|
||||
|
||||
def locate_command(command: str) -> str:
    '''
    Returns the absolute path of ``command``, searching the system PATH.

    :param command: the executable name to look up.
    :returns: the resolved path of the executable.
    :raises MissingPrerequisiteCommand: if the command is not available.
    '''
    # shutil.which returns None on a miss. The previous implementation
    # shelled out to `which`, which exits non-zero when the command is
    # missing and made execute_shell_command raise CalledProcessError
    # before the `is None` check could ever run (dead branch).
    path = shutil.which(command)
    if path is None:
        raise MissingPrerequisiteCommand(f'Unable to find "{command}"')
    return path
|
||||
|
||||
|
||||
def make_python_zip_bundle(input_path: str,
                           python_version: str = '3.7',
                           build_folder: str = '.build',
                           requirements_file: str = 'requirements.txt',
                           output_bundle_name: str = 'bundle.zip') -> _lambda.AssetCode:
    '''
    Builds an lambda AssetCode bundling python dependencies along with the code.

    The bundle is built using docker and the target lambda runtime image.

    :param input_path: folder containing the lambda sources and the
        requirements file.
    :param python_version: python runtime version, used to pick the
        ``lambci/lambda`` build image tag.
    :param build_folder: folder name (relative to ``input_path``) where
        dependencies are installed and the bundle zip is written.
    :param requirements_file: pip requirements file, relative to
        ``input_path``.
    :param output_bundle_name: file name of the generated zip archive.
    :returns: an AssetCode pointing at the generated bundle, keyed by a
        checksum of the source folder.
    '''

    build_path = os.path.abspath(os.path.join(input_path, build_folder))
    asset_path = os.path.join(build_path, output_bundle_name)

    # checks if it's required to build a new zip file
    # NOTE(review): get_folder_latest_mtime returns None for a folder
    # containing no files; that would make this `<` comparison raise
    # TypeError when the bundle already exists — confirm callers always
    # pass a non-empty source folder
    if not os.path.exists(asset_path) or os.path.getmtime(asset_path) < get_folder_latest_mtime(input_path):
        # resolved path is unused below; the call's purpose is to fail
        # early if docker is not installed
        docker = locate_command('docker')
        lambda_runtime_docker_image = f'lambci/lambda:build-python{python_version}'

        # cleans the target folder
        logging.debug(f'Cleaning folder: {build_path}')
        shutil.rmtree(build_path, ignore_errors=True)

        # builds requirements using target runtime
        # (assumes `pip install -t` recreates the just-deleted build
        # folder inside the mounted /app volume — TODO confirm)
        build_log = execute_shell_command(command=[
            'docker', 'run', '--rm',
            '-v', f'{input_path}:/app',
            '-w', '/app',
            lambda_runtime_docker_image,
            'pip', 'install',
            '-r', requirements_file,
            '-t', build_folder])

        logging.info(build_log)

        # creates the zip archive
        logging.debug(f'Deleting file: {asset_path}')
        # NOTE(review): rmtree on a regular file errors, but
        # ignore_errors=True suppresses it; the earlier rmtree of
        # build_path already removed any previous bundle anyway
        shutil.rmtree(asset_path, ignore_errors=True)

        logging.debug(f'Creating bundle: {asset_path}')
        with zipfile.ZipFile(asset_path, 'w', zipfile.ZIP_DEFLATED) as zip_obj:
            # sources first, then installed dependencies; the bundle
            # itself and python caches are excluded from both walks
            add_folder_to_zip(zip_obj, input_path, ignore_names=[output_bundle_name, '__pycache__'])
            add_folder_to_zip(zip_obj, build_path, ignore_names=[output_bundle_name, '__pycache__'], ignore_dotfiles=False)

        logging.info(f'Lambda bundle created at {asset_path}')

    # presumably the explicit source_hash ties CDK asset identity to the
    # source files rather than the rebuilt zip's metadata — verify
    # against aws_cdk AssetCode.from_asset semantics
    source_hash = get_folder_checksum(input_path)
    logging.debug(f'Source folder hash {input_path} -> {source_hash}')
    return _lambda.AssetCode.from_asset(asset_path, source_hash=source_hash)
|
||||
|
||||
|
||||
def get_folder_checksum(path: str, ignore_dotfiles: bool = True,
                        chunk_size: int = 4096,
                        digest_method=hashlib.md5) -> str:
    '''
    Computes a deterministic checksum of all file contents under ``path``.

    Within each directory files are hashed in sorted name order, and
    directories are visited in sorted order, so the result is stable
    across runs and filesystems.

    :param path: root folder to hash.
    :param ignore_dotfiles: when True, dot-files and dot-folders are
        skipped (the previous version accepted this flag but always
        filtered regardless of its value).
    :param chunk_size: read size, in bytes, used while hashing each file.
    :param digest_method: hashlib hash constructor (e.g. ``hashlib.md5``).
    :returns: hex digest of the concatenated per-file digests.
    '''
    def _hash_file(filename: str) -> bytes:
        # stream the file in chunks so large files are not slurped whole
        with open(filename, mode='rb', buffering=0) as fp:
            hash_func = digest_method()
            buffer = fp.read(chunk_size)
            while len(buffer) > 0:
                hash_func.update(buffer)
                buffer = fp.read(chunk_size)
            return hash_func.digest()

    folder_hash = b''
    for root, dirs, files in os.walk(path):
        if ignore_dotfiles:
            files = [f for f in files if not f.startswith('.')]
            dirs[:] = [d for d in dirs if not d.startswith('.')]

        # sort in place so os.walk visits subdirectories in a
        # platform-independent order, keeping the checksum deterministic
        dirs.sort()

        for file_name in sorted(files):
            folder_hash += _hash_file(os.path.join(root, file_name))

    return digest_method(folder_hash).hexdigest()
|
||||
|
||||
|
||||
def get_folder_latest_mtime(path: str, ignore_dotfiles: bool = True) -> float:
    '''
    Returns the most recent modification time of any file under ``path``.

    :param path: root folder to scan.
    :param ignore_dotfiles: when True, dot-files and dot-folders are
        skipped.
    :returns: the latest mtime as a POSIX timestamp, or ``0.0`` when the
        folder contains no files. (The previous version returned None in
        that case, which made numeric comparisons at the call site raise
        TypeError.)
    '''
    latest_mtime = 0.0
    for root, dirs, files in os.walk(path):
        if ignore_dotfiles:
            files = [f for f in files if not f.startswith('.')]
            dirs[:] = [d for d in dirs if not d.startswith('.')]

        for file_name in files:
            file_mtime = os.path.getmtime(os.path.join(root, file_name))
            latest_mtime = max(latest_mtime, file_mtime)

    return latest_mtime
|
||||
@@ -1 +1,6 @@
|
||||
-e .
|
||||
aws_cdk.core
|
||||
aws_cdk.aws_apigatewayv2
|
||||
aws_cdk.aws_dynamodb
|
||||
aws_cdk.aws_lambda
|
||||
aws_cdk.aws_s3
|
||||
|
||||
4
setup.py
4
setup.py
@@ -9,7 +9,7 @@ setuptools.setup(
|
||||
name="once",
|
||||
version="0.0.1",
|
||||
|
||||
description="An empty CDK Python app",
|
||||
description="A one-time file sharing personal service",
|
||||
long_description=long_description,
|
||||
long_description_content_type="text/markdown",
|
||||
|
||||
@@ -19,7 +19,7 @@ setuptools.setup(
|
||||
packages=setuptools.find_packages(where="once"),
|
||||
|
||||
install_requires=[
|
||||
"aws-cdk.core==1.43.0",
|
||||
"aws-cdk.core==1.44.0",
|
||||
],
|
||||
|
||||
python_requires=">=3.6",
|
||||
|
||||
Reference in New Issue
Block a user